summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rwxr-xr-xbin/egencache6
-rwxr-xr-xbin/emaint4
-rwxr-xr-xbin/glsa-check2
-rwxr-xr-xbin/portageq4
-rwxr-xr-xbin/quickpkg2
-rwxr-xr-xbin/repoman18
-rw-r--r--pym/_emerge/BinpkgFetcher.py2
-rw-r--r--pym/_emerge/BlockerCache.py2
-rw-r--r--pym/_emerge/EbuildMetadataPhase.py2
-rw-r--r--pym/_emerge/JobStatusDisplay.py4
-rw-r--r--pym/_emerge/MetadataRegen.py6
-rw-r--r--pym/_emerge/Package.py2
-rw-r--r--pym/_emerge/PackageVirtualDbapi.py4
-rw-r--r--pym/_emerge/PipeReader.py6
-rw-r--r--pym/_emerge/PollSelectAdapter.py2
-rw-r--r--pym/_emerge/RepoDisplay.py6
-rw-r--r--pym/_emerge/RootConfig.py2
-rw-r--r--pym/_emerge/Scheduler.py6
-rw-r--r--pym/_emerge/SpawnProcess.py2
-rw-r--r--pym/_emerge/SubProcess.py2
-rw-r--r--pym/_emerge/actions.py26
-rw-r--r--pym/_emerge/clear_caches.py2
-rw-r--r--pym/_emerge/depgraph.py30
-rw-r--r--pym/_emerge/main.py22
-rw-r--r--pym/_emerge/unmerge.py6
-rw-r--r--pym/portage/__init__.py100
-rw-r--r--pym/portage/cache/anydbm.py4
-rw-r--r--pym/portage/cache/ebuild_xattr.py2
-rw-r--r--pym/portage/cache/mappings.py34
-rw-r--r--pym/portage/cache/sqlite.py2
-rw-r--r--pym/portage/cache/template.py16
-rw-r--r--pym/portage/checksum.py2
-rw-r--r--pym/portage/cvstree.py4
-rw-r--r--pym/portage/dbapi/bintree.py6
-rw-r--r--pym/portage/dbapi/porttree.py14
-rw-r--r--pym/portage/dbapi/vartree.py34
-rw-r--r--pym/portage/dbapi/virtual.py2
-rw-r--r--pym/portage/debug.py2
-rw-r--r--pym/portage/dep.py6
-rw-r--r--pym/portage/dispatch_conf.py2
-rw-r--r--pym/portage/eclass_cache.py2
-rw-r--r--pym/portage/elog/__init__.py4
-rw-r--r--pym/portage/elog/mod_mail_summary.py4
-rw-r--r--pym/portage/getbinpkg.py8
-rw-r--r--pym/portage/manifest.py10
-rw-r--r--pym/portage/news.py4
-rw-r--r--pym/portage/output.py4
-rw-r--r--pym/portage/process.py2
-rw-r--r--pym/portage/proxy/lazyimport.py2
-rw-r--r--pym/portage/sets/base.py2
-rw-r--r--pym/portage/sets/files.py8
-rw-r--r--pym/portage/sets/libs.py4
-rw-r--r--pym/portage/tests/env/config/test_PackageKeywordsFile.py2
-rw-r--r--pym/portage/tests/env/config/test_PackageUseFile.py2
-rw-r--r--pym/portage/tests/env/config/test_PortageModulesFile.py2
-rw-r--r--pym/portage/update.py6
-rw-r--r--pym/portage/util.py8
-rw-r--r--pym/portage/xpak.py4
-rw-r--r--pym/repoman/utilities.py2
59 files changed, 240 insertions(+), 240 deletions(-)
diff --git a/bin/egencache b/bin/egencache
index 3af060ef8..5c59e6dd0 100755
--- a/bin/egencache
+++ b/bin/egencache
@@ -168,7 +168,7 @@ class GenCache(object):
# mtime on the ebuild (and the corresponding cache entry).
# See bug #139134.
max_mtime = sc.mtime
- for ec, (loc, ec_mtime) in metadata['_eclasses_'].iteritems():
+ for ec, (loc, ec_mtime) in metadata['_eclasses_'].items():
if max_mtime < ec_mtime:
max_mtime = ec_mtime
if max_mtime == sc.mtime:
@@ -201,7 +201,7 @@ class GenCache(object):
dead_nodes = set()
if self._global_cleanse:
try:
- for cpv in trg_cache.iterkeys():
+ for cpv in trg_cache.keys():
cp = cpv_getkey(cpv)
if cp is None:
self.returncode |= 1
@@ -221,7 +221,7 @@ class GenCache(object):
else:
cp_set = self._cp_set
try:
- for cpv in trg_cache.iterkeys():
+ for cpv in trg_cache.keys():
cp = cpv_getkey(cpv)
if cp is None:
self.returncode |= 1
diff --git a/bin/emaint b/bin/emaint
index 407f8a66d..611c323da 100755
--- a/bin/emaint
+++ b/bin/emaint
@@ -209,7 +209,7 @@ class BinhostHandler(object):
bintree.populated = False
del pkgindex.packages[:]
- pkgindex.packages.extend(metadata.itervalues())
+ pkgindex.packages.extend(metadata.values())
from portage.util import atomic_ofstream
f = atomic_ofstream(self._pkgindex_file)
try:
@@ -475,7 +475,7 @@ def emaint_main(myargv):
"cleanresume":CleanResume
}
- module_names = modules.keys()
+ module_names = list(modules.keys())
module_names.sort()
module_names.insert(0, "all")
diff --git a/bin/glsa-check b/bin/glsa-check
index e9c28f9e6..787cf39db 100755
--- a/bin/glsa-check
+++ b/bin/glsa-check
@@ -172,7 +172,7 @@ def summarylist(myglsalist, fd1=sys.stdout, fd2=sys.stderr):
fd1.write(color(myglsa.nr) + " " + color(status) + " " + color(access) + myglsa.title + " (")
if not verbose:
- for pkg in myglsa.packages.keys()[:3]:
+ for pkg in list(myglsa.packages.keys())[:3]:
fd1.write(" " + pkg + " ")
if len(myglsa.packages) > 3:
fd1.write("... ")
diff --git a/bin/portageq b/bin/portageq
index 34c4fb401..cea14231f 100755
--- a/bin/portageq
+++ b/bin/portageq
@@ -222,7 +222,7 @@ def owners(argv):
owners = vardb._owners.get_owners(files)
msg = []
- for pkg, owned_files in owners.iteritems():
+ for pkg, owned_files in owners.items():
cpv = pkg.mycpv
msg.append("%s\n" % cpv)
for f in sorted(owned_files):
@@ -560,7 +560,7 @@ def usage(argv):
#
non_commands = frozenset(['exithandler', 'main',
'usage', 'writemsg', 'writemsg_stdout'])
- commands = sorted(k for k, v in globals().iteritems() \
+ commands = sorted(k for k, v in globals().items() \
if type(v) is types.FunctionType and k not in non_commands)
for name in commands:
diff --git a/bin/quickpkg b/bin/quickpkg
index fd1728570..383fe20fb 100755
--- a/bin/quickpkg
+++ b/bin/quickpkg
@@ -124,7 +124,7 @@ def quickpkg_main(options, args, eout):
required_metadata["CATEGORY"] = category
required_metadata["PF"] = pf
update_metadata = {}
- for k, v in required_metadata.iteritems():
+ for k, v in required_metadata.items():
if v != existing_metadata[k]:
update_metadata[k] = v
if update_metadata:
diff --git a/bin/repoman b/bin/repoman
index 13c126e30..950226206 100755
--- a/bin/repoman
+++ b/bin/repoman
@@ -153,7 +153,7 @@ def ParseArgs(args, qahelp):
'scan' : 'Scan directory tree for QA issues'
}
- mode_keys = modes.keys()
+ mode_keys = list(modes.keys())
mode_keys.sort()
parser = RepomanOptionParser(formatter=RepomanHelpFormatter(), usage="%prog [options] [mode]")
@@ -198,7 +198,7 @@ def ParseArgs(args, qahelp):
parser.add_option('--without-mask', dest='without_mask', action='store_true',
default=False, help='behave as if no package.mask entries exist (not allowed with commit mode)')
- parser.add_option('--mode', type='choice', dest='mode', choices=modes.keys(),
+ parser.add_option('--mode', type='choice', dest='mode', choices=list(modes.keys()),
help='specify which mode repoman will run in (default=full)')
parser.on_tail("\n " + green("Modes".ljust(20) + " Description\n"))
@@ -208,7 +208,7 @@ def ParseArgs(args, qahelp):
parser.on_tail("\n " + green("QA keyword".ljust(20) + " Description\n"))
- sorted_qa = qahelp.keys()
+ sorted_qa = list(qahelp.keys())
sorted_qa.sort()
for k in sorted_qa:
parser.on_tail(" %s %s\n" % (k.ljust(20), qahelp[k]))
@@ -330,7 +330,7 @@ qahelp={
"upstream.workaround":"The ebuild works around an upstream bug, an upstream bug should be filed and tracked in bugs.gentoo.org"
}
-qacats = qahelp.keys()
+qacats = list(qahelp.keys())
qacats.sort()
qawarnings = set((
@@ -735,7 +735,7 @@ def dev_keywords(profiles):
want to add the --include-dev option.
"""
type_arch_map = {}
- for arch, arch_profiles in profiles.iteritems():
+ for arch, arch_profiles in profiles.items():
for prof in arch_profiles:
arch_set = type_arch_map.get(prof.status)
if arch_set is None:
@@ -872,7 +872,7 @@ check_ebuild_notadded = not \
(vcs == "svn" and repolevel < 3 and options.mode != "commit")
# Build a regex from thirdpartymirrors for the SRC_URI.mirror check.
-thirdpartymirrors = portage.flatten(repoman_settings.thirdpartymirrors().values())
+thirdpartymirrors = portage.flatten(list(repoman_settings.thirdpartymirrors().values()))
for x in scanlist:
#ebuilds and digests added to cvs respectively.
@@ -900,7 +900,7 @@ for x in scanlist:
fetchlist_dict=fetchlist_dict)
mf.create(requiredDistfiles=None,
assumeDistHashesAlways=True)
- for distfiles in fetchlist_dict.itervalues():
+ for distfiles in fetchlist_dict.values():
for distfile in distfiles:
if os.path.isfile(os.path.join(distdir, distfile)):
mf.fhashdict['DIST'].pop(distfile, None)
@@ -1242,7 +1242,7 @@ for x in scanlist:
pkg = pkgs[y]
if pkg.invalid:
- for k, msgs in pkg.invalid.iteritems():
+ for k, msgs in pkg.invalid.items():
for msg in msgs:
stats[k] = stats[k] + 1
fails[k].append("%s %s" % (relative_path, msg))
@@ -1253,7 +1253,7 @@ for x in scanlist:
inherited = pkg.inherited
live_ebuild = live_eclasses.intersection(inherited)
- for k, v in myaux.iteritems():
+ for k, v in myaux.items():
if not isinstance(v, basestring):
continue
m = non_ascii_re.search(v)
diff --git a/pym/_emerge/BinpkgFetcher.py b/pym/_emerge/BinpkgFetcher.py
index 1f244cb1a..576d00a60 100644
--- a/pym/_emerge/BinpkgFetcher.py
+++ b/pym/_emerge/BinpkgFetcher.py
@@ -78,7 +78,7 @@ class BinpkgFetcher(SpawnProcess):
"FILE" : os.path.basename(pkg_path)
}
- fetch_env = dict(settings.iteritems())
+ fetch_env = dict(settings.items())
fetch_args = [portage.util.varexpand(x, mydict=fcmd_vars) \
for x in portage.util.shlex_split(fcmd)]
diff --git a/pym/_emerge/BlockerCache.py b/pym/_emerge/BlockerCache.py
index c4270242f..7c6be8ada 100644
--- a/pym/_emerge/BlockerCache.py
+++ b/pym/_emerge/BlockerCache.py
@@ -71,7 +71,7 @@ class BlockerCache(portage.cache.mappings.MutableMapping):
# Validate all the atoms and counters so that
# corruption is detected as soon as possible.
invalid_items = set()
- for k, v in self._cache_data["blockers"].iteritems():
+ for k, v in self._cache_data["blockers"].items():
if not isinstance(k, basestring):
invalid_items.add(k)
continue
diff --git a/pym/_emerge/EbuildMetadataPhase.py b/pym/_emerge/EbuildMetadataPhase.py
index 1231b1b11..2ff0b64db 100644
--- a/pym/_emerge/EbuildMetadataPhase.py
+++ b/pym/_emerge/EbuildMetadataPhase.py
@@ -70,7 +70,7 @@ class EbuildMetadataPhase(SubProcess):
fd_pipes.setdefault(2, sys.stderr.fileno())
# flush any pending output
- for fd in fd_pipes.itervalues():
+ for fd in fd_pipes.values():
if fd == sys.stdout.fileno():
sys.stdout.flush()
if fd == sys.stderr.fileno():
diff --git a/pym/_emerge/JobStatusDisplay.py b/pym/_emerge/JobStatusDisplay.py
index fbda727ee..1c80c5ffa 100644
--- a/pym/_emerge/JobStatusDisplay.py
+++ b/pym/_emerge/JobStatusDisplay.py
@@ -60,7 +60,7 @@ class JobStatusDisplay(object):
object.__setattr__(self, "_isatty", isatty)
if not isatty or not self._init_term():
term_codes = {}
- for k, capname in self._termcap_name_map.iteritems():
+ for k, capname in self._termcap_name_map.items():
term_codes[k] = self._default_term_codes[capname]
object.__setattr__(self, "_term_codes", term_codes)
encoding = sys.getdefaultencoding()
@@ -106,7 +106,7 @@ class JobStatusDisplay(object):
return False
term_codes = {}
- for k, capname in self._termcap_name_map.iteritems():
+ for k, capname in self._termcap_name_map.items():
code = tigetstr(capname)
if code is None:
code = self._default_term_codes[capname]
diff --git a/pym/_emerge/MetadataRegen.py b/pym/_emerge/MetadataRegen.py
index 4c237d34c..88b157fd0 100644
--- a/pym/_emerge/MetadataRegen.py
+++ b/pym/_emerge/MetadataRegen.py
@@ -89,7 +89,7 @@ class MetadataRegen(PollScheduler):
if self._global_cleanse:
for mytree in portdb.porttrees:
try:
- dead_nodes[mytree] = set(portdb.auxdb[mytree].iterkeys())
+ dead_nodes[mytree] = set(portdb.auxdb[mytree].keys())
except CacheError as e:
portage.writemsg("Error listing cache entries for " + \
"'%s': %s, continuing...\n" % (mytree, e),
@@ -103,7 +103,7 @@ class MetadataRegen(PollScheduler):
for mytree in portdb.porttrees:
try:
dead_nodes[mytree] = set(cpv for cpv in \
- portdb.auxdb[mytree].iterkeys() \
+ portdb.auxdb[mytree].keys() \
if cpv_getkey(cpv) in cp_set)
except CacheError as e:
portage.writemsg("Error listing cache entries for " + \
@@ -119,7 +119,7 @@ class MetadataRegen(PollScheduler):
if portdb.findname2(y, mytree=mytree)[0]:
dead_nodes[mytree].discard(y)
- for mytree, nodes in dead_nodes.iteritems():
+ for mytree, nodes in dead_nodes.items():
auxdb = portdb.auxdb[mytree]
for y in nodes:
try:
diff --git a/pym/_emerge/Package.py b/pym/_emerge/Package.py
index 0ea119ef8..15471d1b2 100644
--- a/pym/_emerge/Package.py
+++ b/pym/_emerge/Package.py
@@ -171,7 +171,7 @@ class _PackageMetadataWrapper(dict):
# USE is lazy, but we want it to show up in self.keys().
self['USE'] = ''
self.update(metadata)
- for k, v in self.iteritems():
+ for k, v in self.items():
if k == 'INHERITED':
if isinstance(v, basestring):
v = frozenset(v.split())
diff --git a/pym/_emerge/PackageVirtualDbapi.py b/pym/_emerge/PackageVirtualDbapi.py
index f041e1d8c..adf269116 100644
--- a/pym/_emerge/PackageVirtualDbapi.py
+++ b/pym/_emerge/PackageVirtualDbapi.py
@@ -32,13 +32,13 @@ class PackageVirtualDbapi(portage.dbapi):
obj = PackageVirtualDbapi(self.settings)
obj._match_cache = self._match_cache.copy()
obj._cp_map = self._cp_map.copy()
- for k, v in obj._cp_map.iteritems():
+ for k, v in obj._cp_map.items():
obj._cp_map[k] = v[:]
obj._cpv_map = self._cpv_map.copy()
return obj
def __iter__(self):
- return self._cpv_map.itervalues()
+ return iter(self._cpv_map.values())
def __contains__(self, item):
existing = self._cpv_map.get(item.cpv)
diff --git a/pym/_emerge/PipeReader.py b/pym/_emerge/PipeReader.py
index 5b64306e7..369918de3 100644
--- a/pym/_emerge/PipeReader.py
+++ b/pym/_emerge/PipeReader.py
@@ -24,7 +24,7 @@ class PipeReader(AbstractPollTask):
def _start(self):
self._reg_ids = set()
self._read_data = []
- for k, f in self.input_files.iteritems():
+ for k, f in self.input_files.items():
fcntl.fcntl(f.fileno(), fcntl.F_SETFL,
fcntl.fcntl(f.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK)
self._reg_ids.add(self.scheduler.register(f.fileno(),
@@ -65,7 +65,7 @@ class PipeReader(AbstractPollTask):
if event & PollConstants.POLLIN:
- for f in self.input_files.itervalues():
+ for f in self.input_files.values():
if fd == f.fileno():
break
@@ -97,7 +97,7 @@ class PipeReader(AbstractPollTask):
self._reg_ids = None
if self.input_files is not None:
- for f in self.input_files.itervalues():
+ for f in self.input_files.values():
f.close()
self.input_files = None
diff --git a/pym/_emerge/PollSelectAdapter.py b/pym/_emerge/PollSelectAdapter.py
index 64f17094e..7b7d3d95a 100644
--- a/pym/_emerge/PollSelectAdapter.py
+++ b/pym/_emerge/PollSelectAdapter.py
@@ -48,7 +48,7 @@ class PollSelectAdapter(PollConstants):
select_args = self._select_args
if select_args is None:
- select_args = [self._registered.keys(), [], []]
+ select_args = [list(self._registered.keys()), [], []]
if timeout is not None:
select_args = select_args[:]
diff --git a/pym/_emerge/RepoDisplay.py b/pym/_emerge/RepoDisplay.py
index e98be93c4..67bf62672 100644
--- a/pym/_emerge/RepoDisplay.py
+++ b/pym/_emerge/RepoDisplay.py
@@ -10,7 +10,7 @@ class RepoDisplay(object):
self._shown_repos = {}
self._unknown_repo = False
repo_paths = set()
- for root_config in roots.itervalues():
+ for root_config in roots.values():
portdir = root_config.settings.get("PORTDIR")
if portdir:
repo_paths.add(portdir)
@@ -23,7 +23,7 @@ class RepoDisplay(object):
for repo_path in repo_paths ]
# pre-allocate index for PORTDIR so that it always has index 0.
- for root_config in roots.itervalues():
+ for root_config in roots.values():
portdb = root_config.trees["porttree"].dbapi
portdir = portdb.porttree_root
if portdir:
@@ -58,7 +58,7 @@ class RepoDisplay(object):
if shown_repos or self._unknown_repo:
output.append("Portage tree and overlays:\n")
show_repo_paths = list(shown_repos)
- for repo_path, repo_index in shown_repos.iteritems():
+ for repo_path, repo_index in shown_repos.items():
show_repo_paths[repo_index] = repo_path
if show_repo_paths:
for index, repo_path in enumerate(show_repo_paths):
diff --git a/pym/_emerge/RootConfig.py b/pym/_emerge/RootConfig.py
index 48af99120..70e933482 100644
--- a/pym/_emerge/RootConfig.py
+++ b/pym/_emerge/RootConfig.py
@@ -15,7 +15,7 @@ class RootConfig(object):
}
tree_pkg_map = {}
- for k, v in pkg_tree_map.iteritems():
+ for k, v in pkg_tree_map.items():
tree_pkg_map[v] = k
def __init__(self, settings, trees, setconfig):
diff --git a/pym/_emerge/Scheduler.py b/pym/_emerge/Scheduler.py
index 83ffd4b9f..78b9e57d8 100644
--- a/pym/_emerge/Scheduler.py
+++ b/pym/_emerge/Scheduler.py
@@ -561,7 +561,7 @@ class Scheduler(PollScheduler):
digest = '--digest' in self.myopts
if not digest:
- for pkgsettings in self.pkgsettings.itervalues():
+ for pkgsettings in self.pkgsettings.values():
if 'digest' in pkgsettings.features:
digest = True
break
@@ -604,7 +604,7 @@ class Scheduler(PollScheduler):
shown_verifying_msg = False
quiet_settings = {}
- for myroot, pkgsettings in self.pkgsettings.iteritems():
+ for myroot, pkgsettings in self.pkgsettings.items():
quiet_config = portage.config(clone=pkgsettings)
quiet_config["PORTAGE_QUIET"] = "1"
quiet_config.backup_changes("PORTAGE_QUIET")
@@ -759,7 +759,7 @@ class Scheduler(PollScheduler):
# any of bad_resume_opts from leaking in
# via EMERGE_DEFAULT_OPTS.
resume_opts["--ignore-default-opts"] = True
- for myopt, myarg in resume_opts.iteritems():
+ for myopt, myarg in resume_opts.items():
if myopt not in bad_resume_opts:
if myarg is True:
mynewargv.append(myopt)
diff --git a/pym/_emerge/SpawnProcess.py b/pym/_emerge/SpawnProcess.py
index 4a99b50c7..1486e6773 100644
--- a/pym/_emerge/SpawnProcess.py
+++ b/pym/_emerge/SpawnProcess.py
@@ -43,7 +43,7 @@ class SpawnProcess(SubProcess):
fd_pipes.setdefault(2, sys.stderr.fileno())
# flush any pending output
- for fd in fd_pipes.itervalues():
+ for fd in fd_pipes.values():
if fd == sys.stdout.fileno():
sys.stdout.flush()
if fd == sys.stderr.fileno():
diff --git a/pym/_emerge/SubProcess.py b/pym/_emerge/SubProcess.py
index 0df94b9e7..5889f556f 100644
--- a/pym/_emerge/SubProcess.py
+++ b/pym/_emerge/SubProcess.py
@@ -91,7 +91,7 @@ class SubProcess(AbstractPollTask):
self._reg_id = None
if self._files is not None:
- for f in self._files.itervalues():
+ for f in self._files.values():
f.close()
self._files = None
diff --git a/pym/_emerge/actions.py b/pym/_emerge/actions.py
index 5c351a092..0bdda1fc5 100644
--- a/pym/_emerge/actions.py
+++ b/pym/_emerge/actions.py
@@ -744,7 +744,7 @@ def calc_depclean(settings, trees, ldpath_mtimes,
continue
set_args = {}
- for s, package_set in required_sets.iteritems():
+ for s, package_set in required_sets.items():
set_atom = SETPREFIX + s
set_arg = SetArg(arg=set_atom, set=package_set,
root_config=resolver._frozen_config.roots[myroot])
@@ -927,7 +927,7 @@ def calc_depclean(settings, trees, ldpath_mtimes,
if not consumers:
continue
- for lib, lib_consumers in consumers.items():
+ for lib, lib_consumers in list(consumers.items()):
for consumer_file in list(lib_consumers):
if pkg_dblink.isowner(consumer_file, myroot):
lib_consumers.remove(consumer_file)
@@ -937,7 +937,7 @@ def calc_depclean(settings, trees, ldpath_mtimes,
if not consumers:
continue
- for lib, lib_consumers in consumers.iteritems():
+ for lib, lib_consumers in consumers.items():
soname = soname_cache.get(lib)
if soname is None:
@@ -963,8 +963,8 @@ def calc_depclean(settings, trees, ldpath_mtimes,
if consumer_map:
search_files = set()
- for consumers in consumer_map.itervalues():
- for lib, consumer_providers in consumers.iteritems():
+ for consumers in consumer_map.values():
+ for lib, consumer_providers in consumers.items():
for lib_consumer, providers in consumer_providers:
search_files.add(lib_consumer)
search_files.update(providers)
@@ -972,8 +972,8 @@ def calc_depclean(settings, trees, ldpath_mtimes,
writemsg_level(">>> Assigning files to packages...\n")
file_owners = real_vardb._owners.getFileOwnerMap(search_files)
- for pkg, consumers in consumer_map.items():
- for lib, consumer_providers in consumers.items():
+ for pkg, consumers in list(consumer_map.items()):
+ for lib, consumer_providers in list(consumers.items()):
lib_consumers = set()
for lib_consumer, providers in consumer_providers:
@@ -1044,7 +1044,7 @@ def calc_depclean(settings, trees, ldpath_mtimes,
# and also add any dependencies pulled in by the provider.
writemsg_level(">>> Adding lib providers to graph...\n")
- for pkg, consumers in consumer_map.iteritems():
+ for pkg, consumers in consumer_map.items():
for consumer_dblink in set(chain(*consumers.values())):
consumer_pkg = vardb.get(("installed", myroot,
consumer_dblink.mycpv, "nomerge"))
@@ -1287,7 +1287,7 @@ def action_info(settings, trees, myopts, myfiles):
libtool_vers = ",".join(trees["/"]["vartree"].dbapi.match("sys-devel/libtool"))
if "--verbose" in myopts:
- myvars=settings.keys()
+ myvars=list(settings.keys())
else:
myvars = ['GENTOO_MIRRORS', 'CONFIG_PROTECT', 'CONFIG_PROTECT_MASK',
'PORTDIR', 'DISTDIR', 'PKGDIR', 'PORTAGE_TMPDIR',
@@ -1647,7 +1647,7 @@ def action_metadata(settings, portdb, myopts, porttrees=None):
for tree_data in porttrees_data:
try:
- dead_nodes = set(tree_data.dest_db.iterkeys())
+ dead_nodes = set(tree_data.dest_db.keys())
except CacheError as e:
writemsg_level("Error listing cache entries for " + \
"'%s': %s, continuing...\n" % (tree_data.path, e),
@@ -2553,7 +2553,7 @@ def git_sync_timestamps(settings, portdir):
continue
inconsistent = False
- for ec, (ec_path, ec_mtime) in ec_mtimes.iteritems():
+ for ec, (ec_path, ec_mtime) in ec_mtimes.items():
updated_mtime = updated_ec_mtimes.get(ec)
if updated_mtime is not None and updated_mtime != ec_mtime:
writemsg_level("!!! Inconsistent eclass mtime: %s %s\n" % \
@@ -2567,7 +2567,7 @@ def git_sync_timestamps(settings, portdir):
if current_eb_mtime != eb_mtime:
os.utime(eb_path, (eb_mtime, eb_mtime))
- for ec, (ec_path, ec_mtime) in ec_mtimes.iteritems():
+ for ec, (ec_path, ec_mtime) in ec_mtimes.items():
if ec in updated_ec_mtimes:
continue
ec_path = os.path.join(ec_dir, ec + ".eclass")
@@ -2586,7 +2586,7 @@ def load_emerge_config(trees=None):
kwargs[k] = v
trees = portage.create_trees(trees=trees, **kwargs)
- for root, root_trees in trees.iteritems():
+ for root, root_trees in trees.items():
settings = root_trees["vartree"].settings
setconfig = load_default_config(settings, root_trees)
root_trees["root_config"] = RootConfig(settings, root_trees, setconfig)
diff --git a/pym/_emerge/clear_caches.py b/pym/_emerge/clear_caches.py
index 1fd667f36..83ab77eb4 100644
--- a/pym/_emerge/clear_caches.py
+++ b/pym/_emerge/clear_caches.py
@@ -6,7 +6,7 @@ import gc
import portage
def clear_caches(trees):
- for d in trees.itervalues():
+ for d in trees.values():
d["porttree"].dbapi.melt()
d["porttree"].dbapi._aux_cache.clear()
d["bintree"].dbapi._aux_cache.clear()
diff --git a/pym/_emerge/depgraph.py b/pym/_emerge/depgraph.py
index 44bb7b45f..ed2e88af4 100644
--- a/pym/_emerge/depgraph.py
+++ b/pym/_emerge/depgraph.py
@@ -163,7 +163,7 @@ class _dynamic_depgraph_config(object):
runtime_pkg_mask = {}
else:
runtime_pkg_mask = dict((k, v.copy()) for (k, v) in \
- runtime_pkg_mask.iteritems())
+ runtime_pkg_mask.items())
self._runtime_pkg_mask = runtime_pkg_mask
self._need_restart = False
@@ -281,7 +281,7 @@ class depgraph(object):
# missed update from each SLOT.
missed_updates = {}
for pkg, mask_reasons in \
- self._dynamic_config._runtime_pkg_mask.iteritems():
+ self._dynamic_config._runtime_pkg_mask.items():
if pkg.installed:
# Exclude installed here since we only
# want to show available updates.
@@ -291,7 +291,7 @@ class depgraph(object):
other_pkg, mask_type, parent_atoms = missed_updates[k]
if other_pkg > pkg:
continue
- for mask_type, parent_atoms in mask_reasons.iteritems():
+ for mask_type, parent_atoms in mask_reasons.items():
if not parent_atoms:
continue
missed_updates[k] = (pkg, mask_type, parent_atoms)
@@ -301,7 +301,7 @@ class depgraph(object):
return
missed_update_types = {}
- for pkg, mask_type, parent_atoms in missed_updates.itervalues():
+ for pkg, mask_type, parent_atoms in missed_updates.values():
missed_update_types.setdefault(mask_type,
[]).append((pkg, parent_atoms))
@@ -395,7 +395,7 @@ class depgraph(object):
explanation_columns = 70
explanations = 0
for (slot_atom, root), slot_nodes \
- in self._dynamic_config._slot_collision_info.iteritems():
+ in self._dynamic_config._slot_collision_info.items():
msg.append(str(slot_atom))
if root != '/':
msg.append(" for %s" % (root,))
@@ -599,7 +599,7 @@ class depgraph(object):
packages that have been pulled into a given slot.
"""
for (slot_atom, root), slot_nodes \
- in self._dynamic_config._slot_collision_info.iteritems():
+ in self._dynamic_config._slot_collision_info.items():
all_parent_atoms = set()
for pkg in slot_nodes:
@@ -1171,7 +1171,7 @@ class depgraph(object):
# by dep_zapdeps. We preserve actual parent/child relationships
# here in order to avoid distorting the dependency graph like
# <=portage-2.1.6.x did.
- for virt_pkg, atoms in selected_atoms.iteritems():
+ for virt_pkg, atoms in selected_atoms.items():
# Just assume depth + 1 here for now, though it's not entirely
# accurate since multilple levels of indirect virtual deps may
@@ -1744,7 +1744,7 @@ class depgraph(object):
args_set.add(atom)
self._dynamic_config._set_atoms.clear()
- self._dynamic_config._set_atoms.update(chain(*self._dynamic_config._sets.itervalues()))
+ self._dynamic_config._set_atoms.update(chain(*self._dynamic_config._sets.values()))
atom_arg_map = self._dynamic_config._atom_arg_map
atom_arg_map.clear()
for arg in args:
@@ -1760,7 +1760,7 @@ class depgraph(object):
# Invalidate the package selection cache, since
# arguments influence package selections.
self._dynamic_config._highest_pkg_cache.clear()
- for trees in self._dynamic_config._filtered_trees.itervalues():
+ for trees in self._dynamic_config._filtered_trees.values():
trees["porttree"].dbapi._clear_cache()
def _greedy_slots(self, root_config, atom, blocker_lookahead=False):
@@ -1809,7 +1809,7 @@ class depgraph(object):
except portage.exception.InvalidDependString:
continue
blocker_atoms = []
- for atoms in selected_atoms.itervalues():
+ for atoms in selected_atoms.values():
blocker_atoms.extend(x for x in atoms if x.blocker)
blockers[pkg] = InternalPackageSet(initial_atoms=blocker_atoms)
@@ -3062,7 +3062,7 @@ class depgraph(object):
(running_root, running_portage.cpv, e), noiselevel=-1)
del e
portage_rdepend = {running_portage : []}
- for atoms in portage_rdepend.itervalues():
+ for atoms in portage_rdepend.values():
runtime_deps.update(atom for atom in atoms \
if not atom.blocker)
@@ -3648,7 +3648,7 @@ class depgraph(object):
# Reduce noise by pruning packages that are only
# pulled in by other conflict packages.
pruned_pkgs = set()
- for pkg, parent_atoms in conflict_pkgs.iteritems():
+ for pkg, parent_atoms in conflict_pkgs.items():
relevant_parent = False
for parent, atom in parent_atoms:
if parent not in conflict_pkgs:
@@ -3665,7 +3665,7 @@ class depgraph(object):
indent = " "
# Max number of parents shown, to avoid flooding the display.
max_parents = 3
- for pkg, parent_atoms in conflict_pkgs.iteritems():
+ for pkg, parent_atoms in conflict_pkgs.items():
pruned_list = set()
@@ -4538,7 +4538,7 @@ class depgraph(object):
"merged because it is listed in\n")
msg.append("package.provided:\n\n")
problems_sets = set()
- for (arg, atom), refs in arg_refs.iteritems():
+ for (arg, atom), refs in arg_refs.items():
ref_string = ""
if refs:
problems_sets.update(refs)
@@ -5243,7 +5243,7 @@ def get_masking_status(pkg, pkgsettings, root_config):
mreasons.append("CHOST: %s" % \
pkg.metadata["CHOST"])
if pkg.invalid:
- for msg_type, msgs in pkg.invalid.iteritems():
+ for msg_type, msgs in pkg.invalid.items():
for msg in msgs:
mreasons.append("invalid: %s" % (msg,))
diff --git a/pym/_emerge/main.py b/pym/_emerge/main.py
index 6ac1b50eb..09522375f 100644
--- a/pym/_emerge/main.py
+++ b/pym/_emerge/main.py
@@ -255,7 +255,7 @@ def display_preserved_libs(vardbapi, myopts):
samefile_map[obj_key] = alt_paths
alt_paths.add(f)
- for alt_paths in samefile_map.itervalues():
+ for alt_paths in samefile_map.values():
alt_paths = sorted(alt_paths)
for p in alt_paths:
print(colorize("WARN", " * ") + " - %s" % (p,))
@@ -422,13 +422,13 @@ def insert_optional_args(args):
continue
match = None
- for k, arg_choices in short_arg_opts.iteritems():
+ for k, arg_choices in short_arg_opts.items():
if k in arg:
match = k
break
if match is None:
- for k, arg_choices in short_arg_opts_n.iteritems():
+ for k, arg_choices in short_arg_opts_n.items():
if k in arg:
match = k
break
@@ -631,14 +631,14 @@ def parse_opts(tmpcmdline, silent=False):
for myopt in options:
parser.add_option(myopt, action="store_true",
dest=myopt.lstrip("--").replace("-", "_"), default=False)
- for shortopt, longopt in shortmapping.iteritems():
+ for shortopt, longopt in shortmapping.items():
parser.add_option("-" + shortopt, action="store_true",
dest=longopt.lstrip("--").replace("-", "_"), default=False)
- for myalias, myopt in longopt_aliases.iteritems():
+ for myalias, myopt in longopt_aliases.items():
parser.add_option(myalias, action="store_true",
dest=myopt.lstrip("--").replace("-", "_"), default=False)
- for myopt, kwargs in argument_options.iteritems():
+ for myopt, kwargs in argument_options.items():
shortopt = kwargs.pop("shortopt", None)
args = [myopt]
if shortopt is not None:
@@ -941,7 +941,7 @@ def expand_set_arguments(myfiles, myaction, root_config):
def repo_name_check(trees):
missing_repo_names = set()
- for root, root_trees in trees.iteritems():
+ for root, root_trees in trees.items():
if "porttree" in root_trees:
portdb = root_trees["porttree"].dbapi
missing_repo_names.update(portdb.porttrees)
@@ -973,7 +973,7 @@ def repo_name_check(trees):
def repo_name_duplicate_check(trees):
ignored_repos = {}
- for root, root_trees in trees.iteritems():
+ for root, root_trees in trees.items():
if 'porttree' in root_trees:
portdb = root_trees['porttree'].dbapi
if portdb.mysettings.get('PORTAGE_REPO_DUPLICATE_WARN') != '0':
@@ -1003,7 +1003,7 @@ def repo_name_duplicate_check(trees):
return bool(ignored_repos)
def config_protect_check(trees):
- for root, root_trees in trees.iteritems():
+ for root, root_trees in trees.items():
if not root_trees["root_config"].settings.get("CONFIG_PROTECT"):
msg = "!!! CONFIG_PROTECT is empty"
if root != "/":
@@ -1013,7 +1013,7 @@ def config_protect_check(trees):
def profile_check(trees, myaction):
if myaction in ("help", "info", "sync", "version"):
return os.EX_OK
- for root, root_trees in trees.iteritems():
+ for root, root_trees in trees.items():
if root_trees["root_config"].settings.profiles:
continue
# generate some profile related warning messages
@@ -1095,7 +1095,7 @@ def emerge_main():
repo_name_duplicate_check(trees)
config_protect_check(trees)
- for mytrees in trees.itervalues():
+ for mytrees in trees.values():
mydb = mytrees["porttree"].dbapi
# Freeze the portdbapi for performance (memoize all xmatch results).
mydb.freeze()
diff --git a/pym/_emerge/unmerge.py b/pym/_emerge/unmerge.py
index 710a57b28..ddcfa8180 100644
--- a/pym/_emerge/unmerge.py
+++ b/pym/_emerge/unmerge.py
@@ -243,7 +243,7 @@ def unmerge(root_config, myopts, unmerge_action,
slotmap[myslot][vartree.dbapi.cpv_counter(mypkg)] = mypkg
for myslot in slotmap:
- counterkeys = slotmap[myslot].keys()
+ counterkeys = list(slotmap[myslot].keys())
if not counterkeys:
continue
counterkeys.sort()
@@ -411,7 +411,7 @@ def unmerge(root_config, myopts, unmerge_action,
unordered[cp] = cp_dict
for k in d:
cp_dict[k] = set()
- for k, v in d.iteritems():
+ for k, v in d.items():
cp_dict[k].update(v)
pkgmap = [unordered[cp] for cp in sorted(unordered)]
@@ -419,7 +419,7 @@ def unmerge(root_config, myopts, unmerge_action,
selected = pkgmap[x]["selected"]
if not selected:
continue
- for mytype, mylist in pkgmap[x].iteritems():
+ for mytype, mylist in pkgmap[x].items():
if mytype == "selected":
continue
mylist.difference_update(all_selected)
diff --git a/pym/portage/__init__.py b/pym/portage/__init__.py
index fe696b6dc..e8532e60d 100644
--- a/pym/portage/__init__.py
+++ b/pym/portage/__init__.py
@@ -183,7 +183,7 @@ class _unicode_func_wrapper(object):
wrapped_kwargs = dict(
(_unicode_encode(k, encoding=encoding, errors='strict'),
_unicode_encode(v, encoding=encoding, errors='strict'))
- for k, v in kwargs.iteritems())
+ for k, v in kwargs.items())
else:
wrapped_kwargs = {}
@@ -784,13 +784,13 @@ class digraph(object):
return list(self.nodes[node][0])
children = []
if hasattr(ignore_priority, '__call__'):
- for child, priorities in self.nodes[node][0].iteritems():
+ for child, priorities in self.nodes[node][0].items():
for priority in priorities:
if not ignore_priority(priority):
children.append(child)
break
else:
- for child, priorities in self.nodes[node][0].iteritems():
+ for child, priorities in self.nodes[node][0].items():
if ignore_priority < priorities[-1]:
children.append(child)
return children
@@ -801,13 +801,13 @@ class digraph(object):
return list(self.nodes[node][1])
parents = []
if hasattr(ignore_priority, '__call__'):
- for parent, priorities in self.nodes[node][1].iteritems():
+ for parent, priorities in self.nodes[node][1].items():
for priority in priorities:
if not ignore_priority(priority):
parents.append(parent)
break
else:
- for parent, priorities in self.nodes[node][1].iteritems():
+ for parent, priorities in self.nodes[node][1].items():
if ignore_priority < priorities[-1]:
parents.append(parent)
return parents
@@ -826,7 +826,7 @@ class digraph(object):
elif hasattr(ignore_priority, '__call__'):
for node in self.order:
is_leaf_node = True
- for child, priorities in self.nodes[node][0].iteritems():
+ for child, priorities in self.nodes[node][0].items():
for priority in priorities:
if not ignore_priority(priority):
is_leaf_node = False
@@ -838,7 +838,7 @@ class digraph(object):
else:
for node in self.order:
is_leaf_node = True
- for child, priorities in self.nodes[node][0].iteritems():
+ for child, priorities in self.nodes[node][0].items():
if ignore_priority < priorities[-1]:
is_leaf_node = False
break
@@ -860,7 +860,7 @@ class digraph(object):
elif hasattr(ignore_priority, '__call__'):
for node in self.order:
is_root_node = True
- for parent, priorities in self.nodes[node][1].iteritems():
+ for parent, priorities in self.nodes[node][1].items():
for priority in priorities:
if not ignore_priority(priority):
is_root_node = False
@@ -872,7 +872,7 @@ class digraph(object):
else:
for node in self.order:
is_root_node = True
- for parent, priorities in self.nodes[node][1].iteritems():
+ for parent, priorities in self.nodes[node][1].items():
if ignore_priority < priorities[-1]:
is_root_node = False
break
@@ -888,16 +888,16 @@ class digraph(object):
clone = digraph()
clone.nodes = {}
memo = {}
- for children, parents, node in self.nodes.itervalues():
+ for children, parents, node in self.nodes.values():
children_clone = {}
- for child, priorities in children.iteritems():
+ for child, priorities in children.items():
priorities_clone = memo.get(id(priorities))
if priorities_clone is None:
priorities_clone = priorities[:]
memo[id(priorities)] = priorities_clone
children_clone[child] = priorities_clone
parents_clone = {}
- for parent, priorities in parents.iteritems():
+ for parent, priorities in parents.items():
priorities_clone = memo.get(id(priorities))
if priorities_clone is None:
priorities_clone = priorities[:]
@@ -941,7 +941,7 @@ class digraph(object):
output("depends on\n")
else:
output("(no children)\n")
- for child, priorities in self.nodes[node][0].iteritems():
+ for child, priorities in self.nodes[node][0].items():
output(" %s (%s)\n" % (child, priorities[-1],))
#parse /etc/env.d and generate /etc/profile.env
@@ -1135,7 +1135,7 @@ def env_update(makelinks=1, target_root=None, prev_mtimes=None, contents=None,
not ld_cache_update and \
contents is not None:
libdir_contents_changed = False
- for mypath, mydata in contents.iteritems():
+ for mypath, mydata in contents.items():
if mydata[0] not in ("obj","sym"):
continue
head, tail = os.path.split(mypath)
@@ -1740,7 +1740,7 @@ class config(object):
for x in self.profiles]
for pkeyworddict in rawpkeywords:
cpdict = {}
- for k, v in pkeyworddict.iteritems():
+ for k, v in pkeyworddict.items():
cpdict.setdefault(k.cp, {})[k] = v
self._pkeywords_list.append(cpdict)
@@ -1758,7 +1758,7 @@ class config(object):
recursive=1) for x in self.profiles]
for pusemaskdict in rawpusemask:
cpdict = {}
- for k, v in pusemaskdict.iteritems():
+ for k, v in pusemaskdict.items():
cpdict.setdefault(k.cp, {})[k] = v
self.pusemask_list.append(cpdict)
del rawpusemask
@@ -1768,7 +1768,7 @@ class config(object):
juststrings=True, recursive=1) for x in self.profiles]
for rawpusedict in rawprofileuse:
cpdict = {}
- for k, v in rawpusedict.iteritems():
+ for k, v in rawpusedict.items():
cpdict.setdefault(k.cp, {})[k] = v
self.pkgprofileuse.append(cpdict)
del rawprofileuse
@@ -1784,7 +1784,7 @@ class config(object):
for x in self.profiles]
for rawpusefdict in rawpuseforce:
cpdict = {}
- for k, v in rawpusefdict.iteritems():
+ for k, v in rawpusefdict.items():
cpdict.setdefault(k.cp, {})[k] = v
self.puseforce_list.append(cpdict)
del rawpuseforce
@@ -1840,7 +1840,7 @@ class config(object):
# Avoid potential UnicodeDecodeError exceptions later.
env_unicode = dict((_unicode_decode(k), _unicode_decode(v))
- for k, v in env.iteritems())
+ for k, v in env.items())
self.backupenv = env_unicode
@@ -1848,7 +1848,7 @@ class config(object):
# Remove duplicate values so they don't override updated
# profile.env values later (profile.env is reloaded in each
# call to self.regenerate).
- for k, v in env_d.iteritems():
+ for k, v in env_d.items():
try:
if self.backupenv[k] == v:
del self.backupenv[k]
@@ -1965,14 +1965,14 @@ class config(object):
pmask_locations.append(abs_user_config)
pusedict = grabdict_package(
os.path.join(abs_user_config, "package.use"), recursive=1)
- for k, v in pusedict.iteritems():
+ for k, v in pusedict.items():
self.pusedict.setdefault(k.cp, {})[k] = v
#package.keywords
pkgdict = grabdict_package(
os.path.join(abs_user_config, "package.keywords"),
recursive=1)
- for k, v in pkgdict.iteritems():
+ for k, v in pkgdict.items():
# default to ~arch if no specific keyword is given
if not v:
mykeywordlist = []
@@ -1990,7 +1990,7 @@ class config(object):
#package.license
licdict = grabdict_package(os.path.join(
abs_user_config, "package.license"), recursive=1)
- for k, v in licdict.iteritems():
+ for k, v in licdict.items():
cp = k.cp
cp_dict = self._plicensedict.get(cp)
if not cp_dict:
@@ -2001,7 +2001,7 @@ class config(object):
#package.properties
propdict = grabdict_package(os.path.join(
abs_user_config, "package.properties"), recursive=1)
- for k, v in propdict.iteritems():
+ for k, v in propdict.items():
cp = k.cp
cp_dict = self._ppropertiesdict.get(cp)
if not cp_dict:
@@ -2214,7 +2214,7 @@ class config(object):
}
for mypath, (gid, mode, modemask, preserve_perms) \
- in dir_mode_map.iteritems():
+ in dir_mode_map.items():
mydir = os.path.join(self["ROOT"], mypath)
if preserve_perms and os.path.isdir(mydir):
# Only adjust permissions on some directories if
@@ -2617,7 +2617,7 @@ class config(object):
for i, pkgprofileuse_dict in enumerate(self.pkgprofileuse):
cpdict = pkgprofileuse_dict.get(cp)
if cpdict:
- keys = cpdict.keys()
+ keys = list(cpdict.keys())
while keys:
bestmatch = best_match_to_list(cpv_slot, keys)
if bestmatch:
@@ -2647,7 +2647,7 @@ class config(object):
self.puse = ""
cpdict = self.pusedict.get(cp)
if cpdict:
- keys = cpdict.keys()
+ keys = list(cpdict.keys())
while keys:
self.pusekey = best_match_to_list(cpv_slot, keys)
if self.pusekey:
@@ -2744,7 +2744,7 @@ class config(object):
# allows packages that support LINGUAS but don't
# declare it in IUSE to use the variable outside of the
# USE_EXPAND context.
- for k, use_expand_iuse in use_expand_iuses.iteritems():
+ for k, use_expand_iuse in use_expand_iuses.items():
if k + '_*' in use:
use.update( x for x in use_expand_iuse if x not in usemask )
k = k.upper()
@@ -2799,7 +2799,7 @@ class config(object):
for i, pusemask_dict in enumerate(self.pusemask_list):
cpdict = pusemask_dict.get(cp)
if cpdict:
- keys = cpdict.keys()
+ keys = list(cpdict.keys())
while keys:
best_match = best_match_to_list(pkg, keys)
if best_match:
@@ -2822,7 +2822,7 @@ class config(object):
for i, puseforce_dict in enumerate(self.puseforce_list):
cpdict = puseforce_dict.get(cp)
if cpdict:
- keys = cpdict.keys()
+ keys = list(cpdict.keys())
while keys:
best_match = best_match_to_list(pkg, keys)
if best_match:
@@ -2939,7 +2939,7 @@ class config(object):
matches = False
if pkgdict:
cpv_slot_list = ["%s:%s" % (cpv, metadata["SLOT"])]
- for atom, pkgkeywords in pkgdict.iteritems():
+ for atom, pkgkeywords in pkgdict.items():
if match_from_list(atom, cpv_slot_list):
matches = True
pgroups.extend(pkgkeywords)
@@ -3007,7 +3007,7 @@ class config(object):
if cpdict:
accept_license = list(self._accept_license)
cpv_slot = "%s:%s" % (cpv, metadata["SLOT"])
- for atom in match_to_list(cpv_slot, cpdict.keys()):
+ for atom in match_to_list(cpv_slot, list(cpdict.keys())):
accept_license.extend(cpdict[atom])
licenses = set(flatten(dep.use_reduce(dep.paren_reduce(
@@ -3088,7 +3088,7 @@ class config(object):
if cpdict:
accept_properties = list(self._accept_properties)
cpv_slot = "%s:%s" % (cpv, metadata["SLOT"])
- for atom in match_to_list(cpv_slot, cpdict.keys()):
+ for atom in match_to_list(cpv_slot, list(cpdict.keys())):
accept_properties.extend(cpdict[atom])
properties = set(flatten(dep.use_reduce(dep.paren_reduce(
@@ -3468,7 +3468,7 @@ class config(object):
virtuals_file = os.path.join(x, "virtuals")
virtuals_dict = grabdict(virtuals_file)
atoms_dict = {}
- for k, v in virtuals_dict.iteritems():
+ for k, v in virtuals_dict.items():
try:
virt_atom = portage.dep.Atom(k)
except portage.exception.InvalidAtom:
@@ -3527,7 +3527,7 @@ class config(object):
def _populate_treeVirtuals(self, vartree):
"""Reduce the provides into a list by CP."""
- for provide, cpv_list in vartree.get_all_provides().iteritems():
+ for provide, cpv_list in vartree.get_all_provides().items():
try:
provide = dep.Atom(provide)
except exception.InvalidAtom:
@@ -3547,7 +3547,7 @@ class config(object):
# Virtuals by profile+tree preferences.
ptVirtuals = {}
- for virt, installed_list in self.treeVirtuals.iteritems():
+ for virt, installed_list in self.treeVirtuals.items():
profile_list = self.dirVirtuals.get(virt, None)
if not profile_list:
continue
@@ -3631,7 +3631,7 @@ class config(object):
yield (k, self[k])
def items(self):
return list(self.iteritems())
def __setitem__(self,mykey,myvalue):
"set a value; will be thrown away at reset() time"
@@ -3850,7 +3850,7 @@ def spawn(mystring, mysettings, debug=0, free=0, droppriv=0, sesandbox=0, fakero
# it needs to be flushed before allowing a child process to use it
# so that output always shows in the correct order.
stdout_filenos = (sys.stdout.fileno(), sys.stderr.fileno())
- for fd in fd_pipes.itervalues():
+ for fd in fd_pipes.values():
if fd in stdout_filenos:
sys.stdout.flush()
sys.stderr.flush()
@@ -4011,7 +4011,7 @@ def _spawn_fetch(settings, args, **kwargs):
if args[0] != BASH_BINARY:
args = [BASH_BINARY, "-c", "exec \"$@\"", args[0]] + args
- rval = spawn_func(args, env=dict(settings.iteritems()), **kwargs)
+ rval = spawn_func(args, env=dict(iter(settings.items())), **kwargs)
return rval
@@ -4302,7 +4302,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, locks_in_subdir=".locks",
file_uri_tuples = []
if isinstance(myuris, dict):
- for myfile, uri_set in myuris.iteritems():
+ for myfile, uri_set in myuris.items():
for myuri in uri_set:
file_uri_tuples.append((myfile, myuri))
else:
@@ -4365,7 +4365,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, locks_in_subdir=".locks",
# Prefer thirdpartymirrors over normal mirrors in cases when
# the file does not yet exist on the normal mirrors.
- for myfile, uris in thirdpartymirror_uris.iteritems():
+ for myfile, uris in thirdpartymirror_uris.items():
primaryuri_dict.setdefault(myfile, []).extend(uris)
can_fetch=True
@@ -4700,7 +4700,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, locks_in_subdir=".locks",
mysettings.get("PORTAGE_QUIET", None) == "1"
digests = mydigests.get(myfile)
if digests:
- digests = digests.keys()
+ digests = list(digests.keys())
digests.sort()
eout.ebegin(
"%s %s ;-)" % (myfile, " ".join(digests)))
@@ -5207,7 +5207,7 @@ def digestgen(myarchives, mysettings, overwrite=1, manifestonly=0, myportdb=None
writemsg(_("!!! Permission Denied: %s\n") % (e,), noiselevel=-1)
return 0
if "assume-digests" not in mysettings.features:
- distlist = mf.fhashdict.get("DIST", {}).keys()
+ distlist = list(mf.fhashdict.get("DIST", {}).keys())
distlist.sort()
auto_assumed = []
for filename in distlist:
@@ -5271,7 +5271,7 @@ def digestcheck(myfiles, mysettings, strict=0, justmanifest=0):
return 1
mf = Manifest(pkgdir, mysettings["DISTDIR"])
manifest_empty = True
- for d in mf.fhashdict.itervalues():
+ for d in mf.fhashdict.values():
if d:
manifest_empty = False
break
@@ -6178,7 +6178,7 @@ def _prepare_features_dirs(mysettings):
droppriv = secpass >= 2 and \
"userpriv" in mysettings.features and \
"userpriv" not in restrict
- for myfeature, kwargs in features_dirs.iteritems():
+ for myfeature, kwargs in features_dirs.items():
if myfeature in mysettings.features:
failure = False
basedir = mysettings.get(kwargs["basedir_var"])
@@ -8307,7 +8307,7 @@ def getmaskingstatus(mycpv, settings=None, portdb=None):
matches = False
if pkgdict:
cpv_slot_list = ["%s:%s" % (mycpv, metadata["SLOT"])]
- for atom, pkgkeywords in pkgdict.iteritems():
+ for atom, pkgkeywords in pkgdict.items():
if match_from_list(atom, cpv_slot_list):
matches = True
pgroups.extend(pkgkeywords)
@@ -8413,9 +8413,9 @@ class FetchlistDict(portage.cache.mappings.Mapping):
self.portdb = mydbapi
def __getitem__(self, pkg_key):
"""Returns the complete fetch list for a given package."""
- return self.portdb.getFetchMap(pkg_key, mytree=self.mytree).keys()
+ return list(self.portdb.getFetchMap(pkg_key, mytree=self.mytree).keys())
def __contains__(self, cpv):
- return cpv in self.keys()
+ return cpv in list(self.keys())
def has_key(self, pkg_key):
"""Returns true if the given package exists within pkgdir."""
return pkg_key in self
@@ -8712,7 +8712,7 @@ def _global_updates(trees, prev_mtimes):
# until after _all_ of the above updates have
# been processed because the mtimedb will
# automatically commit when killed by ctrl C.
- for mykey, mtime in timestamps.iteritems():
+ for mykey, mtime in timestamps.items():
prev_mtimes[mykey] = mtime
# We gotta do the brute force updates for these now.
@@ -8785,7 +8785,7 @@ class MtimeDB(dict):
mtimedbkeys = set(("info", "ldpath", "resume", "resume_backup",
"starttime", "updates", "version"))
- for k in d.keys():
+ for k in list(d.keys()):
if k not in mtimedbkeys:
writemsg(_("Deleting invalid mtimedb key: %s\n") % str(k))
del d[k]
diff --git a/pym/portage/cache/anydbm.py b/pym/portage/cache/anydbm.py
index 5b771adad..143c321e1 100644
--- a/pym/portage/cache/anydbm.py
+++ b/pym/portage/cache/anydbm.py
@@ -51,7 +51,7 @@ class database(fs_template.FsBased):
self._ensure_access(self._db_path)
def iteritems(self):
- return self.__db.iteritems()
+ return iter(self.__db.items())
def _getitem(self, cpv):
# we override getitem because it's just a cpickling of the data handed in.
@@ -64,7 +64,7 @@ class database(fs_template.FsBased):
del self.__db[cpv]
def __iter__(self):
- return iter(self.__db.keys())
+ return iter(list(self.__db.keys()))
def __contains__(self, cpv):
return cpv in self.__db
diff --git a/pym/portage/cache/ebuild_xattr.py b/pym/portage/cache/ebuild_xattr.py
index 4bcc6c846..7ea4aaaca 100644
--- a/pym/portage/cache/ebuild_xattr.py
+++ b/pym/portage/cache/ebuild_xattr.py
@@ -123,7 +123,7 @@ class database(fs_template.FsBased):
def _setitem(self, cpv, values):
path = self.__get_path(cpv)
max = self.max_len
- for key,value in values.iteritems():
+ for key,value in values.items():
# mtime comes in as long so need to convert to strings
s = str(value)
# We need to split long values
diff --git a/pym/portage/cache/mappings.py b/pym/portage/cache/mappings.py
index 066927ac1..69e37fa15 100644
--- a/pym/portage/cache/mappings.py
+++ b/pym/portage/cache/mappings.py
@@ -23,7 +23,7 @@ class Mapping(object):
"""
def __iter__(self):
- return self.iterkeys()
+ return iter(self.keys())
def keys(self):
return list(self.__iter__())
@@ -48,14 +48,14 @@ class Mapping(object):
return self.__iter__()
def itervalues(self):
- for _, v in self.iteritems():
+ for _, v in self.items():
yield v
def values(self):
- return [v for _, v in self.iteritems()]
+ return [v for _, v in self.items()]
def items(self):
return list(self.iteritems())
def get(self, key, default=None):
try:
@@ -64,10 +64,10 @@ class Mapping(object):
return default
def __repr__(self):
- return repr(dict(self.iteritems()))
+ return repr(dict(self.items()))
def __len__(self):
- return len(self.keys())
+ return len(list(self.keys()))
if sys.hexversion >= 0x3000000:
items = iteritems
@@ -80,7 +80,7 @@ class MutableMapping(Mapping):
"""
def clear(self):
- for key in self.keys():
+ for key in list(self.keys()):
del self[key]
def setdefault(self, key, default=None):
@@ -105,7 +105,7 @@ class MutableMapping(Mapping):
def popitem(self):
try:
- k, v = self.iteritems().next()
+ k, v = next(iter(self.items()))
except StopIteration:
raise KeyError('container is empty')
del self[k]
@@ -226,9 +226,9 @@ class ProtectedDict(MutableMapping):
def __iter__(self):
- for k in self.new.iterkeys():
+ for k in self.new.keys():
yield k
- for k in self.orig.iterkeys():
+ for k in self.orig.keys():
if k not in self.blacklist and k not in self.new:
yield k
@@ -333,12 +333,12 @@ def slot_dict_class(keys, prefix="_val_"):
self.update(kwargs)
def __iter__(self):
- for k, v in self.iteritems():
+ for k, v in self.items():
yield k
def __len__(self):
l = 0
- for i in self.iteritems():
+ for i in self.items():
l += 1
return l
@@ -353,14 +353,14 @@ def slot_dict_class(keys, prefix="_val_"):
pass
def items(self):
return list(self.iteritems())
def itervalues(self):
- for k, v in self.iteritems():
+ for k, v in self.items():
yield v
def values(self):
return list(self.itervalues())
def __delitem__(self, k):
try:
@@ -442,7 +442,7 @@ def slot_dict_class(keys, prefix="_val_"):
def popitem(self):
try:
- k, v = self.iteritems().next()
+ k, v = next(iter(self.items()))
except StopIteration:
raise KeyError('container is empty')
del self[k]
@@ -461,7 +461,7 @@ def slot_dict_class(keys, prefix="_val_"):
pass
def __str__(self):
- return str(dict(self.iteritems()))
+ return str(dict(self.items()))
if sys.hexversion >= 0x3000000:
items = iteritems
diff --git a/pym/portage/cache/sqlite.py b/pym/portage/cache/sqlite.py
index 2bb740722..48dfd3aac 100644
--- a/pym/portage/cache/sqlite.py
+++ b/pym/portage/cache/sqlite.py
@@ -106,7 +106,7 @@ class database(fs_template.FsBased):
self._allowed_keys
cursor = self._db_cursor
- for k, v in self._db_table.iteritems():
+ for k, v in self._db_table.items():
if self._db_table_exists(v["table_name"]):
create_statement = self._db_table_get_create(v["table_name"])
if create_statement != v["create"]:
diff --git a/pym/portage/cache/template.py b/pym/portage/cache/template.py
index f4e9dfb8d..e90c9dba5 100644
--- a/pym/portage/cache/template.py
+++ b/pym/portage/cache/template.py
@@ -67,7 +67,7 @@ class database(object):
raise cache_errors.ReadOnlyRestriction()
if self.cleanse_keys:
d=ProtectedDict(values)
- for k in d.keys():
+ for k in list(d.keys()):
if d[k] == '':
del d[k]
if self.serialize_eclasses and "_eclasses_" in values:
@@ -109,17 +109,17 @@ class database(object):
return cpv in self
def keys(self):
return tuple(self.iterkeys())
def iterkeys(self):
return iter(self)
def iteritems(self):
- for x in self.iterkeys():
+ for x in self.keys():
yield (x, self[x])
def items(self):
return list(self.iteritems())
def sync(self, rate=0):
self.sync_rate = rate
@@ -151,7 +151,7 @@ class database(object):
if self.iterkeys is database.iterkeys:
# prevent a possible recursive loop
raise NotImplementedError(self)
- return self.iterkeys()
+ return iter(self.keys())
def get(self, k, x=None):
try:
@@ -169,7 +169,7 @@ class database(object):
import re
restricts = {}
- for key,match in match_dict.iteritems():
+ for key,match in match_dict.items():
# XXX this sucks.
try:
if isinstance(match, basestring):
@@ -181,10 +181,10 @@ class database(object):
if key not in self.__known_keys:
raise InvalidRestriction(key, match, "Key isn't valid")
- for cpv in self.iterkeys():
+ for cpv in self.keys():
cont = True
vals = self[cpv]
- for key, match in restricts.iteritems():
+ for key, match in restricts.items():
if not match(vals[key]):
cont = False
break
diff --git a/pym/portage/checksum.py b/pym/portage/checksum.py
index 93076d304..9cc2d82e7 100644
--- a/pym/portage/checksum.py
+++ b/pym/portage/checksum.py
@@ -134,7 +134,7 @@ def perform_all(x, calc_prelink=0):
return mydict
def get_valid_checksum_keys():
- return hashfunc_map.keys()
+ return list(hashfunc_map.keys())
def get_hash_origin(hashtype):
if hashtype not in hashfunc_map:
diff --git a/pym/portage/cvstree.py b/pym/portage/cvstree.py
index 0dd8f10bc..eaf7f37a5 100644
--- a/pym/portage/cvstree.py
+++ b/pym/portage/cvstree.py
@@ -89,12 +89,12 @@ def findoption(entries, pattern, recursive=0, basedir=""):
basedir."""
if not basedir.endswith("/"):
basedir += "/"
- for myfile, mydata in entries["files"].iteritems():
+ for myfile, mydata in entries["files"].items():
if "cvs" in mydata["status"]:
if pattern.search(mydata["flags"]):
yield basedir+myfile
if recursive:
- for mydir, mydata in entries["dirs"].iteritems():
+ for mydir, mydata in entries["dirs"].items():
for x in findoption(mydata, pattern,
recursive, basedir+mydir):
yield x
diff --git a/pym/portage/dbapi/bintree.py b/pym/portage/dbapi/bintree.py
index c37242419..ff277a8fa 100644
--- a/pym/portage/dbapi/bintree.py
+++ b/pym/portage/dbapi/bintree.py
@@ -114,7 +114,7 @@ class bindbapi(fakedbapi):
mytbz2 = portage.xpak.tbz2(tbz2path)
mydata = mytbz2.get_data()
- for k, v in values.iteritems():
+ for k, v in values.items():
k = _unicode_encode(k,
encoding=_encodings['repo.content'], errors='backslashreplace')
v = _unicode_encode(v,
@@ -645,7 +645,7 @@ class binarytree(object):
# from xpak.
if update_pkgindex and os.access(self.pkgdir, os.W_OK):
del pkgindex.packages[:]
- pkgindex.packages.extend(metadata.itervalues())
+ pkgindex.packages.extend(iter(metadata.values()))
self._update_pkgindex_header(pkgindex.header)
from portage.util import atomic_ofstream
f = atomic_ofstream(self._pkgindex_file)
@@ -741,7 +741,7 @@ class binarytree(object):
# Remote package instances override local package
# if they are not identical.
hash_names = ["SIZE"] + self._pkgindex_hashes
- for cpv, local_metadata in metadata.iteritems():
+ for cpv, local_metadata in metadata.items():
remote_metadata = self._remotepkgs.get(cpv)
if remote_metadata is None:
continue
diff --git a/pym/portage/dbapi/porttree.py b/pym/portage/dbapi/porttree.py
index 98af60d7f..bf97af63e 100644
--- a/pym/portage/dbapi/porttree.py
+++ b/pym/portage/dbapi/porttree.py
@@ -201,10 +201,10 @@ class portdbapi(dbapi):
# earlier ones that correspond to the same name.
porttrees = [x for x in porttrees if x not in identically_named_paths]
ignored_map = {}
- for path, repo_name in identically_named_paths.iteritems():
+ for path, repo_name in identically_named_paths.items():
ignored_map.setdefault(repo_name, []).append(path)
self._ignored_repos = tuple((repo_name, tuple(paths)) \
- for repo_name, paths in ignored_map.iteritems())
+ for repo_name, paths in ignored_map.items())
self.porttrees = porttrees
porttree_root = porttrees[0]
@@ -232,7 +232,7 @@ class portdbapi(dbapi):
repo_aliases = {}
if local_repo_configs is not None:
default_loc_repo_config = local_repo_configs.get('DEFAULT')
- for repo_name, loc_repo_conf in local_repo_configs.iteritems():
+ for repo_name, loc_repo_conf in local_repo_configs.items():
if loc_repo_conf.aliases is not None:
for alias in loc_repo_conf.aliases:
overridden_alias = repo_aliases.get(alias)
@@ -503,7 +503,7 @@ class portdbapi(dbapi):
i = metadata
if hasattr(metadata, "iteritems"):
- i = metadata.iteritems()
+ i = iter(metadata.items())
metadata = dict(i)
if metadata.get("INHERITED", False):
@@ -761,7 +761,7 @@ class portdbapi(dbapi):
all_uris = []
all_files = []
- for filename, uris in uri_map.iteritems():
+ for filename, uris in uri_map.items():
for uri in uris:
all_uris.append(uri)
all_files.append(filename)
@@ -858,7 +858,7 @@ class portdbapi(dbapi):
y == "CVS":
continue
d[x+"/"+y] = None
- l = d.keys()
+ l = list(d.keys())
l.sort()
return l
@@ -915,7 +915,7 @@ class portdbapi(dbapi):
(mycp, self.mysettings["PORTAGE_CONFIGROOT"]), noiselevel=-1)
mylist = []
else:
- mylist = d.keys()
+ mylist = list(d.keys())
# Always sort in ascending order here since it's handy
# and the result can be easily cached and reused.
self._cpv_sort_ascending(mylist)
diff --git a/pym/portage/dbapi/vartree.py b/pym/portage/dbapi/vartree.py
index 5db04e4b1..635ba6026 100644
--- a/pym/portage/dbapi/vartree.py
+++ b/pym/portage/dbapi/vartree.py
@@ -605,8 +605,8 @@ class LinkageMap(object):
rValue = []
if not self._libs:
self.rebuild()
- for arch_map in self._libs.itervalues():
- for soname_map in arch_map.itervalues():
+ for arch_map in self._libs.values():
+ for soname_map in arch_map.values():
for obj_key in soname_map.providers:
rValue.extend(self._obj_properties[obj_key][4])
return rValue
@@ -1019,7 +1019,7 @@ class vardbapi(dbapi):
self.invalidentry(self.getpath(y))
continue
d[mysplit[0]+"/"+mysplit[1]] = None
- return d.keys()
+ return list(d.keys())
def checkblockers(self, origdep):
pass
@@ -1224,7 +1224,7 @@ class vardbapi(dbapi):
cache_valid = cache_mtime == mydir_mtime
if cache_valid:
# Migrate old metadata to unicode.
- for k, v in metadata.iteritems():
+ for k, v in metadata.items():
metadata[k] = _unicode_decode(v,
encoding=_encodings['repo.content'], errors='replace')
@@ -1298,7 +1298,7 @@ class vardbapi(dbapi):
treetype="vartree", vartree=self.vartree)
if not mylink.exists():
raise KeyError(cpv)
- for k, v in values.iteritems():
+ for k, v in values.items():
if v:
mylink.setfile(k, v)
else:
@@ -1561,7 +1561,7 @@ class vardbapi(dbapi):
def getFileOwnerMap(self, path_iter):
owners = self.get_owners(path_iter)
file_owners = {}
- for pkg_dblink, files in owners.iteritems():
+ for pkg_dblink, files in owners.items():
for f in files:
owner_set = file_owners.get(f)
if owner_set is None:
@@ -2183,7 +2183,7 @@ class dblink(object):
cpv_lib_map = self._find_unused_preserved_libs()
if cpv_lib_map:
self._remove_preserved_libs(cpv_lib_map)
- for cpv, removed in cpv_lib_map.iteritems():
+ for cpv, removed in cpv_lib_map.items():
if not self.vartree.dbapi.cpv_exists(cpv):
for dblnk in others_in_slot:
if dblnk.mycpv == cpv:
@@ -2359,7 +2359,7 @@ class dblink(object):
if pkgfiles:
self.updateprotect()
- mykeys = pkgfiles.keys()
+ mykeys = list(pkgfiles.keys())
mykeys.sort()
mykeys.reverse()
@@ -2829,7 +2829,7 @@ class dblink(object):
# Create consumer nodes and add them to the graph.
# Note that consumers can also be providers.
- for provider_node, consumers in consumer_map.iteritems():
+ for provider_node, consumers in consumer_map.items():
for c in consumers:
if self.isowner(c, root):
continue
@@ -2955,7 +2955,7 @@ class dblink(object):
return node
linkmap = self.vartree.dbapi.linkmap
- for cpv, plibs in plib_dict.iteritems():
+ for cpv, plibs in plib_dict.items():
for f in plibs:
path_cpv_map[f] = cpv
preserved_node = path_to_node(f)
@@ -3036,7 +3036,7 @@ class dblink(object):
os = _os_merge
files_to_remove = set()
- for files in cpv_lib_map.itervalues():
+ for files in cpv_lib_map.values():
files_to_remove.update(files)
files_to_remove = sorted(files_to_remove)
showMessage = self._display_merge
@@ -3088,7 +3088,7 @@ class dblink(object):
plib_dict = self.vartree.dbapi.plib_registry.getPreservedLibs()
plib_cpv_map = {}
plib_paths = set()
- for cpv, paths in plib_dict.iteritems():
+ for cpv, paths in plib_dict.items():
plib_paths.update(paths)
for f in paths:
plib_cpv_map[f] = cpv
@@ -3266,7 +3266,7 @@ class dblink(object):
k = (s.st_dev, s.st_ino)
inode_map.setdefault(k, []).append((path, s))
suspicious_hardlinks = []
- for path_list in inode_map.itervalues():
+ for path_list in inode_map.values():
path, s = path_list[0]
if len(path_list) == s.st_nlink:
# All hardlinks seem to be owned by this package.
@@ -3644,7 +3644,7 @@ class dblink(object):
owners = self.vartree.dbapi._owners.get_owners(collisions)
self.vartree.dbapi.flush_cache()
- for pkg, owned_files in owners.iteritems():
+ for pkg, owned_files in owners.items():
cpv = pkg.mycpv
msg = []
msg.append("%s" % cpv)
@@ -3891,7 +3891,7 @@ class dblink(object):
# and update the contents of the packages that owned them.
plib_registry = self.vartree.dbapi.plib_registry
plib_dict = plib_registry.getPreservedLibs()
- for cpv, paths in plib_collisions.iteritems():
+ for cpv, paths in plib_collisions.items():
if cpv not in plib_dict:
continue
if cpv == self.mycpv:
@@ -3948,7 +3948,7 @@ class dblink(object):
cpv_lib_map = self._find_unused_preserved_libs()
if cpv_lib_map:
self._remove_preserved_libs(cpv_lib_map)
- for cpv, removed in cpv_lib_map.iteritems():
+ for cpv, removed in cpv_lib_map.items():
if not self.vartree.dbapi.cpv_exists(cpv):
continue
self.vartree.dbapi.removeFromContents(cpv, removed)
@@ -4428,7 +4428,7 @@ def tar_contents(contents, root, tar, protect=None, onProgress=None):
curval = 0
if onProgress:
onProgress(maxval, 0)
- paths = contents.keys()
+ paths = list(contents.keys())
paths.sort()
for path in paths:
curval += 1
diff --git a/pym/portage/dbapi/virtual.py b/pym/portage/dbapi/virtual.py
index bf90f305b..5d1e933a1 100644
--- a/pym/portage/dbapi/virtual.py
+++ b/pym/portage/dbapi/virtual.py
@@ -59,7 +59,7 @@ class fakedbapi(dbapi):
return list(self.cpdict)
def cpv_all(self):
- return self.cpvdict.keys()
+ return list(self.cpvdict.keys())
def cpv_inject(self, mycpv, metadata=None):
"""Adds a cpv to the list of available packages. See the
diff --git a/pym/portage/debug.py b/pym/portage/debug.py
index 343f8ab6e..18e8482e4 100644
--- a/pym/portage/debug.py
+++ b/pym/portage/debug.py
@@ -89,7 +89,7 @@ class trace_handler(object):
# We omit items that will lead to unreasonable bloat of the trace
# output (and resulting log file).
- for k, v in my_locals.iteritems():
+ for k, v in my_locals.items():
my_repr = repr(v)
if len(my_repr) > self.max_repr_length:
my_locals[k] = "omitted"
diff --git a/pym/portage/dep.py b/pym/portage/dep.py
index 9d055912d..c8b9f6d44 100644
--- a/pym/portage/dep.py
+++ b/pym/portage/dep.py
@@ -393,16 +393,16 @@ class _use_dep(object):
self.required = frozenset(chain(
enabled_flags,
disabled_flags,
- *conditional.values()
+ *list(conditional.values())
))
self.enabled = frozenset(enabled_flags)
self.disabled = frozenset(disabled_flags)
self.conditional = None
- for v in conditional.itervalues():
+ for v in conditional.values():
if v:
- for k, v in conditional.iteritems():
+ for k, v in conditional.items():
conditional[k] = frozenset(v)
self.conditional = conditional
break
diff --git a/pym/portage/dispatch_conf.py b/pym/portage/dispatch_conf.py
index 096ce0a7c..377eb45c3 100644
--- a/pym/portage/dispatch_conf.py
+++ b/pym/portage/dispatch_conf.py
@@ -36,7 +36,7 @@ def read_config(mandatory_opts):
# Handle quote removal here, since KeyValuePairFileLoader doesn't do that.
quotes = "\"'"
- for k, v in opts.iteritems():
+ for k, v in opts.items():
if v[:1] in quotes and v[:1] == v[-1:]:
opts[k] = v[1:-1]
diff --git a/pym/portage/eclass_cache.py b/pym/portage/eclass_cache.py
index 30b83ea3d..2c3c66c86 100644
--- a/pym/portage/eclass_cache.py
+++ b/pym/portage/eclass_cache.py
@@ -114,7 +114,7 @@ class cache(object):
def is_eclass_data_valid(self, ec_dict):
if not isinstance(ec_dict, dict):
return False
- for eclass, tup in ec_dict.iteritems():
+ for eclass, tup in ec_dict.items():
cached_data = self.eclasses.get(eclass, None)
""" Only use the mtime for validation since the probability of a
collision is small and, depending on the cache implementation, the
diff --git a/pym/portage/elog/__init__.py b/pym/portage/elog/__init__.py
index c689d821d..53c0a85f7 100644
--- a/pym/portage/elog/__init__.py
+++ b/pym/portage/elog/__init__.py
@@ -100,7 +100,7 @@ def elog_process(cpv, mysettings, phasefilter=None):
else:
all_logentries[cpv] = ebuild_logentries
- for key in _preserve_logentries.keys():
+ for key in list(_preserve_logentries.keys()):
if key in all_logentries:
all_logentries[key] = _merge_logentries(_preserve_logentries[key], all_logentries[key])
else:
@@ -140,7 +140,7 @@ def elog_process(cpv, mysettings, phasefilter=None):
listener(mysettings, str(key), default_logentries, default_fulllog)
# pass the processing to the individual modules
- for s, levels in logsystems.iteritems():
+ for s, levels in logsystems.items():
# allow per module overrides of PORTAGE_ELOG_CLASSES
if levels:
mod_logentries = filter_loglevels(all_logentries[key], levels)
diff --git a/pym/portage/elog/mod_mail_summary.py b/pym/portage/elog/mod_mail_summary.py
index 4380146d2..3ea0e6fe9 100644
--- a/pym/portage/elog/mod_mail_summary.py
+++ b/pym/portage/elog/mod_mail_summary.py
@@ -31,7 +31,7 @@ def finalize(mysettings=None):
an older version of portage will import the module from a newer version
when it upgrades itself."""
global _items
- for mysettings, items in _items.itervalues():
+ for mysettings, items in _items.values():
_finalize(mysettings, items)
_items.clear()
@@ -59,7 +59,7 @@ def _finalize(mysettings, items):
mybody += "- %s\n" % key
mymessage = portage.mail.create_message(myfrom, myrecipient, mysubject,
- mybody, attachments=items.values())
+ mybody, attachments=list(items.values()))
def timeout_handler(signum, frame):
raise PortageException("Timeout in finalize() for elog system 'mail_summary'")
diff --git a/pym/portage/getbinpkg.py b/pym/portage/getbinpkg.py
index 5d3864fd1..35bef052a 100644
--- a/pym/portage/getbinpkg.py
+++ b/pym/portage/getbinpkg.py
@@ -801,7 +801,7 @@ class PackageIndex(object):
if not mycpv:
continue
if self._default_pkg_data:
- for k, v in self._default_pkg_data.iteritems():
+ for k, v in self._default_pkg_data.items():
d.setdefault(k, v)
if self._inherited_keys:
for k in self._inherited_keys:
@@ -814,7 +814,7 @@ class PackageIndex(object):
if self.modified:
self.header["TIMESTAMP"] = str(long(time.time()))
self.header["PACKAGES"] = str(len(self.packages))
- keys = self.header.keys()
+ keys = list(self.header.keys())
keys.sort()
self._writepkgindex(pkgfile, [(k, self.header[k]) \
for k in keys if self.header[k]])
@@ -827,10 +827,10 @@ class PackageIndex(object):
if v is not None and v == metadata.get(k):
del metadata[k]
if self._default_pkg_data:
- for k, v in self._default_pkg_data.iteritems():
+ for k, v in self._default_pkg_data.items():
if metadata.get(k) == v:
metadata.pop(k, None)
- keys = metadata.keys()
+ keys = list(metadata.keys())
keys.sort()
self._writepkgindex(pkgfile,
[(k, metadata[k]) for k in keys if metadata[k]])
diff --git a/pym/portage/manifest.py b/pym/portage/manifest.py
index cd2609c3b..7b20a3c2d 100644
--- a/pym/portage/manifest.py
+++ b/pym/portage/manifest.py
@@ -68,13 +68,13 @@ def parseManifest2(mysplit):
class ManifestEntry(object):
__slots__ = ("type", "name", "hashes")
def __init__(self, **kwargs):
- for k, v in kwargs.iteritems():
+ for k, v in kwargs.items():
setattr(self, k, v)
class Manifest2Entry(ManifestEntry):
def __str__(self):
myline = " ".join([self.type, self.name, str(self.hashes["size"])])
- myhashkeys = self.hashes.keys()
+ myhashkeys = list(self.hashes.keys())
myhashkeys.remove("size")
myhashkeys.sort()
for h in myhashkeys:
@@ -202,15 +202,15 @@ class Manifest(object):
return myhashdict
def _createManifestEntries(self):
- mytypes = self.fhashdict.keys()
+ mytypes = list(self.fhashdict.keys())
mytypes.sort()
for t in mytypes:
- myfiles = self.fhashdict[t].keys()
+ myfiles = list(self.fhashdict[t].keys())
myfiles.sort()
for f in myfiles:
myentry = Manifest2Entry(
type=t, name=f, hashes=self.fhashdict[t][f].copy())
- myhashkeys = myentry.hashes.keys()
+ myhashkeys = list(myentry.hashes.keys())
myhashkeys.sort()
for h in myhashkeys:
if h not in ["size"] + portage.const.MANIFEST2_HASH_FUNCTIONS:
diff --git a/pym/portage/news.py b/pym/portage/news.py
index 84d9ab877..c178a8c48 100644
--- a/pym/portage/news.py
+++ b/pym/portage/news.py
@@ -235,7 +235,7 @@ class NewsItem(object):
'profile' : profile }
all_match = True
- for values in self.restrictions.itervalues():
+ for values in self.restrictions.values():
any_match = False
for restriction in values:
if restriction.checkRestriction(**kwargs):
@@ -269,7 +269,7 @@ class NewsItem(object):
restricts = { _installedRE : DisplayInstalledRestriction,
_profileRE : DisplayProfileRestriction,
_keywordRE : DisplayKeywordRestriction }
- for regex, restriction in restricts.iteritems():
+ for regex, restriction in restricts.items():
match = regex.match(line)
if match:
restrict = restriction(match.groups()[0].strip())
diff --git a/pym/portage/output.py b/pym/portage/output.py
index 5f310d300..6044f2bbc 100644
--- a/pym/portage/output.py
+++ b/pym/portage/output.py
@@ -759,10 +759,10 @@ def _init(config_root='/'):
codes = object.__getattribute__(codes, '_attr')
_styles = object.__getattribute__(_styles, '_attr')
- for k, v in codes.iteritems():
+ for k, v in codes.items():
codes[k] = _unicode_decode(v)
- for k, v in _styles.iteritems():
+ for k, v in _styles.items():
_styles[k] = _unicode_decode(v)
try:
diff --git a/pym/portage/process.py b/pym/portage/process.py
index b49f061f1..6ce304b4f 100644
--- a/pym/portage/process.py
+++ b/pym/portage/process.py
@@ -189,7 +189,7 @@ def spawn(mycommand, env={}, opt_name=None, fd_pipes=None, returnpid=False,
# Avoid a potential UnicodeEncodeError from os.execve().
env_bytes = {}
- for k, v in env.iteritems():
+ for k, v in env.items():
env_bytes[_unicode_encode(k, encoding=_encodings['content'])] = \
_unicode_encode(v, encoding=_encodings['content'])
env = env_bytes
diff --git a/pym/portage/proxy/lazyimport.py b/pym/portage/proxy/lazyimport.py
index 62428543e..c62f90788 100644
--- a/pym/portage/proxy/lazyimport.py
+++ b/pym/portage/proxy/lazyimport.py
@@ -50,7 +50,7 @@ def _unregister_module_proxy(name):
object.__getattribute__(proxy, '_get_target')()
modules = sys.modules
- for name, proxy_list in list(_module_proxies.iteritems()):
+ for name, proxy_list in list(_module_proxies.items()):
if name not in modules:
continue
# First delete this name from the dict so that
diff --git a/pym/portage/sets/base.py b/pym/portage/sets/base.py
index 4b39e9df9..0c08ee194 100644
--- a/pym/portage/sets/base.py
+++ b/pym/portage/sets/base.py
@@ -126,7 +126,7 @@ class PackageSet(object):
rev_transform[atom] = atom
else:
rev_transform[Atom(atom.replace(atom.cp, pkg.cp, 1))] = atom
- best_match = best_match_to_list(pkg, rev_transform.iterkeys())
+ best_match = best_match_to_list(pkg, iter(rev_transform.keys()))
if best_match:
return rev_transform[best_match]
return None
diff --git a/pym/portage/sets/files.py b/pym/portage/sets/files.py
index 15ecd752b..8a7e4d84d 100644
--- a/pym/portage/sets/files.py
+++ b/pym/portage/sets/files.py
@@ -93,7 +93,7 @@ class StaticFileSet(EditablePackageSet):
# in the latest new slot that may be available.
atoms.append(a)
else:
- atoms = data.keys()
+ atoms = list(data.keys())
self._setAtoms(atoms)
self._mtime = mtime
@@ -183,7 +183,7 @@ class ConfigFileSet(PackageSet):
def load(self):
data, errors = self.loader.load()
- self._setAtoms(data.keys())
+ self._setAtoms(list(data.keys()))
def singleBuilder(self, options, settings, trees):
if not "filename" in options:
@@ -254,7 +254,7 @@ class WorldSet(EditablePackageSet):
raise
del e
data = {}
- atoms = data.keys()
+ atoms = list(data.keys())
self._mtime = mtime
atoms_changed = True
else:
@@ -274,7 +274,7 @@ class WorldSet(EditablePackageSet):
raise
del e
data = {}
- nonatoms = data.keys()
+ nonatoms = list(data.keys())
self._mtime2 = mtime
atoms_changed = True
else:
diff --git a/pym/portage/sets/libs.py b/pym/portage/sets/libs.py
index d7e4c731a..b8aa3c41c 100644
--- a/pym/portage/sets/libs.py
+++ b/pym/portage/sets/libs.py
@@ -64,7 +64,7 @@ class PreservedLibraryConsumerSet(LibraryConsumerSet):
consumers = set()
if reg:
plib_dict = reg.getPreservedLibs()
- for libs in plib_dict.itervalues():
+ for libs in plib_dict.values():
for lib in libs:
if self.debug:
print(lib)
@@ -74,7 +74,7 @@ class PreservedLibraryConsumerSet(LibraryConsumerSet):
consumers.update(self.dbapi.linkmap.findConsumers(lib))
# Don't rebuild packages just because they contain preserved
# libs that happen to be consumers of other preserved libs.
- for libs in plib_dict.itervalues():
+ for libs in plib_dict.values():
consumers.difference_update(libs)
else:
return
diff --git a/pym/portage/tests/env/config/test_PackageKeywordsFile.py b/pym/portage/tests/env/config/test_PackageKeywordsFile.py
index 228cf2ecb..d01633125 100644
--- a/pym/portage/tests/env/config/test_PackageKeywordsFile.py
+++ b/pym/portage/tests/env/config/test_PackageKeywordsFile.py
@@ -23,7 +23,7 @@ class PackageKeywordsFileTestCase(TestCase):
f = PackageKeywordsFile(self.fname)
f.load()
i = 0
- for cpv, keyword in f.iteritems():
+ for cpv, keyword in f.items():
self.assertEqual( cpv, self.cpv[i] )
[k for k in keyword if self.assertTrue(k in self.keywords)]
i = i + 1
diff --git a/pym/portage/tests/env/config/test_PackageUseFile.py b/pym/portage/tests/env/config/test_PackageUseFile.py
index 575ede505..4157d6097 100644
--- a/pym/portage/tests/env/config/test_PackageUseFile.py
+++ b/pym/portage/tests/env/config/test_PackageUseFile.py
@@ -22,7 +22,7 @@ class PackageUseFileTestCase(TestCase):
try:
f = PackageUseFile(self.fname)
f.load()
- for cpv, use in f.iteritems():
+ for cpv, use in f.items():
self.assertEqual( cpv, self.cpv )
[flag for flag in use if self.assertTrue(flag in self.useflags)]
finally:
diff --git a/pym/portage/tests/env/config/test_PortageModulesFile.py b/pym/portage/tests/env/config/test_PortageModulesFile.py
index 41a44c4cc..8a37b73bd 100644
--- a/pym/portage/tests/env/config/test_PortageModulesFile.py
+++ b/pym/portage/tests/env/config/test_PortageModulesFile.py
@@ -32,7 +32,7 @@ class PortageModulesFileTestCase(TestCase):
def BuildFile(self):
fd, self.fname = mkstemp()
f = os.fdopen(fd, 'w')
- for k, v in self.items.iteritems():
+ for k, v in self.items.items():
f.write('%s=%s\n' % (k,v))
f.close()
diff --git a/pym/portage/update.py b/pym/portage/update.py
index 0941ba06b..1ea15e7e9 100644
--- a/pym/portage/update.py
+++ b/pym/portage/update.py
@@ -56,7 +56,7 @@ def update_dbentries(update_iter, mydata):
"""Performs update commands and returns a
dict containing only the updated items."""
updated_items = {}
- for k, mycontent in mydata.iteritems():
+ for k, mycontent in mydata.items():
k_unicode = _unicode_decode(k,
encoding=_encodings['repo.content'], errors='replace')
if k_unicode not in ignored_dbentries:
@@ -83,7 +83,7 @@ def fixdbentries(update_iter, dbdir):
mode='r', encoding=_encodings['repo.content'],
errors='replace').read()
updated_items = update_dbentries(update_iter, mydata)
- for myfile, mycontent in updated_items.iteritems():
+ for myfile, mycontent in updated_items.items():
file_path = os.path.join(dbdir, myfile)
write_atomic(file_path, mycontent, encoding=_encodings['repo.content'])
return len(updated_items) > 0
@@ -231,7 +231,7 @@ def update_config_files(config_root, protect, protect_mask, update_iter):
# update /etc/portage/packages.*
ignore_line_re = re.compile(r'^#|^\s*$')
for update_cmd in update_iter:
- for x, contents in file_contents.iteritems():
+ for x, contents in file_contents.items():
for pos, line in enumerate(contents):
if ignore_line_re.match(line):
continue
diff --git a/pym/portage/util.py b/pym/portage/util.py
index 60e72fbb4..5f1a42c2f 100644
--- a/pym/portage/util.py
+++ b/pym/portage/util.py
@@ -220,7 +220,7 @@ def stack_dicts(dicts, incremental=0, incrementals=[], ignore_none=0):
return None
if final_dict is None:
final_dict = {}
- for y in mydict.keys():
+ for y in list(mydict.keys()):
if True:
if y in final_dict and (incremental or (y in incrementals)):
final_dict[y] += " "+mydict[y][:]
@@ -247,7 +247,7 @@ def stack_lists(lists, incremental=1):
new_list[y] = True
else:
new_list[y] = True
- return new_list.keys()
+ return list(new_list.keys())
def grabdict(myfilename, juststrings=0, empty=0, recursive=0, incremental=1):
"""
@@ -287,7 +287,7 @@ def grabdict(myfilename, juststrings=0, empty=0, recursive=0, incremental=1):
else:
newdict[myline[0]] = myline[1:]
if juststrings:
- for k, v in newdict.iteritems():
+ for k, v in newdict.items():
newdict[k] = " ".join(v)
return newdict
@@ -299,7 +299,7 @@ def grabdict_package(myfilename, juststrings=0, recursive=0):
# "RuntimeError: dictionary changed size during iteration"
# when an invalid atom is deleted.
atoms = {}
- for k, v in pkgs.iteritems():
+ for k, v in pkgs.items():
try:
k = Atom(k)
except InvalidAtom:
diff --git a/pym/portage/xpak.py b/pym/portage/xpak.py
index 29c28af32..b5878befb 100644
--- a/pym/portage/xpak.py
+++ b/pym/portage/xpak.py
@@ -99,7 +99,7 @@ def xpak_mem(mydata):
"""Create an xpack segement from a map object."""
mydata_encoded = {}
- for k, v in mydata.iteritems():
+ for k, v in mydata.items():
k = _unicode_encode(k,
encoding=_encodings['repo.content'], errors='backslashreplace')
v = _unicode_encode(v,
@@ -112,7 +112,7 @@ def xpak_mem(mydata):
indexpos=0
dataglob = _unicode_encode('')
datapos=0
- for x, newglob in mydata.iteritems():
+ for x, newglob in mydata.items():
mydatasize=len(newglob)
indexglob=indexglob+encodeint(len(x))+x+encodeint(datapos)+encodeint(mydatasize)
indexpos=indexpos+4+len(x)+4+4
diff --git a/pym/repoman/utilities.py b/pym/repoman/utilities.py
index 23b85ffe3..45160cfc8 100644
--- a/pym/repoman/utilities.py
+++ b/pym/repoman/utilities.py
@@ -220,7 +220,7 @@ def format_qa_output(formatter, stats, fails, dofull, dofail, options, qawarning
full = options.mode == 'full'
# we only want key value pairs where value > 0
for category, number in \
- filter(lambda myitem: myitem[1] > 0, stats.iteritems()):
+ filter(lambda myitem: myitem[1] > 0, iter(stats.items())):
formatter.add_literal_data(_unicode_decode(" " + category.ljust(30)))
if category in qawarnings:
formatter.push_style("WARN")