author     Arfrever Frehtes Taifersar Arahesis <arfrever@gentoo.org>    2009-09-21 16:07:07 +0000
committer  Arfrever Frehtes Taifersar Arahesis <arfrever@gentoo.org>    2009-09-21 16:07:07 +0000
commit     6907b88a99468f88e6ba2ca479d419dcdf921131 (patch)
tree       f5cb8ca5056485096ac54022907299cacc2acc23 /pym/portage
parent     31505dfc2b6a6a0533123fe43d2240aab177d7c4 (diff)
Use dict.(keys|values|items)() instead of dict.(iterkeys|itervalues|iteritems)() for compatibility with Python 3.
(2to3-3.1 -f dict -nw ${FILES})
svn path=/main/trunk/; revision=14327
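For context, the sketch below (not part of the commit) illustrates the kind of rewrite 2to3's "dict" fixer applies throughout this diff: the Python 2 iterator methods iterkeys()/itervalues()/iteritems() are replaced with plain keys()/values()/items(), and list() or iter() wrappers are added where a concrete list or an explicit one-shot iterator is actually required, since these methods return views in Python 3.

```python
# Minimal sketch of the 2to3 "dict" fixer transformation; the sample
# dict and values are illustrative, not taken from Portage.
settings = {"ARCH": "amd64", "CHOST": "x86_64-pc-linux-gnu"}

# Python 2 only:       for k, v in settings.iteritems(): ...
# Python 2 and 3:      items() works in both (a list in 2, a view in 3)
for k, v in settings.items():
    print("%s=%s" % (k, v))

# Python 2 only:       keys = settings.keys(); keys.sort()
# Python 2 and 3:      wrap in list() before sorting in place,
#                      because dict.keys() is not a list in Python 3
keys = list(settings.keys())
keys.sort()
```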
Diffstat (limited to 'pym/portage')
-rw-r--r--   pym/portage/__init__.py                                    | 100
-rw-r--r--   pym/portage/cache/anydbm.py                                |   4
-rw-r--r--   pym/portage/cache/ebuild_xattr.py                          |   2
-rw-r--r--   pym/portage/cache/mappings.py                              |  34
-rw-r--r--   pym/portage/cache/sqlite.py                                |   2
-rw-r--r--   pym/portage/cache/template.py                              |  16
-rw-r--r--   pym/portage/checksum.py                                    |   2
-rw-r--r--   pym/portage/cvstree.py                                     |   4
-rw-r--r--   pym/portage/dbapi/bintree.py                               |   6
-rw-r--r--   pym/portage/dbapi/porttree.py                              |  14
-rw-r--r--   pym/portage/dbapi/vartree.py                               |  34
-rw-r--r--   pym/portage/dbapi/virtual.py                               |   2
-rw-r--r--   pym/portage/debug.py                                       |   2
-rw-r--r--   pym/portage/dep.py                                         |   6
-rw-r--r--   pym/portage/dispatch_conf.py                               |   2
-rw-r--r--   pym/portage/eclass_cache.py                                |   2
-rw-r--r--   pym/portage/elog/__init__.py                               |   4
-rw-r--r--   pym/portage/elog/mod_mail_summary.py                       |   4
-rw-r--r--   pym/portage/getbinpkg.py                                   |   8
-rw-r--r--   pym/portage/manifest.py                                    |  10
-rw-r--r--   pym/portage/news.py                                        |   4
-rw-r--r--   pym/portage/output.py                                      |   4
-rw-r--r--   pym/portage/process.py                                     |   2
-rw-r--r--   pym/portage/proxy/lazyimport.py                            |   2
-rw-r--r--   pym/portage/sets/base.py                                   |   2
-rw-r--r--   pym/portage/sets/files.py                                  |   8
-rw-r--r--   pym/portage/sets/libs.py                                   |   4
-rw-r--r--   pym/portage/tests/env/config/test_PackageKeywordsFile.py  |   2
-rw-r--r--   pym/portage/tests/env/config/test_PackageUseFile.py       |   2
-rw-r--r--   pym/portage/tests/env/config/test_PortageModulesFile.py   |   2
-rw-r--r--   pym/portage/update.py                                      |   6
-rw-r--r--   pym/portage/util.py                                        |   8
-rw-r--r--   pym/portage/xpak.py                                        |   4
33 files changed, 154 insertions(+), 154 deletions(-)
diff --git a/pym/portage/__init__.py b/pym/portage/__init__.py
index fe696b6dc..e8532e60d 100644
--- a/pym/portage/__init__.py
+++ b/pym/portage/__init__.py
@@ -183,7 +183,7 @@ class _unicode_func_wrapper(object):
wrapped_kwargs = dict(
(_unicode_encode(k, encoding=encoding, errors='strict'),
_unicode_encode(v, encoding=encoding, errors='strict'))
- for k, v in kwargs.iteritems())
+ for k, v in kwargs.items())
else:
wrapped_kwargs = {}
@@ -784,13 +784,13 @@ class digraph(object):
return list(self.nodes[node][0])
children = []
if hasattr(ignore_priority, '__call__'):
- for child, priorities in self.nodes[node][0].iteritems():
+ for child, priorities in self.nodes[node][0].items():
for priority in priorities:
if not ignore_priority(priority):
children.append(child)
break
else:
- for child, priorities in self.nodes[node][0].iteritems():
+ for child, priorities in self.nodes[node][0].items():
if ignore_priority < priorities[-1]:
children.append(child)
return children
@@ -801,13 +801,13 @@ class digraph(object):
return list(self.nodes[node][1])
parents = []
if hasattr(ignore_priority, '__call__'):
- for parent, priorities in self.nodes[node][1].iteritems():
+ for parent, priorities in self.nodes[node][1].items():
for priority in priorities:
if not ignore_priority(priority):
parents.append(parent)
break
else:
- for parent, priorities in self.nodes[node][1].iteritems():
+ for parent, priorities in self.nodes[node][1].items():
if ignore_priority < priorities[-1]:
parents.append(parent)
return parents
@@ -826,7 +826,7 @@ class digraph(object):
elif hasattr(ignore_priority, '__call__'):
for node in self.order:
is_leaf_node = True
- for child, priorities in self.nodes[node][0].iteritems():
+ for child, priorities in self.nodes[node][0].items():
for priority in priorities:
if not ignore_priority(priority):
is_leaf_node = False
@@ -838,7 +838,7 @@ class digraph(object):
else:
for node in self.order:
is_leaf_node = True
- for child, priorities in self.nodes[node][0].iteritems():
+ for child, priorities in self.nodes[node][0].items():
if ignore_priority < priorities[-1]:
is_leaf_node = False
break
@@ -860,7 +860,7 @@ class digraph(object):
elif hasattr(ignore_priority, '__call__'):
for node in self.order:
is_root_node = True
- for parent, priorities in self.nodes[node][1].iteritems():
+ for parent, priorities in self.nodes[node][1].items():
for priority in priorities:
if not ignore_priority(priority):
is_root_node = False
@@ -872,7 +872,7 @@ class digraph(object):
else:
for node in self.order:
is_root_node = True
- for parent, priorities in self.nodes[node][1].iteritems():
+ for parent, priorities in self.nodes[node][1].items():
if ignore_priority < priorities[-1]:
is_root_node = False
break
@@ -888,16 +888,16 @@ class digraph(object):
clone = digraph()
clone.nodes = {}
memo = {}
- for children, parents, node in self.nodes.itervalues():
+ for children, parents, node in self.nodes.values():
children_clone = {}
- for child, priorities in children.iteritems():
+ for child, priorities in children.items():
priorities_clone = memo.get(id(priorities))
if priorities_clone is None:
priorities_clone = priorities[:]
memo[id(priorities)] = priorities_clone
children_clone[child] = priorities_clone
parents_clone = {}
- for parent, priorities in parents.iteritems():
+ for parent, priorities in parents.items():
priorities_clone = memo.get(id(priorities))
if priorities_clone is None:
priorities_clone = priorities[:]
@@ -941,7 +941,7 @@ class digraph(object):
output("depends on\n")
else:
output("(no children)\n")
- for child, priorities in self.nodes[node][0].iteritems():
+ for child, priorities in self.nodes[node][0].items():
output(" %s (%s)\n" % (child, priorities[-1],))
#parse /etc/env.d and generate /etc/profile.env
@@ -1135,7 +1135,7 @@ def env_update(makelinks=1, target_root=None, prev_mtimes=None, contents=None,
not ld_cache_update and \
contents is not None:
libdir_contents_changed = False
- for mypath, mydata in contents.iteritems():
+ for mypath, mydata in contents.items():
if mydata[0] not in ("obj","sym"):
continue
head, tail = os.path.split(mypath)
@@ -1740,7 +1740,7 @@ class config(object):
for x in self.profiles]
for pkeyworddict in rawpkeywords:
cpdict = {}
- for k, v in pkeyworddict.iteritems():
+ for k, v in pkeyworddict.items():
cpdict.setdefault(k.cp, {})[k] = v
self._pkeywords_list.append(cpdict)
@@ -1758,7 +1758,7 @@ class config(object):
recursive=1) for x in self.profiles]
for pusemaskdict in rawpusemask:
cpdict = {}
- for k, v in pusemaskdict.iteritems():
+ for k, v in pusemaskdict.items():
cpdict.setdefault(k.cp, {})[k] = v
self.pusemask_list.append(cpdict)
del rawpusemask
@@ -1768,7 +1768,7 @@ class config(object):
juststrings=True, recursive=1) for x in self.profiles]
for rawpusedict in rawprofileuse:
cpdict = {}
- for k, v in rawpusedict.iteritems():
+ for k, v in rawpusedict.items():
cpdict.setdefault(k.cp, {})[k] = v
self.pkgprofileuse.append(cpdict)
del rawprofileuse
@@ -1784,7 +1784,7 @@ class config(object):
for x in self.profiles]
for rawpusefdict in rawpuseforce:
cpdict = {}
- for k, v in rawpusefdict.iteritems():
+ for k, v in rawpusefdict.items():
cpdict.setdefault(k.cp, {})[k] = v
self.puseforce_list.append(cpdict)
del rawpuseforce
@@ -1840,7 +1840,7 @@ class config(object):
# Avoid potential UnicodeDecodeError exceptions later.
env_unicode = dict((_unicode_decode(k), _unicode_decode(v))
- for k, v in env.iteritems())
+ for k, v in env.items())
self.backupenv = env_unicode
@@ -1848,7 +1848,7 @@ class config(object):
# Remove duplicate values so they don't override updated
# profile.env values later (profile.env is reloaded in each
# call to self.regenerate).
- for k, v in env_d.iteritems():
+ for k, v in env_d.items():
try:
if self.backupenv[k] == v:
del self.backupenv[k]
@@ -1965,14 +1965,14 @@ class config(object):
pmask_locations.append(abs_user_config)
pusedict = grabdict_package(
os.path.join(abs_user_config, "package.use"), recursive=1)
- for k, v in pusedict.iteritems():
+ for k, v in pusedict.items():
self.pusedict.setdefault(k.cp, {})[k] = v
#package.keywords
pkgdict = grabdict_package(
os.path.join(abs_user_config, "package.keywords"),
recursive=1)
- for k, v in pkgdict.iteritems():
+ for k, v in pkgdict.items():
# default to ~arch if no specific keyword is given
if not v:
mykeywordlist = []
@@ -1990,7 +1990,7 @@ class config(object):
#package.license
licdict = grabdict_package(os.path.join(
abs_user_config, "package.license"), recursive=1)
- for k, v in licdict.iteritems():
+ for k, v in licdict.items():
cp = k.cp
cp_dict = self._plicensedict.get(cp)
if not cp_dict:
@@ -2001,7 +2001,7 @@ class config(object):
#package.properties
propdict = grabdict_package(os.path.join(
abs_user_config, "package.properties"), recursive=1)
- for k, v in propdict.iteritems():
+ for k, v in propdict.items():
cp = k.cp
cp_dict = self._ppropertiesdict.get(cp)
if not cp_dict:
@@ -2214,7 +2214,7 @@ class config(object):
}
for mypath, (gid, mode, modemask, preserve_perms) \
- in dir_mode_map.iteritems():
+ in dir_mode_map.items():
mydir = os.path.join(self["ROOT"], mypath)
if preserve_perms and os.path.isdir(mydir):
# Only adjust permissions on some directories if
@@ -2617,7 +2617,7 @@ class config(object):
for i, pkgprofileuse_dict in enumerate(self.pkgprofileuse):
cpdict = pkgprofileuse_dict.get(cp)
if cpdict:
- keys = cpdict.keys()
+ keys = list(cpdict.keys())
while keys:
bestmatch = best_match_to_list(cpv_slot, keys)
if bestmatch:
@@ -2647,7 +2647,7 @@ class config(object):
self.puse = ""
cpdict = self.pusedict.get(cp)
if cpdict:
- keys = cpdict.keys()
+ keys = list(cpdict.keys())
while keys:
self.pusekey = best_match_to_list(cpv_slot, keys)
if self.pusekey:
@@ -2744,7 +2744,7 @@ class config(object):
# allows packages that support LINGUAS but don't
# declare it in IUSE to use the variable outside of the
# USE_EXPAND context.
- for k, use_expand_iuse in use_expand_iuses.iteritems():
+ for k, use_expand_iuse in use_expand_iuses.items():
if k + '_*' in use:
use.update( x for x in use_expand_iuse if x not in usemask )
k = k.upper()
@@ -2799,7 +2799,7 @@ class config(object):
for i, pusemask_dict in enumerate(self.pusemask_list):
cpdict = pusemask_dict.get(cp)
if cpdict:
- keys = cpdict.keys()
+ keys = list(cpdict.keys())
while keys:
best_match = best_match_to_list(pkg, keys)
if best_match:
@@ -2822,7 +2822,7 @@ class config(object):
for i, puseforce_dict in enumerate(self.puseforce_list):
cpdict = puseforce_dict.get(cp)
if cpdict:
- keys = cpdict.keys()
+ keys = list(cpdict.keys())
while keys:
best_match = best_match_to_list(pkg, keys)
if best_match:
@@ -2939,7 +2939,7 @@ class config(object):
matches = False
if pkgdict:
cpv_slot_list = ["%s:%s" % (cpv, metadata["SLOT"])]
- for atom, pkgkeywords in pkgdict.iteritems():
+ for atom, pkgkeywords in pkgdict.items():
if match_from_list(atom, cpv_slot_list):
matches = True
pgroups.extend(pkgkeywords)
@@ -3007,7 +3007,7 @@ class config(object):
if cpdict:
accept_license = list(self._accept_license)
cpv_slot = "%s:%s" % (cpv, metadata["SLOT"])
- for atom in match_to_list(cpv_slot, cpdict.keys()):
+ for atom in match_to_list(cpv_slot, list(cpdict.keys())):
accept_license.extend(cpdict[atom])
licenses = set(flatten(dep.use_reduce(dep.paren_reduce(
@@ -3088,7 +3088,7 @@ class config(object):
if cpdict:
accept_properties = list(self._accept_properties)
cpv_slot = "%s:%s" % (cpv, metadata["SLOT"])
- for atom in match_to_list(cpv_slot, cpdict.keys()):
+ for atom in match_to_list(cpv_slot, list(cpdict.keys())):
accept_properties.extend(cpdict[atom])
properties = set(flatten(dep.use_reduce(dep.paren_reduce(
@@ -3468,7 +3468,7 @@ class config(object):
virtuals_file = os.path.join(x, "virtuals")
virtuals_dict = grabdict(virtuals_file)
atoms_dict = {}
- for k, v in virtuals_dict.iteritems():
+ for k, v in virtuals_dict.items():
try:
virt_atom = portage.dep.Atom(k)
except portage.exception.InvalidAtom:
@@ -3527,7 +3527,7 @@ class config(object):
def _populate_treeVirtuals(self, vartree):
"""Reduce the provides into a list by CP."""
- for provide, cpv_list in vartree.get_all_provides().iteritems():
+ for provide, cpv_list in vartree.get_all_provides().items():
try:
provide = dep.Atom(provide)
except exception.InvalidAtom:
@@ -3547,7 +3547,7 @@ class config(object):
# Virtuals by profile+tree preferences.
ptVirtuals = {}
- for virt, installed_list in self.treeVirtuals.iteritems():
+ for virt, installed_list in self.treeVirtuals.items():
profile_list = self.dirVirtuals.get(virt, None)
if not profile_list:
continue
@@ -3631,7 +3631,7 @@ class config(object):
yield (k, self[k])
def items(self):
- return list(self.iteritems())
+ return list(self.items())
def __setitem__(self,mykey,myvalue):
"set a value; will be thrown away at reset() time"
@@ -3850,7 +3850,7 @@ def spawn(mystring, mysettings, debug=0, free=0, droppriv=0, sesandbox=0, fakero
# it needs to be flushed before allowing a child process to use it
# so that output always shows in the correct order.
stdout_filenos = (sys.stdout.fileno(), sys.stderr.fileno())
- for fd in fd_pipes.itervalues():
+ for fd in fd_pipes.values():
if fd in stdout_filenos:
sys.stdout.flush()
sys.stderr.flush()
@@ -4011,7 +4011,7 @@ def _spawn_fetch(settings, args, **kwargs):
if args[0] != BASH_BINARY:
args = [BASH_BINARY, "-c", "exec \"$@\"", args[0]] + args
- rval = spawn_func(args, env=dict(settings.iteritems()), **kwargs)
+ rval = spawn_func(args, env=dict(iter(settings.items())), **kwargs)
return rval
@@ -4302,7 +4302,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, locks_in_subdir=".locks",
file_uri_tuples = []
if isinstance(myuris, dict):
- for myfile, uri_set in myuris.iteritems():
+ for myfile, uri_set in myuris.items():
for myuri in uri_set:
file_uri_tuples.append((myfile, myuri))
else:
@@ -4365,7 +4365,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, locks_in_subdir=".locks",
# Prefer thirdpartymirrors over normal mirrors in cases when
# the file does not yet exist on the normal mirrors.
- for myfile, uris in thirdpartymirror_uris.iteritems():
+ for myfile, uris in thirdpartymirror_uris.items():
primaryuri_dict.setdefault(myfile, []).extend(uris)
can_fetch=True
@@ -4700,7 +4700,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, locks_in_subdir=".locks",
mysettings.get("PORTAGE_QUIET", None) == "1"
digests = mydigests.get(myfile)
if digests:
- digests = digests.keys()
+ digests = list(digests.keys())
digests.sort()
eout.ebegin(
"%s %s ;-)" % (myfile, " ".join(digests)))
@@ -5207,7 +5207,7 @@ def digestgen(myarchives, mysettings, overwrite=1, manifestonly=0, myportdb=None
writemsg(_("!!! Permission Denied: %s\n") % (e,), noiselevel=-1)
return 0
if "assume-digests" not in mysettings.features:
- distlist = mf.fhashdict.get("DIST", {}).keys()
+ distlist = list(mf.fhashdict.get("DIST", {}).keys())
distlist.sort()
auto_assumed = []
for filename in distlist:
@@ -5271,7 +5271,7 @@ def digestcheck(myfiles, mysettings, strict=0, justmanifest=0):
return 1
mf = Manifest(pkgdir, mysettings["DISTDIR"])
manifest_empty = True
- for d in mf.fhashdict.itervalues():
+ for d in mf.fhashdict.values():
if d:
manifest_empty = False
break
@@ -6178,7 +6178,7 @@ def _prepare_features_dirs(mysettings):
droppriv = secpass >= 2 and \
"userpriv" in mysettings.features and \
"userpriv" not in restrict
- for myfeature, kwargs in features_dirs.iteritems():
+ for myfeature, kwargs in features_dirs.items():
if myfeature in mysettings.features:
failure = False
basedir = mysettings.get(kwargs["basedir_var"])
@@ -8307,7 +8307,7 @@ def getmaskingstatus(mycpv, settings=None, portdb=None):
matches = False
if pkgdict:
cpv_slot_list = ["%s:%s" % (mycpv, metadata["SLOT"])]
- for atom, pkgkeywords in pkgdict.iteritems():
+ for atom, pkgkeywords in pkgdict.items():
if match_from_list(atom, cpv_slot_list):
matches = True
pgroups.extend(pkgkeywords)
@@ -8413,9 +8413,9 @@ class FetchlistDict(portage.cache.mappings.Mapping):
self.portdb = mydbapi
def __getitem__(self, pkg_key):
"""Returns the complete fetch list for a given package."""
- return self.portdb.getFetchMap(pkg_key, mytree=self.mytree).keys()
+ return list(self.portdb.getFetchMap(pkg_key, mytree=self.mytree).keys())
def __contains__(self, cpv):
- return cpv in self.keys()
+ return cpv in list(self.keys())
def has_key(self, pkg_key):
"""Returns true if the given package exists within pkgdir."""
return pkg_key in self
@@ -8712,7 +8712,7 @@ def _global_updates(trees, prev_mtimes):
# until after _all_ of the above updates have
# been processed because the mtimedb will
# automatically commit when killed by ctrl C.
- for mykey, mtime in timestamps.iteritems():
+ for mykey, mtime in timestamps.items():
prev_mtimes[mykey] = mtime
# We gotta do the brute force updates for these now.
@@ -8785,7 +8785,7 @@ class MtimeDB(dict):
mtimedbkeys = set(("info", "ldpath", "resume", "resume_backup",
"starttime", "updates", "version"))
- for k in d.keys():
+ for k in list(d.keys()):
if k not in mtimedbkeys:
writemsg(_("Deleting invalid mtimedb key: %s\n") % str(k))
del d[k]
diff --git a/pym/portage/cache/anydbm.py b/pym/portage/cache/anydbm.py
index 5b771adad..143c321e1 100644
--- a/pym/portage/cache/anydbm.py
+++ b/pym/portage/cache/anydbm.py
@@ -51,7 +51,7 @@ class database(fs_template.FsBased):
self._ensure_access(self._db_path)
def iteritems(self):
- return self.__db.iteritems()
+ return iter(self.__db.items())
def _getitem(self, cpv):
# we override getitem because it's just a cpickling of the data handed in.
@@ -64,7 +64,7 @@ class database(fs_template.FsBased):
del self.__db[cpv]
def __iter__(self):
- return iter(self.__db.keys())
+ return iter(list(self.__db.keys()))
def __contains__(self, cpv):
return cpv in self.__db
diff --git a/pym/portage/cache/ebuild_xattr.py b/pym/portage/cache/ebuild_xattr.py
index 4bcc6c846..7ea4aaaca 100644
--- a/pym/portage/cache/ebuild_xattr.py
+++ b/pym/portage/cache/ebuild_xattr.py
@@ -123,7 +123,7 @@ class database(fs_template.FsBased):
def _setitem(self, cpv, values):
path = self.__get_path(cpv)
max = self.max_len
- for key,value in values.iteritems():
+ for key,value in values.items():
# mtime comes in as long so need to convert to strings
s = str(value)
# We need to split long values
diff --git a/pym/portage/cache/mappings.py b/pym/portage/cache/mappings.py
index 066927ac1..69e37fa15 100644
--- a/pym/portage/cache/mappings.py
+++ b/pym/portage/cache/mappings.py
@@ -23,7 +23,7 @@ class Mapping(object):
"""
def __iter__(self):
- return self.iterkeys()
+ return iter(self.keys())
def keys(self):
return list(self.__iter__())
@@ -48,14 +48,14 @@ class Mapping(object):
return self.__iter__()
def itervalues(self):
- for _, v in self.iteritems():
+ for _, v in self.items():
yield v
def values(self):
- return [v for _, v in self.iteritems()]
+ return [v for _, v in self.items()]
def items(self):
- return list(self.iteritems())
+ return list(self.items())
def get(self, key, default=None):
try:
@@ -64,10 +64,10 @@ class Mapping(object):
return default
def __repr__(self):
- return repr(dict(self.iteritems()))
+ return repr(dict(self.items()))
def __len__(self):
- return len(self.keys())
+ return len(list(self.keys()))
if sys.hexversion >= 0x3000000:
items = iteritems
@@ -80,7 +80,7 @@ class MutableMapping(Mapping):
"""
def clear(self):
- for key in self.keys():
+ for key in list(self.keys()):
del self[key]
def setdefault(self, key, default=None):
@@ -105,7 +105,7 @@ class MutableMapping(Mapping):
def popitem(self):
try:
- k, v = self.iteritems().next()
+ k, v = iter(self.items()).next()
except StopIteration:
raise KeyError('container is empty')
del self[k]
@@ -226,9 +226,9 @@ class ProtectedDict(MutableMapping):
def __iter__(self):
- for k in self.new.iterkeys():
+ for k in self.new.keys():
yield k
- for k in self.orig.iterkeys():
+ for k in self.orig.keys():
if k not in self.blacklist and k not in self.new:
yield k
@@ -333,12 +333,12 @@ def slot_dict_class(keys, prefix="_val_"):
self.update(kwargs)
def __iter__(self):
- for k, v in self.iteritems():
+ for k, v in self.items():
yield k
def __len__(self):
l = 0
- for i in self.iteritems():
+ for i in self.items():
l += 1
return l
@@ -353,14 +353,14 @@ def slot_dict_class(keys, prefix="_val_"):
pass
def items(self):
- return list(self.iteritems())
+ return list(self.items())
def itervalues(self):
- for k, v in self.iteritems():
+ for k, v in self.items():
yield v
def values(self):
- return list(self.itervalues())
+ return list(self.values())
def __delitem__(self, k):
try:
@@ -442,7 +442,7 @@ def slot_dict_class(keys, prefix="_val_"):
def popitem(self):
try:
- k, v = self.iteritems().next()
+ k, v = iter(self.items()).next()
except StopIteration:
raise KeyError('container is empty')
del self[k]
@@ -461,7 +461,7 @@ def slot_dict_class(keys, prefix="_val_"):
pass
def __str__(self):
- return str(dict(self.iteritems()))
+ return str(dict(self.items()))
if sys.hexversion >= 0x3000000:
items = iteritems
diff --git a/pym/portage/cache/sqlite.py b/pym/portage/cache/sqlite.py
index 2bb740722..48dfd3aac 100644
--- a/pym/portage/cache/sqlite.py
+++ b/pym/portage/cache/sqlite.py
@@ -106,7 +106,7 @@ class database(fs_template.FsBased):
self._allowed_keys
cursor = self._db_cursor
- for k, v in self._db_table.iteritems():
+ for k, v in self._db_table.items():
if self._db_table_exists(v["table_name"]):
create_statement = self._db_table_get_create(v["table_name"])
if create_statement != v["create"]:
diff --git a/pym/portage/cache/template.py b/pym/portage/cache/template.py
index f4e9dfb8d..e90c9dba5 100644
--- a/pym/portage/cache/template.py
+++ b/pym/portage/cache/template.py
@@ -67,7 +67,7 @@ class database(object):
raise cache_errors.ReadOnlyRestriction()
if self.cleanse_keys:
d=ProtectedDict(values)
- for k in d.keys():
+ for k in list(d.keys()):
if d[k] == '':
del d[k]
if self.serialize_eclasses and "_eclasses_" in values:
@@ -109,17 +109,17 @@ class database(object):
return cpv in self
def keys(self):
- return tuple(self.iterkeys())
+ return tuple(self.keys())
def iterkeys(self):
return iter(self)
def iteritems(self):
- for x in self.iterkeys():
+ for x in self.keys():
yield (x, self[x])
def items(self):
- return list(self.iteritems())
+ return list(self.items())
def sync(self, rate=0):
self.sync_rate = rate
@@ -151,7 +151,7 @@ class database(object):
if self.iterkeys is database.iterkeys:
# prevent a possible recursive loop
raise NotImplementedError(self)
- return self.iterkeys()
+ return iter(self.keys())
def get(self, k, x=None):
try:
@@ -169,7 +169,7 @@ class database(object):
import re
restricts = {}
- for key,match in match_dict.iteritems():
+ for key,match in match_dict.items():
# XXX this sucks.
try:
if isinstance(match, basestring):
@@ -181,10 +181,10 @@ class database(object):
if key not in self.__known_keys:
raise InvalidRestriction(key, match, "Key isn't valid")
- for cpv in self.iterkeys():
+ for cpv in self.keys():
cont = True
vals = self[cpv]
- for key, match in restricts.iteritems():
+ for key, match in restricts.items():
if not match(vals[key]):
cont = False
break
diff --git a/pym/portage/checksum.py b/pym/portage/checksum.py
index 93076d304..9cc2d82e7 100644
--- a/pym/portage/checksum.py
+++ b/pym/portage/checksum.py
@@ -134,7 +134,7 @@ def perform_all(x, calc_prelink=0):
return mydict
def get_valid_checksum_keys():
- return hashfunc_map.keys()
+ return list(hashfunc_map.keys())
def get_hash_origin(hashtype):
if hashtype not in hashfunc_map:
diff --git a/pym/portage/cvstree.py b/pym/portage/cvstree.py
index 0dd8f10bc..eaf7f37a5 100644
--- a/pym/portage/cvstree.py
+++ b/pym/portage/cvstree.py
@@ -89,12 +89,12 @@ def findoption(entries, pattern, recursive=0, basedir=""):
basedir."""
if not basedir.endswith("/"):
basedir += "/"
- for myfile, mydata in entries["files"].iteritems():
+ for myfile, mydata in entries["files"].items():
if "cvs" in mydata["status"]:
if pattern.search(mydata["flags"]):
yield basedir+myfile
if recursive:
- for mydir, mydata in entries["dirs"].iteritems():
+ for mydir, mydata in entries["dirs"].items():
for x in findoption(mydata, pattern,
recursive, basedir+mydir):
yield x
diff --git a/pym/portage/dbapi/bintree.py b/pym/portage/dbapi/bintree.py
index c37242419..ff277a8fa 100644
--- a/pym/portage/dbapi/bintree.py
+++ b/pym/portage/dbapi/bintree.py
@@ -114,7 +114,7 @@ class bindbapi(fakedbapi):
mytbz2 = portage.xpak.tbz2(tbz2path)
mydata = mytbz2.get_data()
- for k, v in values.iteritems():
+ for k, v in values.items():
k = _unicode_encode(k,
encoding=_encodings['repo.content'], errors='backslashreplace')
v = _unicode_encode(v,
@@ -645,7 +645,7 @@ class binarytree(object):
# from xpak.
if update_pkgindex and os.access(self.pkgdir, os.W_OK):
del pkgindex.packages[:]
- pkgindex.packages.extend(metadata.itervalues())
+ pkgindex.packages.extend(iter(metadata.values()))
self._update_pkgindex_header(pkgindex.header)
from portage.util import atomic_ofstream
f = atomic_ofstream(self._pkgindex_file)
@@ -741,7 +741,7 @@ class binarytree(object):
# Remote package instances override local package
# if they are not identical.
hash_names = ["SIZE"] + self._pkgindex_hashes
- for cpv, local_metadata in metadata.iteritems():
+ for cpv, local_metadata in metadata.items():
remote_metadata = self._remotepkgs.get(cpv)
if remote_metadata is None:
continue
diff --git a/pym/portage/dbapi/porttree.py b/pym/portage/dbapi/porttree.py
index 98af60d7f..bf97af63e 100644
--- a/pym/portage/dbapi/porttree.py
+++ b/pym/portage/dbapi/porttree.py
@@ -201,10 +201,10 @@ class portdbapi(dbapi):
# earlier ones that correspond to the same name.
porttrees = [x for x in porttrees if x not in identically_named_paths]
ignored_map = {}
- for path, repo_name in identically_named_paths.iteritems():
+ for path, repo_name in identically_named_paths.items():
ignored_map.setdefault(repo_name, []).append(path)
self._ignored_repos = tuple((repo_name, tuple(paths)) \
- for repo_name, paths in ignored_map.iteritems())
+ for repo_name, paths in ignored_map.items())
self.porttrees = porttrees
porttree_root = porttrees[0]
@@ -232,7 +232,7 @@ class portdbapi(dbapi):
repo_aliases = {}
if local_repo_configs is not None:
default_loc_repo_config = local_repo_configs.get('DEFAULT')
- for repo_name, loc_repo_conf in local_repo_configs.iteritems():
+ for repo_name, loc_repo_conf in local_repo_configs.items():
if loc_repo_conf.aliases is not None:
for alias in loc_repo_conf.aliases:
overridden_alias = repo_aliases.get(alias)
@@ -503,7 +503,7 @@ class portdbapi(dbapi):
i = metadata
if hasattr(metadata, "iteritems"):
- i = metadata.iteritems()
+ i = iter(metadata.items())
metadata = dict(i)
if metadata.get("INHERITED", False):
@@ -761,7 +761,7 @@ class portdbapi(dbapi):
all_uris = []
all_files = []
- for filename, uris in uri_map.iteritems():
+ for filename, uris in uri_map.items():
for uri in uris:
all_uris.append(uri)
all_files.append(filename)
@@ -858,7 +858,7 @@ class portdbapi(dbapi):
y == "CVS":
continue
d[x+"/"+y] = None
- l = d.keys()
+ l = list(d.keys())
l.sort()
return l
@@ -915,7 +915,7 @@ class portdbapi(dbapi):
(mycp, self.mysettings["PORTAGE_CONFIGROOT"]), noiselevel=-1)
mylist = []
else:
- mylist = d.keys()
+ mylist = list(d.keys())
# Always sort in ascending order here since it's handy
# and the result can be easily cached and reused.
self._cpv_sort_ascending(mylist)
diff --git a/pym/portage/dbapi/vartree.py b/pym/portage/dbapi/vartree.py
index 5db04e4b1..635ba6026 100644
--- a/pym/portage/dbapi/vartree.py
+++ b/pym/portage/dbapi/vartree.py
@@ -605,8 +605,8 @@ class LinkageMap(object):
rValue = []
if not self._libs:
self.rebuild()
- for arch_map in self._libs.itervalues():
- for soname_map in arch_map.itervalues():
+ for arch_map in self._libs.values():
+ for soname_map in arch_map.values():
for obj_key in soname_map.providers:
rValue.extend(self._obj_properties[obj_key][4])
return rValue
@@ -1019,7 +1019,7 @@ class vardbapi(dbapi):
self.invalidentry(self.getpath(y))
continue
d[mysplit[0]+"/"+mysplit[1]] = None
- return d.keys()
+ return list(d.keys())
def checkblockers(self, origdep):
pass
@@ -1224,7 +1224,7 @@ class vardbapi(dbapi):
cache_valid = cache_mtime == mydir_mtime
if cache_valid:
# Migrate old metadata to unicode.
- for k, v in metadata.iteritems():
+ for k, v in metadata.items():
metadata[k] = _unicode_decode(v,
encoding=_encodings['repo.content'], errors='replace')
@@ -1298,7 +1298,7 @@ class vardbapi(dbapi):
treetype="vartree", vartree=self.vartree)
if not mylink.exists():
raise KeyError(cpv)
- for k, v in values.iteritems():
+ for k, v in values.items():
if v:
mylink.setfile(k, v)
else:
@@ -1561,7 +1561,7 @@ class vardbapi(dbapi):
def getFileOwnerMap(self, path_iter):
owners = self.get_owners(path_iter)
file_owners = {}
- for pkg_dblink, files in owners.iteritems():
+ for pkg_dblink, files in owners.items():
for f in files:
owner_set = file_owners.get(f)
if owner_set is None:
@@ -2183,7 +2183,7 @@ class dblink(object):
cpv_lib_map = self._find_unused_preserved_libs()
if cpv_lib_map:
self._remove_preserved_libs(cpv_lib_map)
- for cpv, removed in cpv_lib_map.iteritems():
+ for cpv, removed in cpv_lib_map.items():
if not self.vartree.dbapi.cpv_exists(cpv):
for dblnk in others_in_slot:
if dblnk.mycpv == cpv:
@@ -2359,7 +2359,7 @@ class dblink(object):
if pkgfiles:
self.updateprotect()
- mykeys = pkgfiles.keys()
+ mykeys = list(pkgfiles.keys())
mykeys.sort()
mykeys.reverse()
@@ -2829,7 +2829,7 @@ class dblink(object):
# Create consumer nodes and add them to the graph.
# Note that consumers can also be providers.
- for provider_node, consumers in consumer_map.iteritems():
+ for provider_node, consumers in consumer_map.items():
for c in consumers:
if self.isowner(c, root):
continue
@@ -2955,7 +2955,7 @@ class dblink(object):
return node
linkmap = self.vartree.dbapi.linkmap
- for cpv, plibs in plib_dict.iteritems():
+ for cpv, plibs in plib_dict.items():
for f in plibs:
path_cpv_map[f] = cpv
preserved_node = path_to_node(f)
@@ -3036,7 +3036,7 @@ class dblink(object):
os = _os_merge
files_to_remove = set()
- for files in cpv_lib_map.itervalues():
+ for files in cpv_lib_map.values():
files_to_remove.update(files)
files_to_remove = sorted(files_to_remove)
showMessage = self._display_merge
@@ -3088,7 +3088,7 @@ class dblink(object):
plib_dict = self.vartree.dbapi.plib_registry.getPreservedLibs()
plib_cpv_map = {}
plib_paths = set()
- for cpv, paths in plib_dict.iteritems():
+ for cpv, paths in plib_dict.items():
plib_paths.update(paths)
for f in paths:
plib_cpv_map[f] = cpv
@@ -3266,7 +3266,7 @@ class dblink(object):
k = (s.st_dev, s.st_ino)
inode_map.setdefault(k, []).append((path, s))
suspicious_hardlinks = []
- for path_list in inode_map.itervalues():
+ for path_list in inode_map.values():
path, s = path_list[0]
if len(path_list) == s.st_nlink:
# All hardlinks seem to be owned by this package.
@@ -3644,7 +3644,7 @@ class dblink(object):
owners = self.vartree.dbapi._owners.get_owners(collisions)
self.vartree.dbapi.flush_cache()
- for pkg, owned_files in owners.iteritems():
+ for pkg, owned_files in owners.items():
cpv = pkg.mycpv
msg = []
msg.append("%s" % cpv)
@@ -3891,7 +3891,7 @@ class dblink(object):
# and update the contents of the packages that owned them.
plib_registry = self.vartree.dbapi.plib_registry
plib_dict = plib_registry.getPreservedLibs()
- for cpv, paths in plib_collisions.iteritems():
+ for cpv, paths in plib_collisions.items():
if cpv not in plib_dict:
continue
if cpv == self.mycpv:
@@ -3948,7 +3948,7 @@ class dblink(object):
cpv_lib_map = self._find_unused_preserved_libs()
if cpv_lib_map:
self._remove_preserved_libs(cpv_lib_map)
- for cpv, removed in cpv_lib_map.iteritems():
+ for cpv, removed in cpv_lib_map.items():
if not self.vartree.dbapi.cpv_exists(cpv):
continue
self.vartree.dbapi.removeFromContents(cpv, removed)
@@ -4428,7 +4428,7 @@ def tar_contents(contents, root, tar, protect=None, onProgress=None):
curval = 0
if onProgress:
onProgress(maxval, 0)
- paths = contents.keys()
+ paths = list(contents.keys())
paths.sort()
for path in paths:
curval += 1
diff --git a/pym/portage/dbapi/virtual.py b/pym/portage/dbapi/virtual.py
index bf90f305b..5d1e933a1 100644
--- a/pym/portage/dbapi/virtual.py
+++ b/pym/portage/dbapi/virtual.py
@@ -59,7 +59,7 @@ class fakedbapi(dbapi):
return list(self.cpdict)
def cpv_all(self):
- return self.cpvdict.keys()
+ return list(self.cpvdict.keys())
def cpv_inject(self, mycpv, metadata=None):
"""Adds a cpv to the list of available packages. See the
diff --git a/pym/portage/debug.py b/pym/portage/debug.py
index 343f8ab6e..18e8482e4 100644
--- a/pym/portage/debug.py
+++ b/pym/portage/debug.py
@@ -89,7 +89,7 @@ class trace_handler(object):
# We omit items that will lead to unreasonable bloat of the trace
# output (and resulting log file).
- for k, v in my_locals.iteritems():
+ for k, v in my_locals.items():
my_repr = repr(v)
if len(my_repr) > self.max_repr_length:
my_locals[k] = "omitted"
diff --git a/pym/portage/dep.py b/pym/portage/dep.py
index 9d055912d..c8b9f6d44 100644
--- a/pym/portage/dep.py
+++ b/pym/portage/dep.py
@@ -393,16 +393,16 @@ class _use_dep(object):
self.required = frozenset(chain(
enabled_flags,
disabled_flags,
- *conditional.values()
+ *list(conditional.values())
))
self.enabled = frozenset(enabled_flags)
self.disabled = frozenset(disabled_flags)
self.conditional = None
- for v in conditional.itervalues():
+ for v in conditional.values():
if v:
- for k, v in conditional.iteritems():
+ for k, v in conditional.items():
conditional[k] = frozenset(v)
self.conditional = conditional
break
diff --git a/pym/portage/dispatch_conf.py b/pym/portage/dispatch_conf.py
index 096ce0a7c..377eb45c3 100644
--- a/pym/portage/dispatch_conf.py
+++ b/pym/portage/dispatch_conf.py
@@ -36,7 +36,7 @@ def read_config(mandatory_opts):
# Handle quote removal here, since KeyValuePairFileLoader doesn't do that.
quotes = "\"'"
- for k, v in opts.iteritems():
+ for k, v in opts.items():
if v[:1] in quotes and v[:1] == v[-1:]:
opts[k] = v[1:-1]
diff --git a/pym/portage/eclass_cache.py b/pym/portage/eclass_cache.py
index 30b83ea3d..2c3c66c86 100644
--- a/pym/portage/eclass_cache.py
+++ b/pym/portage/eclass_cache.py
@@ -114,7 +114,7 @@ class cache(object):
def is_eclass_data_valid(self, ec_dict):
if not isinstance(ec_dict, dict):
return False
- for eclass, tup in ec_dict.iteritems():
+ for eclass, tup in ec_dict.items():
cached_data = self.eclasses.get(eclass, None)
""" Only use the mtime for validation since the probability of a
collision is small and, depending on the cache implementation, the
diff --git a/pym/portage/elog/__init__.py b/pym/portage/elog/__init__.py
index c689d821d..53c0a85f7 100644
--- a/pym/portage/elog/__init__.py
+++ b/pym/portage/elog/__init__.py
@@ -100,7 +100,7 @@ def elog_process(cpv, mysettings, phasefilter=None):
else:
all_logentries[cpv] = ebuild_logentries
- for key in _preserve_logentries.keys():
+ for key in list(_preserve_logentries.keys()):
if key in all_logentries:
all_logentries[key] = _merge_logentries(_preserve_logentries[key], all_logentries[key])
else:
@@ -140,7 +140,7 @@ def elog_process(cpv, mysettings, phasefilter=None):
listener(mysettings, str(key), default_logentries, default_fulllog)
# pass the processing to the individual modules
- for s, levels in logsystems.iteritems():
+ for s, levels in logsystems.items():
# allow per module overrides of PORTAGE_ELOG_CLASSES
if levels:
mod_logentries = filter_loglevels(all_logentries[key], levels)
diff --git a/pym/portage/elog/mod_mail_summary.py b/pym/portage/elog/mod_mail_summary.py
index 4380146d2..3ea0e6fe9 100644
--- a/pym/portage/elog/mod_mail_summary.py
+++ b/pym/portage/elog/mod_mail_summary.py
@@ -31,7 +31,7 @@ def finalize(mysettings=None):
an older version of portage will import the module from a newer version
when it upgrades itself."""
global _items
- for mysettings, items in _items.itervalues():
+ for mysettings, items in _items.values():
_finalize(mysettings, items)
_items.clear()
@@ -59,7 +59,7 @@ def _finalize(mysettings, items):
mybody += "- %s\n" % key
mymessage = portage.mail.create_message(myfrom, myrecipient, mysubject,
- mybody, attachments=items.values())
+ mybody, attachments=list(items.values()))
def timeout_handler(signum, frame):
raise PortageException("Timeout in finalize() for elog system 'mail_summary'")
diff --git a/pym/portage/getbinpkg.py b/pym/portage/getbinpkg.py
index 5d3864fd1..35bef052a 100644
--- a/pym/portage/getbinpkg.py
+++ b/pym/portage/getbinpkg.py
@@ -801,7 +801,7 @@ class PackageIndex(object):
if not mycpv:
continue
if self._default_pkg_data:
- for k, v in self._default_pkg_data.iteritems():
+ for k, v in self._default_pkg_data.items():
d.setdefault(k, v)
if self._inherited_keys:
for k in self._inherited_keys:
@@ -814,7 +814,7 @@ class PackageIndex(object):
if self.modified:
self.header["TIMESTAMP"] = str(long(time.time()))
self.header["PACKAGES"] = str(len(self.packages))
- keys = self.header.keys()
+ keys = list(self.header.keys())
keys.sort()
self._writepkgindex(pkgfile, [(k, self.header[k]) \
for k in keys if self.header[k]])
@@ -827,10 +827,10 @@ class PackageIndex(object):
if v is not None and v == metadata.get(k):
del metadata[k]
if self._default_pkg_data:
- for k, v in self._default_pkg_data.iteritems():
+ for k, v in self._default_pkg_data.items():
if metadata.get(k) == v:
metadata.pop(k, None)
- keys = metadata.keys()
+ keys = list(metadata.keys())
keys.sort()
self._writepkgindex(pkgfile,
[(k, metadata[k]) for k in keys if metadata[k]])
diff --git a/pym/portage/manifest.py b/pym/portage/manifest.py
index cd2609c3b..7b20a3c2d 100644
--- a/pym/portage/manifest.py
+++ b/pym/portage/manifest.py
@@ -68,13 +68,13 @@ def parseManifest2(mysplit):
class ManifestEntry(object):
__slots__ = ("type", "name", "hashes")
def __init__(self, **kwargs):
- for k, v in kwargs.iteritems():
+ for k, v in kwargs.items():
setattr(self, k, v)
class Manifest2Entry(ManifestEntry):
def __str__(self):
myline = " ".join([self.type, self.name, str(self.hashes["size"])])
- myhashkeys = self.hashes.keys()
+ myhashkeys = list(self.hashes.keys())
myhashkeys.remove("size")
myhashkeys.sort()
for h in myhashkeys:
@@ -202,15 +202,15 @@ class Manifest(object):
return myhashdict
def _createManifestEntries(self):
- mytypes = self.fhashdict.keys()
+ mytypes = list(self.fhashdict.keys())
mytypes.sort()
for t in mytypes:
- myfiles = self.fhashdict[t].keys()
+ myfiles = list(self.fhashdict[t].keys())
myfiles.sort()
for f in myfiles:
myentry = Manifest2Entry(
type=t, name=f, hashes=self.fhashdict[t][f].copy())
- myhashkeys = myentry.hashes.keys()
+ myhashkeys = list(myentry.hashes.keys())
myhashkeys.sort()
for h in myhashkeys:
if h not in ["size"] + portage.const.MANIFEST2_HASH_FUNCTIONS:
diff --git a/pym/portage/news.py b/pym/portage/news.py
index 84d9ab877..c178a8c48 100644
--- a/pym/portage/news.py
+++ b/pym/portage/news.py
@@ -235,7 +235,7 @@ class NewsItem(object):
'profile' : profile }
all_match = True
- for values in self.restrictions.itervalues():
+ for values in self.restrictions.values():
any_match = False
for restriction in values:
if restriction.checkRestriction(**kwargs):
@@ -269,7 +269,7 @@ class NewsItem(object):
restricts = { _installedRE : DisplayInstalledRestriction,
_profileRE : DisplayProfileRestriction,
_keywordRE : DisplayKeywordRestriction }
- for regex, restriction in restricts.iteritems():
+ for regex, restriction in restricts.items():
match = regex.match(line)
if match:
restrict = restriction(match.groups()[0].strip())
diff --git a/pym/portage/output.py b/pym/portage/output.py
index 5f310d300..6044f2bbc 100644
--- a/pym/portage/output.py
+++ b/pym/portage/output.py
@@ -759,10 +759,10 @@ def _init(config_root='/'):
codes = object.__getattribute__(codes, '_attr')
_styles = object.__getattribute__(_styles, '_attr')
- for k, v in codes.iteritems():
+ for k, v in codes.items():
codes[k] = _unicode_decode(v)
- for k, v in _styles.iteritems():
+ for k, v in _styles.items():
_styles[k] = _unicode_decode(v)
try:
diff --git a/pym/portage/process.py b/pym/portage/process.py
index b49f061f1..6ce304b4f 100644
--- a/pym/portage/process.py
+++ b/pym/portage/process.py
@@ -189,7 +189,7 @@ def spawn(mycommand, env={}, opt_name=None, fd_pipes=None, returnpid=False,
# Avoid a potential UnicodeEncodeError from os.execve().
env_bytes = {}
- for k, v in env.iteritems():
+ for k, v in env.items():
env_bytes[_unicode_encode(k, encoding=_encodings['content'])] = \
_unicode_encode(v, encoding=_encodings['content'])
env = env_bytes
diff --git a/pym/portage/proxy/lazyimport.py b/pym/portage/proxy/lazyimport.py
index 62428543e..c62f90788 100644
--- a/pym/portage/proxy/lazyimport.py
+++ b/pym/portage/proxy/lazyimport.py
@@ -50,7 +50,7 @@ def _unregister_module_proxy(name):
object.__getattribute__(proxy, '_get_target')()
modules = sys.modules
- for name, proxy_list in list(_module_proxies.iteritems()):
+ for name, proxy_list in list(_module_proxies.items()):
if name not in modules:
continue
# First delete this name from the dict so that
diff --git a/pym/portage/sets/base.py b/pym/portage/sets/base.py
index 4b39e9df9..0c08ee194 100644
--- a/pym/portage/sets/base.py
+++ b/pym/portage/sets/base.py
@@ -126,7 +126,7 @@ class PackageSet(object):
rev_transform[atom] = atom
else:
rev_transform[Atom(atom.replace(atom.cp, pkg.cp, 1))] = atom
- best_match = best_match_to_list(pkg, rev_transform.iterkeys())
+ best_match = best_match_to_list(pkg, iter(rev_transform.keys()))
if best_match:
return rev_transform[best_match]
return None
diff --git a/pym/portage/sets/files.py b/pym/portage/sets/files.py
index 15ecd752b..8a7e4d84d 100644
--- a/pym/portage/sets/files.py
+++ b/pym/portage/sets/files.py
@@ -93,7 +93,7 @@ class StaticFileSet(EditablePackageSet):
# in the latest new slot that may be available.
atoms.append(a)
else:
- atoms = data.keys()
+ atoms = list(data.keys())
self._setAtoms(atoms)
self._mtime = mtime
@@ -183,7 +183,7 @@ class ConfigFileSet(PackageSet):
def load(self):
data, errors = self.loader.load()
- self._setAtoms(data.keys())
+ self._setAtoms(list(data.keys()))
def singleBuilder(self, options, settings, trees):
if not "filename" in options:
@@ -254,7 +254,7 @@ class WorldSet(EditablePackageSet):
raise
del e
data = {}
- atoms = data.keys()
+ atoms = list(data.keys())
self._mtime = mtime
atoms_changed = True
else:
@@ -274,7 +274,7 @@ class WorldSet(EditablePackageSet):
raise
del e
data = {}
- nonatoms = data.keys()
+ nonatoms = list(data.keys())
self._mtime2 = mtime
atoms_changed = True
else:
diff --git a/pym/portage/sets/libs.py b/pym/portage/sets/libs.py
index d7e4c731a..b8aa3c41c 100644
--- a/pym/portage/sets/libs.py
+++ b/pym/portage/sets/libs.py
@@ -64,7 +64,7 @@ class PreservedLibraryConsumerSet(LibraryConsumerSet):
consumers = set()
if reg:
plib_dict = reg.getPreservedLibs()
- for libs in plib_dict.itervalues():
+ for libs in plib_dict.values():
for lib in libs:
if self.debug:
print(lib)
@@ -74,7 +74,7 @@ class PreservedLibraryConsumerSet(LibraryConsumerSet):
consumers.update(self.dbapi.linkmap.findConsumers(lib))
# Don't rebuild packages just because they contain preserved
# libs that happen to be consumers of other preserved libs.
- for libs in plib_dict.itervalues():
+ for libs in plib_dict.values():
consumers.difference_update(libs)
else:
return
diff --git a/pym/portage/tests/env/config/test_PackageKeywordsFile.py b/pym/portage/tests/env/config/test_PackageKeywordsFile.py
index 228cf2ecb..d01633125 100644
--- a/pym/portage/tests/env/config/test_PackageKeywordsFile.py
+++ b/pym/portage/tests/env/config/test_PackageKeywordsFile.py
@@ -23,7 +23,7 @@ class PackageKeywordsFileTestCase(TestCase):
f = PackageKeywordsFile(self.fname)
f.load()
i = 0
- for cpv, keyword in f.iteritems():
+ for cpv, keyword in f.items():
self.assertEqual( cpv, self.cpv[i] )
[k for k in keyword if self.assertTrue(k in self.keywords)]
i = i + 1
diff --git a/pym/portage/tests/env/config/test_PackageUseFile.py b/pym/portage/tests/env/config/test_PackageUseFile.py
index 575ede505..4157d6097 100644
--- a/pym/portage/tests/env/config/test_PackageUseFile.py
+++ b/pym/portage/tests/env/config/test_PackageUseFile.py
@@ -22,7 +22,7 @@ class PackageUseFileTestCase(TestCase):
try:
f = PackageUseFile(self.fname)
f.load()
- for cpv, use in f.iteritems():
+ for cpv, use in f.items():
self.assertEqual( cpv, self.cpv )
[flag for flag in use if self.assertTrue(flag in self.useflags)]
finally:
diff --git a/pym/portage/tests/env/config/test_PortageModulesFile.py b/pym/portage/tests/env/config/test_PortageModulesFile.py
index 41a44c4cc..8a37b73bd 100644
--- a/pym/portage/tests/env/config/test_PortageModulesFile.py
+++ b/pym/portage/tests/env/config/test_PortageModulesFile.py
@@ -32,7 +32,7 @@ class PortageModulesFileTestCase(TestCase):
def BuildFile(self):
fd, self.fname = mkstemp()
f = os.fdopen(fd, 'w')
- for k, v in self.items.iteritems():
+ for k, v in self.items.items():
f.write('%s=%s\n' % (k,v))
f.close()
diff --git a/pym/portage/update.py b/pym/portage/update.py
index 0941ba06b..1ea15e7e9 100644
--- a/pym/portage/update.py
+++ b/pym/portage/update.py
@@ -56,7 +56,7 @@ def update_dbentries(update_iter, mydata):
"""Performs update commands and returns a
dict containing only the updated items."""
updated_items = {}
- for k, mycontent in mydata.iteritems():
+ for k, mycontent in mydata.items():
k_unicode = _unicode_decode(k,
encoding=_encodings['repo.content'], errors='replace')
if k_unicode not in ignored_dbentries:
@@ -83,7 +83,7 @@ def fixdbentries(update_iter, dbdir):
mode='r', encoding=_encodings['repo.content'],
errors='replace').read()
updated_items = update_dbentries(update_iter, mydata)
- for myfile, mycontent in updated_items.iteritems():
+ for myfile, mycontent in updated_items.items():
file_path = os.path.join(dbdir, myfile)
write_atomic(file_path, mycontent, encoding=_encodings['repo.content'])
return len(updated_items) > 0
@@ -231,7 +231,7 @@ def update_config_files(config_root, protect, protect_mask, update_iter):
# update /etc/portage/packages.*
ignore_line_re = re.compile(r'^#|^\s*$')
for update_cmd in update_iter:
- for x, contents in file_contents.iteritems():
+ for x, contents in file_contents.items():
for pos, line in enumerate(contents):
if ignore_line_re.match(line):
continue
diff --git a/pym/portage/util.py b/pym/portage/util.py
index 60e72fbb4..5f1a42c2f 100644
--- a/pym/portage/util.py
+++ b/pym/portage/util.py
@@ -220,7 +220,7 @@ def stack_dicts(dicts, incremental=0, incrementals=[], ignore_none=0):
return None
if final_dict is None:
final_dict = {}
- for y in mydict.keys():
+ for y in list(mydict.keys()):
if True:
if y in final_dict and (incremental or (y in incrementals)):
final_dict[y] += " "+mydict[y][:]
@@ -247,7 +247,7 @@ def stack_lists(lists, incremental=1):
new_list[y] = True
else:
new_list[y] = True
- return new_list.keys()
+ return list(new_list.keys())
def grabdict(myfilename, juststrings=0, empty=0, recursive=0, incremental=1):
"""
@@ -287,7 +287,7 @@ def grabdict(myfilename, juststrings=0, empty=0, recursive=0, incremental=1):
else:
newdict[myline[0]] = myline[1:]
if juststrings:
- for k, v in newdict.iteritems():
+ for k, v in newdict.items():
newdict[k] = " ".join(v)
return newdict
@@ -299,7 +299,7 @@ def grabdict_package(myfilename, juststrings=0, recursive=0):
# "RuntimeError: dictionary changed size during iteration"
# when an invalid atom is deleted.
atoms = {}
- for k, v in pkgs.iteritems():
+ for k, v in pkgs.items():
try:
k = Atom(k)
except InvalidAtom:
diff --git a/pym/portage/xpak.py b/pym/portage/xpak.py
index 29c28af32..b5878befb 100644
--- a/pym/portage/xpak.py
+++ b/pym/portage/xpak.py
@@ -99,7 +99,7 @@ def xpak_mem(mydata):
"""Create an xpack segement from a map object."""
mydata_encoded = {}
- for k, v in mydata.iteritems():
+ for k, v in mydata.items():
k = _unicode_encode(k,
encoding=_encodings['repo.content'], errors='backslashreplace')
v = _unicode_encode(v,
@@ -112,7 +112,7 @@ def xpak_mem(mydata):
indexpos=0
dataglob = _unicode_encode('')
datapos=0
- for x, newglob in mydata.iteritems():
+ for x, newglob in mydata.items():
mydatasize=len(newglob)
indexglob=indexglob+encodeint(len(x))+x+encodeint(datapos)+encodeint(mydatasize)
indexpos=indexpos+4+len(x)+4+4