author     Zac Medico <zmedico@gentoo.org>  2008-07-01 12:38:49 +0000
committer  Zac Medico <zmedico@gentoo.org>  2008-07-01 12:38:49 +0000
commit     83499617458a93217e8f1ed5194c16f2b3bedc40 (patch)
tree       3337cac977a844506cc7ac03fa046a65ec130c9b
parent     3ab46b943befdd480b24c3d459555011dd04e120 (diff)
Py3k compatibility patch #1 by Ali Polatel <hawking@g.o>.
Replace dict.has_key() calls with "in" and "not in" operators.
svn path=/main/trunk/; revision=10870
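As context for the change below: dict.has_key() was removed in Python 3, while the "in" and "not in" operators work on both Python 2 and Python 3, so the patch rewrites every membership test mechanically. A minimal sketch of the idiom follows; the dictionary and keys are illustrative only, not taken from the patch.

    # Illustrative only; the dictionary and keys are made up, not from the patch.
    settings = {"PORTAGE_RSYNC_RETRIES": "3"}

    # Old spelling, Python 2 only (dict.has_key() is gone in Python 3):
    #     if settings.has_key("PORTAGE_RSYNC_RETRIES"): ...
    #     if not settings.has_key("RSYNC_RETRIES"): ...

    # New spelling, valid on both Python 2 and Python 3:
    if "PORTAGE_RSYNC_RETRIES" in settings:
        print(settings["PORTAGE_RSYNC_RETRIES"])
    if "RSYNC_RETRIES" not in settings:
        print("RSYNC_RETRIES is not set")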
-rw-r--r--  pym/_emerge/__init__.py        6
-rw-r--r--  pym/portage/__init__.py       62
-rw-r--r--  pym/portage/cvstree.py        16
-rw-r--r--  pym/portage/dispatch_conf.py   2
-rw-r--r--  pym/portage/getbinpkg.py      12
-rw-r--r--  pym/portage/glsa.py            6
-rw-r--r--  pym/portage/locks.py           6
-rw-r--r--  pym/portage/manifest.py        4
-rw-r--r--  pym/portage/util.py            4
9 files changed, 60 insertions, 58 deletions
diff --git a/pym/_emerge/__init__.py b/pym/_emerge/__init__.py
index 51bb2fddc..e2cb01a29 100644
--- a/pym/_emerge/__init__.py
+++ b/pym/_emerge/__init__.py
@@ -7299,7 +7299,7 @@ class Scheduler(object):
		emergelog(xterm_titles, " *** Finished. Cleaning up...")
		# We're out of the loop... We're done. Delete the resume data.
-		if mtimedb.has_key("resume"):
+		if "resume" in mtimedb:
			del mtimedb["resume"]
		mtimedb.commit()
@@ -7537,7 +7537,7 @@ def unmerge(root_config, myopts, unmerge_action,
				# since we're pruning, we don't care about slots
				# and put all the pkgs in together
				myslot = 0
-				if not slotmap.has_key(myslot):
+				if myslot not in slotmap:
					slotmap[myslot] = {}
				slotmap[myslot][localtree.dbapi.cpv_counter(mypkg)] = mypkg
@@ -8241,7 +8241,7 @@ def action_sync(settings, trees, mtimedb, myopts, myaction):
	rsync_initial_timeout = 15
	try:
-		if settings.has_key("RSYNC_RETRIES"):
+		if "RSYNC_RETRIES" in settings:
			print yellow("WARNING:")+" usage of RSYNC_RETRIES is deprecated, use PORTAGE_RSYNC_RETRIES instead"
			maxretries=int(settings["RSYNC_RETRIES"])
		else:
diff --git a/pym/portage/__init__.py b/pym/portage/__init__.py
index 50711da28..d5b82ec13 100644
--- a/pym/portage/__init__.py
+++ b/pym/portage/__init__.py
@@ -160,7 +160,7 @@ def load_mod(name):
def best_from_dict(key, top_dict, key_order, EmptyOnError=1, FullCopy=1, AllowEmpty=1):
	for x in key_order:
-		if top_dict.has_key(x) and top_dict[x].has_key(key):
+		if x in top_dict and key in top_dict[x]:
			if FullCopy:
				return copy.deepcopy(top_dict[x][key])
			else:
@@ -194,7 +194,7 @@ cacheStale=0
def cacheddir(my_original_path, ignorecvs, ignorelist, EmptyOnError, followSymlinks=True):
	global cacheHit,cacheMiss,cacheStale
	mypath = normalize_path(my_original_path)
-	if dircache.has_key(mypath):
+	if mypath in dircache:
		cacheHit += 1
		cached_mtime, list, ftype = dircache[mypath]
	else:
@@ -219,7 +219,7 @@ def cacheddir(my_original_path, ignorecvs, ignorelist, EmptyOnError, followSymli
			return None, None
	# Python retuns mtime in seconds, so if it was changed in the last few seconds, it could be invalid
	if mtime != cached_mtime or time.time() - mtime < 4:
-		if dircache.has_key(mypath):
+		if mypath in dircache:
			cacheStale += 1
		try:
			list = os.listdir(mypath)
@@ -851,7 +851,7 @@ def ExtractKernelVersion(base_dir):
	# Check the .config for a CONFIG_LOCALVERSION and append that too, also stripping whitespace
	kernelconfig = getconfig(base_dir+"/.config")
-	if kernelconfig and kernelconfig.has_key("CONFIG_LOCALVERSION"):
+	if kernelconfig and "CONFIG_LOCALVERSION" in kernelconfig:
		version += "".join(kernelconfig["CONFIG_LOCALVERSION"].split())
	return (version,None)
@@ -1218,7 +1218,7 @@ class config(object):
			self.prevmaskdict={}
			for x in self.packages:
				mycatpkg=dep_getkey(x)
-				if not self.prevmaskdict.has_key(mycatpkg):
+				if mycatpkg not in self.prevmaskdict:
					self.prevmaskdict[mycatpkg]=[x]
				else:
					self.prevmaskdict[mycatpkg].append(x)
@@ -1440,7 +1440,7 @@ class config(object):
				os.path.join(abs_user_config, "package.use"), recursive=1)
			for key in pusedict.keys():
				cp = dep_getkey(key)
-				if not self.pusedict.has_key(cp):
+				if cp not in self.pusedict:
					self.pusedict[cp] = {}
				self.pusedict[cp][key] = pusedict[key]
@@ -1452,7 +1452,8 @@ class config(object):
				# default to ~arch if no specific keyword is given
				if not pkgdict[key]:
					mykeywordlist = []
-					if self.configdict["defaults"] and self.configdict["defaults"].has_key("ACCEPT_KEYWORDS"):
+					if self.configdict["defaults"] and \
+						"ACCEPT_KEYWORDS" in self.configdict["defaults"]:
						groups = self.configdict["defaults"]["ACCEPT_KEYWORDS"].split()
					else:
						groups = []
@@ -1461,7 +1462,7 @@ class config(object):
						mykeywordlist.append("~"+keyword)
					pkgdict[key] = mykeywordlist
				cp = dep_getkey(key)
-				if not self.pkeywordsdict.has_key(cp):
+				if cp not in self.pkeywordsdict:
					self.pkeywordsdict[cp] = {}
				self.pkeywordsdict[cp][key] = pkgdict[key]
@@ -1482,7 +1483,7 @@ class config(object):
				recursive=1)
			for x in pkgunmasklines:
				mycatpkg=dep_getkey(x)
-				if self.punmaskdict.has_key(mycatpkg):
+				if mycatpkg in self.punmaskdict:
					self.punmaskdict[mycatpkg].append(x)
				else:
					self.punmaskdict[mycatpkg]=[x]
@@ -1506,7 +1507,7 @@ class config(object):
			self.pmaskdict = {}
			for x in pkgmasklines:
				mycatpkg=dep_getkey(x)
-				if self.pmaskdict.has_key(mycatpkg):
+				if mycatpkg in self.pmaskdict:
					self.pmaskdict[mycatpkg].append(x)
				else:
					self.pmaskdict[mycatpkg]=[x]
@@ -1544,7 +1545,7 @@ class config(object):
				if not x:
					continue
				mycatpkg=dep_getkey(x)
-				if self.pprovideddict.has_key(mycatpkg):
+				if mycatpkg in self.pprovideddict:
					self.pprovideddict[mycatpkg].append(x)
				else:
					self.pprovideddict[mycatpkg]=[x]
@@ -1806,7 +1807,7 @@ class config(object):
	def backup_changes(self,key=None):
		self.modifying()
-		if key and self.configdict["env"].has_key(key):
+		if key and key in self.configdict["env"]:
			self.backupenv[key] = copy.deepcopy(self.configdict["env"][key])
		else:
			raise KeyError, "No such key defined in environment: %s" % key
@@ -2646,7 +2647,7 @@ class config(object):
		if virts:
			for x in virts:
				vkeysplit = x.split("/")
-				if not self.virts_p.has_key(vkeysplit[1]):
+				if vkeysplit[1] not in self.virts_p:
					self.virts_p[vkeysplit[1]] = virts[x]
		return self.virts_p
@@ -2819,7 +2820,7 @@ class config(object):
				# remain unset.
				continue
			mydict[x] = myvalue
-		if not mydict.has_key("HOME") and mydict.has_key("BUILD_PREFIX"):
+		if "HOME" not in mydict and "BUILD_PREFIX" in mydict:
			writemsg("*** HOME not set. Setting to "+mydict["BUILD_PREFIX"]+"\n")
			mydict["HOME"]=mydict["BUILD_PREFIX"][:]
@@ -3301,7 +3302,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, locks_in_subdir=".locks",
#	use_locks = 0
	# local mirrors are always added
-	if custommirrors.has_key("local"):
+	if "local" in custommirrors:
		mymirrors += custommirrors["local"]
	if "nomirror" in restrict or \
@@ -3348,7 +3349,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, locks_in_subdir=".locks",
	primaryuri_dict = {}
	for myuri in myuris:
		myfile=os.path.basename(myuri)
-		if not filedict.has_key(myfile):
+		if myfile not in filedict:
			filedict[myfile]=[]
			for y in range(0,len(locations)):
				filedict[myfile].append(locations[y]+"/distfiles/"+myfile)
@@ -3358,14 +3359,14 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, locks_in_subdir=".locks",
			mirrorname = myuri[9:eidx]
			# Try user-defined mirrors first
-			if custommirrors.has_key(mirrorname):
+			if mirrorname in custommirrors:
				for cmirr in custommirrors[mirrorname]:
					filedict[myfile].append(cmirr+"/"+myuri[eidx+1:])
					# remove the mirrors we tried from the list of official mirrors
					if cmirr.strip() in thirdpartymirrors[mirrorname]:
						thirdpartymirrors[mirrorname].remove(cmirr)
			# now try the official mirrors
-			if thirdpartymirrors.has_key(mirrorname):
+			if mirrorname in thirdpartymirrors:
				shuffle(thirdpartymirrors[mirrorname])
				for locmirr in thirdpartymirrors[mirrorname]:
@@ -3382,7 +3383,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, locks_in_subdir=".locks",
				continue
			if "primaryuri" in restrict:
				# Use the source site first.
-				if primaryuri_indexes.has_key(myfile):
+				if myfile in primaryuri_indexes:
					primaryuri_indexes[myfile] += 1
				else:
					primaryuri_indexes[myfile] = 0
@@ -3693,11 +3694,11 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, locks_in_subdir=".locks",
					continue
				# allow different fetchcommands per protocol
				protocol = loc[0:loc.find("://")]
-				if mysettings.has_key("FETCHCOMMAND_"+protocol.upper()):
+				if "FETCHCOMMAND_" + protocol.upper() in mysettings:
					fetchcommand=mysettings["FETCHCOMMAND_"+protocol.upper()]
				else:
					fetchcommand=mysettings["FETCHCOMMAND"]
-				if mysettings.has_key("RESUMECOMMAND_"+protocol.upper()):
+				if "RESUMECOMMAND_" + protocol.upper() in mysettings:
					resumecommand=mysettings["RESUMECOMMAND_"+protocol.upper()]
				else:
					resumecommand=mysettings["RESUMECOMMAND"]
@@ -3814,7 +3815,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, locks_in_subdir=".locks",
			except EnvironmentError:
				pass
		if mydigests!=None and mydigests.has_key(myfile):
-		if mydigests!=None and mydigests.has_key(myfile):
+		if mydigests is not None and myfile in mydigests:
			try:
				mystat = os.stat(myfile_path)
			except OSError, e:
@@ -4382,7 +4383,7 @@ def doebuild_environment(myebuild, mydo, myroot, mysettings, debug, use_cache, m
	ebuild_path = os.path.abspath(myebuild)
	pkg_dir = os.path.dirname(ebuild_path)
-	if mysettings.configdict["pkg"].has_key("CATEGORY"):
+	if "CATEGORY" in mysettings.configdict["pkg"]:
		cat = mysettings.configdict["pkg"]["CATEGORY"]
	else:
		cat = os.path.basename(normalize_path(os.path.join(pkg_dir, "..")))
@@ -4483,7 +4484,7 @@ def doebuild_environment(myebuild, mydo, myroot, mysettings, debug, use_cache, m
	else:
		mysettings["PVR"]=mysplit[1]+"-"+mysplit[2]
-	if mysettings.has_key("PATH"):
+	if "PATH" in mysettings:
		mysplit=mysettings["PATH"].split(":")
	else:
		mysplit=[]
@@ -6191,7 +6192,7 @@ def dep_wordreduce(mydeplist,mysettings,mydbapi,mode,use_cache=1):
			pass
		else:
			mykey = dep_getkey(deplist[mypos])
-			if mysettings and mysettings.pprovideddict.has_key(mykey) and \
+			if mysettings and mykey in mysettings.pprovideddict and \
				match_from_list(deplist[mypos], mysettings.pprovideddict[mykey]):
				deplist[mypos]=True
			elif mydbapi is None:
@@ -6242,12 +6243,13 @@ def key_expand(mykey, mydb=None, use_cache=1, settings=None):
			for x in mydb.categories:
				if mydb.cp_list(x+"/"+mykey,use_cache=use_cache):
					return x+"/"+mykey
-			if virts_p.has_key(mykey):
+			if mykey in virts_p:
				return(virts_p[mykey][0])
			return "null/"+mykey
	elif mydb:
		if hasattr(mydb, "cp_list"):
-			if (not mydb.cp_list(mykey,use_cache=use_cache)) and virts and virts.has_key(mykey):
+			if not mydb.cp_list(mykey, use_cache=use_cache) and \
+				virts and mykey in virts:
				return virts[mykey][0]
		return mykey
@@ -6321,7 +6323,7 @@ def cpv_expand(mycpv, mydb=None, use_cache=1, settings=None):
			mykey=matches[0]
		if not mykey and not isinstance(mydb, list):
-			if virts_p.has_key(myp):
+			if myp in virts_p:
				mykey=virts_p[myp][0]
			#again, we only perform virtual expansion if we have a dbapi (not a list)
		if not mykey:
@@ -6369,7 +6371,7 @@ def getmaskingreason(mycpv, metadata=None, settings=None, portdb=None, return_lo
	locations.reverse()
	pmasklists = [(x, grablines(os.path.join(x, "package.mask"), recursive=1)) for x in locations]
-	if settings.pmaskdict.has_key(mycp):
+	if mycp in settings.pmaskdict:
		for x in settings.pmaskdict[mycp]:
			if match_from_list(x, cpv_slot_list):
				comment = ""
@@ -6750,7 +6752,7 @@ def commit_mtimedb(mydict=None, filename=None):
def portageexit():
	global uid,portage_gid,portdb,db
-	if secpass and not os.environ.has_key("SANDBOX_ACTIVE"):
+	if secpass and "SANDBOX_ACTIVE" not in os.environ:
		close_portdbapi_caches()
		commit_mtimedb()
diff --git a/pym/portage/cvstree.py b/pym/portage/cvstree.py
index f74ecd4ac..d283fb47f 100644
--- a/pym/portage/cvstree.py
+++ b/pym/portage/cvstree.py
@@ -17,13 +17,13 @@ def pathdata(entries, path):
	mytarget=mysplit[-1]
	mysplit=mysplit[:-1]
	for mys in mysplit:
-		if myentries["dirs"].has_key(mys):
+		if mys in myentries["dirs"]:
			myentries=myentries["dirs"][mys]
		else:
			return None
-	if myentries["dirs"].has_key(mytarget):
+	if mytarget in myentries["dirs"]:
		return myentries["dirs"][mytarget]
-	elif myentries["files"].has_key(mytarget):
+	elif mytarget in myentries["files"]:
		return myentries["files"][mytarget]
	else:
		return None
@@ -242,9 +242,9 @@ def getentries(mydir,recursive=0):
		if file=="digest-framerd-2.4.3":
			print mydir,file
		if os.path.isdir(mydir+"/"+file):
-			if not entries["dirs"].has_key(file):
+			if file not in entries["dirs"]:
				entries["dirs"][file]={"dirs":{},"files":{}}
-			if entries["dirs"][file].has_key("status"):
+			if "status" in entries["dirs"][file]:
				if "exists" not in entries["dirs"][file]["status"]:
					entries["dirs"][file]["status"]+=["exists"]
			else:
@@ -252,9 +252,9 @@ def getentries(mydir,recursive=0):
		elif os.path.isfile(mydir+"/"+file):
			if file=="digest-framerd-2.4.3":
				print "isfile"
-			if not entries["files"].has_key(file):
+			if file not in entries["files"]:
				entries["files"][file]={"revision":"","date":"","flags":"","tags":""}
-			if entries["files"][file].has_key("status"):
+			if "status" in entries["files"][file]:
				if file=="digest-framerd-2.4.3":
					print "has status"
				if "exists" not in entries["files"][file]["status"]:
@@ -270,7 +270,7 @@ def getentries(mydir,recursive=0):
				print "stat'ing"
			mystat=os.stat(mydir+"/"+file)
			mytime=time.asctime(time.gmtime(mystat[ST_MTIME]))
-			if not entries["files"][file].has_key("status"):
+			if "status" not in entries["files"][file]:
				if file=="digest-framerd-2.4.3":
					print "status not set"
				entries["files"][file]["status"]=[]
diff --git a/pym/portage/dispatch_conf.py b/pym/portage/dispatch_conf.py
index 860ffea30..487065a84 100644
--- a/pym/portage/dispatch_conf.py
+++ b/pym/portage/dispatch_conf.py
@@ -31,7 +31,7 @@ def read_config(mandatory_opts):
		sys.exit(1)
	for key in mandatory_opts:
-		if not opts.has_key(key):
+		if key not in opts:
			if key == "merge":
				opts["merge"] = "sdiff --suppress-common-lines --output='%s' '%s' '%s'"
			else:
diff --git a/pym/portage/getbinpkg.py b/pym/portage/getbinpkg.py
index 497194d67..412d753df 100644
--- a/pym/portage/getbinpkg.py
+++ b/pym/portage/getbinpkg.py
@@ -478,15 +478,15 @@ def dir_get_metadata(baseurl, conn=None, chunk_size=3000, verbose=1, usingcache=
		metadatafile.close()
	except (cPickle.UnpicklingError, OSError, IOError, EOFError):
		metadata = {}
-	if not metadata.has_key(baseurl):
+	if baseurl not in metadata:
		metadata[baseurl]={}
-	if not metadata[baseurl].has_key("indexname"):
+	if "indexname" not in metadata[baseurl]:
		metadata[baseurl]["indexname"]=""
-	if not metadata[baseurl].has_key("timestamp"):
+	if "timestamp" not in metadata[baseurl]:
		metadata[baseurl]["timestamp"]=0
-	if not metadata[baseurl].has_key("unmodified"):
+	if "unmodified" not in metadata[baseurl]:
		metadata[baseurl]["unmodified"]=0
-	if not metadata[baseurl].has_key("data"):
+	if "data" not in metadata[baseurl]:
		metadata[baseurl]["data"]={}
	if not os.access(cache_path, os.W_OK):
@@ -648,7 +648,7 @@ def dir_get_metadata(baseurl, conn=None, chunk_size=3000, verbose=1, usingcache=
			out.flush()
		try:
-			if metadata[baseurl].has_key("modified") and metadata[baseurl]["modified"]:
+			if "modified" in metadata[baseurl] and metadata[baseurl]["modified"]:
				metadata[baseurl]["timestamp"] = int(time.time())
				metadatafile = open("/var/cache/edb/remote_metadata.pickle", "w+")
				cPickle.dump(metadata,metadatafile)
diff --git a/pym/portage/glsa.py b/pym/portage/glsa.py
index 2d2f27b30..4dc05f7e1 100644
--- a/pym/portage/glsa.py
+++ b/pym/portage/glsa.py
@@ -92,7 +92,7 @@ def get_glsa_list(myconfig):
	"""
	rValue = []
-	if myconfig.has_key("GLSA_DIR"):
+	if "GLSA_DIR" in myconfig:
		repository = myconfig["GLSA_DIR"]
	else:
		repository = os.path.join(myconfig["PORTDIR"], "metadata", "glsa")
@@ -407,7 +407,7 @@ class Glsa:
		@rtype: None
		@return: None
		"""
-		if self.config.has_key("GLSA_DIR"):
+		if "GLSA_DIR" in self.config:
			repository = "file://" + self.config["GLSA_DIR"]+"/"
		else:
			repository = "file://" + self.config["PORTDIR"] + "/metadata/glsa/"
@@ -470,7 +470,7 @@ class Glsa:
		self.packages = {}
		for p in self.affected.getElementsByTagName("package"):
			name = p.getAttribute("name")
-			if not self.packages.has_key(name):
+			if name not in self.packages:
				self.packages[name] = []
			tmp = {}
			tmp["arch"] = p.getAttribute("arch")
diff --git a/pym/portage/locks.py b/pym/portage/locks.py
index c6d22c1c5..279804475 100644
--- a/pym/portage/locks.py
+++ b/pym/portage/locks.py
@@ -289,9 +289,9 @@ def hardlock_cleanup(path, remove_all_locks=False):
			host = "-".join(hostpid[:-1])
			pid = hostpid[-1]
-			if not mylist.has_key(filename):
+			if filename not in mylist:
				mylist[filename] = {}
-			if not mylist[filename].has_key(host):
+			if host not in mylist[filename]:
				mylist[filename][host] = []
			mylist[filename][host].append(pid)
@@ -301,7 +301,7 @@ def hardlock_cleanup(path, remove_all_locks=False):
		results.append("Found %(count)s locks" % {"count":mycount})
	for x in mylist:
-		if mylist[x].has_key(myhost) or remove_all_locks:
+		if myhost in mylist[x] or remove_all_locks:
			mylockname = hardlock_name(path+"/"+x)
			if hardlink_is_mine(mylockname, path+"/"+x) or \
			   not os.path.exists(path+"/"+x) or \
diff --git a/pym/portage/manifest.py b/pym/portage/manifest.py
index 9c8ab371a..50ed2f52f 100644
--- a/pym/portage/manifest.py
+++ b/pym/portage/manifest.py
@@ -419,9 +419,9 @@ class Manifest(object):
		""" Regenerate hashes for the given file """
		if checkExisting:
			self.checkFileHashes(ftype, fname, ignoreMissing=ignoreMissing)
-		if not ignoreMissing and not self.fhashdict[ftype].has_key(fname):
+		if not ignoreMissing and fname not in self.fhashdict[ftype]:
			raise FileNotInManifestException(fname)
-		if not self.fhashdict[ftype].has_key(fname):
+		if fname not in self.fhashdict[ftype]:
			self.fhashdict[ftype][fname] = {}
		myhashkeys = list(self.hashes)
		if reuseExisting:
diff --git a/pym/portage/util.py b/pym/portage/util.py
index af99adedd..5ae3cc1f3 100644
--- a/pym/portage/util.py
+++ b/pym/portage/util.py
@@ -174,7 +174,7 @@ def stack_dicts(dicts, incremental=0, incrementals=[], ignore_none=0):
		final_dict = {}
		for y in mydict.keys():
			if True:
-				if final_dict.has_key(y) and (incremental or (y in incrementals)):
+				if y in final_dict and (incremental or (y in incrementals)):
					final_dict[y] += " "+mydict[y][:]
				else:
					final_dict[y] = mydict[y][:]
@@ -493,7 +493,7 @@ def varexpand(mystring, mydict={}):
					cexpand[mystring]=""
					return ""
				numvars=numvars+1
-				if mydict.has_key(myvarname):
+				if myvarname in mydict:
					newstring=newstring+mydict[myvarname]
				else:
					newstring=newstring+mystring[pos]
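A side note on the hunks above (an observation, not something stated in the commit): several of the rewritten tests apply "in" to mapping-like objects rather than plain dicts, for example "RSYNC_RETRIES" in settings, where settings is portage's config class. For the "in" operator to behave like the removed has_key() on such a class, the class should provide a __contains__ method (otherwise Python falls back to iterating the object). The class below is a hypothetical minimal sketch, not portage's actual config implementation.

    # Hypothetical sketch of a dict-backed mapping that supports the "in"
    # operator; this is not portage's real config class.
    class SimpleConfig(object):
        def __init__(self, values=None):
            self._values = dict(values or {})

        def __getitem__(self, key):
            return self._values[key]

        def __contains__(self, key):
            # Makes `"KEY" in cfg` equivalent to the removed cfg.has_key("KEY").
            return key in self._values

    cfg = SimpleConfig({"PORTAGE_RSYNC_RETRIES": "3"})
    print("PORTAGE_RSYNC_RETRIES" in cfg)   # True
    print("RSYNC_RETRIES" not in cfg)       # True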