author    Zac Medico <zmedico@gentoo.org>    2011-03-25 02:47:17 -0700
committer Zac Medico <zmedico@gentoo.org>    2011-03-25 02:47:17 -0700
commit    7ffa0683cd9c40e630488af5783c549bee5cd3c8 (patch)
tree      9bb1a3973f365d16bfe6bbced7f7170e8dfabc55 /pym
parent    2fd76b639d44f3ff3624ed8dbe96d214a42875e5 (diff)
vartree: remove unused scheduler references
Since all the merge code runs inside MergeProcess now, there's no reason to yield to the scheduler.
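For context, a minimal sketch of the two patterns is shown below. The helper names are hypothetical and this is not portage's actual MergeProcess implementation: the removed code interleaved long file loops with explicit scheduler.scheduleYield() calls so that other tasks in the same process were not starved, whereas forking the merge work into a child process keeps the parent's scheduler responsive with no cooperative yields at all.

# Illustrative sketch only -- process_file() and both merge helpers are
# hypothetical stand-ins, not portage's MergeProcess implementation.
import os

def process_file(path):
    # Stand-in for per-file merge work (copying, ownership, permissions).
    pass

def merge_files_cooperative(files, scheduler, yield_interval=20):
    # Removed pattern: periodically hand control back to the scheduler so
    # other tasks sharing the same process are not starved by a long loop.
    for i, path in enumerate(files):
        if scheduler is not None and i % yield_interval == 0:
            scheduler.scheduleYield()
        process_file(path)

def merge_files_in_child(files):
    # MergeProcess-style pattern: fork and do all of the work in a child
    # process, so the parent stays responsive without any explicit yields.
    pid = os.fork()
    if pid == 0:
        try:
            for path in files:
                process_file(path)
        finally:
            os._exit(0)
    os.waitpid(pid, 0)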
Diffstat (limited to 'pym')
-rw-r--r--  pym/portage/dbapi/vartree.py | 40 +++++++---------------------------------
1 file changed, 7 insertions(+), 33 deletions(-)
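For callers of the owners API, the change amounts to dropping the scheduler keyword argument. A hedged usage sketch follows; reaching the owners lookup through the private _owners attribute and passing ROOT-relative paths such as "/bin/sh" are assumptions here, not documented API.

# Hedged usage sketch: assumes vardbapi exposes the owners lookup via the
# private _owners attribute and that iter_owners() accepts ROOT-relative
# paths; verify against your portage version.
import portage

vardb = portage.db[portage.root]["vartree"].dbapi
owners_db = vardb._owners

# Before: owners_db.iter_owners(paths, scheduler=some_scheduler)
# After:  pass only the path iterable.
for pkg_dblink, path in owners_db.iter_owners(["/bin/sh"]):
    print(pkg_dblink.mycpv, path)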
diff --git a/pym/portage/dbapi/vartree.py b/pym/portage/dbapi/vartree.py
index 8203de85e..4f8c2ee58 100644
--- a/pym/portage/dbapi/vartree.py
+++ b/pym/portage/dbapi/vartree.py
@@ -878,7 +878,7 @@ class vardbapi(dbapi):
def populate(self):
self._populate()
- def _populate(self, scheduler=None):
+ def _populate(self):
owners_cache = vardbapi._owners_cache(self._vardb)
cached_hashes = set()
base_names = self._vardb._aux_cache["owners"]["base_names"]
@@ -902,10 +902,6 @@ class vardbapi(dbapi):
# Cache any missing packages.
for cpv in uncached_pkgs:
-
- if scheduler is not None:
- scheduler.scheduleYield()
-
owners_cache.add(cpv)
# Delete any stale cache.
@@ -919,12 +915,12 @@ class vardbapi(dbapi):
return owners_cache
- def get_owners(self, path_iter, scheduler=None):
+ def get_owners(self, path_iter):
"""
@return the owners as a dblink -> set(files) mapping.
"""
owners = {}
- for owner, f in self.iter_owners(path_iter, scheduler=scheduler):
+ for owner, f in self.iter_owners(path_iter):
owned_files = owners.get(owner)
if owned_files is None:
owned_files = set()
@@ -944,7 +940,7 @@ class vardbapi(dbapi):
owner_set.add(pkg_dblink)
return file_owners
- def iter_owners(self, path_iter, scheduler=None):
+ def iter_owners(self, path_iter):
"""
Iterate over tuples of (dblink, path). In order to avoid
consuming too many resources for too much time, resources
@@ -956,7 +952,7 @@ class vardbapi(dbapi):
if not isinstance(path_iter, list):
path_iter = list(path_iter)
- owners_cache = self._populate(scheduler=scheduler)
+ owners_cache = self._populate()
vardb = self._vardb
root = vardb._eroot
hash_pkg = owners_cache._hash_pkg
@@ -1015,23 +1011,19 @@ class vardbapi(dbapi):
if dblink(cpv).isowner(path):
owners.append((cpv, path))
- if scheduler is not None:
- scheduler.scheduleYield()
-
except StopIteration:
path_iter.append(path)
del owners[:]
dblink_cache.clear()
gc.collect()
- for x in self._iter_owners_low_mem(path_iter,
- scheduler=scheduler):
+ for x in self._iter_owners_low_mem(path_iter):
yield x
return
else:
for cpv, p in owners:
yield (dblink(cpv), p)
- def _iter_owners_low_mem(self, path_list, scheduler=None):
+ def _iter_owners_low_mem(self, path_list):
"""
This implementation will make a short-lived dblink instance (and
parse CONTENTS) for every single installed package. This is
@@ -1053,10 +1045,6 @@ class vardbapi(dbapi):
root = self._vardb._eroot
for cpv in self._vardb.cpv_all():
-
- if scheduler is not None:
- scheduler.scheduleYield()
-
dblnk = self._vardb._dblink(cpv)
for path, name, is_basename in path_info_list:
@@ -1197,10 +1185,6 @@ class dblink(object):
r')$'
)
- # When looping over files for merge/unmerge, temporarily yield to the
- # scheduler each time this many files are processed.
- _file_merge_yield_interval = 20
-
def __init__(self, cat, pkg, myroot=None, settings=None, treetype=None,
vartree=None, blockers=None, scheduler=None, pipe=None):
"""
@@ -2586,7 +2570,6 @@ class dblink(object):
plib_collisions = {}
showMessage = self._display_merge
- scheduler = self._scheduler
stopmerge = False
collisions = []
destroot = self.settings['ROOT']
@@ -2596,10 +2579,6 @@ class dblink(object):
if i % 1000 == 0 and i != 0:
showMessage(_("%d files checked ...\n") % i)
- if scheduler is not None and \
- 0 == i % self._file_merge_yield_interval:
- scheduler.scheduleYield()
-
dest_path = normalize_path(
os.path.join(destroot, f.lstrip(os.path.sep)))
try:
@@ -2708,7 +2687,6 @@ class dblink(object):
os = _os_merge
showMessage = self._display_merge
- scheduler = self._scheduler
file_paths = set()
for dblnk in installed_instances:
@@ -2717,10 +2695,6 @@ class dblink(object):
real_paths = set()
for i, path in enumerate(file_paths):
- if scheduler is not None and \
- 0 == i % self._file_merge_yield_interval:
- scheduler.scheduleYield()
-
if os is _os_merge:
try:
_unicode_encode(path,