From 9795b7d47aa9982a940f3832250f355845f34d07 Mon Sep 17 00:00:00 2001
From: Zac Medico
Date: Mon, 1 Feb 2010 22:51:00 +0000
Subject: Optimize parallel-fetch for the case where all files are already
 fetched and have the correct size. In this case we can avoid the expense of
 spawning ebuild(1).

svn path=/main/trunk/; revision=15312
---
 pym/_emerge/EbuildFetcher.py | 52 ++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 52 insertions(+)

(limited to 'pym')

diff --git a/pym/_emerge/EbuildFetcher.py b/pym/_emerge/EbuildFetcher.py
index b8931df15..bc443dda2 100644
--- a/pym/_emerge/EbuildFetcher.py
+++ b/pym/_emerge/EbuildFetcher.py
@@ -26,6 +26,12 @@ class EbuildFetcher(SpawnProcess):
 			raise AssertionError("ebuild not found for '%s'" % self.pkg.cpv)
 		settings = self.config_pool.allocate()
 		settings.setcpv(self.pkg)
+		if self.prefetch and \
+			self._prefetch_size_ok(portdb, settings, ebuild_path):
+			self.config_pool.deallocate(settings)
+			self.returncode = os.EX_OK
+			self.wait()
+			return

 		# In prefetch mode, logging goes to emerge-fetch.log and the builddir
 		# should not be touched since otherwise it could interfere with
@@ -78,6 +84,52 @@ class EbuildFetcher(SpawnProcess):
 		self.config_pool.deallocate(settings)
 		SpawnProcess._start(self)

+	def _prefetch_size_ok(self, portdb, settings, ebuild_path):
+		pkgdir = os.path.dirname(ebuild_path)
+		mytree = os.path.dirname(os.path.dirname(pkgdir))
+		distdir = settings["DISTDIR"]
+		use = None
+		if not self.fetchall:
+			use = frozenset(settings["PORTAGE_USE"].split())
+
+		try:
+			uri_map = portdb.getFetchMap(self.pkg.cpv,
+				useflags=use, mytree=mytree)
+		except portage.exception.InvalidDependString as e:
+			return False
+
+		sizes = {}
+		for filename in uri_map:
+			try:
+				st = os.lstat(os.path.join(distdir, filename))
+			except OSError:
+				return False
+			if st.st_size == 0:
+				return False
+			sizes[filename] = st.st_size
+
+		digests = portage.Manifest(pkgdir, distdir).getTypeDigests("DIST")
+		for filename, actual_size in sizes.items():
+			size = digests.get(filename, {}).get('size')
+			if size is None:
+				continue
+			if size != actual_size:
+				return False
+
+		# All files are present and sizes are ok. In this case the normal
+		# fetch code will be skipped, so we need to generate equivalent
+		# output here.
+		if self.logfile is not None:
+			f = codecs.open(_unicode_encode(self.logfile,
+				encoding=_encodings['fs'], errors='strict'),
+				mode='a', encoding=_encodings['content'], errors='replace')
+			for filename in uri_map:
+				f.write((' * %s size ;-) ...' % \
+					filename).ljust(73) + '[ ok ]\n')
+			f.close()
+
+		return True
+
 	def _pipe(self, fd_pipes):
 		"""When appropriate, use a pty so that fetcher progress bars,
 		like wget has, will work properly."""
--
cgit v1.2.3-1-g7c22
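
Editor's note: a minimal standalone sketch of the check this patch adds, stripped
of Portage internals. The helper name all_files_have_expected_size and the
expected_sizes mapping are illustrative assumptions, not part of the Portage API;
in the patch the file list comes from portdb.getFetchMap() and the expected sizes
from the Manifest's DIST digests. The idea is the same: only skip the expensive
fetch step (spawning ebuild(1)) when every distfile already exists, is non-empty,
and matches its recorded size.

    import os

    def all_files_have_expected_size(distdir, expected_sizes):
        """Return True only if every file in expected_sizes exists in
        distdir, is non-empty, and matches its recorded size. A file
        whose expected size is unknown (None) is accepted, mirroring
        the 'continue' for missing digest entries in the patch."""
        for filename, expected in expected_sizes.items():
            try:
                st = os.lstat(os.path.join(distdir, filename))
            except OSError:
                return False  # file missing or unreadable: fetch is needed
            if st.st_size == 0:
                return False  # zero-length files count as not fetched
            if expected is not None and st.st_size != expected:
                return False  # size mismatch: fetch is needed
        return True

    # Hypothetical usage: expected_sizes would be built from the DIST
    # entries of a package's Manifest in a real Portage setup.
    expected_sizes = {"foo-1.0.tar.bz2": 1234567}
    if all_files_have_expected_size("/usr/portage/distfiles", expected_sizes):
        print("all distfiles present with correct sizes, skipping fetch")

The design choice here is that size alone is treated as good enough for the
prefetch fast path; full checksum verification still happens later during the
actual build, so this shortcut only avoids redundant work, not the integrity
check itself.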