author    Narayan Desai <desai@mcs.anl.gov>  2009-06-23 01:13:41 +0000
committer Narayan Desai <desai@mcs.anl.gov>  2009-06-23 01:13:41 +0000
commit    7c4ce56bf3898af3475020066234fcc47bdbf103 (patch)
tree      3cbf8c0b05a19501d3f33ffceb1f564027e32982 /src
parent    ff9ba08761a0fcf6150941eb258299edf5dfc471 (diff)
Packages: fix handling of architectures for RawURL YumSources
Add dictionary tracking YumSource architecture mappings for cache files

git-svn-id: https://svn.mcs.anl.gov/repos/bcfg/trunk/bcfg2@5292 ce84e21b-d406-0410-9b95-82705330c041
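In short: read_files() used to recover each cache file's architecture positionally, via fname.split('@')[-3] on the escaped URL, which only works when the baseurl template puts the arch at a fixed path depth; RawURL sources need not satisfy that, so the lookup broke. The patch instead records the architecture in a file_to_arch dictionary at the moment each repodata URL is built, keyed by the escaped URL (which doubles as the cache filename). A minimal sketch of the idea, with a simplified stand-in for the plugin's escape_url helper:

# Minimal sketch of the mapping approach (illustrative, not the plugin code).
# escape_url below is a simplified stand-in for the plugin's own helper,
# which flattens a URL into a cache filename.
def escape_url(url):
    return url.replace('/', '@')

file_to_arch = dict()

def register_url(url, arch):
    # Record the arch when the URL is constructed, keyed by cache filename.
    file_to_arch[escape_url(url)] = arch

def arch_for(fname):
    # Direct lookup later; no positional split('@')[-3] guesswork.
    return file_to_arch[fname]

register_url('http://example.com/repo/x86_64/repodata/primary.xml.gz', 'x86_64')
assert arch_for('http:@@example.com@repo@x86_64@repodata@primary.xml.gz') == 'x86_64'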
Diffstat (limited to 'src')
-rw-r--r--  src/lib/Server/Plugins/Packages.py | 40 +++++++++++++++++++++++-----------------
1 file changed, 23 insertions(+), 17 deletions(-)
diff --git a/src/lib/Server/Plugins/Packages.py b/src/lib/Server/Plugins/Packages.py
index 7ed28b8b1..2a26f8ffd 100644
--- a/src/lib/Server/Plugins/Packages.py
+++ b/src/lib/Server/Plugins/Packages.py
@@ -164,6 +164,7 @@ class YUMSource(Source):
         self.provides = dict([('global', dict())])
         self.filemap = dict([(x, dict()) for x in ['global'] + self.arches])
         self.needed_paths = set()
+        self.file_to_arch = dict()
 
     def save_state(self):
         cache = file(self.cachefile, 'wb')
@@ -177,33 +178,38 @@
          self.filemap) = cPickle.load(data)
 
     def get_urls(self):
-        usettings = [{'version': self.version, 'component':comp, 'arch':arch}
-                     for comp in self.components for arch in self.arches]
-        surls = [self.baseurl % setting for setting in usettings]
+        surls = list()
+        for arch in self.arches:
+            usettings = [{'version': self.version, 'component':comp,
+                          'arch':arch} for comp in self.components]
+            surls.append((arch, [self.baseurl % setting for setting in usettings]))
         urls = []
-        for surl in surls:
-            rmdurl = surl + '/repodata/repomd.xml'
-            try:
-                repomd = urllib2.urlopen(rmdurl).read()
-                xdata = lxml.etree.XML(repomd)
-            except:
-                logger.error("Failed to process url %s" % rmdurl)
-                continue
-            for elt in xdata.findall(self.rpo + 'data'):
-                if elt.get('type') not in ['filelists', 'primary']:
+        for (sarch, surl_list) in surls:
+            for surl in surl_list:
+                rmdurl = surl + '/repodata/repomd.xml'
+                try:
+                    repomd = urllib2.urlopen(rmdurl).read()
+                    xdata = lxml.etree.XML(repomd)
+                except:
+                    logger.error("Failed to process url %s" % rmdurl)
                     continue
-                floc = elt.find(self.rpo + 'location')
-                urls.append(surl + floc.get('href'))
+                for elt in xdata.findall(self.rpo + 'data'):
+                    if elt.get('type') not in ['filelists', 'primary']:
+                        continue
+                    floc = elt.find(self.rpo + 'location')
+                    fullurl = surl + floc.get('href')
+                    urls.append(fullurl)
+                    self.file_to_arch[self.escape_url(fullurl)] = arch
         return urls
 
     urls = property(get_urls)
 
     def read_files(self):
         for fname in [f for f in self.files if f.endswith('primary.xml.gz')]:
-            farch = fname.split('@')[-3]
+            farch = self.file_to_arch[fname]
             fdata = lxml.etree.parse(fname).getroot()
             self.parse_primary(fdata, farch)
         for fname in [f for f in self.files if f.endswith('filelists.xml.gz')]:
-            farch = fname.split('@')[-3]
+            farch = self.file_to_arch[fname]
            fdata = lxml.etree.parse(fname).getroot()
             self.parse_filelist(fdata, farch)
         # merge data
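For illustration, why the positional split broke for RawURL sources (hypothetical filenames, not from the commit): a templated baseurl always lands the arch three '@'-separated fields from the end of the escaped cache name, but a raw URL carries no arch component at all, so the same index returns an arbitrary path segment:

# Templated source: the arch sits at a predictable slot in the escaped name.
'http:@@mirror@centos@5@os@x86_64@repodata@primary.xml.gz'.split('@')[-3]
# -> 'x86_64'

# RawURL source: no arch component in the URL, so [-3] grabs a path segment.
'http:@@example.com@myrepo@repodata@primary.xml.gz'.split('@')[-3]
# -> 'myrepo'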