author     Sol Jerome <sol.jerome@gmail.com>  2011-04-04 08:59:54 -0500
committer  Sol Jerome <sol.jerome@gmail.com>  2011-04-06 19:35:21 -0500
commit     50d489f38e18577cb7e75605515d4b8d567aaa52 (patch)
tree       3604a9eb742e7be758311f658b7398befde46d39 /tools
parent     f1cf457edb2a9552adf82d72541af69487f897f4 (diff)
download   bcfg2-50d489f38e18577cb7e75605515d4b8d567aaa52.tar.gz
           bcfg2-50d489f38e18577cb7e75605515d4b8d567aaa52.tar.bz2
           bcfg2-50d489f38e18577cb7e75605515d4b8d567aaa52.zip
tools: PY3K + PEP8 fixes
Signed-off-by: Sol Jerome <sol.jerome@gmail.com>
Diffstat (limited to 'tools')
-rw-r--r--  tools/basebuilder.py                 8
-rwxr-xr-x  tools/batchadd.py                   66
-rw-r--r--  tools/create-debian-pkglist-gp.py   59
-rwxr-xr-x  tools/create-debian-pkglist.py      99
-rw-r--r--  tools/create-rpm-pkglist.py         32
-rwxr-xr-x  tools/export.py                     10
-rwxr-xr-x  tools/hostbase.py                   42
-rwxr-xr-x  tools/hostbasepush.py               10
-rwxr-xr-x  tools/hostinfo.py                   79
-rwxr-xr-x  tools/pkgmgr_gen.py                247
-rwxr-xr-x  tools/pkgmgr_update.py             116
-rw-r--r--  tools/rpmlisting.py                 94
-rw-r--r--  tools/yum-listpkgs-xml.py           51
13 files changed, 549 insertions, 364 deletions
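The patches below apply the same handful of Python 3 forward-compatibility idioms across the tools: the print statement becomes the print() function, dict.has_key() becomes a membership test, dict views are wrapped in list() before sorting or indexing, sys.exc_value gives way to sys.exc_info(), raw_input() to input(), and the removed commands module to subprocess.getstatusoutput(). A minimal sketch of those idioms follows; it is illustrative only and not code from the bcfg2 tree (the pkgdata dict and the echo command are made up).

    import subprocess
    import sys

    pkgdata = {'bash': {'x86_64': '4.1'}}          # illustrative data

    print("found %d packages" % len(pkgdata))      # print() function instead of the print statement
    if 'bash' in pkgdata:                          # membership test instead of has_key()
        versions = list(pkgdata['bash'].values())  # wrap dict views in list() before indexing

    try:
        raise RuntimeError("example failure")
    except Exception:
        print("error: %s" % sys.exc_info()[1])     # sys.exc_info() instead of the removed sys.exc_value

    # The commands module is gone in Python 3; subprocess provides getstatusoutput() there.
    status, output = subprocess.getstatusoutput("echo hello")

The PEP 8 portion of the commit is purely presentational: imports split one per line and sorted, spaces around operators and after commas in dict literals, long argument lists wrapped, and two blank lines before top-level definitions.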
diff --git a/tools/basebuilder.py b/tools/basebuilder.py
index 704769755..a9ab6b288 100644
--- a/tools/basebuilder.py
+++ b/tools/basebuilder.py
@@ -7,11 +7,11 @@ if __name__ == '__main__':
dir = argv[1]
imagename = dir.split('/')[-1]
e = Element("Image", name=imagename)
- for line in open("%s/base.ConfigFile"%(dir)).readlines():
+ for line in open("%s/base.ConfigFile" % (dir)).readlines():
SubElement(e, "ConfigFile", name=line.strip())
- for line in open("%s/base.Package"%(dir)).readlines():
+ for line in open("%s/base.Package" % (dir)).readlines():
SubElement(e, "Package", name=line.strip())
- for line in open("%s/base.Service"%(dir)).readlines():
+ for line in open("%s/base.Service" % (dir)).readlines():
SubElement(e, "Service", name=line.strip().split()[0])
- print tostring(e)
+ print(tostring(e))
diff --git a/tools/batchadd.py b/tools/batchadd.py
index ce47650a5..e8008b330 100755
--- a/tools/batchadd.py
+++ b/tools/batchadd.py
@@ -1,23 +1,37 @@
#!/usr/bin/python
-import sys, os
from datetime import date
+import os
+import sys
+
os.environ['DJANGO_SETTINGS_MODULE'] = 'Bcfg2.Server.Hostbase.settings'
from Bcfg2.Server.Hostbase.hostbase.models import *
from Bcfg2.Server.Hostbase.settings import DEFAULT_MX, PRIORITY
import Bcfg2.Server.Hostbase.regex
-host_attribs = ['hostname', 'whatami', 'netgroup', 'security_class', 'support',
- 'csi', 'printq', 'outbound_smtp', 'primary_user',
- 'administrator', 'location', 'expiration_date', 'comments']
+host_attribs = ['administrator',
+ 'comments',
+ 'csi',
+ 'expiration_date',
+ 'hostname',
+ 'location',
+ 'netgroup',
+ 'outbound_smtp',
+ 'primary_user',
+ 'printq',
+ 'security_class',
+ 'support',
+ 'whatami']
+
def handle_error(field):
if '-f' in sys.argv:
return
- print "Error: %s is already defined in hostbase" % field
+ print("Error: %s is already defined in hostbase" % field)
if '-s' in sys.argv:
sys.exit(1)
+
def checkformat(values, indices):
"""Ensures file contains all necessary attributes in order """
filelist = [pair[0] for pair in values]
@@ -34,8 +48,8 @@ def checkformat(values, indices):
# process rest of host attributes
try:
next = filelist[1:].index('hostname')
- remaining = filelist[13:next+1]
- filelist = filelist[next+1:]
+ remaining = filelist[13:next + 1]
+ filelist = filelist[next + 1:]
except:
remaining = filelist[13:]
needfields = ['mac_addr', 'hdwr_type', 'ip_addr']
@@ -50,7 +64,7 @@ if __name__ == '__main__':
try:
fd = open(sys.argv[1], 'r')
except (IndexError, IOError):
- print "\nUsage: batchadd.py filename\n"
+ print("\nUsage: batchadd.py filename\n")
sys.exit()
lines = fd.readlines()
@@ -59,18 +73,20 @@ if __name__ == '__main__':
if line.lstrip(' ')[0] != '#' and line != '\n']
if info[0][0] == 'mx' and info[1][0] == 'priority':
- mx, created = MX.objects.get_or_create(mx=info[0][1], priority=info[1][1])
+ mx, created = MX.objects.get_or_create(mx=info[0][1],
+ priority=info[1][1])
info = info[2:]
-
else:
- mx, created = MX.objects.get_or_create(mx=DEFAULT_MX, priority=PRIORITY)
+ mx, created = MX.objects.get_or_create(mx=DEFAULT_MX,
+ priority=PRIORITY)
if created:
mx.save()
- hostindices = [num for num in range(0, len(info)) if info[num][0] == 'hostname']
+ hostindices = [num for num in range(0, len(info))
+ if info[num][0] == 'hostname']
if not checkformat(info, hostindices):
- print "Error: file format"
+ print("Error: file format")
sys.exit()
#################
@@ -83,7 +99,8 @@ if __name__ == '__main__':
# do something here
pass
- macindices = [num for num in range(0, len(info)) if info[num][0] == 'mac_addr']
+ macindices = [num for num in range(0, len(info))
+ if info[num][0] == 'mac_addr']
for mac_addr in macindices:
try:
host = Interface.objects.get(mac_addr=info[mac_addr][1])
@@ -103,7 +120,9 @@ if __name__ == '__main__':
blank.__dict__[pair[0]] = 0
elif pair[0] == 'expiration_date':
(year, month, day) = pair[1].split("-")
- blank.expiration_date = date(int(year), int(month), int(day))
+ blank.expiration_date = date(int(year),
+ int(month),
+ int(day))
else:
blank.__dict__[pair[0]] = pair[1]
blank.status = 'active'
@@ -113,7 +132,9 @@ if __name__ == '__main__':
while info and info[0][0] != 'hostname':
if info[0][0] == 'mac_addr':
pair = info.pop(0)
- inter = Interface.objects.create(host=blank, mac_addr=pair[1], hdwr_type='eth')
+ inter = Interface.objects.create(host=blank,
+ mac_addr=pair[1],
+ hdwr_type='eth')
if not pair[1]:
inter.dhcp = False
inter.save()
@@ -124,13 +145,17 @@ if __name__ == '__main__':
elif info[0][0] == 'ip_addr':
pair = info.pop(0)
ip = IP.objects.create(interface=inter, ip_addr=pair[1])
- hostnamenode = Name(ip=ip, name=blank.hostname, dns_view='global', only=False)
+ hostnamenode = Name(ip=ip,
+ name=blank.hostname,
+ dns_view='global',
+ only=False)
hostnamenode.save()
- namenode = Name(ip=ip, name=".".join([newhostname + "-" + inter.hdwr_type,
- newdomain]),
+ namenode = Name(ip=ip,
+ name=".".join([newhostname + "-" + inter.hdwr_type,
+ newdomain]),
dns_view="global", only=False)
namenode.save()
- subnetnode = Name(ip=ip, name=newhostname + "-" +
+ subnetnode = Name(ip=ip, name=newhostname + "-" +
ip.ip_addr.split(".")[2] + "." +
newdomain, dns_view="global", only=False)
subnetnode.save()
@@ -141,4 +166,3 @@ if __name__ == '__main__':
pair = info.pop(0)
cname = CName.objects.create(name=hostnamenode, cname=pair[1])
cname.save()
-
diff --git a/tools/create-debian-pkglist-gp.py b/tools/create-debian-pkglist-gp.py
index ae038e056..b7f18bf1a 100644
--- a/tools/create-debian-pkglist-gp.py
+++ b/tools/create-debian-pkglist-gp.py
@@ -5,22 +5,31 @@ __revision__ = '$Id: create-debian-pkglist.py 11778 2007-12-11 13:46:06Z guillau
# Original code from Bcfg2 sources
-import gzip, os, urllib, cStringIO, sys, ConfigParser, commands
+import gzip
+import os
+import urllib
+import cStringIO
+import sys
+import ConfigParser
+import subprocess
+
def debug(msg):
'''print debug messages'''
if '-v' in sys.argv:
sys.stdout.write(msg)
+
def get_as_list(somestring):
""" Input : a string like this : 'a, g, f,w'
Output : a list like this : ['a', 'g', 'f', 'w'] """
return somestring.replace(' ', '').split(',')
+
def list_contains_all_the_same_values(l):
if len(l) == 0:
return True
- # The list contains all the same values if all elements in
+ # The list contains all the same values if all elements in
# the list are equal to the first element.
first = l[0]
for elem in l:
@@ -28,6 +37,7 @@ def list_contains_all_the_same_values(l):
return False
return True
+
class SourceURL:
def __init__(self, deb_url):
deb_url_tokens = deb_url.split()
@@ -35,13 +45,14 @@ class SourceURL:
self.url = deb_url_tokens[1]
self.distribution = deb_url_tokens[2]
self.sections = deb_url_tokens[3:]
-
+
def __str__(self):
return "deb %s %s %s" % (self.url, self.distribution, ' '.join(self.sections))
-
+
def __repr__(self):
return "<%s %s>" % (self.__class__.__name__, str(self))
+
class Source:
def __init__(self, confparser, section, bcfg2_repos_prefix):
self.filename = "%s/Pkgmgr/%s.xml" % (bcfg2_repos_prefix, section)
@@ -52,11 +63,11 @@ class Source:
self.source_urls = []
self.source_urls.append(SourceURL(confparser.get(section, "deb_url")))
# Agregate urls in the form of deb_url0, deb_url1, ... to deb_url9
- for i in range(10): # 0 to 9
+ for i in range(10): # 0 to 9
option_name = "deb_url%s" % i
if confparser.has_option(section, option_name):
self.source_urls.append(SourceURL(confparser.get(section, option_name)))
-
+
self.file = None
self.indent_level = 0
@@ -66,13 +77,13 @@ Groups: %s
Priority: %s
Architectures: %s
Source URLS: %s""" % (self.filename, self.groups, self.priority, self.architectures, self.source_urls)
-
+
def __repr__(self):
return "<%s %s>" % (self.__class__.__name__, str(self))
def _open_file(self):
self.file = open(self.filename + '~', 'w')
-
+
def _close_file(self):
self.file.close()
@@ -88,7 +99,8 @@ Source URLS: %s""" % (self.filename, self.groups, self.priority, self.architectu
# Avoid forking a new process if the two strings are equals
if version1 == version2:
return False
- (status, output) = commands.getstatusoutput("/usr/bin/dpkg --compare-versions %s lt %s" % (version1, version2))
+ (status, output) = subprocess.getstatusoutput("/usr/bin/dpkg --compare-versions %s lt %s" % (version1,
+ version2))
#print "%s dpkg --compare-versions %s lt %s" % (status, version1, version2)
return status == 0
@@ -119,16 +131,17 @@ Source URLS: %s""" % (self.filename, self.groups, self.priority, self.architectu
pkgdata[pkgname][arch] = version
else:
# First entry for this package
- pkgdata[pkgname] = {arch:version}
+ pkgdata[pkgname] = {arch: version}
else:
continue
except:
- raise Exception("Could not process URL %s\n%s\nPlease verify the URL." % (url, sys.exc_value))
+ raise Exception("Could not process URL %s\n%s\nPlease "
+ "verify the URL." % (url, sys.exc_info()[1]))
return pkgdata
-
+
def _get_sorted_pkg_keys(self, pkgdata):
pkgs = []
- for k in pkgdata.keys():
+ for k in list(pkgdata.keys()):
pkgs.append(k)
pkgs.sort()
return pkgs
@@ -142,7 +155,7 @@ Source URLS: %s""" % (self.filename, self.groups, self.priority, self.architectu
# (There is exactly one version per architecture)
archdata = pkgdata[pkg]
# List of versions for all architectures of this package
- pkgversions = archdata.values()
+ pkgversions = list(archdata.values())
# If the versions for all architectures are the same
if list_contains_all_the_same_values(pkgversions):
# Write the package data
@@ -153,7 +166,7 @@ Source URLS: %s""" % (self.filename, self.groups, self.priority, self.architectu
del pkgdata[pkg]
def _write_perarch_entries(self, pkgdata):
- # Write entries that are left, i.e. packages that have different
+ # Write entries that are left, i.e. packages that have different
# versions per architecture
#perarch = 0
if pkgdata:
@@ -170,29 +183,29 @@ Source URLS: %s""" % (self.filename, self.groups, self.priority, self.architectu
def process(self):
'''Build package indices for source'''
-
+
# First, build the pkgdata structure without touching the file,
# so the file does not contain incomplete informations if the
# network in not reachable.
pkgdata = {}
for source_url in self.source_urls:
pkgdata = self._update_pkgdata(pkgdata, source_url)
-
+
# Construct the file.
self._open_file()
for source_url in self.source_urls:
self._write_to_file('<!-- %s -->' % source_url)
-
+
self._write_to_file('<PackageList priority="%s" type="deb">' % self.priority)
-
+
self.indent_level = self.indent_level + 1
for group in self.groups:
self._write_to_file('<Group name="%s">' % group)
self.indent_level = self.indent_level + 1
-
+
self._write_common_entries(pkgdata)
self._write_perarch_entries(pkgdata)
-
+
for group in self.groups:
self.indent_level = self.indent_level - 1
self._write_to_file('</Group>')
@@ -205,10 +218,10 @@ if __name__ == '__main__':
main_conf_parser = ConfigParser.SafeConfigParser()
main_conf_parser.read(['/etc/bcfg2.conf'])
repo = main_conf_parser.get('server', 'repository')
-
+
confparser = ConfigParser.SafeConfigParser()
confparser.read(os.path.join(repo, "etc/debian-pkglist.conf"))
-
+
# We read the whole configuration file before processing each entries
# to avoid doing work if there is a problem in the file.
sources_list = []
diff --git a/tools/create-debian-pkglist.py b/tools/create-debian-pkglist.py
index 450c6aba6..1127f0448 100755
--- a/tools/create-debian-pkglist.py
+++ b/tools/create-debian-pkglist.py
@@ -5,7 +5,15 @@ __revision__ = '$Id$'
# Original code from Bcfg2 sources
-import glob, gzip, lxml.etree, os, re, urllib, cStringIO, sys, ConfigParser, apt_pkg
+import apt_pkg
+import ConfigParser
+import cStringIO
+import gzip
+import os
+import re
+import urllib
+import sys
+
apt_pkg.init()
@@ -14,15 +22,17 @@ def debug(msg):
if '-v' in sys.argv:
sys.stdout.write(msg)
+
def get_as_list(somestring):
""" Input : a string like this : 'a, g, f,w'
Output : a list like this : ['a', 'g', 'f', 'w'] """
return somestring.replace(' ', '').split(',')
+
def list_contains_all_the_same_values(l):
if len(l) == 0:
return True
- # The list contains all the same values if all elements in
+ # The list contains all the same values if all elements in
# the list are equal to the first element.
first = l[0]
for elem in l:
@@ -30,6 +40,7 @@ def list_contains_all_the_same_values(l):
return False
return True
+
class SourceURL:
def __init__(self, deb_url, arch):
deb_url_tokens = deb_url.split()
@@ -38,13 +49,14 @@ class SourceURL:
self.distribution = deb_url_tokens[2]
self.sections = deb_url_tokens[3:]
self.arch = arch
-
+
def __str__(self):
return "deb %s %s %s" % (self.url, self.distribution, ' '.join(self.sections))
-
+
def __repr__(self):
return "<%s %s>" % (self.__class__.__name__, str(self))
+
class Source:
def __init__(self, confparser, section, bcfg2_repos_prefix):
self.filename = "%s/Pkgmgr/%s.xml" % (bcfg2_repos_prefix, section)
@@ -58,25 +70,32 @@ class Source:
self.arch_specialurl = set()
self.source_urls = []
- self.source_urls.append(SourceURL(confparser.get(section, "deb_url"),"all"))
+ self.source_urls.append(SourceURL(confparser.get(section, "deb_url"),
+ "all"))
# Agregate urls in the form of deb_url0, deb_url1, ... to deb_url9
- for i in range(10): # 0 to 9
+ for i in range(10): # 0 to 9
option_name = "deb_url%s" % i
if confparser.has_option(section, option_name):
- self.source_urls.append(SourceURL(confparser.get(section, option_name),"all"))
+ self.source_urls.append(SourceURL(confparser.get(section,
+ option_name),
+ "all"))
# Aggregate architecture specific urls (if present)
for arch in self.architectures:
- if not confparser.has_option(section, "deb_"+arch+"_url"):
+ if not confparser.has_option(section, "deb_" + arch + "_url"):
continue
- self.source_urls.append(SourceURL(confparser.get(section, "deb_"+arch+"_url"),arch))
+ self.source_urls.append(SourceURL(confparser.get(section,
+ "deb_" + arch + "_url"),
+ arch))
# Agregate urls in the form of deb_url0, deb_url1, ... to deb_url9
- for i in range(10): # 0 to 9
- option_name = "deb_"+arch+"_url%s" % i
+ for i in range(10): # 0 to 9
+ option_name = "deb_" + arch + "_url%s" % i
if confparser.has_option(section, option_name):
- self.source_urls.append(SourceURL(confparser.get(section, option_name),arch))
+ self.source_urls.append(SourceURL(confparser.get(section,
+ option_name),
+ arch))
self.arch_specialurl.add(arch)
-
+
self.file = None
self.indent_level = 0
@@ -86,13 +105,13 @@ Groups: %s
Priority: %s
Architectures: %s
Source URLS: %s""" % (self.filename, self.groups, self.priority, self.architectures, self.source_urls)
-
+
def __repr__(self):
return "<%s %s>" % (self.__class__.__name__, str(self))
def _open_file(self):
self.file = open(self.filename + '~', 'w')
-
+
def _close_file(self):
self.file.close()
@@ -118,7 +137,10 @@ Source URLS: %s""" % (self.filename, self.groups, self.priority, self.architectu
continue
if source_url.arch == "all" and arch in self.arch_specialurl:
continue
- url = "%s/dists/%s/%s/binary-%s/Packages.gz" % (source_url.url, source_url.distribution, section, arch)
+ url = "%s/dists/%s/%s/binary-%s/Packages.gz" % (source_url.url,
+ source_url.distribution,
+ section,
+ arch)
debug("Processing url %s\n" % (url))
try:
data = urllib.urlopen(url)
@@ -129,8 +151,8 @@ Source URLS: %s""" % (self.filename, self.groups, self.priority, self.architectu
pkgname = line.split(' ')[1].strip()
elif line[:8] == 'Version:':
version = line.split(' ')[1].strip()
- if pkgdata.has_key(pkgname):
- if pkgdata[pkgname].has_key(arch):
+ if pkgname in pkgdata:
+ if arch in pkgdata[pkgname]:
# The package is listed twice for the same architecture
# We keep the most recent version
old_version = pkgdata[pkgname][arch]
@@ -142,20 +164,21 @@ Source URLS: %s""" % (self.filename, self.groups, self.priority, self.architectu
pkgdata[pkgname][arch] = version
else:
# First entry for this package
- pkgdata[pkgname] = {arch:version}
+ pkgdata[pkgname] = {arch: version}
else:
continue
except:
- raise Exception("Could not process URL %s\n%s\nPlease verify the URL." % (url, sys.exc_value))
- return dict((k,v) for (k,v) in pkgdata.items() \
+ raise Exception("Could not process URL %s\n%s\nPlease "
+ "verify the URL." % (url, sys.exc_info()[1]))
+ return dict((k, v) for (k, v) in list(pkgdata.items()) \
if re.search(self.pattern, k))
-
+
def _get_sorted_pkg_keys(self, pkgdata):
- pkgs = []
- for k in pkgdata.keys():
- pkgs.append(k)
- pkgs.sort()
- return pkgs
+ pkgs = []
+ for k in list(pkgdata.keys()):
+ pkgs.append(k)
+ pkgs.sort()
+ return pkgs
def _write_common_entries(self, pkgdata):
# Write entries for packages that have the same version
@@ -166,7 +189,7 @@ Source URLS: %s""" % (self.filename, self.groups, self.priority, self.architectu
# (There is exactly one version per architecture)
archdata = pkgdata[pkg]
# List of versions for all architectures of this package
- pkgversions = archdata.values()
+ pkgversions = list(archdata.values())
# If the versions for all architectures are the same
if len(self.architectures) == len(pkgversions) and list_contains_all_the_same_values(pkgversions):
# Write the package data
@@ -177,7 +200,7 @@ Source URLS: %s""" % (self.filename, self.groups, self.priority, self.architectu
del pkgdata[pkg]
def _write_perarch_entries(self, pkgdata):
- # Write entries that are left, i.e. packages that have different
+ # Write entries that are left, i.e. packages that have different
# versions per architecture
#perarch = 0
if pkgdata:
@@ -185,7 +208,7 @@ Source URLS: %s""" % (self.filename, self.groups, self.priority, self.architectu
self._write_to_file('<Group name="%s">' % (arch))
self.indent_level = self.indent_level + 1
for pkg in self._get_sorted_pkg_keys(pkgdata):
- if pkgdata[pkg].has_key(arch):
+ if arch in pkgdata[pkg]:
self._write_to_file('<Package name="%s" version="%s"/>' % (pkg, pkgdata[pkg][arch]))
#perarch += 1
self.indent_level = self.indent_level - 1
@@ -194,29 +217,29 @@ Source URLS: %s""" % (self.filename, self.groups, self.priority, self.architectu
def process(self):
'''Build package indices for source'''
-
+
# First, build the pkgdata structure without touching the file,
# so the file does not contain incomplete informations if the
# network in not reachable.
pkgdata = {}
for source_url in self.source_urls:
pkgdata = self._update_pkgdata(pkgdata, source_url)
-
+
# Construct the file.
self._open_file()
for source_url in self.source_urls:
self._write_to_file('<!-- %s -->' % source_url)
-
+
self._write_to_file('<PackageList priority="%s" type="deb">' % self.priority)
-
+
self.indent_level = self.indent_level + 1
for group in self.groups:
self._write_to_file('<Group name="%s">' % group)
self.indent_level = self.indent_level + 1
-
+
self._write_common_entries(pkgdata)
self._write_perarch_entries(pkgdata)
-
+
for group in self.groups:
self.indent_level = self.indent_level - 1
self._write_to_file('</Group>')
@@ -229,10 +252,10 @@ if __name__ == '__main__':
# Prefix is relative to script path
complete_script_path = os.path.join(os.getcwd(), sys.argv[0])
prefix = complete_script_path[:-len('etc/create-debian-pkglist.py')]
-
+
confparser = ConfigParser.SafeConfigParser()
confparser.read(prefix + "etc/debian-pkglist.conf")
-
+
# We read the whole configuration file before processing each entries
# to avoid doing work if there is a problem in the file.
sources_list = []
diff --git a/tools/create-rpm-pkglist.py b/tools/create-rpm-pkglist.py
index e88de4191..f9dc258ab 100644
--- a/tools/create-rpm-pkglist.py
+++ b/tools/create-rpm-pkglist.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
#
-# Copyright (c) 2010 Fabian Affolter, Bernewireless.net.
+# Copyright (c) 2010 Fabian Affolter, Bernewireless.net.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
@@ -27,17 +27,17 @@
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Author: Fabian Affolter <fabian at bernewireless.net>
-#
+#
-import yum
-import os
-import sys
from lxml import etree
from optparse import OptionParser
+import os
+import yum
__author__ = 'Fabian Affolter <fabian@bernewireless.net>'
__version__ = '0.1'
+
def retrievePackages():
"""Getting the installed packages with yum."""
yb = yum.YumBase()
@@ -46,10 +46,11 @@ def retrievePackages():
pkglist = []
for pkg in sorted(pl.installed):
pkgdata = pkg.name, pkg.version
- pkglist.append(pkgdata)
-
+ pkglist.append(pkgdata)
+
return pkglist
+
def parse_command_line_parameters():
"""Parses command line arguments."""
usage = "usage: %prog [options]"
@@ -62,22 +63,23 @@ def parse_command_line_parameters():
parser.add_option("-f", "--filename", dest="filename",
type="string",
metavar="FILE", default="packages.xml",
- help="Write the output to an XML FILE" )
+ help="Write the output to an XML FILE")
(options, args) = parser.parse_args()
num_args = 1
return options, args
+
def indent(elem, level=0):
"""Helps clean up the XML."""
# Stolen from http://effbot.org/zone/element-lib.htm
- i = "\n" + level*" "
+ i = "\n" + level * " "
if len(elem):
if not elem.text or not elem.text.strip():
elem.text = i + " "
for e in elem:
- indent(e, level+1)
+ indent(e, level + 1)
if not e.tail or not e.tail.strip():
e.tail = i + " "
if not e.tail or not e.tail.strip():
@@ -86,17 +88,19 @@ def indent(elem, level=0):
if level and (not elem.tail or not elem.tail.strip()):
elem.tail = i
+
def transformXML():
"""Transform the package list to an XML file."""
packagelist = retrievePackages()
root = etree.Element("PackageList")
- for i,j in packagelist:
- root.append( etree.Element("Package", name = i, version = j) )
+ for i, j in packagelist:
+ root.append(etree.Element("Package", name=i, version=j))
#Print the content
#print(etree.tostring(root, pretty_print=True))
tree = etree.ElementTree(root)
return tree
+
def main():
options, args = parse_command_line_parameters()
filename = options.filename
@@ -105,14 +109,14 @@ def main():
if options.show == True:
tree = etree.parse(filename)
for node in tree.findall("//Package"):
- print node.attrib["name"]
+ print(node.attrib["name"])
indent(packagelist.getroot())
packagelist.write(filename, encoding="utf-8")
if options.pkgversion == True:
tree = etree.parse(filename)
for node in tree.findall("//Package"):
- print "%s-%s" % (node.attrib["name"], node.attrib["version"])
+ print("%s-%s" % (node.attrib["name"], node.attrib["version"]))
#FIXME : This should be changed to the standard way of optparser
#FIXME : Make an option available to strip the version number of the pkg
diff --git a/tools/export.py b/tools/export.py
index 971102290..2d8d38e20 100755
--- a/tools/export.py
+++ b/tools/export.py
@@ -12,17 +12,18 @@ import sys
pkgname = 'bcfg2'
ftphost = 'terra.mcs.anl.gov'
ftpdir = '/mcs/ftp/pub/bcfg'
-version = raw_input("Please enter the version you are tagging (e.g. 1.0.0): ")
+version = input("Please enter the version you are tagging (e.g. 1.0.0): ")
tarname = '/tmp/%s-%s.tar.gz' % (pkgname, version)
+
def run(command):
return Popen(command, shell=True, stdout=PIPE).communicate()
# update the version
majorver = version[:5]
minorver = version[5:]
-name = raw_input("Your name: ")
-email = raw_input("Your email: ")
+name = input("Your name: ")
+email = input("Your email: ")
newchangelog = \
"""bcfg2 (%s-0.0%s) unstable; urgency=low
@@ -68,7 +69,8 @@ for line in fileinput.input('solaris/pkginfo.bcfg2-server', inplace=1):
line = line.replace(line, 'VERSION=%s\n' % version)
sys.stdout.write(line)
# update the version in reports
-for line in fileinput.input('src/lib/Server/Reports/reports/templates/base.html', inplace=1):
+for line in fileinput.input('src/lib/Server/Reports/reports/templates/base.html',
+ inplace=1):
if 'Bcfg2 Version' in line:
line = line.replace(line, ' <span>Bcfg2 Version %s</span>\n' % version)
sys.stdout.write(line)
diff --git a/tools/hostbase.py b/tools/hostbase.py
index 974577e69..7474e68b7 100755
--- a/tools/hostbase.py
+++ b/tools/hostbase.py
@@ -1,36 +1,50 @@
#!/usr/bin/python
-import sys, os
-os.environ['DJANGO_SETTINGS_MODULE'] = 'Hostbase.settings'
-from Hostbase.hostbase.models import Host, Interface
+import os
from getopt import getopt, GetoptError
from re import split
+import sys
+
+os.environ['DJANGO_SETTINGS_MODULE'] = 'Hostbase.settings'
+from Hostbase.hostbase.models import Host
-attribs = ['hostname', 'whatami', 'netgroup', 'security_class', 'support',
- 'csi', 'printq', 'dhcp', 'outbound_smtp', 'primary_user',
- 'administrator', 'location', 'expiration_date', 'comments',
- 'status', 'last']
+attribs = ['administrator',
+ 'comments',
+ 'csi',
+ 'dhcp',
+ 'expiration_date',
+ 'hostname',
+ 'last',
+ 'location',
+ 'netgroup',
+ 'outbound_smtp',
+ 'primary_user',
+ 'printq',
+ 'security_class',
+ 'support',
+ 'status',
+ 'whatami']
already_exists = None
#here's my attempt at making the command line idiot proof
#you must supply and arugument and hostname for hostbase.py to run
try:
- (opts, args) = getopt(sys.argv[1:],'l:c:')
+ (opts, args) = getopt(sys.argv[1:], 'l:c:')
sys.argv[1]
if len(split("\.", opts[0][1])) == 1:
hosttouse = opts[0][1] + ".mcs.anl.gov"
else:
hosttouse = opts[0][1]
except (GetoptError, IndexError):
- print "\nUsage: hostbase.py -flag (hostname)\n"
- print "Flags:"
- print "\t-l look (hostname)\n"
-# print "\t-c copy (hostname)\n"
+ print("\nUsage: hostbase.py -flag (hostname)\n")
+ print("Flags:")
+ print("\t-l look (hostname)\n")
+# print("\t-c copy (hostname)\n")
sys.exit()
try:
host = Host.objects.get(hostname=hosttouse)
except:
- print "Error: host %s not in hostbase" % hosttouse
+ print("Error: host %s not in hostbase" % hosttouse)
sys.exit(1)
interfaces = []
for interface in host.interface_set.all():
@@ -53,7 +67,7 @@ for interface in interfaces:
if opts[0][0] == '-l':
"""Displays general host information"""
- print hostinfo
+ print(hostinfo)
if opts[0][0] == '-c':
"""Provides pre-filled template to copy a host record"""
diff --git a/tools/hostbasepush.py b/tools/hostbasepush.py
index 61f2e046b..070711c82 100755
--- a/tools/hostbasepush.py
+++ b/tools/hostbasepush.py
@@ -2,15 +2,15 @@
__revision__ = "$Revision: $"
-import Bcfg2.Client.Proxy, os
+import os
+import Bcfg2.Client.Proxy
if not os.getuid() == 0:
- print 'this command must be run as root'
+ print("this command must be run as root")
raise SystemExit
proxy = Bcfg2.Client.Proxy.bcfg2()
-print 'building files...'
+print("building files...")
proxy.run_method('Hostbase.rebuildState', ())
-print 'running bcfg...'
+print("running bcfg...")
os.system('bcfg2 -q -d -v')
-
diff --git a/tools/hostinfo.py b/tools/hostinfo.py
index d2b3628a3..8ae5c4df6 100755
--- a/tools/hostinfo.py
+++ b/tools/hostinfo.py
@@ -12,9 +12,14 @@ host_attribs = ["hostname", "whatami", "netgroup", "security_class",
"support", "csi", "memory", "printq", "dhcp", "outbound_smtp",
"primary_user", "administrator", "location",
"comments", "last", "expiration_date"]
-dispatch = {'mac_addr':' i.', 'hdwr_type':' i.', 'ip_addr':' p.',
- 'name':' n.', 'dns_view':' n.',
- 'cname':' c.', 'mx':' m.', 'priority':' m.'}
+dispatch = {'mac_addr': ' i.',
+ 'hdwr_type': ' i.',
+ 'ip_addr': ' p.',
+ 'name': ' n.',
+ 'dns_view': ' n.',
+ 'cname': ' c.',
+ 'mx': ' m.',
+ 'priority': ' m.'}
def pinger(hosts):
@@ -25,6 +30,7 @@ def pinger(hosts):
system("fping -r 1" + hostnames)
sys.exit()
+
def get_query(arguments):
"""Parses the command line options and returns the necessary
data for an SQL query"""
@@ -51,28 +57,36 @@ def get_query(arguments):
operator = "<>"
querysplit = arguments[querypos].split("==")
if querysplit[0] in host_attribs:
- querystring = " h.%s%s\'%s\'" % (querysplit[0], operator, querysplit[1])
+ querystring = " h.%s%s\'%s\'" % (querysplit[0],
+ operator,
+ querysplit[1])
elif querysplit[0] in dispatch:
querystring = dispatch[querysplit[0]]
- querystring += "%s%s\'%s\'" % (querysplit[0], operator, querysplit[1])
+ querystring += "%s%s\'%s\'" % (querysplit[0],
+ operator,
+ querysplit[1])
elif len(arguments[querypos].split("=")) > 1:
notstring = ''
if notflag:
notstring = 'NOT '
querysplit = arguments[querypos].split("=")
if querysplit[0] in host_attribs:
- querystring = " h.%s %sLIKE \'%%%%%s%%%%\'" % (querysplit[0], notstring, querysplit[1])
+ querystring = " h.%s %sLIKE \'%%%%%s%%%%\'" % (querysplit[0],
+ notstring,
+ querysplit[1])
elif querysplit[0] in dispatch:
querystring = dispatch[querysplit[0]]
- querystring += "%s %sLIKE \'%%%%%s%%%%\'" % (querysplit[0], notstring, querysplit[1])
+ querystring += "%s %sLIKE \'%%%%%s%%%%\'" % (querysplit[0],
+ notstring,
+ querysplit[1])
else:
- print "ERROR: bad query format"
+ print("ERROR: bad query format")
sys.exit()
if not querystring:
- print "ERROR: bad query format"
+ print("ERROR: bad query format")
sys.exit()
resultset.append((querystring, logic))
- arguments = arguments[querypos+1:]
+ arguments = arguments[querypos + 1:]
if arguments == [] or arguments[0] not in logic_ops:
break
return resultset
@@ -82,12 +96,12 @@ try:
'q:', ['showfields', 'fields', 'ping', 'summary'])
cursor = connection.cursor()
if ('--showfields', '') in opts:
- print "\nhost fields:\n"
+ print("\nhost fields:\n")
for field in host_attribs:
- print field
+ print(field)
for field in dispatch:
- print field
- print ''
+ print(field)
+ print("")
sys.exit()
if opts[0][0] == '-q':
results = get_query(sys.argv[2:])
@@ -113,19 +127,19 @@ try:
cursor.execute(query)
results = cursor.fetchall()
if not results:
- print "No matches were found for your query"
+ print("No matches were found for your query")
sys.exit()
- print '\n%-32s %-10s %-10s %-10s' % ('Hostname', 'Type', 'Location', 'User')
- print '================================ ========== ========== =========='
+ print("\n%-32s %-10s %-10s %-10s" % ('Hostname', 'Type', 'Location', 'User'))
+ print("================================ ========== ========== ==========")
for host in results:
- print '%-32s %-10s %-10s %-10s' % (host)
- print ''
+ print("%-32s %-10s %-10s %-10s" % (host))
+ print("")
elif ('--fields', '') in opts:
tolook = [arg for arg in args if arg in host_attribs or arg in dispatch]
fields = ""
fields = ", ".join(tolook)
if not fields:
- print "No valid fields were entered. exiting..."
+ print("No valid fields were entered. exiting...")
sys.exit()
query = """SELECT DISTINCT %s FROM (((((hostbase_host h
INNER JOIN hostbase_interface i ON h.id = i.host_id)
@@ -137,19 +151,18 @@ try:
WHERE %s ORDER BY h.hostname
""" % (fields, queryoptions)
-
cursor.execute(query)
results = cursor.fetchall()
last = results[0]
for field in results[0]:
- print repr(field) + "\t",
+ print(repr(field) + "\t")
for host in results:
if not host == last:
for field in host:
- print repr(field) + "\t",
+ print(repr(field) + "\t")
last = host
- print ''
+ print("")
else:
basequery = """SELECT DISTINCT h.hostname FROM (((((hostbase_host h
INNER JOIN hostbase_interface i ON h.id = i.host_id)
@@ -164,21 +177,21 @@ try:
results = cursor.fetchall()
if not results:
- print "No matches were found for your query"
+ print("No matches were found for your query")
sys.exit()
if ("--ping", '') in opts:
pinger(results)
for host in results:
- print host[0]
+ print(host[0])
+
-
except (GetoptError, IndexError):
- print "\nUsage: hostinfo.py -q <field>=[=]<value> [and/or <field>=<value> [--long option]]"
- print " hostinfo.py --showfields\tshows all data fields"
- print "\n long options:"
- print "\t --fields f1 f2 ...\tspecifies the fields displayed from the queried hosts"
- print "\t --summary\t\tprints out a predetermined set of fields"
- print "\t --ping\t\t\tuses fping to ping all queried hosts\n"
+ print("\nUsage: hostinfo.py -q <field>=[=]<value> [and/or <field>=<value> [--long option]]")
+ print(" hostinfo.py --showfields\tshows all data fields")
+ print("\n long options:")
+ print("\t --fields f1 f2 ...\tspecifies the fields displayed from the queried hosts")
+ print("\t --summary\t\tprints out a predetermined set of fields")
+ print("\t --ping\t\t\tuses fping to ping all queried hosts\n")
sys.exit()
diff --git a/tools/pkgmgr_gen.py b/tools/pkgmgr_gen.py
index d318e46c0..cc7ab3a4c 100755
--- a/tools/pkgmgr_gen.py
+++ b/tools/pkgmgr_gen.py
@@ -1,40 +1,44 @@
#!/usr/bin/python
-""" Program to generate a bcfg2 Pkgmgr configuration file from a list
- of directories that contain RPMS.
+"""Program to generate a bcfg2 Pkgmgr configuration file from a list
+ of directories that contain RPMS.
- All versions or only the latest may be included in the output.
- rpm.labelCompare is used to compare the package versions, so that
- a proper rpm version comparison is done (epoch:version-release).
+ All versions or only the latest may be included in the output.
+ rpm.labelCompare is used to compare the package versions, so that
+ a proper rpm version comparison is done (epoch:version-release).
- The output file may be formated for use with the RPM or Yum
- bcfg2 client drivers. The output can also contain the PackageList
- and nested group headers.
+ The output file may be formated for use with the RPM or Yum
+ bcfg2 client drivers. The output can also contain the PackageList
+ and nested group headers.
"""
__revision__ = '$Revision: $'
-import sys
-import os
-import rpm
-import optparse
+import collections
import datetime
import glob
-import urllib
import gzip
+import optparse
+import os
+import rpm
+import sys
+import urllib
import urlparse
from lxml.etree import parse
import xml.sax
from xml.sax.handler import ContentHandler
+
def info(object, spacing=10, collapse=1):
"""Print methods and doc strings.
Takes module, class, list, dictionary, or string.
"""
methodList = [method for method in dir(object)
- if callable(getattr(object, method))]
+ if isinstance(getattr(object, method),
+ collections.Callable)]
processFunc = collapse and (lambda s: " ".join(s.split())) or (lambda s: s)
- print "\n".join(["%s %s" %
+ print("\n".join(["%s %s" %
(method.ljust(spacing),
processFunc(str(getattr(object, method).__doc__)))
- for method in methodList])
+ for method in methodList]))
+
def readRpmHeader(ts, filename):
"""
@@ -43,19 +47,21 @@ def readRpmHeader(ts, filename):
try:
fd = os.open(filename, os.O_RDONLY)
except:
- print 'Failed to open RPM file %s' % filename
+ print("Failed to open RPM file %s" % filename)
h = ts.hdrFromFdno(fd)
os.close(fd)
return h
+
def sortedDictValues(adict):
"""
Sort a dictionary by its keys and return the items in sorted key order.
"""
- keys = adict.keys()
+ keys = list(adict.keys())
keys.sort()
- return map(adict.get, keys)
+ return list(map(adict.get, keys))
+
def cmpRpmHeader(a, b):
"""
@@ -79,6 +85,7 @@ def cmpRpmHeader(a, b):
ret = rpm.labelCompare((e1, v1, r1), (e2, v2, r2))
return ret
+
def loadRpms(dirs):
"""
dirs is a list of directories to search for rpms.
@@ -114,18 +121,18 @@ def loadRpms(dirs):
for dir in dirs:
if options.verbose:
- print 'Scanning directory: %s' % dir
+ print("Scanning directory: %s" % dir)
for file in [files for files in os.listdir(dir)
if files.endswith('.rpm')]:
- filename = os.path.join( dir, file )
+ filename = os.path.join(dir, file)
# Get the mtime of the RPM file.
file_mtime = datetime.date.fromtimestamp(os.stat(filename).st_mtime)
# Get the RPM header
- header = readRpmHeader( ts, filename )
+ header = readRpmHeader(ts, filename)
# Get what we are interesting in out of the header.
name = header[rpm.RPMTAG_NAME]
@@ -138,9 +145,13 @@ def loadRpms(dirs):
if subarch in subarchs or 'all' in subarchs:
# Store what we want in our structure.
- packages.setdefault(name, []).append({'filename':file, 'mtime':file_mtime, 'name':name, \
- 'arch':subarch, 'epoch':epoch, 'version':version, \
- 'release':release})
+ packages.setdefault(name, []).append({'filename': file,
+ 'mtime': file_mtime,
+ 'name': name,
+ 'arch': subarch,
+ 'epoch': epoch,
+ 'version': version,
+ 'release': release})
# Print '.' for each package. stdio is line buffered, so have to flush it.
if options.verbose:
@@ -151,6 +162,7 @@ def loadRpms(dirs):
return packages
+
class pkgmgr_URLopener(urllib.FancyURLopener):
"""
Override default error handling so that we can see what the errors are.
@@ -159,57 +171,62 @@ class pkgmgr_URLopener(urllib.FancyURLopener):
"""
Override default error handling so that we can see what the errors are.
"""
- print "ERROR %s: Unable to retrieve %s" % (errcode, url)
+ print("ERROR %s: Unable to retrieve %s" % (errcode, url))
+
class PrimaryParser(ContentHandler):
def __init__(self, packages):
- self.inPackage = 0
- self.inName = 0
- self.inArch = 0
- self.packages = packages
+ self.inPackage = 0
+ self.inName = 0
+ self.inArch = 0
+ self.packages = packages
def startElement(self, name, attrs):
- if name == "package":
- self.package = {'file': None, 'name': '', 'subarch': '',
- 'epoch': None, 'version': None, 'release': None}
- self.inPackage = 1
- elif self.inPackage:
- if name == "name":
- self.inName = 1
- elif name == "arch":
- self.inArch = 1
- elif name == "version":
- self.package['epoch'] = attrs.getValue('epoch')
- self.package['version'] = attrs.getValue('ver')
- self.package['release'] = attrs.getValue('rel')
- elif name == "location":
- self.package['file'] = attrs.getValue('href')
+ if name == "package":
+ self.package = {'file': None, 'name': '', 'subarch': '',
+ 'epoch': None, 'version': None, 'release': None}
+ self.inPackage = 1
+ elif self.inPackage:
+ if name == "name":
+ self.inName = 1
+ elif name == "arch":
+ self.inArch = 1
+ elif name == "version":
+ self.package['epoch'] = attrs.getValue('epoch')
+ self.package['version'] = attrs.getValue('ver')
+ self.package['release'] = attrs.getValue('rel')
+ elif name == "location":
+ self.package['file'] = attrs.getValue('href')
def endElement(self, name):
- if name == "package":
- self.inPackage = 0
- # Only load RPMs with subarchitectures as calculated from the --archs option.
- if self.package['subarch'] in subarchs or 'all' in subarchs:
- self.packages.setdefault(self.package['name'], []).append(
- {'filename':self.package['file'], 'name':self.package['name'],
- 'arch':self.package['subarch'], 'epoch':self.package['epoch'],
- 'version':self.package['version'], 'release':self.package['release']})
- # Print '.' for each package. stdio is line buffered, so have to flush it.
- if options.verbose:
- sys.stdout.write('.')
- sys.stdout.flush()
- elif self.inPackage:
- if name == "name":
- self.inName = 0
- elif name == "arch":
- self.inArch = 0
+ if name == "package":
+ self.inPackage = 0
+ # Only load RPMs with subarchitectures as calculated from the --archs option.
+ if self.package['subarch'] in subarchs or 'all' in subarchs:
+ self.packages.setdefault(self.package['name'], []).append(
+ {'filename': self.package['file'],
+ 'name': self.package['name'],
+ 'arch': self.package['subarch'],
+ 'epoch': self.package['epoch'],
+ 'version': self.package['version'],
+ 'release': self.package['release']})
+ # Print '.' for each package. stdio is line buffered, so have to flush it.
+ if options.verbose:
+ sys.stdout.write('.')
+ sys.stdout.flush()
+ elif self.inPackage:
+ if name == "name":
+ self.inName = 0
+ elif name == "arch":
+ self.inArch = 0
def characters(self, content):
- if self.inPackage:
- if self.inName:
- self.package['name'] += content
- if self.inArch:
- self.package['subarch'] += content
+ if self.inPackage:
+ if self.inName:
+ self.package['name'] += content
+ if self.inArch:
+ self.package['subarch'] += content
+
def loadRepos(repolist):
'''
@@ -242,7 +259,7 @@ def loadRepos(repolist):
url = urlparse.urljoin(repo, './repodata/repomd.xml')
if options.verbose:
- print 'Loading repo metadata : %s' % url
+ print("Loading repo metadata : %s" % url)
try:
opener = pkgmgr_URLopener()
@@ -253,7 +270,7 @@ def loadRepos(repolist):
try:
tree = parse(file)
except IOError:
- print "ERROR: Unable to parse retrieved repomd.xml."
+ print("ERROR: Unable to parse retrieved repomd.xml.")
sys.exit()
repomd = tree.getroot()
@@ -266,7 +283,7 @@ def loadRepos(repolist):
url = urlparse.urljoin(repo, './' + primaryhref)
if options.verbose:
- print 'Loading : %s' % url
+ print("Loading : %s" % url)
try:
opener = pkgmgr_URLopener()
@@ -277,7 +294,7 @@ def loadRepos(repolist):
try:
repo_file = gzip.open(file)
except IOError:
- print "ERROR: Unable to parse retrieved file."
+ print("ERROR: Unable to parse retrieved file.")
sys.exit()
parser = xml.sax.make_parser()
@@ -289,6 +306,7 @@ def loadRepos(repolist):
repo_file.close()
return packages
+
def printInstance(instance, group_count):
"""
Print the details for a package instance with the appropriate indentation and
@@ -303,16 +321,17 @@ def printInstance(instance, group_count):
output_line = ''
if options.format == 'rpm':
- output_line = '%s<Instance simplefile=\'%s\' ' % ( indent * group_count , instance['filename'])
+ output_line = '%s<Instance simplefile=\'%s\' ' % (indent * group_count, instance['filename'])
else:
output_line = '%s<Instance ' % (indent * group_count)
if epoch:
output_line += 'epoch=\'%s\' ' % (epoch)
- output_line += 'version=\'%s\' release=\'%s\' arch=\'%s\'/>\n' % ( version, release, arch)
+ output_line += 'version=\'%s\' release=\'%s\' arch=\'%s\'/>\n' % (version, release, arch)
output.write(output_line)
+
def printPackage(entry, group_count):
"""
Print the details of a package with the appropriate indentation.
@@ -321,7 +340,7 @@ def printPackage(entry, group_count):
entry is a single package entry as created in loadRpms().
"""
output.write('%s<Package name=\'%s\' type=\'%s\'>\n' \
- % ( group_count * indent, entry[0]['name'], options.format) )
+ % (group_count * indent, entry[0]['name'], options.format))
subarch_dict = {}
arch_dict = {}
@@ -333,17 +352,17 @@ def printPackage(entry, group_count):
if instance['arch'] in subarch_dict:
subarch_dict[instance['arch']].append(instance)
else:
- subarch_dict[instance['arch']] = [ instance ]
+ subarch_dict[instance['arch']] = [instance]
# Keep track of the subarchitectures we have found in each architecture.
if subarch_mapping[instance['arch']] in arch_dict:
if instance['arch'] not in arch_dict[subarch_mapping[instance['arch']]]:
arch_dict[subarch_mapping[instance['arch']]].append(instance['arch'])
else:
- arch_dict[subarch_mapping[instance['arch']]] = [ instance['arch'] ]
+ arch_dict[subarch_mapping[instance['arch']]] = [instance['arch']]
# Only keep the 'highest' subarchitecture in each architecture.
- for arch in arch_dict.iterkeys():
+ for arch in arch_dict.keys():
if len(arch_dict[arch]) > 1:
arch_dict[arch].sort()
for s in arch_dict[arch][:-1]:
@@ -361,12 +380,13 @@ def printPackage(entry, group_count):
# Output the latest
printInstance(subarch_dict[arch][-1], group_count)
- output.write('%s</Package>\n' % ( group_count * indent ) )
+ output.write('%s</Package>\n' % (group_count * indent))
+
def main():
if options.verbose:
- print 'Loading package headers'
+ print("Loading package headers")
if options.rpmdirs:
package_dict = loadRpms(search_dirs)
@@ -374,18 +394,18 @@ def main():
package_dict = loadRepos(repos)
if options.verbose:
- print 'Processing package headers'
+ print("Processing package headers")
if options.pkgmgrhdr:
if options.format == "rpm":
- output.write("<PackageList uri='%s' priority='%s' type='rpm'>\n" % ( options.uri, options.priority ))
+ output.write("<PackageList uri='%s' priority='%s' type='rpm'>\n" % (options.uri, options.priority))
else:
- output.write("<PackageList priority='%s' type='yum'>\n" %( options.priority ))
+ output.write("<PackageList priority='%s' type='yum'>\n" % (options.priority))
group_count = 1
if groups_list:
for group in groups_list:
- output.write("%s<Group name='%s'>\n" % ( indent * group_count , group ) )
+ output.write("%s<Group name='%s'>\n" % (indent * group_count, group))
group_count = group_count + 1
# Process packages in name order
@@ -395,14 +415,14 @@ def main():
if groups_list:
group_count = group_count - 1
while group_count:
- output.write( '%s</Group>\n' % ( indent * group_count ) )
+ output.write('%s</Group>\n' % (indent * group_count))
group_count = group_count - 1
if options.pkgmgrhdr:
- output.write( '</PackageList>\n' )
+ output.write('</PackageList>\n')
if options.verbose:
- print '%i package instances were processed' % len(package_dict)
+ print("%i package instances were processed" % len(package_dict))
if __name__ == "__main__":
@@ -434,7 +454,7 @@ if __name__ == "__main__":
p.add_option('--format', '-f', action='store', \
default='yum', \
type='choice', \
- choices=('yum','rpm'), \
+ choices=('yum', 'rpm'), \
help='''Format of the Output. Choices are yum or rpm.
(Default: yum)
''')
@@ -469,7 +489,7 @@ if __name__ == "__main__":
p.add_option('--release', '-r', action='store', \
default='latest', \
type='choice', \
- choices=('all','latest'), \
+ choices=('all', 'latest'), \
help='''Which releases to include in the output. Choices are
all or latest. (Default: latest).''')
@@ -492,11 +512,12 @@ if __name__ == "__main__":
options, arguments = p.parse_args()
if options.pkgmgrhdr and options.format == 'rpm' and not options.uri:
- print "Option --uri must be specified to produce a PackageList Tag for rpm formatted files."
+ print("Option --uri must be specified to produce a PackageList Tag "
+ "for rpm formatted files.")
sys.exit(1)
if not options.rpmdirs and not options.yumrepos:
- print "One of --rpmdirs and --yumrepos must be specified"
+ print("One of --rpmdirs and --yumrepos must be specified")
sys.exit(1)
# Set up list of directories to search
@@ -505,9 +526,9 @@ if __name__ == "__main__":
for d in options.rpmdirs.split(','):
search_dirs += glob.glob(d)
if options.verbose:
- print 'The following directories will be scanned:'
+ print("The following directories will be scanned:")
for d in search_dirs:
- print ' %s' % d
+ print(" %s" % d)
# Setup list of repos
if options.yumrepos:
@@ -515,26 +536,30 @@ if __name__ == "__main__":
for r in options.yumrepos.split(','):
repos.append(r)
if options.verbose:
- print 'The following repositories will be scanned:'
+ print("The following repositories will be scanned:")
for d in repos:
- print ' %s' % d
+ print(" %s" % d)
# Set up list of architectures to include and some mappings
# to use later.
- arch_mapping = {'x86':['i686', 'i586', 'i486', 'i386', 'athlon'],
- 'x86_64':['x86_64'],
- 'ia64':['ia64'],
- 'ppc':['ppc'],
- 'ppc64':['ppc64'],
- 'sparc':['sparc'],
- 'noarch':['noarch']}
- subarch_mapping = {'i686':'x86', 'i586':'x86', 'i486':'x86', 'i386':'x86', 'athlon':'x86',
- 'x86_64':'x86_64',
- 'ia64':'ia64',
- 'ppc':'ppc',
- 'ppc64':'ppc64',
- 'sparc':'sparc',
- 'noarch':'noarch' }
+ arch_mapping = {'x86': ['i686', 'i586', 'i486', 'i386', 'athlon'],
+ 'x86_64': ['x86_64'],
+ 'ia64': ['ia64'],
+ 'ppc': ['ppc'],
+ 'ppc64': ['ppc64'],
+ 'sparc': ['sparc'],
+ 'noarch': ['noarch']}
+ subarch_mapping = {'i686': 'x86',
+ 'i586': 'x86',
+ 'i486': 'x86',
+ 'i386': 'x86',
+ 'athlon': 'x86',
+ 'x86_64': 'x86_64',
+ 'ia64': 'ia64',
+ 'ppc': 'ppc',
+ 'ppc64': 'ppc64',
+ 'sparc': 'sparc',
+ 'noarch': 'noarch'}
commandline_subarchs = options.archs.split(',')
arch_list = []
subarchs = []
@@ -543,7 +568,7 @@ if __name__ == "__main__":
else:
for s in commandline_subarchs:
if s not in subarch_mapping:
- print 'Error: Invalid subarchitecture specified: ', s
+ print("Error: Invalid subarchitecture specified: ", s)
sys.exit(1)
# Only allow one subarchitecture per architecture to be specified.
if s not in arch_list:
@@ -556,7 +581,8 @@ if __name__ == "__main__":
#if i != len(arch_mapping[subarch_mapping[s]]):
subarchs += arch_mapping[subarch_mapping[s]][i:]
else:
- print 'Error: Multiple subarchitecutes of the same architecture specified.'
+ print("Error: Multiple subarchitecutes of the same "
+ "architecture specified.")
sys.exit(1)
indent = ' ' * options.indent
@@ -572,4 +598,3 @@ if __name__ == "__main__":
output = sys.stdout
main()
-
diff --git a/tools/pkgmgr_update.py b/tools/pkgmgr_update.py
index 3d13b8e4a..319016599 100755
--- a/tools/pkgmgr_update.py
+++ b/tools/pkgmgr_update.py
@@ -13,24 +13,37 @@
__version__ = '0.1'
-import sys
-import os
-import rpm
-import optparse
import datetime
import glob
+import gzip
+import optparse
+import os
+import rpm
+import sys
+import urlparse
+import urllib
+
try:
- from lxml.etree import parse, XML, tostring
+ from lxml.etree import parse, tostring
except:
- from elementtree.ElementTree import parse, XML, tostring
-import urlparse, urllib, gzip
-
-installOnlyPkgs = ['kernel', 'kernel-bigmem', 'kernel-enterprise', 'kernel-smp',
- 'kernel-modules', 'kernel-debug', 'kernel-unsupported',
- 'kernel-source', 'kernel-devel', 'kernel-default',
- 'kernel-largesmp-devel', 'kernel-largesmp', 'kernel-xen',
+ from elementtree.ElementTree import parse, tostring
+
+installOnlyPkgs = ['kernel',
+ 'kernel-bigmem',
+ 'kernel-enterprise',
+ 'kernel-smp',
+ 'kernel-modules',
+ 'kernel-debug',
+ 'kernel-unsupported',
+ 'kernel-source',
+ 'kernel-devel',
+ 'kernel-default',
+ 'kernel-largesmp-devel',
+ 'kernel-largesmp',
+ 'kernel-xen',
'gpg-pubkey']
+
def readRpmHeader(ts, filename):
"""
Read an rpm header from an RPM file.
@@ -38,19 +51,21 @@ def readRpmHeader(ts, filename):
try:
fd = os.open(filename, os.O_RDONLY)
except:
- print 'Failed to open RPM file %s' % filename
+ print("Failed to open RPM file %s" % filename)
h = ts.hdrFromFdno(fd)
os.close(fd)
return h
+
def sortedDictValues(adict):
"""
Sort a dictionary by its keys and return the items in sorted key order.
"""
- keys = adict.keys()
+ keys = list(adict.keys())
keys.sort()
- return map(adict.get, keys)
+ return list(map(adict.get, keys))
+
def cmpRpmHeader(a, b):
"""
@@ -67,6 +82,7 @@ def cmpRpmHeader(a, b):
return rpm.labelCompare((e1, v1, r1), (e2, v2, r2))
+
def loadRpms(dirs):
"""
dirs is a list of directories to search for rpms.
@@ -103,18 +119,18 @@ def loadRpms(dirs):
for dir in dirs:
if options.verbose:
- print 'Scanning directory: %s' % dir
+ print("Scanning directory: %s" % dir)
for file in [files for files in os.listdir(dir)
if files.endswith('.rpm')]:
- filename = os.path.join( dir, file )
+ filename = os.path.join(dir, file)
# Get the mtime of the RPM file.
file_mtime = datetime.date.fromtimestamp(os.stat(filename).st_mtime)
# Get the RPM header
- header = readRpmHeader( ts, filename )
+ header = readRpmHeader(ts, filename)
# Get what we are interesting in out of the header.
name = header[rpm.RPMTAG_NAME]
@@ -124,9 +140,13 @@ def loadRpms(dirs):
subarch = header[rpm.RPMTAG_ARCH]
if name not in installOnlyPkgs:
- packages.setdefault(name, {}).setdefault(subarch, []).append({'filename':file, \
- 'mtime':file_mtime, 'name':name, 'arch':subarch, \
- 'epoch':epoch, 'version':version, 'release':release})
+ packages.setdefault(name, {}).setdefault(subarch, []).append({'filename': file,
+ 'mtime': file_mtime,
+ 'name': name,
+ 'arch': subarch,
+ 'epoch': epoch,
+ 'version': version,
+ 'release': release})
if options.verbose:
sys.stdout.write('.')
sys.stdout.flush()
@@ -135,6 +155,7 @@ def loadRpms(dirs):
return packages
+
class pkgmgr_URLopener(urllib.FancyURLopener):
"""
Override default error handling so that we can see what the errors are.
@@ -143,7 +164,8 @@ class pkgmgr_URLopener(urllib.FancyURLopener):
"""
Override default error handling so that we can see what the errors are.
"""
- print "ERROR %s: Unable to retrieve %s" % (errcode, url)
+ print("ERROR %s: Unable to retrieve %s" % (errcode, url))
+
def loadRepos(repolist):
"""
@@ -181,12 +203,12 @@ def loadRepos(repolist):
opener = pkgmgr_URLopener()
file, message = opener.retrieve(url)
except:
- sys.exit();
+ sys.exit()
try:
tree = parse(file)
except IOError:
- print "ERROR: Unable to parse retrieved repomd.xml."
+ print("ERROR: Unable to parse retrieved repomd.xml.")
sys.exit()
repomd = tree.getroot()
@@ -199,7 +221,7 @@ def loadRepos(repolist):
url = urlparse.urljoin(repo, './' + primaryhref)
if options.verbose:
- print 'Loading : %s' % url
+ print("Loading : %s" % url)
try:
opener = pkgmgr_URLopener()
@@ -211,7 +233,7 @@ def loadRepos(repolist):
repo_file = gzip.open(file)
tree = parse(repo_file)
except IOError:
- print "ERROR: Unable to parse retrieved file."
+ print("ERROR: Unable to parse retrieved file.")
sys.exit()
root = tree.getroot()
@@ -230,9 +252,12 @@ def loadRepos(repolist):
file = property.get('href')
if name not in installOnlyPkgs:
- packages.setdefault(name, {}).setdefault(subarch, []).append({'filename':file, \
- 'name':name, 'arch':subarch, \
- 'epoch':epoch, 'version':version, 'release':release})
+ packages.setdefault(name, {}).setdefault(subarch, []).append({'filename': file,
+ 'name': name,
+ 'arch': subarch,
+ 'epoch': epoch,
+ 'version': version,
+ 'release': release})
if options.verbose:
sys.stdout.write('.')
sys.stdout.flush()
@@ -241,6 +266,7 @@ def loadRepos(repolist):
return packages
+
def str_evra(instance):
"""
Convert evra dict entries to a string.
@@ -252,6 +278,7 @@ def str_evra(instance):
return '%s:%s-%s.%s' % (instance.get('epoch', '*'), instance.get('version', '*'),
instance.get('release', '*'), instance.get('arch', '*'))
+
def updatepkg(pkg):
"""
"""
@@ -266,8 +293,9 @@ def updatepkg(pkg):
latest = package_dict[name][arch][-1]
if cmpRpmHeader(inst, latest) == -1:
if options.verbose:
- print 'Found newer version of package %s' % name
- print ' Updating %s to %s' % (str_evra(inst), str_evra(latest))
+ print("Found newer version of package %s" % name)
+ print(" Updating %s to %s" % (str_evra(inst),
+ str_evra(latest)))
if latest['epoch'] != None:
inst.attrib['epoch'] = str(latest['epoch'])
inst.attrib['version'] = latest['version']
@@ -279,30 +307,32 @@ def updatepkg(pkg):
# if we find Ignore tags, then assume they're correct;
# otherwise, check the altconfigfile
if not ignoretags:
- altpkgs = alttree.xpath(".//Package[@name='%s'][Ignore]"%name)
+ altpkgs = alttree.xpath(".//Package[@name='%s'][Ignore]" % name)
if (len(altpkgs) == 1):
for ignoretag in altpkgs[0].xpath(".//Ignore"):
if options.verbose:
print(" Found Ignore tag in altconfigfile for package %s" % name)
pkg.append(ignoretag)
+
def main():
global package_dict
global alttree
if options.verbose:
- print 'Loading Pkgmgr config file %s.' % (options.configfile)
+ print("Loading Pkgmgr config file %s." % (options.configfile))
tree = parse(options.configfile)
config = tree.getroot()
if options.altconfigfile:
if options.verbose:
- print 'Loading Pkgmgr alternate config file %s.' % (options.altconfigfile)
+ print("Loading Pkgmgr alternate config file %s." %
+ (options.altconfigfile))
alttree = parse(options.altconfigfile)
if options.verbose:
- print 'Loading package headers'
+ print("Loading package headers")
if options.rpmdirs:
package_dict = loadRpms(search_dirs)
@@ -310,7 +340,7 @@ def main():
package_dict = loadRepos(repos)
if options.verbose:
- print 'Processing package headers'
+ print("Processing package headers")
for pkg in config.getiterator('Package'):
updatepkg(pkg)
@@ -348,11 +378,12 @@ if __name__ == "__main__":
options, arguments = p.parse_args()
if not options.configfile:
- print "An existing Pkgmgr configuration file must be specified with the -c option."
+ print("An existing Pkgmgr configuration file must be specified with "
+ "the -c option.")
sys.exit()
if not options.rpmdirs and not options.yumrepos:
- print "One of --rpmdirs and --yumrepos must be specified"
+ print("One of --rpmdirs and --yumrepos must be specified")
sys.exit(1)
# Set up list of directories to search
@@ -361,9 +392,9 @@ if __name__ == "__main__":
for d in options.rpmdirs.split(','):
search_dirs += glob.glob(d)
if options.verbose:
- print 'The following directories will be scanned:'
+ print("The following directories will be scanned:")
for d in search_dirs:
- print ' %s' % d
+ print(" %s" % d)
# Setup list of repos
if options.yumrepos:
@@ -371,9 +402,9 @@ if __name__ == "__main__":
for r in options.yumrepos.split(','):
repos.append(r)
if options.verbose:
- print 'The following repositories will be scanned:'
+ print("The following repositories will be scanned:")
for d in repos:
- print ' %s' % d
+ print(" %s" % d)
if options.outfile:
output = file(options.outfile, "w")
@@ -383,4 +414,3 @@ if __name__ == "__main__":
package_dict = {}
main()
-
diff --git a/tools/rpmlisting.py b/tools/rpmlisting.py
index 41b4772a0..afc9ebed5 100644
--- a/tools/rpmlisting.py
+++ b/tools/rpmlisting.py
@@ -2,7 +2,7 @@
import os
import sys
-import commands
+import subprocess
import getopt
import re
import datetime
@@ -11,7 +11,7 @@ from socket import gethostname
def run_or_die(command):
"""run a command, returning output. raise an exception if it fails."""
- (status, stdio) = commands.getstatusoutput(command)
+ (status, stdio) = subprocess.getstatusoutput(command)
if status != 0:
raise Exception("command '%s' failed with exit status %d and output '%s'" %
(command, status, stdio))
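
subprocess.getstatusoutput() is the Python 3 spelling of commands.getstatusoutput(); Python 2's subprocess module does not provide it. A hedged compatibility sketch (not part of this patch) built on Popen, which returns the exit code rather than the raw wait status commands used to return:

    import subprocess

    def getstatusoutput(command):
        """Run a shell command; return (exit code, combined stdout/stderr)."""
        proc = subprocess.Popen(command, shell=True,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
        output = proc.communicate()[0]
        if not isinstance(output, str):        # bytes on Python 3
            output = output.decode('utf-8', 'replace')
        return proc.returncode, output.rstrip('\n')
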
@@ -62,11 +62,15 @@ def verstr_cmp(a, b):
else:
return len(a_parts) - len(b_parts)
return ret
-
-
+
def subdivide(verstr):
- """subdivide takes a version or release string and attempts to subdivide it into components to facilitate sorting. The string is divided into a two level hierarchy of sub-parts. The upper level is subdivided by periods, and the lower level is subdivided by boundaries between digit, alpha, and other character groupings."""
+ """subdivide takes a version or release string and attempts to subdivide
+ it into components to facilitate sorting. The string is divided into a
+ two level hierarchy of sub-parts. The upper level is subdivided by
+ periods, and the lower level is subdivided by boundaries between digit,
+ alpha, and other character groupings.
+ """
parts = []
# parts is a list of lists representing the subsections which make up a version string.
# example:
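
A self-contained sketch of the two-level split the docstring describes (illustrative only; the module's own subdivide may differ in detail, for example by converting digit runs to integers):

    import re

    def subdivide_sketch(verstr):
        """Split on periods, then split each section at boundaries between
        digit runs, alpha runs, and runs of any other characters."""
        return [re.findall(r'[0-9]+|[a-zA-Z]+|[^0-9a-zA-Z]+', section)
                for section in verstr.split('.')]

    print(subdivide_sketch('2.6.18-194.el5'))
    # -> [['2'], ['6'], ['18', '-', '194'], ['el', '5']]
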
@@ -102,22 +106,39 @@ def subdivide(verstr):
return parts
-subarch_mapping = {'athlon':'x86', 'i686':'x86', 'i586':'x86', 'i486':'x86', 'i386':'x86', 'x86_64':'x86_64', 'noarch':'noarch'}
-arch_mapping = {'x86':['athlon','i686','i586','i486','i386'], 'x86_64':['x86_64'], 'noarch':['noarch']}
+subarch_mapping = {'athlon': 'x86',
+ 'i686': 'x86',
+ 'i586': 'x86',
+ 'i486': 'x86',
+ 'i386': 'x86',
+ 'x86_64': 'x86_64',
+ 'noarch': 'noarch'}
+arch_mapping = {'x86': ['athlon',
+ 'i686',
+ 'i586',
+ 'i486',
+ 'i386'],
+ 'x86_64': ['x86_64'],
+ 'noarch': ['noarch']}
def parse_rpm(path, filename):
- """read the name, version, release, and subarch of an rpm. this version reads the rpm headers."""
+ """read the name, version, release, and subarch of an rpm.
+ this version reads the rpm headers.
+ """
cmd = 'rpm --nosignature --queryformat \'%%{NAME} %%{VERSION} %%{RELEASE} %%{ARCH}\' -q -p %s/%s' % (path, filename)
output = run_or_die(cmd)
(name, version, release, subarch) = output.split()
- if subarch not in subarch_mapping.keys():
+ if subarch not in list(subarch_mapping.keys()):
raise Exception("%s/%s has invalid subarch %s" % (path, filename, subarch))
return (name, version, release, subarch)
-
+
def parse_rpm_filename(path, filename):
- """read the name, version, release, and subarch of an rpm. this version tries to parse the filename directly, and calls 'parse_rpm' as a fallback."""
+ """read the name, version, release, and subarch of an rpm.
+ this version tries to parse the filename directly, and calls
+ 'parse_rpm' as a fallback.
+ """
name, version, release, subarch = None, None, None, None
try:
(major, minor) = sys.version_info[:2]
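
A hedged sketch of the filename-first path that parse_rpm_filename describes, assuming the conventional name-version-release.subarch.rpm layout; the real function additionally validates the subarch against subarch_mapping and falls back to parse_rpm (the rpm --queryformat query above) when the filename does not split cleanly:

    def parse_rpm_filename_sketch(filename):
        """Split 'name-version-release.subarch.rpm' from the right, so dashes
        inside the package name survive."""
        if not filename.endswith('.rpm'):
            raise ValueError("not an rpm: %s" % filename)
        base, subarch = filename[:-len('.rpm')].rsplit('.', 1)
        name_version, release = base.rsplit('-', 1)
        name, version = name_version.rsplit('-', 1)
        return name, version, release, subarch

    print(parse_rpm_filename_sketch('bcfg2-server-1.1.2-1.noarch.rpm'))
    # -> ('bcfg2-server', '1.1.2', '1', 'noarch')
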
@@ -139,7 +160,9 @@ def parse_rpm_filename(path, filename):
def get_pkgs(rpmdir):
- """scan a dir of rpms and generate a pkgs structure. first try parsing the filename. if that fails, try parsing the rpm headers."""
+ """scan a dir of rpms and generate a pkgs structure. first try parsing
+ the filename. if that fails, try parsing the rpm headers.
+ """
pkgs = {}
"""
pkgs structure:
@@ -161,7 +184,11 @@ pkgs = {
rpms = [item for item in os.listdir(rpmdir) if item.endswith('.rpm')]
for filename in rpms:
(name, version, release, subarch) = parse_rpm_filename(rpmdir, filename)
- rpmblob = {'file':filename, 'name':name, 'version':version, 'release':release, 'subarch':subarch}
+ rpmblob = {'file': filename,
+ 'name': name,
+ 'version': version,
+ 'release': release,
+ 'subarch': subarch}
if name in pkgs:
pkgs[name].append(rpmblob)
else:
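
For orientation, the structure get_pkgs builds (keys copied from the rpmblob literal above; package data is hypothetical):

    pkgs = {
        'foo': [
            {'file': 'foo-1.0-1.i386.rpm', 'name': 'foo',
             'version': '1.0', 'release': '1', 'subarch': 'i386'},
            {'file': 'foo-1.0-1.x86_64.rpm', 'name': 'foo',
             'version': '1.0', 'release': '1', 'subarch': 'x86_64'},
        ],
    }
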
@@ -170,9 +197,11 @@ pkgs = {
def prune_pkgs_latest(pkgs):
- """prune a pkgs structure to contain only the latest version of each package (includes multiarch results)."""
+ """prune a pkgs structure to contain only the latest version
+ of each package (includes multiarch results).
+ """
latest_pkgs = {}
- for rpmblobs in pkgs.values():
+ for rpmblobs in list(pkgs.values()):
(major, minor) = sys.version_info[:2]
if major >= 2 and minor >= 4:
rpmblobs.sort(rpmblob_cmp, reverse=True)
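
The version_info branch above exists because list.sort() only grew its key/reverse keywords in Python 2.4. Note that Python 3 drops the comparison-function form entirely, so a fully PY3K-safe spelling would go through functools.cmp_to_key (Python 2.7/3.2 and later). A toy, self-contained sketch of that idiom, with num_cmp standing in for rpmblob_cmp:

    from functools import cmp_to_key

    def num_cmp(a, b):
        """Old-style comparison function, standing in for rpmblob_cmp."""
        return (a > b) - (a < b)

    blobs = [1, 3, 2]
    blobs.sort(key=cmp_to_key(num_cmp), reverse=True)
    print(blobs)    # -> [3, 2, 1]
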
@@ -180,16 +209,18 @@ def prune_pkgs_latest(pkgs):
rpmblobs.sort(rpmblob_cmp)
rpmblobs.reverse()
pkg_name = rpmblobs[0]['name']
- all_archs = [blob for blob in rpmblobs if blob['version'] == rpmblobs[0]['version'] and
+ all_archs = [blob for blob in rpmblobs if blob['version'] == rpmblobs[0]['version'] and
blob['release'] == rpmblobs[0]['release']]
latest_pkgs[pkg_name] = all_archs
return latest_pkgs
def prune_pkgs_archs(pkgs):
- """prune a pkgs structure to contain no more than one subarch per architecture for each set of packages."""
+ """prune a pkgs structure to contain no more than one subarch
+ per architecture for each set of packages.
+ """
pruned_pkgs = {}
- for rpmblobs in pkgs.values():
+ for rpmblobs in list(pkgs.values()):
pkg_name = rpmblobs[0]['name']
arch_sifter = {}
for challenger in rpmblobs:
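
A self-contained sketch of the sifting loop that follows: for each architecture, the blob whose subarch sits earliest in arch_mapping's preference list wins (mappings copied from above; the blob data is hypothetical):

    subarch_mapping = {'athlon': 'x86', 'i686': 'x86', 'i586': 'x86',
                       'i486': 'x86', 'i386': 'x86',
                       'x86_64': 'x86_64', 'noarch': 'noarch'}
    arch_mapping = {'x86': ['athlon', 'i686', 'i586', 'i486', 'i386'],
                    'x86_64': ['x86_64'], 'noarch': ['noarch']}

    rpmblobs = [{'name': 'foo', 'subarch': 'i386'},
                {'name': 'foo', 'subarch': 'i686'},
                {'name': 'foo', 'subarch': 'x86_64'}]

    arch_sifter = {}
    for challenger in rpmblobs:
        arch = subarch_mapping[challenger['subarch']]
        incumbent = arch_sifter.get(arch)
        if incumbent is None:
            arch_sifter[arch] = challenger
        else:
            subarchs = arch_mapping[arch]
            # a lower index means a more preferred subarch for this architecture
            if subarchs.index(challenger['subarch']) < subarchs.index(incumbent['subarch']):
                arch_sifter[arch] = challenger

    print(sorted(blob['subarch'] for blob in list(arch_sifter.values())))
    # -> ['i686', 'x86_64']  (one survivor per architecture)
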
@@ -203,13 +234,16 @@ def prune_pkgs_archs(pkgs):
incumbent_index = subarchs.index(incumbent['subarch'])
if challenger_index < incumbent_index:
arch_sifter[arch] = challenger
- pruned_pkgs[pkg_name] = arch_sifter.values()
+ pruned_pkgs[pkg_name] = list(arch_sifter.values())
return pruned_pkgs
def get_date_from_desc(date_desc):
- """calls the unix 'date' command to turn a date description into a python date object.
-example: get_date_from_desc("last sunday 1 week ago")"""
+ """calls the unix 'date' command to turn a date
+ description into a python date object.
+
+ example: get_date_from_desc("last sunday 1 week ago")
+ """
stdio = run_or_die('date -d "' + date_desc + '" "+%Y %m %d"')
(year_str, month_str, day_str) = stdio.split()
year = int(year_str)
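
The helper above shells out to GNU date; a quick, self-contained illustration of that round trip (requires GNU date on PATH; subprocess.check_output needs Python 2.7 or later):

    import datetime
    import subprocess

    stdio = subprocess.check_output(
        ['date', '-d', 'last sunday 1 week ago', '+%Y %m %d'])
    if not isinstance(stdio, str):      # bytes on Python 3
        stdio = stdio.decode()
    year, month, day = [int(part) for part in stdio.split()]
    print(datetime.date(year, month, day))
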
@@ -225,7 +259,9 @@ def get_mtime_date(path):
def prune_pkgs_timely(pkgs, start_date_desc=None, end_date_desc=None, rpmdir='.'):
- """prune a pkgs structure to contain only rpms with an mtime within a certain temporal window."""
+ """prune a pkgs structure to contain only rpms with
+ an mtime within a certain temporal window.
+ """
start_date = None
if start_date_desc != None:
start_date = get_date_from_desc(start_date_desc)
@@ -235,7 +271,7 @@ def prune_pkgs_timely(pkgs, start_date_desc=None, end_date_desc=None, rpmdir='.'
if start_date == None and end_date == None:
return pkgs
if start_date != None:
- for rpmblobs in pkgs.values():
+ for rpmblobs in list(pkgs.values()):
pkg_name = rpmblobs[0]['name']
timely_blobs = [blob for blob in rpmblobs if start_date < get_mtime_date(rpmdir + '/' + blob['file'])]
if len(timely_blobs) == 0:
@@ -243,7 +279,7 @@ def prune_pkgs_timely(pkgs, start_date_desc=None, end_date_desc=None, rpmdir='.'
else:
pkgs[pkg_name] = timely_blobs
if end_date != None:
- for rpmblobs in pkgs.values():
+ for rpmblobs in list(pkgs.values()):
pkg_name = rpmblobs[0]['name']
timely_blobs = [blob for blob in rpmblobs if get_mtime_date(rpmdir + '/' + blob['file']) <= end_date]
if len(timely_blobs) == 0:
@@ -256,7 +292,7 @@ def prune_pkgs_timely(pkgs, start_date_desc=None, end_date_desc=None, rpmdir='.'
# from http://aspn.activestate.com/ASPN/Python/Cookbook/Recipe/52306
def sorted_values(adict):
"""return a list of values from a dict, sorted by key."""
- items = adict.items()
+ items = list(adict.items())
items.sort()
return [value for key, value in items]
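
With the sorted() builtin available (Python 2.4 and later), the recipe above shrinks to a one-line equivalent:

    def sorted_values(adict):
        """Return a list of values from a dict, sorted by key."""
        return [adict[key] for key in sorted(adict)]

    print(sorted_values({'b': 2, 'a': 1, 'c': 3}))    # -> [1, 2, 3]
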
@@ -278,9 +314,9 @@ def scan_rpm_dir(rpmdir, uri, group, priority=0, output=sys.stdout, start_date_d
subarchs = [blob['subarch'] for blob in rpmblobs]
subarchs.sort()
multiarch_string = ' '.join(subarchs)
- pattern_string = '\.(%s)\.rpm$' % '|'.join(subarchs) # e.g., '\.(i386|x86_64)\.rpm$'
+ pattern_string = '\.(%s)\.rpm$' % '|'.join(subarchs) # e.g., '\.(i386|x86_64)\.rpm$'
pattern = re.compile(pattern_string)
- multiarch_file = pattern.sub('.%(arch)s.rpm', rpmblob['file']) # e.g., 'foo-1.0-1.%(arch)s.rpm'
+ multiarch_file = pattern.sub('.%(arch)s.rpm', rpmblob['file']) # e.g., 'foo-1.0-1.%(arch)s.rpm'
output.write(' <Package name="%s" file="%s" version="%s-%s" multiarch="%s"/>\n' %
(rpmblob['name'], multiarch_file, rpmblob['version'], rpmblob['release'], multiarch_string))
output.write(' </Group>\n')
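
The inline comments above give the intended shapes; a self-contained sketch of the substitution, plus how the resulting %(arch)s template would presumably be filled back in by a consumer:

    import re

    subarchs = ['i386', 'x86_64']
    pattern = re.compile(r'\.(%s)\.rpm$' % '|'.join(subarchs))   # \.(i386|x86_64)\.rpm$
    multiarch_file = pattern.sub('.%(arch)s.rpm', 'foo-1.0-1.x86_64.rpm')
    print(multiarch_file)                     # -> foo-1.0-1.%(arch)s.rpm
    print(multiarch_file % {'arch': 'i386'})  # -> foo-1.0-1.i386.rpm
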
@@ -300,7 +336,7 @@ if __name__ == "__main__":
sys.exit(1)
group = "base"
- uri = "http://"+gethostname()+"/rpms"
+ uri = "http://" + gethostname() + "/rpms"
rpmdir = "."
priority = "0"
output = None
@@ -320,6 +356,6 @@ if __name__ == "__main__":
if output == None:
output = sys.stdout
else:
- output = file(output,"w")
+ output = file(output, "w")
scan_rpm_dir(rpmdir, uri, group, priority, output)
diff --git a/tools/yum-listpkgs-xml.py b/tools/yum-listpkgs-xml.py
index c324d0889..60d440ddf 100644
--- a/tools/yum-listpkgs-xml.py
+++ b/tools/yum-listpkgs-xml.py
@@ -3,40 +3,41 @@ import sys
sys.path.append('/usr/bin/')
sys.path.append('/usr/share/yum-cli')
-import yum
import yummain
+
def mySimpleList(self, pkg):
- print "<Package name='%s' version='%s'/>" % (pkg.name, pkg.printVer())
-
+ print("<Package name='%s' version='%s'/>" % (pkg.name, pkg.printVer()))
+
+
def myListPkgs(self, lst, description, outputType):
- """outputs based on whatever outputType is. Current options:
- 'list' - simple pkg list
- 'info' - similar to rpm -qi output"""
-
- if outputType in ['list', 'info']:
- thingslisted = 0
- if len(lst) > 0:
- thingslisted = 1
- #print '%s' % description
- from yum.misc import sortPkgObj
- lst.sort(sortPkgObj)
- for pkg in lst:
- if outputType == 'list':
- self.simpleList(pkg)
- elif outputType == 'info':
- self.infoOutput(pkg)
- else:
- pass
-
- if thingslisted == 0:
- return 1, ['No Packages to list']
+ """outputs based on whatever outputType is. Current options:
+ 'list' - simple pkg list
+ 'info' - similar to rpm -qi output"""
+
+ if outputType in ['list', 'info']:
+ thingslisted = 0
+ if len(lst) > 0:
+ thingslisted = 1
+ #print '%s' % description
+ from yum.misc import sortPkgObj
+ lst.sort(sortPkgObj)
+ for pkg in lst:
+ if outputType == 'list':
+ self.simpleList(pkg)
+ elif outputType == 'info':
+ self.infoOutput(pkg)
+ else:
+ pass
+
+ if thingslisted == 0:
+ return 1, ['No Packages to list']
yummain.cli.output.YumOutput.listPkgs = myListPkgs
yummain.cli.output.YumOutput.simpleList = mySimpleList
try:
- sys.argv = [sys.argv[0],'-d','0','list']
+ sys.argv = [sys.argv[0], '-d', '0', 'list']
yummain.main(sys.argv[1:])
except KeyboardInterrupt, e:
print >> sys.stderr, "\n\nExiting on user cancel."
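
The two assignments above replace methods on yum's YumOutput class before yummain runs, which is why the script emits Package elements instead of yum's normal listing. A toy, self-contained sketch of that override technique (the class and data here are hypothetical, not yum's API):

    class Output(object):
        def simpleList(self, pkg):
            print(pkg)

    def mySimpleList(self, pkg):
        # replacement with the same signature as the original method
        print("<Package name='%s'/>" % pkg)

    Output.simpleList = mySimpleList      # patch the class, as done above
    Output().simpleList('bash')           # -> <Package name='bash'/>
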