author     Narayan Desai <desai@mcs.anl.gov>  2011-05-10 11:24:28 -0500
committer  Narayan Desai <desai@mcs.anl.gov>  2011-05-10 11:24:28 -0500
commit     0e75875e9bd9900a6a3c7ab118c448e48829eaef (patch)
tree       391204747f48598c4e978d3724afbd5b8aa1d12c /src/lib/Server/Plugins
parent     f2d218ccd2de93ef639347933ba127ef081b4401 (diff)
parent     91634f9a3b888eee3cd5f9a777fcb075fc666c9a (diff)
Merge branch 'master' of git.mcs.anl.gov:bcfg2
Diffstat (limited to 'src/lib/Server/Plugins')
-rw-r--r--  src/lib/Server/Plugins/Account.py        |  38
-rw-r--r--  src/lib/Server/Plugins/Base.py           |  12
-rw-r--r--  src/lib/Server/Plugins/Bundler.py        |   6
-rw-r--r--  src/lib/Server/Plugins/Cfg.py            |  63
-rw-r--r--  src/lib/Server/Plugins/DBStats.py        |   6
-rw-r--r--  src/lib/Server/Plugins/Decisions.py      |   3
-rw-r--r--  src/lib/Server/Plugins/Deps.py           |  21
-rw-r--r--  src/lib/Server/Plugins/Editor.py         |  20
-rw-r--r--  src/lib/Server/Plugins/GroupPatterns.py  |  16
-rw-r--r--  src/lib/Server/Plugins/Hostbase.py       | 104
-rw-r--r--  src/lib/Server/Plugins/Ldap.py           |  14
-rw-r--r--  src/lib/Server/Plugins/Metadata.py       |  79
-rw-r--r--  src/lib/Server/Plugins/NagiosGen.py      | 209
-rw-r--r--  src/lib/Server/Plugins/Ohai.py           |   8
-rw-r--r--  src/lib/Server/Plugins/Packages.py       |  83
-rw-r--r--  src/lib/Server/Plugins/Pkgmgr.py         |  49
-rw-r--r--  src/lib/Server/Plugins/Probes.py         |  16
-rw-r--r--  src/lib/Server/Plugins/Properties.py     |  49
-rw-r--r--  src/lib/Server/Plugins/SGenshi.py        |  10
-rw-r--r--  src/lib/Server/Plugins/SSHbase.py        |  38
-rw-r--r--  src/lib/Server/Plugins/SSLCA.py          |  49
-rw-r--r--  src/lib/Server/Plugins/Snapshots.py      |  45
-rw-r--r--  src/lib/Server/Plugins/Statistics.py     |   4
-rw-r--r--  src/lib/Server/Plugins/Svn2.py           |  44
-rw-r--r--  src/lib/Server/Plugins/TCheetah.py       |   8
-rw-r--r--  src/lib/Server/Plugins/TGenshi.py        |  21
26 files changed, 634 insertions, 381 deletions
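
Most hunks below apply the same Python 2/3 compatibility idioms across the server plugins: catching exceptions without the py2-only "except Foo, e:" syntax, wrapping dict view methods in list(), and decoding bytes through a small u_str() helper (added to Cfg.py in this merge). The following minimal sketch illustrates those idioms outside the patch; it is not part of the commit, and read_config() and its filename argument are made up for the example.

    import sys

    def u_str(string, encoding):
        # Decode bytes to text on both Python 2 and 3, mirroring the
        # helper introduced in Cfg.py below.
        if sys.hexversion >= 0x03000000:
            return str(string, encoding)
        else:
            return unicode(string, encoding)  # reached on Python 2 only

    def read_config(path, encoding='utf-8'):
        # Hypothetical caller showing the version-agnostic exception
        # capture used throughout this merge instead of "except IOError, e:".
        try:
            data = open(path, 'rb').read()
        except IOError:
            e = sys.exc_info()[1]
            print("Failed to read %s: %s" % (path, e))
            return None
        return u_str(data, encoding)

    # Iterating a dict on both versions: wrap the view method in list(),
    # as the hunks below do for iteritems()/keys()/values().
    settings = {'owner': 'root', 'group': 'root', 'perms': '0644'}
    for key, value in list(settings.items()):
        print("%s = %s" % (key, value))
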
diff --git a/src/lib/Server/Plugins/Account.py b/src/lib/Server/Plugins/Account.py
index e3ea58761..f67819b9d 100644
--- a/src/lib/Server/Plugins/Account.py
+++ b/src/lib/Server/Plugins/Account.py
@@ -3,6 +3,7 @@ __revision__ = '$Revision$'
import Bcfg2.Server.Plugin
+
class Account(Bcfg2.Server.Plugin.Plugin,
Bcfg2.Server.Plugin.Generator):
"""This module generates account config files,
@@ -21,13 +22,14 @@ class Account(Bcfg2.Server.Plugin.Plugin,
def __init__(self, core, datastore):
Bcfg2.Server.Plugin.Plugin.__init__(self, core, datastore)
Bcfg2.Server.Plugin.Generator.__init__(self)
- self.Entries = {'ConfigFile':{'/etc/passwd':self.from_yp_cb,
- '/etc/group':self.from_yp_cb,
- '/etc/security/limits.conf':self.gen_limits_cb,
- '/root/.ssh/authorized_keys':self.gen_root_keys_cb,
- '/etc/sudoers':self.gen_sudoers}}
+ self.Entries = {'ConfigFile': {'/etc/passwd': self.from_yp_cb,
+ '/etc/group': self.from_yp_cb,
+ '/etc/security/limits.conf': self.gen_limits_cb,
+ '/root/.ssh/authorized_keys': self.gen_root_keys_cb,
+ '/etc/sudoers': self.gen_sudoers}}
try:
- self.repository = Bcfg2.Server.Plugin.DirectoryBacked(self.data, self.core.fam)
+ self.repository = Bcfg2.Server.Plugin.DirectoryBacked(self.data,
+ self.core.fam)
except:
self.logger.error("Failed to load repos: %s, %s" % \
(self.data, "%s/ssh" % (self.data)))
@@ -38,9 +40,11 @@ class Account(Bcfg2.Server.Plugin.Plugin,
fname = entry.attrib['name'].split('/')[-1]
entry.text = self.repository.entries["static.%s" % (fname)].data
entry.text += self.repository.entries["dyn.%s" % (fname)].data
- perms = {'owner':'root', 'group':'root', 'perms':'0644'}
+ perms = {'owner': 'root',
+ 'group': 'root',
+ 'perms': '0644'}
[entry.attrib.__setitem__(key, value) for (key, value) in \
- perms.iteritems()]
+ list(perms.items())]
def gen_limits_cb(self, entry, metadata):
"""Build limits entries based on current ACLs."""
@@ -50,9 +54,11 @@ class Account(Bcfg2.Server.Plugin.Plugin,
self.repository.entries["useraccess"].data.split()]
users = [user for (user, host) in \
useraccess if host == metadata.hostname.split('.')[0]]
- perms = {'owner':'root', 'group':'root', 'perms':'0600'}
+ perms = {'owner': 'root',
+ 'group': 'root',
+ 'perms': '0600'}
[entry.attrib.__setitem__(key, value) for (key, value) in \
- perms.iteritems()]
+ list(perms.items())]
entry.text += "".join(["%s hard maxlogins 1024\n" % uname for uname in superusers + users])
if "*" not in users:
entry.text += "* hard maxlogins 0\n"
@@ -71,9 +77,11 @@ class Account(Bcfg2.Server.Plugin.Plugin,
entry.text = "".join([rdata["%s.key" % user].data for user \
in superusers if \
("%s.key" % user) in rdata])
- perms = {'owner':'root', 'group':'root', 'perms':'0600'}
+ perms = {'owner': 'root',
+ 'group': 'root',
+ 'perms': '0600'}
[entry.attrib.__setitem__(key, value) for (key, value) \
- in perms.iteritems()]
+ in list(perms.items())]
def gen_sudoers(self, entry, metadata):
"""Build root authorized keys file based on current ACLs."""
@@ -88,6 +96,8 @@ class Account(Bcfg2.Server.Plugin.Plugin,
entry.text = self.repository.entries['static.sudoers'].data
entry.text += "".join(["%s ALL=(ALL) ALL\n" % uname \
for uname in superusers])
- perms = {'owner':'root', 'group':'root', 'perms':'0440'}
+ perms = {'owner': 'root',
+ 'group': 'root',
+ 'perms': '0440'}
[entry.attrib.__setitem__(key, value) for (key, value) \
- in perms.iteritems()]
+ in list(perms.items())]
diff --git a/src/lib/Server/Plugins/Base.py b/src/lib/Server/Plugins/Base.py
index 8e5ca1cd9..5e7d89727 100644
--- a/src/lib/Server/Plugins/Base.py
+++ b/src/lib/Server/Plugins/Base.py
@@ -1,9 +1,15 @@
"""This module sets up a base list of configuration entries."""
__revision__ = '$Revision$'
-import Bcfg2.Server.Plugin
import copy
import lxml.etree
+import sys
+# py3k compatibility
+if sys.hexversion >= 0x03000000:
+ from functools import reduce
+
+import Bcfg2.Server.Plugin
+
class Base(Bcfg2.Server.Plugin.Plugin,
Bcfg2.Server.Plugin.Structure,
@@ -31,8 +37,8 @@ class Base(Bcfg2.Server.Plugin.Plugin,
def BuildStructures(self, metadata):
"""Build structures for client described by metadata."""
ret = lxml.etree.Element("Independent", version='2.0')
- fragments = reduce(lambda x, y: x+y,
+ fragments = reduce(lambda x, y: x + y,
[base.Match(metadata) for base
- in self.entries.values()], [])
+ in list(self.entries.values())], [])
[ret.append(copy.deepcopy(frag)) for frag in fragments]
return [ret]
diff --git a/src/lib/Server/Plugins/Bundler.py b/src/lib/Server/Plugins/Bundler.py
index 3f88fe26b..01ad3c78b 100644
--- a/src/lib/Server/Plugins/Bundler.py
+++ b/src/lib/Server/Plugins/Bundler.py
@@ -4,6 +4,7 @@ __revision__ = '$Revision$'
import copy
import lxml.etree
import re
+import sys
import Bcfg2.Server.Plugin
@@ -73,14 +74,15 @@ class Bundler(Bcfg2.Server.Plugin.Plugin,
"""Build all structures for client (metadata)."""
bundleset = []
for bundlename in metadata.bundles:
- entries = [item for (key, item) in self.entries.iteritems() if \
+ entries = [item for (key, item) in list(self.entries.items()) if \
self.patterns.match(key).group('name') == bundlename]
if len(entries) == 0:
continue
elif len(entries) == 1:
try:
bundleset.append(entries[0].get_xml_value(metadata))
- except genshi.template.base.TemplateError, t:
+ except genshi.template.base.TemplateError:
+ t = sys.exc_info()[1]
self.logger.error("Bundler: Failed to template genshi bundle %s" \
% (bundlename))
self.logger.error(t)
diff --git a/src/lib/Server/Plugins/Cfg.py b/src/lib/Server/Plugins/Cfg.py
index f851b7914..41cf6c9c1 100644
--- a/src/lib/Server/Plugins/Cfg.py
+++ b/src/lib/Server/Plugins/Cfg.py
@@ -6,6 +6,7 @@ import logging
import lxml
import os
import re
+import sys
import tempfile
import Bcfg2.Server.Plugin
@@ -13,15 +14,21 @@ import Bcfg2.Server.Plugin
try:
import genshi.core
import genshi.input
- from genshi.template import TemplateLoader, \
- TextTemplate, MarkupTemplate, TemplateError
- from genshi.template import NewTextTemplate
+ from genshi.template import TemplateLoader, NewTextTemplate
have_genshi = True
except:
have_genshi = False
logger = logging.getLogger('Bcfg2.Plugins.Cfg')
+
+def u_str(string, encoding):
+ if sys.hexversion >= 0x03000000:
+ return str(string, encoding)
+ else:
+ return unicode(string, encoding)
+
+
# snipped from TGenshi
def removecomment(stream):
"""A genshi filter that removes comments from the stream."""
@@ -30,6 +37,7 @@ def removecomment(stream):
continue
yield kind, data, pos
+
def process_delta(data, delta):
if not delta.specific.delta:
return data
@@ -60,13 +68,15 @@ def process_delta(data, delta):
output = open(basefile.name, 'r').read()
[os.unlink(fname) for fname in [basefile.name, dfile.name]]
if ret >> 8 != 0:
- raise Bcfg2.Server.Plugin.PluginExecutionError, ('delta', delta)
+ raise Bcfg2.Server.Plugin.PluginExecutionError('delta', delta)
return output
+
class CfgMatcher:
+
def __init__(self, fname):
name = re.escape(fname)
- self.basefile_reg = re.compile('^(?P<basename>%s)(|\\.H_(?P<hostname>\S+)|.G(?P<prio>\d+)_(?P<group>\S+))(?P<genshi>\\.genshi)?$' % name)
+ self.basefile_reg = re.compile('^(?P<basename>%s)(|\\.H_(?P<hostname>\S+?)|.G(?P<prio>\d+)_(?P<group>\S+?))(?P<genshi>\\.genshi)?$' % name)
self.delta_reg = re.compile('^(?P<basename>%s)(|\\.H_(?P<hostname>\S+)|\\.G(?P<prio>\d+)_(?P<group>\S+))\\.(?P<delta>(cat|diff))$' % name)
self.cat_count = fname.count(".cat")
self.diff_count = fname.count(".diff")
@@ -77,7 +87,9 @@ class CfgMatcher:
return self.delta_reg.match(fname)
return self.basefile_reg.match(fname)
+
class CfgEntrySet(Bcfg2.Server.Plugin.EntrySet):
+
def __init__(self, basename, path, entry_type, encoding):
Bcfg2.Server.Plugin.EntrySet.__init__(self, basename, path,
entry_type, encoding)
@@ -87,15 +99,18 @@ class CfgEntrySet(Bcfg2.Server.Plugin.EntrySet):
return cmp(one.specific, other.specific)
def get_pertinent_entries(self, metadata):
- '''return a list of all entries pertinent to a client => [base, delta1, delta2]'''
- matching = [ent for ent in self.entries.values() if \
+ """return a list of all entries pertinent
+ to a client => [base, delta1, delta2]
+ """
+ matching = [ent for ent in list(self.entries.values()) if \
ent.specific.matches(metadata)]
matching.sort(self.sort_by_specific)
- non_delta = [matching.index(m) for m in matching if not m.specific.delta]
+ non_delta = [matching.index(m) for m in matching
+ if not m.specific.delta]
if not non_delta:
raise Bcfg2.Server.Plugin.PluginExecutionError
base = min(non_delta)
- used = matching[:base+1]
+ used = matching[:base + 1]
used.reverse()
return used
@@ -113,17 +128,19 @@ class CfgEntrySet(Bcfg2.Server.Plugin.EntrySet):
template_cls = NewTextTemplate
loader = TemplateLoader()
template = loader.load(basefile.name, cls=template_cls,
- encoding=self.encoding)
- stream = template.generate( \
- name=entry.get('name'), metadata=metadata,
- path=basefile.name).filter(removecomment)
+ encoding=self.encoding)
+ fname = entry.get('realname', entry.get('name'))
+ stream = template.generate(name=fname,
+ metadata=metadata,
+ path=basefile.name).filter(removecomment)
try:
data = stream.render('text', strip_whitespace=False)
except TypeError:
data = stream.render('text')
if data == '':
entry.set('empty', 'true')
- except Exception, e:
+ except Exception:
+ e = sys.exc_info()[1]
logger.error("Cfg: genshi exception: %s" % e)
raise Bcfg2.Server.Plugin.PluginExecutionError
else:
@@ -136,7 +153,13 @@ class CfgEntrySet(Bcfg2.Server.Plugin.EntrySet):
if entry.get('encoding') == 'base64':
entry.text = binascii.b2a_base64(data)
else:
- entry.text = unicode(data, self.encoding)
+ try:
+ entry.text = u_str(data, self.encoding)
+ except UnicodeDecodeError:
+ e = sys.exc_info()[1]
+ logger.error("Failed to decode %s: %s" % (entry.get('name'), e))
+ logger.error("Please verify you are using the proper encoding.")
+ raise Bcfg2.Server.Plugin.PluginExecutionError
if entry.text in ['', None]:
entry.set('empty', 'true')
@@ -168,7 +191,8 @@ class CfgEntrySet(Bcfg2.Server.Plugin.EntrySet):
open(name, 'w').write(new_entry['text'])
if log:
logger.info("Wrote file %s" % name)
- badattr = [attr for attr in ['owner', 'group', 'perms'] if attr in new_entry]
+ badattr = [attr for attr in ['owner', 'group', 'perms']
+ if attr in new_entry]
if badattr:
metadata_updates = {}
metadata_updates.update(self.metadata)
@@ -178,12 +202,13 @@ class CfgEntrySet(Bcfg2.Server.Plugin.EntrySet):
infotag = lxml.etree.SubElement(infoxml, 'Info')
[infotag.attrib.__setitem__(attr, metadata_updates[attr]) \
for attr in metadata_updates]
- ofile = open(self.path + "/info.xml","w")
+ ofile = open(self.path + "/info.xml", "w")
ofile.write(lxml.etree.tostring(infoxml, pretty_print=True))
ofile.close()
if log:
logger.info("Wrote file %s" % (self.path + "/info.xml"))
+
class Cfg(Bcfg2.Server.Plugin.GroupSpool,
Bcfg2.Server.Plugin.PullTarget):
"""This generator in the configuration file repository for Bcfg2."""
@@ -197,4 +222,6 @@ class Cfg(Bcfg2.Server.Plugin.GroupSpool,
return self.entries[entry.get('name')].list_accept_choices(metadata)
def AcceptPullData(self, specific, new_entry, log):
- return self.entries[new_entry.get('name')].write_update(specific, new_entry, log)
+ return self.entries[new_entry.get('name')].write_update(specific,
+ new_entry,
+ log)
diff --git a/src/lib/Server/Plugins/DBStats.py b/src/lib/Server/Plugins/DBStats.py
index 27696a978..5ef1920e1 100644
--- a/src/lib/Server/Plugins/DBStats.py
+++ b/src/lib/Server/Plugins/DBStats.py
@@ -33,7 +33,8 @@ class DBStats(Bcfg2.Server.Plugin.Plugin,
logger.debug("Searching for new models to add to the statistics database")
try:
update_database()
- except Exception, inst:
+ except Exception:
+ inst = sys.exc_info()[1]
logger.debug(str(inst))
logger.debug(str(type(inst)))
@@ -61,7 +62,8 @@ class DBStats(Bcfg2.Server.Plugin.Plugin,
logger.info("Imported data for %s in %s seconds" \
% (metadata.hostname, time.time() - start))
return
- except MultipleObjectsReturned, e:
+ except MultipleObjectsReturned:
+ e = sys.exc_info()[1]
logger.error("DBStats: MultipleObjectsReturned while handling %s: %s" % \
(metadata.hostname, e))
logger.error("DBStats: Data is inconsistent")
diff --git a/src/lib/Server/Plugins/Decisions.py b/src/lib/Server/Plugins/Decisions.py
index 1f9525a0e..e239be5ee 100644
--- a/src/lib/Server/Plugins/Decisions.py
+++ b/src/lib/Server/Plugins/Decisions.py
@@ -26,7 +26,8 @@ class DecisionSet(Bcfg2.Server.Plugin.EntrySet):
DecisionFile, encoding)
try:
fam.AddMonitor(path, self)
- except OSError, e:
+ except OSError:
+ e = sys.exc_info()[1]
logger.error('Adding filemonitor for %s failed. '
'Make sure directory exists' % path)
raise Bcfg2.Server.Plugin.PluginInitError(e)
diff --git a/src/lib/Server/Plugins/Deps.py b/src/lib/Server/Plugins/Deps.py
index 088f8cdad..b186258cb 100644
--- a/src/lib/Server/Plugins/Deps.py
+++ b/src/lib/Server/Plugins/Deps.py
@@ -5,20 +5,22 @@ import lxml.etree
import Bcfg2.Server.Plugin
+
class DNode(Bcfg2.Server.Plugin.INode):
"""DNode provides supports for single predicate types for dependencies."""
- raw = {'Group':"lambda x:'%s' in x.groups and predicate(x)"}
+ raw = {'Group': "lambda x:'%s' in x.groups and predicate(x)"}
containers = ['Group']
def __init__(self, data, idict, parent=None):
self.data = data
self.contents = {}
if parent == None:
- self.predicate = lambda x:True
+ self.predicate = lambda x: True
else:
predicate = parent.predicate
- if data.tag in self.raw.keys():
- self.predicate = eval(self.raw[data.tag] % (data.get('name')), {'predicate':predicate})
+ if data.tag in list(self.raw.keys()):
+ self.predicate = eval(self.raw[data.tag] % (data.get('name')),
+ {'predicate': predicate})
else:
raise Exception
mytype = self.__class__
@@ -27,15 +29,18 @@ class DNode(Bcfg2.Server.Plugin.INode):
if item.tag in self.containers:
self.children.append(mytype(item, idict, self))
else:
- data = [(child.tag, child.get('name')) for child in item.getchildren()]
+ data = [(child.tag, child.get('name'))
+ for child in item.getchildren()]
try:
self.contents[item.tag][item.get('name')] = data
except KeyError:
- self.contents[item.tag] = {item.get('name'):data}
+ self.contents[item.tag] = {item.get('name'): data}
+
class DepXMLSrc(Bcfg2.Server.Plugin.XMLSrc):
__node__ = DNode
+
class Deps(Bcfg2.Server.Plugin.PrioDir,
Bcfg2.Server.Plugin.StructureValidator):
name = 'Deps'
@@ -68,12 +73,12 @@ class Deps(Bcfg2.Server.Plugin.PrioDir,
if (entries, gdata) in self.cache:
prereqs = self.cache[(entries, gdata)]
else:
- [src.Cache(metadata) for src in self.entries.values()]
+ [src.Cache(metadata) for src in list(self.entries.values())]
toexamine = list(entries[:])
while toexamine:
entry = toexamine.pop()
- matching = [src for src in self.entries.values()
+ matching = [src for src in list(self.entries.values())
if src.cache and entry[0] in src.cache[1]
and entry[1] in src.cache[1][entry[0]]]
if len(matching) > 1:
diff --git a/src/lib/Server/Plugins/Editor.py b/src/lib/Server/Plugins/Editor.py
index bfd4d6e93..76a03a325 100644
--- a/src/lib/Server/Plugins/Editor.py
+++ b/src/lib/Server/Plugins/Editor.py
@@ -2,6 +2,7 @@ import Bcfg2.Server.Plugin
import re
import lxml.etree
+
def linesub(pattern, repl, filestring):
"""Substitutes instances of pattern with repl in filestring."""
if filestring == None:
@@ -12,6 +13,7 @@ def linesub(pattern, repl, filestring):
output.append(re.sub(pattern, repl, filestring))
return '\n'.join(output)
+
class EditDirectives(Bcfg2.Server.Plugin.SpecificData):
"""This object handles the editing directives."""
def ProcessDirectives(self, input):
@@ -22,23 +24,29 @@ class EditDirectives(Bcfg2.Server.Plugin.SpecificData):
temp = linesub(directive[0], directive[1], temp)
return temp
+
class EditEntrySet(Bcfg2.Server.Plugin.EntrySet):
def __init__(self, basename, path, entry_type, encoding):
- self.ignore = re.compile("^(\.#.*|.*~|\\..*\\.(tmp|sw[px])|%s\.H_.*)$" %path.split('/')[-1])
- Bcfg2.Server.Plugin.EntrySet.__init__(self, basename, path, entry_type, encoding)
+ self.ignore = re.compile("^(\.#.*|.*~|\\..*\\.(tmp|sw[px])|%s\.H_.*)$" % path.split('/')[-1])
+ Bcfg2.Server.Plugin.EntrySet.__init__(self,
+ basename,
+ path,
+ entry_type,
+ encoding)
self.inputs = dict()
def bind_entry(self, entry, metadata):
client = metadata.hostname
filename = entry.get('name')
- permdata = {'owner':'root', 'group':'root'}
- permdata['perms'] = '0644'
+ permdata = {'owner': 'root',
+ 'group': 'root',
+ 'perms': '0644'}
[entry.attrib.__setitem__(key, permdata[key]) for key in permdata]
entry.text = self.entries['edits'].ProcessDirectives(self.get_client_data(client))
if not entry.text:
entry.set('empty', 'true')
try:
- f = open('%s/%s.H_%s' %(self.path, filename.split('/')[-1], client), 'w')
+ f = open('%s/%s.H_%s' % (self.path, filename.split('/')[-1], client), 'w')
f.write(entry.text)
f.close()
except:
@@ -60,7 +68,7 @@ class Editor(Bcfg2.Server.Plugin.GroupSpool,
def GetProbes(self, _):
'''Return a set of probes for execution on client'''
probelist = list()
- for name in self.entries.keys():
+ for name in list(self.entries.keys()):
probe = lxml.etree.Element('probe')
probe.set('name', name)
probe.set('source', "Editor")
diff --git a/src/lib/Server/Plugins/GroupPatterns.py b/src/lib/Server/Plugins/GroupPatterns.py
index 553f9d286..7faead39a 100644
--- a/src/lib/Server/Plugins/GroupPatterns.py
+++ b/src/lib/Server/Plugins/GroupPatterns.py
@@ -3,6 +3,7 @@ import re
import Bcfg2.Server.Plugin
+
class PackedDigitRange(object):
def __init__(self, digit_range):
self.sparse = list()
@@ -18,12 +19,14 @@ class PackedDigitRange(object):
if iother in self.sparse:
return True
for (start, end) in self.ranges:
- if iother in xrange(start, end+1):
+ if iother in range(start, end + 1):
return True
return False
+
class PatternMap(object):
range_finder = '\\[\\[[\d\-,]+\\]\\]'
+
def __init__(self, pattern, rangestr, groups):
self.pattern = pattern
self.rangestr = rangestr
@@ -33,8 +36,11 @@ class PatternMap(object):
self.process = self.process_re
elif rangestr != None:
self.process = self.process_range
- self.re = re.compile('^' + re.subn(self.range_finder, '(\d+)', rangestr)[0])
- dmatcher = re.compile(re.subn(self.range_finder, '\\[\\[([\d\-,]+)\\]\\]', rangestr)[0])
+ self.re = re.compile('^' + re.subn(self.range_finder, '(\d+)',
+ rangestr)[0])
+ dmatcher = re.compile(re.subn(self.range_finder,
+ '\\[\\[([\d\-,]+)\\]\\]',
+ rangestr)[0])
self.dranges = [PackedDigitRange(x) for x in dmatcher.match(rangestr).groups()]
else:
raise Exception
@@ -58,10 +64,11 @@ class PatternMap(object):
for group in self.groups:
newg = group
for idx in range(len(sub)):
- newg = newg.replace('$%s' % (idx+1), sub[idx])
+ newg = newg.replace('$%s' % (idx + 1), sub[idx])
ret.append(newg)
return ret
+
class PatternFile(Bcfg2.Server.Plugin.SingleXMLFileBacked):
def __init__(self, filename, fam):
Bcfg2.Server.Plugin.SingleXMLFileBacked.__init__(self, filename, fam)
@@ -101,6 +108,7 @@ class PatternFile(Bcfg2.Server.Plugin.SingleXMLFileBacked):
(pattern.pattern, hostname), exc_info=1)
return ret
+
class GroupPatterns(Bcfg2.Server.Plugin.Plugin,
Bcfg2.Server.Plugin.Connector):
name = "GroupPatterns"
diff --git a/src/lib/Server/Plugins/Hostbase.py b/src/lib/Server/Plugins/Hostbase.py
index 65992596d..4180fd716 100644
--- a/src/lib/Server/Plugins/Hostbase.py
+++ b/src/lib/Server/Plugins/Hostbase.py
@@ -1,4 +1,7 @@
-'''This file provides the Hostbase plugin. It manages dns/dhcp/nis host information'''
+"""
+This file provides the Hostbase plugin.
+It manages dns/dhcp/nis host information
+"""
__revision__ = '$Revision$'
import os
@@ -11,7 +14,9 @@ from sets import Set
from django.template import Context, loader
from django.db import connection
import re
-import cStringIO
+# Compatibility imports
+from Bcfg2.Bcfg2Py3k import StringIO
+
class Hostbase(Bcfg2.Server.Plugin.Plugin,
Bcfg2.Server.Plugin.Structure,
@@ -23,24 +28,29 @@ class Hostbase(Bcfg2.Server.Plugin.Plugin,
filepath = '/my/adm/hostbase/files/bind'
def __init__(self, core, datastore):
-
+
self.ready = False
Bcfg2.Server.Plugin.Plugin.__init__(self, core, datastore)
Bcfg2.Server.Plugin.Structure.__init__(self)
Bcfg2.Server.Plugin.Generator.__init__(self)
- files = ['zone.tmpl', 'reversesoa.tmpl', 'named.tmpl', 'reverseappend.tmpl',
- 'dhcpd.tmpl', 'hosts.tmpl', 'hostsappend.tmpl']
+ files = ['zone.tmpl',
+ 'reversesoa.tmpl',
+ 'named.tmpl',
+ 'reverseappend.tmpl',
+ 'dhcpd.tmpl',
+ 'hosts.tmpl',
+ 'hostsappend.tmpl']
self.filedata = {}
self.dnsservers = []
self.dhcpservers = []
- self.templates = {'zone':loader.get_template('zone.tmpl'),
- 'reversesoa':loader.get_template('reversesoa.tmpl'),
- 'named':loader.get_template('named.tmpl'),
- 'namedviews':loader.get_template('namedviews.tmpl'),
- 'reverseapp':loader.get_template('reverseappend.tmpl'),
- 'dhcp':loader.get_template('dhcpd.tmpl'),
- 'hosts':loader.get_template('hosts.tmpl'),
- 'hostsapp':loader.get_template('hostsappend.tmpl'),
+ self.templates = {'zone': loader.get_template('zone.tmpl'),
+ 'reversesoa': loader.get_template('reversesoa.tmpl'),
+ 'named': loader.get_template('named.tmpl'),
+ 'namedviews': loader.get_template('namedviews.tmpl'),
+ 'reverseapp': loader.get_template('reverseappend.tmpl'),
+ 'dhcp': loader.get_template('dhcpd.tmpl'),
+ 'hosts': loader.get_template('hosts.tmpl'),
+ 'hostsapp': loader.get_template('hostsappend.tmpl'),
}
self.Entries['ConfigFile'] = {}
self.__rmi__ = ['rebuildState']
@@ -48,14 +58,17 @@ class Hostbase(Bcfg2.Server.Plugin.Plugin,
self.rebuildState(None)
except:
raise PluginInitError
-
+
def FetchFile(self, entry, metadata):
"""Return prebuilt file data."""
fname = entry.get('name').split('/')[-1]
if not fname in self.filedata:
raise PluginExecutionError
- perms = {'owner':'root', 'group':'root', 'perms':'644'}
- [entry.attrib.__setitem__(key, value) for (key, value) in perms.iteritems()]
+ perms = {'owner': 'root',
+ 'group': 'root',
+ 'perms': '644'}
+ [entry.attrib.__setitem__(key, value)
+ for (key, value) in list(perms.items())]
entry.text = self.filedata[fname]
def BuildStructures(self, metadata):
@@ -110,8 +123,8 @@ class Hostbase(Bcfg2.Server.Plugin.Plugin,
hosts = {}
for zone in zones:
- zonefile = cStringIO.StringIO()
- externalzonefile = cStringIO.StringIO()
+ zonefile = StringIO()
+ externalzonefile = StringIO()
cursor.execute("""SELECT n.name FROM hostbase_zone_nameservers z
INNER JOIN hostbase_nameserver n ON z.nameserver_id = n.id
WHERE z.zone_id = \'%s\'""" % zone[0])
@@ -148,20 +161,20 @@ class Hostbase(Bcfg2.Server.Plugin.Plugin,
cursor.execute(querystring)
zonehosts = cursor.fetchall()
prevhost = (None, None, None, None)
- cnames = cStringIO.StringIO()
- cnamesexternal = cStringIO.StringIO()
+ cnames = StringIO()
+ cnamesexternal = StringIO()
for host in zonehosts:
if not host[2].split(".", 1)[1] == zone[1]:
zonefile.write(cnames.getvalue())
externalzonefile.write(cnamesexternal.getvalue())
- cnames = cStringIO.StringIO()
- cnamesexternal = cStringIO.StringIO()
+ cnames = StringIO()
+ cnamesexternal = StringIO()
continue
if not prevhost[1] == host[1] or not prevhost[2] == host[2]:
zonefile.write(cnames.getvalue())
externalzonefile.write(cnamesexternal.getvalue())
- cnames = cStringIO.StringIO()
- cnamesexternal = cStringIO.StringIO()
+ cnames = StringIO()
+ cnamesexternal = StringIO()
zonefile.write("%-32s%-10s%-32s\n" %
(host[2].split(".", 1)[0], 'A', host[1]))
zonefile.write("%-32s%-10s%-3s%s.\n" %
@@ -173,29 +186,29 @@ class Hostbase(Bcfg2.Server.Plugin.Plugin,
('', 'MX', host[4], host[5]))
elif not prevhost[5] == host[5]:
zonefile.write("%-32s%-10s%-3s%s.\n" %
- ('', 'MX', host[4], host[5]))
+ ('', 'MX', host[4], host[5]))
if host[6] == 'global':
externalzonefile.write("%-32s%-10s%-3s%s.\n" %
('', 'MX', host[4], host[5]))
-
+
if host[3]:
try:
if host[3].split(".", 1)[1] == zone[1]:
cnames.write("%-32s%-10s%-32s\n" %
(host[3].split(".", 1)[0],
- 'CNAME',host[2].split(".", 1)[0]))
+ 'CNAME', host[2].split(".", 1)[0]))
if host[6] == 'global':
cnamesexternal.write("%-32s%-10s%-32s\n" %
(host[3].split(".", 1)[0],
- 'CNAME',host[2].split(".", 1)[0]))
+ 'CNAME', host[2].split(".", 1)[0]))
else:
cnames.write("%-32s%-10s%-32s\n" %
- (host[3]+".",
+ (host[3] + ".",
'CNAME',
host[2].split(".", 1)[0]))
if host[6] == 'global':
cnamesexternal.write("%-32s%-10s%-32s\n" %
- (host[3]+".",
+ (host[3] + ".",
'CNAME',
host[2].split(".", 1)[0]))
@@ -215,9 +228,9 @@ class Hostbase(Bcfg2.Server.Plugin.Plugin,
cursor.execute("SELECT * FROM hostbase_zone WHERE zone LIKE \'%%.rev\' AND zone <> \'.rev\'")
reversezones = cursor.fetchall()
-
+
reversenames = []
- for reversezone in reversezones:
+ for reversezone in reversezones:
cursor.execute("""SELECT n.name FROM hostbase_zone_nameservers z
INNER JOIN hostbase_nameserver n ON z.nameserver_id = n.id
WHERE z.zone_id = \'%s\'""" % reversezone[0])
@@ -236,7 +249,7 @@ class Hostbase(Bcfg2.Server.Plugin.Plugin,
subnet = reversezone[1].split(".")
subnet.reverse()
- reversenames.append((reversezone[1].rstrip('.rev'),".".join(subnet[1:])))
+ reversenames.append((reversezone[1].rstrip('.rev'), ".".join(subnet[1:])))
for filename in reversenames:
cursor.execute("""
@@ -247,8 +260,8 @@ class Hostbase(Bcfg2.Server.Plugin.Plugin,
WHERE p.ip_addr LIKE '%s%%%%' AND h.status = 'active' ORDER BY p.ip_addr
""" % filename[1])
reversehosts = cursor.fetchall()
- zonefile = cStringIO.StringIO()
- externalzonefile = cStringIO.StringIO()
+ zonefile = StringIO()
+ externalzonefile = StringIO()
if len(filename[0].split(".")) == 2:
originlist = []
[originlist.append((".".join([ip[1].split(".")[2], filename[0]]),
@@ -268,13 +281,13 @@ class Hostbase(Bcfg2.Server.Plugin.Plugin,
'hosts': hosts,
'inaddr': origin[0],
'fileorigin': filename[0],
- })
+ })
zonefile.write(self.templates['reverseapp'].render(context))
context = Context({
'hosts': hosts_external,
'inaddr': origin[0],
'fileorigin': filename[0],
- })
+ })
externalzonefile.write(self.templates['reverseapp'].render(context))
else:
originlist = [filename[0]]
@@ -289,7 +302,7 @@ class Hostbase(Bcfg2.Server.Plugin.Plugin,
'hosts': hosts,
'inaddr': filename[0],
'fileorigin': None,
- })
+ })
zonefile.write(self.templates['reverseapp'].render(context))
context = Context({
'hosts': hosts_external,
@@ -308,13 +321,12 @@ class Hostbase(Bcfg2.Server.Plugin.Plugin,
context = Context({
'zones': zones,
'reverses': reversenames,
- })
+ })
self.filedata['named.conf'] = self.templates['named'].render(context)
self.Entries['ConfigFile']['/my/adm/hostbase/files/named.conf'] = self.FetchFile
self.filedata['named.conf.views'] = self.templates['namedviews'].render(context)
self.Entries['ConfigFile']['/my/adm/hostbase/files/named.conf.views'] = self.FetchFile
-
def buildDHCP(self):
"""Pre-build dhcpd.conf and stash in the filedata table."""
@@ -362,7 +374,6 @@ class Hostbase(Bcfg2.Server.Plugin.Plugin,
self.filedata['dhcpd.conf'] = self.templates['dhcp'].render(context)
self.Entries['ConfigFile']['/my/adm/hostbase/files/dhcpd.conf'] = self.FetchFile
-
def buildHosts(self):
"""Pre-build and stash /etc/hosts file."""
@@ -490,7 +501,7 @@ Name Room User Type
def buildHostsLPD(self):
"""Creates the /mcs/etc/hosts.lpd file"""
-
+
# this header needs to be changed to be more generic
header = """+@machines
+@all-machines
@@ -503,7 +514,7 @@ delphi.esh.anl.gov
anlcv1.ctd.anl.gov
anlvms.ctd.anl.gov
olivia.ctd.anl.gov\n\n"""
-
+
cursor = connection.cursor()
cursor.execute("""
SELECT hostname FROM hostbase_host WHERE netgroup=\"red\" AND status = 'active'
@@ -534,7 +545,6 @@ olivia.ctd.anl.gov\n\n"""
self.filedata['hosts.lpd'] = hostslpdfile
self.Entries['ConfigFile']['/mcs/etc/hosts.lpd'] = self.FetchFile
-
def buildNetgroups(self):
"""Makes the *-machine files"""
header = """###################################################################
@@ -557,11 +567,11 @@ olivia.ctd.anl.gov\n\n"""
nameslist = cursor.fetchall()
# gets the first host and initializes the hash
hostdata = nameslist[0]
- netgroups = {hostdata[2]:[hostdata[0]]}
+ netgroups = {hostdata[2]: [hostdata[0]]}
for row in nameslist:
# if new netgroup, create it
if row[2] not in netgroups:
- netgroups.update({row[2]:[]})
+ netgroups.update({row[2]: []})
# if it belongs in the netgroup and has multiple interfaces, put them in
if hostdata[0] == row[0] and row[3]:
netgroups[row[2]].append(row[1])
@@ -572,7 +582,7 @@ olivia.ctd.anl.gov\n\n"""
hostdata = row
for netgroup in netgroups:
- fileoutput = cStringIO.StringIO()
+ fileoutput = StringIO()
fileoutput.write(header % (netgroup, netgroup, len(netgroups[netgroup])))
for each in netgroups[netgroup]:
fileoutput.write(each + "\n")
diff --git a/src/lib/Server/Plugins/Ldap.py b/src/lib/Server/Plugins/Ldap.py
index 4f10d8ca6..06ecaed7b 100644
--- a/src/lib/Server/Plugins/Ldap.py
+++ b/src/lib/Server/Plugins/Ldap.py
@@ -1,9 +1,18 @@
import imp
+import logging
+import sys
import time
-import ldap
import Bcfg2.Options
import Bcfg2.Server.Plugin
+logger = logging.getLogger('Bcfg2.Plugins.Ldap')
+
+try:
+ import ldap
+except:
+ logger.error("Unable to load ldap module. Is python-ldap installed?")
+ raise ImportError
+
# time in seconds between retries after failed LDAP connection
RETRY_DELAY = 5
# how many times to try reaching the LDAP server if a connection is broken
@@ -81,7 +90,8 @@ class Ldap(Bcfg2.Server.Plugin.Plugin, Bcfg2.Server.Plugin.Connector):
self.debug_log("LdapPlugin debug: query '" + query.name +
"' not applicable to host '" + metadata.hostname + "'")
return data
- except Exception, error_msg:
+ except Exception:
+ error_msg = sys.exc_info()[1]
if self.debug_flag:
raise
else:
diff --git a/src/lib/Server/Plugins/Metadata.py b/src/lib/Server/Plugins/Metadata.py
index aa482e7ed..ca6e43851 100644
--- a/src/lib/Server/Plugins/Metadata.py
+++ b/src/lib/Server/Plugins/Metadata.py
@@ -1,4 +1,6 @@
-"""This file stores persistent metadata for the Bcfg2 Configuration Repository."""
+"""
+This file stores persistent metadata for the Bcfg2 Configuration Repository.
+"""
__revision__ = '$Revision$'
@@ -12,6 +14,7 @@ import time
import Bcfg2.Server.FileMonitor
import Bcfg2.Server.Plugin
+
def locked(fd):
"""Aquire a lock on a file"""
try:
@@ -20,14 +23,19 @@ def locked(fd):
return True
return False
+
class MetadataConsistencyError(Exception):
"""This error gets raised when metadata is internally inconsistent."""
pass
+
class MetadataRuntimeError(Exception):
- """This error is raised when the metadata engine is called prior to reading enough data."""
+ """This error is raised when the metadata engine
+ is called prior to reading enough data.
+ """
pass
+
class XMLMetadataConfig(object):
"""Handles xml config files and all XInclude statements"""
def __init__(self, metadata, watch_clients, basefile):
@@ -39,7 +47,8 @@ class XMLMetadataConfig(object):
self.basedata = None
self.basedir = metadata.data
self.logger = metadata.logger
- self.pseudo_monitor = isinstance(metadata.core.fam, Bcfg2.Server.FileMonitor.Pseudo)
+ self.pseudo_monitor = isinstance(metadata.core.fam,
+ Bcfg2.Server.FileMonitor.Pseudo)
@property
def xdata(self):
@@ -56,7 +65,8 @@ class XMLMetadataConfig(object):
def add_monitor(self, fname):
"""Add a fam monitor for an included file"""
if self.should_monitor:
- self.metadata.core.fam.AddMonitor("%s/%s" % (self.basedir, fname), self.metadata)
+ self.metadata.core.fam.AddMonitor("%s/%s" % (self.basedir, fname),
+ self.metadata)
self.extras.append(fname)
def load_xml(self):
@@ -81,14 +91,16 @@ class XMLMetadataConfig(object):
def write(self):
"""Write changes to xml back to disk."""
- self.write_xml("%s/%s" % (self.basedir, self.basefile), self.basedata)
+ self.write_xml("%s/%s" % (self.basedir, self.basefile),
+ self.basedata)
def write_xml(self, fname, xmltree):
"""Write changes to xml back to disk."""
tmpfile = "%s.new" % fname
try:
datafile = open("%s" % tmpfile, 'w')
- except IOError, e:
+ except IOError:
+ e = sys.exc_info()[1]
self.logger.error("Failed to write %s: %s" % (tmpfile, e))
raise MetadataRuntimeError
# prep data
@@ -182,6 +194,7 @@ class ClientMetadata(object):
return grp
return ''
+
class MetadataQuery(object):
def __init__(self, by_name, get_clients, by_groups, by_profiles, all_groups, all_groups_in_category):
# resolver is set later
@@ -201,6 +214,7 @@ class MetadataQuery(object):
def all(self):
return [self.by_name(name) for name in self.all_clients()]
+
class Metadata(Bcfg2.Server.Plugin.Plugin,
Bcfg2.Server.Plugin.Metadata,
Bcfg2.Server.Plugin.Statistics):
@@ -220,12 +234,13 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
except:
print("Unable to add file monitor for groups.xml or clients.xml")
raise Bcfg2.Server.Plugin.PluginInitError
-
+
self.clients_xml = XMLMetadataConfig(self, watch_clients, 'clients.xml')
self.groups_xml = XMLMetadataConfig(self, watch_clients, 'groups.xml')
self.states = {}
if watch_clients:
- self.states = {"groups.xml":False, "clients.xml":False}
+ self.states = {"groups.xml": False,
+ "clients.xml": False}
self.addresses = {}
self.auth = dict()
self.clients = {}
@@ -244,10 +259,11 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
self.session_cache = {}
self.default = None
self.pdirty = False
- self.extra = {'groups.xml':[], 'clients.xml':[]}
+ self.extra = {'groups.xml': [],
+ 'clients.xml': []}
self.password = core.password
self.query = MetadataQuery(core.build_metadata,
- lambda:self.clients.keys(),
+ lambda: list(self.clients.keys()),
self.get_client_names_by_groups,
self.get_client_names_by_profiles,
self.get_all_group_names,
@@ -288,7 +304,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
element = lxml.etree.SubElement(self.groups_xml.base_xdata.getroot(),
"Group", name=group_name)
- for key, val in attribs.iteritems():
+ for key, val in list(attribs.items()):
element.set(key, val)
self.groups_xml.write()
@@ -303,7 +319,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
self.logger.error("Unexpected error finding group")
raise MetadataConsistencyError
- for key, val in attribs.iteritems():
+ for key, val in list(attribs.items()):
xdict['xquery'][0].set(key, val)
self.groups_xml.write_xml(xdict['filename'], xdict['xmltree'])
@@ -330,7 +346,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
self.logger.error("Bundle \"%s\" already exists" % (bundle_name))
raise MetadataConsistencyError
root.append(element)
- group_tree = open(self.data + "/groups.xml","w")
+ group_tree = open(self.data + "/groups.xml", "w")
fd = group_tree.fileno()
while True:
try:
@@ -352,7 +368,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
self.logger.error("Bundle \"%s\" not found" % (bundle_name))
raise MetadataConsistencyError
root.remove(node)
- group_tree = open(self.data + "/groups.xml","w")
+ group_tree = open(self.data + "/groups.xml", "w")
fd = group_tree.fileno()
while True:
try:
@@ -384,7 +400,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
element = lxml.etree.SubElement(self.clients_xml.base_xdata.getroot(),
"Client", name=client_name)
- for key, val in attribs.iteritems():
+ for key, val in list(attribs.items()):
element.set(key, val)
self.clients_xml.write()
@@ -401,7 +417,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
raise MetadataConsistencyError
node = xdict['xquery'][0]
- [node.set(key, value) for key, value in attribs.items()]
+ [node.set(key, value) for key, value in list(attribs.items())]
self.clients_xml.write_xml(xdict['filename'], xdict['xmltree'])
def HandleEvent(self, event):
@@ -463,8 +479,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
grouptmp = {}
self.categories = {}
groupseen = list()
- for group in xdata.xpath('//Groups/Group') \
- + xdata.xpath('Group'):
+ for group in xdata.xpath('//Groups/Group'):
if group.get('name') not in groupseen:
groupseen.append(group.get('name'))
else:
@@ -506,17 +521,17 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
ggg))
[self.groups[group][0].add(bund) for bund in bundles]
self.states['groups.xml'] = True
- if False not in self.states.values():
+ if False not in list(self.states.values()):
# check that all client groups are real and complete
- real = self.groups.keys()
- for client in self.clients.keys():
+ real = list(self.groups.keys())
+ for client in list(self.clients.keys()):
if self.clients[client] not in self.profiles:
self.logger.error("Client %s set as nonexistent or incomplete group %s" \
% (client, self.clients[client]))
self.logger.error("Removing client mapping for %s" % (client))
self.bad_clients[client] = self.clients[client]
del self.clients[client]
- for bclient in self.bad_clients.keys():
+ for bclient in list(self.bad_clients.keys()):
if self.bad_clients[bclient] in self.profiles:
self.logger.info("Restored profile mapping for client %s" % bclient)
self.clients[bclient] = self.bad_clients[bclient]
@@ -525,7 +540,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
def set_profile(self, client, profile, addresspair):
"""Set group parameter for provided client."""
self.logger.info("Asserting client %s profile to %s" % (client, profile))
- if False in self.states.values():
+ if False in list(self.states.values()):
raise MetadataRuntimeError
if profile not in self.public:
self.logger.error("Failed to set client %s to private group %s" % (client, profile))
@@ -579,7 +594,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
def get_initial_metadata(self, client):
"""Return the metadata for a given client."""
- if False in self.states.values():
+ if False in list(self.states.values()):
raise MetadataRuntimeError
client = client.lower()
if client in self.aliases:
@@ -604,7 +619,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
password = self.passwords[client]
else:
password = None
- uuids = [item for item, value in self.uuid.iteritems() if value == client]
+ uuids = [item for item, value in list(self.uuid.items()) if value == client]
if uuids:
uuid = uuids[0]
else:
@@ -622,7 +637,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
def get_all_group_names(self):
all_groups = set()
- [all_groups.update(g[1]) for g in self.groups.values()]
+ [all_groups.update(g[1]) for g in list(self.groups.values())]
return all_groups
def get_all_groups_in_category(self, category):
@@ -632,11 +647,12 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
return all_groups
def get_client_names_by_profiles(self, profiles):
- return [client for client, profile in self.clients.iteritems() \
+ return [client for client, profile in list(self.clients.items()) \
if profile in profiles]
def get_client_names_by_groups(self, groups):
- mdata = [self.core.build_metadata(client) for client in self.clients.keys()]
+ mdata = [self.core.build_metadata(client)
+ for client in list(self.clients.keys())]
return [md.hostname for md in mdata if md.groups.issuperset(groups)]
def merge_additional_groups(self, imd, groups):
@@ -766,7 +782,6 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
xdict['xquery'][0].set('auth', 'cert')
self.clients_xml.write_xml(xdict['filename'], xdict['xmltree'])
-
def viz(self, hosts, bundles, key, colors):
"""Admin mode viz support."""
groups_tree = lxml.etree.parse(self.data + "/groups.xml")
@@ -775,7 +790,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
except lxml.etree.XIncludeError:
self.logger.error("Failed to process XInclude for file %s" % dest)
groups = groups_tree.getroot()
- categories = {'default':'grey83'}
+ categories = {'default': 'grey83'}
instances = {}
viz_str = ""
egroups = groups.findall("Group") + groups.findall('.//Groups/Group')
@@ -787,12 +802,12 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
del categories[None]
if hosts:
clients = self.clients
- for client, profile in clients.iteritems():
+ for client, profile in list(clients.items()):
if profile in instances:
instances[profile].append(client)
else:
instances[profile] = [client]
- for profile, clist in instances.iteritems():
+ for profile, clist in list(instances.items()):
clist.sort()
viz_str += '''\t"%s-instances" [ label="%s", shape="record" ];\n''' \
% (profile, '|'.join(clist))
diff --git a/src/lib/Server/Plugins/NagiosGen.py b/src/lib/Server/Plugins/NagiosGen.py
index 14277b63d..8a76c130d 100644
--- a/src/lib/Server/Plugins/NagiosGen.py
+++ b/src/lib/Server/Plugins/NagiosGen.py
@@ -1,149 +1,152 @@
'''This module implements a Nagios configuration generator'''
-import glob
-import logging
-import lxml.etree
import os
import re
+import sys
+import glob
import socket
+import logging
+import lxml.etree
import Bcfg2.Server.Plugin
LOGGER = logging.getLogger('Bcfg2.Plugins.NagiosGen')
-host_config_fmt = \
-'''
-define host{
- host_name %s
- alias %s
- address %s
-'''
+line_fmt = '\t%-32s %s'
+
+class NagiosGenConfig(Bcfg2.Server.Plugin.SingleXMLFileBacked,
+ Bcfg2.Server.Plugin.StructFile):
+ def __init__(self, filename, fam):
+ Bcfg2.Server.Plugin.SingleXMLFileBacked.__init__(self, filename, fam)
+ Bcfg2.Server.Plugin.StructFile.__init__(self, filename)
+
class NagiosGen(Bcfg2.Server.Plugin.Plugin,
Bcfg2.Server.Plugin.Generator):
"""NagiosGen is a Bcfg2 plugin that dynamically generates
Nagios configuration file based on Bcfg2 data.
"""
name = 'NagiosGen'
- __version__ = '0.6'
+ __version__ = '0.7'
__author__ = 'bcfg-dev@mcs.anl.gov'
def __init__(self, core, datastore):
Bcfg2.Server.Plugin.Plugin.__init__(self, core, datastore)
Bcfg2.Server.Plugin.Generator.__init__(self)
+ self.config = NagiosGenConfig(os.path.join(self.data, 'config.xml'),
+ core.fam)
self.Entries = {'Path':
- {'/etc/nagiosgen.status' : self.createhostconfig,
- '/etc/nagios/nagiosgen.cfg': self.createserverconfig}}
-
- self.client_attrib = {'encoding':'ascii',
- 'owner':'root',
- 'group':'root',
- 'type':'file',
- 'perms':'0400'}
- self.server_attrib = {'encoding':'ascii',
- 'owner':'nagios',
- 'group':'nagios',
- 'type':'file',
- 'perms':'0440'}
-
- def getparents(self, hostname):
- """Return parents for given hostname."""
- depends=[]
- if not os.path.isfile('%s/parents.xml' % (self.data)):
- return depends
-
- tree = lxml.etree.parse('%s/parents.xml' % (self.data))
- for entry in tree.findall('.//Depend'):
- if entry.attrib['name'] == hostname:
- depends.append(entry.attrib['on'])
- return depends
+ {'/etc/nagiosgen.status': self.createhostconfig,
+ '/etc/nagios/nagiosgen.cfg': self.createserverconfig}}
+
+ self.client_attrib = {'encoding': 'ascii',
+ 'owner': 'root',
+ 'group': 'root',
+ 'type': 'file',
+ 'perms': '0400'}
+ self.server_attrib = {'encoding': 'ascii',
+ 'owner': 'nagios',
+ 'group': 'nagios',
+ 'type': 'file',
+ 'perms': '0440'}
def createhostconfig(self, entry, metadata):
"""Build host specific configuration file."""
host_address = socket.gethostbyname(metadata.hostname)
- host_groups = [grp for grp in metadata.groups if \
- os.path.isfile('%s/%s-group.cfg' % (self.data, grp))]
- host_config = host_config_fmt % \
- (metadata.hostname, metadata.hostname, host_address)
+ host_groups = [grp for grp in metadata.groups
+ if os.path.isfile('%s/%s-group.cfg' % (self.data, grp))]
+ host_config = ['define host {',
+ line_fmt % ('host_name', metadata.hostname),
+ line_fmt % ('alias', metadata.hostname),
+ line_fmt % ('address', host_address)]
if host_groups:
- host_config += ' hostgroups %s\n' % (",".join(host_groups))
-
- xtra = None
- if hasattr(metadata, 'Properties') and \
- 'NagiosGen.xml' in metadata.Properties:
- for q in (metadata.hostname, 'default'):
- xtra = metadata.Properties['NagiosGen.xml'].data.find(q)
- if xtra is not None:
- break
-
- if xtra is not None:
- directives = list(xtra)
- for item in directives:
- host_config += ' %-32s %s\n' % (item.tag, item.text)
-
+ host_config.append(line_fmt % ("hostgroups",
+ ",".join(host_groups)))
+
+ # read the old-style Properties config, but emit a warning.
+ xtra = dict()
+ props = None
+ if (hasattr(metadata, 'Properties') and
+ 'NagiosGen.xml' in metadata.Properties):
+ props = metadata.Properties['NagiosGen.xml'].data
+ if props is not None:
+ LOGGER.warn("Parsing deprecated Properties/NagiosGen.xml. "
+ "Update to the new-style config with "
+ "nagiosgen-convert.py.")
+ xtra = dict((el.tag, el.text)
+ for el in props.find(metadata.hostname))
+ # hold off on parsing the defaults until we've checked for
+ # a new-style config
+
+ # read the old-style parents.xml, but emit a warning
+ pfile = os.path.join(self.data, "parents.xml")
+ if os.path.exists(pfile):
+ LOGGER.warn("Parsing deprecated NagiosGen/parents.xml. "
+ "Update to the new-style config with "
+ "nagiosgen-convert.py.")
+ parents = lxml.etree.parse(pfile)
+ for el in parents.xpath("//Depend[@name='%s']" % metadata.hostname):
+ if 'parent' in xtra:
+ xtra['parent'] += "," + el.get("on")
+ else:
+ xtra['parent'] = el.get("on")
+
+ # read the new-style config and overwrite the old-style config
+ for el in self.config.Match(metadata):
+ if el.tag == 'Option':
+ xtra[el.get("name")] = el.text
+
+ # if we haven't found anything in the new- or old-style
+ # configs, finally read defaults from old-style config
+ if not xtra and props is not None:
+ xtra = dict((el.tag, el.text) for el in props.find('default'))
+
+ if xtra:
+ host_config.extend([line_fmt % (opt, val)
+ for opt, val in list(xtra.items())])
else:
- host_config += ' use default\n'
+ host_config.append(line_fmt % ('use', 'default'))
- host_config += '}\n'
- entry.text = host_config
- [entry.attrib.__setitem__(key, value) for \
- (key, value) in self.client_attrib.iteritems()]
+ host_config.append('}')
+ entry.text = "%s\n" % "\n".join(host_config)
+ [entry.attrib.__setitem__(key, value)
+ for (key, value) in list(self.client_attrib.items())]
try:
- fileh = open("%s/%s-host.cfg" % \
- (self.data, metadata.hostname), 'w')
- fileh.write(host_config)
+ fileh = open("%s/%s-host.cfg" %
+ (self.data, metadata.hostname), 'w')
+ fileh.write(entry.text)
fileh.close()
- except OSError, ioerr:
- LOGGER.error("Failed to write %s/%s-host.cfg" % \
- (self.data, metadata.hostname))
+ except OSError:
+ ioerr = sys.exc_info()[1]
+ LOGGER.error("Failed to write %s/%s-host.cfg" %
+ (self.data, metadata.hostname))
LOGGER.error(ioerr)
def createserverconfig(self, entry, _):
"""Build monolithic server configuration file."""
- host_configs = glob.glob('%s/*-host.cfg' % self.data)
+ host_configs = glob.glob('%s/*-host.cfg' % self.data)
group_configs = glob.glob('%s/*-group.cfg' % self.data)
- host_data = ""
- group_data = ""
+ host_data = []
+ group_data = []
for host in host_configs:
- hostfile = open(host, 'r')
- hostname=host.split('/')[-1].replace('-host.cfg','')
- parents=self.getparents(hostname)
- if parents:
- hostlines = hostfile.readlines()
- else:
- hostdata = hostfile.read()
- hostfile.close()
-
- if parents:
- hostdata=''
- addparents=True
- for line in hostlines:
- line=line.replace('\n','')
- if 'parents' in line:
- line+=','+','.join(parents)
- addparents=False
- if '}' in line:
- line=''
- hostdata+="%s\n" % line
- if addparents:
- hostdata+=" parents %s\n" % ','.join(parents)
- hostdata+="}\n"
-
- host_data += hostdata
+ host_data.append(open(host, 'r').read())
+
for group in group_configs:
group_name = re.sub("(-group.cfg|.*/(?=[^/]+))", "", group)
- if host_data.find(group_name) != -1:
+ if "\n".join(host_data).find(group_name) != -1:
groupfile = open(group, 'r')
- group_data += groupfile.read()
+ group_data.append(groupfile.read())
groupfile.close()
- entry.text = group_data + host_data
- [entry.attrib.__setitem__(key, value) for \
- (key, value) in self.server_attrib.iteritems()]
+
+ entry.text = "%s\n\n%s" % ("\n".join(group_data), "\n".join(host_data))
+ [entry.attrib.__setitem__(key, value)
+ for (key, value) in list(self.server_attrib.items())]
try:
- fileh = open("%s/nagiosgen.cfg" % (self.data), 'w')
- fileh.write(group_data + host_data)
+ fileh = open("%s/nagiosgen.cfg" % self.data, 'w')
+ fileh.write(entry.text)
fileh.close()
- except OSError, ioerr:
- LOGGER.error("Failed to write %s/nagiosgen.cfg" % (self.data))
+ except OSError:
+ ioerr = sys.exc_info()[1]
+ LOGGER.error("Failed to write %s/nagiosgen.cfg" % self.data)
LOGGER.error(ioerr)
diff --git a/src/lib/Server/Plugins/Ohai.py b/src/lib/Server/Plugins/Ohai.py
index 0f7c7187f..6bd3edc34 100644
--- a/src/lib/Server/Plugins/Ohai.py
+++ b/src/lib/Server/Plugins/Ohai.py
@@ -37,12 +37,12 @@ class OhaiCache(object):
try:
data = open("%s/%s.json" % (self.dirname, item)).read()
except:
- raise KeyError, item
+ raise KeyError(item)
self.cache[item] = json.loads(data)
return self.cache[item]
def __iter__(self):
- data = self.cache.keys()
+ data = list(self.cache.keys())
data.extend([x[:-5] for x in os.listdir(self.dirname)])
return data.__iter__()
@@ -50,7 +50,9 @@ class OhaiCache(object):
class Ohai(Bcfg2.Server.Plugin.Plugin,
Bcfg2.Server.Plugin.Probing,
Bcfg2.Server.Plugin.Connector):
- """The Ohai plugin is used to detect information about the client operating system."""
+ """The Ohai plugin is used to detect information
+ about the client operating system.
+ """
name = 'Ohai'
experimental = True
diff --git a/src/lib/Server/Plugins/Packages.py b/src/lib/Server/Plugins/Packages.py
index 438c1d5c0..4e47f8549 100644
--- a/src/lib/Server/Plugins/Packages.py
+++ b/src/lib/Server/Plugins/Packages.py
@@ -1,4 +1,3 @@
-import cPickle
import copy
import gzip
import tarfile
@@ -8,7 +7,21 @@ import lxml.etree
import os
import re
import sys
-import urllib2
+
+# Compatibility imports
+from Bcfg2.Bcfg2Py3k import cPickle
+from Bcfg2.Bcfg2Py3k import HTTPBasicAuthHandler
+from Bcfg2.Bcfg2Py3k import HTTPPasswordMgrWithDefaultRealm
+from Bcfg2.Bcfg2Py3k import HTTPError
+from Bcfg2.Bcfg2Py3k import install_opener
+from Bcfg2.Bcfg2Py3k import build_opener
+from Bcfg2.Bcfg2Py3k import urlopen
+
+# py3k compatibility
+if sys.hexversion >= 0x03000000:
+ from io import FileIO as BUILTIN_FILE_TYPE
+else:
+ BUILTIN_FILE_TYPE = file
# FIXME: Remove when server python dep is 2.5 or greater
if sys.version_info >= (2, 5):
@@ -22,14 +35,17 @@ import Bcfg2.Server.Plugin
# build sources.list?
# caching for yum
+
class NoData(Exception):
pass
+
class SomeData(Exception):
pass
logger = logging.getLogger('Packages')
+
def source_from_xml(xsource):
ret = dict([('rawurl', False), ('url', False)])
for key, tag in [('groups', 'Group'), ('components', 'Component'),
@@ -60,6 +76,7 @@ def source_from_xml(xsource):
ret['url'] += '/'
return ret
+
def _fetch_url(url):
if '@' in url:
mobj = re.match('(\w+://)([^:]+):([^@]+)@(.*)$', url)
@@ -68,10 +85,11 @@ def _fetch_url(url):
user = mobj.group(2)
passwd = mobj.group(3)
url = mobj.group(1) + mobj.group(4)
- auth = urllib2.HTTPBasicAuthHandler(urllib2.HTTPPasswordMgrWithDefaultRealm())
+ auth = HTTPBasicAuthHandler(HTTPPasswordMgrWithDefaultRealm())
auth.add_password(None, url, user, passwd)
- urllib2.install_opener(urllib2.build_opener(auth))
- return urllib2.urlopen(url).read()
+ install_opener(build_opener(auth))
+ return urlopen(url).read()
+
class Source(object):
basegroups = []
@@ -135,7 +153,7 @@ class Source(object):
agroups = ['global'] + [a for a in self.arches if a in meta.groups]
vdict = dict()
for agrp in agroups:
- for key, value in self.provides[agrp].iteritems():
+ for key, value in list(self.provides[agrp].items()):
if key not in vdict:
vdict[key] = set(value)
else:
@@ -160,11 +178,12 @@ class Source(object):
except ValueError:
logger.error("Packages: Bad url string %s" % url)
continue
- except urllib2.HTTPError, h:
+ except HTTPError:
+ h = sys.exc_info()[1]
logger.error("Packages: Failed to fetch url %s. code=%s" \
% (url, h.code))
continue
- file(fname, 'w').write(data)
+ BUILTIN_FILE_TYPE(fname, 'w').write(data)
def applies(self, metadata):
return len([g for g in self.basegroups if g in metadata.groups]) != 0 and \
@@ -193,6 +212,7 @@ class Source(object):
return {'groups': copy.copy(self.groups), \
'urls': [copy.deepcopy(url) for url in self.url_map]}
+
class YUMSource(Source):
xp = '{http://linux.duke.edu/metadata/common}'
rp = '{http://linux.duke.edu/metadata/rpm}'
@@ -217,13 +237,13 @@ class YUMSource(Source):
self.file_to_arch = dict()
def save_state(self):
- cache = file(self.cachefile, 'wb')
+ cache = BUILTIN_FILE_TYPE(self.cachefile, 'wb')
cPickle.dump((self.packages, self.deps, self.provides,
self.filemap, self.url_map), cache, 2)
cache.close()
def load_state(self):
- data = file(self.cachefile)
+ data = BUILTIN_FILE_TYPE(self.cachefile)
(self.packages, self.deps, self.provides, \
self.filemap, self.url_map) = cPickle.load(data)
@@ -250,7 +270,8 @@ class YUMSource(Source):
except ValueError:
logger.error("Packages: Bad url string %s" % rmdurl)
continue
- except urllib2.HTTPError, h:
+ except HTTPError:
+ h = sys.exc_info()[1]
logger.error("Packages: Failed to fetch url %s. code=%s" \
% (rmdurl, h.code))
continue
@@ -277,7 +298,7 @@ class YUMSource(Source):
fdata = lxml.etree.parse(fname).getroot()
self.parse_filelist(fdata, farch)
# merge data
- sdata = self.packages.values()
+ sdata = list(self.packages.values())
self.packages['global'] = copy.deepcopy(sdata.pop())
while sdata:
self.packages['global'] = self.packages['global'].intersection(sdata.pop())
@@ -337,10 +358,10 @@ class YUMSource(Source):
def get_vpkgs(self, metadata):
rv = Source.get_vpkgs(self, metadata)
- for arch, fmdata in self.filemap.iteritems():
+ for arch, fmdata in list(self.filemap.items()):
if arch not in metadata.groups and arch != 'global':
continue
- for filename, pkgs in fmdata.iteritems():
+ for filename, pkgs in list(fmdata.items()):
rv[filename] = pkgs
return rv
@@ -348,6 +369,7 @@ class YUMSource(Source):
filtered = set([u for u in unknown if u.startswith('rpmlib')])
unknown.difference_update(filtered)
+
class APTSource(Source):
basegroups = ['apt', 'debian', 'ubuntu', 'nexenta']
ptype = 'deb'
@@ -362,13 +384,13 @@ class APTSource(Source):
'components': self.components, 'arches': self.arches, 'groups': self.groups}]
def save_state(self):
- cache = file(self.cachefile, 'wb')
+ cache = BUILTIN_FILE_TYPE(self.cachefile, 'wb')
cPickle.dump((self.pkgnames, self.deps, self.provides),
cache, 2)
cache.close()
def load_state(self):
- data = file(self.cachefile)
+ data = BUILTIN_FILE_TYPE(self.cachefile)
self.pkgnames, self.deps, self.provides = cPickle.load(data)
def filter_unknown(self, unknown):
@@ -407,7 +429,7 @@ class APTSource(Source):
print("Failed to read file %s" % fname)
raise
for line in reader.readlines():
- words = line.strip().split(':', 1)
+ words = str(line.strip()).split(':', 1)
if words[0] == 'Package':
pkgname = words[1].strip().rstrip()
self.pkgnames.add(pkgname)
@@ -449,7 +471,7 @@ class APTSource(Source):
for barch in bdeps:
self.deps[barch][pkgname] = bdeps[barch][pkgname]
provided = set()
- for bprovided in bprov.values():
+ for bprovided in list(bprov.values()):
provided.update(set(bprovided))
for prov in provided:
prset = set()
@@ -469,6 +491,7 @@ class APTSource(Source):
pkg not in self.blacklist and \
(len(self.whitelist) == 0 or pkg in self.whitelist)
+
class PACSource(Source):
basegroups = ['arch', 'parabola']
ptype = 'pacman'
@@ -483,13 +506,13 @@ class PACSource(Source):
'components': self.components, 'arches': self.arches, 'groups': self.groups}]
def save_state(self):
- cache = file(self.cachefile, 'wb')
+ cache = BUILTIN_FILE_TYPE(self.cachefile, 'wb')
cPickle.dump((self.pkgnames, self.deps, self.provides),
cache, 2)
cache.close()
def load_state(self):
- data = file(self.cachefile)
+ data = BUILTIN_FILE_TYPE(self.cachefile)
self.pkgnames, self.deps, self.provides = cPickle.load(data)
def filter_unknown(self, unknown):
@@ -526,7 +549,7 @@ class PACSource(Source):
bdeps[barch] = dict()
bprov[barch] = dict()
try:
- print "try to read : " + fname
+ print("try to read : " + fname)
tar = tarfile.open(fname, "r")
reader = gzip.GzipFile(fname)
except:
@@ -536,7 +559,7 @@ class PACSource(Source):
for tarinfo in tar:
if tarinfo.isdir():
self.pkgnames.add(tarinfo.name.rsplit("-", 2)[0])
- print "added : " + tarinfo.name.rsplit("-", 2)[0]
+ print("added : " + tarinfo.name.rsplit("-", 2)[0])
tar.close()
self.deps['global'] = dict()
@@ -556,7 +579,7 @@ class PACSource(Source):
for barch in bdeps:
self.deps[barch][pkgname] = bdeps[barch][pkgname]
provided = set()
- for bprovided in bprov.values():
+ for bprovided in list(bprov.values()):
provided.update(set(bprovided))
for prov in provided:
prset = set()
@@ -576,6 +599,7 @@ class PACSource(Source):
pkg not in self.blacklist and \
(len(self.whitelist) == 0 or pkg in self.whitelist)
+
class Packages(Bcfg2.Server.Plugin.Plugin,
Bcfg2.Server.Plugin.StructureValidator,
Bcfg2.Server.Plugin.Generator,
@@ -614,7 +638,7 @@ class Packages(Bcfg2.Server.Plugin.Plugin,
vpkgs = dict()
for source in self.get_matching_sources(meta):
s_vpkgs = source.get_vpkgs(meta)
- for name, prov_set in s_vpkgs.iteritems():
+ for name, prov_set in list(s_vpkgs.items()):
if name not in vpkgs:
vpkgs[name] = set(prov_set)
else:
@@ -726,7 +750,9 @@ class Packages(Bcfg2.Server.Plugin.Plugin,
satisfied_vpkgs.add(current)
elif [item for item in vpkg_cache[current] if item in packages]:
if debug:
- self.logger.debug("Packages: requirement %s satisfied by %s" % (current, [item for item in vpkg_cache[current] if item in packages]))
+ self.logger.debug("Packages: requirement %s satisfied by %s" % (current,
+ [item for item in vpkg_cache[current]
+ if item in packages]))
satisfied_vpkgs.add(current)
vpkgs.difference_update(satisfied_vpkgs)
@@ -736,7 +762,9 @@ class Packages(Bcfg2.Server.Plugin.Plugin,
# allow use of virt through explicit specification, then fall back to forcing current on last pass
if [item for item in vpkg_cache[current] if item in packages]:
if debug:
- self.logger.debug("Packages: requirement %s satisfied by %s" % (current, [item for item in vpkg_cache[current] if item in packages]))
+ self.logger.debug("Packages: requirement %s satisfied by %s" % (current,
+ [item for item in vpkg_cache[current]
+ if item in packages]))
satisfied_both.add(current)
elif current in input_requirements or final_pass:
pkgs.add(current)
@@ -828,7 +856,8 @@ class Packages(Bcfg2.Server.Plugin.Plugin,
xdata.xinclude()
xdata = xdata.getroot()
except (lxml.etree.XIncludeError, \
- lxml.etree.XMLSyntaxError), xmlerr:
+ lxml.etree.XMLSyntaxError):
+ xmlerr = sys.exc_info()[1]
self.logger.error("Package: Error processing xml: %s" % xmlerr)
raise Bcfg2.Server.Plugin.PluginInitError
except IOError:
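
The Packages.py hunks above follow two recurring compatibility moves: the removed file() builtin is routed through a BUILTIN_FILE_TYPE alias (presumably supplied by a compatibility import outside this excerpt), and "except Class, var" handlers become bare handlers that read the exception object from sys.exc_info(). A minimal standalone sketch of both patterns, with the fetch helper name invented for illustration:

    import sys

    if sys.hexversion >= 0x03000000:
        from urllib.error import HTTPError
        BUILTIN_FILE_TYPE = open        # the file() builtin is gone on Python 3
    else:
        from urllib2 import HTTPError
        BUILTIN_FILE_TYPE = file

    def fetch_metadata(url, opener):
        """Fetch one repository metadata URL, logging HTTP failures."""
        try:
            return opener(url)
        except HTTPError:
            # "except HTTPError, h" is a syntax error on Python 3; reading the
            # exception from sys.exc_info() keeps one source tree valid on both.
            h = sys.exc_info()[1]
            print("Packages: Failed to fetch url %s. code=%s" % (url, h.code))
            return None

With opener bound to urllib2.urlopen (or urllib.request.urlopen on Python 3) this mirrors the fetch error path rewritten above.
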
diff --git a/src/lib/Server/Plugins/Pkgmgr.py b/src/lib/Server/Plugins/Pkgmgr.py
index b58a7c91d..b96e7ea7d 100644
--- a/src/lib/Server/Plugins/Pkgmgr.py
+++ b/src/lib/Server/Plugins/Pkgmgr.py
@@ -7,15 +7,17 @@ import Bcfg2.Server.Plugin
logger = logging.getLogger('Bcfg2.Plugins.Pkgmgr')
+
class FuzzyDict(dict):
fuzzy = re.compile('(?P<name>.*):(?P<alist>\S+(,\S+)*)')
+
def __getitem__(self, key):
if isinstance(key, str):
mdata = self.fuzzy.match(key)
if mdata:
return dict.__getitem__(self, mdata.groupdict()['name'])
else:
- print "got non-string key %s" % str(key)
+ print("got non-string key %s" % str(key))
return dict.__getitem__(self, key)
def has_key(self, key):
@@ -33,11 +35,14 @@ class FuzzyDict(dict):
return default
raise
+
class PNode(Bcfg2.Server.Plugin.INode):
- """PNode has a list of packages available at a particular group intersection."""
- splitters = {'rpm':re.compile('^(.*/)?(?P<name>[\w\+\d\.]+(-[\w\+\d\.]+)*)-' + \
+ """PNode has a list of packages available at a
+ particular group intersection.
+ """
+ splitters = {'rpm': re.compile('^(.*/)?(?P<name>[\w\+\d\.]+(-[\w\+\d\.]+)*)-' + \
'(?P<version>[\w\d\.]+-([\w\d\.]+))\.(?P<arch>\S+)\.rpm$'),
- 'encap':re.compile('^(?P<name>[\w-]+)-(?P<version>[\w\d\.+-]+).encap.*$')}
+ 'encap': re.compile('^(?P<name>[\w-]+)-(?P<version>[\w\d\.+-]+).encap.*$')}
ignore = ['Package']
def Match(self, metadata, data):
@@ -54,41 +59,44 @@ class PNode(Bcfg2.Server.Plugin.INode):
def __init__(self, data, pdict, parent=None):
# copy local attributes to all child nodes if no local attribute exists
- if not pdict.has_key('Package'):
+ if 'Package' not in pdict:
pdict['Package'] = set()
for child in data.getchildren():
- for attr in [key for key in data.attrib.keys() \
- if key != 'name' and not child.attrib.has_key(key)]:
+ for attr in [key for key in list(data.attrib.keys())
+ if key != 'name' and key not in child.attrib]:
try:
child.set(attr, data.get(attr))
except:
# don't fail on things like comments and other immutable elements
pass
Bcfg2.Server.Plugin.INode.__init__(self, data, pdict, parent)
- if not self.contents.has_key('Package'):
+ if 'Package' not in self.contents:
self.contents['Package'] = FuzzyDict()
for pkg in data.findall('./Package'):
- if pkg.attrib.has_key('name') and pkg.get('name') not in pdict['Package']:
+ if 'name' in pkg.attrib and pkg.get('name') not in pdict['Package']:
pdict['Package'].add(pkg.get('name'))
if pkg.get('name') != None:
self.contents['Package'][pkg.get('name')] = {}
if pkg.getchildren():
self.contents['Package'][pkg.get('name')]['__children__'] \
= pkg.getchildren()
- if pkg.attrib.has_key('simplefile'):
+ if 'simplefile' in pkg.attrib:
pkg.set('url', "%s/%s" % (pkg.get('uri'), pkg.get('simplefile')))
self.contents['Package'][pkg.get('name')].update(pkg.attrib)
else:
- if pkg.attrib.has_key('file'):
- if pkg.attrib.has_key('multiarch'):
+ if 'file' in pkg.attrib:
+ if 'multiarch' in pkg.attrib:
archs = pkg.get('multiarch').split()
srcs = pkg.get('srcs', pkg.get('multiarch')).split()
- url = ' '.join(["%s/%s" % (pkg.get('uri'), pkg.get('file') % {'src':srcs[idx], 'arch':archs[idx]})
+ url = ' '.join(["%s/%s" % (pkg.get('uri'),
+ pkg.get('file') % {'src':srcs[idx],
+ 'arch':archs[idx]})
for idx in range(len(archs))])
pkg.set('url', url)
else:
- pkg.set('url', '%s/%s' % (pkg.get('uri'), pkg.get('file')))
- if self.splitters.has_key(pkg.get('type')) and pkg.get('file') != None:
+ pkg.set('url', '%s/%s' % (pkg.get('uri'),
+ pkg.get('file')))
+ if pkg.get('type') in self.splitters and pkg.get('file') != None:
mdata = self.splitters[pkg.get('type')].match(pkg.get('file'))
if not mdata:
logger.error("Failed to match pkg %s" % pkg.get('file'))
@@ -112,10 +120,13 @@ class PNode(Bcfg2.Server.Plugin.INode):
class PkgSrc(Bcfg2.Server.Plugin.XMLSrc):
- """PkgSrc files contain a PNode hierarchy that returns matching package entries."""
+ """PkgSrc files contain a PNode hierarchy that
+ returns matching package entries.
+ """
__node__ = PNode
__cacheobj__ = FuzzyDict
+
class Pkgmgr(Bcfg2.Server.Plugin.PrioDir):
"""This is a generator that handles package assignments."""
name = 'Pkgmgr'
@@ -127,8 +138,8 @@ class Pkgmgr(Bcfg2.Server.Plugin.PrioDir):
def HandleEvent(self, event):
'''Handle events and update dispatch table'''
Bcfg2.Server.Plugin.XMLDirectoryBacked.HandleEvent(self, event)
- for src in self.entries.values():
- for itype, children in src.items.iteritems():
+ for src in list(self.entries.values()):
+ for itype, children in list(src.items.items()):
for child in children:
try:
self.Entries[itype][child] = self.BindEntry
@@ -149,7 +160,7 @@ class Pkgmgr(Bcfg2.Server.Plugin.PrioDir):
if inst.get('arch') not in arches]
def HandlesEntry(self, entry, metadata):
- return entry.tag == 'Package' and entry.get('name').split(':')[0] in self.Entries['Package'].keys()
+ return entry.tag == 'Package' and entry.get('name').split(':')[0] in list(self.Entries['Package'].keys())
def HandleEntry(self, entry, metadata):
self.BindEntry(entry, metadata)
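
The Pkgmgr.py hunks above swap dictionary idioms that Python 3 removes: has_key() becomes an in test, and iteritems()/keys() views are wrapped in list() where the code iterates or indexes them. A tiny illustration with made-up data:

    pkgs = {'bash': {'version': '4.1'}, 'vim': {'version': '7.3'}}

    # dict.has_key() and dict.iteritems() no longer exist on Python 3;
    # membership tests and list() around the view objects work on both lines.
    assert 'bash' in pkgs                    # replaces pkgs.has_key('bash')
    for name, attrs in list(pkgs.items()):   # replaces pkgs.iteritems()
        print("%s -> %s" % (name, attrs['version']))
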
diff --git a/src/lib/Server/Plugins/Probes.py b/src/lib/Server/Plugins/Probes.py
index 57dd4f698..ea2e79ccc 100644
--- a/src/lib/Server/Plugins/Probes.py
+++ b/src/lib/Server/Plugins/Probes.py
@@ -6,8 +6,10 @@ import Bcfg2.Server.Plugin
specific_probe_matcher = re.compile("(.*/)?(?P<basename>\S+)(.(?P<mode>[GH](\d\d)?)_\S+)")
probe_matcher = re.compile("(.*/)?(?P<basename>\S+)")
+
class ProbeSet(Bcfg2.Server.Plugin.EntrySet):
ignore = re.compile("^(\.#.*|.*~|\\..*\\.(tmp|sw[px])|probed\\.xml)$")
+
def __init__(self, path, fam, encoding, plugin_name):
fpattern = '[0-9A-Za-z_\-]+'
self.plugin_name = plugin_name
@@ -34,7 +36,7 @@ class ProbeSet(Bcfg2.Server.Plugin.EntrySet):
if pname not in build:
build[pname] = entry
- for (name, entry) in build.iteritems():
+ for (name, entry) in list(build.items()):
probe = lxml.etree.Element('probe')
probe.set('name', name.split('/')[-1])
probe.set('source', self.plugin_name)
@@ -47,6 +49,7 @@ class ProbeSet(Bcfg2.Server.Plugin.EntrySet):
ret.append(probe)
return ret
+
class Probes(Bcfg2.Server.Plugin.Plugin,
Bcfg2.Server.Plugin.Probing,
Bcfg2.Server.Plugin.Connector):
@@ -80,7 +83,8 @@ class Probes(Bcfg2.Server.Plugin.Plugin,
value=self.probedata[client][probe])
for group in sorted(self.cgroups[client]):
lxml.etree.SubElement(cx, "Group", name=group)
- data = lxml.etree.tostring(top, encoding='UTF-8', xml_declaration=True,
+ data = lxml.etree.tostring(top, encoding='UTF-8',
+ xml_declaration=True,
pretty_print='true')
try:
datafile = open("%s/%s" % (self.data, 'probed.xml'), 'w')
@@ -98,7 +102,7 @@ class Probes(Bcfg2.Server.Plugin.Plugin,
self.cgroups = {}
for client in data.getchildren():
self.probedata[client.get('name')] = {}
- self.cgroups[client.get('name')]=[]
+ self.cgroups[client.get('name')] = []
for pdata in client:
if (pdata.tag == 'Probe'):
self.probedata[client.get('name')][pdata.get('name')] = pdata.get('value')
@@ -118,7 +122,7 @@ class Probes(Bcfg2.Server.Plugin.Plugin,
def ReceiveDataItem(self, client, data):
"""Receive probe results pertaining to client."""
- if not self.cgroups.has_key(client.hostname):
+ if client.hostname not in self.cgroups:
self.cgroups[client.hostname] = []
if data.text == None:
self.logger.error("Got null response to probe %s from %s" % \
@@ -139,9 +143,9 @@ class Probes(Bcfg2.Server.Plugin.Plugin,
dlines.remove(line)
dtext = "\n".join(dlines)
try:
- self.probedata[client.hostname].update({data.get('name'):dtext})
+ self.probedata[client.hostname].update({data.get('name'): dtext})
except KeyError:
- self.probedata[client.hostname] = {data.get('name'):dtext}
+ self.probedata[client.hostname] = {data.get('name'): dtext}
def get_additional_groups(self, meta):
return self.cgroups.get(meta.hostname, list())
diff --git a/src/lib/Server/Plugins/Properties.py b/src/lib/Server/Plugins/Properties.py
index 2888ef1d1..dea797a10 100644
--- a/src/lib/Server/Plugins/Properties.py
+++ b/src/lib/Server/Plugins/Properties.py
@@ -4,15 +4,45 @@ import lxml.etree
import Bcfg2.Server.Plugin
-class PropertyFile(Bcfg2.Server.Plugin.XMLFileBacked):
+class PropertyFile(Bcfg2.Server.Plugin.StructFile):
"""Class for properties files."""
-
def Index(self):
- """Build data into an xml object."""
- try:
- self.data = lxml.etree.XML(self.data)
- except lxml.etree.XMLSyntaxError:
- Bcfg2.Server.Plugin.logger.error("Failed to parse %s" % self.name)
+ """Build internal data structures."""
+ if type(self.data) is not lxml.etree._Element:
+ try:
+ self.data = lxml.etree.XML(self.data)
+ except lxml.etree.XMLSyntaxError:
+ Bcfg2.Server.Plugin.logger.error("Failed to parse %s" %
+ self.name)
+
+ self.fragments = {}
+ work = {lambda x: True: self.data.getchildren()}
+ while work:
+ (predicate, worklist) = work.popitem()
+ self.fragments[predicate] = \
+ [item for item in worklist
+ if (item.tag != 'Group' and
+ item.tag != 'Client' and
+ not isinstance(item,
+ lxml.etree._Comment))]
+ for item in worklist:
+ cmd = None
+ if item.tag == 'Group':
+ if item.get('negate', 'false').lower() == 'true':
+ cmd = "lambda x:'%s' not in x.groups and predicate(x)"
+ else:
+ cmd = "lambda x:'%s' in x.groups and predicate(x)"
+ elif item.tag == 'Client':
+ if item.get('negate', 'false').lower() == 'true':
+ cmd = "lambda x:x.hostname != '%s' and predicate(x)"
+ else:
+ cmd = "lambda x:x.hostname == '%s' and predicate(x)"
+ # else, ignore item
+ if cmd is not None:
+ newpred = eval(cmd % item.get('name'),
+ {'predicate':predicate})
+ work[newpred] = item.getchildren()
+
class PropDirectoryBacked(Bcfg2.Server.Plugin.DirectoryBacked):
@@ -33,9 +63,10 @@ class Properties(Bcfg2.Server.Plugin.Plugin,
Bcfg2.Server.Plugin.Connector.__init__(self)
try:
self.store = PropDirectoryBacked(self.data, core.fam)
- except OSError, e:
+ except OSError:
+ e = sys.exc_info()[1]
Bcfg2.Server.Plugin.logger.error("Error while creating Properties "
- "store: %s %s" % (e.strerror,e.filename))
+ "store: %s %s" % (e.strerror, e.filename))
raise Bcfg2.Server.Plugin.PluginInitError
def get_additional_data(self, _):
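
The rewritten PropertyFile.Index above associates each run of plain property elements with a predicate over client metadata, composing the predicates by eval-ing lambda strings as it descends nested Group and Client elements. A functionally similar traversal can be sketched with ordinary closures, assuming (as the hunk does) that metadata objects expose .groups and .hostname:

    import lxml.etree

    def build_fragments(root):
        """Map predicate(metadata) callables to the items they guard."""
        fragments = {}
        work = {(lambda m: True): list(root)}
        while work:
            predicate, items = work.popitem()
            fragments[predicate] = [i for i in items
                                    if i.tag not in ('Group', 'Client')
                                    and not isinstance(i, lxml.etree._Comment)]
            for item in items:
                if item.tag not in ('Group', 'Client'):
                    continue
                name = item.get('name')
                negate = item.get('negate', 'false').lower() == 'true'
                if item.tag == 'Group':
                    test = lambda m, n=name: n in m.groups
                else:
                    test = lambda m, n=name: m.hostname == n
                if negate:
                    newpred = lambda m, t=test, p=predicate: not t(m) and p(m)
                else:
                    newpred = lambda m, t=test, p=predicate: t(m) and p(m)
                work[newpred] = list(item)
        return fragments

Binding name, test, and predicate as default arguments freezes their values at definition time, which is what the string-eval version above achieves by interpolating the group or client name into each lambda source.
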
diff --git a/src/lib/Server/Plugins/SGenshi.py b/src/lib/Server/Plugins/SGenshi.py
index cead06e34..efd981956 100644
--- a/src/lib/Server/Plugins/SGenshi.py
+++ b/src/lib/Server/Plugins/SGenshi.py
@@ -5,6 +5,7 @@ import genshi.input
import genshi.template
import lxml.etree
import logging
+import sys
import Bcfg2.Server.Plugin
import Bcfg2.Server.Plugins.TGenshi
@@ -23,11 +24,14 @@ class SGenshiTemplateFile(Bcfg2.Server.Plugins.TGenshi.TemplateFile):
Bcfg2.Server.Plugins.TGenshi.removecomment)
data = stream.render('xml', strip_whitespace=False)
return lxml.etree.XML(data)
- except LookupError, lerror:
+ except LookupError:
+ lerror = sys.exc_info()[1]
logger.error('Genshi lookup error: %s' % lerror)
- except genshi.template.TemplateError, terror:
+ except genshi.template.TemplateError:
+ terror = sys.exc_info()[1]
logger.error('Genshi template error: %s' % terror)
- except genshi.input.ParseError, perror:
+ except genshi.input.ParseError:
+ perror = sys.exc_info()[1]
logger.error('Genshi parse error: %s' % perror)
raise
diff --git a/src/lib/Server/Plugins/SSHbase.py b/src/lib/Server/Plugins/SSHbase.py
index 96a444875..cf0998aaa 100644
--- a/src/lib/Server/Plugins/SSHbase.py
+++ b/src/lib/Server/Plugins/SSHbase.py
@@ -5,6 +5,7 @@ import binascii
import os
import socket
import shutil
+import sys
import tempfile
from subprocess import Popen, PIPE
import Bcfg2.Server.Plugin
@@ -52,7 +53,8 @@ class SSHbase(Bcfg2.Server.Plugin.Plugin,
try:
Bcfg2.Server.Plugin.DirectoryBacked.__init__(self, self.data,
self.core.fam)
- except OSError, ioerr:
+ except OSError:
+ ioerr = sys.exc_info()[1]
self.logger.error("Failed to load SSHbase repository from %s" \
% (self.data))
self.logger.error(ioerr)
@@ -72,8 +74,8 @@ class SSHbase(Bcfg2.Server.Plugin.Plugin,
def get_skn(self):
"""Build memory cache of the ssh known hosts file."""
if not self.__skn:
- self.__skn = "\n".join([value.data for key, value in \
- self.entries.iteritems() if \
+ self.__skn = "\n".join([str(value.data) for key, value in \
+ list(self.entries.items()) if \
key.endswith('.static')])
names = dict()
# if no metadata is registered yet, defer
@@ -103,7 +105,7 @@ class SSHbase(Bcfg2.Server.Plugin.Plugin,
continue
names[cmeta.hostname] = sorted(names[cmeta.hostname])
# now we have our name cache
- pubkeys = [pubk for pubk in self.entries.keys() \
+ pubkeys = [pubk for pubk in list(self.entries.keys()) \
if pubk.find('.pub.H_') != -1]
pubkeys.sort()
badnames = set()
@@ -131,7 +133,7 @@ class SSHbase(Bcfg2.Server.Plugin.Plugin,
if event and event.filename.endswith('.static'):
self.skn = False
if not self.__skn:
- if (len(self.entries.keys())) >= (len(os.listdir(self.data))-1):
+ if (len(list(self.entries.keys()))) >= (len(os.listdir(self.data)) - 1):
_ = self.skn
def HandlesEntry(self, entry, _):
@@ -205,26 +207,26 @@ class SSHbase(Bcfg2.Server.Plugin.Plugin,
for hostkey in hostkeys:
entry.text += "localhost,localhost.localdomain,127.0.0.1 %s" % (
self.entries[hostkey].data)
- permdata = {'owner':'root',
- 'group':'root',
- 'type':'file',
- 'perms':'0644'}
+ permdata = {'owner': 'root',
+ 'group': 'root',
+ 'type': 'file',
+ 'perms': '0644'}
[entry.attrib.__setitem__(key, permdata[key]) for key in permdata]
def build_hk(self, entry, metadata):
"""This binds host key data into entries."""
client = metadata.hostname
filename = "%s.H_%s" % (entry.get('name').split('/')[-1], client)
- if filename not in self.entries.keys():
+ if filename not in list(self.entries.keys()):
self.GenerateHostKeys(client)
if not filename in self.entries:
self.logger.error("%s still not registered" % filename)
raise Bcfg2.Server.Plugin.PluginExecutionError
keydata = self.entries[filename].data
- permdata = {'owner':'root',
- 'group':'root',
- 'type':'file',
- 'perms':'0600'}
+ permdata = {'owner': 'root',
+ 'group': 'root',
+ 'type': 'file',
+ 'perms': '0600'}
if entry.get('name')[-4:] == '.pub':
permdata['perms'] = '0644'
[entry.attrib.__setitem__(key, permdata[key]) for key in permdata]
@@ -245,7 +247,7 @@ class SSHbase(Bcfg2.Server.Plugin.Plugin,
else:
keytype = 'rsa1'
- if hostkey not in self.entries.keys():
+ if hostkey not in list(self.entries.keys()):
fileloc = "%s/%s" % (self.data, hostkey)
publoc = self.data + '/' + ".".join([hostkey.split('.')[0],
'pub',
@@ -257,8 +259,8 @@ class SSHbase(Bcfg2.Server.Plugin.Plugin,
shutil.copy(temploc, fileloc)
shutil.copy("%s.pub" % temploc, publoc)
self.AddEntry(hostkey)
- self.AddEntry(".".join([hostkey.split('.')[0]]+['pub', "H_%s" \
- % client]))
+ self.AddEntry(".".join([hostkey.split('.')[0]] + ['pub', "H_%s" \
+ % client]))
try:
os.unlink(temploc)
os.unlink("%s.pub" % temploc)
@@ -277,7 +279,7 @@ class SSHbase(Bcfg2.Server.Plugin.Plugin,
try:
open(filename, 'w').write(entry['text'])
if log:
- print "Wrote file %s" % filename
+ print("Wrote file %s" % filename)
except KeyError:
self.logger.error("Failed to pull %s. This file does not currently "
"exist on the client" % entry.get('name'))
diff --git a/src/lib/Server/Plugins/SSLCA.py b/src/lib/Server/Plugins/SSLCA.py
index 1c9e1b59d..baaa14ba9 100644
--- a/src/lib/Server/Plugins/SSLCA.py
+++ b/src/lib/Server/Plugins/SSLCA.py
@@ -5,7 +5,8 @@ import posixpath
import tempfile
import os
from subprocess import Popen, PIPE, STDOUT
-from ConfigParser import ConfigParser
+# Compatibility import
+from Bcfg2.Bcfg2Py3k import ConfigParser
class SSLCA(Bcfg2.Server.Plugin.GroupSpool):
@@ -41,14 +42,14 @@ class SSLCA(Bcfg2.Server.Plugin.GroupSpool):
if event.filename.endswith('.xml'):
if action in ['exists', 'created', 'changed']:
if event.filename.endswith('key.xml'):
- key_spec = dict(lxml.etree.parse(epath).find('Key').items())
+ key_spec = dict(list(lxml.etree.parse(epath).find('Key').items()))
self.key_specs[ident] = {
'bits': key_spec.get('bits', 2048),
'type': key_spec.get('type', 'rsa')
}
self.Entries['Path'][ident] = self.get_key
elif event.filename.endswith('cert.xml'):
- cert_spec = dict(lxml.etree.parse(epath).find('Cert').items())
+ cert_spec = dict(list(lxml.etree.parse(epath).find('Cert').items()))
ca = cert_spec.get('ca', 'default')
self.cert_specs[ident] = {
'ca': ca,
@@ -64,7 +65,7 @@ class SSLCA(Bcfg2.Server.Plugin.GroupSpool):
}
cp = ConfigParser()
cp.read(self.core.cfile)
- self.CAs[ca] = dict(cp.items('sslca_'+ca))
+ self.CAs[ca] = dict(cp.items('sslca_' + ca))
self.Entries['Path'][ident] = self.get_cert
if action == 'deleted':
if ident in self.Entries['Path']:
@@ -99,12 +100,14 @@ class SSLCA(Bcfg2.Server.Plugin.GroupSpool):
# check if we already have a hostfile, or need to generate a new key
# TODO: verify key fits the specs
path = entry.get('name')
- filename = "".join([path, '/', path.rsplit('/', 1)[1], '.H_', metadata.hostname])
- if filename not in self.entries.keys():
+ filename = "".join([path, '/', path.rsplit('/', 1)[1],
+ '.H_', metadata.hostname])
+ if filename not in list(self.entries.keys()):
key = self.build_key(filename, entry, metadata)
open(self.data + filename, 'w').write(key)
entry.text = key
- self.entries[filename] = self.__child__("%s%s" % (self.data, filename))
+ self.entries[filename] = self.__child__("%s%s" % (self.data,
+ filename))
self.entries[filename].HandleEvent()
else:
entry.text = self.entries[filename].data
@@ -135,23 +138,28 @@ class SSLCA(Bcfg2.Server.Plugin.GroupSpool):
[entry.attrib.__setitem__(key, permdata[key]) for key in permdata]
path = entry.get('name')
- filename = "".join([path, '/', path.rsplit('/', 1)[1], '.H_', metadata.hostname])
+ filename = "".join([path, '/', path.rsplit('/', 1)[1],
+ '.H_', metadata.hostname])
# first - ensure we have a key to work with
key = self.cert_specs[entry.get('name')].get('key')
- key_filename = "".join([key, '/', key.rsplit('/', 1)[1], '.H_', metadata.hostname])
+ key_filename = "".join([key, '/', key.rsplit('/', 1)[1],
+ '.H_', metadata.hostname])
if key_filename not in self.entries:
e = lxml.etree.Element('Path')
e.attrib['name'] = key
self.core.Bind(e, metadata)
# check if we have a valid hostfile
- if filename in self.entries.keys() and self.verify_cert(filename, key_filename, entry):
+ if filename in list(self.entries.keys()) and self.verify_cert(filename,
+ key_filename,
+ entry):
entry.text = self.entries[filename].data
else:
cert = self.build_cert(key_filename, entry, metadata)
open(self.data + filename, 'w').write(cert)
- self.entries[filename] = self.__child__("%s%s" % (self.data, filename))
+ self.entries[filename] = self.__child__("%s%s" % (self.data,
+ filename))
self.entries[filename].HandleEvent()
entry.text = cert
@@ -188,7 +196,6 @@ class SSLCA(Bcfg2.Server.Plugin.GroupSpool):
return True
return False
-
def build_cert(self, key_filename, entry, metadata):
"""
creates a new certificate according to the specification
@@ -200,9 +207,14 @@ class SSLCA(Bcfg2.Server.Plugin.GroupSpool):
days = self.cert_specs[entry.get('name')]['days']
passphrase = self.CAs[ca].get('passphrase')
if passphrase:
- cmd = "openssl ca -config %s -in %s -days %s -batch -passin pass:%s" % (ca_config, req, days, passphrase)
+ cmd = "openssl ca -config %s -in %s -days %s -batch -passin pass:%s" % (ca_config,
+ req,
+ days,
+ passphrase)
else:
- cmd = "openssl ca -config %s -in %s -days %s -batch" % (ca_config, req, days)
+ cmd = "openssl ca -config %s -in %s -days %s -batch" % (ca_config,
+ req,
+ days)
cert = Popen(cmd, shell=True, stdout=PIPE).stdout.read()
try:
os.unlink(req_config)
@@ -234,7 +246,7 @@ class SSLCA(Bcfg2.Server.Plugin.GroupSpool):
},
'alt_names': {}
}
- for section in defaults.keys():
+ for section in list(defaults.keys()):
cp.add_section(section)
for key in defaults[section]:
cp.set(section, key, defaults[section][key])
@@ -242,7 +254,7 @@ class SSLCA(Bcfg2.Server.Plugin.GroupSpool):
altnames = list(metadata.aliases)
altnames.append(metadata.hostname)
for altname in altnames:
- cp.set('alt_names', 'DNS.'+str(x), altname)
+ cp.set('alt_names', 'DNS.' + str(x), altname)
x += 1
for item in ['C', 'L', 'ST', 'O', 'OU', 'emailAddress']:
if self.cert_specs[entry.get('name')][item]:
@@ -259,6 +271,9 @@ class SSLCA(Bcfg2.Server.Plugin.GroupSpool):
req = tempfile.mkstemp()[1]
days = self.cert_specs[entry.get('name')]['days']
key = self.data + key_filename
- cmd = "openssl req -new -config %s -days %s -key %s -text -out %s" % (req_config, days, key, req)
+ cmd = "openssl req -new -config %s -days %s -key %s -text -out %s" % (req_config,
+ days,
+ key,
+ req)
res = Popen(cmd, shell=True, stdout=PIPE).stdout.read()
return req
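
The certificate and request builders above assemble openssl command lines by string interpolation and run them with shell=True. The same invocation can be sketched as an argument list, which avoids quoting concerns around paths and the CA passphrase; this is an illustrative alternative, not the code the plugin ships:

    from subprocess import Popen, PIPE

    def sign_request(ca_config, req, days, passphrase=None):
        # Same "openssl ca" flags as the hunk above, passed as an argv list.
        cmd = ['openssl', 'ca', '-config', ca_config, '-in', req,
               '-days', str(days), '-batch']
        if passphrase:
            cmd += ['-passin', 'pass:%s' % passphrase]
        return Popen(cmd, stdout=PIPE).stdout.read()
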
diff --git a/src/lib/Server/Plugins/Snapshots.py b/src/lib/Server/Plugins/Snapshots.py
index a4489ae95..8b6bad574 100644
--- a/src/lib/Server/Plugins/Snapshots.py
+++ b/src/lib/Server/Plugins/Snapshots.py
@@ -8,10 +8,13 @@ import Bcfg2.Server.Plugin
import Bcfg2.Server.Snapshots
import Bcfg2.Logger
from Bcfg2.Server.Snapshots.model import Snapshot
-import Queue
+import sys
import time
import threading
+# Compatibility import
+from Bcfg2.Bcfg2Py3k import Queue
+
logger = logging.getLogger('Snapshots')
ftypes = ['ConfigFile', 'SymLink', 'Directory']
@@ -24,13 +27,21 @@ datafields = {
'SymLink': ['to'],
}
+
+def u_str(string):
+ if sys.hexversion >= 0x03000000:
+ return string
+ else:
+ return unicode(string)
+
+
def build_snap_ent(entry):
basefields = []
if entry.tag in ['Package', 'Service']:
basefields += ['type']
- desired = dict([(key, unicode(entry.get(key))) for key in basefields])
- state = dict([(key, unicode(entry.get(key))) for key in basefields])
- desired.update([(key, unicode(entry.get(key))) for key in \
+ desired = dict([(key, u_str(entry.get(key))) for key in basefields])
+ state = dict([(key, u_str(entry.get(key))) for key in basefields])
+ desired.update([(key, u_str(entry.get(key))) for key in \
datafields[entry.tag]])
if entry.tag == 'ConfigFile' or \
((entry.tag == 'Path') and (entry.get('type') == 'file')):
@@ -38,19 +49,19 @@ def build_snap_ent(entry):
desired['contents'] = None
else:
if entry.get('encoding', 'ascii') == 'ascii':
- desired['contents'] = unicode(entry.text)
+ desired['contents'] = u_str(entry.text)
else:
- desired['contents'] = unicode(binascii.a2b_base64(entry.text))
+ desired['contents'] = u_str(binascii.a2b_base64(entry.text))
if 'current_bfile' in entry.attrib:
- state['contents'] = unicode(binascii.a2b_base64( \
+ state['contents'] = u_str(binascii.a2b_base64( \
entry.get('current_bfile')))
elif 'current_bdiff' in entry.attrib:
diff = binascii.a2b_base64(entry.get('current_bdiff'))
- state['contents'] = unicode( \
+ state['contents'] = u_str( \
'\n'.join(difflib.restore(diff.split('\n'), 1)))
- state.update([(key, unicode(entry.get('current_' + key, entry.get(key)))) \
+ state.update([(key, u_str(entry.get('current_' + key, entry.get(key)))) \
for key in datafields[entry.tag]])
if entry.tag in ['ConfigFile', 'Path'] and entry.get('exists', 'true') == 'false':
state = None
@@ -66,7 +77,7 @@ class Snapshots(Bcfg2.Server.Plugin.Statistics,
Bcfg2.Server.Plugin.Plugin.__init__(self, core, datastore)
Bcfg2.Server.Plugin.Statistics.__init__(self)
self.session = Bcfg2.Server.Snapshots.setup_session(core.cfile)
- self.work_queue = Queue.Queue()
+ self.work_queue = Queue()
self.loader = threading.Thread(target=self.load_snapshot)
self.loader.start()
@@ -92,9 +103,9 @@ class Snapshots(Bcfg2.Server.Plugin.Statistics,
bad = []
state = xdata.find('.//Statistics')
correct = state.get('state') == 'clean'
- revision = unicode(state.get('revision', '-1'))
+ revision = u_str(state.get('revision', '-1'))
for entry in state.find('.//Bad'):
- data = [False, False, unicode(entry.get('name'))] \
+ data = [False, False, u_str(entry.get('name'))] \
+ build_snap_ent(entry)
if entry.tag in ftypes:
etag = 'Path'
@@ -107,24 +118,24 @@ class Snapshots(Bcfg2.Server.Plugin.Statistics,
else:
etag = entry.tag
if entry.get('name') in entries[etag]:
- data = [True, False, unicode(entry.get('name'))] + \
+ data = [True, False, u_str(entry.get('name'))] + \
build_snap_ent(entry)
else:
- data = [True, False, unicode(entry.get('name'))] + \
+ data = [True, False, u_str(entry.get('name'))] + \
build_snap_ent(entry)
for entry in state.find('.//Extra'):
if entry.tag in datafields:
data = build_snap_ent(entry)[1]
- ename = unicode(entry.get('name'))
+ ename = u_str(entry.get('name'))
data['name'] = ename
extra[entry.tag][ename] = data
else:
- print "extra", entry.tag, entry.get('name')
+ print("extra", entry.tag, entry.get('name'))
t2 = time.time()
snap = Snapshot.from_data(self.session, correct, revision,
metadata, entries, extra)
self.session.add(snap)
self.session.commit()
t3 = time.time()
- logger.info("Snapshot storage took %fs" % (t3-t2))
+ logger.info("Snapshot storage took %fs" % (t3 - t2))
return True
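
The u_str helper added above gives Snapshots a single text conversion that is a no-op on Python 3 and a unicode() promotion on Python 2. A short usage sketch with invented entry attributes:

    import sys

    def u_str(string):
        # Python 3 strings are already text; Python 2 byte strings are promoted.
        if sys.hexversion >= 0x03000000:
            return string
        return unicode(string)

    entry_attrs = {'owner': 'root', 'group': 'root', 'perms': '0644'}
    desired = dict((key, u_str(val)) for key, val in entry_attrs.items())
    print(desired)
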
diff --git a/src/lib/Server/Plugins/Statistics.py b/src/lib/Server/Plugins/Statistics.py
index c7fa0e534..f4f4c7175 100644
--- a/src/lib/Server/Plugins/Statistics.py
+++ b/src/lib/Server/Plugins/Statistics.py
@@ -8,7 +8,6 @@ import logging
from lxml.etree import XML, SubElement, Element, XMLSyntaxError
import lxml.etree
import os
-import Queue
from time import asctime, localtime, time, strptime, mktime
import threading
@@ -33,7 +32,8 @@ class StatisticsStore(object):
or force:
try:
fout = open(self.filename + '.new', 'w')
- except IOError, ioerr:
+ except IOError:
+ ioerr = sys.exc_info()[1]
self.logger.error("Failed to open %s for writing: %s" % (self.filename + '.new', ioerr))
else:
fout.write(lxml.etree.tostring(self.element, encoding='UTF-8', xml_declaration=True))
diff --git a/src/lib/Server/Plugins/Svn2.py b/src/lib/Server/Plugins/Svn2.py
index 875e9e6a6..35f555294 100644
--- a/src/lib/Server/Plugins/Svn2.py
+++ b/src/lib/Server/Plugins/Svn2.py
@@ -1,4 +1,3 @@
-import os
try:
import pysvn
missing = False
@@ -7,7 +6,7 @@ except:
import Bcfg2.Server.Plugin
class Svn2(Bcfg2.Server.Plugin.Plugin,
- Bcfg2.Server.Plugin.Version):
+ Bcfg2.Server.Plugin.Version):
"""Svn is a version plugin for dealing with Bcfg2 repos."""
name = 'Svn2'
__version__ = '$Id$'
@@ -36,7 +35,7 @@ class Svn2(Bcfg2.Server.Plugin.Plugin,
if not self.revision:
raise Bcfg2.Server.Plugin.PluginInitError
- self.logger.debug("Initialized svn plugin with svn root %s at revision %s" \
+ self.logger.debug("Initialized svn plugin with svn root %s at revision %s"
% (self.svn_root, revision))
def get_revision(self):
@@ -63,25 +62,50 @@ class Svn2(Bcfg2.Server.Plugin.Plugin,
#FIXME - look for conflicts?
- for file in file_list:
- stat = self.client.status(file)
+ for fname in file_list:
+ stat = self.client.status(fname)
self.client.add([f.path for f in stat \
if f.text_status == pysvn.wc_status_kind.unversioned])
try:
self.revision = self.client.checkin([self.datastore], comment,
recurse=True)
self.revision = self.client.update(self.datastore, recurse=True)[0]
- self.logger.info("Svn2: Commited changes. At %s" % self.revision.number)
- except:
- self.logger.error("Svn2: Failed to commit changes", exc_info=1)
+ self.logger.info("Svn2: Commited changes. At %s" %
+ self.revision.number)
+ except Exception, err:
+ # try to be smart about the error we got back
+ details = None
+ if "callback_ssl_server_trust_prompt" in err.message:
+ details = "SVN server certificate is not trusted"
+ elif "callback_get_login" in err.message:
+ details = "SVN credentials not cached"
+
+ if details is None:
+ self.logger.error("Svn2: Failed to commit changes",
+ exc_info=1)
+ else:
+ self.logger.error("Svn2: Failed to commit changes: %s" %
+ details)
def Update(self):
'''Svn2.Update() => True|False\nUpdate svn working copy\n'''
try:
old_revision = self.revision.number
self.revision = self.client.update(self.datastore, recurse=True)[0]
- except:
- self.logger.error("Svn2: Failed to update server repository", exc_info=1)
+ except Exception, err:
+ # try to be smart about the error we got back
+ details = None
+ if "callback_ssl_server_trust_prompt" in err.message:
+ details = "SVN server certificate is not trusted"
+ elif "callback_get_login" in err.message:
+ details = "SVN credentials not cached"
+
+ if details is None:
+ self.logger.error("Svn2: Failed to update server repository",
+ exc_info=1)
+ else:
+ self.logger.error("Svn2: Failed to update server repository: %s" %
+ details)
return False
if old_revision == self.revision.number:
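
Svn2's new commit and update handlers branch on the text of pysvn exceptions to print friendlier diagnostics, but they keep the Python 2-only "except Exception, err" syntax. A version-agnostic sketch of the same classification, meant to be called from inside a bare except block:

    import sys

    def describe_svn_failure():
        """Translate the pysvn callback hints checked above into plain English."""
        err = sys.exc_info()[1]
        # str(err) rather than err.message keeps this working where the
        # exception object has no .message attribute.
        msg = str(err)
        if "callback_ssl_server_trust_prompt" in msg:
            return "SVN server certificate is not trusted"
        if "callback_get_login" in msg:
            return "SVN credentials not cached"
        return None

A caller would invoke describe_svn_failure() inside its except block and fall back to exc_info logging when it returns None, mirroring the handlers above.
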
diff --git a/src/lib/Server/Plugins/TCheetah.py b/src/lib/Server/Plugins/TCheetah.py
index d40f4baf3..49be88881 100644
--- a/src/lib/Server/Plugins/TCheetah.py
+++ b/src/lib/Server/Plugins/TCheetah.py
@@ -6,6 +6,9 @@ import logging
import sys
import traceback
import Bcfg2.Server.Plugin
+# py3k compatibility
+if sys.hexversion >= 0x03000000:
+ unicode = str
logger = logging.getLogger('Bcfg2.Plugins.TCheetah')
@@ -36,7 +39,8 @@ class TemplateFile:
self.template = Cheetah.Template.Template(open(self.name).read(),
compilerSettings=s,
searchList=self.searchlist)
- except Cheetah.Parser.ParseError, perror:
+ except Cheetah.Parser.ParseError:
+ perror = sys.exc_info()[1]
logger.error("Cheetah parse error for file %s" % (self.name))
logger.error(perror.report())
@@ -56,7 +60,7 @@ class TemplateFile:
entry.text = self.template
else:
if entry.get('encoding') == 'base64':
- # take care of case where file needs base64 encoding
+ # take care of case where file needs base64 encoding
entry.text = binascii.b2a_base64(self.template)
else:
entry.text = unicode(str(self.template), self.encoding)
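
TCheetah here (and TGenshi below) keep their two-argument unicode(data, encoding) calls working on Python 3 by aliasing the missing builtin to str; a minimal standalone illustration:

    import sys

    # Python 3 drops the unicode() builtin, but str(bytes, encoding) performs
    # the same decode, so a module-level alias preserves the existing calls.
    if sys.hexversion >= 0x03000000:
        unicode = str

    print(unicode(b"template output", "utf-8"))
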
diff --git a/src/lib/Server/Plugins/TGenshi.py b/src/lib/Server/Plugins/TGenshi.py
index 2a12672cc..bc5e00400 100644
--- a/src/lib/Server/Plugins/TGenshi.py
+++ b/src/lib/Server/Plugins/TGenshi.py
@@ -3,7 +3,11 @@ __revision__ = '$Revision$'
import binascii
import logging
+import sys
import Bcfg2.Server.Plugin
+# py3k compatibility
+if sys.hexversion >= 0x03000000:
+ unicode = str
logger = logging.getLogger('Bcfg2.Plugins.TGenshi')
@@ -63,11 +67,14 @@ class TemplateFile:
try:
self.template = loader.load(self.name, cls=self.template_cls,
encoding=self.encoding)
- except LookupError, lerror:
+ except LookupError:
+ lerror = sys.exc_info()[1]
logger.error('Genshi lookup error: %s' % lerror)
- except TemplateError, terror:
+ except TemplateError:
+ terror = sys.exc_info()[1]
logger.error('Genshi template error: %s' % terror)
- except genshi.input.ParseError, perror:
+ except genshi.input.ParseError:
+ perror = sys.exc_info()[1]
logger.error('Genshi parse error: %s' % perror)
def bind_entry(self, entry, metadata):
@@ -92,7 +99,7 @@ class TemplateFile:
entry.text = textdata
else:
if entry.get('encoding') == 'base64':
- # take care of case where file needs base64 encoding
+ # take care of case where file needs base64 encoding
entry.text = binascii.b2a_base64(textdata)
else:
entry.text = unicode(textdata, self.encoding)
@@ -107,10 +114,12 @@ class TemplateFile:
entry.text = unicode(xmldata, self.encoding)
if entry.text == '':
entry.set('empty', 'true')
- except TemplateError, terror:
+ except TemplateError:
+ terror = sys.exc_info()[1]
logger.error('Genshi template error: %s' % terror)
raise Bcfg2.Server.Plugin.PluginExecutionError
- except AttributeError, err:
+ except AttributeError:
+ err = sys.exc_info()[1]
logger.error('Genshi template loading error: %s' % err)
raise Bcfg2.Server.Plugin.PluginExecutionError