-rw-r--r-- | src/lib/Server/Plugin.py | 102
-rw-r--r-- | src/lib/Server/Plugins/Account.py | 38
-rw-r--r-- | src/lib/Server/Plugins/Base.py | 5
-rw-r--r-- | src/lib/Server/Plugins/Bundler.py | 2
-rw-r--r-- | src/lib/Server/Plugins/Cfg.py | 8
-rw-r--r-- | src/lib/Server/Plugins/Deps.py | 21
-rw-r--r-- | src/lib/Server/Plugins/Editor.py | 20
-rw-r--r-- | src/lib/Server/Plugins/GroupPatterns.py | 16
-rw-r--r-- | src/lib/Server/Plugins/Metadata.py | 73
-rw-r--r-- | src/lib/Server/Plugins/NagiosGen.py | 53
-rw-r--r-- | src/lib/Server/Plugins/Ohai.py | 8
-rw-r--r-- | src/lib/Server/Plugins/Packages.py | 35
-rw-r--r-- | src/lib/Server/Plugins/Pkgmgr.py | 49
-rw-r--r-- | src/lib/Server/Plugins/Probes.py | 16
-rw-r--r-- | src/lib/Server/Plugins/SSHbase.py | 32
-rw-r--r-- | src/lib/Server/Plugins/SSLCA.py | 46
-rw-r--r-- | src/lib/Server/Plugins/Snapshots.py | 5
17 files changed, 328 insertions, 201 deletions
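Most of the hunks below prepare the plugins for Python 3: the Python 2-only iteritems()/has_key() calls are replaced with items()/in (wrapped in list() where a real list is needed), print statements become print() calls, and dict literals gain PEP8 spacing. A minimal standalone sketch of the iteration and print pattern, using a hypothetical entries dict rather than the Bcfg2 source:

    # Hypothetical stand-in for a plugin's self.entries mapping.
    entries = {'/etc/passwd': 'pw data', '/etc/group': 'group data'}

    # Python 2-only idioms removed by this commit:
    #     for name, data in entries.iteritems(): ...
    #     if entries.has_key('/etc/passwd'): ...
    #     print "found %s" % name
    #
    # Forms that work on both Python 2 and Python 3, as used in the hunks:
    for name, data in list(entries.items()):
        print("found %s (%d bytes)" % (name, len(data)))

    if '/etc/passwd' in entries:   # replaces entries.has_key(...)
        print("passwd entry present")
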
diff --git a/src/lib/Server/Plugin.py b/src/lib/Server/Plugin.py index 73d054409..5a6f3281b 100644 --- a/src/lib/Server/Plugin.py +++ b/src/lib/Server/Plugin.py @@ -38,14 +38,17 @@ info_regex = re.compile( \ 'paranoid:(\s)*(?P<paranoid>\S+)|' + 'perms:(\s)*(?P<perms>\w+)|') + class PluginInitError(Exception): """Error raised in cases of Plugin initialization errors.""" pass + class PluginExecutionError(Exception): """Error raised in case of Plugin execution errors.""" pass + class Plugin(object): """This is the base class for all Bcfg2 Server plugins. Several attributes must be defined in the subclass: @@ -90,6 +93,7 @@ class Plugin(object): def shutdown(self): self.running = False + class Generator(object): """Generator plugins contribute to literal client configurations.""" def HandlesEntry(self, entry, metadata): @@ -100,20 +104,24 @@ class Generator(object): """This is the slow-path handler for configuration entry binding.""" raise PluginExecutionError + class Structure(object): """Structure Plugins contribute to abstract client configurations.""" def BuildStructures(self, metadata): """Return a list of abstract goal structures for client.""" raise PluginExecutionError + class Metadata(object): """Signal metadata capabilities for this plugin""" def add_client(self, client_name, attribs): """Add client.""" pass + def remove_client(self, client_name): """Remove client.""" pass + def viz(self, hosts, bundles, key, colors): """Create viz str for viz admin mode.""" pass @@ -124,6 +132,7 @@ class Metadata(object): def merge_additional_data(self, imd, source, groups, data): raise PluginExecutionError + class Connector(object): """Connector Plugins augment client metadata instances.""" def get_additional_groups(self, metadata): @@ -134,6 +143,7 @@ class Connector(object): """Determine additional data for metadata instances.""" return dict() + class Probing(object): """Signal probe capability for this plugin.""" def GetProbes(self, _): @@ -144,11 +154,13 @@ class Probing(object): """Receive probe results pertaining to client.""" pass + class Statistics(object): """Signal statistics handling capability.""" def process_statistics(self, client, xdata): pass + class ThreadedStatistics(Statistics, threading.Thread): """Threaded statistics handling capability.""" @@ -169,7 +181,7 @@ class ThreadedStatistics(Statistics, while not self.work_queue.empty(): (metadata, data) = self.work_queue.get_nowait() try: - pending_data.append( ( metadata.hostname, lxml.etree.tostring(data) ) ) + pending_data.append((metadata.hostname, lxml.etree.tostring(data))) except: self.logger.warning("Dropping interaction for %s" % metadata.hostname) except Queue.Empty: @@ -202,7 +214,7 @@ class ThreadedStatistics(Statistics, try: while True: try: - metadata = self.core.build_metadata(pmetadata) + metadata = self.core.build_metadata(pmetadata) break except Bcfg2.Server.Plugins.Metadata.MetadataRuntimeError: pass @@ -211,7 +223,7 @@ class ThreadedStatistics(Statistics, if self.terminate.isSet(): return False - self.work_queue.put_nowait( (metadata, lxml.etree.fromstring(pdata)) ) + self.work_queue.put_nowait((metadata, lxml.etree.fromstring(pdata))) except Queue.Full: self.logger.warning("Queue.Full: Failed to load queue data") break @@ -255,6 +267,7 @@ class ThreadedStatistics(Statistics, """Handle stats here.""" pass + class PullSource(object): def GetExtra(self, client): return [] @@ -262,6 +275,7 @@ class PullSource(object): def GetCurrentEntry(self, client, e_type, e_name): raise PluginExecutionError + class 
PullTarget(object): def AcceptChoices(self, entry, metadata): raise PluginExecutionError @@ -271,31 +285,38 @@ class PullTarget(object): of bcfg2-admin pull.""" raise PluginExecutionError + class Decision(object): """Signal decision handling capability.""" def GetDecisions(self, metadata, mode): return [] + class ValidationError(Exception): pass + class StructureValidator(object): """Validate/modify goal structures.""" def validate_structures(self, metadata, structures): - raise ValidationError, "not implemented" + raise ValidationError("not implemented") + class GoalValidator(object): """Validate/modify configuration goals.""" def validate_goals(self, metadata, goals): - raise ValidationError, "not implemented" + raise ValidationError("not implemented") + class Version(object): """Interact with various version control systems.""" def get_revision(self): return [] + def commit_data(self, file_list, comment=None): pass + # the rest of the file contains classes for coherent file caching class FileBacked(object): @@ -324,6 +345,7 @@ class FileBacked(object): """Update local data structures based on current file state""" pass + class DirectoryBacked(object): """This object is a coherent cache for a filesystem hierarchy of files.""" __child__ = FileBacked @@ -341,7 +363,7 @@ class DirectoryBacked(object): return self.entries[key] def __iter__(self): - return self.entries.iteritems() + return iter(self.entries.items()) def AddEntry(self, name): """Add new entry to data structures upon file creation.""" @@ -380,9 +402,10 @@ class DirectoryBacked(object): elif action in ['endExist']: pass else: - print "Got unknown event %s %s %s" % (event.requestID, + print("Got unknown event %s %s %s" % (event.requestID, event.code2str(), - event.filename) + event.filename)) + class XMLFileBacked(FileBacked): """ @@ -401,7 +424,7 @@ class XMLFileBacked(FileBacked): try: xdata = XML(self.data) except XMLSyntaxError: - logger.error("Failed to parse %s"%(self.name)) + logger.error("Failed to parse %s" % (self.name)) return self.label = xdata.attrib[self.__identifier__] self.entries = xdata.getchildren() @@ -409,12 +432,14 @@ class XMLFileBacked(FileBacked): def __iter__(self): return iter(self.entries) + class SingleXMLFileBacked(XMLFileBacked): """This object is a coherent cache for an independent XML file.""" def __init__(self, filename, fam): XMLFileBacked.__init__(self, filename) fam.AddMonitor(filename, self) + class StructFile(XMLFileBacked): """This file contains a set of structure file formatting logic.""" def __init__(self, name): @@ -429,7 +454,7 @@ class StructFile(XMLFileBacked): logger.error("Failed to parse file %s" % self.name) return self.fragments = {} - work = {lambda x:True: xdata.getchildren()} + work = {lambda x: True: xdata.getchildren()} while work: (predicate, worklist) = work.popitem() self.fragments[predicate] = [item for item in worklist if item.tag != 'Group' @@ -441,26 +466,28 @@ class StructFile(XMLFileBacked): else: cmd = "lambda x:'%s' in x.groups and predicate(x)" - newpred = eval(cmd % (group.get('name')), {'predicate':predicate}) + newpred = eval(cmd % (group.get('name')), {'predicate': predicate}) work[newpred] = group.getchildren() def Match(self, metadata): """Return matching fragments of independent.""" - matching = [frag for (pred, frag) in self.fragments.iteritems() if pred(metadata)] + matching = [frag for (pred, frag) in list(self.fragments.items()) + if pred(metadata)] if matching: - return reduce(lambda x, y:x+y, matching) + return reduce(lambda x, y: x + y, matching) 
logger.error("File %s got null match" % (self.name)) return [] + class INode: """ LNodes provide lists of things available at a particular group intersection. """ - raw = {'Client':"lambda x:'%s' == x.hostname and predicate(x)", - 'Group':"lambda x:'%s' in x.groups and predicate(x)"} - nraw = {'Client':"lambda x:'%s' != x.hostname and predicate(x)", - 'Group':"lambda x:'%s' not in x.groups and predicate(x)"} + raw = {'Client': "lambda x:'%s' == x.hostname and predicate(x)", + 'Group': "lambda x:'%s' in x.groups and predicate(x)"} + nraw = {'Client': "lambda x:'%s' != x.hostname and predicate(x)", + 'Group': "lambda x:'%s' not in x.groups and predicate(x)"} containers = ['Group', 'Client'] ignore = [] @@ -468,16 +495,16 @@ class INode: self.data = data self.contents = {} if parent == None: - self.predicate = lambda x:True + self.predicate = lambda x: True else: predicate = parent.predicate if data.get('negate', 'false') in ['true', 'True']: psrc = self.nraw else: psrc = self.raw - if data.tag in psrc.keys(): + if data.tag in list(psrc.keys()): self.predicate = eval(psrc[data.tag] % (data.get('name')), - {'predicate':predicate}) + {'predicate': predicate}) else: raise Exception mytype = self.__class__ @@ -491,7 +518,7 @@ class INode: try: self.contents[item.tag][item.get('name')] = item.attrib except KeyError: - self.contents[item.tag] = {item.get('name'):item.attrib} + self.contents[item.tag] = {item.get('name'): item.attrib} if item.text: self.contents[item.tag]['__text__'] = item.text try: @@ -511,6 +538,7 @@ class INode: for child in self.children: child.Match(metadata, data) + class XMLSrc(XMLFileBacked): """XMLSrc files contain a LNode hierarchy that returns matching entries.""" __node__ = INode @@ -557,10 +585,12 @@ class XMLSrc(XMLFileBacked): self.pnode.Match(metadata, cache[1]) self.cache = cache + class XMLDirectoryBacked(DirectoryBacked): """Directorybacked for *.xml.""" patterns = re.compile('.*\.xml') + class PrioDir(Plugin, Generator, XMLDirectoryBacked): """This is a generator that handles package assignments.""" name = 'PrioDir' @@ -579,8 +609,8 @@ class PrioDir(Plugin, Generator, XMLDirectoryBacked): """Handle events and update dispatch table.""" XMLDirectoryBacked.HandleEvent(self, event) self.Entries = {} - for src in self.entries.values(): - for itype, children in src.items.iteritems(): + for src in list(self.entries.values()): + for itype, children in list(src.items.items()): for child in children: try: self.Entries[itype][child] = self.BindEntry @@ -589,14 +619,14 @@ class PrioDir(Plugin, Generator, XMLDirectoryBacked): def BindEntry(self, entry, metadata): """Check package lists of package entries.""" - [src.Cache(metadata) for src in self.entries.values()] + [src.Cache(metadata) for src in list(self.entries.values())] name = entry.get('name') if not src.cache: self.logger.error("Called before data loaded") raise PluginExecutionError - matching = [src for src in self.entries.values() + matching = [src for src in list(self.entries.values()) if src.cache and entry.tag in src.cache[1] - and src.cache[1][entry.tag].has_key(name)] + and name in src.cache[1][entry.tag]] if len(matching) == 0: raise PluginExecutionError elif len(matching) == 1: @@ -618,15 +648,17 @@ class PrioDir(Plugin, Generator, XMLDirectoryBacked): entry.text = data['__text__'] if '__children__' in data: [entry.append(copy.deepcopy(item)) for item in data['__children__']] - [entry.attrib.__setitem__(key, data[key]) for key in data.keys() \ + [entry.attrib.__setitem__(key, data[key]) for key in 
list(data.keys()) \ if not key.startswith('__')] + # new unified EntrySet backend class SpecificityError(Exception): """Thrown in case of filename parse failure.""" pass + class Specificity: def __init__(self, all=False, group=False, hostname=False, prio=0, delta=False): @@ -665,6 +697,7 @@ class Specificity: return True return False + class SpecificData(object): def __init__(self, name, specific, encoding): self.name = name @@ -678,9 +711,11 @@ class SpecificData(object): except: logger.error("Failed to read file %s" % self.name) + class EntrySet: """Entry sets deal with the host- and group-specific entries.""" ignore = re.compile("^(\.#.*|.*~|\\..*\\.(sw[px])|.*\\.genshi_include)$") + def __init__(self, basename, path, entry_type, encoding): self.path = path self.entry_type = entry_type @@ -693,7 +728,7 @@ class EntrySet: self.specific = re.compile(pattern) def get_matching(self, metadata): - return [item for item in self.entries.values() \ + return [item for item in list(self.entries.values()) \ if item.specific.matches(metadata)] def handle_event(self, event): @@ -761,11 +796,11 @@ class EntrySet: for line in open(fpath).readlines(): match = info_regex.match(line) if not match: - logger.warning("Failed to match line: %s"%line) + logger.warning("Failed to match line: %s" % line) continue else: mgd = match.groupdict() - for key, value in mgd.iteritems(): + for key, value in list(mgd.items()): if value: self.metadata[key] = value if len(self.metadata['perms']) == 3: @@ -795,7 +830,7 @@ class EntrySet: (entry.get('name'))) raise PluginExecutionError [entry.attrib.__setitem__(key, value) \ - for (key, value) in mdata['Info'][None].iteritems()] + for (key, value) in list(mdata['Info'][None].items())] def bind_entry(self, entry, metadata): """Return the appropriate interpreted template from the set of available templates.""" @@ -817,6 +852,7 @@ class EntrySet: raise PluginExecutionError + class GroupSpool(Plugin, Generator): """Unified interface for handling group-specific data (e.g. 
.G## files).""" name = 'GroupSpool' @@ -878,9 +914,9 @@ class GroupSpool(Plugin, Generator): if not relative.endswith('/'): relative += '/' name = self.data + relative - if relative not in self.handles.values(): + if relative not in list(self.handles.values()): if not posixpath.isdir(name): - print "Failed to open directory %s" % (name) + print("Failed to open directory %s" % (name)) return reqid = self.core.fam.AddMonitor(name, self) self.handles[reqid] = relative diff --git a/src/lib/Server/Plugins/Account.py b/src/lib/Server/Plugins/Account.py index e3ea58761..f67819b9d 100644 --- a/src/lib/Server/Plugins/Account.py +++ b/src/lib/Server/Plugins/Account.py @@ -3,6 +3,7 @@ __revision__ = '$Revision$' import Bcfg2.Server.Plugin + class Account(Bcfg2.Server.Plugin.Plugin, Bcfg2.Server.Plugin.Generator): """This module generates account config files, @@ -21,13 +22,14 @@ class Account(Bcfg2.Server.Plugin.Plugin, def __init__(self, core, datastore): Bcfg2.Server.Plugin.Plugin.__init__(self, core, datastore) Bcfg2.Server.Plugin.Generator.__init__(self) - self.Entries = {'ConfigFile':{'/etc/passwd':self.from_yp_cb, - '/etc/group':self.from_yp_cb, - '/etc/security/limits.conf':self.gen_limits_cb, - '/root/.ssh/authorized_keys':self.gen_root_keys_cb, - '/etc/sudoers':self.gen_sudoers}} + self.Entries = {'ConfigFile': {'/etc/passwd': self.from_yp_cb, + '/etc/group': self.from_yp_cb, + '/etc/security/limits.conf': self.gen_limits_cb, + '/root/.ssh/authorized_keys': self.gen_root_keys_cb, + '/etc/sudoers': self.gen_sudoers}} try: - self.repository = Bcfg2.Server.Plugin.DirectoryBacked(self.data, self.core.fam) + self.repository = Bcfg2.Server.Plugin.DirectoryBacked(self.data, + self.core.fam) except: self.logger.error("Failed to load repos: %s, %s" % \ (self.data, "%s/ssh" % (self.data))) @@ -38,9 +40,11 @@ class Account(Bcfg2.Server.Plugin.Plugin, fname = entry.attrib['name'].split('/')[-1] entry.text = self.repository.entries["static.%s" % (fname)].data entry.text += self.repository.entries["dyn.%s" % (fname)].data - perms = {'owner':'root', 'group':'root', 'perms':'0644'} + perms = {'owner': 'root', + 'group': 'root', + 'perms': '0644'} [entry.attrib.__setitem__(key, value) for (key, value) in \ - perms.iteritems()] + list(perms.items())] def gen_limits_cb(self, entry, metadata): """Build limits entries based on current ACLs.""" @@ -50,9 +54,11 @@ class Account(Bcfg2.Server.Plugin.Plugin, self.repository.entries["useraccess"].data.split()] users = [user for (user, host) in \ useraccess if host == metadata.hostname.split('.')[0]] - perms = {'owner':'root', 'group':'root', 'perms':'0600'} + perms = {'owner': 'root', + 'group': 'root', + 'perms': '0600'} [entry.attrib.__setitem__(key, value) for (key, value) in \ - perms.iteritems()] + list(perms.items())] entry.text += "".join(["%s hard maxlogins 1024\n" % uname for uname in superusers + users]) if "*" not in users: entry.text += "* hard maxlogins 0\n" @@ -71,9 +77,11 @@ class Account(Bcfg2.Server.Plugin.Plugin, entry.text = "".join([rdata["%s.key" % user].data for user \ in superusers if \ ("%s.key" % user) in rdata]) - perms = {'owner':'root', 'group':'root', 'perms':'0600'} + perms = {'owner': 'root', + 'group': 'root', + 'perms': '0600'} [entry.attrib.__setitem__(key, value) for (key, value) \ - in perms.iteritems()] + in list(perms.items())] def gen_sudoers(self, entry, metadata): """Build root authorized keys file based on current ACLs.""" @@ -88,6 +96,8 @@ class Account(Bcfg2.Server.Plugin.Plugin, entry.text = 
self.repository.entries['static.sudoers'].data entry.text += "".join(["%s ALL=(ALL) ALL\n" % uname \ for uname in superusers]) - perms = {'owner':'root', 'group':'root', 'perms':'0440'} + perms = {'owner': 'root', + 'group': 'root', + 'perms': '0440'} [entry.attrib.__setitem__(key, value) for (key, value) \ - in perms.iteritems()] + in list(perms.items())] diff --git a/src/lib/Server/Plugins/Base.py b/src/lib/Server/Plugins/Base.py index 8e5ca1cd9..2b241da9d 100644 --- a/src/lib/Server/Plugins/Base.py +++ b/src/lib/Server/Plugins/Base.py @@ -5,6 +5,7 @@ import Bcfg2.Server.Plugin import copy import lxml.etree + class Base(Bcfg2.Server.Plugin.Plugin, Bcfg2.Server.Plugin.Structure, Bcfg2.Server.Plugin.XMLDirectoryBacked): @@ -31,8 +32,8 @@ class Base(Bcfg2.Server.Plugin.Plugin, def BuildStructures(self, metadata): """Build structures for client described by metadata.""" ret = lxml.etree.Element("Independent", version='2.0') - fragments = reduce(lambda x, y: x+y, + fragments = reduce(lambda x, y: x + y, [base.Match(metadata) for base - in self.entries.values()], []) + in list(self.entries.values())], []) [ret.append(copy.deepcopy(frag)) for frag in fragments] return [ret] diff --git a/src/lib/Server/Plugins/Bundler.py b/src/lib/Server/Plugins/Bundler.py index 3f88fe26b..04df8ea86 100644 --- a/src/lib/Server/Plugins/Bundler.py +++ b/src/lib/Server/Plugins/Bundler.py @@ -73,7 +73,7 @@ class Bundler(Bcfg2.Server.Plugin.Plugin, """Build all structures for client (metadata).""" bundleset = [] for bundlename in metadata.bundles: - entries = [item for (key, item) in self.entries.iteritems() if \ + entries = [item for (key, item) in list(self.entries.items()) if \ self.patterns.match(key).group('name') == bundlename] if len(entries) == 0: continue diff --git a/src/lib/Server/Plugins/Cfg.py b/src/lib/Server/Plugins/Cfg.py index 184bb792c..6c7a40a52 100644 --- a/src/lib/Server/Plugins/Cfg.py +++ b/src/lib/Server/Plugins/Cfg.py @@ -13,9 +13,7 @@ import Bcfg2.Server.Plugin try: import genshi.core import genshi.input - from genshi.template import TemplateLoader, \ - TextTemplate, MarkupTemplate, TemplateError - from genshi.template import NewTextTemplate + from genshi.template import TemplateLoader, NewTextTemplate have_genshi = True except: have_genshi = False @@ -62,7 +60,7 @@ def process_delta(data, delta): output = open(basefile.name, 'r').read() [os.unlink(fname) for fname in [basefile.name, dfile.name]] if ret >> 8 != 0: - raise Bcfg2.Server.Plugin.PluginExecutionError, ('delta', delta) + raise Bcfg2.Server.Plugin.PluginExecutionError('delta', delta) return output @@ -96,7 +94,7 @@ class CfgEntrySet(Bcfg2.Server.Plugin.EntrySet): """return a list of all entries pertinent to a client => [base, delta1, delta2] """ - matching = [ent for ent in self.entries.values() if \ + matching = [ent for ent in list(self.entries.values()) if \ ent.specific.matches(metadata)] matching.sort(self.sort_by_specific) non_delta = [matching.index(m) for m in matching diff --git a/src/lib/Server/Plugins/Deps.py b/src/lib/Server/Plugins/Deps.py index 088f8cdad..b186258cb 100644 --- a/src/lib/Server/Plugins/Deps.py +++ b/src/lib/Server/Plugins/Deps.py @@ -5,20 +5,22 @@ import lxml.etree import Bcfg2.Server.Plugin + class DNode(Bcfg2.Server.Plugin.INode): """DNode provides supports for single predicate types for dependencies.""" - raw = {'Group':"lambda x:'%s' in x.groups and predicate(x)"} + raw = {'Group': "lambda x:'%s' in x.groups and predicate(x)"} containers = ['Group'] def __init__(self, data, idict, parent=None): 
self.data = data self.contents = {} if parent == None: - self.predicate = lambda x:True + self.predicate = lambda x: True else: predicate = parent.predicate - if data.tag in self.raw.keys(): - self.predicate = eval(self.raw[data.tag] % (data.get('name')), {'predicate':predicate}) + if data.tag in list(self.raw.keys()): + self.predicate = eval(self.raw[data.tag] % (data.get('name')), + {'predicate': predicate}) else: raise Exception mytype = self.__class__ @@ -27,15 +29,18 @@ class DNode(Bcfg2.Server.Plugin.INode): if item.tag in self.containers: self.children.append(mytype(item, idict, self)) else: - data = [(child.tag, child.get('name')) for child in item.getchildren()] + data = [(child.tag, child.get('name')) + for child in item.getchildren()] try: self.contents[item.tag][item.get('name')] = data except KeyError: - self.contents[item.tag] = {item.get('name'):data} + self.contents[item.tag] = {item.get('name'): data} + class DepXMLSrc(Bcfg2.Server.Plugin.XMLSrc): __node__ = DNode + class Deps(Bcfg2.Server.Plugin.PrioDir, Bcfg2.Server.Plugin.StructureValidator): name = 'Deps' @@ -68,12 +73,12 @@ class Deps(Bcfg2.Server.Plugin.PrioDir, if (entries, gdata) in self.cache: prereqs = self.cache[(entries, gdata)] else: - [src.Cache(metadata) for src in self.entries.values()] + [src.Cache(metadata) for src in list(self.entries.values())] toexamine = list(entries[:]) while toexamine: entry = toexamine.pop() - matching = [src for src in self.entries.values() + matching = [src for src in list(self.entries.values()) if src.cache and entry[0] in src.cache[1] and entry[1] in src.cache[1][entry[0]]] if len(matching) > 1: diff --git a/src/lib/Server/Plugins/Editor.py b/src/lib/Server/Plugins/Editor.py index bfd4d6e93..e68d28d8b 100644 --- a/src/lib/Server/Plugins/Editor.py +++ b/src/lib/Server/Plugins/Editor.py @@ -2,6 +2,7 @@ import Bcfg2.Server.Plugin import re import lxml.etree + def linesub(pattern, repl, filestring): """Substitutes instances of pattern with repl in filestring.""" if filestring == None: @@ -12,6 +13,7 @@ def linesub(pattern, repl, filestring): output.append(re.sub(pattern, repl, filestring)) return '\n'.join(output) + class EditDirectives(Bcfg2.Server.Plugin.SpecificData): """This object handles the editing directives.""" def ProcessDirectives(self, input): @@ -22,23 +24,29 @@ class EditDirectives(Bcfg2.Server.Plugin.SpecificData): temp = linesub(directive[0], directive[1], temp) return temp + class EditEntrySet(Bcfg2.Server.Plugin.EntrySet): def __init__(self, basename, path, entry_type, encoding): - self.ignore = re.compile("^(\.#.*|.*~|\\..*\\.(tmp|sw[px])|%s\.H_.*)$" %path.split('/')[-1]) - Bcfg2.Server.Plugin.EntrySet.__init__(self, basename, path, entry_type, encoding) + self.ignore = re.compile("^(\.#.*|.*~|\\..*\\.(tmp|sw[px])|%s\.H_.*)$" % path.split('/')[-1]) + Bcfg2.Server.Plugin.EntrySet.__init__(self, + basename, + path, + entry_type, + encoding) self.inputs = dict() def bind_entry(self, entry, metadata): client = metadata.hostname filename = entry.get('name') - permdata = {'owner':'root', 'group':'root'} - permdata['perms'] = '0644' + permdata = {'owner': 'root', + 'group': 'root' + 'perms': '0644'} [entry.attrib.__setitem__(key, permdata[key]) for key in permdata] entry.text = self.entries['edits'].ProcessDirectives(self.get_client_data(client)) if not entry.text: entry.set('empty', 'true') try: - f = open('%s/%s.H_%s' %(self.path, filename.split('/')[-1], client), 'w') + f = open('%s/%s.H_%s' % (self.path, filename.split('/')[-1], client), 'w') f.write(entry.text) 
f.close() except: @@ -60,7 +68,7 @@ class Editor(Bcfg2.Server.Plugin.GroupSpool, def GetProbes(self, _): '''Return a set of probes for execution on client''' probelist = list() - for name in self.entries.keys(): + for name in list(self.entries.keys()): probe = lxml.etree.Element('probe') probe.set('name', name) probe.set('source', "Editor") diff --git a/src/lib/Server/Plugins/GroupPatterns.py b/src/lib/Server/Plugins/GroupPatterns.py index 3801a6a08..f99026a9d 100644 --- a/src/lib/Server/Plugins/GroupPatterns.py +++ b/src/lib/Server/Plugins/GroupPatterns.py @@ -3,6 +3,7 @@ import re import Bcfg2.Server.Plugin + class PackedDigitRange(object): def __init__(self, digit_range): self.sparse = list() @@ -18,12 +19,14 @@ class PackedDigitRange(object): if iother in self.sparse: return True for (start, end) in self.ranges: - if iother in xrange(start, end+1): + if iother in range(start, end + 1): return True return False + class PatternMap(object): range_finder = '\\[\\[[\d\-,]+\\]\\]' + def __init__(self, pattern, rangestr, groups): self.pattern = pattern self.rangestr = rangestr @@ -33,8 +36,11 @@ class PatternMap(object): self.process = self.process_re elif rangestr != None: self.process = self.process_range - self.re = re.compile('^' + re.subn(self.range_finder, '(\d+)', rangestr)[0]) - dmatcher = re.compile(re.subn(self.range_finder, '\\[\\[([\d\-,]+)\\]\\]', rangestr)[0]) + self.re = re.compile('^' + re.subn(self.range_finder, '(\d+)', + rangestr)[0]) + dmatcher = re.compile(re.subn(self.range_finder, + '\\[\\[([\d\-,]+)\\]\\]', + rangestr)[0]) self.dranges = [PackedDigitRange(x) for x in dmatcher.match(rangestr).groups()] else: raise Exception @@ -58,10 +64,11 @@ class PatternMap(object): for group in self.groups: newg = group for idx in range(len(sub)): - newg = newg.replace('$%s' % (idx+1), sub[idx]) + newg = newg.replace('$%s' % (idx + 1), sub[idx]) ret.append(newg) return ret + class PatternFile(Bcfg2.Server.Plugin.SingleXMLFileBacked): def __init__(self, filename, fam): Bcfg2.Server.Plugin.SingleXMLFileBacked.__init__(self, filename, fam) @@ -102,6 +109,7 @@ class PatternFile(Bcfg2.Server.Plugin.SingleXMLFileBacked): (pattern.pattern, hostname), exc_info=1) return ret + class GroupPatterns(Bcfg2.Server.Plugin.Plugin, Bcfg2.Server.Plugin.Connector): name = "GroupPatterns" diff --git a/src/lib/Server/Plugins/Metadata.py b/src/lib/Server/Plugins/Metadata.py index aa482e7ed..98caa577d 100644 --- a/src/lib/Server/Plugins/Metadata.py +++ b/src/lib/Server/Plugins/Metadata.py @@ -1,4 +1,6 @@ -"""This file stores persistent metadata for the Bcfg2 Configuration Repository.""" +""" +This file stores persistent metadata for the Bcfg2 Configuration Repository. +""" __revision__ = '$Revision$' @@ -12,6 +14,7 @@ import time import Bcfg2.Server.FileMonitor import Bcfg2.Server.Plugin + def locked(fd): """Aquire a lock on a file""" try: @@ -20,14 +23,19 @@ def locked(fd): return True return False + class MetadataConsistencyError(Exception): """This error gets raised when metadata is internally inconsistent.""" pass + class MetadataRuntimeError(Exception): - """This error is raised when the metadata engine is called prior to reading enough data.""" + """This error is raised when the metadata engine + is called prior to reading enough data. 
+ """ pass + class XMLMetadataConfig(object): """Handles xml config files and all XInclude statements""" def __init__(self, metadata, watch_clients, basefile): @@ -39,7 +47,8 @@ class XMLMetadataConfig(object): self.basedata = None self.basedir = metadata.data self.logger = metadata.logger - self.pseudo_monitor = isinstance(metadata.core.fam, Bcfg2.Server.FileMonitor.Pseudo) + self.pseudo_monitor = isinstance(metadata.core.fam, + Bcfg2.Server.FileMonitor.Pseudo) @property def xdata(self): @@ -56,7 +65,8 @@ class XMLMetadataConfig(object): def add_monitor(self, fname): """Add a fam monitor for an included file""" if self.should_monitor: - self.metadata.core.fam.AddMonitor("%s/%s" % (self.basedir, fname), self.metadata) + self.metadata.core.fam.AddMonitor("%s/%s" % (self.basedir, fname), + self.metadata) self.extras.append(fname) def load_xml(self): @@ -81,7 +91,8 @@ class XMLMetadataConfig(object): def write(self): """Write changes to xml back to disk.""" - self.write_xml("%s/%s" % (self.basedir, self.basefile), self.basedata) + self.write_xml("%s/%s" % (self.basedir, self.basefile), + self.basedata) def write_xml(self, fname, xmltree): """Write changes to xml back to disk.""" @@ -182,6 +193,7 @@ class ClientMetadata(object): return grp return '' + class MetadataQuery(object): def __init__(self, by_name, get_clients, by_groups, by_profiles, all_groups, all_groups_in_category): # resolver is set later @@ -201,6 +213,7 @@ class MetadataQuery(object): def all(self): return [self.by_name(name) for name in self.all_clients()] + class Metadata(Bcfg2.Server.Plugin.Plugin, Bcfg2.Server.Plugin.Metadata, Bcfg2.Server.Plugin.Statistics): @@ -220,12 +233,13 @@ class Metadata(Bcfg2.Server.Plugin.Plugin, except: print("Unable to add file monitor for groups.xml or clients.xml") raise Bcfg2.Server.Plugin.PluginInitError - + self.clients_xml = XMLMetadataConfig(self, watch_clients, 'clients.xml') self.groups_xml = XMLMetadataConfig(self, watch_clients, 'groups.xml') self.states = {} if watch_clients: - self.states = {"groups.xml":False, "clients.xml":False} + self.states = {"groups.xml": False, + "clients.xml": False} self.addresses = {} self.auth = dict() self.clients = {} @@ -244,10 +258,11 @@ class Metadata(Bcfg2.Server.Plugin.Plugin, self.session_cache = {} self.default = None self.pdirty = False - self.extra = {'groups.xml':[], 'clients.xml':[]} + self.extra = {'groups.xml': [], + 'clients.xml': []} self.password = core.password self.query = MetadataQuery(core.build_metadata, - lambda:self.clients.keys(), + lambda: list(self.clients.keys()), self.get_client_names_by_groups, self.get_client_names_by_profiles, self.get_all_group_names, @@ -288,7 +303,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin, element = lxml.etree.SubElement(self.groups_xml.base_xdata.getroot(), "Group", name=group_name) - for key, val in attribs.iteritems(): + for key, val in list(attribs.items()): element.set(key, val) self.groups_xml.write() @@ -303,7 +318,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin, self.logger.error("Unexpected error finding group") raise MetadataConsistencyError - for key, val in attribs.iteritems(): + for key, val in list(attribs.items()): xdict['xquery'][0].set(key, val) self.groups_xml.write_xml(xdict['filename'], xdict['xmltree']) @@ -330,7 +345,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin, self.logger.error("Bundle \"%s\" already exists" % (bundle_name)) raise MetadataConsistencyError root.append(element) - group_tree = open(self.data + "/groups.xml","w") + group_tree = open(self.data + "/groups.xml", 
"w") fd = group_tree.fileno() while True: try: @@ -352,7 +367,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin, self.logger.error("Bundle \"%s\" not found" % (bundle_name)) raise MetadataConsistencyError root.remove(node) - group_tree = open(self.data + "/groups.xml","w") + group_tree = open(self.data + "/groups.xml", "w") fd = group_tree.fileno() while True: try: @@ -384,7 +399,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin, element = lxml.etree.SubElement(self.clients_xml.base_xdata.getroot(), "Client", name=client_name) - for key, val in attribs.iteritems(): + for key, val in list(attribs.items()): element.set(key, val) self.clients_xml.write() @@ -401,7 +416,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin, raise MetadataConsistencyError node = xdict['xquery'][0] - [node.set(key, value) for key, value in attribs.items()] + [node.set(key, value) for key, value in list(attribs.items())] self.clients_xml.write_xml(xdict['filename'], xdict['xmltree']) def HandleEvent(self, event): @@ -506,17 +521,17 @@ class Metadata(Bcfg2.Server.Plugin.Plugin, ggg)) [self.groups[group][0].add(bund) for bund in bundles] self.states['groups.xml'] = True - if False not in self.states.values(): + if False not in list(self.states.values()): # check that all client groups are real and complete - real = self.groups.keys() - for client in self.clients.keys(): + real = list(self.groups.keys()) + for client in list(self.clients.keys()): if self.clients[client] not in self.profiles: self.logger.error("Client %s set as nonexistent or incomplete group %s" \ % (client, self.clients[client])) self.logger.error("Removing client mapping for %s" % (client)) self.bad_clients[client] = self.clients[client] del self.clients[client] - for bclient in self.bad_clients.keys(): + for bclient in list(self.bad_clients.keys()): if self.bad_clients[bclient] in self.profiles: self.logger.info("Restored profile mapping for client %s" % bclient) self.clients[bclient] = self.bad_clients[bclient] @@ -525,7 +540,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin, def set_profile(self, client, profile, addresspair): """Set group parameter for provided client.""" self.logger.info("Asserting client %s profile to %s" % (client, profile)) - if False in self.states.values(): + if False in list(self.states.values()): raise MetadataRuntimeError if profile not in self.public: self.logger.error("Failed to set client %s to private group %s" % (client, profile)) @@ -579,7 +594,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin, def get_initial_metadata(self, client): """Return the metadata for a given client.""" - if False in self.states.values(): + if False in list(self.states.values()): raise MetadataRuntimeError client = client.lower() if client in self.aliases: @@ -604,7 +619,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin, password = self.passwords[client] else: password = None - uuids = [item for item, value in self.uuid.iteritems() if value == client] + uuids = [item for item, value in list(self.uuid.items()) if value == client] if uuids: uuid = uuids[0] else: @@ -622,7 +637,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin, def get_all_group_names(self): all_groups = set() - [all_groups.update(g[1]) for g in self.groups.values()] + [all_groups.update(g[1]) for g in list(self.groups.values())] return all_groups def get_all_groups_in_category(self, category): @@ -632,11 +647,12 @@ class Metadata(Bcfg2.Server.Plugin.Plugin, return all_groups def get_client_names_by_profiles(self, profiles): - return [client for client, profile in self.clients.iteritems() \ + 
return [client for client, profile in list(self.clients.items()) \ if profile in profiles] def get_client_names_by_groups(self, groups): - mdata = [self.core.build_metadata(client) for client in self.clients.keys()] + mdata = [self.core.build_metadata(client) + for client in list(self.clients.keys())] return [md.hostname for md in mdata if md.groups.issuperset(groups)] def merge_additional_groups(self, imd, groups): @@ -766,7 +782,6 @@ class Metadata(Bcfg2.Server.Plugin.Plugin, xdict['xquery'][0].set('auth', 'cert') self.clients_xml.write_xml(xdict['filename'], xdict['xmltree']) - def viz(self, hosts, bundles, key, colors): """Admin mode viz support.""" groups_tree = lxml.etree.parse(self.data + "/groups.xml") @@ -775,7 +790,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin, except lxml.etree.XIncludeError: self.logger.error("Failed to process XInclude for file %s" % dest) groups = groups_tree.getroot() - categories = {'default':'grey83'} + categories = {'default': 'grey83'} instances = {} viz_str = "" egroups = groups.findall("Group") + groups.findall('.//Groups/Group') @@ -787,12 +802,12 @@ class Metadata(Bcfg2.Server.Plugin.Plugin, del categories[None] if hosts: clients = self.clients - for client, profile in clients.iteritems(): + for client, profile in list(clients.items()): if profile in instances: instances[profile].append(client) else: instances[profile] = [client] - for profile, clist in instances.iteritems(): + for profile, clist in list(instances.items()): clist.sort() viz_str += '''\t"%s-instances" [ label="%s", shape="record" ];\n''' \ % (profile, '|'.join(clist)) diff --git a/src/lib/Server/Plugins/NagiosGen.py b/src/lib/Server/Plugins/NagiosGen.py index 14277b63d..1724a1c8a 100644 --- a/src/lib/Server/Plugins/NagiosGen.py +++ b/src/lib/Server/Plugins/NagiosGen.py @@ -19,6 +19,7 @@ define host{ address %s ''' + class NagiosGen(Bcfg2.Server.Plugin.Plugin, Bcfg2.Server.Plugin.Generator): """NagiosGen is a Bcfg2 plugin that dynamically generates @@ -32,23 +33,23 @@ class NagiosGen(Bcfg2.Server.Plugin.Plugin, Bcfg2.Server.Plugin.Plugin.__init__(self, core, datastore) Bcfg2.Server.Plugin.Generator.__init__(self) self.Entries = {'Path': - {'/etc/nagiosgen.status' : self.createhostconfig, + {'/etc/nagiosgen.status': self.createhostconfig, '/etc/nagios/nagiosgen.cfg': self.createserverconfig}} - self.client_attrib = {'encoding':'ascii', - 'owner':'root', - 'group':'root', - 'type':'file', - 'perms':'0400'} - self.server_attrib = {'encoding':'ascii', - 'owner':'nagios', - 'group':'nagios', - 'type':'file', - 'perms':'0440'} + self.client_attrib = {'encoding': 'ascii', + 'owner': 'root', + 'group': 'root', + 'type': 'file', + 'perms': '0400'} + self.server_attrib = {'encoding': 'ascii', + 'owner': 'nagios', + 'group': 'nagios', + 'type': 'file', + 'perms': '0440'} def getparents(self, hostname): """Return parents for given hostname.""" - depends=[] + depends = [] if not os.path.isfile('%s/parents.xml' % (self.data)): return depends @@ -88,7 +89,7 @@ class NagiosGen(Bcfg2.Server.Plugin.Plugin, host_config += '}\n' entry.text = host_config [entry.attrib.__setitem__(key, value) for \ - (key, value) in self.client_attrib.iteritems()] + (key, value) in list(self.client_attrib.items())] try: fileh = open("%s/%s-host.cfg" % \ (self.data, metadata.hostname), 'w') @@ -101,14 +102,14 @@ class NagiosGen(Bcfg2.Server.Plugin.Plugin, def createserverconfig(self, entry, _): """Build monolithic server configuration file.""" - host_configs = glob.glob('%s/*-host.cfg' % self.data) + host_configs = 
glob.glob('%s/*-host.cfg' % self.data) group_configs = glob.glob('%s/*-group.cfg' % self.data) host_data = "" group_data = "" for host in host_configs: hostfile = open(host, 'r') - hostname=host.split('/')[-1].replace('-host.cfg','') - parents=self.getparents(hostname) + hostname = host.split('/')[-1].replace('-host.cfg', '') + parents = self.getparents(hostname) if parents: hostlines = hostfile.readlines() else: @@ -116,19 +117,19 @@ class NagiosGen(Bcfg2.Server.Plugin.Plugin, hostfile.close() if parents: - hostdata='' - addparents=True + hostdata = '' + addparents = True for line in hostlines: - line=line.replace('\n','') + line = line.replace('\n', '') if 'parents' in line: - line+=','+','.join(parents) - addparents=False + line += ',' + ','.join(parents) + addparents = False if '}' in line: - line='' - hostdata+="%s\n" % line + line = '' + hostdata += "%s\n" % line if addparents: - hostdata+=" parents %s\n" % ','.join(parents) - hostdata+="}\n" + hostdata += " parents %s\n" % ','.join(parents) + hostdata += "}\n" host_data += hostdata for group in group_configs: @@ -139,7 +140,7 @@ class NagiosGen(Bcfg2.Server.Plugin.Plugin, groupfile.close() entry.text = group_data + host_data [entry.attrib.__setitem__(key, value) for \ - (key, value) in self.server_attrib.iteritems()] + (key, value) in list(self.server_attrib.items())] try: fileh = open("%s/nagiosgen.cfg" % (self.data), 'w') fileh.write(group_data + host_data) diff --git a/src/lib/Server/Plugins/Ohai.py b/src/lib/Server/Plugins/Ohai.py index 0f7c7187f..6bd3edc34 100644 --- a/src/lib/Server/Plugins/Ohai.py +++ b/src/lib/Server/Plugins/Ohai.py @@ -37,12 +37,12 @@ class OhaiCache(object): try: data = open("%s/%s.json" % (self.dirname, item)).read() except: - raise KeyError, item + raise KeyError(item) self.cache[item] = json.loads(data) return self.cache[item] def __iter__(self): - data = self.cache.keys() + data = list(self.cache.keys()) data.extend([x[:-5] for x in os.listdir(self.dirname)]) return data.__iter__() @@ -50,7 +50,9 @@ class OhaiCache(object): class Ohai(Bcfg2.Server.Plugin.Plugin, Bcfg2.Server.Plugin.Probing, Bcfg2.Server.Plugin.Connector): - """The Ohai plugin is used to detect information about the client operating system.""" + """The Ohai plugin is used to detect information + about the client operating system. + """ name = 'Ohai' experimental = True diff --git a/src/lib/Server/Plugins/Packages.py b/src/lib/Server/Plugins/Packages.py index 438c1d5c0..a84d8dc70 100644 --- a/src/lib/Server/Plugins/Packages.py +++ b/src/lib/Server/Plugins/Packages.py @@ -22,14 +22,17 @@ import Bcfg2.Server.Plugin # build sources.list? 
# caching for yum + class NoData(Exception): pass + class SomeData(Exception): pass logger = logging.getLogger('Packages') + def source_from_xml(xsource): ret = dict([('rawurl', False), ('url', False)]) for key, tag in [('groups', 'Group'), ('components', 'Component'), @@ -60,6 +63,7 @@ def source_from_xml(xsource): ret['url'] += '/' return ret + def _fetch_url(url): if '@' in url: mobj = re.match('(\w+://)([^:]+):([^@]+)@(.*)$', url) @@ -73,6 +77,7 @@ def _fetch_url(url): urllib2.install_opener(urllib2.build_opener(auth)) return urllib2.urlopen(url).read() + class Source(object): basegroups = [] @@ -135,7 +140,7 @@ class Source(object): agroups = ['global'] + [a for a in self.arches if a in meta.groups] vdict = dict() for agrp in agroups: - for key, value in self.provides[agrp].iteritems(): + for key, value in list(self.provides[agrp].items()): if key not in vdict: vdict[key] = set(value) else: @@ -193,6 +198,7 @@ class Source(object): return {'groups': copy.copy(self.groups), \ 'urls': [copy.deepcopy(url) for url in self.url_map]} + class YUMSource(Source): xp = '{http://linux.duke.edu/metadata/common}' rp = '{http://linux.duke.edu/metadata/rpm}' @@ -277,7 +283,7 @@ class YUMSource(Source): fdata = lxml.etree.parse(fname).getroot() self.parse_filelist(fdata, farch) # merge data - sdata = self.packages.values() + sdata = list(self.packages.values()) self.packages['global'] = copy.deepcopy(sdata.pop()) while sdata: self.packages['global'] = self.packages['global'].intersection(sdata.pop()) @@ -337,10 +343,10 @@ class YUMSource(Source): def get_vpkgs(self, metadata): rv = Source.get_vpkgs(self, metadata) - for arch, fmdata in self.filemap.iteritems(): + for arch, fmdata in list(self.filemap.items()): if arch not in metadata.groups and arch != 'global': continue - for filename, pkgs in fmdata.iteritems(): + for filename, pkgs in list(fmdata.items()): rv[filename] = pkgs return rv @@ -348,6 +354,7 @@ class YUMSource(Source): filtered = set([u for u in unknown if u.startswith('rpmlib')]) unknown.difference_update(filtered) + class APTSource(Source): basegroups = ['apt', 'debian', 'ubuntu', 'nexenta'] ptype = 'deb' @@ -449,7 +456,7 @@ class APTSource(Source): for barch in bdeps: self.deps[barch][pkgname] = bdeps[barch][pkgname] provided = set() - for bprovided in bprov.values(): + for bprovided in list(bprov.values()): provided.update(set(bprovided)) for prov in provided: prset = set() @@ -469,6 +476,7 @@ class APTSource(Source): pkg not in self.blacklist and \ (len(self.whitelist) == 0 or pkg in self.whitelist) + class PACSource(Source): basegroups = ['arch', 'parabola'] ptype = 'pacman' @@ -526,7 +534,7 @@ class PACSource(Source): bdeps[barch] = dict() bprov[barch] = dict() try: - print "try to read : " + fname + print("try to read : " + fname) tar = tarfile.open(fname, "r") reader = gzip.GzipFile(fname) except: @@ -536,7 +544,7 @@ class PACSource(Source): for tarinfo in tar: if tarinfo.isdir(): self.pkgnames.add(tarinfo.name.rsplit("-", 2)[0]) - print "added : " + tarinfo.name.rsplit("-", 2)[0] + print("added : " + tarinfo.name.rsplit("-", 2)[0]) tar.close() self.deps['global'] = dict() @@ -556,7 +564,7 @@ class PACSource(Source): for barch in bdeps: self.deps[barch][pkgname] = bdeps[barch][pkgname] provided = set() - for bprovided in bprov.values(): + for bprovided in list(bprov.values()): provided.update(set(bprovided)) for prov in provided: prset = set() @@ -576,6 +584,7 @@ class PACSource(Source): pkg not in self.blacklist and \ (len(self.whitelist) == 0 or pkg in self.whitelist) + class 
Packages(Bcfg2.Server.Plugin.Plugin, Bcfg2.Server.Plugin.StructureValidator, Bcfg2.Server.Plugin.Generator, @@ -614,7 +623,7 @@ class Packages(Bcfg2.Server.Plugin.Plugin, vpkgs = dict() for source in self.get_matching_sources(meta): s_vpkgs = source.get_vpkgs(meta) - for name, prov_set in s_vpkgs.iteritems(): + for name, prov_set in list(s_vpkgs.items()): if name not in vpkgs: vpkgs[name] = set(prov_set) else: @@ -726,7 +735,9 @@ class Packages(Bcfg2.Server.Plugin.Plugin, satisfied_vpkgs.add(current) elif [item for item in vpkg_cache[current] if item in packages]: if debug: - self.logger.debug("Packages: requirement %s satisfied by %s" % (current, [item for item in vpkg_cache[current] if item in packages])) + self.logger.debug("Packages: requirement %s satisfied by %s" % (current, + [item for item in vpkg_cache[current] + if item in packages])) satisfied_vpkgs.add(current) vpkgs.difference_update(satisfied_vpkgs) @@ -736,7 +747,9 @@ class Packages(Bcfg2.Server.Plugin.Plugin, # allow use of virt through explicit specification, then fall back to forcing current on last pass if [item for item in vpkg_cache[current] if item in packages]: if debug: - self.logger.debug("Packages: requirement %s satisfied by %s" % (current, [item for item in vpkg_cache[current] if item in packages])) + self.logger.debug("Packages: requirement %s satisfied by %s" % (current, + [item for item in vpkg_cache[current] + if item in packages])) satisfied_both.add(current) elif current in input_requirements or final_pass: pkgs.add(current) diff --git a/src/lib/Server/Plugins/Pkgmgr.py b/src/lib/Server/Plugins/Pkgmgr.py index b58a7c91d..dc4a5f37f 100644 --- a/src/lib/Server/Plugins/Pkgmgr.py +++ b/src/lib/Server/Plugins/Pkgmgr.py @@ -7,15 +7,17 @@ import Bcfg2.Server.Plugin logger = logging.getLogger('Bcfg2.Plugins.Pkgmgr') + class FuzzyDict(dict): fuzzy = re.compile('(?P<name>.*):(?P<alist>\S+(,\S+)*)') + def __getitem__(self, key): if isinstance(key, str): mdata = self.fuzzy.match(key) if mdata: return dict.__getitem__(self, mdata.groupdict()['name']) else: - print "got non-string key %s" % str(key) + print("got non-string key %s" % str(key)) return dict.__getitem__(self, key) def has_key(self, key): @@ -33,11 +35,14 @@ class FuzzyDict(dict): return default raise + class PNode(Bcfg2.Server.Plugin.INode): - """PNode has a list of packages available at a particular group intersection.""" - splitters = {'rpm':re.compile('^(.*/)?(?P<name>[\w\+\d\.]+(-[\w\+\d\.]+)*)-' + \ + """PNode has a list of packages available at a + particular group intersection. 
+ """ + splitters = {'rpm': re.compile('^(.*/)?(?P<name>[\w\+\d\.]+(-[\w\+\d\.]+)*)-' + \ '(?P<version>[\w\d\.]+-([\w\d\.]+))\.(?P<arch>\S+)\.rpm$'), - 'encap':re.compile('^(?P<name>[\w-]+)-(?P<version>[\w\d\.+-]+).encap.*$')} + 'encap': re.compile('^(?P<name>[\w-]+)-(?P<version>[\w\d\.+-]+).encap.*$')} ignore = ['Package'] def Match(self, metadata, data): @@ -54,41 +59,44 @@ class PNode(Bcfg2.Server.Plugin.INode): def __init__(self, data, pdict, parent=None): # copy local attributes to all child nodes if no local attribute exists - if not pdict.has_key('Package'): + if 'Package' not in pdict: pdict['Package'] = set() for child in data.getchildren(): - for attr in [key for key in data.attrib.keys() \ - if key != 'name' and not child.attrib.has_key(key)]: + for attr in [key for key in data.attrib.keys() + if key != 'name' and key not in child.attrib]: try: child.set(attr, data.get(attr)) except: # don't fail on things like comments and other immutable elements pass Bcfg2.Server.Plugin.INode.__init__(self, data, pdict, parent) - if not self.contents.has_key('Package'): + if 'Package' not in self.contents: self.contents['Package'] = FuzzyDict() for pkg in data.findall('./Package'): - if pkg.attrib.has_key('name') and pkg.get('name') not in pdict['Package']: + if 'name' in pkg.attrib and pkg.get('name') not in pdict['Package']: pdict['Package'].add(pkg.get('name')) if pkg.get('name') != None: self.contents['Package'][pkg.get('name')] = {} if pkg.getchildren(): self.contents['Package'][pkg.get('name')]['__children__'] \ = pkg.getchildren() - if pkg.attrib.has_key('simplefile'): + if 'simplefile' in pkg.attrib: pkg.set('url', "%s/%s" % (pkg.get('uri'), pkg.get('simplefile'))) self.contents['Package'][pkg.get('name')].update(pkg.attrib) else: - if pkg.attrib.has_key('file'): - if pkg.attrib.has_key('multiarch'): + if 'file' in pkg.attrib: + if 'multiarch' in pkg.attrib: archs = pkg.get('multiarch').split() srcs = pkg.get('srcs', pkg.get('multiarch')).split() - url = ' '.join(["%s/%s" % (pkg.get('uri'), pkg.get('file') % {'src':srcs[idx], 'arch':archs[idx]}) + url = ' '.join(["%s/%s" % (pkg.get('uri'), + pkg.get('file') % {'src':srcs[idx], + 'arch':archs[idx]}) for idx in range(len(archs))]) pkg.set('url', url) else: - pkg.set('url', '%s/%s' % (pkg.get('uri'), pkg.get('file'))) - if self.splitters.has_key(pkg.get('type')) and pkg.get('file') != None: + pkg.set('url', '%s/%s' % (pkg.get('uri'), + pkg.get('file'))) + if pkg.get('type') in self.splitters and pkg.get('file') != None: mdata = self.splitters[pkg.get('type')].match(pkg.get('file')) if not mdata: logger.error("Failed to match pkg %s" % pkg.get('file')) @@ -112,10 +120,13 @@ class PNode(Bcfg2.Server.Plugin.INode): class PkgSrc(Bcfg2.Server.Plugin.XMLSrc): - """PkgSrc files contain a PNode hierarchy that returns matching package entries.""" + """PkgSrc files contain a PNode hierarchy that + returns matching package entries. 
+ """ __node__ = PNode __cacheobj__ = FuzzyDict + class Pkgmgr(Bcfg2.Server.Plugin.PrioDir): """This is a generator that handles package assignments.""" name = 'Pkgmgr' @@ -127,8 +138,8 @@ class Pkgmgr(Bcfg2.Server.Plugin.PrioDir): def HandleEvent(self, event): '''Handle events and update dispatch table''' Bcfg2.Server.Plugin.XMLDirectoryBacked.HandleEvent(self, event) - for src in self.entries.values(): - for itype, children in src.items.iteritems(): + for src in list(self.entries.values()): + for itype, children in list(src.items.items()): for child in children: try: self.Entries[itype][child] = self.BindEntry @@ -149,7 +160,7 @@ class Pkgmgr(Bcfg2.Server.Plugin.PrioDir): if inst.get('arch') not in arches] def HandlesEntry(self, entry, metadata): - return entry.tag == 'Package' and entry.get('name').split(':')[0] in self.Entries['Package'].keys() + return entry.tag == 'Package' and entry.get('name').split(':')[0] in list(self.Entries['Package'].keys()) def HandleEntry(self, entry, metadata): self.BindEntry(entry, metadata) diff --git a/src/lib/Server/Plugins/Probes.py b/src/lib/Server/Plugins/Probes.py index 57dd4f698..ea2e79ccc 100644 --- a/src/lib/Server/Plugins/Probes.py +++ b/src/lib/Server/Plugins/Probes.py @@ -6,8 +6,10 @@ import Bcfg2.Server.Plugin specific_probe_matcher = re.compile("(.*/)?(?P<basename>\S+)(.(?P<mode>[GH](\d\d)?)_\S+)") probe_matcher = re.compile("(.*/)?(?P<basename>\S+)") + class ProbeSet(Bcfg2.Server.Plugin.EntrySet): ignore = re.compile("^(\.#.*|.*~|\\..*\\.(tmp|sw[px])|probed\\.xml)$") + def __init__(self, path, fam, encoding, plugin_name): fpattern = '[0-9A-Za-z_\-]+' self.plugin_name = plugin_name @@ -34,7 +36,7 @@ class ProbeSet(Bcfg2.Server.Plugin.EntrySet): if pname not in build: build[pname] = entry - for (name, entry) in build.iteritems(): + for (name, entry) in list(build.items()): probe = lxml.etree.Element('probe') probe.set('name', name.split('/')[-1]) probe.set('source', self.plugin_name) @@ -47,6 +49,7 @@ class ProbeSet(Bcfg2.Server.Plugin.EntrySet): ret.append(probe) return ret + class Probes(Bcfg2.Server.Plugin.Plugin, Bcfg2.Server.Plugin.Probing, Bcfg2.Server.Plugin.Connector): @@ -80,7 +83,8 @@ class Probes(Bcfg2.Server.Plugin.Plugin, value=self.probedata[client][probe]) for group in sorted(self.cgroups[client]): lxml.etree.SubElement(cx, "Group", name=group) - data = lxml.etree.tostring(top, encoding='UTF-8', xml_declaration=True, + data = lxml.etree.tostring(top, encoding='UTF-8', + xml_declaration=True, pretty_print='true') try: datafile = open("%s/%s" % (self.data, 'probed.xml'), 'w') @@ -98,7 +102,7 @@ class Probes(Bcfg2.Server.Plugin.Plugin, self.cgroups = {} for client in data.getchildren(): self.probedata[client.get('name')] = {} - self.cgroups[client.get('name')]=[] + self.cgroups[client.get('name')] = [] for pdata in client: if (pdata.tag == 'Probe'): self.probedata[client.get('name')][pdata.get('name')] = pdata.get('value') @@ -118,7 +122,7 @@ class Probes(Bcfg2.Server.Plugin.Plugin, def ReceiveDataItem(self, client, data): """Receive probe results pertaining to client.""" - if not self.cgroups.has_key(client.hostname): + if client.hostname not in self.cgroups: self.cgroups[client.hostname] = [] if data.text == None: self.logger.error("Got null response to probe %s from %s" % \ @@ -139,9 +143,9 @@ class Probes(Bcfg2.Server.Plugin.Plugin, dlines.remove(line) dtext = "\n".join(dlines) try: - self.probedata[client.hostname].update({data.get('name'):dtext}) + self.probedata[client.hostname].update({data.get('name'): dtext}) except 
KeyError: - self.probedata[client.hostname] = {data.get('name'):dtext} + self.probedata[client.hostname] = {data.get('name'): dtext} def get_additional_groups(self, meta): return self.cgroups.get(meta.hostname, list()) diff --git a/src/lib/Server/Plugins/SSHbase.py b/src/lib/Server/Plugins/SSHbase.py index 96a444875..77c5e008f 100644 --- a/src/lib/Server/Plugins/SSHbase.py +++ b/src/lib/Server/Plugins/SSHbase.py @@ -73,7 +73,7 @@ class SSHbase(Bcfg2.Server.Plugin.Plugin, """Build memory cache of the ssh known hosts file.""" if not self.__skn: self.__skn = "\n".join([value.data for key, value in \ - self.entries.iteritems() if \ + list(self.entries.items()) if \ key.endswith('.static')]) names = dict() # if no metadata is registered yet, defer @@ -103,7 +103,7 @@ class SSHbase(Bcfg2.Server.Plugin.Plugin, continue names[cmeta.hostname] = sorted(names[cmeta.hostname]) # now we have our name cache - pubkeys = [pubk for pubk in self.entries.keys() \ + pubkeys = [pubk for pubk in list(self.entries.keys()) \ if pubk.find('.pub.H_') != -1] pubkeys.sort() badnames = set() @@ -131,7 +131,7 @@ class SSHbase(Bcfg2.Server.Plugin.Plugin, if event and event.filename.endswith('.static'): self.skn = False if not self.__skn: - if (len(self.entries.keys())) >= (len(os.listdir(self.data))-1): + if (len(list(self.entries.keys()))) >= (len(os.listdir(self.data)) - 1): _ = self.skn def HandlesEntry(self, entry, _): @@ -205,26 +205,26 @@ class SSHbase(Bcfg2.Server.Plugin.Plugin, for hostkey in hostkeys: entry.text += "localhost,localhost.localdomain,127.0.0.1 %s" % ( self.entries[hostkey].data) - permdata = {'owner':'root', - 'group':'root', - 'type':'file', - 'perms':'0644'} + permdata = {'owner': 'root', + 'group': 'root', + 'type': 'file', + 'perms': '0644'} [entry.attrib.__setitem__(key, permdata[key]) for key in permdata] def build_hk(self, entry, metadata): """This binds host key data into entries.""" client = metadata.hostname filename = "%s.H_%s" % (entry.get('name').split('/')[-1], client) - if filename not in self.entries.keys(): + if filename not in list(self.entries.keys()): self.GenerateHostKeys(client) if not filename in self.entries: self.logger.error("%s still not registered" % filename) raise Bcfg2.Server.Plugin.PluginExecutionError keydata = self.entries[filename].data - permdata = {'owner':'root', - 'group':'root', - 'type':'file', - 'perms':'0600'} + permdata = {'owner': 'root', + 'group': 'root', + 'type': 'file', + 'perms': '0600'} if entry.get('name')[-4:] == '.pub': permdata['perms'] = '0644' [entry.attrib.__setitem__(key, permdata[key]) for key in permdata] @@ -245,7 +245,7 @@ class SSHbase(Bcfg2.Server.Plugin.Plugin, else: keytype = 'rsa1' - if hostkey not in self.entries.keys(): + if hostkey not in list(self.entries.keys()): fileloc = "%s/%s" % (self.data, hostkey) publoc = self.data + '/' + ".".join([hostkey.split('.')[0], 'pub', @@ -257,8 +257,8 @@ class SSHbase(Bcfg2.Server.Plugin.Plugin, shutil.copy(temploc, fileloc) shutil.copy("%s.pub" % temploc, publoc) self.AddEntry(hostkey) - self.AddEntry(".".join([hostkey.split('.')[0]]+['pub', "H_%s" \ - % client])) + self.AddEntry(".".join([hostkey.split('.')[0]] + ['pub', "H_%s" \ + % client])) try: os.unlink(temploc) os.unlink("%s.pub" % temploc) @@ -277,7 +277,7 @@ class SSHbase(Bcfg2.Server.Plugin.Plugin, try: open(filename, 'w').write(entry['text']) if log: - print "Wrote file %s" % filename + print("Wrote file %s" % filename) except KeyError: self.logger.error("Failed to pull %s. 
This file does not currently " "exist on the client" % entry.get('name')) diff --git a/src/lib/Server/Plugins/SSLCA.py b/src/lib/Server/Plugins/SSLCA.py index 1c9e1b59d..00f67834d 100644 --- a/src/lib/Server/Plugins/SSLCA.py +++ b/src/lib/Server/Plugins/SSLCA.py @@ -41,14 +41,14 @@ class SSLCA(Bcfg2.Server.Plugin.GroupSpool): if event.filename.endswith('.xml'): if action in ['exists', 'created', 'changed']: if event.filename.endswith('key.xml'): - key_spec = dict(lxml.etree.parse(epath).find('Key').items()) + key_spec = dict(list(lxml.etree.parse(epath).find('Key').items())) self.key_specs[ident] = { 'bits': key_spec.get('bits', 2048), 'type': key_spec.get('type', 'rsa') } self.Entries['Path'][ident] = self.get_key elif event.filename.endswith('cert.xml'): - cert_spec = dict(lxml.etree.parse(epath).find('Cert').items()) + cert_spec = dict(list(lxml.etree.parse(epath).find('Cert').items())) ca = cert_spec.get('ca', 'default') self.cert_specs[ident] = { 'ca': ca, @@ -64,7 +64,7 @@ class SSLCA(Bcfg2.Server.Plugin.GroupSpool): } cp = ConfigParser() cp.read(self.core.cfile) - self.CAs[ca] = dict(cp.items('sslca_'+ca)) + self.CAs[ca] = dict(cp.items('sslca_' + ca)) self.Entries['Path'][ident] = self.get_cert if action == 'deleted': if ident in self.Entries['Path']: @@ -99,12 +99,14 @@ class SSLCA(Bcfg2.Server.Plugin.GroupSpool): # check if we already have a hostfile, or need to generate a new key # TODO: verify key fits the specs path = entry.get('name') - filename = "".join([path, '/', path.rsplit('/', 1)[1], '.H_', metadata.hostname]) - if filename not in self.entries.keys(): + filename = "".join([path, '/', path.rsplit('/', 1)[1], + '.H_', metadata.hostname]) + if filename not in list(self.entries.keys()): key = self.build_key(filename, entry, metadata) open(self.data + filename, 'w').write(key) entry.text = key - self.entries[filename] = self.__child__("%s%s" % (self.data, filename)) + self.entries[filename] = self.__child__("%s%s" % (self.data, + filename)) self.entries[filename].HandleEvent() else: entry.text = self.entries[filename].data @@ -135,23 +137,28 @@ class SSLCA(Bcfg2.Server.Plugin.GroupSpool): [entry.attrib.__setitem__(key, permdata[key]) for key in permdata] path = entry.get('name') - filename = "".join([path, '/', path.rsplit('/', 1)[1], '.H_', metadata.hostname]) + filename = "".join([path, '/', path.rsplit('/', 1)[1], + '.H_', metadata.hostname]) # first - ensure we have a key to work with key = self.cert_specs[entry.get('name')].get('key') - key_filename = "".join([key, '/', key.rsplit('/', 1)[1], '.H_', metadata.hostname]) + key_filename = "".join([key, '/', key.rsplit('/', 1)[1], + '.H_', metadata.hostname]) if key_filename not in self.entries: e = lxml.etree.Element('Path') e.attrib['name'] = key self.core.Bind(e, metadata) # check if we have a valid hostfile - if filename in self.entries.keys() and self.verify_cert(filename, key_filename, entry): + if filename in list(self.entries.keys()) and self.verify_cert(filename, + key_filename, + entry): entry.text = self.entries[filename].data else: cert = self.build_cert(key_filename, entry, metadata) open(self.data + filename, 'w').write(cert) - self.entries[filename] = self.__child__("%s%s" % (self.data, filename)) + self.entries[filename] = self.__child__("%s%s" % (self.data, + filename)) self.entries[filename].HandleEvent() entry.text = cert @@ -188,7 +195,6 @@ class SSLCA(Bcfg2.Server.Plugin.GroupSpool): return True return False - def build_cert(self, key_filename, entry, metadata): """ creates a new certificate according 
to the specification @@ -200,9 +206,14 @@ class SSLCA(Bcfg2.Server.Plugin.GroupSpool): days = self.cert_specs[entry.get('name')]['days'] passphrase = self.CAs[ca].get('passphrase') if passphrase: - cmd = "openssl ca -config %s -in %s -days %s -batch -passin pass:%s" % (ca_config, req, days, passphrase) + cmd = "openssl ca -config %s -in %s -days %s -batch -passin pass:%s" % (ca_config, + req, + days, + passphrase) else: - cmd = "openssl ca -config %s -in %s -days %s -batch" % (ca_config, req, days) + cmd = "openssl ca -config %s -in %s -days %s -batch" % (ca_config, + req, + days) cert = Popen(cmd, shell=True, stdout=PIPE).stdout.read() try: os.unlink(req_config) @@ -234,7 +245,7 @@ class SSLCA(Bcfg2.Server.Plugin.GroupSpool): }, 'alt_names': {} } - for section in defaults.keys(): + for section in list(defaults.keys()): cp.add_section(section) for key in defaults[section]: cp.set(section, key, defaults[section][key]) @@ -242,7 +253,7 @@ class SSLCA(Bcfg2.Server.Plugin.GroupSpool): altnames = list(metadata.aliases) altnames.append(metadata.hostname) for altname in altnames: - cp.set('alt_names', 'DNS.'+str(x), altname) + cp.set('alt_names', 'DNS.' + str(x), altname) x += 1 for item in ['C', 'L', 'ST', 'O', 'OU', 'emailAddress']: if self.cert_specs[entry.get('name')][item]: @@ -259,6 +270,9 @@ class SSLCA(Bcfg2.Server.Plugin.GroupSpool): req = tempfile.mkstemp()[1] days = self.cert_specs[entry.get('name')]['days'] key = self.data + key_filename - cmd = "openssl req -new -config %s -days %s -key %s -text -out %s" % (req_config, days, key, req) + cmd = "openssl req -new -config %s -days %s -key %s -text -out %s" % (req_config, + days, + key, + req) res = Popen(cmd, shell=True, stdout=PIPE).stdout.read() return req diff --git a/src/lib/Server/Plugins/Snapshots.py b/src/lib/Server/Plugins/Snapshots.py index a4489ae95..a1f72ba3e 100644 --- a/src/lib/Server/Plugins/Snapshots.py +++ b/src/lib/Server/Plugins/Snapshots.py @@ -24,6 +24,7 @@ datafields = { 'SymLink': ['to'], } + def build_snap_ent(entry): basefields = [] if entry.tag in ['Package', 'Service']: @@ -119,12 +120,12 @@ class Snapshots(Bcfg2.Server.Plugin.Statistics, data['name'] = ename extra[entry.tag][ename] = data else: - print "extra", entry.tag, entry.get('name') + print("extra", entry.tag, entry.get('name')) t2 = time.time() snap = Snapshot.from_data(self.session, correct, revision, metadata, entries, extra) self.session.add(snap) self.session.commit() t3 = time.time() - logger.info("Snapshot storage took %fs" % (t3-t2)) + logger.info("Snapshot storage took %fs" % (t3 - t2)) return True |