Diffstat (limited to 'src/lib/Bcfg2/Server/Plugin.py')
-rw-r--r--  src/lib/Bcfg2/Server/Plugin.py | 688
1 file changed, 394 insertions(+), 294 deletions(-)
diff --git a/src/lib/Bcfg2/Server/Plugin.py b/src/lib/Bcfg2/Server/Plugin.py
index ca37431a2..910bc0108 100644
--- a/src/lib/Bcfg2/Server/Plugin.py
+++ b/src/lib/Bcfg2/Server/Plugin.py
@@ -1,54 +1,60 @@
"""This module provides the baseclass for Bcfg2 Server Plugins."""
-import copy
-import logging
-import lxml.etree
import os
-import pickle
-import posixpath
import re
import sys
+import copy
+import logging
+import operator
import threading
-from Bcfg2.Bcfg2Py3k import ConfigParser
-
-from lxml.etree import XML, XMLSyntaxError
-
+import lxml.etree
+import Bcfg2.Server
import Bcfg2.Options
+from Bcfg2.Bcfg2Py3k import ConfigParser, CmpMixin, reduce, Queue, Empty, \
+ Full, cPickle
-# py3k compatibility
-if sys.hexversion >= 0x03000000:
- from functools import reduce
- from io import FileIO as BUILTIN_FILE_TYPE
-else:
- BUILTIN_FILE_TYPE = file
-from Bcfg2.Bcfg2Py3k import Queue
-from Bcfg2.Bcfg2Py3k import Empty
-from Bcfg2.Bcfg2Py3k import Full
+try:
+ import django
+ has_django = True
+except ImportError:
+ has_django = False
# grab default metadata info from bcfg2.conf
opts = {'owner': Bcfg2.Options.MDATA_OWNER,
'group': Bcfg2.Options.MDATA_GROUP,
- 'important': Bcfg2.Options.MDATA_IMPORTANT,
'perms': Bcfg2.Options.MDATA_PERMS,
+ 'secontext': Bcfg2.Options.MDATA_SECONTEXT,
+ 'important': Bcfg2.Options.MDATA_IMPORTANT,
'paranoid': Bcfg2.Options.MDATA_PARANOID,
'sensitive': Bcfg2.Options.MDATA_SENSITIVE}
-mdata_setup = Bcfg2.Options.OptionParser(opts)
-mdata_setup.parse([])
-del mdata_setup['args']
+default_file_metadata = Bcfg2.Options.OptionParser(opts)
+default_file_metadata.parse([])
+del default_file_metadata['args']
logger = logging.getLogger('Bcfg2.Server.Plugin')
-default_file_metadata = mdata_setup
-
-info_regex = re.compile( \
- 'encoding:(\s)*(?P<encoding>\w+)|' +
- 'group:(\s)*(?P<group>\S+)|' +
- 'important:(\s)*(?P<important>\S+)|' +
- 'mtime:(\s)*(?P<mtime>\w+)|' +
- 'owner:(\s)*(?P<owner>\S+)|' +
- 'paranoid:(\s)*(?P<paranoid>\S+)|' +
- 'perms:(\s)*(?P<perms>\w+)|' +
- 'sensitive:(\s)*(?P<sensitive>\S+)|')
+info_regex = re.compile('owner:(\s)*(?P<owner>\S+)|' +
+ 'group:(\s)*(?P<group>\S+)|' +
+ 'perms:(\s)*(?P<perms>\w+)|' +
+ 'secontext:(\s)*(?P<secontext>\S+)|' +
+ 'paranoid:(\s)*(?P<paranoid>\S+)|' +
+ 'sensitive:(\s)*(?P<sensitive>\S+)|' +
+ 'encoding:(\s)*(?P<encoding>\S+)|' +
+ 'important:(\s)*(?P<important>\S+)|' +
+ 'mtime:(\s)*(?P<mtime>\w+)|')
+
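As a reference point, here is a minimal standalone sketch of how a pattern in this style turns one line of a `:info` file into a keyword/value pair (simplified to two fields, and written as a raw string to avoid invalid-escape warnings on newer Pythons):

    import re
    # simplified two-field variant of the info_regex idiom above
    regex = re.compile(r'owner:\s*(?P<owner>\S+)|perms:\s*(?P<perms>\w+)|')
    match = regex.match('perms: 0755')
    print(match.groupdict())  # {'owner': None, 'perms': '0755'}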
+def bind_info(entry, metadata, infoxml=None, default=default_file_metadata):
+ for attr, val in list(default.items()):
+ entry.set(attr, val)
+ if infoxml:
+ mdata = dict()
+ infoxml.pnode.Match(metadata, mdata, entry=entry)
+ if 'Info' not in mdata:
+ msg = "Failed to set metadata for file %s" % entry.get('name')
+ logger.error(msg)
+ raise PluginExecutionError(msg)
+ for attr, val in list(mdata['Info'][None].items()):
+ entry.set(attr, val)
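A minimal sketch of what the default branch of bind_info does to an entry, assuming a plain dict stands in for default_file_metadata:

    import lxml.etree
    defaults = {'owner': 'root', 'group': 'root', 'perms': '0644'}  # stand-in
    entry = lxml.etree.Element('Path', name='/etc/motd')
    for attr, val in list(defaults.items()):
        entry.set(attr, val)
    print(lxml.etree.tostring(entry))
    # b'<Path name="/etc/motd" owner="root" group="root" perms="0644"/>'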
class PluginInitError(Exception):
@@ -61,6 +67,18 @@ class PluginExecutionError(Exception):
pass
+class MetadataConsistencyError(Exception):
+ """This error gets raised when metadata is internally inconsistent."""
+ pass
+
+
+class MetadataRuntimeError(Exception):
+ """This error is raised when the metadata engine
+ is called prior to reading enough data.
+ """
+ pass
+
+
class Debuggable(object):
__rmi__ = ['toggle_debug']
@@ -73,6 +91,10 @@ class Debuggable(object):
def toggle_debug(self):
self.debug_flag = not self.debug_flag
+ self.debug_log("%s: debug_flag = %s" % (self.__class__.__name__,
+ self.debug_flag),
+ flag=True)
+ return self.debug_flag
def debug_log(self, message, flag=None):
if (flag is None and self.debug_flag) or flag:
@@ -116,8 +138,7 @@ class Plugin(Debuggable):
@classmethod
def init_repo(cls, repo):
- path = "%s/%s" % (repo, cls.name)
- os.makedirs(path)
+ os.makedirs(os.path.join(repo, cls.name))
def shutdown(self):
self.running = False
@@ -126,6 +147,26 @@ class Plugin(Debuggable):
return "%s Plugin" % self.__class__.__name__
+class DatabaseBacked(Plugin):
+ @property
+ def _use_db(self):
+ use_db = self.core.setup.cfp.getboolean(self.name.lower(),
+ "use_database",
+ default=False)
+ if use_db and has_django and self.core.database_available:
+ return True
+ elif not use_db:
+ return False
+ else:
+ self.logger.error("use_database is true but django not found")
+ return False
+
+
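Note that _use_db only returns True when the plugin's own section of bcfg2.conf opts in and Django is importable. A hypothetical stanza for a plugin named "Metadata" (sections are the lowercased plugin name) would look like:

    # bcfg2.conf (hypothetical example)
    [metadata]
    use_database = true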
+class PluginDatabaseModel(object):
+ class Meta:
+ app_label = "Server"
+
+
class Generator(object):
"""Generator plugins contribute to literal client configurations."""
def HandlesEntry(self, entry, metadata):
@@ -134,19 +175,19 @@ class Generator(object):
def HandleEntry(self, entry, metadata):
"""This is the slow-path handler for configuration entry binding."""
- raise PluginExecutionError
+ return entry
class Structure(object):
"""Structure Plugins contribute to abstract client configurations."""
def BuildStructures(self, metadata):
"""Return a list of abstract goal structures for client."""
- raise PluginExecutionError
+ raise NotImplementedError
class Metadata(object):
"""Signal metadata capabilities for this plugin"""
- def add_client(self, client_name, attribs):
+ def add_client(self, client_name):
"""Add client."""
pass
@@ -158,11 +199,17 @@ class Metadata(object):
"""Create viz str for viz admin mode."""
pass
+ def _handle_default_event(self, event):
+ pass
+
def get_initial_metadata(self, client_name):
- raise PluginExecutionError
+ raise NotImplementedError
- def merge_additional_data(self, imd, source, groups, data):
- raise PluginExecutionError
+ def merge_additional_data(self, imd, source, data):
+ raise NotImplementedError
+
+ def merge_additional_groups(self, imd, groups):
+ raise NotImplementedError
class Connector(object):
@@ -187,23 +234,23 @@ class Probing(object):
pass
-class Statistics(object):
+class Statistics(Plugin):
"""Signal statistics handling capability."""
def process_statistics(self, client, xdata):
pass
-class ThreadedStatistics(Statistics,
- threading.Thread):
+class ThreadedStatistics(Statistics, threading.Thread):
"""Threaded statistics handling capability."""
def __init__(self, core, datastore):
- Statistics.__init__(self)
+ Statistics.__init__(self, core, datastore)
threading.Thread.__init__(self)
# Event from the core signaling an exit
self.terminate = core.terminate
self.work_queue = Queue(100000)
- self.pending_file = "%s/etc/%s.pending" % (datastore, self.__class__.__name__)
- self.daemon = True
+ self.pending_file = os.path.join(datastore, "etc",
+ "%s.pending" % self.name)
+ self.daemon = False
self.start()
def save(self):
@@ -213,32 +260,38 @@ class ThreadedStatistics(Statistics,
while not self.work_queue.empty():
(metadata, data) = self.work_queue.get_nowait()
try:
- pending_data.append((metadata.hostname, lxml.etree.tostring(data)))
+ pending_data.append((metadata.hostname,
+ lxml.etree.tostring(data,
+ xml_declaration=False).decode("UTF-8")))
except:
- self.logger.warning("Dropping interaction for %s" % metadata.hostname)
+ err = sys.exc_info()[1]
+ self.logger.warning("Dropping interaction for %s: %s" %
+ (metadata.hostname, err))
except Empty:
pass
try:
savefile = open(self.pending_file, 'w')
- pickle.dump(pending_data, savefile)
+ cPickle.dump(pending_data, savefile)
savefile.close()
- self.logger.info("Saved pending %s data" % self.__class__.__name__)
+ self.logger.info("Saved pending %s data" % self.name)
except:
- self.logger.warning("Failed to save pending data")
+ err = sys.exc_info()[1]
+ self.logger.warning("Failed to save pending data: %s" % err)
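The save/load pair here is just a pickle round trip of (hostname, xml-string) tuples; a standalone sketch of the idea (note Python 3 wants binary-mode files, which this diff's text-mode open predates):

    import pickle
    pending = [('client1.example.com', '<Statistics/>')]  # hypothetical data
    with open('/tmp/Example.pending', 'wb') as savefile:
        pickle.dump(pending, savefile)
    with open('/tmp/Example.pending', 'rb') as savefile:
        print(pickle.load(savefile))
    # [('client1.example.com', '<Statistics/>')]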
def load(self):
- """Load any pending data to a file."""
+ """Load any pending data from a file."""
if not os.path.exists(self.pending_file):
return True
pending_data = []
try:
savefile = open(self.pending_file, 'r')
- pending_data = pickle.load(savefile)
+ pending_data = cPickle.load(savefile)
savefile.close()
except Exception:
e = sys.exc_info()[1]
self.logger.warning("Failed to load pending data: %s" % e)
+ return False
for (pmetadata, pdata) in pending_data:
# check that shutdown wasn't called early
if self.terminate.isSet():
@@ -249,56 +302,58 @@ class ThreadedStatistics(Statistics,
try:
metadata = self.core.build_metadata(pmetadata)
break
- except Bcfg2.Server.Plugins.Metadata.MetadataRuntimeError:
+ except MetadataRuntimeError:
pass
self.terminate.wait(5)
if self.terminate.isSet():
return False
- self.work_queue.put_nowait((metadata, lxml.etree.fromstring(pdata)))
+ self.work_queue.put_nowait((metadata,
+ lxml.etree.XML(pdata,
+ parser=Bcfg2.Server.XMLParser)))
except Full:
self.logger.warning("Queue.Full: Failed to load queue data")
break
except lxml.etree.LxmlError:
lxml_error = sys.exc_info()[1]
- self.logger.error("Unable to load save interaction: %s" % lxml_error)
- except Bcfg2.Server.Plugins.Metadata.MetadataConsistencyError:
- self.logger.error("Unable to load metadata for save interaction: %s" % pmetadata)
+ self.logger.error("Unable to load saved interaction: %s" %
+ lxml_error)
+ except MetadataConsistencyError:
+ self.logger.error("Unable to load metadata for save "
+ "interaction: %s" % pmetadata)
try:
os.unlink(self.pending_file)
except:
- self.logger.error("Failed to unlink save file: %s" % self.pending_file)
- self.logger.info("Loaded pending %s data" % self.__class__.__name__)
+ self.logger.error("Failed to unlink save file: %s" %
+ self.pending_file)
+ self.logger.info("Loaded pending %s data" % self.name)
return True
def run(self):
if not self.load():
return
- while not self.terminate.isSet():
+ while not self.terminate.isSet() and self.work_queue != None:
try:
- (xdata, client) = self.work_queue.get(block=True, timeout=2)
+ (client, xdata) = self.work_queue.get(block=True, timeout=2)
except Empty:
continue
except Exception:
e = sys.exc_info()[1]
self.logger.error("ThreadedStatistics: %s" % e)
continue
- self.handle_statistic(xdata, client)
- if not self.work_queue.empty():
+ self.handle_statistic(client, xdata)
+ if self.work_queue != None and not self.work_queue.empty():
self.save()
def process_statistics(self, metadata, data):
- warned = False
try:
self.work_queue.put_nowait((metadata, copy.copy(data)))
- warned = False
except Full:
- if not warned:
- self.logger.warning("%s: Queue is full. Dropping interactions." % self.__class__.__name__)
- warned = True
+ self.logger.warning("%s: Queue is full. Dropping interactions." %
+ self.name)
- def handle_statistics(self, metadata, data):
+ def handle_statistic(self, metadata, data):
"""Handle stats here."""
pass
@@ -308,17 +363,17 @@ class PullSource(object):
return []
def GetCurrentEntry(self, client, e_type, e_name):
- raise PluginExecutionError
+ raise NotImplementedError
class PullTarget(object):
def AcceptChoices(self, entry, metadata):
- raise PluginExecutionError
+ raise NotImplementedError
def AcceptPullData(self, specific, new_entry, verbose):
"""This is the null per-plugin implementation
of bcfg2-admin pull."""
- raise PluginExecutionError
+ raise NotImplementedError
class Decision(object):
@@ -334,13 +389,13 @@ class ValidationError(Exception):
class StructureValidator(object):
"""Validate/modify goal structures."""
def validate_structures(self, metadata, structures):
- raise ValidationError("not implemented")
+ raise NotImplementedError
class GoalValidator(object):
"""Validate/modify configuration goals."""
def validate_goals(self, metadata, goals):
- raise ValidationError("not implemented")
+ raise NotImplementedError
class Version(object):
@@ -352,6 +407,17 @@ class Version(object):
pass
+class ClientRunHooks(object):
+ """ Provides hooks to interact with client runs """
+ def start_client_run(self, metadata):
+ pass
+
+ def end_client_run(self, metadata):
+ pass
+
+ def end_statistics(self, metadata):
+ pass
+
# the rest of the file contains classes for coherent file caching
class FileBacked(object):
@@ -361,17 +427,18 @@ class FileBacked(object):
This object is meant to be used as a part of DirectoryBacked.
"""
- def __init__(self, name):
+ def __init__(self, name, fam=None):
object.__init__(self)
self.data = ''
self.name = name
+ self.fam = fam
def HandleEvent(self, event=None):
"""Read file upon update."""
if event and event.code2str() not in ['exists', 'changed', 'created']:
return
try:
- self.data = BUILTIN_FILE_TYPE(self.name).read()
+ self.data = open(self.name).read()
self.Index()
except IOError:
err = sys.exc_info()[1]
@@ -382,16 +449,14 @@ class FileBacked(object):
pass
def __repr__(self):
- return "%s: %s" % (self.__class__.__name__, str(self))
-
- def __str__(self):
- return "%s: %s" % (self.name, self.data)
+ return "%s: %s" % (self.__class__.__name__, self.name)
class DirectoryBacked(object):
"""This object is a coherent cache for a filesystem hierarchy of files."""
__child__ = FileBacked
patterns = re.compile('.*')
+ ignore = None
def __init__(self, data, fam):
"""Initialize the DirectoryBacked object.
@@ -438,8 +503,8 @@ class DirectoryBacked(object):
"""
dirpathname = os.path.join(self.data, relative)
if relative not in self.handles.values():
- if not posixpath.isdir(dirpathname):
- logger.error("Failed to open directory %s" % (dirpathname))
+ if not os.path.isdir(dirpathname):
+ logger.error("%s is not a directory" % dirpathname)
return
reqid = self.fam.AddMonitor(dirpathname, self)
self.handles[reqid] = relative
@@ -453,7 +518,8 @@ class DirectoryBacked(object):
added.
"""
self.entries[relative] = self.__child__(os.path.join(self.data,
- relative))
+ relative),
+ self.fam)
self.entries[relative].HandleEvent(event)
def HandleEvent(self, event):
@@ -470,27 +536,33 @@ class DirectoryBacked(object):
"""
action = event.code2str()
- # Clean up the absolute path names passed in
- event.filename = os.path.normpath(event.filename)
- if event.filename.startswith(self.data):
- event.filename = event.filename[len(self.data)+1:]
-
# Exclude events for actions we don't care about
if action == 'endExist':
return
if event.requestID not in self.handles:
- logger.warn("Got %s event with unknown handle (%s) for %s"
- % (action, event.requestID, abspath))
+ logger.warn("Got %s event with unknown handle (%s) for %s" %
+ (action, event.requestID, event.filename))
+ return
+
+ # Clean up path names
+ event.filename = os.path.normpath(event.filename)
+ if event.filename.startswith(self.data):
+ # the first event we get is on the data directory itself
+ event.filename = event.filename[len(self.data) + 1:]
+
+ if self.ignore and self.ignore.search(event.filename):
+ logger.debug("Ignoring event %s" % event.filename)
return
# Calculate the absolute and relative paths this event refers to
abspath = os.path.join(self.data, self.handles[event.requestID],
event.filename)
- relpath = os.path.join(self.handles[event.requestID], event.filename)
+ relpath = os.path.join(self.handles[event.requestID],
+ event.filename).lstrip('/')
if action == 'deleted':
- for key in self.entries.keys():
+ for key in list(self.entries.keys()):
if key.startswith(relpath):
del self.entries[key]
# We remove values from self.entries, but not
@@ -498,7 +570,7 @@ class DirectoryBacked(object):
# watching a directory just because it gets deleted. If it
# is recreated, we will start getting notifications for it
# again without having to add a new monitor.
- elif posixpath.isdir(abspath):
+ elif os.path.isdir(abspath):
# Deal with events for directories
if action in ['exists', 'created']:
self.add_directory_monitor(relpath)
@@ -522,21 +594,13 @@ class DirectoryBacked(object):
# didn't know about. Go ahead and treat it like a
# "created" event, but log a warning, because this
# is unexpected.
- logger.warn("Got %s event for unexpected dir %s" % (action,
- abspath))
+ logger.warn("Got %s event for unexpected dir %s" %
+ (action, abspath))
self.add_directory_monitor(relpath)
else:
- logger.warn("Got unknown dir event %s %s %s" % (event.requestID,
- event.code2str(),
- abspath))
- else:
- # Deal with events for non-directories
- if ((event.filename[-1] == '~') or
- (event.filename[:2] == '.#') or
- (event.filename[-4:] == '.swp') or
- (event.filename in ['SCCS', '.svn', '4913']) or
- (not self.patterns.match(event.filename))):
- return
+ logger.warn("Got unknown dir event %s %s %s" %
+ (event.requestID, event.code2str(), abspath))
+ elif self.patterns.search(event.filename):
if action in ['exists', 'created']:
self.add_entry(relpath, event)
elif action == 'changed':
@@ -547,13 +611,16 @@ class DirectoryBacked(object):
# know about. Go ahead and treat it like a
# "created" event, but log a warning, because this
# is unexpected.
- logger.warn("Got %s event for unexpected file %s" % (action,
- abspath))
+ logger.warn("Got %s event for unexpected file %s" %
+ (action, abspath))
self.add_entry(relpath, event)
else:
- logger.warn("Got unknown file event %s %s %s" % (event.requestID,
- event.code2str(),
- abspath))
+ logger.warn("Got unknown file event %s %s %s" %
+ (event.requestID, event.code2str(), abspath))
+ else:
+ logger.warn("Could not process filename %s; ignoring" %
+ event.filename)
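The new class-level ignore attribute replaces the hard-coded backup-file checks deleted above. A hypothetical subclass that skips common editor droppings might set:

    class ExampleDirectoryBacked(DirectoryBacked):
        # hypothetical subclass: skip emacs/vim artifacts by filename
        ignore = re.compile(r'^(\.#.*|.*~|\..*\.(sw[px]))$')
        patterns = re.compile(r'.*\.xml$')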
class XMLFileBacked(FileBacked):
@@ -563,68 +630,55 @@ class XMLFileBacked(FileBacked):
"""
__identifier__ = 'name'
- def __init__(self, filename):
- self.label = "dummy"
- self.entries = []
+ def __init__(self, filename, fam=None, should_monitor=False):
FileBacked.__init__(self, filename)
-
- def Index(self):
- """Build local data structures."""
- try:
- self.xdata = XML(self.data)
- except XMLSyntaxError:
- logger.error("Failed to parse %s" % (self.name))
- return
- self.entries = self.xdata.getchildren()
- if self.__identifier__ is not None:
- self.label = self.xdata.attrib[self.__identifier__]
-
- def __iter__(self):
- return iter(self.entries)
-
- def __str__(self):
- return "%s: %s" % (self.name, lxml.etree.tostring(self.xdata))
-
-
-class SingleXMLFileBacked(XMLFileBacked):
- """This object is a coherent cache for an independent XML file."""
- def __init__(self, filename, fam):
- XMLFileBacked.__init__(self, filename)
+ self.label = ""
+ self.entries = []
self.extras = []
self.fam = fam
- self.fam.AddMonitor(filename, self)
+ self.should_monitor = should_monitor
+ if fam and should_monitor:
+ self.fam.AddMonitor(filename, self)
def _follow_xincludes(self, fname=None, xdata=None):
- ''' follow xincludes, adding included files to fam and to
- self.extras '''
+ ''' follow xincludes, adding included files to self.extras '''
if xdata is None:
if fname is None:
xdata = self.xdata.getroottree()
else:
xdata = lxml.etree.parse(fname)
- included = [ent.get('href')
- for ent in xdata.findall('//{http://www.w3.org/2001/XInclude}include')]
- for name in included:
- if name not in self.extras:
- if name.startswith("/"):
- fpath = name
+ included = [el for el in xdata.findall('//%sinclude' %
+ Bcfg2.Server.XI_NAMESPACE)]
+ for el in included:
+ name = el.get("href")
+ if name.startswith("/"):
+ fpath = name
+ else:
+ if fname:
+ rel = fname
else:
- fpath = os.path.join(os.path.dirname(self.name), name)
- self.add_monitor(fpath, name)
- self._follow_xincludes(fname=fpath)
-
- def add_monitor(self, fpath, fname):
- self.fam.AddMonitor(fpath, self)
- self.extras.append(fname)
+ rel = self.name
+ fpath = os.path.join(os.path.dirname(rel), name)
+ if fpath not in self.extras:
+ if os.path.exists(fpath):
+ self._follow_xincludes(fname=fpath)
+ self.add_monitor(fpath)
+ else:
+ msg = "%s: %s does not exist, skipping" % (self.name, name)
+ if el.findall('./%sfallback' % Bcfg2.Server.XI_NAMESPACE):
+ self.logger.debug(msg)
+ else:
+ self.logger.warning(msg)
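To illustrate what _follow_xincludes chases, a standalone lxml sketch (the xi:fallback child is what downgrades a missing href from a warning to a debug message above):

    import lxml.etree
    xml = '''<Groups xmlns:xi="http://www.w3.org/2001/XInclude">
      <xi:include href="groups/webservers.xml"/>
      <xi:include href="missing.xml"><xi:fallback/></xi:include>
    </Groups>'''
    xdata = lxml.etree.XML(xml)
    xi = '{http://www.w3.org/2001/XInclude}'
    print([el.get('href') for el in xdata.findall('.//%sinclude' % xi)])
    # -> ['groups/webservers.xml', 'missing.xml']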
def Index(self):
"""Build local data structures."""
try:
- self.xdata = lxml.etree.XML(self.data, base_url=self.name)
+ self.xdata = lxml.etree.XML(self.data, base_url=self.name,
+ parser=Bcfg2.Server.XMLParser)
except lxml.etree.XMLSyntaxError:
- err = sys.exc_info()[1]
- logger.error("Failed to parse %s: %s" % (self.name, err))
- raise Bcfg2.Server.Plugin.PluginInitError
+ msg = "Failed to parse %s: %s" % (self.name, sys.exc_info()[1])
+ logger.error(msg)
+ raise PluginInitError(msg)
self._follow_xincludes()
if self.extras:
@@ -638,43 +692,52 @@ class SingleXMLFileBacked(XMLFileBacked):
if self.__identifier__ is not None:
self.label = self.xdata.attrib[self.__identifier__]
+ def add_monitor(self, fpath):
+ self.extras.append(fpath)
+ if self.fam and self.should_monitor:
+ self.fam.AddMonitor(fpath, self)
+
+ def __iter__(self):
+ return iter(self.entries)
+
+ def __str__(self):
+ return "%s at %s" % (self.__class__.__name__, self.name)
+
class StructFile(XMLFileBacked):
"""This file contains a set of structure file formatting logic."""
__identifier__ = None
- def __init__(self, name):
- XMLFileBacked.__init__(self, name)
+ def _include_element(self, item, metadata):
+ """ determine if an XML element matches the metadata """
+ if isinstance(item, lxml.etree._Comment):
+ return False
+ negate = item.get('negate', 'false').lower() == 'true'
+ if item.tag == 'Group':
+ return negate == (item.get('name') not in metadata.groups)
+ elif item.tag == 'Client':
+ return negate == (item.get('name') != metadata.hostname)
+ else:
+ return True
def _match(self, item, metadata):
""" recursive helper for Match() """
- if isinstance(item, lxml.etree._Comment):
- return []
- elif item.tag == 'Group':
- rv = []
- if ((item.get('negate', 'false').lower() == 'true' and
- item.get('name') not in metadata.groups) or
- (item.get('negate', 'false').lower() == 'false' and
- item.get('name') in metadata.groups)):
- for child in item.iterchildren():
- rv.extend(self._match(child, metadata))
- return rv
- elif item.tag == 'Client':
- rv = []
- if ((item.get('negate', 'false').lower() == 'true' and
- item.get('name') != metadata.hostname) or
- (item.get('negate', 'false').lower() == 'false' and
- item.get('name') == metadata.hostname)):
+ if self._include_element(item, metadata):
+ if item.tag == 'Group' or item.tag == 'Client':
+ rv = []
+ if self._include_element(item, metadata):
+ for child in item.iterchildren():
+ rv.extend(self._match(child, metadata))
+ return rv
+ else:
+ rv = copy.deepcopy(item)
+ for child in rv.iterchildren():
+ rv.remove(child)
for child in item.iterchildren():
rv.extend(self._match(child, metadata))
- return rv
+ return [rv]
else:
- rv = copy.copy(item)
- for child in rv.iterchildren():
- rv.remove(child)
- for child in item.iterchildren():
- rv.extend(self._match(child, metadata))
- return [rv]
+ return []
def Match(self, metadata):
"""Return matching fragments of independent."""
@@ -683,27 +746,52 @@ class StructFile(XMLFileBacked):
rv.extend(self._match(child, metadata))
return rv
+ def _xml_match(self, item, metadata):
+ """ recursive helper for XMLMatch """
+ if self._include_element(item, metadata):
+ if item.tag == 'Group' or item.tag == 'Client':
+ for child in item.iterchildren():
+ item.remove(child)
+ item.getparent().append(child)
+ self._xml_match(child, metadata)
+ item.getparent().remove(item)
+ else:
+ for child in item.iterchildren():
+ self._xml_match(child, metadata)
+ else:
+ item.getparent().remove(item)
+
+ def XMLMatch(self, metadata):
+ """ Return a rebuilt XML document that only contains the
+ matching portions """
+ rv = copy.deepcopy(self.xdata)
+ for child in rv.iterchildren():
+ self._xml_match(child, metadata)
+ return rv
+
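A standalone sketch of the promotion XMLMatch performs, simplified to Group elements without negate handling:

    import lxml.etree

    def xml_match(el, groups):
        # recurse first so nested Groups are flattened bottom-up
        for child in list(el):
            xml_match(child, groups)
        if el.tag == 'Group':
            parent = el.getparent()
            if el.get('name') in groups:
                for child in list(el):  # promote children, drop the wrapper
                    el.remove(child)
                    parent.append(child)
            parent.remove(el)

    doc = lxml.etree.XML('<Rules priority="1">'
                         '<Group name="web"><Service name="httpd"/></Group>'
                         '<Service name="sshd"/></Rules>')
    for child in list(doc):
        xml_match(child, set(['web']))
    print(lxml.etree.tostring(doc))
    # b'<Rules priority="1"><Service name="sshd"/><Service name="httpd"/></Rules>'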
-class INode:
+class INode(object):
"""
INodes provide lists of things available at a particular
group intersection.
"""
- raw = {'Client': "lambda m, e:'%(name)s' == m.hostname and predicate(m, e)",
- 'Group': "lambda m, e:'%(name)s' in m.groups and predicate(m, e)"}
- nraw = {'Client': "lambda m, e:'%(name)s' != m.hostname and predicate(m, e)",
- 'Group': "lambda m, e:'%(name)s' not in m.groups and predicate(m, e)"}
+ raw = dict(
+ Client="lambda m, e:'%(name)s' == m.hostname and predicate(m, e)",
+ Group="lambda m, e:'%(name)s' in m.groups and predicate(m, e)")
+ nraw = dict(
+ Client="lambda m, e:'%(name)s' != m.hostname and predicate(m, e)",
+ Group="lambda m, e:'%(name)s' not in m.groups and predicate(m, e)")
containers = ['Group', 'Client']
ignore = []
def __init__(self, data, idict, parent=None):
self.data = data
self.contents = {}
- if parent == None:
- self.predicate = lambda m, d: True
+ if parent is None:
+ self.predicate = lambda m, e: True
else:
predicate = parent.predicate
- if data.get('negate', 'false') in ['true', 'True']:
+ if data.get('negate', 'false').lower() == 'true':
psrc = self.nraw
else:
psrc = self.raw
@@ -712,21 +800,29 @@ class INode:
{'name': data.get('name')},
{'predicate': predicate})
else:
- raise Exception
- mytype = self.__class__
+ raise PluginExecutionError("Unknown tag: %s" % data.tag)
self.children = []
+ self._load_children(data, idict)
+
+ def _load_children(self, data, idict):
for item in data.getchildren():
if item.tag in self.ignore:
continue
elif item.tag in self.containers:
- self.children.append(mytype(item, idict, self))
+ self.children.append(self.__class__(item, idict, self))
else:
try:
- self.contents[item.tag][item.get('name')] = item.attrib
+ self.contents[item.tag][item.get('name')] = \
+ dict(item.attrib)
except KeyError:
- self.contents[item.tag] = {item.get('name'): item.attrib}
+ self.contents[item.tag] = \
+ {item.get('name'): dict(item.attrib)}
if item.text:
- self.contents[item.tag]['__text__'] = item.text
+ self.contents[item.tag][item.get('name')]['__text__'] = \
+ item.text
+ if item.getchildren():
+ self.contents[item.tag][item.get('name')]['__children__'] =\
+ item.getchildren()
try:
idict[item.tag].append(item.get('name'))
except KeyError:
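The raw/nraw templates above are expanded with eval into chained predicates; a standalone sketch of that idiom:

    # standalone sketch of the eval idiom INode uses for Group predicates
    parent_predicate = lambda m, e: True
    src = "lambda m, e:'%(name)s' in m.groups and predicate(m, e)"
    pred = eval(src % {'name': 'web'}, {'predicate': parent_predicate})

    class FakeMetadata(object):  # hypothetical stand-in for client metadata
        hostname = 'client1.example.com'
        groups = ['web', 'prod']

    print(pred(FakeMetadata(), None))  # True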
@@ -760,43 +856,48 @@ class XMLSrc(XMLFileBacked):
"""XMLSrc files contain a LNode hierarchy that returns matching entries."""
__node__ = INode
__cacheobj__ = dict
+ __priority_required__ = True
- def __init__(self, filename, noprio=False):
- XMLFileBacked.__init__(self, filename)
+ def __init__(self, filename, fam=None, should_monitor=False):
+ XMLFileBacked.__init__(self, filename, fam, should_monitor)
self.items = {}
self.cache = None
self.pnode = None
self.priority = -1
- self.noprio = noprio
def HandleEvent(self, _=None):
"""Read file upon update."""
try:
- data = BUILTIN_FILE_TYPE(self.name).read()
+ data = open(self.name).read()
except IOError:
- logger.error("Failed to read file %s" % (self.name))
- return
+ msg = "Failed to read file %s: %s" % (self.name, sys.exc_info()[1])
+ logger.error(msg)
+ raise PluginExecutionError(msg)
self.items = {}
try:
- xdata = lxml.etree.XML(data)
+ xdata = lxml.etree.XML(data, parser=Bcfg2.Server.XMLParser)
except lxml.etree.XMLSyntaxError:
- logger.error("Failed to parse file %s" % (self.name))
- return
+ msg = "Failed to parse file %s" % (self.name, sys.exc_info()[1])
+ logger.error(msg)
+ raise PluginExecutionError(msg)
self.pnode = self.__node__(xdata, self.items)
self.cache = None
try:
self.priority = int(xdata.get('priority'))
except (ValueError, TypeError):
- if not self.noprio:
- logger.error("Got bogus priority %s for file %s" %
- (xdata.get('priority'), self.name))
+ if self.__priority_required__:
+ msg = "Got bogus priority %s for file %s" % \
+ (xdata.get('priority'), self.name)
+ logger.error(msg)
+ raise PluginExecutionError(msg)
+
del xdata, data
def Cache(self, metadata):
"""Build a package dict for a given host."""
- if self.cache == None or self.cache[0] != metadata:
+ if self.cache is None or self.cache[0] != metadata:
cache = (metadata, self.__cacheobj__())
- if self.pnode == None:
+ if self.pnode is None:
logger.error("Cache method called early for %s; forcing data load" % (self.name))
self.HandleEvent()
return
@@ -809,11 +910,13 @@ class XMLSrc(XMLFileBacked):
class InfoXML(XMLSrc):
__node__ = InfoNode
+ __priority_required__ = False
class XMLDirectoryBacked(DirectoryBacked):
"""Directorybacked for *.xml."""
- patterns = re.compile('.*\.xml')
+ patterns = re.compile('^.*\.xml$')
+ __child__ = XMLFileBacked
class PrioDir(Plugin, Generator, XMLDirectoryBacked):
@@ -824,11 +927,7 @@ class PrioDir(Plugin, Generator, XMLDirectoryBacked):
def __init__(self, core, datastore):
Plugin.__init__(self, core, datastore)
Generator.__init__(self)
- try:
- XMLDirectoryBacked.__init__(self, self.data, self.core.fam)
- except OSError:
- self.logger.error("Failed to load %s indices" % (self.name))
- raise PluginInitError
+ XMLDirectoryBacked.__init__(self, self.data, self.core.fam)
def HandleEvent(self, event):
"""Handle events and update dispatch table."""
@@ -867,19 +966,22 @@ class PrioDir(Plugin, Generator, XMLDirectoryBacked):
else:
prio = [int(src.priority) for src in matching]
if prio.count(max(prio)) > 1:
- self.logger.error("Found conflicting sources with "
- "same priority for %s, %s %s" %
- (metadata.hostname,
- entry.tag.lower(), entry.get('name')))
+ msg = "Found conflicting sources with same priority for " + \
+ "%s:%s for %s" % (entry.tag, entry.get("name"),
+ metadata.hostname)
+ self.logger.error(msg)
self.logger.error([item.name for item in matching])
self.logger.error("Priority was %s" % max(prio))
- raise PluginExecutionError
+ raise PluginExecutionError(msg)
index = prio.index(max(prio))
for rname in list(matching[index].cache[1][entry.tag].keys()):
if self._matches(entry, metadata, [rname]):
data = matching[index].cache[1][entry.tag][rname]
break
+ else:
+ # Fall back on __getitem__. Required if override used
+ data = matching[index].cache[1][entry.tag][entry.get('name')]
if '__text__' in data:
entry.text = data['__text__']
if '__children__' in data:
@@ -896,18 +998,16 @@ class SpecificityError(Exception):
pass
-class Specificity:
-
- def __init__(self, all=False, group=False, hostname=False, prio=0, delta=False):
+class Specificity(CmpMixin):
+ def __init__(self, all=False, group=False, hostname=False, prio=0,
+ delta=False):
+ CmpMixin.__init__(self)
self.hostname = hostname
self.all = all
self.group = group
self.prio = prio
self.delta = delta
- def __lt__(self, other):
- return self.__cmp__(other) < 0
-
def matches(self, metadata):
return self.all or \
self.hostname == metadata.hostname or \
@@ -916,26 +1016,36 @@ class Specificity:
def __cmp__(self, other):
"""Sort most to least specific."""
if self.all:
- return 1
- if self.group:
+ if other.all:
+ return 0
+ else:
+ return 1
+ elif other.all:
+ return -1
+ elif self.group:
if other.hostname:
return 1
if other.group and other.prio > self.prio:
return 1
if other.group and other.prio == self.prio:
return 0
+ elif other.group:
+ return -1
+ elif self.hostname and other.hostname:
+ return 0
return -1
- def more_specific(self, other):
- """Test if self is more specific than other."""
+ def __str__(self):
+ rv = [self.__class__.__name__, ': ']
if self.all:
- True
+ rv.append("all")
elif self.group:
- if other.hostname:
- return True
- elif other.group and other.prio > self.prio:
- return True
- return False
+ rv.append("Group %s, priority %s" % (self.group, self.prio))
+ elif self.hostname:
+ rv.append("Host %s" % self.hostname)
+ if self.delta:
+ rv.append(", delta=%s" % self.delta)
+ return "".join(rv)
class SpecificData(object):
@@ -957,6 +1067,7 @@ class SpecificData(object):
class EntrySet(Debuggable):
"""Entry sets deal with the host- and group-specific entries."""
ignore = re.compile("^(\.#.*|.*~|\\..*\\.(sw[px])|.*\\.genshi_include)$")
+ basename_is_regex = False
def __init__(self, basename, path, entry_type, encoding):
Debuggable.__init__(self, name=basename)
@@ -966,14 +1077,15 @@ class EntrySet(Debuggable):
self.metadata = default_file_metadata.copy()
self.infoxml = None
self.encoding = encoding
- pattern = '(.*/)?%s(\.((H_(?P<hostname>\S+))|' % basename
+
+ if self.basename_is_regex:
+ base_pat = basename
+ else:
+ base_pat = re.escape(basename)
+ pattern = '(.*/)?%s(\.((H_(?P<hostname>\S+))|' % base_pat
pattern += '(G(?P<prio>\d+)_(?P<group>\S+))))?$'
self.specific = re.compile(pattern)
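For reference, a standalone sketch of the filenames this pattern classifies, using a hypothetical basename 'motd':

    import re
    base_pat = re.escape('motd')  # hypothetical basename
    pattern = re.compile(r'(.*/)?%s(\.((H_(?P<hostname>\S+))|'
                         r'(G(?P<prio>\d+)_(?P<group>\S+))))?$' % base_pat)
    m = pattern.match('/repo/Cfg/etc/motd/motd.G50_webserver')
    print(m.group('prio'), m.group('group'))  # 50 webserver
    print(pattern.match('motd.H_client1').group('hostname'))  # client1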
- def debug_log(self, message, flag=None):
- if (flag is None and self.debug_flag) or flag:
- logger.error(message)
-
def sort_by_specific(self, one, other):
return cmp(one.specific, other.specific)
@@ -987,20 +1099,13 @@ class EntrySet(Debuggable):
if matching is None:
matching = self.get_matching(metadata)
- hspec = [ent for ent in matching if ent.specific.hostname]
- if hspec:
- return hspec[0]
-
- gspec = [ent for ent in matching if ent.specific.group]
- if gspec:
- gspec.sort(self.group_sortfunc)
- return gspec[-1]
-
- aspec = [ent for ent in matching if ent.specific.all]
- if aspec:
- return aspec[0]
-
- raise PluginExecutionError
+ if matching:
+ matching.sort(key=operator.attrgetter("specific"))
+ return matching[0]
+ else:
+ raise PluginExecutionError("No matching entries available for %s "
+ "for %s" % (self.path,
+ metadata.hostname))
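Since best_matching now just sorts on the specific attribute and takes the first element, the order Specificity.__cmp__ yields is what matters: host-specific entries first, then group entries by descending priority, then all. A standalone sketch of that ordering with a plain sort key (assumed equivalent, not the actual CmpMixin machinery):

    entries = [('all', 0), ('group', 10), ('group', 50), ('host', 0)]
    rank = {'host': 0, 'group': 1, 'all': 2}
    entries.sort(key=lambda e: (rank[e[0]], -e[1]))
    print(entries)
    # [('host', 0), ('group', 50), ('group', 10), ('all', 0)]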
def handle_event(self, event):
"""Handle FAM events for the TemplateSet."""
@@ -1074,7 +1179,7 @@ class EntrySet(Debuggable):
fpath = os.path.join(self.path, event.filename)
if event.filename == 'info.xml':
if not self.infoxml:
- self.infoxml = InfoXML(fpath, True)
+ self.infoxml = InfoXML(fpath)
self.infoxml.HandleEvent(event)
elif event.filename in [':info', 'info']:
for line in open(fpath).readlines():
@@ -1089,8 +1194,7 @@ class EntrySet(Debuggable):
if value:
self.metadata[key] = value
if len(self.metadata['perms']) == 3:
- self.metadata['perms'] = "0%s" % \
- (self.metadata['perms'])
+ self.metadata['perms'] = "0%s" % self.metadata['perms']
def reset_metadata(self, event):
"""Reset metadata to defaults if info or info.xml removed."""
@@ -1099,26 +1203,12 @@ class EntrySet(Debuggable):
elif event.filename in [':info', 'info']:
self.metadata = default_file_metadata.copy()
- def group_sortfunc(self, x, y):
- """sort groups by their priority"""
- return cmp(x.specific.prio, y.specific.prio)
-
def bind_info_to_entry(self, entry, metadata):
- # first set defaults from global metadata/:info
- for key in self.metadata:
- entry.set(key, self.metadata[key])
- if self.infoxml:
- mdata = {}
- self.infoxml.pnode.Match(metadata, mdata, entry=entry)
- if 'Info' not in mdata:
- logger.error("Failed to set metadata for file %s" % \
- (entry.get('name')))
- raise PluginExecutionError
- [entry.attrib.__setitem__(key, value) \
- for (key, value) in list(mdata['Info'][None].items())]
+ bind_info(entry, metadata, infoxml=self.infoxml, default=self.metadata)
def bind_entry(self, entry, metadata):
- """Return the appropriate interpreted template from the set of available templates."""
+ """Return the appropriate interpreted template from the set of
+ available templates."""
self.bind_info_to_entry(entry, metadata)
return self.best_matching(metadata).bind_entry(entry, metadata)
@@ -1130,13 +1220,14 @@ class GroupSpool(Plugin, Generator):
filename_pattern = ""
es_child_cls = object
es_cls = EntrySet
+ entry_type = 'Path'
def __init__(self, core, datastore):
Plugin.__init__(self, core, datastore)
Generator.__init__(self)
if self.data[-1] == '/':
self.data = self.data[:-1]
- self.Entries['Path'] = {}
+ self.Entries[self.entry_type] = {}
self.entries = {}
self.handles = {}
self.AddDirectoryMonitor('')
@@ -1145,29 +1236,38 @@ class GroupSpool(Plugin, Generator):
def add_entry(self, event):
epath = self.event_path(event)
ident = self.event_id(event)
- if posixpath.isdir(epath):
+ if os.path.isdir(epath):
self.AddDirectoryMonitor(epath[len(self.data):])
- if ident not in self.entries and posixpath.isfile(epath):
- dirpath = "".join([self.data, ident])
+ if ident not in self.entries and os.path.isfile(epath):
+ dirpath = self.data + ident
self.entries[ident] = self.es_cls(self.filename_pattern,
dirpath,
self.es_child_cls,
self.encoding)
- self.Entries['Path'][ident] = self.entries[ident].bind_entry
- if not posixpath.isdir(epath):
+ self.Entries[self.entry_type][ident] = \
+ self.entries[ident].bind_entry
+ if not os.path.isdir(epath):
# do not pass through directory events
self.entries[ident].handle_event(event)
def event_path(self, event):
- return "".join([self.data, self.handles[event.requestID],
- event.filename])
+ return os.path.join(self.data,
+ self.handles[event.requestID].lstrip("/"),
+ event.filename)
def event_id(self, event):
epath = self.event_path(event)
- if posixpath.isdir(epath):
- return self.handles[event.requestID] + event.filename
+ if os.path.isdir(epath):
+ return os.path.join(self.handles[event.requestID].lstrip("/"),
+ event.filename)
else:
- return self.handles[event.requestID][:-1]
+ return self.handles[event.requestID].rstrip("/")
+
+ def toggle_debug(self):
+ for entry in self.entries.values():
+ if hasattr(entry, "toggle_debug"):
+ entry.toggle_debug()
+ return Plugin.toggle_debug(self)
def HandleEvent(self, event):
"""Unified FAM event handler for GroupSpool."""
@@ -1178,7 +1278,7 @@ class GroupSpool(Plugin, Generator):
if action in ['exists', 'created']:
self.add_entry(event)
- if action == 'changed':
+ elif action == 'changed':
if ident in self.entries:
self.entries[ident].handle_event(event)
else:
@@ -1193,7 +1293,7 @@ class GroupSpool(Plugin, Generator):
if fbase in self.entries:
# a directory was deleted
del self.entries[fbase]
- del self.Entries['Path'][fbase]
+ del self.Entries[self.entry_type][fbase]
elif ident in self.entries:
self.entries[ident].handle_event(event)
elif ident not in self.entries:
@@ -1206,8 +1306,8 @@ class GroupSpool(Plugin, Generator):
relative += '/'
name = self.data + relative
if relative not in list(self.handles.values()):
- if not posixpath.isdir(name):
- print("Failed to open directory %s" % (name))
+ if not os.path.isdir(name):
+ self.logger.error("Failed to open directory %s" % name)
return
reqid = self.core.fam.AddMonitor(name, self)
self.handles[reqid] = relative