From d3aa773f9f42045a0922d6c194e01d029ee53a40 Mon Sep 17 00:00:00 2001 From: "Chris St. Pierre" Date: Thu, 6 Sep 2012 09:17:08 -0400 Subject: split up mammoth Plugin.py --- doc/development/plugins.txt | 127 +- src/lib/Bcfg2/Server/Plugin.py | 1642 -------------- src/lib/Bcfg2/Server/Plugin/__init__.py | 11 + src/lib/Bcfg2/Server/Plugin/base.py | 106 + src/lib/Bcfg2/Server/Plugin/exceptions.py | 36 + src/lib/Bcfg2/Server/Plugin/helpers.py | 965 ++++++++ src/lib/Bcfg2/Server/Plugin/interfaces.py | 548 +++++ testsuite/Testsrc/Testlib/TestServer/TestPlugin.py | 2334 -------------------- .../Testlib/TestServer/TestPlugin/Testbase.py | 83 + .../TestServer/TestPlugin/Testexceptions.py | 47 + .../Testlib/TestServer/TestPlugin/Testhelpers.py | 1938 ++++++++++++++++ .../TestServer/TestPlugin/Testinterfaces.py | 342 +++ .../Testlib/TestServer/TestPlugin/__init__.py | 17 + 13 files changed, 4097 insertions(+), 4099 deletions(-) delete mode 100644 src/lib/Bcfg2/Server/Plugin.py create mode 100644 src/lib/Bcfg2/Server/Plugin/__init__.py create mode 100644 src/lib/Bcfg2/Server/Plugin/base.py create mode 100644 src/lib/Bcfg2/Server/Plugin/exceptions.py create mode 100644 src/lib/Bcfg2/Server/Plugin/helpers.py create mode 100644 src/lib/Bcfg2/Server/Plugin/interfaces.py delete mode 100644 testsuite/Testsrc/Testlib/TestServer/TestPlugin.py create mode 100644 testsuite/Testsrc/Testlib/TestServer/TestPlugin/Testbase.py create mode 100644 testsuite/Testsrc/Testlib/TestServer/TestPlugin/Testexceptions.py create mode 100644 testsuite/Testsrc/Testlib/TestServer/TestPlugin/Testhelpers.py create mode 100644 testsuite/Testsrc/Testlib/TestServer/TestPlugin/Testinterfaces.py create mode 100644 testsuite/Testsrc/Testlib/TestServer/TestPlugin/__init__.py diff --git a/doc/development/plugins.txt b/doc/development/plugins.txt index 2609595a7..bb1f0f046 100644 --- a/doc/development/plugins.txt +++ b/doc/development/plugins.txt @@ -48,125 +48,10 @@ With the exceptions of 
:class:`Bcfg2.Server.Plugin.Statistics` and listed below do **not** inherit from Plugin; they simply provide interfaces that a given plugin may or must implement. -Generator -^^^^^^^^^ - -.. autoclass:: Bcfg2.Server.Plugin.Generator - -Examples are :ref:`server-plugins-generators-cfg` and -:ref:`server-plugins-generators-sshbase`. - -Structure -^^^^^^^^^ - -.. autoclass:: Bcfg2.Server.Plugin.Structure - -:ref:`server-plugins-structures-bundler-index` is a Structure plugin. - -Metadata -^^^^^^^^ - -.. autoclass:: Bcfg2.Server.Plugin.Metadata - -:ref:`server-plugins-grouping-metadata` is a Metadata plugin. - -Connector -^^^^^^^^^ - -.. autoclass:: Bcfg2.Server.Plugin.Connector - -Connector plugins include -:ref:`server-plugins-grouping-grouppatterns`, -:ref:`server-plugins-connectors-properties`, and -:ref:`server-plugins-probes-index`. - -Probing -^^^^^^^ - -.. autoclass:: Bcfg2.Server.Plugin.Probing - -Examples include :ref:`server-plugins-probes-index` and -:ref:`server-plugins-probes-fileprobes`. - -Statistics +Interfaces ^^^^^^^^^^ -.. autoclass:: Bcfg2.Server.Plugin.Statistics - -The Statistics object is itself a :class:`Bcfg2.Server.Plugin.Plugin` -object, so objects that inherit from Statistics do not have to also -inherit from Plugin. - -ThreadedStatistics -^^^^^^^^^^^^^^^^^^ - -.. autoclass:: Bcfg2.Server.Plugin.ThreadedStatistics - -:ref:`server-plugins-statistics-dbstats` is an example of a -ThreadedStatistics plugin. - -The ThreadedStatistics object is itself a -:class:`Bcfg2.Server.Plugin.Plugin` object, so objects that inherit -from ThreadedStatistics do not have to also inherit from Plugin. - -PullSource -^^^^^^^^^^ - -.. autoclass:: Bcfg2.Server.Plugin.PullSource - -:ref:`server-plugins-statistics-dbstats` is an example of a plugin -that implements the PullSource interface - -PullTarget -^^^^^^^^^^ - -.. 
autoclass:: Bcfg2.Server.Plugin.PullTarget - -:ref:`server-plugins-generators-sshbase` is an example of a plugin -that implements the PullTarget interface - -Decision -^^^^^^^^ - -.. autoclass:: Bcfg2.Server.Plugin.Decision - -:ref:`server-plugins-generators-decisions` is an example of a Decision -plugin, and has much more information about how decisions are used. - -StructureValidator -^^^^^^^^^^^^^^^^^^ - -.. autoclass:: Bcfg2.Server.Plugin.StructureValidator - -Examples are :ref:`server-plugins-structures-defaults` and -:ref:`server-plugins-structures-deps`. - -GoalValidator -^^^^^^^^^^^^^ - -.. autoclass:: Bcfg2.Server.Plugin.GoalValidator - -An example of a GoalValidator plugin would be the ServiceCompat plugin -that is used to provide old-style Service tag attributes to older -clients from a Bcfg2 1.3.0 server. As a final stage of configuration -generation, it translates the new "restart" and "install" attributes -into the older "mode" attribute. - -Version -^^^^^^^ - -.. autoclass:: Bcfg2.Server.Plugin.Version - -Examples include :ref:`server-plugins-version-git` and -:ref:`server-plugins-version-svn2`. - -ClientRunHooks -^^^^^^^^^^^^^^ - -.. autoclass:: Bcfg2.Server.Plugin.ClientRunHooks - -Examples are :ref:`server-plugins-misc-trigger` and -:ref:`server-plugins-connectors-puppetenc`. +.. automodule:: Bcfg2.Server.Plugin.interfaces Exposing XML-RPC Functions -------------------------- @@ -210,13 +95,9 @@ functions, you could run:: Plugin Helper Classes --------------------- -.. autoclass:: Bcfg2.Server.Plugin.Debuggable +.. automodule:: Bcfg2.Server.Plugin.helpers Plugin Exceptions ----------------- -.. autoexception:: Bcfg2.Server.Plugin.ValidationError -.. autoexception:: Bcfg2.Server.Plugin.PluginInitError -.. autoexception:: Bcfg2.Server.Plugin.PluginExecutionError -.. autoexception:: Bcfg2.Server.Plugin.MetadataConsistencyError -.. autoexception:: Bcfg2.Server.Plugin.MetadataRuntimeError +.. 
automodule:: Bcfg2.Server.Plugin.exceptions diff --git a/src/lib/Bcfg2/Server/Plugin.py b/src/lib/Bcfg2/Server/Plugin.py deleted file mode 100644 index 0b2f7cee0..000000000 --- a/src/lib/Bcfg2/Server/Plugin.py +++ /dev/null @@ -1,1642 +0,0 @@ -"""This module provides the baseclass for Bcfg2 Server Plugins.""" - -import os -import re -import sys -import copy -import logging -import operator -import threading -import lxml.etree -import Bcfg2.Server -import Bcfg2.Options -from Bcfg2.Compat import ConfigParser, CmpMixin, reduce, Queue, Empty, \ - Full, cPickle - -try: - import django - has_django = True -except ImportError: - has_django = False - -# grab default metadata info from bcfg2.conf -opts = {'owner': Bcfg2.Options.MDATA_OWNER, - 'group': Bcfg2.Options.MDATA_GROUP, - 'perms': Bcfg2.Options.MDATA_PERMS, - 'secontext': Bcfg2.Options.MDATA_SECONTEXT, - 'important': Bcfg2.Options.MDATA_IMPORTANT, - 'paranoid': Bcfg2.Options.MDATA_PARANOID, - 'sensitive': Bcfg2.Options.MDATA_SENSITIVE} -default_file_metadata = Bcfg2.Options.OptionParser(opts) -default_file_metadata.parse([]) -del default_file_metadata['args'] - -logger = logging.getLogger('Bcfg2.Server.Plugin') - -info_regex = re.compile('owner:(\s)*(?P\S+)|' + - 'group:(\s)*(?P\S+)|' + - 'perms:(\s)*(?P\w+)|' + - 'secontext:(\s)*(?P\S+)|' + - 'paranoid:(\s)*(?P\S+)|' + - 'sensitive:(\s)*(?P\S+)|' + - 'encoding:(\s)*(?P\S+)|' + - 'important:(\s)*(?P\S+)|' + - 'mtime:(\s)*(?P\w+)|') - -def bind_info(entry, metadata, infoxml=None, default=default_file_metadata): - for attr, val in list(default.items()): - entry.set(attr, val) - if infoxml: - mdata = dict() - infoxml.pnode.Match(metadata, mdata, entry=entry) - if 'Info' not in mdata: - msg = "Failed to set metadata for file %s" % entry.get('name') - logger.error(msg) - raise PluginExecutionError(msg) - for attr, val in list(mdata['Info'][None].items()): - entry.set(attr, val) - - -class PluginInitError(Exception): - """Error raised in cases of 
:class:`Bcfg2.Server.Plugin.Plugin` - initialization errors.""" - pass - - -class PluginExecutionError(Exception): - """Error raised in case of :class:`Bcfg2.Server.Plugin.Plugin` - execution errors.""" - pass - - -class MetadataConsistencyError(Exception): - """This error gets raised when metadata is internally inconsistent.""" - pass - - -class MetadataRuntimeError(Exception): - """This error is raised when the metadata engine is called prior - to reading enough data, or for other - :class:`Bcfg2.Server.Plugin.Metadata` errors. """ - pass - - -class Debuggable(object): - """ Mixin to add a debugging interface to an object and expose it - via XML-RPC on :class:`Bcfg2.Server.Plugin.Plugin` objects """ - - #: List of names of methods to be exposed as XML-RPC functions - __rmi__ = ['toggle_debug'] - - def __init__(self, name=None): - if name is None: - name = "%s.%s" % (self.__class__.__module__, - self.__class__.__name__) - self.debug_flag = False - self.logger = logging.getLogger(name) - - def toggle_debug(self): - """ Turn debugging output on or off. - - :returns: bool - The new value of the debug flag - """ - self.debug_flag = not self.debug_flag - self.debug_log("%s: debug_flag = %s" % (self.__class__.__name__, - self.debug_flag), - flag=True) - return self.debug_flag - - def debug_log(self, message, flag=None): - """ Log a message at the debug level. - - :param message: The message to log - :type message: string - :param flag: Override the current debug flag with this value - :type flag: bool - :returns: None - """ - if (flag is None and self.debug_flag) or flag: - self.logger.error(message) - - -class Plugin(Debuggable): - """ The base class for all Bcfg2 Server plugins. """ - - #: The name of the plugin. - name = 'Plugin' - - #: The email address of the plugin author. - __author__ = 'bcfg-dev@mcs.anl.gov' - - #: Plugin is experimental. Use of this plugin will produce a log - #: message alerting the administrator that an experimental plugin - #: is in use. 
- experimental = False - - #: Plugin is deprecated and will be removed in a future release. - #: Use of this plugin will produce a log message alerting the - #: administrator that an experimental plugin is in use. - deprecated = False - - #: Plugin conflicts with the list of other plugin names - conflicts = [] - - #: Plugins of the same type are processed in order of ascending - #: sort_order value. Plugins with the same sort_order are sorted - #: alphabetically by their name. - sort_order = 500 - - def __init__(self, core, datastore): - """ Initialize the plugin. - - :param core: The Bcfg2.Server.Core initializing the plugin - :type core: Bcfg2.Server.Core - :param datastore: The path to the Bcfg2 repository on the - filesystem - :type datastore: string - :raises: Bcfg2.Server.Plugin.PluginInitError - """ - object.__init__(self) - self.Entries = {} - self.core = core - self.data = os.path.join(datastore, self.name) - self.running = True - Debuggable.__init__(self, name=self.name) - - @classmethod - def init_repo(cls, repo): - """ Perform any tasks necessary to create an initial Bcfg2 - repository. - - :param repo: The path to the Bcfg2 repository on the filesystem - :type repo: string - :returns: None - """ - os.makedirs(os.path.join(repo, cls.name)) - - def shutdown(self): - """ Perform shutdown tasks for the plugin - - :returns: None """ - self.running = False - - def __str__(self): - return "%s Plugin" % self.__class__.__name__ - - -class DatabaseBacked(Plugin): - @property - def _use_db(self): - use_db = self.core.setup.cfp.getboolean(self.name.lower(), - "use_database", - default=False) - if use_db and has_django and self.core.database_available: - return True - elif not use_db: - return False - else: - self.logger.error("use_database is true but django not found") - return False - - -class PluginDatabaseModel(object): - class Meta: - app_label = "Server" - - -class Generator(object): - """ Generator plugins contribute to literal client configurations. 
- That is, they generate entry contents. - - An entry is generated in one of two ways: - - #. The Bcfg2 core looks in the ``Entries`` dict attribute of the - plugin object. ``Entries`` is expected to be a dict whose keys - are entry tags (e.g., ``"Path"``, ``"Service"``, etc.) and - whose values are dicts; those dicts should map the ``name`` - attribute of an entry to a callable that will be called to - generate the content. The callable will receive two arguments: - the abstract entry (as an lxml.etree._Element object), and the - client metadata object the entry is being generated for. - - #. If the entry is not listed in ``Entries``, the Bcfg2 core calls - :func:`Bcfg2.Server.Plugin.Generator.HandlesEntry`; if that - returns True, then it calls - :func:`Bcfg2.Server.Plugin.Generator.HandleEntry`. - """ - - def HandlesEntry(self, entry, metadata): - """ HandlesEntry is the slow path method for routing - configuration binding requests. It is called if the - ``Entries`` dict does not contain a method for binding the - entry. - - :param entry: The entry to bind - :type entry: lxml.etree._Element - :param metadata: The client metadata - :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata - :return: bool - Whether or not this plugin can handle the entry - :raises: Bcfg2.Server.Plugin.PluginExecutionError - """ - return False - - def HandleEntry(self, entry, metadata): - """ HandlesEntry is the slow path method for binding - configuration binding requests. It is called if the - ``Entries`` dict does not contain a method for binding the - entry, and :func:`Bcfg2.Server.Plugin.Generator.HandlesEntry` - returns True. 
- - :param entry: The entry to bind - :type entry: lxml.etree._Element - :param metadata: The client metadata - :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata - :return: lxml.etree._Element - The fully bound entry - :raises: Bcfg2.Server.Plugin.PluginExecutionError - """ - return entry - - -class Structure(object): - """ Structure Plugins contribute to abstract client - configurations. That is, they produce lists of entries that will - be generated for a client. """ - - def BuildStructures(self, metadata): - """ Build a list of lxml.etree._Element objects that will be - added to the top-level ```` tag of the client - configuration. Consequently, each object in the list returned - by ``BuildStructures()`` must consist of a container tag - (e.g., ```` or ````) which contains the - entry tags. It must not return a list of entry tags. - - :param metadata: The client metadata - :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata - :return: list of lxml.etree._Element objects - """ - raise NotImplementedError - - -class Metadata(object): - """Signal metadata capabilities for this plugin""" - def viz(self, hosts, bundles, key, only_client, colors): - """ Return a string containing a graphviz document that maps - out the Metadata for :ref:`bcfg2-admin viz ` - - :param hosts: Include hosts in the graph - :type hosts: bool - :param bundles: Include bundles in the graph - :type bundles: bool - :param key: Include a key in the graph - :type key: bool - :param only_client: Only include data for the specified client - :type only_client: string - :param colors: Use the specified graphviz colors - :type colors: list of strings - :return: string - """ - return '' - - def set_version(self, client, version): - """ Set the version for the named client to the specified - version string. 
- - :param client: Hostname of the client - :type client: string - :param profile: Client Bcfg2 version - :type profile: string - :return: None - :raises: Bcfg2.Server.Plugin.MetadataRuntimeError, - Bcfg2.Server.Plugin.MetadataConsistencyError - """ - pass - - def set_profile(self, client, profile, address): - """ Set the profile for the named client to the named profile - group. - - :param client: Hostname of the client - :type client: string - :param profile: Name of the profile group - :type profile: string - :param address: Address pair of ``(, )`` - :type address: tuple - :return: None - :raises: Bcfg2.Server.Plugin.MetadataRuntimeError, - Bcfg2.Server.Plugin.MetadataConsistencyError - """ - pass - - def resolve_client(self, address, cleanup_cache=False): - """ Resolve the canonical name of this client. If this method - is not implemented, the hostname claimed by the client is - used. (This may be a security risk; it's highly recommended - that you implement ``resolve_client`` if you are writing a - Metadata plugin.) - - :param address: Address pair of ``(, )`` - :type address: tuple - :param cleanup_cache: Whether or not to remove expire the - entire client hostname resolution class - :type cleanup_cache: bool - :return: string - canonical client hostname - :raises: Bcfg2.Server.Plugin.MetadataRuntimeError, - Bcfg2.Server.Plugin.MetadataConsistencyError - """ - return address[1] - - def AuthenticateConnection(self, cert, user, password, address): - """ Authenticate the given client. 
- - :param cert: an x509 certificate - :type cert: dict - :param user: The username of the user trying to authenticate - :type user: string - :param password: The password supplied by the client - :type password: string - :param addresspair: An address pair of ``(, - )`` - :type addresspair: tuple - :return: bool - True if the authenticate succeeds, False otherwise - """ - raise NotImplementedError - - def get_initial_metadata(self, client_name): - """ Return a - :class:`Bcfg2.Server.Plugins.Metadata.ClientMetadata` object - that fully describes everything the Metadata plugin knows - about the named client. - - :param client_name: The hostname of the client - :type client_name: string - :return: Bcfg2.Server.Plugins.Metadata.ClientMetadata - """ - raise NotImplementedError - - def merge_additional_data(self, imd, source, data): - """ Add arbitrary data from a - :class:`Bcfg2.Server.Plugin.Connector` plugin to the given - metadata object. - - :param imd: An initial metadata object - :type imd: Bcfg2.Server.Plugins.Metadata.ClientMetadata - :param source: The name of the plugin providing this data - :type source: string - :param data: The data to add - :type data: any - :return: None - """ - raise NotImplementedError - - def merge_additional_groups(self, imd, groups): - """ Add groups from a - :class:`Bcfg2.Server.Plugin.Connector` plugin to the given - metadata object. - - :param imd: An initial metadata object - :type imd: Bcfg2.Server.Plugins.Metadata.ClientMetadata - :param groups: The groups to add - :type groups: list of strings - :return: None - """ - raise NotImplementedError - - -class Connector(object): - """ Connector plugins augment client metadata instances with - additional data, additional groups, or both. """ - - def get_additional_groups(self, metadata): - """ Return a list of additional groups for the given client. 
- - :param metadata: The client metadata - :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata - :return: list of strings - """ - return list() - - def get_additional_data(self, metadata): - """ Return arbitrary additional data for the given - ClientMetadata object. By convention this is usually a dict - object, but doesn't need to be. - - :param metadata: The client metadata - :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata - :return: list of strings - """ - return dict() - - -class Probing(object): - """ Probing plugins can collect data from clients and process it. - """ - - def GetProbes(self, metadata): - """ Return a list of probes for the given client. Each probe - should be an lxml.etree._Element object that adheres to - the following specification. Each probe must the following - attributes: - - * ``name``: The unique name of the probe. - * ``source``: The origin of the probe; probably the name of - the plugin that supplies the probe. - * ``interpreter``: The command that will be run on the client - to interpret the probe script. Compiled (i.e., - non-interpreted) probes are not supported. - - The text of the XML tag should be the contents of the probe, - i.e., the code that will be run on the client. - - :param metadata: The client metadata - :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata - :return: list of lxml.etree._Element objects - """ - raise NotImplementedError - - def ReceiveData(self, metadata, datalist): - """ Process data returned from the probes for the given - client. ``datalist`` is a list of lxml.etree._Element - objects, each of which is a single tag; the ``name`` attribute - holds the unique name of the probe that was run, and the text - contents of the tag hold the results of the probe. 
- - :param metadata: The client metadata - :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata - :param datalist: The probe data - :type datalist: list of lxml.etree._Element objects - :return: None - """ - raise NotImplementedError - - -class Statistics(Plugin): - """ Statistics plugins handle statistics for clients. In general, - you should avoid using Statistics and use - :class:`Bcfg2.Server.Plugin.ThreadedStatistics` instead.""" - - def process_statistics(self, client, xdata): - """ Process the given XML statistics data for the specified - client. - - :param metadata: The client metadata - :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata - :param data: The statistics data - :type data: lxml.etree._Element - :return: None - """ - raise NotImplementedError - - -class ThreadedStatistics(Statistics, threading.Thread): - """ ThreadedStatistics plugins process client statistics in a - separate thread. """ - - def __init__(self, core, datastore): - Statistics.__init__(self, core, datastore) - threading.Thread.__init__(self) - # Event from the core signaling an exit - self.terminate = core.terminate - self.work_queue = Queue(100000) - self.pending_file = os.path.join(datastore, "etc", - "%s.pending" % self.name) - self.daemon = False - self.start() - - def _save(self): - """Save any pending data to a file.""" - pending_data = [] - try: - while not self.work_queue.empty(): - (metadata, data) = self.work_queue.get_nowait() - try: - pending_data.append((metadata.hostname, - lxml.etree.tostring(data, - xml_declaration=False).decode("UTF-8"))) - except: - err = sys.exc_info()[1] - self.logger.warning("Dropping interaction for %s: %s" % - (metadata.hostname, err)) - except Empty: - pass - - try: - savefile = open(self.pending_file, 'w') - cPickle.dump(pending_data, savefile) - savefile.close() - self.logger.info("Saved pending %s data" % self.name) - except: - err = sys.exc_info()[1] - self.logger.warning("Failed to save pending data: %s" % err) - - def 
_load(self): - """Load any pending data from a file.""" - if not os.path.exists(self.pending_file): - return True - pending_data = [] - try: - savefile = open(self.pending_file, 'r') - pending_data = cPickle.load(savefile) - savefile.close() - except Exception: - e = sys.exc_info()[1] - self.logger.warning("Failed to load pending data: %s" % e) - return False - for (pmetadata, pdata) in pending_data: - # check that shutdown wasnt called early - if self.terminate.isSet(): - return False - - try: - while True: - try: - metadata = self.core.build_metadata(pmetadata) - break - except MetadataRuntimeError: - pass - - self.terminate.wait(5) - if self.terminate.isSet(): - return False - - self.work_queue.put_nowait((metadata, - lxml.etree.XML(pdata, - parser=Bcfg2.Server.XMLParser))) - except Full: - self.logger.warning("Queue.Full: Failed to load queue data") - break - except lxml.etree.LxmlError: - lxml_error = sys.exc_info()[1] - self.logger.error("Unable to load saved interaction: %s" % - lxml_error) - except MetadataConsistencyError: - self.logger.error("Unable to load metadata for save " - "interaction: %s" % pmetadata) - try: - os.unlink(self.pending_file) - except: - self.logger.error("Failed to unlink save file: %s" % - self.pending_file) - self.logger.info("Loaded pending %s data" % self.name) - return True - - def run(self): - if not self._load(): - return - while not self.terminate.isSet() and self.work_queue != None: - try: - (client, xdata) = self.work_queue.get(block=True, timeout=2) - except Empty: - continue - except Exception: - e = sys.exc_info()[1] - self.logger.error("ThreadedStatistics: %s" % e) - continue - self.handle_statistic(client, xdata) - if self.work_queue != None and not self.work_queue.empty(): - self._save() - - def process_statistics(self, metadata, data): - try: - self.work_queue.put_nowait((metadata, copy.copy(data))) - except Full: - self.logger.warning("%s: Queue is full. Dropping interactions." 
% - self.name) - - def handle_statistic(self, metadata, data): - """ Process the given XML statistics data for the specified - client object. This differs from the - :func:`Bcfg2.Server.Plugin.Statistics.process_statistics` - method only in that ThreadedStatistics first adds the data to - a queue, and then processes them in a separate thread. - - :param metadata: The client metadata - :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata - :param data: The statistics data - :type data: lxml.etree._Element - :return: None - """ - raise NotImplementedError - - -class PullSource(object): - def GetExtra(self, client): - return [] - - def GetCurrentEntry(self, client, e_type, e_name): - raise NotImplementedError - - -class PullTarget(object): - def AcceptChoices(self, entry, metadata): - raise NotImplementedError - - def AcceptPullData(self, specific, new_entry, verbose): - raise NotImplementedError - - -class Decision(object): - """ Decision plugins produce decision lists for affecting which - entries are actually installed on clients. """ - - def GetDecisions(self, metadata, mode): - """ Return a list of tuples of ``(, )`` to be used as the decision list for the given - client in the specified mode. - - :param metadata: The client metadata - :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata - :param mode: The decision mode ("whitelist" or "blacklist") - :type mode: string - :return: list of tuples - """ - raise NotImplementedError - - -class ValidationError(Exception): - """ Exception raised by - :class:`Bcfg2.Server.Plugin.StructureValidator` and - :class:`Bcfg2.Server.Plugin.GoalValidator` objects """ - - -class StructureValidator(object): - """ StructureValidator plugins can modify the list of structures - after it has been created but before the entries have been - concretely bound. 
""" - - def validate_structures(self, metadata, structures): - """ Given a list of structures (i.e., of tags that contain - entry tags), modify that list or the structures in it - in-place. - - :param metadata: The client metadata - :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata - :param config: A list of lxml.etree._Element objects - describing the structures for this client - :type config: list - :returns: None - :raises: Bcfg2.Server.Plugin.ValidationError - """ - raise NotImplementedError - - -class GoalValidator(object): - """ GoalValidator plugins can modify the concretely-bound configuration of - a client as a last stage before the configuration is sent to the - client. """ - - def validate_goals(self, metadata, config): - """ Given a monolithic XML document of the full configuration, - modify the document in-place. - - :param metadata: The client metadata - :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata - :param config: The full configuration for the client - :type config: lxml.etree._Element - :returns: None - :raises: Bcfg2.Server.Plugin.ValidationError - """ - raise NotImplementedError - - -class Version(object): - """ Version plugins interact with various version control systems. """ - - def get_revision(self): - """ Return the current revision of the Bcfg2 specification. - This will be included in the ``revision`` attribute of the - top-level tag of the XML configuration sent to the client. - - :returns: string - the current version - """ - raise NotImplementedError - - -class ClientRunHooks(object): - """ ClientRunHooks can hook into various parts of a client run to - perform actions at various times without needing to pretend to be - a different plugin type. """ - - def start_client_run(self, metadata): - """ Invoked at the start of a client run, after all probe data - has been received and decision lists have been queried (if - applicable), but before the configuration is generated. 
- - :param metadata: The client metadata object - :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata - :returns: None - """ - pass - - def end_client_run(self, metadata): - """ Invoked at the end of a client run, immediately after - :class:`Bcfg2.Server.Plugin.GoalValidator` plugins have been run - and just before the configuration is returned to the client. - - :param metadata: The client metadata object - :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata - :returns: None - """ - pass - - def end_statistics(self, metadata): - """ Invoked after statistics are processed for a client. - - :param metadata: The client metadata object - :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata - :returns: None - """ - pass - -# the rest of the file contains classes for coherent file caching - -class FileBacked(object): - """This object caches file data in memory. - HandleEvent is called whenever fam registers an event. - Index can parse the data into member data as required. - This object is meant to be used as a part of DirectoryBacked. - """ - - def __init__(self, name, fam=None): - object.__init__(self) - self.data = '' - self.name = name - self.fam = fam - - def HandleEvent(self, event=None): - """Read file upon update.""" - if event and event.code2str() not in ['exists', 'changed', 'created']: - return - try: - self.data = open(self.name).read() - self.Index() - except IOError: - err = sys.exc_info()[1] - logger.error("Failed to read file %s: %s" % (self.name, err)) - - def Index(self): - """Update local data structures based on current file state""" - pass - - def __repr__(self): - return "%s: %s" % (self.__class__.__name__, self.name) - - -class DirectoryBacked(object): - """This object is a coherent cache for a filesystem hierarchy of files.""" - __child__ = FileBacked - patterns = re.compile('.*') - ignore = None - - def __init__(self, data, fam): - """Initialize the DirectoryBacked object. - - :param self: the object being initialized. 
- :param data: the path to the data directory that will be - monitored. - :param fam: The FileMonitor object used to receive - notifications of changes. - """ - object.__init__(self) - - self.data = os.path.normpath(data) - self.fam = fam - - # self.entries contains information about the files monitored - # by this object.... The keys of the dict are the relative - # paths to the files. The values are the objects (of type - # __child__) that handle their contents. - self.entries = {} - - # self.handles contains information about the directories - # monitored by this object. The keys of the dict are the - # values returned by the initial fam.AddMonitor() call (which - # appear to be integers). The values are the relative paths of - # the directories. - self.handles = {} - - # Monitor everything in the plugin's directory - self.add_directory_monitor('') - - def __getitem__(self, key): - return self.entries[key] - - def __iter__(self): - return iter(list(self.entries.items())) - - def add_directory_monitor(self, relative): - """Add a new directory to FAM structures for monitoring. - - :param relative: Path name to monitor. This must be relative - to the plugin's directory. An empty string value ("") will - cause the plugin directory itself to be monitored. - """ - dirpathname = os.path.join(self.data, relative) - if relative not in self.handles.values(): - if not os.path.isdir(dirpathname): - logger.error("%s is not a directory" % dirpathname) - return - reqid = self.fam.AddMonitor(dirpathname, self) - self.handles[reqid] = relative - - def add_entry(self, relative, event): - """Add a new file to our structures for monitoring. - - :param relative: Path name to monitor. This must be relative - to the plugin's directory. - :param event: File Monitor event that caused this entry to be - added. 
- """ - self.entries[relative] = self.__child__(os.path.join(self.data, - relative), - self.fam) - self.entries[relative].HandleEvent(event) - - def HandleEvent(self, event): - """Handle FAM/Gamin events. - - This method is invoked by FAM/Gamin when it detects a change - to a filesystem object we have requsted to be monitored. - - This method manages the lifecycle of events related to the - monitored objects, adding them to our indiciess and creating - objects of type __child__ that actually do the domain-specific - processing. When appropriate, it propogates events those - objects by invoking their HandleEvent in turn. - """ - action = event.code2str() - - # Exclude events for actions we don't care about - if action == 'endExist': - return - - if event.requestID not in self.handles: - logger.warn("Got %s event with unknown handle (%s) for %s" % - (action, event.requestID, event.filename)) - return - - # Clean up path names - event.filename = os.path.normpath(event.filename) - if event.filename.startswith(self.data): - # the first event we get is on the data directory itself - event.filename = event.filename[len(self.data) + 1:] - - if self.ignore and self.ignore.search(event.filename): - logger.debug("Ignoring event %s" % event.filename) - return - - # Calculate the absolute and relative paths this event refers to - abspath = os.path.join(self.data, self.handles[event.requestID], - event.filename) - relpath = os.path.join(self.handles[event.requestID], - event.filename).lstrip('/') - - if action == 'deleted': - for key in list(self.entries.keys()): - if key.startswith(relpath): - del self.entries[key] - # We remove values from self.entries, but not - # self.handles, because the FileMonitor doesn't stop - # watching a directory just because it gets deleted. If it - # is recreated, we will start getting notifications for it - # again without having to add a new monitor. 
- elif os.path.isdir(abspath): - # Deal with events for directories - if action in ['exists', 'created']: - self.add_directory_monitor(relpath) - elif action == 'changed': - if relpath in self.entries: - # Ownerships, permissions or timestamps changed on - # the directory. None of these should affect the - # contents of the files, though it could change - # our ability to access them. - # - # It seems like the right thing to do is to cancel - # monitoring the directory and then begin - # monitoring it again. But the current FileMonitor - # class doesn't support canceling, so at least let - # the user know that a restart might be a good - # idea. - logger.warn("Directory properties for %s changed, please " + - " consider restarting the server" % (abspath)) - else: - # Got a "changed" event for a directory that we - # didn't know about. Go ahead and treat it like a - # "created" event, but log a warning, because this - # is unexpected. - logger.warn("Got %s event for unexpected dir %s" % - (action, abspath)) - self.add_directory_monitor(relpath) - else: - logger.warn("Got unknown dir event %s %s %s" % - (event.requestID, event.code2str(), abspath)) - elif self.patterns.search(event.filename): - if action in ['exists', 'created']: - self.add_entry(relpath, event) - elif action == 'changed': - if relpath in self.entries: - self.entries[relpath].HandleEvent(event) - else: - # Got a "changed" event for a file that we didn't - # know about. Go ahead and treat it like a - # "created" event, but log a warning, because this - # is unexpected. - logger.warn("Got %s event for unexpected file %s" % - (action, - abspath)) - self.add_entry(relpath, event) - else: - logger.warn("Got unknown file event %s %s %s" % - (event.requestID, event.code2str(), abspath)) - else: - logger.warn("Could not process filename %s; ignoring" % - event.filename) - - -class XMLFileBacked(FileBacked): - """ - This object is a coherent cache for an XML file to be used as a - part of DirectoryBacked. 
- """ - __identifier__ = 'name' - - def __init__(self, filename, fam=None, should_monitor=False): - FileBacked.__init__(self, filename) - self.label = "" - self.entries = [] - self.extras = [] - self.fam = fam - self.should_monitor = should_monitor - if fam and should_monitor: - self.fam.AddMonitor(filename, self) - - def _follow_xincludes(self, fname=None, xdata=None): - ''' follow xincludes, adding included files to self.extras ''' - if xdata is None: - if fname is None: - xdata = self.xdata.getroottree() - else: - xdata = lxml.etree.parse(fname) - included = [el for el in xdata.findall('//%sinclude' % - Bcfg2.Server.XI_NAMESPACE)] - for el in included: - name = el.get("href") - if name.startswith("/"): - fpath = name - else: - if fname: - rel = fname - else: - rel = self.name - fpath = os.path.join(os.path.dirname(rel), name) - if fpath not in self.extras: - if os.path.exists(fpath): - self._follow_xincludes(fname=fpath) - self.add_monitor(fpath) - else: - msg = "%s: %s does not exist, skipping" % (self.name, name) - if el.findall('./%sfallback' % Bcfg2.Server.XI_NAMESPACE): - self.logger.debug(msg) - else: - self.logger.warning(msg) - - def Index(self): - """Build local data structures.""" - try: - self.xdata = lxml.etree.XML(self.data, base_url=self.name, - parser=Bcfg2.Server.XMLParser) - except lxml.etree.XMLSyntaxError: - msg = "Failed to parse %s: %s" % (self.name, sys.exc_info()[1]) - logger.error(msg) - raise PluginInitError(msg) - - self._follow_xincludes() - if self.extras: - try: - self.xdata.getroottree().xinclude() - except lxml.etree.XIncludeError: - err = sys.exc_info()[1] - logger.error("XInclude failed on %s: %s" % (self.name, err)) - - self.entries = self.xdata.getchildren() - if self.__identifier__ is not None: - self.label = self.xdata.attrib[self.__identifier__] - - def add_monitor(self, fpath): - self.extras.append(fpath) - if self.fam and self.should_monitor: - self.fam.AddMonitor(fpath, self) - - def __iter__(self): - return 
iter(self.entries) - - def __str__(self): - return "%s at %s" % (self.__class__.__name__, self.name) - - -class StructFile(XMLFileBacked): - """This file contains a set of structure file formatting logic.""" - __identifier__ = None - - def _include_element(self, item, metadata): - """ determine if an XML element matches the metadata """ - if isinstance(item, lxml.etree._Comment): - return False - negate = item.get('negate', 'false').lower() == 'true' - if item.tag == 'Group': - return negate == (item.get('name') not in metadata.groups) - elif item.tag == 'Client': - return negate == (item.get('name') != metadata.hostname) - else: - return True - - def _match(self, item, metadata): - """ recursive helper for Match() """ - if self._include_element(item, metadata): - if item.tag == 'Group' or item.tag == 'Client': - rv = [] - if self._include_element(item, metadata): - for child in item.iterchildren(): - rv.extend(self._match(child, metadata)) - return rv - else: - rv = copy.deepcopy(item) - for child in rv.iterchildren(): - rv.remove(child) - for child in item.iterchildren(): - rv.extend(self._match(child, metadata)) - return [rv] - else: - return [] - - def Match(self, metadata): - """Return matching fragments of independent.""" - rv = [] - for child in self.entries: - rv.extend(self._match(child, metadata)) - return rv - - def _xml_match(self, item, metadata): - """ recursive helper for XMLMatch """ - if self._include_element(item, metadata): - if item.tag == 'Group' or item.tag == 'Client': - for child in item.iterchildren(): - item.remove(child) - item.getparent().append(child) - self._xml_match(child, metadata) - item.getparent().remove(item) - else: - for child in item.iterchildren(): - self._xml_match(child, metadata) - else: - item.getparent().remove(item) - - def XMLMatch(self, metadata): - """ Return a rebuilt XML document that only contains the - matching portions """ - rv = copy.deepcopy(self.xdata) - for child in rv.iterchildren(): - 
self._xml_match(child, metadata) - return rv - - -class INode(object): - """ - LNodes provide lists of things available at a particular - group intersection. - """ - raw = dict( - Client="lambda m, e:'%(name)s' == m.hostname and predicate(m, e)", - Group="lambda m, e:'%(name)s' in m.groups and predicate(m, e)") - nraw = dict( - Client="lambda m, e:'%(name)s' != m.hostname and predicate(m, e)", - Group="lambda m, e:'%(name)s' not in m.groups and predicate(m, e)") - containers = ['Group', 'Client'] - ignore = [] - - def __init__(self, data, idict, parent=None): - self.data = data - self.contents = {} - if parent is None: - self.predicate = lambda m, e: True - else: - predicate = parent.predicate - if data.get('negate', 'false').lower() == 'true': - psrc = self.nraw - else: - psrc = self.raw - if data.tag in list(psrc.keys()): - self.predicate = eval(psrc[data.tag] % - {'name': data.get('name')}, - {'predicate': predicate}) - else: - raise PluginExecutionError("Unknown tag: %s" % data.tag) - self.children = [] - self._load_children(data, idict) - - def _load_children(self, data, idict): - for item in data.getchildren(): - if item.tag in self.ignore: - continue - elif item.tag in self.containers: - self.children.append(self.__class__(item, idict, self)) - else: - try: - self.contents[item.tag][item.get('name')] = \ - dict(item.attrib) - except KeyError: - self.contents[item.tag] = \ - {item.get('name'): dict(item.attrib)} - if item.text: - self.contents[item.tag][item.get('name')]['__text__'] = \ - item.text - if item.getchildren(): - self.contents[item.tag][item.get('name')]['__children__'] =\ - item.getchildren() - try: - idict[item.tag].append(item.get('name')) - except KeyError: - idict[item.tag] = [item.get('name')] - - def Match(self, metadata, data, entry=lxml.etree.Element("None")): - """Return a dictionary of package mappings.""" - if self.predicate(metadata, entry): - for key in self.contents: - try: - data[key].update(self.contents[key]) - except: - 
data[key] = {} - data[key].update(self.contents[key]) - for child in self.children: - child.Match(metadata, data, entry=entry) - - -class InfoNode (INode): - """ INode implementation that includes tags """ - raw = {'Client': "lambda m, e:'%(name)s' == m.hostname and predicate(m, e)", - 'Group': "lambda m, e:'%(name)s' in m.groups and predicate(m, e)", - 'Path': "lambda m, e:('%(name)s' == e.get('name') or '%(name)s' == e.get('realname')) and predicate(m, e)"} - nraw = {'Client': "lambda m, e:'%(name)s' != m.hostname and predicate(m, e)", - 'Group': "lambda m, e:'%(name)s' not in m.groups and predicate(m, e)", - 'Path': "lambda m, e:('%(name)s' != e.get('name') and '%(name)s' != e.get('realname')) and predicate(m, e)"} - containers = ['Group', 'Client', 'Path'] - - -class XMLSrc(XMLFileBacked): - """XMLSrc files contain a LNode hierarchy that returns matching entries.""" - __node__ = INode - __cacheobj__ = dict - __priority_required__ = True - - def __init__(self, filename, fam=None, should_monitor=False): - XMLFileBacked.__init__(self, filename, fam, should_monitor) - self.items = {} - self.cache = None - self.pnode = None - self.priority = -1 - - def HandleEvent(self, _=None): - """Read file upon update.""" - try: - data = open(self.name).read() - except IOError: - msg = "Failed to read file %s: %s" % (self.name, sys.exc_info()[1]) - logger.error(msg) - raise PluginExecutionError(msg) - self.items = {} - try: - xdata = lxml.etree.XML(data, parser=Bcfg2.Server.XMLParser) - except lxml.etree.XMLSyntaxError: - msg = "Failed to parse file %s" % (self.name, sys.exc_info()[1]) - logger.error(msg) - raise PluginExecutionError(msg) - self.pnode = self.__node__(xdata, self.items) - self.cache = None - try: - self.priority = int(xdata.get('priority')) - except (ValueError, TypeError): - if self.__priority_required__: - msg = "Got bogus priority %s for file %s" % \ - (xdata.get('priority'), self.name) - logger.error(msg) - raise PluginExecutionError(msg) - - del xdata, data 
- - def Cache(self, metadata): - """Build a package dict for a given host.""" - if self.cache is None or self.cache[0] != metadata: - cache = (metadata, self.__cacheobj__()) - if self.pnode is None: - logger.error("Cache method called early for %s; forcing data load" % (self.name)) - self.HandleEvent() - return - self.pnode.Match(metadata, cache[1]) - self.cache = cache - - def __str__(self): - return str(self.items) - - -class InfoXML(XMLSrc): - __node__ = InfoNode - __priority_required__ = False - - -class XMLDirectoryBacked(DirectoryBacked): - """Directorybacked for *.xml.""" - patterns = re.compile('^.*\.xml$') - __child__ = XMLFileBacked - - -class PrioDir(Plugin, Generator, XMLDirectoryBacked): - """This is a generator that handles package assignments.""" - name = 'PrioDir' - __child__ = XMLSrc - - def __init__(self, core, datastore): - Plugin.__init__(self, core, datastore) - Generator.__init__(self) - XMLDirectoryBacked.__init__(self, self.data, self.core.fam) - - def HandleEvent(self, event): - """Handle events and update dispatch table.""" - XMLDirectoryBacked.HandleEvent(self, event) - self.Entries = {} - for src in list(self.entries.values()): - for itype, children in list(src.items.items()): - for child in children: - try: - self.Entries[itype][child] = self.BindEntry - except KeyError: - self.Entries[itype] = {child: self.BindEntry} - - def _matches(self, entry, metadata, rules): - return entry.get('name') in rules - - def BindEntry(self, entry, metadata): - attrs = self.get_attrs(entry, metadata) - for key, val in list(attrs.items()): - entry.attrib[key] = val - - def get_attrs(self, entry, metadata): - """ get a list of attributes to add to the entry during the bind """ - for src in self.entries.values(): - src.Cache(metadata) - - matching = [src for src in list(self.entries.values()) - if (src.cache and - entry.tag in src.cache[1] and - self._matches(entry, metadata, - src.cache[1][entry.tag]))] - if len(matching) == 0: - raise 
PluginExecutionError('No matching source for entry when retrieving attributes for %s(%s)' % (entry.tag, entry.attrib.get('name'))) - elif len(matching) == 1: - index = 0 - else: - prio = [int(src.priority) for src in matching] - if prio.count(max(prio)) > 1: - msg = "Found conflicting sources with same priority for " + \ - "%s:%s for %s" % (entry.tag, entry.get("name"), - metadata.hostname) - self.logger.error(msg) - self.logger.error([item.name for item in matching]) - self.logger.error("Priority was %s" % max(prio)) - raise PluginExecutionError(msg) - index = prio.index(max(prio)) - - for rname in list(matching[index].cache[1][entry.tag].keys()): - if self._matches(entry, metadata, [rname]): - data = matching[index].cache[1][entry.tag][rname] - break - else: - # Fall back on __getitem__. Required if override used - data = matching[index].cache[1][entry.tag][entry.get('name')] - if '__text__' in data: - entry.text = data['__text__'] - if '__children__' in data: - [entry.append(copy.copy(item)) for item in data['__children__']] - - return dict([(key, data[key]) - for key in list(data.keys()) - if not key.startswith('__')]) - - -# new unified EntrySet backend -class SpecificityError(Exception): - """Thrown in case of filename parse failure.""" - pass - - -class Specificity(CmpMixin): - def __init__(self, all=False, group=False, hostname=False, prio=0, - delta=False): - CmpMixin.__init__(self) - self.hostname = hostname - self.all = all - self.group = group - self.prio = prio - self.delta = delta - - def matches(self, metadata): - return self.all or \ - self.hostname == metadata.hostname or \ - self.group in metadata.groups - - def __cmp__(self, other): - """Sort most to least specific.""" - if self.all: - if other.all: - return 0 - else: - return 1 - elif other.all: - return -1 - elif self.group: - if other.hostname: - return 1 - if other.group and other.prio > self.prio: - return 1 - if other.group and other.prio == self.prio: - return 0 - elif other.group: - 
return -1 - elif self.hostname and other.hostname: - return 0 - return -1 - - def __str__(self): - rv = [self.__class__.__name__, ': '] - if self.all: - rv.append("all") - elif self.group: - rv.append("Group %s, priority %s" % (self.group, self.prio)) - elif self.hostname: - rv.append("Host %s" % self.hostname) - if self.delta: - rv.append(", delta=%s" % self.delta) - return "".join(rv) - - -class SpecificData(object): - def __init__(self, name, specific, encoding): - self.name = name - self.specific = specific - - def handle_event(self, event): - if event.code2str() == 'deleted': - return - try: - self.data = open(self.name).read() - except UnicodeDecodeError: - self.data = open(self.name, mode='rb').read() - except: - logger.error("Failed to read file %s" % self.name) - - -class EntrySet(Debuggable): - """Entry sets deal with the host- and group-specific entries.""" - ignore = re.compile("^(\.#.*|.*~|\\..*\\.(sw[px])|.*\\.genshi_include)$") - basename_is_regex=False - - def __init__(self, basename, path, entry_type, encoding): - Debuggable.__init__(self, name=basename) - self.path = path - self.entry_type = entry_type - self.entries = {} - self.metadata = default_file_metadata.copy() - self.infoxml = None - self.encoding = encoding - - if self.basename_is_regex: - base_pat = basename - else: - base_pat = re.escape(basename) - pattern = '(.*/)?%s(\.((H_(?P\S+))|' % base_pat - pattern += '(G(?P\d+)_(?P\S+))))?$' - self.specific = re.compile(pattern) - - def sort_by_specific(self, one, other): - return cmp(one.specific, other.specific) - - def get_matching(self, metadata): - return [item for item in list(self.entries.values()) - if item.specific.matches(metadata)] - - def best_matching(self, metadata, matching=None): - """ Return the appropriate interpreted template from the set of - available templates. 
""" - if matching is None: - matching = self.get_matching(metadata) - - if matching: - matching.sort(key=operator.attrgetter("specific")) - return matching[0] - else: - raise PluginExecutionError("No matching entries available for %s " - "for %s" % (self.path, - metadata.hostname)) - - def handle_event(self, event): - """Handle FAM events for the TemplateSet.""" - action = event.code2str() - - if event.filename in ['info', 'info.xml', ':info']: - if action in ['exists', 'created', 'changed']: - self.update_metadata(event) - elif action == 'deleted': - self.reset_metadata(event) - return - - if action in ['exists', 'created']: - self.entry_init(event) - else: - if event.filename not in self.entries: - logger.warning("Got %s event for unknown file %s" % - (action, event.filename)) - if action == 'changed': - # received a bogus changed event; warn, but treat - # it like a created event - self.entry_init(event) - return - if action == 'changed': - self.entries[event.filename].handle_event(event) - elif action == 'deleted': - del self.entries[event.filename] - - def entry_init(self, event, entry_type=None, specific=None): - """Handle template and info file creation.""" - if entry_type is None: - entry_type = self.entry_type - - if event.filename in self.entries: - logger.warn("Got duplicate add for %s" % event.filename) - else: - fpath = os.path.join(self.path, event.filename) - try: - spec = self.specificity_from_filename(event.filename, - specific=specific) - except SpecificityError: - if not self.ignore.match(event.filename): - logger.error("Could not process filename %s; ignoring" % - fpath) - return - self.entries[event.filename] = entry_type(fpath, spec, - self.encoding) - self.entries[event.filename].handle_event(event) - - def specificity_from_filename(self, fname, specific=None): - """Construct a specificity instance from a filename and regex.""" - if specific is None: - specific = self.specific - data = specific.match(fname) - if not data: - raise 
SpecificityError(fname) - kwargs = {} - if data.group('hostname'): - kwargs['hostname'] = data.group('hostname') - elif data.group('group'): - kwargs['group'] = data.group('group') - kwargs['prio'] = int(data.group('prio')) - else: - kwargs['all'] = True - if 'delta' in data.groupdict(): - kwargs['delta'] = data.group('delta') - return Specificity(**kwargs) - - def update_metadata(self, event): - """Process info and info.xml files for the templates.""" - fpath = os.path.join(self.path, event.filename) - if event.filename == 'info.xml': - if not self.infoxml: - self.infoxml = InfoXML(fpath) - self.infoxml.HandleEvent(event) - elif event.filename in [':info', 'info']: - for line in open(fpath).readlines(): - match = info_regex.match(line) - if not match: - logger.warning("Failed to match line in %s: %s" % (fpath, - line)) - continue - else: - mgd = match.groupdict() - for key, value in list(mgd.items()): - if value: - self.metadata[key] = value - if len(self.metadata['perms']) == 3: - self.metadata['perms'] = "0%s" % self.metadata['perms'] - - def reset_metadata(self, event): - """Reset metadata to defaults if info or info.xml removed.""" - if event.filename == 'info.xml': - self.infoxml = None - elif event.filename in [':info', 'info']: - self.metadata = default_file_metadata.copy() - - def bind_info_to_entry(self, entry, metadata): - bind_info(entry, metadata, infoxml=self.infoxml, default=self.metadata) - - def bind_entry(self, entry, metadata): - """Return the appropriate interpreted template from the set of - available templates.""" - self.bind_info_to_entry(entry, metadata) - return self.best_matching(metadata).bind_entry(entry, metadata) - - -class GroupSpool(Plugin, Generator): - """Unified interface for handling group-specific data (e.g. 
.G## files).""" - name = 'GroupSpool' - __author__ = 'bcfg-dev@mcs.anl.gov' - filename_pattern = "" - es_child_cls = object - es_cls = EntrySet - entry_type = 'Path' - - def __init__(self, core, datastore): - Plugin.__init__(self, core, datastore) - Generator.__init__(self) - if self.data[-1] == '/': - self.data = self.data[:-1] - self.Entries[self.entry_type] = {} - self.entries = {} - self.handles = {} - self.AddDirectoryMonitor('') - self.encoding = core.encoding - - def add_entry(self, event): - epath = self.event_path(event) - ident = self.event_id(event) - if os.path.isdir(epath): - self.AddDirectoryMonitor(epath[len(self.data):]) - if ident not in self.entries and os.path.isfile(epath): - dirpath = self.data + ident - self.entries[ident] = self.es_cls(self.filename_pattern, - dirpath, - self.es_child_cls, - self.encoding) - self.Entries[self.entry_type][ident] = \ - self.entries[ident].bind_entry - if not os.path.isdir(epath): - # do not pass through directory events - self.entries[ident].handle_event(event) - - def event_path(self, event): - return os.path.join(self.data, - self.handles[event.requestID].lstrip("/"), - event.filename) - - def event_id(self, event): - epath = self.event_path(event) - if os.path.isdir(epath): - return os.path.join(self.handles[event.requestID].lstrip("/"), - event.filename) - else: - return self.handles[event.requestID].rstrip("/") - - def toggle_debug(self): - for entry in self.entries.values(): - if hasattr(entry, "toggle_debug"): - entry.toggle_debug() - return Plugin.toggle_debug(self) - - def HandleEvent(self, event): - """Unified FAM event handler for GroupSpool.""" - action = event.code2str() - if event.filename[0] == '/': - return - ident = self.event_id(event) - - if action in ['exists', 'created']: - self.add_entry(event) - elif action == 'changed': - if ident in self.entries: - self.entries[ident].handle_event(event) - else: - # got a changed event for a file we didn't know - # about. 
go ahead and process this as a 'created', but - # warn - self.logger.warning("Got changed event for unknown file %s" % - ident) - self.add_entry(event) - elif action == 'deleted': - fbase = self.handles[event.requestID] + event.filename - if fbase in self.entries: - # a directory was deleted - del self.entries[fbase] - del self.Entries[self.entry_type][fbase] - elif ident in self.entries: - self.entries[ident].handle_event(event) - elif ident not in self.entries: - self.logger.warning("Got deleted event for unknown file %s" % - ident) - - def AddDirectoryMonitor(self, relative): - """Add new directory to FAM structures.""" - if not relative.endswith('/'): - relative += '/' - name = self.data + relative - if relative not in list(self.handles.values()): - if not os.path.isdir(name): - self.logger.error("Failed to open directory %s" % name) - return - reqid = self.core.fam.AddMonitor(name, self) - self.handles[reqid] = relative diff --git a/src/lib/Bcfg2/Server/Plugin/__init__.py b/src/lib/Bcfg2/Server/Plugin/__init__.py new file mode 100644 index 000000000..487a457e6 --- /dev/null +++ b/src/lib/Bcfg2/Server/Plugin/__init__.py @@ -0,0 +1,11 @@ +""" Bcfg2 server plugin base classes, interfaces, and helper +objects. 
""" + +import os +import sys +sys.path.append(os.path.dirname(__file__)) + +from base import * +from interfaces import * +from helpers import * +from exceptions import * diff --git a/src/lib/Bcfg2/Server/Plugin/base.py b/src/lib/Bcfg2/Server/Plugin/base.py new file mode 100644 index 000000000..98427e726 --- /dev/null +++ b/src/lib/Bcfg2/Server/Plugin/base.py @@ -0,0 +1,106 @@ +"""This module provides the base class for Bcfg2 server plugins.""" + +import os +import logging + +class Debuggable(object): + """ Mixin to add a debugging interface to an object and expose it + via XML-RPC on :class:`Bcfg2.Server.Plugin.Plugin` objects """ + + #: List of names of methods to be exposed as XML-RPC functions + __rmi__ = ['toggle_debug'] + + def __init__(self, name=None): + if name is None: + name = "%s.%s" % (self.__class__.__module__, + self.__class__.__name__) + self.debug_flag = False + self.logger = logging.getLogger(name) + + def toggle_debug(self): + """ Turn debugging output on or off. + + :returns: bool - The new value of the debug flag + """ + self.debug_flag = not self.debug_flag + self.debug_log("%s: debug_flag = %s" % (self.__class__.__name__, + self.debug_flag), + flag=True) + return self.debug_flag + + def debug_log(self, message, flag=None): + """ Log a message at the debug level. + + :param message: The message to log + :type message: string + :param flag: Override the current debug flag with this value + :type flag: bool + :returns: None + """ + if (flag is None and self.debug_flag) or flag: + self.logger.error(message) + + +class Plugin(Debuggable): + """ The base class for all Bcfg2 Server plugins. """ + + #: The name of the plugin. + name = 'Plugin' + + #: The email address of the plugin author. + __author__ = 'bcfg-dev@mcs.anl.gov' + + #: Plugin is experimental. Use of this plugin will produce a log + #: message alerting the administrator that an experimental plugin + #: is in use. 
+ experimental = False + + #: Plugin is deprecated and will be removed in a future release. + #: Use of this plugin will produce a log message alerting the + #: administrator that an experimental plugin is in use. + deprecated = False + + #: Plugin conflicts with the list of other plugin names + conflicts = [] + + #: Plugins of the same type are processed in order of ascending + #: sort_order value. Plugins with the same sort_order are sorted + #: alphabetically by their name. + sort_order = 500 + + def __init__(self, core, datastore): + """ Initialize the plugin. + + :param core: The Bcfg2.Server.Core initializing the plugin + :type core: Bcfg2.Server.Core + :param datastore: The path to the Bcfg2 repository on the + filesystem + :type datastore: string + :raises: Bcfg2.Server.Plugin.PluginInitError + """ + object.__init__(self) + self.Entries = {} + self.core = core + self.data = os.path.join(datastore, self.name) + self.running = True + Debuggable.__init__(self, name=self.name) + + @classmethod + def init_repo(cls, repo): + """ Perform any tasks necessary to create an initial Bcfg2 + repository. 
+ + :param repo: The path to the Bcfg2 repository on the filesystem + :type repo: string + :returns: None + """ + os.makedirs(os.path.join(repo, cls.name)) + + def shutdown(self): + """ Perform shutdown tasks for the plugin + + :returns: None """ + self.running = False + + def __str__(self): + return "%s Plugin" % self.__class__.__name__ diff --git a/src/lib/Bcfg2/Server/Plugin/exceptions.py b/src/lib/Bcfg2/Server/Plugin/exceptions.py new file mode 100644 index 000000000..bc8c62acd --- /dev/null +++ b/src/lib/Bcfg2/Server/Plugin/exceptions.py @@ -0,0 +1,36 @@ +""" Exceptions for Bcfg2 Server Plugins.""" + +class PluginInitError(Exception): + """Error raised in cases of :class:`Bcfg2.Server.Plugin.Plugin` + initialization errors.""" + pass + + +class PluginExecutionError(Exception): + """Error raised in case of :class:`Bcfg2.Server.Plugin.Plugin` + execution errors.""" + pass + + +class MetadataConsistencyError(Exception): + """This error gets raised when metadata is internally inconsistent.""" + pass + + +class MetadataRuntimeError(Exception): + """This error is raised when the metadata engine is called prior + to reading enough data, or for other + :class:`Bcfg2.Server.Plugin.Metadata` errors. 
""" + pass + + +class ValidationError(Exception): + """ Exception raised by + :class:`Bcfg2.Server.Plugin.StructureValidator` and + :class:`Bcfg2.Server.Plugin.GoalValidator` objects """ + + +class SpecificityError(Exception): + """ Thrown by :class:`Bcfg2.Server.Plugin.Specificity` in case of + filename parse failure.""" + pass diff --git a/src/lib/Bcfg2/Server/Plugin/helpers.py b/src/lib/Bcfg2/Server/Plugin/helpers.py new file mode 100644 index 000000000..74cf4b3c4 --- /dev/null +++ b/src/lib/Bcfg2/Server/Plugin/helpers.py @@ -0,0 +1,965 @@ +""" Helper classes for Bcfg2 server plugins """ + +import os +import re +import sys +import copy +import logging +import operator +import lxml.etree +import Bcfg2.Server +import Bcfg2.Options +from Bcfg2.Compat import CmpMixin +from base import * +from interfaces import * +from exceptions import * + +try: + import django + has_django = True +except ImportError: + has_django = False + +# grab default metadata info from bcfg2.conf +opts = {'owner': Bcfg2.Options.MDATA_OWNER, + 'group': Bcfg2.Options.MDATA_GROUP, + 'perms': Bcfg2.Options.MDATA_PERMS, + 'secontext': Bcfg2.Options.MDATA_SECONTEXT, + 'important': Bcfg2.Options.MDATA_IMPORTANT, + 'paranoid': Bcfg2.Options.MDATA_PARANOID, + 'sensitive': Bcfg2.Options.MDATA_SENSITIVE} +default_file_metadata = Bcfg2.Options.OptionParser(opts) +default_file_metadata.parse([]) +del default_file_metadata['args'] + +logger = logging.getLogger(__name__) + +info_regex = re.compile('owner:(\s)*(?P\S+)|' + + 'group:(\s)*(?P\S+)|' + + 'perms:(\s)*(?P\w+)|' + + 'secontext:(\s)*(?P\S+)|' + + 'paranoid:(\s)*(?P\S+)|' + + 'sensitive:(\s)*(?P\S+)|' + + 'encoding:(\s)*(?P\S+)|' + + 'important:(\s)*(?P\S+)|' + + 'mtime:(\s)*(?P\w+)|') + +def bind_info(entry, metadata, infoxml=None, default=default_file_metadata): + for attr, val in list(default.items()): + entry.set(attr, val) + if infoxml: + mdata = dict() + infoxml.pnode.Match(metadata, mdata, entry=entry) + if 'Info' not in mdata: + msg = "Failed 
to set metadata for file %s" % entry.get('name') + logger.error(msg) + raise PluginExecutionError(msg) + for attr, val in list(mdata['Info'][None].items()): + entry.set(attr, val) + + +class DatabaseBacked(Plugin): + @property + def _use_db(self): + use_db = self.core.setup.cfp.getboolean(self.name.lower(), + "use_database", + default=False) + if use_db and has_django and self.core.database_available: + return True + elif not use_db: + return False + else: + self.logger.error("use_database is true but django not found") + return False + + +class PluginDatabaseModel(object): + class Meta: + app_label = "Server" + + +class FileBacked(object): + """This object caches file data in memory. + HandleEvent is called whenever fam registers an event. + Index can parse the data into member data as required. + This object is meant to be used as a part of DirectoryBacked. + """ + + def __init__(self, name, fam=None): + object.__init__(self) + self.data = '' + self.name = name + self.fam = fam + + def HandleEvent(self, event=None): + """Read file upon update.""" + if event and event.code2str() not in ['exists', 'changed', 'created']: + return + try: + self.data = open(self.name).read() + self.Index() + except IOError: + err = sys.exc_info()[1] + logger.error("Failed to read file %s: %s" % (self.name, err)) + + def Index(self): + """Update local data structures based on current file state""" + pass + + def __repr__(self): + return "%s: %s" % (self.__class__.__name__, self.name) + + +class DirectoryBacked(object): + """This object is a coherent cache for a filesystem hierarchy of files.""" + __child__ = FileBacked + patterns = re.compile('.*') + ignore = None + + def __init__(self, data, fam): + """Initialize the DirectoryBacked object. + + :param self: the object being initialized. + :param data: the path to the data directory that will be + monitored. + :param fam: The FileMonitor object used to receive + notifications of changes. 
+ """ + object.__init__(self) + + self.data = os.path.normpath(data) + self.fam = fam + + # self.entries contains information about the files monitored + # by this object.... The keys of the dict are the relative + # paths to the files. The values are the objects (of type + # __child__) that handle their contents. + self.entries = {} + + # self.handles contains information about the directories + # monitored by this object. The keys of the dict are the + # values returned by the initial fam.AddMonitor() call (which + # appear to be integers). The values are the relative paths of + # the directories. + self.handles = {} + + # Monitor everything in the plugin's directory + self.add_directory_monitor('') + + def __getitem__(self, key): + return self.entries[key] + + def __iter__(self): + return iter(list(self.entries.items())) + + def add_directory_monitor(self, relative): + """Add a new directory to FAM structures for monitoring. + + :param relative: Path name to monitor. This must be relative + to the plugin's directory. An empty string value ("") will + cause the plugin directory itself to be monitored. + """ + dirpathname = os.path.join(self.data, relative) + if relative not in self.handles.values(): + if not os.path.isdir(dirpathname): + logger.error("%s is not a directory" % dirpathname) + return + reqid = self.fam.AddMonitor(dirpathname, self) + self.handles[reqid] = relative + + def add_entry(self, relative, event): + """Add a new file to our structures for monitoring. + + :param relative: Path name to monitor. This must be relative + to the plugin's directory. + :param event: File Monitor event that caused this entry to be + added. + """ + self.entries[relative] = self.__child__(os.path.join(self.data, + relative), + self.fam) + self.entries[relative].HandleEvent(event) + + def HandleEvent(self, event): + """Handle FAM/Gamin events. + + This method is invoked by FAM/Gamin when it detects a change + to a filesystem object we have requsted to be monitored. 
+
+        This method manages the lifecycle of events related to the
+        monitored objects, adding them to our indices and creating
+        objects of type __child__ that actually do the domain-specific
+        processing. When appropriate, it propagates events to those
+        objects by invoking their HandleEvent in turn.
+        """
+        action = event.code2str()
+
+        # Exclude events for actions we don't care about
+        if action == 'endExist':
+            return
+
+        if event.requestID not in self.handles:
+            logger.warn("Got %s event with unknown handle (%s) for %s" %
+                        (action, event.requestID, event.filename))
+            return
+
+        # Clean up path names
+        event.filename = os.path.normpath(event.filename)
+        if event.filename.startswith(self.data):
+            # the first event we get is on the data directory itself
+            event.filename = event.filename[len(self.data) + 1:]
+
+        if self.ignore and self.ignore.search(event.filename):
+            logger.debug("Ignoring event %s" % event.filename)
+            return
+
+        # Calculate the absolute and relative paths this event refers to
+        abspath = os.path.join(self.data, self.handles[event.requestID],
+                               event.filename)
+        relpath = os.path.join(self.handles[event.requestID],
+                               event.filename).lstrip('/')
+
+        if action == 'deleted':
+            for key in list(self.entries.keys()):
+                if key.startswith(relpath):
+                    del self.entries[key]
+            # We remove values from self.entries, but not
+            # self.handles, because the FileMonitor doesn't stop
+            # watching a directory just because it gets deleted. If it
+            # is recreated, we will start getting notifications for it
+            # again without having to add a new monitor.
+        elif os.path.isdir(abspath):
+            # Deal with events for directories
+            if action in ['exists', 'created']:
+                self.add_directory_monitor(relpath)
+            elif action == 'changed':
+                if relpath in self.entries:
+                    # Ownerships, permissions or timestamps changed on
+                    # the directory. None of these should affect the
+                    # contents of the files, though it could change
+                    # our ability to access them.
+ # + # It seems like the right thing to do is to cancel + # monitoring the directory and then begin + # monitoring it again. But the current FileMonitor + # class doesn't support canceling, so at least let + # the user know that a restart might be a good + # idea. + logger.warn("Directory properties for %s changed, please " + + " consider restarting the server" % (abspath)) + else: + # Got a "changed" event for a directory that we + # didn't know about. Go ahead and treat it like a + # "created" event, but log a warning, because this + # is unexpected. + logger.warn("Got %s event for unexpected dir %s" % + (action, abspath)) + self.add_directory_monitor(relpath) + else: + logger.warn("Got unknown dir event %s %s %s" % + (event.requestID, event.code2str(), abspath)) + elif self.patterns.search(event.filename): + if action in ['exists', 'created']: + self.add_entry(relpath, event) + elif action == 'changed': + if relpath in self.entries: + self.entries[relpath].HandleEvent(event) + else: + # Got a "changed" event for a file that we didn't + # know about. Go ahead and treat it like a + # "created" event, but log a warning, because this + # is unexpected. + logger.warn("Got %s event for unexpected file %s" % + (action, + abspath)) + self.add_entry(relpath, event) + else: + logger.warn("Got unknown file event %s %s %s" % + (event.requestID, event.code2str(), abspath)) + else: + logger.warn("Could not process filename %s; ignoring" % + event.filename) + + +class XMLFileBacked(FileBacked): + """ + This object is a coherent cache for an XML file to be used as a + part of DirectoryBacked. 
+ """ + __identifier__ = 'name' + + def __init__(self, filename, fam=None, should_monitor=False): + FileBacked.__init__(self, filename) + self.label = "" + self.entries = [] + self.extras = [] + self.fam = fam + self.should_monitor = should_monitor + if fam and should_monitor: + self.fam.AddMonitor(filename, self) + + def _follow_xincludes(self, fname=None, xdata=None): + ''' follow xincludes, adding included files to self.extras ''' + if xdata is None: + if fname is None: + xdata = self.xdata.getroottree() + else: + xdata = lxml.etree.parse(fname) + included = [el for el in xdata.findall('//%sinclude' % + Bcfg2.Server.XI_NAMESPACE)] + for el in included: + name = el.get("href") + if name.startswith("/"): + fpath = name + else: + if fname: + rel = fname + else: + rel = self.name + fpath = os.path.join(os.path.dirname(rel), name) + if fpath not in self.extras: + if os.path.exists(fpath): + self._follow_xincludes(fname=fpath) + self.add_monitor(fpath) + else: + msg = "%s: %s does not exist, skipping" % (self.name, name) + if el.findall('./%sfallback' % Bcfg2.Server.XI_NAMESPACE): + self.logger.debug(msg) + else: + self.logger.warning(msg) + + def Index(self): + """Build local data structures.""" + try: + self.xdata = lxml.etree.XML(self.data, base_url=self.name, + parser=Bcfg2.Server.XMLParser) + except lxml.etree.XMLSyntaxError: + msg = "Failed to parse %s: %s" % (self.name, sys.exc_info()[1]) + logger.error(msg) + raise PluginInitError(msg) + + self._follow_xincludes() + if self.extras: + try: + self.xdata.getroottree().xinclude() + except lxml.etree.XIncludeError: + err = sys.exc_info()[1] + logger.error("XInclude failed on %s: %s" % (self.name, err)) + + self.entries = self.xdata.getchildren() + if self.__identifier__ is not None: + self.label = self.xdata.attrib[self.__identifier__] + + def add_monitor(self, fpath): + self.extras.append(fpath) + if self.fam and self.should_monitor: + self.fam.AddMonitor(fpath, self) + + def __iter__(self): + return 
iter(self.entries) + + def __str__(self): + return "%s at %s" % (self.__class__.__name__, self.name) + + +class StructFile(XMLFileBacked): + """This file contains a set of structure file formatting logic.""" + __identifier__ = None + + def _include_element(self, item, metadata): + """ determine if an XML element matches the metadata """ + if isinstance(item, lxml.etree._Comment): + return False + negate = item.get('negate', 'false').lower() == 'true' + if item.tag == 'Group': + return negate == (item.get('name') not in metadata.groups) + elif item.tag == 'Client': + return negate == (item.get('name') != metadata.hostname) + else: + return True + + def _match(self, item, metadata): + """ recursive helper for Match() """ + if self._include_element(item, metadata): + if item.tag == 'Group' or item.tag == 'Client': + rv = [] + if self._include_element(item, metadata): + for child in item.iterchildren(): + rv.extend(self._match(child, metadata)) + return rv + else: + rv = copy.deepcopy(item) + for child in rv.iterchildren(): + rv.remove(child) + for child in item.iterchildren(): + rv.extend(self._match(child, metadata)) + return [rv] + else: + return [] + + def Match(self, metadata): + """Return matching fragments of independent.""" + rv = [] + for child in self.entries: + rv.extend(self._match(child, metadata)) + return rv + + def _xml_match(self, item, metadata): + """ recursive helper for XMLMatch """ + if self._include_element(item, metadata): + if item.tag == 'Group' or item.tag == 'Client': + for child in item.iterchildren(): + item.remove(child) + item.getparent().append(child) + self._xml_match(child, metadata) + item.getparent().remove(item) + else: + for child in item.iterchildren(): + self._xml_match(child, metadata) + else: + item.getparent().remove(item) + + def XMLMatch(self, metadata): + """ Return a rebuilt XML document that only contains the + matching portions """ + rv = copy.deepcopy(self.xdata) + for child in rv.iterchildren(): + 
self._xml_match(child, metadata) + return rv + + +class INode(object): + """ + LNodes provide lists of things available at a particular + group intersection. + """ + raw = dict( + Client="lambda m, e:'%(name)s' == m.hostname and predicate(m, e)", + Group="lambda m, e:'%(name)s' in m.groups and predicate(m, e)") + nraw = dict( + Client="lambda m, e:'%(name)s' != m.hostname and predicate(m, e)", + Group="lambda m, e:'%(name)s' not in m.groups and predicate(m, e)") + containers = ['Group', 'Client'] + ignore = [] + + def __init__(self, data, idict, parent=None): + self.data = data + self.contents = {} + if parent is None: + self.predicate = lambda m, e: True + else: + predicate = parent.predicate + if data.get('negate', 'false').lower() == 'true': + psrc = self.nraw + else: + psrc = self.raw + if data.tag in list(psrc.keys()): + self.predicate = eval(psrc[data.tag] % + {'name': data.get('name')}, + {'predicate': predicate}) + else: + raise PluginExecutionError("Unknown tag: %s" % data.tag) + self.children = [] + self._load_children(data, idict) + + def _load_children(self, data, idict): + for item in data.getchildren(): + if item.tag in self.ignore: + continue + elif item.tag in self.containers: + self.children.append(self.__class__(item, idict, self)) + else: + try: + self.contents[item.tag][item.get('name')] = \ + dict(item.attrib) + except KeyError: + self.contents[item.tag] = \ + {item.get('name'): dict(item.attrib)} + if item.text: + self.contents[item.tag][item.get('name')]['__text__'] = \ + item.text + if item.getchildren(): + self.contents[item.tag][item.get('name')]['__children__'] =\ + item.getchildren() + try: + idict[item.tag].append(item.get('name')) + except KeyError: + idict[item.tag] = [item.get('name')] + + def Match(self, metadata, data, entry=lxml.etree.Element("None")): + """Return a dictionary of package mappings.""" + if self.predicate(metadata, entry): + for key in self.contents: + try: + data[key].update(self.contents[key]) + except: + 
data[key] = {} + data[key].update(self.contents[key]) + for child in self.children: + child.Match(metadata, data, entry=entry) + + +class InfoNode (INode): + """ INode implementation that includes tags """ + raw = {'Client': "lambda m, e:'%(name)s' == m.hostname and predicate(m, e)", + 'Group': "lambda m, e:'%(name)s' in m.groups and predicate(m, e)", + 'Path': "lambda m, e:('%(name)s' == e.get('name') or '%(name)s' == e.get('realname')) and predicate(m, e)"} + nraw = {'Client': "lambda m, e:'%(name)s' != m.hostname and predicate(m, e)", + 'Group': "lambda m, e:'%(name)s' not in m.groups and predicate(m, e)", + 'Path': "lambda m, e:('%(name)s' != e.get('name') and '%(name)s' != e.get('realname')) and predicate(m, e)"} + containers = ['Group', 'Client', 'Path'] + + +class XMLSrc(XMLFileBacked): + """XMLSrc files contain a LNode hierarchy that returns matching entries.""" + __node__ = INode + __cacheobj__ = dict + __priority_required__ = True + + def __init__(self, filename, fam=None, should_monitor=False): + XMLFileBacked.__init__(self, filename, fam, should_monitor) + self.items = {} + self.cache = None + self.pnode = None + self.priority = -1 + + def HandleEvent(self, _=None): + """Read file upon update.""" + try: + data = open(self.name).read() + except IOError: + msg = "Failed to read file %s: %s" % (self.name, sys.exc_info()[1]) + logger.error(msg) + raise PluginExecutionError(msg) + self.items = {} + try: + xdata = lxml.etree.XML(data, parser=Bcfg2.Server.XMLParser) + except lxml.etree.XMLSyntaxError: + msg = "Failed to parse file %s" % (self.name, sys.exc_info()[1]) + logger.error(msg) + raise PluginExecutionError(msg) + self.pnode = self.__node__(xdata, self.items) + self.cache = None + try: + self.priority = int(xdata.get('priority')) + except (ValueError, TypeError): + if self.__priority_required__: + msg = "Got bogus priority %s for file %s" % \ + (xdata.get('priority'), self.name) + logger.error(msg) + raise PluginExecutionError(msg) + + del xdata, data 
+ + def Cache(self, metadata): + """Build a package dict for a given host.""" + if self.cache is None or self.cache[0] != metadata: + cache = (metadata, self.__cacheobj__()) + if self.pnode is None: + logger.error("Cache method called early for %s; forcing data load" % (self.name)) + self.HandleEvent() + return + self.pnode.Match(metadata, cache[1]) + self.cache = cache + + def __str__(self): + return str(self.items) + + +class InfoXML(XMLSrc): + __node__ = InfoNode + __priority_required__ = False + + +class XMLDirectoryBacked(DirectoryBacked): + """Directorybacked for *.xml.""" + patterns = re.compile('^.*\.xml$') + __child__ = XMLFileBacked + + +class PrioDir(Plugin, Generator, XMLDirectoryBacked): + """This is a generator that handles package assignments.""" + name = 'PrioDir' + __child__ = XMLSrc + + def __init__(self, core, datastore): + Plugin.__init__(self, core, datastore) + Generator.__init__(self) + XMLDirectoryBacked.__init__(self, self.data, self.core.fam) + + def HandleEvent(self, event): + """Handle events and update dispatch table.""" + XMLDirectoryBacked.HandleEvent(self, event) + self.Entries = {} + for src in list(self.entries.values()): + for itype, children in list(src.items.items()): + for child in children: + try: + self.Entries[itype][child] = self.BindEntry + except KeyError: + self.Entries[itype] = {child: self.BindEntry} + + def _matches(self, entry, metadata, rules): + return entry.get('name') in rules + + def BindEntry(self, entry, metadata): + attrs = self.get_attrs(entry, metadata) + for key, val in list(attrs.items()): + entry.attrib[key] = val + + def get_attrs(self, entry, metadata): + """ get a list of attributes to add to the entry during the bind """ + for src in self.entries.values(): + src.Cache(metadata) + + matching = [src for src in list(self.entries.values()) + if (src.cache and + entry.tag in src.cache[1] and + self._matches(entry, metadata, + src.cache[1][entry.tag]))] + if len(matching) == 0: + raise 
PluginExecutionError('No matching source for entry when retrieving attributes for %s(%s)' % (entry.tag, entry.attrib.get('name'))) + elif len(matching) == 1: + index = 0 + else: + prio = [int(src.priority) for src in matching] + if prio.count(max(prio)) > 1: + msg = "Found conflicting sources with same priority for " + \ + "%s:%s for %s" % (entry.tag, entry.get("name"), + metadata.hostname) + self.logger.error(msg) + self.logger.error([item.name for item in matching]) + self.logger.error("Priority was %s" % max(prio)) + raise PluginExecutionError(msg) + index = prio.index(max(prio)) + + for rname in list(matching[index].cache[1][entry.tag].keys()): + if self._matches(entry, metadata, [rname]): + data = matching[index].cache[1][entry.tag][rname] + break + else: + # Fall back on __getitem__. Required if override used + data = matching[index].cache[1][entry.tag][entry.get('name')] + if '__text__' in data: + entry.text = data['__text__'] + if '__children__' in data: + [entry.append(copy.copy(item)) for item in data['__children__']] + + return dict([(key, data[key]) + for key in list(data.keys()) + if not key.startswith('__')]) + + +class Specificity(CmpMixin): + def __init__(self, all=False, group=False, hostname=False, prio=0, + delta=False): + CmpMixin.__init__(self) + self.hostname = hostname + self.all = all + self.group = group + self.prio = prio + self.delta = delta + + def matches(self, metadata): + return self.all or \ + self.hostname == metadata.hostname or \ + self.group in metadata.groups + + def __cmp__(self, other): + """Sort most to least specific.""" + if self.all: + if other.all: + return 0 + else: + return 1 + elif other.all: + return -1 + elif self.group: + if other.hostname: + return 1 + if other.group and other.prio > self.prio: + return 1 + if other.group and other.prio == self.prio: + return 0 + elif other.group: + return -1 + elif self.hostname and other.hostname: + return 0 + return -1 + + def __str__(self): + rv = [self.__class__.__name__, ': 
'] + if self.all: + rv.append("all") + elif self.group: + rv.append("Group %s, priority %s" % (self.group, self.prio)) + elif self.hostname: + rv.append("Host %s" % self.hostname) + if self.delta: + rv.append(", delta=%s" % self.delta) + return "".join(rv) + + +class SpecificData(object): + def __init__(self, name, specific, encoding): + self.name = name + self.specific = specific + + def handle_event(self, event): + if event.code2str() == 'deleted': + return + try: + self.data = open(self.name).read() + except UnicodeDecodeError: + self.data = open(self.name, mode='rb').read() + except: + logger.error("Failed to read file %s" % self.name) + + +class EntrySet(Debuggable): + """Entry sets deal with the host- and group-specific entries.""" + ignore = re.compile("^(\.#.*|.*~|\\..*\\.(sw[px])|.*\\.genshi_include)$") + basename_is_regex=False + + def __init__(self, basename, path, entry_type, encoding): + Debuggable.__init__(self, name=basename) + self.path = path + self.entry_type = entry_type + self.entries = {} + self.metadata = default_file_metadata.copy() + self.infoxml = None + self.encoding = encoding + + if self.basename_is_regex: + base_pat = basename + else: + base_pat = re.escape(basename) + pattern = '(.*/)?%s(\.((H_(?P\S+))|' % base_pat + pattern += '(G(?P\d+)_(?P\S+))))?$' + self.specific = re.compile(pattern) + + def sort_by_specific(self, one, other): + return cmp(one.specific, other.specific) + + def get_matching(self, metadata): + return [item for item in list(self.entries.values()) + if item.specific.matches(metadata)] + + def best_matching(self, metadata, matching=None): + """ Return the appropriate interpreted template from the set of + available templates. 
""" + if matching is None: + matching = self.get_matching(metadata) + + if matching: + matching.sort(key=operator.attrgetter("specific")) + return matching[0] + else: + raise PluginExecutionError("No matching entries available for %s " + "for %s" % (self.path, + metadata.hostname)) + + def handle_event(self, event): + """Handle FAM events for the TemplateSet.""" + action = event.code2str() + + if event.filename in ['info', 'info.xml', ':info']: + if action in ['exists', 'created', 'changed']: + self.update_metadata(event) + elif action == 'deleted': + self.reset_metadata(event) + return + + if action in ['exists', 'created']: + self.entry_init(event) + else: + if event.filename not in self.entries: + logger.warning("Got %s event for unknown file %s" % + (action, event.filename)) + if action == 'changed': + # received a bogus changed event; warn, but treat + # it like a created event + self.entry_init(event) + return + if action == 'changed': + self.entries[event.filename].handle_event(event) + elif action == 'deleted': + del self.entries[event.filename] + + def entry_init(self, event, entry_type=None, specific=None): + """Handle template and info file creation.""" + if entry_type is None: + entry_type = self.entry_type + + if event.filename in self.entries: + logger.warn("Got duplicate add for %s" % event.filename) + else: + fpath = os.path.join(self.path, event.filename) + try: + spec = self.specificity_from_filename(event.filename, + specific=specific) + except SpecificityError: + if not self.ignore.match(event.filename): + logger.error("Could not process filename %s; ignoring" % + fpath) + return + self.entries[event.filename] = entry_type(fpath, spec, + self.encoding) + self.entries[event.filename].handle_event(event) + + def specificity_from_filename(self, fname, specific=None): + """Construct a specificity instance from a filename and regex.""" + if specific is None: + specific = self.specific + data = specific.match(fname) + if not data: + raise 
SpecificityError(fname) + kwargs = {} + if data.group('hostname'): + kwargs['hostname'] = data.group('hostname') + elif data.group('group'): + kwargs['group'] = data.group('group') + kwargs['prio'] = int(data.group('prio')) + else: + kwargs['all'] = True + if 'delta' in data.groupdict(): + kwargs['delta'] = data.group('delta') + return Specificity(**kwargs) + + def update_metadata(self, event): + """Process info and info.xml files for the templates.""" + fpath = os.path.join(self.path, event.filename) + if event.filename == 'info.xml': + if not self.infoxml: + self.infoxml = InfoXML(fpath) + self.infoxml.HandleEvent(event) + elif event.filename in [':info', 'info']: + for line in open(fpath).readlines(): + match = info_regex.match(line) + if not match: + logger.warning("Failed to match line in %s: %s" % (fpath, + line)) + continue + else: + mgd = match.groupdict() + for key, value in list(mgd.items()): + if value: + self.metadata[key] = value + if len(self.metadata['perms']) == 3: + self.metadata['perms'] = "0%s" % self.metadata['perms'] + + def reset_metadata(self, event): + """Reset metadata to defaults if info or info.xml removed.""" + if event.filename == 'info.xml': + self.infoxml = None + elif event.filename in [':info', 'info']: + self.metadata = default_file_metadata.copy() + + def bind_info_to_entry(self, entry, metadata): + bind_info(entry, metadata, infoxml=self.infoxml, default=self.metadata) + + def bind_entry(self, entry, metadata): + """Return the appropriate interpreted template from the set of + available templates.""" + self.bind_info_to_entry(entry, metadata) + return self.best_matching(metadata).bind_entry(entry, metadata) + + +class GroupSpool(Plugin, Generator): + """Unified interface for handling group-specific data (e.g. 
.G## files).""" + name = 'GroupSpool' + __author__ = 'bcfg-dev@mcs.anl.gov' + filename_pattern = "" + es_child_cls = object + es_cls = EntrySet + entry_type = 'Path' + + def __init__(self, core, datastore): + Plugin.__init__(self, core, datastore) + Generator.__init__(self) + if self.data[-1] == '/': + self.data = self.data[:-1] + self.Entries[self.entry_type] = {} + self.entries = {} + self.handles = {} + self.AddDirectoryMonitor('') + self.encoding = core.encoding + + def add_entry(self, event): + epath = self.event_path(event) + ident = self.event_id(event) + if os.path.isdir(epath): + self.AddDirectoryMonitor(epath[len(self.data):]) + if ident not in self.entries and os.path.isfile(epath): + dirpath = self.data + ident + self.entries[ident] = self.es_cls(self.filename_pattern, + dirpath, + self.es_child_cls, + self.encoding) + self.Entries[self.entry_type][ident] = \ + self.entries[ident].bind_entry + if not os.path.isdir(epath): + # do not pass through directory events + self.entries[ident].handle_event(event) + + def event_path(self, event): + return os.path.join(self.data, + self.handles[event.requestID].lstrip("/"), + event.filename) + + def event_id(self, event): + epath = self.event_path(event) + if os.path.isdir(epath): + return os.path.join(self.handles[event.requestID].lstrip("/"), + event.filename) + else: + return self.handles[event.requestID].rstrip("/") + + def toggle_debug(self): + for entry in self.entries.values(): + if hasattr(entry, "toggle_debug"): + entry.toggle_debug() + return Plugin.toggle_debug(self) + + def HandleEvent(self, event): + """Unified FAM event handler for GroupSpool.""" + action = event.code2str() + if event.filename[0] == '/': + return + ident = self.event_id(event) + + if action in ['exists', 'created']: + self.add_entry(event) + elif action == 'changed': + if ident in self.entries: + self.entries[ident].handle_event(event) + else: + # got a changed event for a file we didn't know + # about. 
go ahead and process this as a 'created', but + # warn + self.logger.warning("Got changed event for unknown file %s" % + ident) + self.add_entry(event) + elif action == 'deleted': + fbase = self.handles[event.requestID] + event.filename + if fbase in self.entries: + # a directory was deleted + del self.entries[fbase] + del self.Entries[self.entry_type][fbase] + elif ident in self.entries: + self.entries[ident].handle_event(event) + elif ident not in self.entries: + self.logger.warning("Got deleted event for unknown file %s" % + ident) + + def AddDirectoryMonitor(self, relative): + """Add new directory to FAM structures.""" + if not relative.endswith('/'): + relative += '/' + name = self.data + relative + if relative not in list(self.handles.values()): + if not os.path.isdir(name): + self.logger.error("Failed to open directory %s" % name) + return + reqid = self.core.fam.AddMonitor(name, self) + self.handles[reqid] = relative diff --git a/src/lib/Bcfg2/Server/Plugin/interfaces.py b/src/lib/Bcfg2/Server/Plugin/interfaces.py new file mode 100644 index 000000000..a6543e9b9 --- /dev/null +++ b/src/lib/Bcfg2/Server/Plugin/interfaces.py @@ -0,0 +1,548 @@ +""" Interface definitions for Bcfg2 server plugins """ + +import os +import sys +import copy +import threading +import lxml.etree +import Bcfg2.Server +from Bcfg2.Compat import Queue, Empty, Full, cPickle +from exceptions import * +from base import Plugin + +class Generator(object): + """ Generator plugins contribute to literal client configurations. + That is, they generate entry contents. + + An entry is generated in one of two ways: + + #. The Bcfg2 core looks in the ``Entries`` dict attribute of the + plugin object. ``Entries`` is expected to be a dict whose keys + are entry tags (e.g., ``"Path"``, ``"Service"``, etc.) and + whose values are dicts; those dicts should map the ``name`` + attribute of an entry to a callable that will be called to + generate the content. 
The callable will receive two arguments: + the abstract entry (as an lxml.etree._Element object), and the + client metadata object the entry is being generated for. + + #. If the entry is not listed in ``Entries``, the Bcfg2 core calls + :func:`Bcfg2.Server.Plugin.Generator.HandlesEntry`; if that + returns True, then it calls + :func:`Bcfg2.Server.Plugin.Generator.HandleEntry`. + """ + + def HandlesEntry(self, entry, metadata): + """ HandlesEntry is the slow path method for routing + configuration binding requests. It is called if the + ``Entries`` dict does not contain a method for binding the + entry. + + :param entry: The entry to bind + :type entry: lxml.etree._Element + :param metadata: The client metadata + :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata + :return: bool - Whether or not this plugin can handle the entry + :raises: Bcfg2.Server.Plugin.PluginExecutionError + """ + return False + + def HandleEntry(self, entry, metadata): + """ HandlesEntry is the slow path method for binding + configuration binding requests. It is called if the + ``Entries`` dict does not contain a method for binding the + entry, and :func:`Bcfg2.Server.Plugin.Generator.HandlesEntry` + returns True. + + :param entry: The entry to bind + :type entry: lxml.etree._Element + :param metadata: The client metadata + :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata + :return: lxml.etree._Element - The fully bound entry + :raises: Bcfg2.Server.Plugin.PluginExecutionError + """ + return entry + + +class Structure(object): + """ Structure Plugins contribute to abstract client + configurations. That is, they produce lists of entries that will + be generated for a client. """ + + def BuildStructures(self, metadata): + """ Build a list of lxml.etree._Element objects that will be + added to the top-level ```` tag of the client + configuration. 
Consequently, each object in the list returned + by ``BuildStructures()`` must consist of a container tag + (e.g., ```` or ````) which contains the + entry tags. It must not return a list of entry tags. + + :param metadata: The client metadata + :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata + :return: list of lxml.etree._Element objects + """ + raise NotImplementedError + + +class Metadata(object): + """Signal metadata capabilities for this plugin""" + def viz(self, hosts, bundles, key, only_client, colors): + """ Return a string containing a graphviz document that maps + out the Metadata for :ref:`bcfg2-admin viz ` + + :param hosts: Include hosts in the graph + :type hosts: bool + :param bundles: Include bundles in the graph + :type bundles: bool + :param key: Include a key in the graph + :type key: bool + :param only_client: Only include data for the specified client + :type only_client: string + :param colors: Use the specified graphviz colors + :type colors: list of strings + :return: string + """ + return '' + + def set_version(self, client, version): + """ Set the version for the named client to the specified + version string. + + :param client: Hostname of the client + :type client: string + :param profile: Client Bcfg2 version + :type profile: string + :return: None + :raises: Bcfg2.Server.Plugin.MetadataRuntimeError, + Bcfg2.Server.Plugin.MetadataConsistencyError + """ + pass + + def set_profile(self, client, profile, address): + """ Set the profile for the named client to the named profile + group. + + :param client: Hostname of the client + :type client: string + :param profile: Name of the profile group + :type profile: string + :param address: Address pair of ``(, )`` + :type address: tuple + :return: None + :raises: Bcfg2.Server.Plugin.MetadataRuntimeError, + Bcfg2.Server.Plugin.MetadataConsistencyError + """ + pass + + def resolve_client(self, address, cleanup_cache=False): + """ Resolve the canonical name of this client. 
If this method + is not implemented, the hostname claimed by the client is + used. (This may be a security risk; it's highly recommended + that you implement ``resolve_client`` if you are writing a + Metadata plugin.) + + :param address: Address pair of ``(, )`` + :type address: tuple + :param cleanup_cache: Whether or not to remove expire the + entire client hostname resolution class + :type cleanup_cache: bool + :return: string - canonical client hostname + :raises: Bcfg2.Server.Plugin.MetadataRuntimeError, + Bcfg2.Server.Plugin.MetadataConsistencyError + """ + return address[1] + + def AuthenticateConnection(self, cert, user, password, address): + """ Authenticate the given client. + + :param cert: an x509 certificate + :type cert: dict + :param user: The username of the user trying to authenticate + :type user: string + :param password: The password supplied by the client + :type password: string + :param addresspair: An address pair of ``(, + )`` + :type addresspair: tuple + :return: bool - True if the authenticate succeeds, False otherwise + """ + raise NotImplementedError + + def get_initial_metadata(self, client_name): + """ Return a + :class:`Bcfg2.Server.Plugins.Metadata.ClientMetadata` object + that fully describes everything the Metadata plugin knows + about the named client. + + :param client_name: The hostname of the client + :type client_name: string + :return: Bcfg2.Server.Plugins.Metadata.ClientMetadata + """ + raise NotImplementedError + + def merge_additional_data(self, imd, source, data): + """ Add arbitrary data from a + :class:`Bcfg2.Server.Plugin.Connector` plugin to the given + metadata object. 
+ + :param imd: An initial metadata object + :type imd: Bcfg2.Server.Plugins.Metadata.ClientMetadata + :param source: The name of the plugin providing this data + :type source: string + :param data: The data to add + :type data: any + :return: None + """ + raise NotImplementedError + + def merge_additional_groups(self, imd, groups): + """ Add groups from a + :class:`Bcfg2.Server.Plugin.Connector` plugin to the given + metadata object. + + :param imd: An initial metadata object + :type imd: Bcfg2.Server.Plugins.Metadata.ClientMetadata + :param groups: The groups to add + :type groups: list of strings + :return: None + """ + raise NotImplementedError + + +class Connector(object): + """ Connector plugins augment client metadata instances with + additional data, additional groups, or both. """ + + def get_additional_groups(self, metadata): + """ Return a list of additional groups for the given client. + + :param metadata: The client metadata + :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata + :return: list of strings + """ + return list() + + def get_additional_data(self, metadata): + """ Return arbitrary additional data for the given + ClientMetadata object. By convention this is usually a dict + object, but doesn't need to be. + + :param metadata: The client metadata + :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata + :return: list of strings + """ + return dict() + + +class Probing(object): + """ Probing plugins can collect data from clients and process it. + """ + + def GetProbes(self, metadata): + """ Return a list of probes for the given client. Each probe + should be an lxml.etree._Element object that adheres to + the following specification. Each probe must the following + attributes: + + * ``name``: The unique name of the probe. + * ``source``: The origin of the probe; probably the name of + the plugin that supplies the probe. + * ``interpreter``: The command that will be run on the client + to interpret the probe script. 
Compiled (i.e., + non-interpreted) probes are not supported. + + The text of the XML tag should be the contents of the probe, + i.e., the code that will be run on the client. + + :param metadata: The client metadata + :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata + :return: list of lxml.etree._Element objects + """ + raise NotImplementedError + + def ReceiveData(self, metadata, datalist): + """ Process data returned from the probes for the given + client. ``datalist`` is a list of lxml.etree._Element + objects, each of which is a single tag; the ``name`` attribute + holds the unique name of the probe that was run, and the text + contents of the tag hold the results of the probe. + + :param metadata: The client metadata + :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata + :param datalist: The probe data + :type datalist: list of lxml.etree._Element objects + :return: None + """ + raise NotImplementedError + + +class Statistics(Plugin): + """ Statistics plugins handle statistics for clients. In general, + you should avoid using Statistics and use + :class:`Bcfg2.Server.Plugin.ThreadedStatistics` instead.""" + + def process_statistics(self, client, xdata): + """ Process the given XML statistics data for the specified + client. + + :param metadata: The client metadata + :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata + :param data: The statistics data + :type data: lxml.etree._Element + :return: None + """ + raise NotImplementedError + + +class ThreadedStatistics(Statistics, threading.Thread): + """ ThreadedStatistics plugins process client statistics in a + separate thread. 
""" + + def __init__(self, core, datastore): + Statistics.__init__(self, core, datastore) + threading.Thread.__init__(self) + # Event from the core signaling an exit + self.terminate = core.terminate + self.work_queue = Queue(100000) + self.pending_file = os.path.join(datastore, "etc", + "%s.pending" % self.name) + self.daemon = False + self.start() + + def _save(self): + """Save any pending data to a file.""" + pending_data = [] + try: + while not self.work_queue.empty(): + (metadata, data) = self.work_queue.get_nowait() + try: + pending_data.append((metadata.hostname, + lxml.etree.tostring(data, + xml_declaration=False).decode("UTF-8"))) + except: + err = sys.exc_info()[1] + self.logger.warning("Dropping interaction for %s: %s" % + (metadata.hostname, err)) + except Empty: + pass + + try: + savefile = open(self.pending_file, 'w') + cPickle.dump(pending_data, savefile) + savefile.close() + self.logger.info("Saved pending %s data" % self.name) + except: + err = sys.exc_info()[1] + self.logger.warning("Failed to save pending data: %s" % err) + + def _load(self): + """Load any pending data from a file.""" + if not os.path.exists(self.pending_file): + return True + pending_data = [] + try: + savefile = open(self.pending_file, 'r') + pending_data = cPickle.load(savefile) + savefile.close() + except Exception: + e = sys.exc_info()[1] + self.logger.warning("Failed to load pending data: %s" % e) + return False + for (pmetadata, pdata) in pending_data: + # check that shutdown wasnt called early + if self.terminate.isSet(): + return False + + try: + while True: + try: + metadata = self.core.build_metadata(pmetadata) + break + except MetadataRuntimeError: + pass + + self.terminate.wait(5) + if self.terminate.isSet(): + return False + + self.work_queue.put_nowait((metadata, + lxml.etree.XML(pdata, + parser=Bcfg2.Server.XMLParser))) + except Full: + self.logger.warning("Queue.Full: Failed to load queue data") + break + except lxml.etree.LxmlError: + lxml_error = 
sys.exc_info()[1] + self.logger.error("Unable to load saved interaction: %s" % + lxml_error) + except MetadataConsistencyError: + self.logger.error("Unable to load metadata for save " + "interaction: %s" % pmetadata) + try: + os.unlink(self.pending_file) + except: + self.logger.error("Failed to unlink save file: %s" % + self.pending_file) + self.logger.info("Loaded pending %s data" % self.name) + return True + + def run(self): + if not self._load(): + return + while not self.terminate.isSet() and self.work_queue != None: + try: + (client, xdata) = self.work_queue.get(block=True, timeout=2) + except Empty: + continue + except Exception: + e = sys.exc_info()[1] + self.logger.error("ThreadedStatistics: %s" % e) + continue + self.handle_statistic(client, xdata) + if self.work_queue != None and not self.work_queue.empty(): + self._save() + + def process_statistics(self, metadata, data): + try: + self.work_queue.put_nowait((metadata, copy.copy(data))) + except Full: + self.logger.warning("%s: Queue is full. Dropping interactions." % + self.name) + + def handle_statistic(self, metadata, data): + """ Process the given XML statistics data for the specified + client object. This differs from the + :func:`Bcfg2.Server.Plugin.Statistics.process_statistics` + method only in that ThreadedStatistics first adds the data to + a queue, and then processes them in a separate thread. 
+ + :param metadata: The client metadata + :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata + :param data: The statistics data + :type data: lxml.etree._Element + :return: None + """ + raise NotImplementedError + + +class PullSource(object): + def GetExtra(self, client): + return [] + + def GetCurrentEntry(self, client, e_type, e_name): + raise NotImplementedError + + +class PullTarget(object): + def AcceptChoices(self, entry, metadata): + raise NotImplementedError + + def AcceptPullData(self, specific, new_entry, verbose): + raise NotImplementedError + + +class Decision(object): + """ Decision plugins produce decision lists for affecting which + entries are actually installed on clients. """ + + def GetDecisions(self, metadata, mode): + """ Return a list of tuples of ``(, )`` to be used as the decision list for the given + client in the specified mode. + + :param metadata: The client metadata + :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata + :param mode: The decision mode ("whitelist" or "blacklist") + :type mode: string + :return: list of tuples + """ + raise NotImplementedError + + +class StructureValidator(object): + """ StructureValidator plugins can modify the list of structures + after it has been created but before the entries have been + concretely bound. """ + + def validate_structures(self, metadata, structures): + """ Given a list of structures (i.e., of tags that contain + entry tags), modify that list or the structures in it + in-place. 
+ + :param metadata: The client metadata + :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata + :param config: A list of lxml.etree._Element objects + describing the structures for this client + :type config: list + :returns: None + :raises: Bcfg2.Server.Plugin.ValidationError + """ + raise NotImplementedError + + +class GoalValidator(object): + """ GoalValidator plugins can modify the concretely-bound configuration of + a client as a last stage before the configuration is sent to the + client. """ + + def validate_goals(self, metadata, config): + """ Given a monolithic XML document of the full configuration, + modify the document in-place. + + :param metadata: The client metadata + :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata + :param config: The full configuration for the client + :type config: lxml.etree._Element + :returns: None + :raises: Bcfg2.Server.Plugin.ValidationError + """ + raise NotImplementedError + + +class Version(object): + """ Version plugins interact with various version control systems. """ + + def get_revision(self): + """ Return the current revision of the Bcfg2 specification. + This will be included in the ``revision`` attribute of the + top-level tag of the XML configuration sent to the client. + + :returns: string - the current version + """ + raise NotImplementedError + + +class ClientRunHooks(object): + """ ClientRunHooks can hook into various parts of a client run to + perform actions at various times without needing to pretend to be + a different plugin type. """ + + def start_client_run(self, metadata): + """ Invoked at the start of a client run, after all probe data + has been received and decision lists have been queried (if + applicable), but before the configuration is generated. 
+ + :param metadata: The client metadata object + :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata + :returns: None + """ + pass + + def end_client_run(self, metadata): + """ Invoked at the end of a client run, immediately after + :class:`Bcfg2.Server.Plugin.GoalValidator` plugins have been run + and just before the configuration is returned to the client. + + :param metadata: The client metadata object + :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata + :returns: None + """ + pass + + def end_statistics(self, metadata): + """ Invoked after statistics are processed for a client. + + :param metadata: The client metadata object + :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata + :returns: None + """ + pass diff --git a/testsuite/Testsrc/Testlib/TestServer/TestPlugin.py b/testsuite/Testsrc/Testlib/TestServer/TestPlugin.py deleted file mode 100644 index 5410c550e..000000000 --- a/testsuite/Testsrc/Testlib/TestServer/TestPlugin.py +++ /dev/null @@ -1,2334 +0,0 @@ -import os -import re -import sys -import copy -import logging -import lxml.etree -import Bcfg2.Server -from Bcfg2.Compat import reduce -from mock import Mock, MagicMock, patch -from Bcfg2.Server.Plugin import * - -# add all parent testsuite directories to sys.path to allow (most) -# relative imports in python 2.4 -path = os.path.dirname(__file__) -while path != '/': - if os.path.basename(path).lower().startswith("test"): - sys.path.append(path) - if os.path.basename(path) == "testsuite": - break - path = os.path.dirname(path) -from common import XI_NAMESPACE, XI, inPy3k, call, builtins, u, can_skip, \ - skip, skipIf, skipUnless, Bcfg2TestCase, DBModelTestCase, syncdb, \ - patchIf, datastore - - -try: - re_type = re._pattern_type -except AttributeError: - re_type = type(re.compile("")) - -def tostring(el): - return lxml.etree.tostring(el, xml_declaration=False).decode('UTF-8') - - -class FakeElementTree(lxml.etree._ElementTree): - xinclude = Mock() - - -class 
TestFunctions(Bcfg2TestCase): - def test_bind_info(self): - entry = lxml.etree.Element("Path", name="/test") - metadata = Mock() - default = dict(test1="test1", test2="test2") - # test without infoxml - bind_info(entry, metadata, default=default) - self.assertItemsEqual(entry.attrib, - dict(test1="test1", - test2="test2", - name="/test")) - - # test with bogus infoxml - entry = lxml.etree.Element("Path", name="/test") - infoxml = Mock() - self.assertRaises(PluginExecutionError, - bind_info, - entry, metadata, infoxml=infoxml) - infoxml.pnode.Match.assert_called_with(metadata, dict(), entry=entry) - - # test with valid infoxml - entry = lxml.etree.Element("Path", name="/test") - infoxml.reset_mock() - infodata = {None: {"test3": "test3", "test4": "test4"}} - def infoxml_rv(metadata, rv, entry=None): - rv['Info'] = infodata - infoxml.pnode.Match.side_effect = infoxml_rv - bind_info(entry, metadata, infoxml=infoxml, default=default) - # mock objects don't properly track the called-with value of - # arguments whose value is changed by the function, so it - # thinks Match() was called with the final value of the mdata - # arg, not the initial value. makes this test a little less - # worthwhile, TBH. 
- infoxml.pnode.Match.assert_called_with(metadata, dict(Info=infodata), - entry=entry) - self.assertItemsEqual(entry.attrib, - dict(test1="test1", - test2="test2", - test3="test3", - test4="test4", - name="/test")) - - -class TestPluginInitError(Bcfg2TestCase): - """ placeholder for future tests """ - pass - - -class TestPluginExecutionError(Bcfg2TestCase): - """ placeholder for future tests """ - pass - - -class TestDebuggable(Bcfg2TestCase): - test_obj = Debuggable - - def get_obj(self): - return self.test_obj() - - def test__init(self): - d = self.get_obj() - self.assertIsInstance(d.logger, logging.Logger) - self.assertFalse(d.debug_flag) - - @patch("Bcfg2.Server.Plugin.%s.debug_log" % test_obj.__name__) - def test_toggle_debug(self, mock_debug): - d = self.get_obj() - orig = d.debug_flag - d.toggle_debug() - self.assertNotEqual(orig, d.debug_flag) - self.assertTrue(mock_debug.called) - - mock_debug.reset_mock() - - changed = d.debug_flag - d.toggle_debug() - self.assertNotEqual(changed, d.debug_flag) - self.assertEqual(orig, d.debug_flag) - self.assertTrue(mock_debug.called) - - def test_debug_log(self): - d = self.get_obj() - d.logger = Mock() - d.debug_flag = False - d.debug_log("test") - self.assertFalse(d.logger.error.called) - - d.logger.reset_mock() - d.debug_log("test", flag=True) - self.assertTrue(d.logger.error.called) - - d.logger.reset_mock() - d.debug_flag = True - d.debug_log("test") - self.assertTrue(d.logger.error.called) - - -class TestPlugin(TestDebuggable): - test_obj = Plugin - - def get_obj(self, core=None): - if core is None: - core = Mock() - return self.test_obj(core, datastore) - - def test__init(self): - core = Mock() - p = self.get_obj(core=core) - self.assertEqual(p.data, os.path.join(datastore, p.name)) - self.assertEqual(p.core, core) - self.assertIsInstance(p, Debuggable) - - @patch("os.makedirs") - def test_init_repo(self, mock_makedirs): - self.test_obj.init_repo(datastore) - 
mock_makedirs.assert_called_with(os.path.join(datastore, - self.test_obj.name)) - - -class TestDatabaseBacked(TestPlugin): - test_obj = DatabaseBacked - - @skipUnless(has_django, "Django not found") - def test__use_db(self): - core = Mock() - core.setup.cfp.getboolean.return_value = True - db = self.get_obj(core) - self.assertTrue(db._use_db) - - core = Mock() - core.setup.cfp.getboolean.return_value = False - db = self.get_obj(core) - self.assertFalse(db._use_db) - - Bcfg2.Server.Plugin.has_django = False - core = Mock() - db = self.get_obj(core) - self.assertFalse(db._use_db) - - core = Mock() - core.setup.cfp.getboolean.return_value = True - db = self.get_obj(core) - self.assertFalse(db._use_db) - Bcfg2.Server.Plugin.has_django = True - - -class TestPluginDatabaseModel(Bcfg2TestCase): - """ placeholder for future tests """ - pass - - -class TestGenerator(Bcfg2TestCase): - test_obj = Generator - - def test_HandlesEntry(self): - pass - - def test_HandleEntry(self): - pass - - -class TestStructure(Bcfg2TestCase): - test_obj = Structure - - def get_obj(self): - return self.test_obj() - - def test_BuildStructures(self): - s = self.get_obj() - self.assertRaises(NotImplementedError, - s.BuildStructures, None) - - -class TestMetadata(Bcfg2TestCase): - test_obj = Metadata - - def get_obj(self): - return self.test_obj() - - def test_AuthenticateConnection(self): - m = self.get_obj() - self.assertRaises(NotImplementedError, - m.AuthenticateConnection, - None, None, None, (None, None)) - - def test_get_initial_metadata(self): - m = self.get_obj() - self.assertRaises(NotImplementedError, - m.get_initial_metadata, None) - - def test_merge_additional_data(self): - m = self.get_obj() - self.assertRaises(NotImplementedError, - m.merge_additional_data, None, None, None) - - def test_merge_additional_groups(self): - m = self.get_obj() - self.assertRaises(NotImplementedError, - m.merge_additional_groups, None, None) - - -class TestConnector(Bcfg2TestCase): - """ placeholder """ - 
def test_get_additional_groups(self): - pass - - def test_get_additional_data(self): - pass - - -class TestProbing(Bcfg2TestCase): - test_obj = Probing - - def get_obj(self): - return self.test_obj() - - def test_GetProbes(self): - p = self.get_obj() - self.assertRaises(NotImplementedError, - p.GetProbes, None) - - def test_ReceiveData(self): - p = self.get_obj() - self.assertRaises(NotImplementedError, - p.ReceiveData, None, None) - - -class TestStatistics(TestPlugin): - test_obj = Statistics - - def get_obj(self, core=None): - if core is None: - core = Mock() - return self.test_obj(core, datastore) - - def test_process_statistics(self): - s = self.get_obj() - self.assertRaises(NotImplementedError, - s.process_statistics, None, None) - - -class TestThreadedStatistics(TestStatistics): - test_obj = ThreadedStatistics - data = [("foo.example.com", ""), - ("bar.example.com", "")] - - @patch("threading.Thread.start") - def test__init(self, mock_start): - core = Mock() - ts = self.get_obj(core) - mock_start.assert_any_call() - - @patch("%s.open" % builtins) - @patch("%s.dump" % cPickle.__name__) - @patch("Bcfg2.Server.Plugin.ThreadedStatistics.run", Mock()) - def test_save(self, mock_dump, mock_open): - core = Mock() - ts = self.get_obj(core) - queue = Mock() - queue.empty = Mock(side_effect=Empty) - ts.work_queue = queue - - mock_open.side_effect = OSError - # test that save does _not_ raise an exception even when - # everything goes pear-shaped - ts._save() - queue.empty.assert_any_call() - mock_open.assert_called_with(ts.pending_file, 'w') - - queue.reset_mock() - mock_open.reset_mock() - - queue.data = [] - for hostname, xml in self.data: - md = Mock() - md.hostname = hostname - queue.data.append((md, lxml.etree.XML(xml))) - queue.empty.side_effect = lambda: len(queue.data) == 0 - queue.get_nowait = Mock(side_effect=lambda: queue.data.pop()) - mock_open.side_effect = None - - ts._save() - queue.empty.assert_any_call() - queue.get_nowait.assert_any_call() - 
mock_open.assert_called_with(ts.pending_file, 'w') - mock_open.return_value.close.assert_any_call() - # the order of the queue data gets changed, so we have to - # verify this call in an ugly way - self.assertItemsEqual(mock_dump.call_args[0][0], self.data) - self.assertEqual(mock_dump.call_args[0][1], mock_open.return_value) - - @patch("os.unlink") - @patch("os.path.exists") - @patch("%s.open" % builtins) - @patch("lxml.etree.XML") - @patch("%s.load" % cPickle.__name__) - @patch("Bcfg2.Server.Plugin.ThreadedStatistics.run", Mock()) - def test_load(self, mock_load, mock_XML, mock_open, mock_exists, - mock_unlink): - core = Mock() - core.terminate.isSet.return_value = False - ts = self.get_obj(core) - - ts.work_queue = Mock() - ts.work_queue.data = [] - def reset(): - core.reset_mock() - mock_open.reset_mock() - mock_exists.reset_mock() - mock_unlink.reset_mock() - mock_load.reset_mock() - mock_XML.reset_mock() - ts.work_queue.reset_mock() - ts.work_queue.data = [] - - mock_exists.return_value = False - self.assertTrue(ts._load()) - mock_exists.assert_called_with(ts.pending_file) - - reset() - mock_exists.return_value = True - mock_open.side_effect = OSError - self.assertFalse(ts._load()) - mock_exists.assert_called_with(ts.pending_file) - mock_open.assert_called_with(ts.pending_file, 'r') - - reset() - mock_open.side_effect = None - mock_load.return_value = self.data - ts.work_queue.put_nowait.side_effect = Full - self.assertTrue(ts._load()) - mock_exists.assert_called_with(ts.pending_file) - mock_open.assert_called_with(ts.pending_file, 'r') - mock_open.return_value.close.assert_any_call() - mock_load.assert_called_with(mock_open.return_value) - - reset() - core.build_metadata.side_effect = lambda x: x - mock_XML.side_effect = lambda x, parser=None: x - ts.work_queue.put_nowait.side_effect = None - self.assertTrue(ts._load()) - mock_exists.assert_called_with(ts.pending_file) - mock_open.assert_called_with(ts.pending_file, 'r') - 
mock_open.return_value.close.assert_any_call() - mock_load.assert_called_with(mock_open.return_value) - self.assertItemsEqual(mock_XML.call_args_list, - [call(x, parser=Bcfg2.Server.XMLParser) - for h, x in self.data]) - self.assertItemsEqual(ts.work_queue.put_nowait.call_args_list, - [call((h, x)) for h, x in self.data]) - mock_unlink.assert_called_with(ts.pending_file) - - @patch("threading.Thread.start", Mock()) - @patch("Bcfg2.Server.Plugin.ThreadedStatistics._load") - @patch("Bcfg2.Server.Plugin.ThreadedStatistics._save") - @patch("Bcfg2.Server.Plugin.ThreadedStatistics.handle_statistic") - def test_run(self, mock_handle, mock_save, mock_load): - core = Mock() - ts = self.get_obj(core) - mock_load.return_value = True - ts.work_queue = Mock() - - def reset(): - mock_handle.reset_mock() - mock_save.reset_mock() - mock_load.reset_mock() - core.reset_mock() - ts.work_queue.reset_mock() - ts.work_queue.data = self.data[:] - ts.work_queue.get_calls = 0 - - reset() - - def get_rv(**kwargs): - ts.work_queue.get_calls += 1 - try: - return ts.work_queue.data.pop() - except: - raise Empty - ts.work_queue.get.side_effect = get_rv - def terminate_isset(): - # this lets the loop go on a few iterations with an empty - # queue to test that it doesn't error out - return ts.work_queue.get_calls > 3 - core.terminate.isSet.side_effect = terminate_isset - - ts.work_queue.empty.return_value = False - ts.run() - mock_load.assert_any_call() - self.assertGreaterEqual(ts.work_queue.get.call_count, len(self.data)) - self.assertItemsEqual(mock_handle.call_args_list, - [call(h, x) for h, x in self.data]) - mock_save.assert_any_call() - - @patch("copy.copy", Mock(side_effect=lambda x: x)) - @patch("Bcfg2.Server.Plugin.ThreadedStatistics.run", Mock()) - def test_process_statistics(self): - core = Mock() - ts = self.get_obj(core) - ts.work_queue = Mock() - ts.process_statistics(*self.data[0]) - ts.work_queue.put_nowait.assert_called_with(self.data[0]) - - ts.work_queue.reset_mock() - 
ts.work_queue.put_nowait.side_effect = Full - # test that no exception is thrown - ts.process_statistics(*self.data[0]) - - def test_handle_statistic(self): - ts = self.get_obj() - self.assertRaises(NotImplementedError, - ts.handle_statistic, None, None) - - -class TestPullSource(Bcfg2TestCase): - def test_GetCurrentEntry(self): - ps = PullSource() - self.assertRaises(NotImplementedError, - ps.GetCurrentEntry, None, None, None) - - -class TestPullTarget(Bcfg2TestCase): - def test_AcceptChoices(self): - pt = PullTarget() - self.assertRaises(NotImplementedError, - pt.AcceptChoices, None, None) - - def test_AcceptPullData(self): - pt = PullTarget() - self.assertRaises(NotImplementedError, - pt.AcceptPullData, None, None, None) - - -class TestDecision(Bcfg2TestCase): - test_obj = Decision - - def get_obj(self): - return self.test_obj() - - def test_GetDecisions(self): - d = self.get_obj() - self.assertRaises(NotImplementedError, - d.GetDecisions, None, None) - - -class TestValidationError(Bcfg2TestCase): - """ placeholder for future tests """ - pass - - -class TestStructureValidator(Bcfg2TestCase): - def test_validate_structures(self): - sv = StructureValidator() - self.assertRaises(NotImplementedError, - sv.validate_structures, None, None) - - -class TestGoalValidator(Bcfg2TestCase): - def test_validate_goals(self): - gv = GoalValidator() - self.assertRaises(NotImplementedError, - gv.validate_goals, None, None) - - -class TestVersion(Bcfg2TestCase): - test_obj = Version - - def get_obj(self): - return self.test_obj() - - def test_get_revision(self): - d = self.get_obj() - self.assertRaises(NotImplementedError, d.get_revision) - - -class TestClientRunHooks(Bcfg2TestCase): - """ placeholder for future tests """ - pass - - -class TestFileBacked(Bcfg2TestCase): - test_obj = FileBacked - path = os.path.join(datastore, "test") - - def get_obj(self, path=None, fam=None): - if path is None: - path = self.path - return self.test_obj(path, fam=fam) - - @patch("%s.open" % 
builtins) - def test_HandleEvent(self, mock_open): - fb = self.get_obj() - fb.Index = Mock() - - def reset(): - fb.Index.reset_mock() - mock_open.reset_mock() - - for evt in ["exists", "changed", "created"]: - reset() - event = Mock() - event.code2str.return_value = evt - fb.HandleEvent(event) - mock_open.assert_called_with(self.path) - mock_open.return_value.read.assert_any_call() - fb.Index.assert_any_call() - - reset() - event = Mock() - event.code2str.return_value = "endExist" - fb.HandleEvent(event) - self.assertFalse(mock_open.called) - self.assertFalse(fb.Index.called) - - -class TestDirectoryBacked(Bcfg2TestCase): - test_obj = DirectoryBacked - testpaths = {1: '', - 2: '/foo', - 3: '/foo/bar', - 4: '/foo/bar/baz', - 5: 'quux', - 6: 'xyzzy/', - 7: 'xyzzy/plugh/'} - testfiles = ['foo', 'bar/baz.txt', 'plugh.py'] - ignore = [] # ignore no events - badevents = [] # DirectoryBacked handles all files, so there's no - # such thing as a bad event - - def test_child_interface(self): - # ensure that the child object has the correct interface - self.assertTrue(hasattr(self.test_obj.__child__, "HandleEvent")) - - @patch("Bcfg2.Server.Plugin.%s.add_directory_monitor" % test_obj.__name__, - Mock()) - def get_obj(self, fam=None): - if fam is None: - fam = Mock() - return self.test_obj(os.path.join(datastore, self.test_obj.__name__), - fam) - - @patch("Bcfg2.Server.Plugin.%s.add_directory_monitor" % test_obj.__name__) - def test__init(self, mock_add_monitor): - db = self.test_obj(datastore, Mock()) - mock_add_monitor.assert_called_with('') - - def test__getitem(self): - db = self.get_obj() - db.entries.update(dict(a=1, b=2, c=3)) - self.assertEqual(db['a'], 1) - self.assertEqual(db['b'], 2) - expected = KeyError - try: - db['d'] - except expected: - pass - except: - err = sys.exc_info()[1] - self.assertFalse(True, "%s raised instead of %s" % - (err.__class__.__name__, - expected.__class__.__name__)) - else: - self.assertFalse(True, - "%s not raised" % 
expected.__class__.__name__) - - def test__iter(self): - db = self.get_obj() - db.entries.update(dict(a=1, b=2, c=3)) - self.assertEqual([i for i in db], - [i for i in db.entries.items()]) - - @patch("os.path.isdir") - def test_add_directory_monitor(self, mock_isdir): - db = self.get_obj() - db.fam = Mock() - db.fam.rv = 0 - - def reset(): - db.fam.rv += 1 - db.fam.AddMonitor.return_value = db.fam.rv - db.fam.reset_mock() - mock_isdir.reset_mock() - - mock_isdir.return_value = True - for path in self.testpaths.values(): - reset() - db.add_directory_monitor(path) - db.fam.AddMonitor.assert_called_with(os.path.join(db.data, path), - db) - self.assertIn(db.fam.rv, db.handles) - self.assertEqual(db.handles[db.fam.rv], path) - - reset() - # test duplicate adds - for path in self.testpaths.values(): - reset() - db.add_directory_monitor(path) - self.assertFalse(db.fam.AddMonitor.called) - - reset() - mock_isdir.return_value = False - db.add_directory_monitor('bogus') - self.assertFalse(db.fam.AddMonitor.called) - self.assertNotIn(db.fam.rv, db.handles) - - def test_add_entry(self): - db = self.get_obj() - db.fam = Mock() - class MockChild(Mock): - def __init__(self, path, fam, **kwargs): - Mock.__init__(self, **kwargs) - self.path = path - self.fam = fam - self.HandleEvent = Mock() - db.__child__ = MockChild - - for path in self.testpaths.values(): - event = Mock() - db.add_entry(path, event) - self.assertIn(path, db.entries) - self.assertEqual(db.entries[path].path, - os.path.join(db.data, path)) - self.assertEqual(db.entries[path].fam, db.fam) - db.entries[path].HandleEvent.assert_called_with(event) - - @patch("os.path.isdir") - @patch("Bcfg2.Server.Plugin.%s.add_entry" % test_obj.__name__) - @patch("Bcfg2.Server.Plugin.%s.add_directory_monitor" % test_obj.__name__) - def test_HandleEvent(self, mock_add_monitor, mock_add_entry, mock_isdir): - db = self.get_obj() - # a path with a leading / should never get into - # DirectoryBacked.handles, so strip that test case - for 
rid, path in self.testpaths.items(): - path = path.lstrip('/') - db.handles[rid] = path - - def reset(): - mock_isdir.reset_mock() - mock_add_entry.reset_mock() - mock_add_monitor.reset_mock() - - def get_event(filename, action, requestID): - event = Mock() - event.code2str.return_value = action - event.filename = filename - event.requestID = requestID - return event - - # test events on the data directory itself - reset() - mock_isdir.return_value = True - event = get_event(db.data, "exists", 1) - db.HandleEvent(event) - mock_add_monitor.assert_called_with("") - - # test events on paths that aren't handled - reset() - mock_isdir.return_value = False - event = get_event('/' + self.testfiles[0], 'created', - max(self.testpaths.keys()) + 1) - db.HandleEvent(event) - self.assertFalse(mock_add_monitor.called) - self.assertFalse(mock_add_entry.called) - - for req_id, path in self.testpaths.items(): - # a path with a leading / should never get into - # DirectoryBacked.handles, so strip that test case - path = path.lstrip('/') - basepath = os.path.join(datastore, path) - for fname in self.testfiles: - relpath = os.path.join(path, fname) - abspath = os.path.join(basepath, fname) - - # test endExist does nothing - reset() - event = get_event(fname, 'endExist', req_id) - db.HandleEvent(event) - self.assertFalse(mock_add_monitor.called) - self.assertFalse(mock_add_entry.called) - - mock_isdir.return_value = True - for evt in ["created", "exists", "changed"]: - # test that creating or changing a directory works - reset() - event = get_event(fname, evt, req_id) - db.HandleEvent(event) - mock_add_monitor.assert_called_with(relpath) - self.assertFalse(mock_add_entry.called) - - mock_isdir.return_value = False - for evt in ["created", "exists"]: - # test that creating a file works - reset() - event = get_event(fname, evt, req_id) - db.HandleEvent(event) - mock_add_entry.assert_called_with(relpath, event) - self.assertFalse(mock_add_monitor.called) - db.entries[relpath] = 
MagicMock() - - # test that changing a file that already exists works - reset() - event = get_event(fname, "changed", req_id) - db.HandleEvent(event) - db.entries[relpath].HandleEvent.assert_called_with(event) - self.assertFalse(mock_add_monitor.called) - self.assertFalse(mock_add_entry.called) - - # test that deleting an entry works - reset() - event = get_event(fname, "deleted", req_id) - db.HandleEvent(event) - self.assertNotIn(relpath, db.entries) - - # test that changing a file that doesn't exist works - reset() - event = get_event(fname, "changed", req_id) - db.HandleEvent(event) - mock_add_entry.assert_called_with(relpath, event) - self.assertFalse(mock_add_monitor.called) - db.entries[relpath] = MagicMock() - - # test that deleting a directory works. this is a little - # strange because the _parent_ directory has to handle the - # deletion - reset() - event = get_event('quux', "deleted", 1) - db.HandleEvent(event) - for key in db.entries.keys(): - self.assertFalse(key.startswith('quux')) - - # test bad events - for fname in self.badevents: - reset() - event = get_event(fname, "created", 1) - db.HandleEvent(event) - self.assertFalse(mock_add_entry.called) - self.assertFalse(mock_add_monitor.called) - - # test ignored events - for fname in self.ignore: - reset() - event = get_event(fname, "created", 1) - db.HandleEvent(event) - self.assertFalse(mock_isdir.called, - msg="Failed to ignore %s" % fname) - self.assertFalse(mock_add_entry.called, - msg="Failed to ignore %s" % fname) - self.assertFalse(mock_add_monitor.called, - msg="Failed to ignore %s" % fname) - - -class TestXMLFileBacked(TestFileBacked): - test_obj = XMLFileBacked - path = os.path.join(datastore, "test", "test1.xml") - - def get_obj(self, path=None, fam=None, should_monitor=False): - if path is None: - path = self.path - return self.test_obj(path, fam=fam, should_monitor=should_monitor) - - def test__init(self): - fam = Mock() - xfb = self.get_obj() - self.assertIsNone(xfb.fam) - - xfb = 
self.get_obj(fam=fam) - self.assertFalse(fam.AddMonitor.called) - - fam.reset_mock() - xfb = self.get_obj(fam=fam, should_monitor=True) - fam.AddMonitor.assert_called_with(self.path, xfb) - - @patch("os.path.exists") - @patch("lxml.etree.parse") - def test_follow_xincludes(self, mock_parse, mock_exists): - xfb = self.get_obj() - xfb.add_monitor = Mock() - - def reset(): - xfb.add_monitor.reset_mock() - mock_parse.reset_mock() - mock_exists.reset_mock() - xfb.extras = [] - - mock_exists.return_value = True - xdata = dict() - mock_parse.side_effect = lambda p: xdata[p] - - # basic functionality - xdata['/test/test2.xml'] = lxml.etree.Element("Test").getroottree() - xfb._follow_xincludes(xdata=xdata['/test/test2.xml']) - self.assertFalse(xfb.add_monitor.called) - - if (not hasattr(self.test_obj, "xdata") or - not isinstance(self.test_obj.xdata, property)): - # if xdata is settable, test that method of getting data - # to _follow_xincludes - reset() - xfb.xdata = xdata['/test/test2.xml'].getroot() - xfb._follow_xincludes() - self.assertFalse(xfb.add_monitor.called) - xfb.xdata = None - - reset() - xfb._follow_xincludes(fname="/test/test2.xml") - self.assertFalse(xfb.add_monitor.called) - - # test one level of xinclude - xdata[self.path] = lxml.etree.Element("Test").getroottree() - lxml.etree.SubElement(xdata[self.path].getroot(), - Bcfg2.Server.XI_NAMESPACE + "include", - href="/test/test2.xml") - reset() - xfb._follow_xincludes(fname=self.path) - xfb.add_monitor.assert_called_with("/test/test2.xml") - self.assertItemsEqual(mock_parse.call_args_list, - [call(f) for f in xdata.keys()]) - mock_exists.assert_called_with("/test/test2.xml") - - reset() - xfb._follow_xincludes(fname=self.path, xdata=xdata[self.path]) - xfb.add_monitor.assert_called_with("/test/test2.xml") - self.assertItemsEqual(mock_parse.call_args_list, - [call(f) for f in xdata.keys() - if f != self.path]) - mock_exists.assert_called_with("/test/test2.xml") - - # test two-deep level of xinclude, with some 
files in another - # directory - xdata["/test/test3.xml"] = \ - lxml.etree.Element("Test").getroottree() - lxml.etree.SubElement(xdata["/test/test3.xml"].getroot(), - Bcfg2.Server.XI_NAMESPACE + "include", - href="/test/test_dir/test4.xml") - xdata["/test/test_dir/test4.xml"] = \ - lxml.etree.Element("Test").getroottree() - lxml.etree.SubElement(xdata["/test/test_dir/test4.xml"].getroot(), - Bcfg2.Server.XI_NAMESPACE + "include", - href="/test/test_dir/test5.xml") - xdata['/test/test_dir/test5.xml'] = \ - lxml.etree.Element("Test").getroottree() - xdata['/test/test_dir/test6.xml'] = \ - lxml.etree.Element("Test").getroottree() - # relative includes - lxml.etree.SubElement(xdata[self.path].getroot(), - Bcfg2.Server.XI_NAMESPACE + "include", - href="test3.xml") - lxml.etree.SubElement(xdata["/test/test3.xml"].getroot(), - Bcfg2.Server.XI_NAMESPACE + "include", - href="test_dir/test6.xml") - - reset() - xfb._follow_xincludes(fname=self.path) - self.assertItemsEqual(xfb.add_monitor.call_args_list, - [call(f) for f in xdata.keys() if f != self.path]) - self.assertItemsEqual(mock_parse.call_args_list, - [call(f) for f in xdata.keys()]) - self.assertItemsEqual(mock_exists.call_args_list, - [call(f) for f in xdata.keys() if f != self.path]) - - reset() - xfb._follow_xincludes(fname=self.path, xdata=xdata[self.path]) - self.assertItemsEqual(xfb.add_monitor.call_args_list, - [call(f) for f in xdata.keys() if f != self.path]) - self.assertItemsEqual(mock_parse.call_args_list, - [call(f) for f in xdata.keys() if f != self.path]) - self.assertItemsEqual(mock_exists.call_args_list, - [call(f) for f in xdata.keys() if f != self.path]) - - @patch("lxml.etree._ElementTree", FakeElementTree) - @patch("Bcfg2.Server.Plugin.%s._follow_xincludes" % test_obj.__name__) - def test_Index(self, mock_follow): - xfb = self.get_obj() - - def reset(): - mock_follow.reset_mock() - FakeElementTree.xinclude.reset_mock() - xfb.extras = [] - xfb.xdata = None - - # syntax error - xfb.data = "<" - 
self.assertRaises(PluginInitError, xfb.Index) - - # no xinclude - reset() - xdata = lxml.etree.Element("Test", name="test") - children = [lxml.etree.SubElement(xdata, "Foo"), - lxml.etree.SubElement(xdata, "Bar", name="bar")] - xfb.data = tostring(xdata) - xfb.Index() - mock_follow.assert_any_call() - try: - self.assertEqual(xfb.xdata.base, self.path) - except AttributeError: - # python 2.4 and/or lxml 2.0 don't store the base_url in - # .base -- no idea where it's stored. - pass - self.assertItemsEqual([tostring(e) for e in xfb.entries], - [tostring(e) for e in children]) - - # with xincludes - reset() - mock_follow.side_effect = \ - lambda: xfb.extras.extend(["/test/test2.xml", - "/test/test_dir/test3.xml"]) - children.extend([ - lxml.etree.SubElement(xdata, - Bcfg2.Server.XI_NAMESPACE + "include", - href="/test/test2.xml"), - lxml.etree.SubElement(xdata, - Bcfg2.Server.XI_NAMESPACE + "include", - href="/test/test_dir/test3.xml")]) - test2 = lxml.etree.Element("Test", name="test2") - lxml.etree.SubElement(test2, "Baz") - test3 = lxml.etree.Element("Test", name="test3") - replacements = {"/test/test2.xml": test2, - "/test/test_dir/test3.xml": test3} - def xinclude(): - for el in xfb.xdata.findall('//%sinclude' % - Bcfg2.Server.XI_NAMESPACE): - xfb.xdata.replace(el, replacements[el.get("href")]) - FakeElementTree.xinclude.side_effect = xinclude - - xfb.data = tostring(xdata) - xfb.Index() - mock_follow.assert_any_call() - FakeElementTree.xinclude.assert_any_call - try: - self.assertEqual(xfb.xdata.base, self.path) - except AttributeError: - pass - self.assertItemsEqual([tostring(e) for e in xfb.entries], - [tostring(e) for e in children]) - - def test_add_monitor(self): - xfb = self.get_obj() - xfb.add_monitor("/test/test2.xml") - self.assertIn("/test/test2.xml", xfb.extras) - - fam = Mock() - xfb = self.get_obj(fam=fam) - fam.reset_mock() - xfb.add_monitor("/test/test3.xml") - self.assertFalse(fam.AddMonitor.called) - self.assertIn("/test/test3.xml", xfb.extras) - 
- fam.reset_mock() - xfb = self.get_obj(fam=fam, should_monitor=True) - xfb.add_monitor("/test/test4.xml") - fam.AddMonitor.assert_called_with("/test/test4.xml", xfb) - self.assertIn("/test/test4.xml", xfb.extras) - - -class TestStructFile(TestXMLFileBacked): - test_obj = StructFile - - def _get_test_data(self): - """ build a very complex set of test data """ - # top-level group and client elements - groups = dict() - # group and client elements that are descendents of other group or - # client elements - subgroups = dict() - # children of elements in `groups' that should be included in - # match results - children = dict() - # children of elements in `subgroups' that should be included in - # match results - subchildren = dict() - # top-level tags that are not group elements - standalone = [] - xdata = lxml.etree.Element("Test", name="test") - groups[0] = lxml.etree.SubElement(xdata, "Group", name="group1", - include="true") - children[0] = [lxml.etree.SubElement(groups[0], "Child", name="c1"), - lxml.etree.SubElement(groups[0], "Child", name="c2")] - subgroups[0] = [lxml.etree.SubElement(groups[0], "Group", - name="subgroup1", include="true"), - lxml.etree.SubElement(groups[0], - "Client", name="client1", - include="false")] - subchildren[0] = \ - [lxml.etree.SubElement(subgroups[0][0], "Child", name="sc1"), - lxml.etree.SubElement(subgroups[0][0], "Child", name="sc2", - attr="some attr"), - lxml.etree.SubElement(subgroups[0][0], "Child", name="sc3")] - lxml.etree.SubElement(subchildren[0][-1], "SubChild", name="subchild") - lxml.etree.SubElement(subgroups[0][1], "Child", name="sc4") - - groups[1] = lxml.etree.SubElement(xdata, "Group", name="group2", - include="false") - children[1] = [] - subgroups[1] = [] - subchildren[1] = [] - lxml.etree.SubElement(groups[1], "Child", name="c3") - lxml.etree.SubElement(groups[1], "Child", name="c4") - - standalone.append(lxml.etree.SubElement(xdata, "Standalone", name="s1")) - - groups[2] = lxml.etree.SubElement(xdata, 
"Client", name="client2", - include="false") - children[2] = [] - subgroups[2] = [] - subchildren[2] = [] - lxml.etree.SubElement(groups[2], "Child", name="c5") - lxml.etree.SubElement(groups[2], "Child", name="c6") - - standalone.append(lxml.etree.SubElement(xdata, "Standalone", name="s2", - attr="some attr")) - - groups[3] = lxml.etree.SubElement(xdata, "Client", name="client3", - include="true") - children[3] = [lxml.etree.SubElement(groups[3], "Child", name="c7", - attr="some_attr"), - lxml.etree.SubElement(groups[3], "Child", name="c8")] - subgroups[3] = [] - subchildren[3] = [] - lxml.etree.SubElement(children[3][-1], "SubChild", name="subchild") - - standalone.append(lxml.etree.SubElement(xdata, "Standalone", name="s3")) - lxml.etree.SubElement(standalone[-1], "SubStandalone", name="sub1") - - children[4] = standalone - return (xdata, groups, subgroups, children, subchildren, standalone) - - def test_include_element(self): - sf = self.get_obj() - metadata = Mock() - metadata.groups = ["group1", "group2"] - metadata.hostname = "foo.example.com" - - inc = lambda tag, **attrs: \ - sf._include_element(lxml.etree.Element(tag, **attrs), metadata) - - self.assertFalse(sf._include_element(lxml.etree.Comment("test"), - metadata)) - - self.assertFalse(inc("Group", name="group3")) - self.assertFalse(inc("Group", name="group2", negate="true")) - self.assertFalse(inc("Group", name="group2", negate="tRuE")) - self.assertTrue(inc("Group", name="group2")) - self.assertTrue(inc("Group", name="group2", negate="false")) - self.assertTrue(inc("Group", name="group2", negate="faLSe")) - self.assertTrue(inc("Group", name="group3", negate="true")) - self.assertTrue(inc("Group", name="group3", negate="tRUe")) - - self.assertFalse(inc("Client", name="bogus.example.com")) - self.assertFalse(inc("Client", name="foo.example.com", negate="true")) - self.assertFalse(inc("Client", name="foo.example.com", negate="tRuE")) - self.assertTrue(inc("Client", name="foo.example.com")) - 
self.assertTrue(inc("Client", name="foo.example.com", negate="false")) - self.assertTrue(inc("Client", name="foo.example.com", negate="faLSe")) - self.assertTrue(inc("Client", name="bogus.example.com", negate="true")) - self.assertTrue(inc("Client", name="bogus.example.com", negate="tRUe")) - - self.assertTrue(inc("Other")) - - @patch("Bcfg2.Server.Plugin.%s._include_element" % test_obj.__name__) - def test__match(self, mock_include): - sf = self.get_obj() - metadata = Mock() - - (xdata, groups, subgroups, children, subchildren, standalone) = \ - self._get_test_data() - - mock_include.side_effect = \ - lambda x, _: (x.tag not in ['Client', 'Group'] or - x.get("include") == "true") - - for i, group in groups.items(): - actual = sf._match(group, metadata) - expected = children[i] + subchildren[i] - self.assertEqual(len(actual), len(expected)) - # easiest way to compare the values is actually to make - # them into an XML document and let assertXMLEqual compare - # them - xactual = lxml.etree.Element("Container") - xactual.extend(actual) - xexpected = lxml.etree.Element("Container") - xexpected.extend(expected) - self.assertXMLEqual(xactual, xexpected) - - for el in standalone: - self.assertXMLEqual(el, sf._match(el, metadata)[0]) - - @patch("Bcfg2.Server.Plugin.%s._match" % test_obj.__name__) - def test_Match(self, mock_match): - sf = self.get_obj() - metadata = Mock() - - (xdata, groups, subgroups, children, subchildren, standalone) = \ - self._get_test_data() - sf.entries.extend(copy.deepcopy(xdata).getchildren()) - - def match_rv(el, _): - if el.tag not in ['Client', 'Group']: - return [el] - elif x.get("include") == "true": - return el.getchildren() - else: - return [] - mock_match.side_effect = match_rv - actual = sf.Match(metadata) - expected = reduce(lambda x, y: x + y, - list(children.values()) + list(subgroups.values())) - self.assertEqual(len(actual), len(expected)) - # easiest way to compare the values is actually to make - # them into an XML document and 
let assertXMLEqual compare - # them - xactual = lxml.etree.Element("Container") - xactual.extend(actual) - xexpected = lxml.etree.Element("Container") - xexpected.extend(expected) - self.assertXMLEqual(xactual, xexpected) - - @patch("Bcfg2.Server.Plugin.%s._include_element" % test_obj.__name__) - def test__xml_match(self, mock_include): - sf = self.get_obj() - metadata = Mock() - - (xdata, groups, subgroups, children, subchildren, standalone) = \ - self._get_test_data() - - mock_include.side_effect = \ - lambda x, _: (x.tag not in ['Client', 'Group'] or - x.get("include") == "true") - - actual = copy.deepcopy(xdata) - for el in actual.getchildren(): - sf._xml_match(el, metadata) - expected = lxml.etree.Element(xdata.tag, **dict(xdata.attrib)) - expected.text = xdata.text - expected.extend(reduce(lambda x, y: x + y, - list(children.values()) + list(subchildren.values()))) - expected.extend(standalone) - self.assertXMLEqual(actual, expected) - - @patch("Bcfg2.Server.Plugin.%s._xml_match" % test_obj.__name__) - def test_Match(self, mock_xml_match): - sf = self.get_obj() - metadata = Mock() - - (sf.xdata, groups, subgroups, children, subchildren, standalone) = \ - self._get_test_data() - - sf.XMLMatch(metadata) - actual = [] - for call in mock_xml_match.call_args_list: - actual.append(call[0][0]) - self.assertEqual(call[0][1], metadata) - expected = list(groups.values()) + standalone - # easiest way to compare the values is actually to make - # them into an XML document and let assertXMLEqual compare - # them - xactual = lxml.etree.Element("Container") - xactual.extend(actual) - xexpected = lxml.etree.Element("Container") - xexpected.extend(expected) - self.assertXMLEqual(xactual, xexpected) - - -class TestINode(Bcfg2TestCase): - test_obj = INode - - # INode.__init__ and INode._load_children() call each other - # recursively, which makes this class kind of a nightmare to test. 
- # we have to first patch INode._load_children so that we can - # create an INode object with no children loaded, then we unpatch - # INode._load_children and patch INode.__init__ so that child - # objects aren't actually created. but in order to test things - # atomically, we do this umpteen times in order to test with - # different data. this convenience method makes this a little - # easier. fun fun fun. - @patch("Bcfg2.Server.Plugin.%s._load_children" % test_obj.__name__, Mock()) - def _get_inode(self, data, idict): - return self.test_obj(data, idict) - - def test_raw_predicates(self): - metadata = Mock() - metadata.groups = ["group1", "group2"] - metadata.hostname = "foo.example.com" - entry = None - - parent_predicate = lambda m, e: True - pred = eval(self.test_obj.raw['Client'] % dict(name="foo.example.com"), - dict(predicate=parent_predicate)) - self.assertTrue(pred(metadata, entry)) - pred = eval(self.test_obj.raw['Client'] % dict(name="bar.example.com"), - dict(predicate=parent_predicate)) - self.assertFalse(pred(metadata, entry)) - - pred = eval(self.test_obj.raw['Group'] % dict(name="group1"), - dict(predicate=parent_predicate)) - self.assertTrue(pred(metadata, entry)) - pred = eval(self.test_obj.raw['Group'] % dict(name="group3"), - dict(predicate=parent_predicate)) - self.assertFalse(pred(metadata, entry)) - - pred = eval(self.test_obj.nraw['Client'] % dict(name="foo.example.com"), - dict(predicate=parent_predicate)) - self.assertFalse(pred(metadata, entry)) - pred = eval(self.test_obj.nraw['Client'] % dict(name="bar.example.com"), - dict(predicate=parent_predicate)) - self.assertTrue(pred(metadata, entry)) - - pred = eval(self.test_obj.nraw['Group'] % dict(name="group1"), - dict(predicate=parent_predicate)) - self.assertFalse(pred(metadata, entry)) - pred = eval(self.test_obj.nraw['Group'] % dict(name="group3"), - dict(predicate=parent_predicate)) - self.assertTrue(pred(metadata, entry)) - - parent_predicate = lambda m, e: False - pred = 
eval(self.test_obj.raw['Client'] % dict(name="foo.example.com"), - dict(predicate=parent_predicate)) - self.assertFalse(pred(metadata, entry)) - pred = eval(self.test_obj.raw['Group'] % dict(name="group1"), - dict(predicate=parent_predicate)) - self.assertFalse(pred(metadata, entry)) - pred = eval(self.test_obj.nraw['Client'] % dict(name="bar.example.com"), - dict(predicate=parent_predicate)) - self.assertFalse(pred(metadata, entry)) - pred = eval(self.test_obj.nraw['Group'] % dict(name="group3"), - dict(predicate=parent_predicate)) - self.assertFalse(pred(metadata, entry)) - - self.assertItemsEqual(self.test_obj.containers, - self.test_obj.raw.keys()) - self.assertItemsEqual(self.test_obj.containers, - self.test_obj.nraw.keys()) - - @patch("Bcfg2.Server.Plugin.INode._load_children") - def test__init(self, mock_load_children): - data = lxml.etree.Element("Bogus") - # called with no parent, should not raise an exception; it's a - # top-level tag in an XML file and so is not expected to be a - # proper predicate - INode(data, dict()) - self.assertRaises(PluginExecutionError, - INode, data, dict(), Mock()) - - data = lxml.etree.Element("Client", name="foo.example.com") - idict = dict() - inode = INode(data, idict) - mock_load_children.assert_called_with(data, idict) - self.assertTrue(inode.predicate(Mock(), Mock())) - - parent = Mock() - parent.predicate = lambda m, e: True - metadata = Mock() - metadata.groups = ["group1", "group2"] - metadata.hostname = "foo.example.com" - entry = None - - # test setting predicate with parent object - mock_load_children.reset_mock() - inode = INode(data, idict, parent=parent) - mock_load_children.assert_called_with(data, idict) - self.assertTrue(inode.predicate(metadata, entry)) - - # test negation - data = lxml.etree.Element("Client", name="foo.example.com", - negate="true") - mock_load_children.reset_mock() - inode = INode(data, idict, parent=parent) - mock_load_children.assert_called_with(data, idict) - 
self.assertFalse(inode.predicate(metadata, entry)) - - # test failure of a matching predicate (client names do not match) - data = lxml.etree.Element("Client", name="foo.example.com") - metadata.hostname = "bar.example.com" - mock_load_children.reset_mock() - inode = INode(data, idict, parent=parent) - mock_load_children.assert_called_with(data, idict) - self.assertFalse(inode.predicate(metadata, entry)) - - # test that parent predicate is AND'ed in correctly - parent.predicate = lambda m, e: False - metadata.hostname = "foo.example.com" - mock_load_children.reset_mock() - inode = INode(data, idict, parent=parent) - mock_load_children.assert_called_with(data, idict) - self.assertFalse(inode.predicate(metadata, entry)) - - def test_load_children(self): - data = lxml.etree.Element("Parent") - child1 = lxml.etree.SubElement(data, "Client", name="foo.example.com") - child2 = lxml.etree.SubElement(data, "Group", name="bar", negate="true") - idict = dict() - - inode = self._get_inode(data, idict) - - @patch("Bcfg2.Server.Plugin.%s.__init__" % inode.__class__.__name__) - def inner(mock_init): - mock_init.return_value = None - inode._load_children(data, idict) - self.assertItemsEqual(mock_init.call_args_list, - [call(child1, idict, inode), - call(child2, idict, inode)]) - self.assertEqual(idict, dict()) - self.assertItemsEqual(inode.contents, dict()) - - inner() - - data = lxml.etree.Element("Parent") - child1 = lxml.etree.SubElement(data, "Data", name="child1", - attr="some attr") - child1.text = "text" - subchild1 = lxml.etree.SubElement(child1, "SubChild", name="subchild") - child2 = lxml.etree.SubElement(data, "Group", name="bar", negate="true") - idict = dict() - - inode = self._get_inode(data, idict) - inode.ignore = [] - - @patch("Bcfg2.Server.Plugin.%s.__init__" % inode.__class__.__name__) - def inner2(mock_init): - mock_init.return_value = None - inode._load_children(data, idict) - mock_init.assert_called_with(child2, idict, inode) - tag = child1.tag - name = 
child1.get("name") - self.assertEqual(idict, dict(Data=[name])) - self.assertIn(tag, inode.contents) - self.assertIn(name, inode.contents[tag]) - self.assertItemsEqual(inode.contents[tag][name], - dict(name=name, - attr=child1.get('attr'), - __text__=child1.text, - __children__=[subchild1])) - - inner2() - - # test ignore. no ignore is set on INode by default, so we - # have to set one - old_ignore = copy.copy(self.test_obj.ignore) - self.test_obj.ignore.append("Data") - idict = dict() - - inode = self._get_inode(data, idict) - - @patch("Bcfg2.Server.Plugin.%s.__init__" % inode.__class__.__name__) - def inner3(mock_init): - mock_init.return_value = None - inode._load_children(data, idict) - mock_init.assert_called_with(child2, idict, inode) - self.assertEqual(idict, dict()) - self.assertItemsEqual(inode.contents, dict()) - - inner3() - self.test_obj.ignore = old_ignore - - def test_Match(self): - idata = lxml.etree.Element("Parent") - contents = lxml.etree.SubElement(idata, "Data", name="contents", - attr="some attr") - child = lxml.etree.SubElement(idata, "Group", name="bar", negate="true") - - inode = INode(idata, dict()) - inode.predicate = Mock() - inode.predicate.return_value = False - - metadata = Mock() - metadata.groups = ['foo'] - data = dict() - entry = child - - inode.Match(metadata, data, entry=child) - self.assertEqual(data, dict()) - inode.predicate.assert_called_with(metadata, child) - - inode.predicate.reset_mock() - inode.Match(metadata, data) - self.assertEqual(data, dict()) - # can't easily compare XML args without the original - # object, and we're testing that Match() works without an - # XML object passed in, so... 
- self.assertEqual(inode.predicate.call_args[0][0], - metadata) - self.assertXMLEqual(inode.predicate.call_args[0][1], - lxml.etree.Element("None")) - - inode.predicate.reset_mock() - inode.predicate.return_value = True - inode.Match(metadata, data, entry=child) - self.assertEqual(data, inode.contents) - inode.predicate.assert_called_with(metadata, child) - - -class TestInfoNode(TestINode): - __test__ = True - test_obj = InfoNode - - def test_raw_predicates(self): - TestINode.test_raw_predicates(self) - metadata = Mock() - entry = lxml.etree.Element("Path", name="/tmp/foo", - realname="/tmp/bar") - - parent_predicate = lambda m, d: True - pred = eval(self.test_obj.raw['Path'] % dict(name="/tmp/foo"), - dict(predicate=parent_predicate)) - self.assertTrue(pred(metadata, entry)) - pred = eval(InfoNode.raw['Path'] % dict(name="/tmp/bar"), - dict(predicate=parent_predicate)) - self.assertTrue(pred(metadata, entry)) - pred = eval(InfoNode.raw['Path'] % dict(name="/tmp/bogus"), - dict(predicate=parent_predicate)) - self.assertFalse(pred(metadata, entry)) - - pred = eval(self.test_obj.nraw['Path'] % dict(name="/tmp/foo"), - dict(predicate=parent_predicate)) - self.assertFalse(pred(metadata, entry)) - pred = eval(InfoNode.nraw['Path'] % dict(name="/tmp/bar"), - dict(predicate=parent_predicate)) - self.assertFalse(pred(metadata, entry)) - pred = eval(InfoNode.nraw['Path'] % dict(name="/tmp/bogus"), - dict(predicate=parent_predicate)) - self.assertTrue(pred(metadata, entry)) - - parent_predicate = lambda m, d: False - pred = eval(self.test_obj.raw['Path'] % dict(name="/tmp/foo"), - dict(predicate=parent_predicate)) - self.assertFalse(pred(metadata, entry)) - pred = eval(InfoNode.raw['Path'] % dict(name="/tmp/bar"), - dict(predicate=parent_predicate)) - self.assertFalse(pred(metadata, entry)) - pred = eval(InfoNode.nraw['Path'] % dict(name="/tmp/bogus"), - dict(predicate=parent_predicate)) - self.assertFalse(pred(metadata, entry)) - - -class TestXMLSrc(TestXMLFileBacked): - 
test_obj = XMLSrc - - def test_node_interface(self): - # ensure that the node object has the necessary interface - self.assertTrue(hasattr(self.test_obj.__node__, "Match")) - - @patch("%s.open" % builtins) - def test_HandleEvent(self, mock_open): - xdata = lxml.etree.Element("Test") - lxml.etree.SubElement(xdata, "Path", name="path", attr="whatever") - - xsrc = self.get_obj("/test/foo.xml") - xsrc.__node__ = Mock() - mock_open.return_value.read.return_value = tostring(xdata) - - if xsrc.__priority_required__: - # test with no priority at all - self.assertRaises(PluginExecutionError, - xsrc.HandleEvent, Mock()) - - # test with bogus priority - xdata.set("priority", "cow") - mock_open.return_value.read.return_value = tostring(xdata) - self.assertRaises(PluginExecutionError, - xsrc.HandleEvent, Mock()) - - # assign a priority to use in future tests - xdata.set("priority", "10") - mock_open.return_value.read.return_value = tostring(xdata) - - mock_open.reset_mock() - xsrc = self.get_obj("/test/foo.xml") - xsrc.__node__ = Mock() - xsrc.HandleEvent(Mock()) - mock_open.assert_called_with("/test/foo.xml") - mock_open.return_value.read.assert_any_call() - self.assertXMLEqual(xsrc.__node__.call_args[0][0], xdata) - self.assertEqual(xsrc.__node__.call_args[0][1], dict()) - self.assertEqual(xsrc.pnode, xsrc.__node__.return_value) - self.assertEqual(xsrc.cache, None) - - @patch("Bcfg2.Server.Plugin.XMLSrc.HandleEvent") - def test_Cache(self, mock_HandleEvent): - xsrc = self.get_obj("/test/foo.xml") - metadata = Mock() - xsrc.Cache(metadata) - mock_HandleEvent.assert_any_call() - - xsrc.pnode = Mock() - xsrc.Cache(metadata) - xsrc.pnode.Match.assert_called_with(metadata, xsrc.__cacheobj__()) - self.assertEqual(xsrc.cache[0], metadata) - - xsrc.pnode.reset_mock() - xsrc.Cache(metadata) - self.assertFalse(xsrc.pnode.Mock.called) - self.assertEqual(xsrc.cache[0], metadata) - - xsrc.cache = ("bogus") - xsrc.Cache(metadata) - xsrc.pnode.Match.assert_called_with(metadata, 
xsrc.__cacheobj__()) - self.assertEqual(xsrc.cache[0], metadata) - - -class TestInfoXML(TestXMLSrc): - test_obj = InfoXML - - -class TestXMLDirectoryBacked(TestDirectoryBacked): - test_obj = XMLDirectoryBacked - testfiles = ['foo.xml', 'bar/baz.xml', 'plugh.plugh.xml'] - badpaths = ["foo", "foo.txt", "foo.xsd", "xml"] - - -class TestPrioDir(TestPlugin, TestGenerator, TestXMLDirectoryBacked): - test_obj = PrioDir - - @patch("Bcfg2.Server.Plugin.%s.add_directory_monitor" % test_obj.__name__, - Mock()) - def get_obj(self, core=None): - if core is None: - core = Mock() - return self.test_obj(core, datastore) - - def test_HandleEvent(self): - TestXMLDirectoryBacked.test_HandleEvent(self) - - @patch("Bcfg2.Server.Plugin.XMLDirectoryBacked.HandleEvent", Mock()) - def inner(): - pd = self.get_obj() - test1 = Mock() - test1.items = dict(Path=["/etc/foo.conf", "/etc/bar.conf"]) - test2 = Mock() - test2.items = dict(Path=["/etc/baz.conf"], - Package=["quux", "xyzzy"]) - pd.entries = {"/test1.xml": test1, - "/test2.xml": test2} - pd.HandleEvent(Mock()) - self.assertItemsEqual(pd.Entries, - dict(Path={"/etc/foo.conf": pd.BindEntry, - "/etc/bar.conf": pd.BindEntry, - "/etc/baz.conf": pd.BindEntry}, - Package={"quux": pd.BindEntry, - "xyzzy": pd.BindEntry})) - - inner() - - def test__matches(self): - pd = self.get_obj() - self.assertTrue(pd._matches(lxml.etree.Element("Test", - name="/etc/foo.conf"), - Mock(), - {"/etc/foo.conf": pd.BindEntry, - "/etc/bar.conf": pd.BindEntry})) - self.assertFalse(pd._matches(lxml.etree.Element("Test", - name="/etc/baz.conf"), - Mock(), - {"/etc/foo.conf": pd.BindEntry, - "/etc/bar.conf": pd.BindEntry})) - - def test_BindEntry(self): - pd = self.get_obj() - pd.get_attrs = Mock(return_value=dict(test1="test1", test2="test2")) - entry = lxml.etree.Element("Path", name="/etc/foo.conf", test1="bogus") - metadata = Mock() - pd.BindEntry(entry, metadata) - pd.get_attrs.assert_called_with(entry, metadata) - self.assertItemsEqual(entry.attrib, - 
dict(name="/etc/foo.conf", - test1="test1", test2="test2")) - - def test_get_attrs(self): - pd = self.get_obj() - entry = lxml.etree.Element("Path", name="/etc/foo.conf") - children = [lxml.etree.Element("Child")] - metadata = Mock() - pd.entries = dict() - - def reset(): - metadata.reset_mock() - for src in pd.entries.values(): - src.reset_mock() - src.cache = None - - # test with no matches - self.assertRaises(PluginExecutionError, - pd.get_attrs, entry, metadata) - - def add_entry(name, data, prio=10): - path = os.path.join(pd.data, name) - pd.entries[path] = Mock() - pd.entries[path].priority = prio - def do_Cache(metadata): - pd.entries[path].cache = (metadata, data) - pd.entries[path].Cache.side_effect = do_Cache - - add_entry('test1.xml', - dict(Path={'/etc/foo.conf': dict(attr="attr1", - __children__=children), - '/etc/bar.conf': dict()})) - add_entry('test2.xml', - dict(Path={'/etc/bar.conf': dict(__text__="text", - attr="attr1")}, - Package={'quux': dict(), - 'xyzzy': dict()}), - prio=20) - add_entry('test3.xml', - dict(Path={'/etc/baz.conf': dict()}, - Package={'xyzzy': dict()}), - prio=20) - - # test with exactly one match, __children__ - reset() - self.assertItemsEqual(pd.get_attrs(entry, metadata), - dict(attr="attr1")) - for src in pd.entries.values(): - src.Cache.assert_called_with(metadata) - self.assertEqual(len(entry.getchildren()), 1) - self.assertXMLEqual(entry.getchildren()[0], children[0]) - - # test with multiple matches with different priorities, __text__ - reset() - entry = lxml.etree.Element("Path", name="/etc/bar.conf") - self.assertItemsEqual(pd.get_attrs(entry, metadata), - dict(attr="attr1")) - for src in pd.entries.values(): - src.Cache.assert_called_with(metadata) - self.assertEqual(entry.text, "text") - - # test with multiple matches with identical priorities - reset() - entry = lxml.etree.Element("Package", name="xyzzy") - self.assertRaises(PluginExecutionError, - pd.get_attrs, entry, metadata) - - -class 
TestSpecificityError(Bcfg2TestCase): - """ placeholder for future tests """ - pass - - -class TestSpecificity(Bcfg2TestCase): - test_obj = Specificity - - def get_obj(self, **kwargs): - return self.test_obj(**kwargs) - - def test_matches(self): - metadata = Mock() - metadata.hostname = "foo.example.com" - metadata.groups = ["group1", "group2"] - self.assertTrue(self.get_obj(all=True).matches(metadata)) - self.assertTrue(self.get_obj(group="group1").matches(metadata)) - self.assertTrue(self.get_obj(hostname="foo.example.com").matches(metadata)) - self.assertFalse(self.get_obj().matches(metadata)) - self.assertFalse(self.get_obj(group="group3").matches(metadata)) - self.assertFalse(self.get_obj(hostname="bar.example.com").matches(metadata)) - - def test__cmp(self): - specs = [self.get_obj(all=True), - self.get_obj(group="group1", prio=10), - self.get_obj(group="group1", prio=20), - self.get_obj(hostname="foo.example.com")] - - for i in range(len(specs)): - for j in range(len(specs)): - if i == j: - self.assertEqual(0, specs[i].__cmp__(specs[j])) - self.assertEqual(0, specs[j].__cmp__(specs[i])) - elif i > j: - self.assertEqual(-1, specs[i].__cmp__(specs[j])) - self.assertEqual(1, specs[j].__cmp__(specs[i])) - elif i < j: - self.assertEqual(1, specs[i].__cmp__(specs[j])) - self.assertEqual(-1, specs[j].__cmp__(specs[i])) - - def test_cmp(self): - """ test __lt__/__gt__/__eq__ """ - specs = [self.get_obj(all=True), - self.get_obj(group="group1", prio=10), - self.get_obj(group="group1", prio=20), - self.get_obj(hostname="foo.example.com")] - - for i in range(len(specs)): - for j in range(len(specs)): - if i < j: - self.assertGreater(specs[i], specs[j]) - self.assertLess(specs[j], specs[i]) - self.assertGreaterEqual(specs[i], specs[j]) - self.assertLessEqual(specs[j], specs[i]) - elif i == j: - self.assertEqual(specs[i], specs[j]) - self.assertEqual(specs[j], specs[i]) - self.assertLessEqual(specs[i], specs[j]) - self.assertGreaterEqual(specs[j], specs[i]) - elif i > j: 
- self.assertLess(specs[i], specs[j]) - self.assertGreater(specs[j], specs[i]) - self.assertLessEqual(specs[i], specs[j]) - self.assertGreaterEqual(specs[j], specs[i]) - - -class TestSpecificData(Bcfg2TestCase): - test_obj = SpecificData - path = os.path.join(datastore, "test.txt") - - def get_obj(self, name=None, specific=None, encoding=None): - if name is None: - name = self.path - if specific is None: - specific = Mock() - return self.test_obj(name, specific, encoding) - - @patch("%s.open" % builtins) - def test_handle_event(self, mock_open): - event = Mock() - event.code2str.return_value = 'deleted' - sd = self.get_obj() - sd.handle_event(event) - self.assertFalse(mock_open.called) - if hasattr(sd, 'data'): - self.assertIsNone(sd.data) - else: - self.assertFalse(hasattr(sd, 'data')) - - event = Mock() - mock_open.return_value.read.return_value = "test" - sd.handle_event(event) - mock_open.assert_called_with(self.path) - mock_open.return_value.read.assert_any_call() - self.assertEqual(sd.data, "test") - - -class TestEntrySet(TestDebuggable): - test_obj = EntrySet - # filenames that should be matched successfully by the EntrySet - # 'specific' regex. 
these are filenames alone -- a specificity - # will be added to these - basenames = ["test", "test.py", "test with spaces.txt", - "test.multiple.dots.py", "test_underscores.and.dots", - "really_misleading.G10_test", - "name$with*regex(special){chars}", - "misleading.H_hostname.test.com"] - # filenames that do not match any of the basenames (or the - # basename regex, if applicable) - bogus_names = ["bogus"] - # filenames that should be ignored - ignore = ["foo~", ".#foo", ".foo.swp", ".foo.swx", - "test.txt.genshi_include", "test.G_foo.genshi_include"] - - - def get_obj(self, basename="test", path=datastore, entry_type=MagicMock(), - encoding=None): - return self.test_obj(basename, path, entry_type, encoding) - - def test__init(self): - for basename in self.basenames: - eset = self.get_obj(basename=basename) - self.assertIsInstance(eset.specific, re_type) - self.assertTrue(eset.specific.match(os.path.join(datastore, - basename))) - ppath = os.path.join(datastore, "Plugin", basename) - self.assertTrue(eset.specific.match(ppath)) - self.assertTrue(eset.specific.match(ppath + ".G20_foo")) - self.assertTrue(eset.specific.match(ppath + ".G1_foo")) - self.assertTrue(eset.specific.match(ppath + ".G32768_foo")) - # a group named '_' - self.assertTrue(eset.specific.match(ppath + ".G10__")) - self.assertTrue(eset.specific.match(ppath + ".H_hostname")) - self.assertTrue(eset.specific.match(ppath + ".H_fqdn.subdomain.example.com")) - self.assertTrue(eset.specific.match(ppath + ".G20_group_with_underscores")) - - self.assertFalse(eset.specific.match(ppath + ".G20_group with spaces")) - self.assertFalse(eset.specific.match(ppath + ".G_foo")) - self.assertFalse(eset.specific.match(ppath + ".G_")) - self.assertFalse(eset.specific.match(ppath + ".G20_")) - self.assertFalse(eset.specific.match(ppath + ".H_")) - - for bogus in self.bogus_names: - self.assertFalse(eset.specific.match(os.path.join(datastore, - "Plugin", - bogus))) - - for ignore in self.ignore: - 
self.assertTrue(eset.ignore.match(ignore)) - - self.assertFalse(eset.ignore.match(basename)) - self.assertFalse(eset.ignore.match(basename + ".G20_foo")) - self.assertFalse(eset.ignore.match(basename + ".G1_foo")) - self.assertFalse(eset.ignore.match(basename + ".G32768_foo")) - self.assertFalse(eset.ignore.match(basename + ".G10__")) - self.assertFalse(eset.ignore.match(basename + ".H_hostname")) - self.assertFalse(eset.ignore.match(basename + ".H_fqdn.subdomain.example.com")) - self.assertFalse(eset.ignore.match(basename + ".G20_group_with_underscores")) - - def test_get_matching(self): - items = {0: Mock(), 1: Mock(), 2: Mock(), 3: Mock(), 4: Mock(), - 5: Mock()} - items[0].specific.matches.return_value = False - items[1].specific.matches.return_value = True - items[2].specific.matches.return_value = False - items[3].specific.matches.return_value = False - items[4].specific.matches.return_value = True - items[5].specific.matches.return_value = True - metadata = Mock() - eset = self.get_obj() - eset.entries = items - self.assertItemsEqual(eset.get_matching(metadata), - [items[1], items[4], items[5]]) - for i in items.values(): - i.specific.matches.assert_called_with(metadata) - - @patch("Bcfg2.Server.Plugin.%s.get_matching" % test_obj.__name__) - def test_best_matching(self, mock_get_matching): - eset = self.get_obj() - metadata = Mock() - matching = [] - - def reset(): - mock_get_matching.reset_mock() - metadata.reset_mock() - for m in matching: - m.reset_mock() - - def specific(all=False, group=False, prio=None, hostname=False): - spec = Mock() - spec.specific = Specificity(all=all, group=group, prio=prio, - hostname=hostname) - return spec - - self.assertRaises(PluginExecutionError, - eset.best_matching, metadata, matching=[]) - - reset() - mock_get_matching.return_value = matching - self.assertRaises(PluginExecutionError, - eset.best_matching, metadata) - mock_get_matching.assert_called_with(metadata) - - # test with a single file for all - reset() - expected 
= specific(all=True) - matching.append(expected) - mock_get_matching.return_value = matching - self.assertEqual(eset.best_matching(metadata), expected) - mock_get_matching.assert_called_with(metadata) - - # test with a single group-specific file - reset() - expected = specific(group=True, prio=10) - matching.append(expected) - mock_get_matching.return_value = matching - self.assertEqual(eset.best_matching(metadata), expected) - mock_get_matching.assert_called_with(metadata) - - # test with multiple group-specific files - reset() - expected = specific(group=True, prio=20) - matching.append(expected) - mock_get_matching.return_value = matching - self.assertEqual(eset.best_matching(metadata), expected) - mock_get_matching.assert_called_with(metadata) - - # test with host-specific file - reset() - expected = specific(hostname=True) - matching.append(expected) - mock_get_matching.return_value = matching - self.assertEqual(eset.best_matching(metadata), expected) - mock_get_matching.assert_called_with(metadata) - - @patch("Bcfg2.Server.Plugin.%s.entry_init" % test_obj.__name__) - @patch("Bcfg2.Server.Plugin.%s.reset_metadata" % test_obj.__name__) - @patch("Bcfg2.Server.Plugin.%s.update_metadata" % test_obj.__name__) - def test_handle_event(self, mock_update_md, mock_reset_md, mock_init): - def reset(): - mock_update_md.reset_mock() - mock_reset_md.reset_mock() - mock_init.reset_mock() - - eset = self.get_obj() - for fname in ["info", "info.xml", ":info"]: - for evt in ["exists", "created", "changed"]: - reset() - event = Mock() - event.code2str.return_value = evt - event.filename = fname - eset.handle_event(event) - mock_update_md.assert_called_with(event) - self.assertFalse(mock_init.called) - self.assertFalse(mock_reset_md.called) - - reset() - event = Mock() - event.code2str.return_value = "deleted" - event.filename = fname - eset.handle_event(event) - mock_reset_md.assert_called_with(event) - self.assertFalse(mock_init.called) - self.assertFalse(mock_update_md.called) 
- - for evt in ["exists", "created", "changed"]: - reset() - event = Mock() - event.code2str.return_value = evt - event.filename = "test.txt" - eset.handle_event(event) - mock_init.assert_called_with(event) - self.assertFalse(mock_reset_md.called) - self.assertFalse(mock_update_md.called) - - reset() - entry = Mock() - eset.entries["test.txt"] = entry - event = Mock() - event.code2str.return_value = "changed" - event.filename = "test.txt" - eset.handle_event(event) - entry.handle_event.assert_called_with(event) - self.assertFalse(mock_init.called) - self.assertFalse(mock_reset_md.called) - self.assertFalse(mock_update_md.called) - - reset() - entry = Mock() - eset.entries["test.txt"] = entry - event = Mock() - event.code2str.return_value = "deleted" - event.filename = "test.txt" - eset.handle_event(event) - self.assertNotIn("test.txt", eset.entries) - - @patch("Bcfg2.Server.Plugin.%s.specificity_from_filename" % - test_obj.__name__) - def test_entry_init(self, mock_spec): - eset = self.get_obj() - - def reset(): - eset.entry_type.reset_mock() - mock_spec.reset_mock() - - event = Mock() - event.code2str.return_value = "created" - event.filename = "test.txt" - eset.entry_init(event) - mock_spec.assert_called_with("test.txt", specific=None) - eset.entry_type.assert_called_with(os.path.join(eset.path, "test.txt"), - mock_spec.return_value, None) - eset.entry_type.return_value.handle_event.assert_called_with(event) - self.assertIn("test.txt", eset.entries) - - # test duplicate add - reset() - eset.entry_init(event) - self.assertFalse(mock_spec.called) - self.assertFalse(eset.entry_type.called) - eset.entries["test.txt"].handle_event.assert_called_with(event) - - # test keyword args - etype = Mock() - specific = Mock() - event = Mock() - event.code2str.return_value = "created" - event.filename = "test2.txt" - eset.entry_init(event, entry_type=etype, specific=specific) - mock_spec.assert_called_with("test2.txt", specific=specific) - 
etype.assert_called_with(os.path.join(eset.path, "test2.txt"), - mock_spec.return_value, None) - etype.return_value.handle_event.assert_called_with(event) - self.assertIn("test2.txt", eset.entries) - - # test specificity error - event = Mock() - event.code2str.return_value = "created" - event.filename = "test3.txt" - mock_spec.side_effect = SpecificityError - eset.entry_init(event) - mock_spec.assert_called_with("test3.txt", specific=None) - self.assertFalse(eset.entry_type.called) - - @patch("Bcfg2.Server.Plugin.Specificity") - def test_specificity_from_filename(self, mock_spec): - def test(eset, fname, **kwargs): - mock_spec.reset_mock() - if "specific" in kwargs: - specific = kwargs['specific'] - del kwargs['specific'] - else: - specific = None - self.assertEqual(eset.specificity_from_filename(fname, - specific=specific), - mock_spec.return_value) - mock_spec.assert_called_with(**kwargs) - - def fails(eset, fname, specific=None): - mock_spec.reset_mock() - self.assertRaises(SpecificityError, - eset.specificity_from_filename, fname, - specific=specific) - - for basename in self.basenames: - eset = self.get_obj(basename=basename) - ppath = os.path.join(datastore, "Plugin", basename) - test(eset, ppath, all=True) - test(eset, ppath + ".G20_foo", group="foo", prio=20) - test(eset, ppath + ".G1_foo", group="foo", prio=1) - test(eset, ppath + ".G32768_foo", group="foo", prio=32768) - test(eset, ppath + ".G10__", group="_", prio=10) - test(eset, ppath + ".H_hostname", hostname="hostname") - test(eset, ppath + ".H_fqdn.subdomain.example.com", - hostname="fqdn.subdomain.example.com") - test(eset, ppath + ".G20_group_with_underscores", - group="group_with_underscores", prio=20) - - for bogus in self.bogus_names: - fails(eset, bogus) - fails(eset, ppath + ".G_group with spaces") - fails(eset, ppath + ".G_foo") - fails(eset, ppath + ".G_") - fails(eset, ppath + ".G20_") - fails(eset, ppath + ".H_") - - @patch("%s.open" % builtins) - @patch("Bcfg2.Server.Plugin.InfoXML") - 
def test_update_metadata(self, mock_InfoXML, mock_open): - eset = self.get_obj() - - # add info.xml - event = Mock() - event.filename = "info.xml" - eset.update_metadata(event) - mock_InfoXML.assert_called_with(os.path.join(eset.path, "info.xml")) - mock_InfoXML.return_value.HandleEvent.assert_called_with(event) - self.assertEqual(eset.infoxml, mock_InfoXML.return_value) - - # modify info.xml - mock_InfoXML.reset_mock() - eset.update_metadata(event) - self.assertFalse(mock_InfoXML.called) - eset.infoxml.HandleEvent.assert_called_with(event) - - for fname in [':info', 'info']: - event = Mock() - event.filename = fname - - idata = ["owner:owner", - "group: GROUP", - "perms: 775", - "important: true", - "bogus: line"] - mock_open.return_value.readlines.return_value = idata - eset.update_metadata(event) - expected = default_file_metadata.copy() - expected['owner'] = 'owner' - expected['group'] = 'GROUP' - expected['perms'] = '0775' - expected['important'] = 'true' - self.assertItemsEqual(eset.metadata, - expected) - - def test_reset_metadata(self): - eset = self.get_obj() - - # test info.xml - event = Mock() - event.filename = "info.xml" - eset.infoxml = Mock() - eset.reset_metadata(event) - self.assertIsNone(eset.infoxml) - - for fname in [':info', 'info']: - event = Mock() - event.filename = fname - eset.metadata = Mock() - eset.reset_metadata(event) - self.assertItemsEqual(eset.metadata, default_file_metadata) - - @patch("Bcfg2.Server.Plugin.bind_info") - def test_bind_info_to_entry(self, mock_bind_info): - eset = self.get_obj() - entry = Mock() - metadata = Mock() - eset.bind_info_to_entry(entry, metadata) - mock_bind_info.assert_called_with(entry, metadata, - infoxml=eset.infoxml, - default=eset.metadata) - - @patch("Bcfg2.Server.Plugin.%s.best_matching" % test_obj.__name__) - @patch("Bcfg2.Server.Plugin.%s.bind_info_to_entry" % test_obj.__name__) - def test_bind_entry(self, mock_bind_info, mock_best_matching): - eset = self.get_obj() - entry = Mock() - metadata = 
Mock() - eset.bind_entry(entry, metadata) - mock_bind_info.assert_called_with(entry, metadata) - mock_best_matching.assert_called_with(metadata) - mock_best_matching.return_value.bind_entry.assert_called_with(entry, - metadata) - - -class TestGroupSpool(TestPlugin, TestGenerator): - test_obj = GroupSpool - - @patch("Bcfg2.Server.Plugin.%s.AddDirectoryMonitor" % test_obj.__name__) - def get_obj(self, core=None): - return TestPlugin.get_obj(self, core=core) - - @patch("Bcfg2.Server.Plugin.%s.AddDirectoryMonitor" % test_obj.__name__) - def test__init(self, mock_Add): - core = Mock() - gs = self.test_obj(core, datastore) - mock_Add.assert_called_with('') - self.assertItemsEqual(gs.Entries, {gs.entry_type: {}}) - - @patch("os.path.isdir") - @patch("os.path.isfile") - @patch("Bcfg2.Server.Plugin.%s.event_id" % test_obj.__name__) - @patch("Bcfg2.Server.Plugin.%s.event_path" % test_obj.__name__) - @patch("Bcfg2.Server.Plugin.%s.AddDirectoryMonitor" % test_obj.__name__) - def test_add_entry(self, mock_Add, mock_event_path, mock_event_id, - mock_isfile, mock_isdir): - gs = self.get_obj() - gs.es_cls = Mock() - gs.es_child_cls = Mock() - - def reset(): - gs.es_cls.reset_mock() - gs.es_child_cls.reset_mock() - mock_Add.reset_mock() - mock_event_path.reset_mock() - mock_event_id.reset_mock() - mock_isfile.reset_mock() - mock_isdir.reset_mock() - - # directory - event = Mock() - event.filename = "foo" - basedir = "test" - epath = os.path.join(gs.data, basedir, event.filename) - ident = os.path.join(basedir, event.filename) - mock_event_path.return_value = epath - mock_event_id.return_value = ident - mock_isdir.return_value = True - mock_isfile.return_value = False - gs.add_entry(event) - mock_Add.assert_called_with(os.path.join("/" + basedir, event.filename)) - self.assertNotIn(ident, gs.entries) - mock_isdir.assert_called_with(epath) - - # file that is not in self.entries - reset() - event = Mock() - event.filename = "foo" - basedir = "test/foo/" - epath = os.path.join(gs.data, 
basedir, event.filename) - ident = basedir[:-1] - mock_event_path.return_value = epath - mock_event_id.return_value = ident - mock_isdir.return_value = False - mock_isfile.return_value = True - gs.add_entry(event) - self.assertFalse(mock_Add.called) - gs.es_cls.assert_called_with(gs.filename_pattern, - gs.data + ident, - gs.es_child_cls, - gs.encoding) - self.assertIn(ident, gs.entries) - self.assertEqual(gs.entries[ident], gs.es_cls.return_value) - self.assertIn(ident, gs.Entries[gs.entry_type]) - self.assertEqual(gs.Entries[gs.entry_type][ident], - gs.es_cls.return_value.bind_entry) - gs.entries[ident].handle_event.assert_called_with(event) - mock_isfile.assert_called_with(epath) - - # file that is in self.entries - reset() - gs.add_entry(event) - self.assertFalse(mock_Add.called) - self.assertFalse(gs.es_cls.called) - gs.entries[ident].handle_event.assert_called_with(event) - - def test_event_path(self): - gs = self.get_obj() - gs.handles[1] = "/var/lib/foo/" - gs.handles[2] = "/etc/foo/" - gs.handles[3] = "/usr/share/foo/" - event = Mock() - event.filename = "foo" - for i in range(1, 4): - event.requestID = i - self.assertEqual(gs.event_path(event), - os.path.join(datastore, gs.name, - gs.handles[event.requestID].lstrip('/'), - event.filename)) - - @patch("os.path.isdir") - @patch("Bcfg2.Server.Plugin.%s.event_path" % test_obj.__name__) - def test_event_id(self, mock_event_path, mock_isdir): - gs = self.get_obj() - - def reset(): - mock_event_path.reset_mock() - mock_isdir.reset_mock() - - gs.handles[1] = "/var/lib/foo/" - gs.handles[2] = "/etc/foo/" - gs.handles[3] = "/usr/share/foo/" - event = Mock() - event.filename = "foo" - for i in range(1, 4): - event.requestID = i - reset() - mock_isdir.return_value = True - self.assertEqual(gs.event_id(event), - os.path.join(gs.handles[event.requestID].lstrip('/'), - event.filename)) - mock_isdir.assert_called_with(mock_event_path.return_value) - - reset() - mock_isdir.return_value = False - 
self.assertEqual(gs.event_id(event), - gs.handles[event.requestID].rstrip('/')) - mock_isdir.assert_called_with(mock_event_path.return_value) - - def test_toggle_debug(self): - gs = self.get_obj() - gs.entries = {"/foo": Mock(), - "/bar": Mock(), - "/baz/quux": Mock()} - - @patch("Bcfg2.Server.Plugin.Plugin.toggle_debug") - def inner(mock_debug): - gs.toggle_debug() - mock_debug.assert_called_with(gs) - for entry in gs.entries.values(): - entry.toggle_debug.assert_any_call() - - inner() - - TestPlugin.test_toggle_debug(self) - - def test_HandleEvent(self): - gs = self.get_obj() - gs.entries = {"/foo": Mock(), - "/bar": Mock(), - "/baz": Mock(), - "/baz/quux": Mock()} - for path in gs.entries.keys(): - gs.Entries[gs.entry_type] = {path: Mock()} - gs.handles = {1: "/foo/", - 2: "/bar/", - 3: "/baz/", - 4: "/baz/quux"} - - gs.add_entry = Mock() - gs.event_id = Mock() - - def reset(): - gs.add_entry.reset_mock() - gs.event_id.reset_mock() - for entry in gs.entries.values(): - entry.reset_mock() - - # test event creation, changing entry that doesn't exist - for evt in ["exists", "created", "changed"]: - reset() - event = Mock() - event.filename = "foo" - event.requestID = 1 - event.code2str.return_value = evt - gs.HandleEvent(event) - gs.event_id.assert_called_with(event) - gs.add_entry.assert_called_with(event) - - # test deleting entry, changing entry that does exist - for evt in ["changed", "deleted"]: - reset() - event = Mock() - event.filename = "quux" - event.requestID = 4 - event.code2str.return_value = evt - gs.event_id.return_value = "/baz/quux" - gs.HandleEvent(event) - gs.event_id.assert_called_with(event) - self.assertIn(gs.event_id.return_value, gs.entries) - gs.entries[gs.event_id.return_value].handle_event.assert_called_with(event) - self.assertFalse(gs.add_entry.called) - - # test deleting directory - reset() - event = Mock() - event.filename = "quux" - event.requestID = 3 - event.code2str.return_value = "deleted" - gs.event_id.return_value = "/baz/quux" 
- gs.HandleEvent(event) - gs.event_id.assert_called_with(event) - self.assertNotIn("/baz/quux", gs.entries) - self.assertNotIn("/baz/quux", gs.Entries[gs.entry_type]) - - - diff --git a/testsuite/Testsrc/Testlib/TestServer/TestPlugin/Testbase.py b/testsuite/Testsrc/Testlib/TestServer/TestPlugin/Testbase.py new file mode 100644 index 000000000..9f2f618c9 --- /dev/null +++ b/testsuite/Testsrc/Testlib/TestServer/TestPlugin/Testbase.py @@ -0,0 +1,83 @@ +import os +import sys +import logging +from mock import Mock, MagicMock, patch +from Bcfg2.Server.Plugin.base import * + +# add all parent testsuite directories to sys.path to allow (most) +# relative imports in python 2.4 +path = os.path.dirname(__file__) +while path != '/': + if os.path.basename(path).lower().startswith("test"): + sys.path.append(path) + if os.path.basename(path) == "testsuite": + break + path = os.path.dirname(path) +from common import call, builtins, skip, skipIf, skipUnless, Bcfg2TestCase, \ + patchIf, datastore + + +class TestDebuggable(Bcfg2TestCase): + test_obj = Debuggable + + def get_obj(self): + return self.test_obj() + + def test__init(self): + d = self.get_obj() + self.assertIsInstance(d.logger, logging.Logger) + self.assertFalse(d.debug_flag) + + @patch("Bcfg2.Server.Plugin.base.%s.debug_log" % test_obj.__name__) + def test_toggle_debug(self, mock_debug): + d = self.get_obj() + orig = d.debug_flag + d.toggle_debug() + self.assertNotEqual(orig, d.debug_flag) + self.assertTrue(mock_debug.called) + + mock_debug.reset_mock() + + changed = d.debug_flag + d.toggle_debug() + self.assertNotEqual(changed, d.debug_flag) + self.assertEqual(orig, d.debug_flag) + self.assertTrue(mock_debug.called) + + def test_debug_log(self): + d = self.get_obj() + d.logger = Mock() + d.debug_flag = False + d.debug_log("test") + self.assertFalse(d.logger.error.called) + + d.logger.reset_mock() + d.debug_log("test", flag=True) + self.assertTrue(d.logger.error.called) + + d.logger.reset_mock() + d.debug_flag = True + 
d.debug_log("test") + self.assertTrue(d.logger.error.called) + + +class TestPlugin(TestDebuggable): + test_obj = Plugin + + def get_obj(self, core=None): + if core is None: + core = Mock() + return self.test_obj(core, datastore) + + def test__init(self): + core = Mock() + p = self.get_obj(core=core) + self.assertEqual(p.data, os.path.join(datastore, p.name)) + self.assertEqual(p.core, core) + self.assertIsInstance(p, Debuggable) + + @patch("os.makedirs") + def test_init_repo(self, mock_makedirs): + self.test_obj.init_repo(datastore) + mock_makedirs.assert_called_with(os.path.join(datastore, + self.test_obj.name)) diff --git a/testsuite/Testsrc/Testlib/TestServer/TestPlugin/Testexceptions.py b/testsuite/Testsrc/Testlib/TestServer/TestPlugin/Testexceptions.py new file mode 100644 index 000000000..d2b72251e --- /dev/null +++ b/testsuite/Testsrc/Testlib/TestServer/TestPlugin/Testexceptions.py @@ -0,0 +1,47 @@ +import os +import sys +from mock import Mock, MagicMock, patch +from Bcfg2.Server.Plugin.exceptions import * + +# add all parent testsuite directories to sys.path to allow (most) +# relative imports in python 2.4 +path = os.path.dirname(__file__) +while path != '/': + if os.path.basename(path).lower().startswith("test"): + sys.path.append(path) + if os.path.basename(path) == "testsuite": + break + path = os.path.dirname(path) +from common import call, builtins, skip, skipIf, skipUnless, Bcfg2TestCase, \ + patchIf, datastore + + +class TestPluginInitError(Bcfg2TestCase): + """ placeholder for future tests """ + pass + + +class TestPluginExecutionError(Bcfg2TestCase): + """ placeholder for future tests """ + pass + + +class TestMetadataConsistencyError(Bcfg2TestCase): + """ placeholder for future tests """ + pass + + +class TestMetadataRuntimeError(Bcfg2TestCase): + """ placeholder for future tests """ + pass + + +class TestValidationError(Bcfg2TestCase): + """ placeholder for future tests """ + pass + + +class TestSpecificityError(Bcfg2TestCase): + """ placeholder 
for future tests """ + pass + diff --git a/testsuite/Testsrc/Testlib/TestServer/TestPlugin/Testhelpers.py b/testsuite/Testsrc/Testlib/TestServer/TestPlugin/Testhelpers.py new file mode 100644 index 000000000..f19aa6b57 --- /dev/null +++ b/testsuite/Testsrc/Testlib/TestServer/TestPlugin/Testhelpers.py @@ -0,0 +1,1938 @@ +import os +import re +import sys +import copy +import lxml.etree +import Bcfg2.Server +from Bcfg2.Compat import reduce +from mock import Mock, MagicMock, patch +from Bcfg2.Server.Plugin.helpers import * + +# add all parent testsuite directories to sys.path to allow (most) +# relative imports in python 2.4 +path = os.path.dirname(__file__) +while path != '/': + if os.path.basename(path).lower().startswith("test"): + sys.path.append(path) + if os.path.basename(path) == "testsuite": + break + path = os.path.dirname(path) +from common import XI_NAMESPACE, XI, call, builtins, skip, skipIf, skipUnless, \ + Bcfg2TestCase, DBModelTestCase, syncdb, patchIf, datastore +from Testbase import TestPlugin, TestDebuggable +from Testinterfaces import TestGenerator + +try: + re_type = re._pattern_type +except AttributeError: + re_type = type(re.compile("")) + +def tostring(el): + return lxml.etree.tostring(el, xml_declaration=False).decode('UTF-8') + + +class FakeElementTree(lxml.etree._ElementTree): + xinclude = Mock() + + +class TestFunctions(Bcfg2TestCase): + def test_bind_info(self): + entry = lxml.etree.Element("Path", name="/test") + metadata = Mock() + default = dict(test1="test1", test2="test2") + # test without infoxml + bind_info(entry, metadata, default=default) + self.assertItemsEqual(entry.attrib, + dict(test1="test1", + test2="test2", + name="/test")) + + # test with bogus infoxml + entry = lxml.etree.Element("Path", name="/test") + infoxml = Mock() + self.assertRaises(PluginExecutionError, + bind_info, + entry, metadata, infoxml=infoxml) + infoxml.pnode.Match.assert_called_with(metadata, dict(), entry=entry) + + # test with valid infoxml + entry = 
lxml.etree.Element("Path", name="/test") + infoxml.reset_mock() + infodata = {None: {"test3": "test3", "test4": "test4"}} + def infoxml_rv(metadata, rv, entry=None): + rv['Info'] = infodata + infoxml.pnode.Match.side_effect = infoxml_rv + bind_info(entry, metadata, infoxml=infoxml, default=default) + # mock objects don't properly track the called-with value of + # arguments whose value is changed by the function, so it + # thinks Match() was called with the final value of the mdata + # arg, not the initial value. makes this test a little less + # worthwhile, TBH. + infoxml.pnode.Match.assert_called_with(metadata, dict(Info=infodata), + entry=entry) + self.assertItemsEqual(entry.attrib, + dict(test1="test1", + test2="test2", + test3="test3", + test4="test4", + name="/test")) + + +class TestDatabaseBacked(TestPlugin): + test_obj = DatabaseBacked + + @skipUnless(has_django, "Django not found") + def test__use_db(self): + core = Mock() + core.setup.cfp.getboolean.return_value = True + db = self.get_obj(core) + self.assertTrue(db._use_db) + + core = Mock() + core.setup.cfp.getboolean.return_value = False + db = self.get_obj(core) + self.assertFalse(db._use_db) + + Bcfg2.Server.Plugin.helpers.has_django = False + core = Mock() + db = self.get_obj(core) + self.assertFalse(db._use_db) + + core = Mock() + core.setup.cfp.getboolean.return_value = True + db = self.get_obj(core) + self.assertFalse(db._use_db) + Bcfg2.Server.Plugin.helpers.has_django = True + + +class TestPluginDatabaseModel(Bcfg2TestCase): + """ placeholder for future tests """ + pass + + +class TestFileBacked(Bcfg2TestCase): + test_obj = FileBacked + path = os.path.join(datastore, "test") + + def get_obj(self, path=None, fam=None): + if path is None: + path = self.path + return self.test_obj(path, fam=fam) + + @patch("%s.open" % builtins) + def test_HandleEvent(self, mock_open): + fb = self.get_obj() + fb.Index = Mock() + + def reset(): + fb.Index.reset_mock() + mock_open.reset_mock() + + for evt in 
["exists", "changed", "created"]: + reset() + event = Mock() + event.code2str.return_value = evt + fb.HandleEvent(event) + mock_open.assert_called_with(self.path) + mock_open.return_value.read.assert_any_call() + fb.Index.assert_any_call() + + reset() + event = Mock() + event.code2str.return_value = "endExist" + fb.HandleEvent(event) + self.assertFalse(mock_open.called) + self.assertFalse(fb.Index.called) + + +class TestDirectoryBacked(Bcfg2TestCase): + test_obj = DirectoryBacked + testpaths = {1: '', + 2: '/foo', + 3: '/foo/bar', + 4: '/foo/bar/baz', + 5: 'quux', + 6: 'xyzzy/', + 7: 'xyzzy/plugh/'} + testfiles = ['foo', 'bar/baz.txt', 'plugh.py'] + ignore = [] # ignore no events + badevents = [] # DirectoryBacked handles all files, so there's no + # such thing as a bad event + + def test_child_interface(self): + # ensure that the child object has the correct interface + self.assertTrue(hasattr(self.test_obj.__child__, "HandleEvent")) + + @patch("Bcfg2.Server.Plugin.helpers.%s.add_directory_monitor" % + test_obj.__name__, Mock()) + def get_obj(self, fam=None): + if fam is None: + fam = Mock() + return self.test_obj(os.path.join(datastore, self.test_obj.__name__), + fam) + + @patch("Bcfg2.Server.Plugin.helpers.%s.add_directory_monitor" % + test_obj.__name__) + def test__init(self, mock_add_monitor): + db = self.test_obj(datastore, Mock()) + mock_add_monitor.assert_called_with('') + + def test__getitem(self): + db = self.get_obj() + db.entries.update(dict(a=1, b=2, c=3)) + self.assertEqual(db['a'], 1) + self.assertEqual(db['b'], 2) + expected = KeyError + try: + db['d'] + except expected: + pass + except: + err = sys.exc_info()[1] + self.assertFalse(True, "%s raised instead of %s" % + (err.__class__.__name__, + expected.__class__.__name__)) + else: + self.assertFalse(True, + "%s not raised" % expected.__class__.__name__) + + def test__iter(self): + db = self.get_obj() + db.entries.update(dict(a=1, b=2, c=3)) + self.assertEqual([i for i in db], + [i for i in 
db.entries.items()]) + + @patch("os.path.isdir") + def test_add_directory_monitor(self, mock_isdir): + db = self.get_obj() + db.fam = Mock() + db.fam.rv = 0 + + def reset(): + db.fam.rv += 1 + db.fam.AddMonitor.return_value = db.fam.rv + db.fam.reset_mock() + mock_isdir.reset_mock() + + mock_isdir.return_value = True + for path in self.testpaths.values(): + reset() + db.add_directory_monitor(path) + db.fam.AddMonitor.assert_called_with(os.path.join(db.data, path), + db) + self.assertIn(db.fam.rv, db.handles) + self.assertEqual(db.handles[db.fam.rv], path) + + reset() + # test duplicate adds + for path in self.testpaths.values(): + reset() + db.add_directory_monitor(path) + self.assertFalse(db.fam.AddMonitor.called) + + reset() + mock_isdir.return_value = False + db.add_directory_monitor('bogus') + self.assertFalse(db.fam.AddMonitor.called) + self.assertNotIn(db.fam.rv, db.handles) + + def test_add_entry(self): + db = self.get_obj() + db.fam = Mock() + class MockChild(Mock): + def __init__(self, path, fam, **kwargs): + Mock.__init__(self, **kwargs) + self.path = path + self.fam = fam + self.HandleEvent = Mock() + db.__child__ = MockChild + + for path in self.testpaths.values(): + event = Mock() + db.add_entry(path, event) + self.assertIn(path, db.entries) + self.assertEqual(db.entries[path].path, + os.path.join(db.data, path)) + self.assertEqual(db.entries[path].fam, db.fam) + db.entries[path].HandleEvent.assert_called_with(event) + + @patch("os.path.isdir") + @patch("Bcfg2.Server.Plugin.helpers.%s.add_entry" % test_obj.__name__) + @patch("Bcfg2.Server.Plugin.helpers.%s.add_directory_monitor" % + test_obj.__name__) + def test_HandleEvent(self, mock_add_monitor, mock_add_entry, mock_isdir): + db = self.get_obj() + # a path with a leading / should never get into + # DirectoryBacked.handles, so strip that test case + for rid, path in self.testpaths.items(): + path = path.lstrip('/') + db.handles[rid] = path + + def reset(): + mock_isdir.reset_mock() + 
mock_add_entry.reset_mock() + mock_add_monitor.reset_mock() + + def get_event(filename, action, requestID): + event = Mock() + event.code2str.return_value = action + event.filename = filename + event.requestID = requestID + return event + + # test events on the data directory itself + reset() + mock_isdir.return_value = True + event = get_event(db.data, "exists", 1) + db.HandleEvent(event) + mock_add_monitor.assert_called_with("") + + # test events on paths that aren't handled + reset() + mock_isdir.return_value = False + event = get_event('/' + self.testfiles[0], 'created', + max(self.testpaths.keys()) + 1) + db.HandleEvent(event) + self.assertFalse(mock_add_monitor.called) + self.assertFalse(mock_add_entry.called) + + for req_id, path in self.testpaths.items(): + # a path with a leading / should never get into + # DirectoryBacked.handles, so strip that test case + path = path.lstrip('/') + basepath = os.path.join(datastore, path) + for fname in self.testfiles: + relpath = os.path.join(path, fname) + abspath = os.path.join(basepath, fname) + + # test endExist does nothing + reset() + event = get_event(fname, 'endExist', req_id) + db.HandleEvent(event) + self.assertFalse(mock_add_monitor.called) + self.assertFalse(mock_add_entry.called) + + mock_isdir.return_value = True + for evt in ["created", "exists", "changed"]: + # test that creating or changing a directory works + reset() + event = get_event(fname, evt, req_id) + db.HandleEvent(event) + mock_add_monitor.assert_called_with(relpath) + self.assertFalse(mock_add_entry.called) + + mock_isdir.return_value = False + for evt in ["created", "exists"]: + # test that creating a file works + reset() + event = get_event(fname, evt, req_id) + db.HandleEvent(event) + mock_add_entry.assert_called_with(relpath, event) + self.assertFalse(mock_add_monitor.called) + db.entries[relpath] = MagicMock() + + # test that changing a file that already exists works + reset() + event = get_event(fname, "changed", req_id) + 
db.HandleEvent(event) + db.entries[relpath].HandleEvent.assert_called_with(event) + self.assertFalse(mock_add_monitor.called) + self.assertFalse(mock_add_entry.called) + + # test that deleting an entry works + reset() + event = get_event(fname, "deleted", req_id) + db.HandleEvent(event) + self.assertNotIn(relpath, db.entries) + + # test that changing a file that doesn't exist works + reset() + event = get_event(fname, "changed", req_id) + db.HandleEvent(event) + mock_add_entry.assert_called_with(relpath, event) + self.assertFalse(mock_add_monitor.called) + db.entries[relpath] = MagicMock() + + # test that deleting a directory works. this is a little + # strange because the _parent_ directory has to handle the + # deletion + reset() + event = get_event('quux', "deleted", 1) + db.HandleEvent(event) + for key in db.entries.keys(): + self.assertFalse(key.startswith('quux')) + + # test bad events + for fname in self.badevents: + reset() + event = get_event(fname, "created", 1) + db.HandleEvent(event) + self.assertFalse(mock_add_entry.called) + self.assertFalse(mock_add_monitor.called) + + # test ignored events + for fname in self.ignore: + reset() + event = get_event(fname, "created", 1) + db.HandleEvent(event) + self.assertFalse(mock_isdir.called, + msg="Failed to ignore %s" % fname) + self.assertFalse(mock_add_entry.called, + msg="Failed to ignore %s" % fname) + self.assertFalse(mock_add_monitor.called, + msg="Failed to ignore %s" % fname) + + +class TestXMLFileBacked(TestFileBacked): + test_obj = XMLFileBacked + path = os.path.join(datastore, "test", "test1.xml") + + def get_obj(self, path=None, fam=None, should_monitor=False): + if path is None: + path = self.path + return self.test_obj(path, fam=fam, should_monitor=should_monitor) + + def test__init(self): + fam = Mock() + xfb = self.get_obj() + self.assertIsNone(xfb.fam) + + xfb = self.get_obj(fam=fam) + self.assertFalse(fam.AddMonitor.called) + + fam.reset_mock() + xfb = self.get_obj(fam=fam, should_monitor=True) 
+ fam.AddMonitor.assert_called_with(self.path, xfb) + + @patch("os.path.exists") + @patch("lxml.etree.parse") + def test_follow_xincludes(self, mock_parse, mock_exists): + xfb = self.get_obj() + xfb.add_monitor = Mock() + + def reset(): + xfb.add_monitor.reset_mock() + mock_parse.reset_mock() + mock_exists.reset_mock() + xfb.extras = [] + + mock_exists.return_value = True + xdata = dict() + mock_parse.side_effect = lambda p: xdata[p] + + # basic functionality + xdata['/test/test2.xml'] = lxml.etree.Element("Test").getroottree() + xfb._follow_xincludes(xdata=xdata['/test/test2.xml']) + self.assertFalse(xfb.add_monitor.called) + + if (not hasattr(self.test_obj, "xdata") or + not isinstance(self.test_obj.xdata, property)): + # if xdata is settable, test that method of getting data + # to _follow_xincludes + reset() + xfb.xdata = xdata['/test/test2.xml'].getroot() + xfb._follow_xincludes() + self.assertFalse(xfb.add_monitor.called) + xfb.xdata = None + + reset() + xfb._follow_xincludes(fname="/test/test2.xml") + self.assertFalse(xfb.add_monitor.called) + + # test one level of xinclude + xdata[self.path] = lxml.etree.Element("Test").getroottree() + lxml.etree.SubElement(xdata[self.path].getroot(), + Bcfg2.Server.XI_NAMESPACE + "include", + href="/test/test2.xml") + reset() + xfb._follow_xincludes(fname=self.path) + xfb.add_monitor.assert_called_with("/test/test2.xml") + self.assertItemsEqual(mock_parse.call_args_list, + [call(f) for f in xdata.keys()]) + mock_exists.assert_called_with("/test/test2.xml") + + reset() + xfb._follow_xincludes(fname=self.path, xdata=xdata[self.path]) + xfb.add_monitor.assert_called_with("/test/test2.xml") + self.assertItemsEqual(mock_parse.call_args_list, + [call(f) for f in xdata.keys() + if f != self.path]) + mock_exists.assert_called_with("/test/test2.xml") + + # test two-deep level of xinclude, with some files in another + # directory + xdata["/test/test3.xml"] = \ + lxml.etree.Element("Test").getroottree() + 
lxml.etree.SubElement(xdata["/test/test3.xml"].getroot(), + Bcfg2.Server.XI_NAMESPACE + "include", + href="/test/test_dir/test4.xml") + xdata["/test/test_dir/test4.xml"] = \ + lxml.etree.Element("Test").getroottree() + lxml.etree.SubElement(xdata["/test/test_dir/test4.xml"].getroot(), + Bcfg2.Server.XI_NAMESPACE + "include", + href="/test/test_dir/test5.xml") + xdata['/test/test_dir/test5.xml'] = \ + lxml.etree.Element("Test").getroottree() + xdata['/test/test_dir/test6.xml'] = \ + lxml.etree.Element("Test").getroottree() + # relative includes + lxml.etree.SubElement(xdata[self.path].getroot(), + Bcfg2.Server.XI_NAMESPACE + "include", + href="test3.xml") + lxml.etree.SubElement(xdata["/test/test3.xml"].getroot(), + Bcfg2.Server.XI_NAMESPACE + "include", + href="test_dir/test6.xml") + + reset() + xfb._follow_xincludes(fname=self.path) + self.assertItemsEqual(xfb.add_monitor.call_args_list, + [call(f) for f in xdata.keys() if f != self.path]) + self.assertItemsEqual(mock_parse.call_args_list, + [call(f) for f in xdata.keys()]) + self.assertItemsEqual(mock_exists.call_args_list, + [call(f) for f in xdata.keys() if f != self.path]) + + reset() + xfb._follow_xincludes(fname=self.path, xdata=xdata[self.path]) + self.assertItemsEqual(xfb.add_monitor.call_args_list, + [call(f) for f in xdata.keys() if f != self.path]) + self.assertItemsEqual(mock_parse.call_args_list, + [call(f) for f in xdata.keys() if f != self.path]) + self.assertItemsEqual(mock_exists.call_args_list, + [call(f) for f in xdata.keys() if f != self.path]) + + @patch("lxml.etree._ElementTree", FakeElementTree) + @patch("Bcfg2.Server.Plugin.helpers.%s._follow_xincludes" % + test_obj.__name__) + def test_Index(self, mock_follow): + xfb = self.get_obj() + + def reset(): + mock_follow.reset_mock() + FakeElementTree.xinclude.reset_mock() + xfb.extras = [] + xfb.xdata = None + + # syntax error + xfb.data = "<" + self.assertRaises(PluginInitError, xfb.Index) + + # no xinclude + reset() + xdata = 
lxml.etree.Element("Test", name="test") + children = [lxml.etree.SubElement(xdata, "Foo"), + lxml.etree.SubElement(xdata, "Bar", name="bar")] + xfb.data = tostring(xdata) + xfb.Index() + mock_follow.assert_any_call() + try: + self.assertEqual(xfb.xdata.base, self.path) + except AttributeError: + # python 2.4 and/or lxml 2.0 don't store the base_url in + # .base -- no idea where it's stored. + pass + self.assertItemsEqual([tostring(e) for e in xfb.entries], + [tostring(e) for e in children]) + + # with xincludes + reset() + mock_follow.side_effect = \ + lambda: xfb.extras.extend(["/test/test2.xml", + "/test/test_dir/test3.xml"]) + children.extend([ + lxml.etree.SubElement(xdata, + Bcfg2.Server.XI_NAMESPACE + "include", + href="/test/test2.xml"), + lxml.etree.SubElement(xdata, + Bcfg2.Server.XI_NAMESPACE + "include", + href="/test/test_dir/test3.xml")]) + test2 = lxml.etree.Element("Test", name="test2") + lxml.etree.SubElement(test2, "Baz") + test3 = lxml.etree.Element("Test", name="test3") + replacements = {"/test/test2.xml": test2, + "/test/test_dir/test3.xml": test3} + def xinclude(): + for el in xfb.xdata.findall('//%sinclude' % + Bcfg2.Server.XI_NAMESPACE): + xfb.xdata.replace(el, replacements[el.get("href")]) + FakeElementTree.xinclude.side_effect = xinclude + + xfb.data = tostring(xdata) + xfb.Index() + mock_follow.assert_any_call() + FakeElementTree.xinclude.assert_any_call + try: + self.assertEqual(xfb.xdata.base, self.path) + except AttributeError: + pass + self.assertItemsEqual([tostring(e) for e in xfb.entries], + [tostring(e) for e in children]) + + def test_add_monitor(self): + xfb = self.get_obj() + xfb.add_monitor("/test/test2.xml") + self.assertIn("/test/test2.xml", xfb.extras) + + fam = Mock() + xfb = self.get_obj(fam=fam) + fam.reset_mock() + xfb.add_monitor("/test/test3.xml") + self.assertFalse(fam.AddMonitor.called) + self.assertIn("/test/test3.xml", xfb.extras) + + fam.reset_mock() + xfb = self.get_obj(fam=fam, should_monitor=True) + 
xfb.add_monitor("/test/test4.xml") + fam.AddMonitor.assert_called_with("/test/test4.xml", xfb) + self.assertIn("/test/test4.xml", xfb.extras) + + +class TestStructFile(TestXMLFileBacked): + test_obj = StructFile + + def _get_test_data(self): + """ build a very complex set of test data """ + # top-level group and client elements + groups = dict() + # group and client elements that are descendents of other group or + # client elements + subgroups = dict() + # children of elements in `groups' that should be included in + # match results + children = dict() + # children of elements in `subgroups' that should be included in + # match results + subchildren = dict() + # top-level tags that are not group elements + standalone = [] + xdata = lxml.etree.Element("Test", name="test") + groups[0] = lxml.etree.SubElement(xdata, "Group", name="group1", + include="true") + children[0] = [lxml.etree.SubElement(groups[0], "Child", name="c1"), + lxml.etree.SubElement(groups[0], "Child", name="c2")] + subgroups[0] = [lxml.etree.SubElement(groups[0], "Group", + name="subgroup1", include="true"), + lxml.etree.SubElement(groups[0], + "Client", name="client1", + include="false")] + subchildren[0] = \ + [lxml.etree.SubElement(subgroups[0][0], "Child", name="sc1"), + lxml.etree.SubElement(subgroups[0][0], "Child", name="sc2", + attr="some attr"), + lxml.etree.SubElement(subgroups[0][0], "Child", name="sc3")] + lxml.etree.SubElement(subchildren[0][-1], "SubChild", name="subchild") + lxml.etree.SubElement(subgroups[0][1], "Child", name="sc4") + + groups[1] = lxml.etree.SubElement(xdata, "Group", name="group2", + include="false") + children[1] = [] + subgroups[1] = [] + subchildren[1] = [] + lxml.etree.SubElement(groups[1], "Child", name="c3") + lxml.etree.SubElement(groups[1], "Child", name="c4") + + standalone.append(lxml.etree.SubElement(xdata, "Standalone", name="s1")) + + groups[2] = lxml.etree.SubElement(xdata, "Client", name="client2", + include="false") + children[2] = [] + 
subgroups[2] = [] + subchildren[2] = [] + lxml.etree.SubElement(groups[2], "Child", name="c5") + lxml.etree.SubElement(groups[2], "Child", name="c6") + + standalone.append(lxml.etree.SubElement(xdata, "Standalone", name="s2", + attr="some attr")) + + groups[3] = lxml.etree.SubElement(xdata, "Client", name="client3", + include="true") + children[3] = [lxml.etree.SubElement(groups[3], "Child", name="c7", + attr="some_attr"), + lxml.etree.SubElement(groups[3], "Child", name="c8")] + subgroups[3] = [] + subchildren[3] = [] + lxml.etree.SubElement(children[3][-1], "SubChild", name="subchild") + + standalone.append(lxml.etree.SubElement(xdata, "Standalone", name="s3")) + lxml.etree.SubElement(standalone[-1], "SubStandalone", name="sub1") + + children[4] = standalone + return (xdata, groups, subgroups, children, subchildren, standalone) + + def test_include_element(self): + sf = self.get_obj() + metadata = Mock() + metadata.groups = ["group1", "group2"] + metadata.hostname = "foo.example.com" + + inc = lambda tag, **attrs: \ + sf._include_element(lxml.etree.Element(tag, **attrs), metadata) + + self.assertFalse(sf._include_element(lxml.etree.Comment("test"), + metadata)) + + self.assertFalse(inc("Group", name="group3")) + self.assertFalse(inc("Group", name="group2", negate="true")) + self.assertFalse(inc("Group", name="group2", negate="tRuE")) + self.assertTrue(inc("Group", name="group2")) + self.assertTrue(inc("Group", name="group2", negate="false")) + self.assertTrue(inc("Group", name="group2", negate="faLSe")) + self.assertTrue(inc("Group", name="group3", negate="true")) + self.assertTrue(inc("Group", name="group3", negate="tRUe")) + + self.assertFalse(inc("Client", name="bogus.example.com")) + self.assertFalse(inc("Client", name="foo.example.com", negate="true")) + self.assertFalse(inc("Client", name="foo.example.com", negate="tRuE")) + self.assertTrue(inc("Client", name="foo.example.com")) + self.assertTrue(inc("Client", name="foo.example.com", negate="false")) + 
self.assertTrue(inc("Client", name="foo.example.com", negate="faLSe")) + self.assertTrue(inc("Client", name="bogus.example.com", negate="true")) + self.assertTrue(inc("Client", name="bogus.example.com", negate="tRUe")) + + self.assertTrue(inc("Other")) + + @patch("Bcfg2.Server.Plugin.helpers.%s._include_element" % + test_obj.__name__) + def test__match(self, mock_include): + sf = self.get_obj() + metadata = Mock() + + (xdata, groups, subgroups, children, subchildren, standalone) = \ + self._get_test_data() + + mock_include.side_effect = \ + lambda x, _: (x.tag not in ['Client', 'Group'] or + x.get("include") == "true") + + for i, group in groups.items(): + actual = sf._match(group, metadata) + expected = children[i] + subchildren[i] + self.assertEqual(len(actual), len(expected)) + # easiest way to compare the values is actually to make + # them into an XML document and let assertXMLEqual compare + # them + xactual = lxml.etree.Element("Container") + xactual.extend(actual) + xexpected = lxml.etree.Element("Container") + xexpected.extend(expected) + self.assertXMLEqual(xactual, xexpected) + + for el in standalone: + self.assertXMLEqual(el, sf._match(el, metadata)[0]) + + @patch("Bcfg2.Server.Plugin.helpers.%s._match" % test_obj.__name__) + def test_Match(self, mock_match): + sf = self.get_obj() + metadata = Mock() + + (xdata, groups, subgroups, children, subchildren, standalone) = \ + self._get_test_data() + sf.entries.extend(copy.deepcopy(xdata).getchildren()) + + def match_rv(el, _): + if el.tag not in ['Client', 'Group']: + return [el] + elif x.get("include") == "true": + return el.getchildren() + else: + return [] + mock_match.side_effect = match_rv + actual = sf.Match(metadata) + expected = reduce(lambda x, y: x + y, + list(children.values()) + list(subgroups.values())) + self.assertEqual(len(actual), len(expected)) + # easiest way to compare the values is actually to make + # them into an XML document and let assertXMLEqual compare + # them + xactual = 
lxml.etree.Element("Container") + xactual.extend(actual) + xexpected = lxml.etree.Element("Container") + xexpected.extend(expected) + self.assertXMLEqual(xactual, xexpected) + + @patch("Bcfg2.Server.Plugin.helpers.%s._include_element" % + test_obj.__name__) + def test__xml_match(self, mock_include): + sf = self.get_obj() + metadata = Mock() + + (xdata, groups, subgroups, children, subchildren, standalone) = \ + self._get_test_data() + + mock_include.side_effect = \ + lambda x, _: (x.tag not in ['Client', 'Group'] or + x.get("include") == "true") + + actual = copy.deepcopy(xdata) + for el in actual.getchildren(): + sf._xml_match(el, metadata) + expected = lxml.etree.Element(xdata.tag, **dict(xdata.attrib)) + expected.text = xdata.text + expected.extend(reduce(lambda x, y: x + y, + list(children.values()) + list(subchildren.values()))) + expected.extend(standalone) + self.assertXMLEqual(actual, expected) + + @patch("Bcfg2.Server.Plugin.helpers.%s._xml_match" % test_obj.__name__) + def test_Match(self, mock_xml_match): + sf = self.get_obj() + metadata = Mock() + + (sf.xdata, groups, subgroups, children, subchildren, standalone) = \ + self._get_test_data() + + sf.XMLMatch(metadata) + actual = [] + for call in mock_xml_match.call_args_list: + actual.append(call[0][0]) + self.assertEqual(call[0][1], metadata) + expected = list(groups.values()) + standalone + # easiest way to compare the values is actually to make + # them into an XML document and let assertXMLEqual compare + # them + xactual = lxml.etree.Element("Container") + xactual.extend(actual) + xexpected = lxml.etree.Element("Container") + xexpected.extend(expected) + self.assertXMLEqual(xactual, xexpected) + + +class TestINode(Bcfg2TestCase): + test_obj = INode + + # INode.__init__ and INode._load_children() call each other + # recursively, which makes this class kind of a nightmare to test. 
+ # we have to first patch INode._load_children so that we can + # create an INode object with no children loaded, then we unpatch + # INode._load_children and patch INode.__init__ so that child + # objects aren't actually created. but in order to test things + # atomically, we do this umpteen times in order to test with + # different data. this convenience method makes this a little + # easier. fun fun fun. + @patch("Bcfg2.Server.Plugin.helpers.%s._load_children" % + test_obj.__name__, Mock()) + def _get_inode(self, data, idict): + return self.test_obj(data, idict) + + def test_raw_predicates(self): + metadata = Mock() + metadata.groups = ["group1", "group2"] + metadata.hostname = "foo.example.com" + entry = None + + parent_predicate = lambda m, e: True + pred = eval(self.test_obj.raw['Client'] % dict(name="foo.example.com"), + dict(predicate=parent_predicate)) + self.assertTrue(pred(metadata, entry)) + pred = eval(self.test_obj.raw['Client'] % dict(name="bar.example.com"), + dict(predicate=parent_predicate)) + self.assertFalse(pred(metadata, entry)) + + pred = eval(self.test_obj.raw['Group'] % dict(name="group1"), + dict(predicate=parent_predicate)) + self.assertTrue(pred(metadata, entry)) + pred = eval(self.test_obj.raw['Group'] % dict(name="group3"), + dict(predicate=parent_predicate)) + self.assertFalse(pred(metadata, entry)) + + pred = eval(self.test_obj.nraw['Client'] % dict(name="foo.example.com"), + dict(predicate=parent_predicate)) + self.assertFalse(pred(metadata, entry)) + pred = eval(self.test_obj.nraw['Client'] % dict(name="bar.example.com"), + dict(predicate=parent_predicate)) + self.assertTrue(pred(metadata, entry)) + + pred = eval(self.test_obj.nraw['Group'] % dict(name="group1"), + dict(predicate=parent_predicate)) + self.assertFalse(pred(metadata, entry)) + pred = eval(self.test_obj.nraw['Group'] % dict(name="group3"), + dict(predicate=parent_predicate)) + self.assertTrue(pred(metadata, entry)) + + parent_predicate = lambda m, e: False + pred = 
eval(self.test_obj.raw['Client'] % dict(name="foo.example.com"), + dict(predicate=parent_predicate)) + self.assertFalse(pred(metadata, entry)) + pred = eval(self.test_obj.raw['Group'] % dict(name="group1"), + dict(predicate=parent_predicate)) + self.assertFalse(pred(metadata, entry)) + pred = eval(self.test_obj.nraw['Client'] % dict(name="bar.example.com"), + dict(predicate=parent_predicate)) + self.assertFalse(pred(metadata, entry)) + pred = eval(self.test_obj.nraw['Group'] % dict(name="group3"), + dict(predicate=parent_predicate)) + self.assertFalse(pred(metadata, entry)) + + self.assertItemsEqual(self.test_obj.containers, + self.test_obj.raw.keys()) + self.assertItemsEqual(self.test_obj.containers, + self.test_obj.nraw.keys()) + + @patch("Bcfg2.Server.Plugin.helpers.INode._load_children") + def test__init(self, mock_load_children): + data = lxml.etree.Element("Bogus") + # called with no parent, should not raise an exception; it's a + # top-level tag in an XML file and so is not expected to be a + # proper predicate + INode(data, dict()) + self.assertRaises(PluginExecutionError, + INode, data, dict(), Mock()) + + data = lxml.etree.Element("Client", name="foo.example.com") + idict = dict() + inode = INode(data, idict) + mock_load_children.assert_called_with(data, idict) + self.assertTrue(inode.predicate(Mock(), Mock())) + + parent = Mock() + parent.predicate = lambda m, e: True + metadata = Mock() + metadata.groups = ["group1", "group2"] + metadata.hostname = "foo.example.com" + entry = None + + # test setting predicate with parent object + mock_load_children.reset_mock() + inode = INode(data, idict, parent=parent) + mock_load_children.assert_called_with(data, idict) + self.assertTrue(inode.predicate(metadata, entry)) + + # test negation + data = lxml.etree.Element("Client", name="foo.example.com", + negate="true") + mock_load_children.reset_mock() + inode = INode(data, idict, parent=parent) + mock_load_children.assert_called_with(data, idict) + 
self.assertFalse(inode.predicate(metadata, entry)) + + # test failure of a matching predicate (client names do not match) + data = lxml.etree.Element("Client", name="foo.example.com") + metadata.hostname = "bar.example.com" + mock_load_children.reset_mock() + inode = INode(data, idict, parent=parent) + mock_load_children.assert_called_with(data, idict) + self.assertFalse(inode.predicate(metadata, entry)) + + # test that parent predicate is AND'ed in correctly + parent.predicate = lambda m, e: False + metadata.hostname = "foo.example.com" + mock_load_children.reset_mock() + inode = INode(data, idict, parent=parent) + mock_load_children.assert_called_with(data, idict) + self.assertFalse(inode.predicate(metadata, entry)) + + def test_load_children(self): + data = lxml.etree.Element("Parent") + child1 = lxml.etree.SubElement(data, "Client", name="foo.example.com") + child2 = lxml.etree.SubElement(data, "Group", name="bar", negate="true") + idict = dict() + + inode = self._get_inode(data, idict) + + @patch("Bcfg2.Server.Plugin.helpers.%s.__init__" % + inode.__class__.__name__) + def inner(mock_init): + mock_init.return_value = None + inode._load_children(data, idict) + self.assertItemsEqual(mock_init.call_args_list, + [call(child1, idict, inode), + call(child2, idict, inode)]) + self.assertEqual(idict, dict()) + self.assertItemsEqual(inode.contents, dict()) + + inner() + + data = lxml.etree.Element("Parent") + child1 = lxml.etree.SubElement(data, "Data", name="child1", + attr="some attr") + child1.text = "text" + subchild1 = lxml.etree.SubElement(child1, "SubChild", name="subchild") + child2 = lxml.etree.SubElement(data, "Group", name="bar", negate="true") + idict = dict() + + inode = self._get_inode(data, idict) + inode.ignore = [] + + @patch("Bcfg2.Server.Plugin.helpers.%s.__init__" % + inode.__class__.__name__) + def inner2(mock_init): + mock_init.return_value = None + inode._load_children(data, idict) + mock_init.assert_called_with(child2, idict, inode) + tag = 
child1.tag + name = child1.get("name") + self.assertEqual(idict, dict(Data=[name])) + self.assertIn(tag, inode.contents) + self.assertIn(name, inode.contents[tag]) + self.assertItemsEqual(inode.contents[tag][name], + dict(name=name, + attr=child1.get('attr'), + __text__=child1.text, + __children__=[subchild1])) + + inner2() + + # test ignore. no ignore is set on INode by default, so we + # have to set one + old_ignore = copy.copy(self.test_obj.ignore) + self.test_obj.ignore.append("Data") + idict = dict() + + inode = self._get_inode(data, idict) + + @patch("Bcfg2.Server.Plugin.helpers.%s.__init__" % + inode.__class__.__name__) + def inner3(mock_init): + mock_init.return_value = None + inode._load_children(data, idict) + mock_init.assert_called_with(child2, idict, inode) + self.assertEqual(idict, dict()) + self.assertItemsEqual(inode.contents, dict()) + + inner3() + self.test_obj.ignore = old_ignore + + def test_Match(self): + idata = lxml.etree.Element("Parent") + contents = lxml.etree.SubElement(idata, "Data", name="contents", + attr="some attr") + child = lxml.etree.SubElement(idata, "Group", name="bar", negate="true") + + inode = INode(idata, dict()) + inode.predicate = Mock() + inode.predicate.return_value = False + + metadata = Mock() + metadata.groups = ['foo'] + data = dict() + entry = child + + inode.Match(metadata, data, entry=child) + self.assertEqual(data, dict()) + inode.predicate.assert_called_with(metadata, child) + + inode.predicate.reset_mock() + inode.Match(metadata, data) + self.assertEqual(data, dict()) + # can't easily compare XML args without the original + # object, and we're testing that Match() works without an + # XML object passed in, so... 
+ self.assertEqual(inode.predicate.call_args[0][0], + metadata) + self.assertXMLEqual(inode.predicate.call_args[0][1], + lxml.etree.Element("None")) + + inode.predicate.reset_mock() + inode.predicate.return_value = True + inode.Match(metadata, data, entry=child) + self.assertEqual(data, inode.contents) + inode.predicate.assert_called_with(metadata, child) + + +class TestInfoNode(TestINode): + __test__ = True + test_obj = InfoNode + + def test_raw_predicates(self): + TestINode.test_raw_predicates(self) + metadata = Mock() + entry = lxml.etree.Element("Path", name="/tmp/foo", + realname="/tmp/bar") + + parent_predicate = lambda m, d: True + pred = eval(self.test_obj.raw['Path'] % dict(name="/tmp/foo"), + dict(predicate=parent_predicate)) + self.assertTrue(pred(metadata, entry)) + pred = eval(InfoNode.raw['Path'] % dict(name="/tmp/bar"), + dict(predicate=parent_predicate)) + self.assertTrue(pred(metadata, entry)) + pred = eval(InfoNode.raw['Path'] % dict(name="/tmp/bogus"), + dict(predicate=parent_predicate)) + self.assertFalse(pred(metadata, entry)) + + pred = eval(self.test_obj.nraw['Path'] % dict(name="/tmp/foo"), + dict(predicate=parent_predicate)) + self.assertFalse(pred(metadata, entry)) + pred = eval(InfoNode.nraw['Path'] % dict(name="/tmp/bar"), + dict(predicate=parent_predicate)) + self.assertFalse(pred(metadata, entry)) + pred = eval(InfoNode.nraw['Path'] % dict(name="/tmp/bogus"), + dict(predicate=parent_predicate)) + self.assertTrue(pred(metadata, entry)) + + parent_predicate = lambda m, d: False + pred = eval(self.test_obj.raw['Path'] % dict(name="/tmp/foo"), + dict(predicate=parent_predicate)) + self.assertFalse(pred(metadata, entry)) + pred = eval(InfoNode.raw['Path'] % dict(name="/tmp/bar"), + dict(predicate=parent_predicate)) + self.assertFalse(pred(metadata, entry)) + pred = eval(InfoNode.nraw['Path'] % dict(name="/tmp/bogus"), + dict(predicate=parent_predicate)) + self.assertFalse(pred(metadata, entry)) + + +class TestXMLSrc(TestXMLFileBacked): + 
test_obj = XMLSrc + + def test_node_interface(self): + # ensure that the node object has the necessary interface + self.assertTrue(hasattr(self.test_obj.__node__, "Match")) + + @patch("%s.open" % builtins) + def test_HandleEvent(self, mock_open): + xdata = lxml.etree.Element("Test") + lxml.etree.SubElement(xdata, "Path", name="path", attr="whatever") + + xsrc = self.get_obj("/test/foo.xml") + xsrc.__node__ = Mock() + mock_open.return_value.read.return_value = tostring(xdata) + + if xsrc.__priority_required__: + # test with no priority at all + self.assertRaises(PluginExecutionError, + xsrc.HandleEvent, Mock()) + + # test with bogus priority + xdata.set("priority", "cow") + mock_open.return_value.read.return_value = tostring(xdata) + self.assertRaises(PluginExecutionError, + xsrc.HandleEvent, Mock()) + + # assign a priority to use in future tests + xdata.set("priority", "10") + mock_open.return_value.read.return_value = tostring(xdata) + + mock_open.reset_mock() + xsrc = self.get_obj("/test/foo.xml") + xsrc.__node__ = Mock() + xsrc.HandleEvent(Mock()) + mock_open.assert_called_with("/test/foo.xml") + mock_open.return_value.read.assert_any_call() + self.assertXMLEqual(xsrc.__node__.call_args[0][0], xdata) + self.assertEqual(xsrc.__node__.call_args[0][1], dict()) + self.assertEqual(xsrc.pnode, xsrc.__node__.return_value) + self.assertEqual(xsrc.cache, None) + + @patch("Bcfg2.Server.Plugin.helpers.XMLSrc.HandleEvent") + def test_Cache(self, mock_HandleEvent): + xsrc = self.get_obj("/test/foo.xml") + metadata = Mock() + xsrc.Cache(metadata) + mock_HandleEvent.assert_any_call() + + xsrc.pnode = Mock() + xsrc.Cache(metadata) + xsrc.pnode.Match.assert_called_with(metadata, xsrc.__cacheobj__()) + self.assertEqual(xsrc.cache[0], metadata) + + xsrc.pnode.reset_mock() + xsrc.Cache(metadata) + self.assertFalse(xsrc.pnode.Mock.called) + self.assertEqual(xsrc.cache[0], metadata) + + xsrc.cache = ("bogus") + xsrc.Cache(metadata) + xsrc.pnode.Match.assert_called_with(metadata, 
xsrc.__cacheobj__()) + self.assertEqual(xsrc.cache[0], metadata) + + +class TestInfoXML(TestXMLSrc): + test_obj = InfoXML + + +class TestXMLDirectoryBacked(TestDirectoryBacked): + test_obj = XMLDirectoryBacked + testfiles = ['foo.xml', 'bar/baz.xml', 'plugh.plugh.xml'] + badpaths = ["foo", "foo.txt", "foo.xsd", "xml"] + + +class TestPrioDir(TestPlugin, TestGenerator, TestXMLDirectoryBacked): + test_obj = PrioDir + + @patch("Bcfg2.Server.Plugin.helpers.%s.add_directory_monitor" % + test_obj.__name__, + Mock()) + def get_obj(self, core=None): + if core is None: + core = Mock() + return self.test_obj(core, datastore) + + def test_HandleEvent(self): + TestXMLDirectoryBacked.test_HandleEvent(self) + + @patch("Bcfg2.Server.Plugin.helpers.XMLDirectoryBacked.HandleEvent", + Mock()) + def inner(): + pd = self.get_obj() + test1 = Mock() + test1.items = dict(Path=["/etc/foo.conf", "/etc/bar.conf"]) + test2 = Mock() + test2.items = dict(Path=["/etc/baz.conf"], + Package=["quux", "xyzzy"]) + pd.entries = {"/test1.xml": test1, + "/test2.xml": test2} + pd.HandleEvent(Mock()) + self.assertItemsEqual(pd.Entries, + dict(Path={"/etc/foo.conf": pd.BindEntry, + "/etc/bar.conf": pd.BindEntry, + "/etc/baz.conf": pd.BindEntry}, + Package={"quux": pd.BindEntry, + "xyzzy": pd.BindEntry})) + + inner() + + def test__matches(self): + pd = self.get_obj() + self.assertTrue(pd._matches(lxml.etree.Element("Test", + name="/etc/foo.conf"), + Mock(), + {"/etc/foo.conf": pd.BindEntry, + "/etc/bar.conf": pd.BindEntry})) + self.assertFalse(pd._matches(lxml.etree.Element("Test", + name="/etc/baz.conf"), + Mock(), + {"/etc/foo.conf": pd.BindEntry, + "/etc/bar.conf": pd.BindEntry})) + + def test_BindEntry(self): + pd = self.get_obj() + pd.get_attrs = Mock(return_value=dict(test1="test1", test2="test2")) + entry = lxml.etree.Element("Path", name="/etc/foo.conf", test1="bogus") + metadata = Mock() + pd.BindEntry(entry, metadata) + pd.get_attrs.assert_called_with(entry, metadata) + 
self.assertItemsEqual(entry.attrib, + dict(name="/etc/foo.conf", + test1="test1", test2="test2")) + + def test_get_attrs(self): + pd = self.get_obj() + entry = lxml.etree.Element("Path", name="/etc/foo.conf") + children = [lxml.etree.Element("Child")] + metadata = Mock() + pd.entries = dict() + + def reset(): + metadata.reset_mock() + for src in pd.entries.values(): + src.reset_mock() + src.cache = None + + # test with no matches + self.assertRaises(PluginExecutionError, + pd.get_attrs, entry, metadata) + + def add_entry(name, data, prio=10): + path = os.path.join(pd.data, name) + pd.entries[path] = Mock() + pd.entries[path].priority = prio + def do_Cache(metadata): + pd.entries[path].cache = (metadata, data) + pd.entries[path].Cache.side_effect = do_Cache + + add_entry('test1.xml', + dict(Path={'/etc/foo.conf': dict(attr="attr1", + __children__=children), + '/etc/bar.conf': dict()})) + add_entry('test2.xml', + dict(Path={'/etc/bar.conf': dict(__text__="text", + attr="attr1")}, + Package={'quux': dict(), + 'xyzzy': dict()}), + prio=20) + add_entry('test3.xml', + dict(Path={'/etc/baz.conf': dict()}, + Package={'xyzzy': dict()}), + prio=20) + + # test with exactly one match, __children__ + reset() + self.assertItemsEqual(pd.get_attrs(entry, metadata), + dict(attr="attr1")) + for src in pd.entries.values(): + src.Cache.assert_called_with(metadata) + self.assertEqual(len(entry.getchildren()), 1) + self.assertXMLEqual(entry.getchildren()[0], children[0]) + + # test with multiple matches with different priorities, __text__ + reset() + entry = lxml.etree.Element("Path", name="/etc/bar.conf") + self.assertItemsEqual(pd.get_attrs(entry, metadata), + dict(attr="attr1")) + for src in pd.entries.values(): + src.Cache.assert_called_with(metadata) + self.assertEqual(entry.text, "text") + + # test with multiple matches with identical priorities + reset() + entry = lxml.etree.Element("Package", name="xyzzy") + self.assertRaises(PluginExecutionError, + pd.get_attrs, entry, 
metadata) + + +class TestSpecificity(Bcfg2TestCase): + test_obj = Specificity + + def get_obj(self, **kwargs): + return self.test_obj(**kwargs) + + def test_matches(self): + metadata = Mock() + metadata.hostname = "foo.example.com" + metadata.groups = ["group1", "group2"] + self.assertTrue(self.get_obj(all=True).matches(metadata)) + self.assertTrue(self.get_obj(group="group1").matches(metadata)) + self.assertTrue(self.get_obj(hostname="foo.example.com").matches(metadata)) + self.assertFalse(self.get_obj().matches(metadata)) + self.assertFalse(self.get_obj(group="group3").matches(metadata)) + self.assertFalse(self.get_obj(hostname="bar.example.com").matches(metadata)) + + def test__cmp(self): + specs = [self.get_obj(all=True), + self.get_obj(group="group1", prio=10), + self.get_obj(group="group1", prio=20), + self.get_obj(hostname="foo.example.com")] + + for i in range(len(specs)): + for j in range(len(specs)): + if i == j: + self.assertEqual(0, specs[i].__cmp__(specs[j])) + self.assertEqual(0, specs[j].__cmp__(specs[i])) + elif i > j: + self.assertEqual(-1, specs[i].__cmp__(specs[j])) + self.assertEqual(1, specs[j].__cmp__(specs[i])) + elif i < j: + self.assertEqual(1, specs[i].__cmp__(specs[j])) + self.assertEqual(-1, specs[j].__cmp__(specs[i])) + + def test_cmp(self): + """ test __lt__/__gt__/__eq__ """ + specs = [self.get_obj(all=True), + self.get_obj(group="group1", prio=10), + self.get_obj(group="group1", prio=20), + self.get_obj(hostname="foo.example.com")] + + for i in range(len(specs)): + for j in range(len(specs)): + if i < j: + self.assertGreater(specs[i], specs[j]) + self.assertLess(specs[j], specs[i]) + self.assertGreaterEqual(specs[i], specs[j]) + self.assertLessEqual(specs[j], specs[i]) + elif i == j: + self.assertEqual(specs[i], specs[j]) + self.assertEqual(specs[j], specs[i]) + self.assertLessEqual(specs[i], specs[j]) + self.assertGreaterEqual(specs[j], specs[i]) + elif i > j: + self.assertLess(specs[i], specs[j]) + self.assertGreater(specs[j], 
specs[i]) + self.assertLessEqual(specs[i], specs[j]) + self.assertGreaterEqual(specs[j], specs[i]) + + +class TestSpecificData(Bcfg2TestCase): + test_obj = SpecificData + path = os.path.join(datastore, "test.txt") + + def get_obj(self, name=None, specific=None, encoding=None): + if name is None: + name = self.path + if specific is None: + specific = Mock() + return self.test_obj(name, specific, encoding) + + @patch("%s.open" % builtins) + def test_handle_event(self, mock_open): + event = Mock() + event.code2str.return_value = 'deleted' + sd = self.get_obj() + sd.handle_event(event) + self.assertFalse(mock_open.called) + if hasattr(sd, 'data'): + self.assertIsNone(sd.data) + else: + self.assertFalse(hasattr(sd, 'data')) + + event = Mock() + mock_open.return_value.read.return_value = "test" + sd.handle_event(event) + mock_open.assert_called_with(self.path) + mock_open.return_value.read.assert_any_call() + self.assertEqual(sd.data, "test") + + +class TestEntrySet(TestDebuggable): + test_obj = EntrySet + # filenames that should be matched successfully by the EntrySet + # 'specific' regex. 
these are filenames alone -- a specificity + # will be added to these + basenames = ["test", "test.py", "test with spaces.txt", + "test.multiple.dots.py", "test_underscores.and.dots", + "really_misleading.G10_test", + "name$with*regex(special){chars}", + "misleading.H_hostname.test.com"] + # filenames that do not match any of the basenames (or the + # basename regex, if applicable) + bogus_names = ["bogus"] + # filenames that should be ignored + ignore = ["foo~", ".#foo", ".foo.swp", ".foo.swx", + "test.txt.genshi_include", "test.G_foo.genshi_include"] + + + def get_obj(self, basename="test", path=datastore, entry_type=MagicMock(), + encoding=None): + return self.test_obj(basename, path, entry_type, encoding) + + def test__init(self): + for basename in self.basenames: + eset = self.get_obj(basename=basename) + self.assertIsInstance(eset.specific, re_type) + self.assertTrue(eset.specific.match(os.path.join(datastore, + basename))) + ppath = os.path.join(datastore, "Plugin", basename) + self.assertTrue(eset.specific.match(ppath)) + self.assertTrue(eset.specific.match(ppath + ".G20_foo")) + self.assertTrue(eset.specific.match(ppath + ".G1_foo")) + self.assertTrue(eset.specific.match(ppath + ".G32768_foo")) + # a group named '_' + self.assertTrue(eset.specific.match(ppath + ".G10__")) + self.assertTrue(eset.specific.match(ppath + ".H_hostname")) + self.assertTrue(eset.specific.match(ppath + ".H_fqdn.subdomain.example.com")) + self.assertTrue(eset.specific.match(ppath + ".G20_group_with_underscores")) + + self.assertFalse(eset.specific.match(ppath + ".G20_group with spaces")) + self.assertFalse(eset.specific.match(ppath + ".G_foo")) + self.assertFalse(eset.specific.match(ppath + ".G_")) + self.assertFalse(eset.specific.match(ppath + ".G20_")) + self.assertFalse(eset.specific.match(ppath + ".H_")) + + for bogus in self.bogus_names: + self.assertFalse(eset.specific.match(os.path.join(datastore, + "Plugin", + bogus))) + + for ignore in self.ignore: + 
self.assertTrue(eset.ignore.match(ignore)) + + self.assertFalse(eset.ignore.match(basename)) + self.assertFalse(eset.ignore.match(basename + ".G20_foo")) + self.assertFalse(eset.ignore.match(basename + ".G1_foo")) + self.assertFalse(eset.ignore.match(basename + ".G32768_foo")) + self.assertFalse(eset.ignore.match(basename + ".G10__")) + self.assertFalse(eset.ignore.match(basename + ".H_hostname")) + self.assertFalse(eset.ignore.match(basename + ".H_fqdn.subdomain.example.com")) + self.assertFalse(eset.ignore.match(basename + ".G20_group_with_underscores")) + + def test_get_matching(self): + items = {0: Mock(), 1: Mock(), 2: Mock(), 3: Mock(), 4: Mock(), + 5: Mock()} + items[0].specific.matches.return_value = False + items[1].specific.matches.return_value = True + items[2].specific.matches.return_value = False + items[3].specific.matches.return_value = False + items[4].specific.matches.return_value = True + items[5].specific.matches.return_value = True + metadata = Mock() + eset = self.get_obj() + eset.entries = items + self.assertItemsEqual(eset.get_matching(metadata), + [items[1], items[4], items[5]]) + for i in items.values(): + i.specific.matches.assert_called_with(metadata) + + @patch("Bcfg2.Server.Plugin.helpers.%s.get_matching" % test_obj.__name__) + def test_best_matching(self, mock_get_matching): + eset = self.get_obj() + metadata = Mock() + matching = [] + + def reset(): + mock_get_matching.reset_mock() + metadata.reset_mock() + for m in matching: + m.reset_mock() + + def specific(all=False, group=False, prio=None, hostname=False): + spec = Mock() + spec.specific = Specificity(all=all, group=group, prio=prio, + hostname=hostname) + return spec + + self.assertRaises(PluginExecutionError, + eset.best_matching, metadata, matching=[]) + + reset() + mock_get_matching.return_value = matching + self.assertRaises(PluginExecutionError, + eset.best_matching, metadata) + mock_get_matching.assert_called_with(metadata) + + # test with a single file for all + reset() + 
expected = specific(all=True) + matching.append(expected) + mock_get_matching.return_value = matching + self.assertEqual(eset.best_matching(metadata), expected) + mock_get_matching.assert_called_with(metadata) + + # test with a single group-specific file + reset() + expected = specific(group=True, prio=10) + matching.append(expected) + mock_get_matching.return_value = matching + self.assertEqual(eset.best_matching(metadata), expected) + mock_get_matching.assert_called_with(metadata) + + # test with multiple group-specific files + reset() + expected = specific(group=True, prio=20) + matching.append(expected) + mock_get_matching.return_value = matching + self.assertEqual(eset.best_matching(metadata), expected) + mock_get_matching.assert_called_with(metadata) + + # test with host-specific file + reset() + expected = specific(hostname=True) + matching.append(expected) + mock_get_matching.return_value = matching + self.assertEqual(eset.best_matching(metadata), expected) + mock_get_matching.assert_called_with(metadata) + + @patch("Bcfg2.Server.Plugin.helpers.%s.entry_init" % test_obj.__name__) + @patch("Bcfg2.Server.Plugin.helpers.%s.reset_metadata" % test_obj.__name__) + @patch("Bcfg2.Server.Plugin.helpers.%s.update_metadata" % test_obj.__name__) + def test_handle_event(self, mock_update_md, mock_reset_md, mock_init): + def reset(): + mock_update_md.reset_mock() + mock_reset_md.reset_mock() + mock_init.reset_mock() + + eset = self.get_obj() + for fname in ["info", "info.xml", ":info"]: + for evt in ["exists", "created", "changed"]: + reset() + event = Mock() + event.code2str.return_value = evt + event.filename = fname + eset.handle_event(event) + mock_update_md.assert_called_with(event) + self.assertFalse(mock_init.called) + self.assertFalse(mock_reset_md.called) + + reset() + event = Mock() + event.code2str.return_value = "deleted" + event.filename = fname + eset.handle_event(event) + mock_reset_md.assert_called_with(event) + self.assertFalse(mock_init.called) + 
self.assertFalse(mock_update_md.called) + + for evt in ["exists", "created", "changed"]: + reset() + event = Mock() + event.code2str.return_value = evt + event.filename = "test.txt" + eset.handle_event(event) + mock_init.assert_called_with(event) + self.assertFalse(mock_reset_md.called) + self.assertFalse(mock_update_md.called) + + reset() + entry = Mock() + eset.entries["test.txt"] = entry + event = Mock() + event.code2str.return_value = "changed" + event.filename = "test.txt" + eset.handle_event(event) + entry.handle_event.assert_called_with(event) + self.assertFalse(mock_init.called) + self.assertFalse(mock_reset_md.called) + self.assertFalse(mock_update_md.called) + + reset() + entry = Mock() + eset.entries["test.txt"] = entry + event = Mock() + event.code2str.return_value = "deleted" + event.filename = "test.txt" + eset.handle_event(event) + self.assertNotIn("test.txt", eset.entries) + + @patch("Bcfg2.Server.Plugin.helpers.%s.specificity_from_filename" % + test_obj.__name__) + def test_entry_init(self, mock_spec): + eset = self.get_obj() + + def reset(): + eset.entry_type.reset_mock() + mock_spec.reset_mock() + + event = Mock() + event.code2str.return_value = "created" + event.filename = "test.txt" + eset.entry_init(event) + mock_spec.assert_called_with("test.txt", specific=None) + eset.entry_type.assert_called_with(os.path.join(eset.path, "test.txt"), + mock_spec.return_value, None) + eset.entry_type.return_value.handle_event.assert_called_with(event) + self.assertIn("test.txt", eset.entries) + + # test duplicate add + reset() + eset.entry_init(event) + self.assertFalse(mock_spec.called) + self.assertFalse(eset.entry_type.called) + eset.entries["test.txt"].handle_event.assert_called_with(event) + + # test keyword args + etype = Mock() + specific = Mock() + event = Mock() + event.code2str.return_value = "created" + event.filename = "test2.txt" + eset.entry_init(event, entry_type=etype, specific=specific) + mock_spec.assert_called_with("test2.txt", 
specific=specific) + etype.assert_called_with(os.path.join(eset.path, "test2.txt"), + mock_spec.return_value, None) + etype.return_value.handle_event.assert_called_with(event) + self.assertIn("test2.txt", eset.entries) + + # test specificity error + event = Mock() + event.code2str.return_value = "created" + event.filename = "test3.txt" + mock_spec.side_effect = SpecificityError + eset.entry_init(event) + mock_spec.assert_called_with("test3.txt", specific=None) + self.assertFalse(eset.entry_type.called) + + @patch("Bcfg2.Server.Plugin.helpers.Specificity") + def test_specificity_from_filename(self, mock_spec): + def test(eset, fname, **kwargs): + mock_spec.reset_mock() + if "specific" in kwargs: + specific = kwargs['specific'] + del kwargs['specific'] + else: + specific = None + self.assertEqual(eset.specificity_from_filename(fname, + specific=specific), + mock_spec.return_value) + mock_spec.assert_called_with(**kwargs) + + def fails(eset, fname, specific=None): + mock_spec.reset_mock() + self.assertRaises(SpecificityError, + eset.specificity_from_filename, fname, + specific=specific) + + for basename in self.basenames: + eset = self.get_obj(basename=basename) + ppath = os.path.join(datastore, "Plugin", basename) + test(eset, ppath, all=True) + test(eset, ppath + ".G20_foo", group="foo", prio=20) + test(eset, ppath + ".G1_foo", group="foo", prio=1) + test(eset, ppath + ".G32768_foo", group="foo", prio=32768) + test(eset, ppath + ".G10__", group="_", prio=10) + test(eset, ppath + ".H_hostname", hostname="hostname") + test(eset, ppath + ".H_fqdn.subdomain.example.com", + hostname="fqdn.subdomain.example.com") + test(eset, ppath + ".G20_group_with_underscores", + group="group_with_underscores", prio=20) + + for bogus in self.bogus_names: + fails(eset, bogus) + fails(eset, ppath + ".G_group with spaces") + fails(eset, ppath + ".G_foo") + fails(eset, ppath + ".G_") + fails(eset, ppath + ".G20_") + fails(eset, ppath + ".H_") + + @patch("%s.open" % builtins) + 
@patch("Bcfg2.Server.Plugin.helpers.InfoXML") + def test_update_metadata(self, mock_InfoXML, mock_open): + eset = self.get_obj() + + # add info.xml + event = Mock() + event.filename = "info.xml" + eset.update_metadata(event) + mock_InfoXML.assert_called_with(os.path.join(eset.path, "info.xml")) + mock_InfoXML.return_value.HandleEvent.assert_called_with(event) + self.assertEqual(eset.infoxml, mock_InfoXML.return_value) + + # modify info.xml + mock_InfoXML.reset_mock() + eset.update_metadata(event) + self.assertFalse(mock_InfoXML.called) + eset.infoxml.HandleEvent.assert_called_with(event) + + for fname in [':info', 'info']: + event = Mock() + event.filename = fname + + idata = ["owner:owner", + "group: GROUP", + "perms: 775", + "important: true", + "bogus: line"] + mock_open.return_value.readlines.return_value = idata + eset.update_metadata(event) + expected = default_file_metadata.copy() + expected['owner'] = 'owner' + expected['group'] = 'GROUP' + expected['perms'] = '0775' + expected['important'] = 'true' + self.assertItemsEqual(eset.metadata, + expected) + + def test_reset_metadata(self): + eset = self.get_obj() + + # test info.xml + event = Mock() + event.filename = "info.xml" + eset.infoxml = Mock() + eset.reset_metadata(event) + self.assertIsNone(eset.infoxml) + + for fname in [':info', 'info']: + event = Mock() + event.filename = fname + eset.metadata = Mock() + eset.reset_metadata(event) + self.assertItemsEqual(eset.metadata, default_file_metadata) + + @patch("Bcfg2.Server.Plugin.helpers.bind_info") + def test_bind_info_to_entry(self, mock_bind_info): + eset = self.get_obj() + entry = Mock() + metadata = Mock() + eset.bind_info_to_entry(entry, metadata) + mock_bind_info.assert_called_with(entry, metadata, + infoxml=eset.infoxml, + default=eset.metadata) + + @patch("Bcfg2.Server.Plugin.helpers.%s.best_matching" % test_obj.__name__) + @patch("Bcfg2.Server.Plugin.helpers.%s.bind_info_to_entry" % + test_obj.__name__) + def test_bind_entry(self, mock_bind_info, 
mock_best_matching): + eset = self.get_obj() + entry = Mock() + metadata = Mock() + eset.bind_entry(entry, metadata) + mock_bind_info.assert_called_with(entry, metadata) + mock_best_matching.assert_called_with(metadata) + mock_best_matching.return_value.bind_entry.assert_called_with(entry, + metadata) + + +class TestGroupSpool(TestPlugin, TestGenerator): + test_obj = GroupSpool + + @patch("Bcfg2.Server.Plugin.helpers.%s.AddDirectoryMonitor" % + test_obj.__name__) + def get_obj(self, core=None): + return TestPlugin.get_obj(self, core=core) + + @patch("Bcfg2.Server.Plugin.helpers.%s.AddDirectoryMonitor" % + test_obj.__name__) + def test__init(self, mock_Add): + core = Mock() + gs = self.test_obj(core, datastore) + mock_Add.assert_called_with('') + self.assertItemsEqual(gs.Entries, {gs.entry_type: {}}) + + @patch("os.path.isdir") + @patch("os.path.isfile") + @patch("Bcfg2.Server.Plugin.helpers.%s.event_id" % test_obj.__name__) + @patch("Bcfg2.Server.Plugin.helpers.%s.event_path" % test_obj.__name__) + @patch("Bcfg2.Server.Plugin.helpers.%s.AddDirectoryMonitor" % + test_obj.__name__) + def test_add_entry(self, mock_Add, mock_event_path, mock_event_id, + mock_isfile, mock_isdir): + gs = self.get_obj() + gs.es_cls = Mock() + gs.es_child_cls = Mock() + + def reset(): + gs.es_cls.reset_mock() + gs.es_child_cls.reset_mock() + mock_Add.reset_mock() + mock_event_path.reset_mock() + mock_event_id.reset_mock() + mock_isfile.reset_mock() + mock_isdir.reset_mock() + + # directory + event = Mock() + event.filename = "foo" + basedir = "test" + epath = os.path.join(gs.data, basedir, event.filename) + ident = os.path.join(basedir, event.filename) + mock_event_path.return_value = epath + mock_event_id.return_value = ident + mock_isdir.return_value = True + mock_isfile.return_value = False + gs.add_entry(event) + mock_Add.assert_called_with(os.path.join("/" + basedir, event.filename)) + self.assertNotIn(ident, gs.entries) + mock_isdir.assert_called_with(epath) + + # file that is not in 
self.entries + reset() + event = Mock() + event.filename = "foo" + basedir = "test/foo/" + epath = os.path.join(gs.data, basedir, event.filename) + ident = basedir[:-1] + mock_event_path.return_value = epath + mock_event_id.return_value = ident + mock_isdir.return_value = False + mock_isfile.return_value = True + gs.add_entry(event) + self.assertFalse(mock_Add.called) + gs.es_cls.assert_called_with(gs.filename_pattern, + gs.data + ident, + gs.es_child_cls, + gs.encoding) + self.assertIn(ident, gs.entries) + self.assertEqual(gs.entries[ident], gs.es_cls.return_value) + self.assertIn(ident, gs.Entries[gs.entry_type]) + self.assertEqual(gs.Entries[gs.entry_type][ident], + gs.es_cls.return_value.bind_entry) + gs.entries[ident].handle_event.assert_called_with(event) + mock_isfile.assert_called_with(epath) + + # file that is in self.entries + reset() + gs.add_entry(event) + self.assertFalse(mock_Add.called) + self.assertFalse(gs.es_cls.called) + gs.entries[ident].handle_event.assert_called_with(event) + + def test_event_path(self): + gs = self.get_obj() + gs.handles[1] = "/var/lib/foo/" + gs.handles[2] = "/etc/foo/" + gs.handles[3] = "/usr/share/foo/" + event = Mock() + event.filename = "foo" + for i in range(1, 4): + event.requestID = i + self.assertEqual(gs.event_path(event), + os.path.join(datastore, gs.name, + gs.handles[event.requestID].lstrip('/'), + event.filename)) + + @patch("os.path.isdir") + @patch("Bcfg2.Server.Plugin.helpers.%s.event_path" % test_obj.__name__) + def test_event_id(self, mock_event_path, mock_isdir): + gs = self.get_obj() + + def reset(): + mock_event_path.reset_mock() + mock_isdir.reset_mock() + + gs.handles[1] = "/var/lib/foo/" + gs.handles[2] = "/etc/foo/" + gs.handles[3] = "/usr/share/foo/" + event = Mock() + event.filename = "foo" + for i in range(1, 4): + event.requestID = i + reset() + mock_isdir.return_value = True + self.assertEqual(gs.event_id(event), + os.path.join(gs.handles[event.requestID].lstrip('/'), + event.filename)) + 
mock_isdir.assert_called_with(mock_event_path.return_value) + + reset() + mock_isdir.return_value = False + self.assertEqual(gs.event_id(event), + gs.handles[event.requestID].rstrip('/')) + mock_isdir.assert_called_with(mock_event_path.return_value) + + def test_toggle_debug(self): + gs = self.get_obj() + gs.entries = {"/foo": Mock(), + "/bar": Mock(), + "/baz/quux": Mock()} + + @patch("Bcfg2.Server.Plugin.base.Plugin.toggle_debug") + def inner(mock_debug): + gs.toggle_debug() + mock_debug.assert_called_with(gs) + for entry in gs.entries.values(): + entry.toggle_debug.assert_any_call() + + inner() + + TestPlugin.test_toggle_debug(self) + + def test_HandleEvent(self): + gs = self.get_obj() + gs.entries = {"/foo": Mock(), + "/bar": Mock(), + "/baz": Mock(), + "/baz/quux": Mock()} + for path in gs.entries.keys(): + gs.Entries[gs.entry_type] = {path: Mock()} + gs.handles = {1: "/foo/", + 2: "/bar/", + 3: "/baz/", + 4: "/baz/quux"} + + gs.add_entry = Mock() + gs.event_id = Mock() + + def reset(): + gs.add_entry.reset_mock() + gs.event_id.reset_mock() + for entry in gs.entries.values(): + entry.reset_mock() + + # test event creation, changing entry that doesn't exist + for evt in ["exists", "created", "changed"]: + reset() + event = Mock() + event.filename = "foo" + event.requestID = 1 + event.code2str.return_value = evt + gs.HandleEvent(event) + gs.event_id.assert_called_with(event) + gs.add_entry.assert_called_with(event) + + # test deleting entry, changing entry that does exist + for evt in ["changed", "deleted"]: + reset() + event = Mock() + event.filename = "quux" + event.requestID = 4 + event.code2str.return_value = evt + gs.event_id.return_value = "/baz/quux" + gs.HandleEvent(event) + gs.event_id.assert_called_with(event) + self.assertIn(gs.event_id.return_value, gs.entries) + gs.entries[gs.event_id.return_value].handle_event.assert_called_with(event) + self.assertFalse(gs.add_entry.called) + + # test deleting directory + reset() + event = Mock() + event.filename 
= "quux" + event.requestID = 3 + event.code2str.return_value = "deleted" + gs.event_id.return_value = "/baz/quux" + gs.HandleEvent(event) + gs.event_id.assert_called_with(event) + self.assertNotIn("/baz/quux", gs.entries) + self.assertNotIn("/baz/quux", gs.Entries[gs.entry_type]) + + + diff --git a/testsuite/Testsrc/Testlib/TestServer/TestPlugin/Testinterfaces.py b/testsuite/Testsrc/Testlib/TestServer/TestPlugin/Testinterfaces.py new file mode 100644 index 000000000..01d7db067 --- /dev/null +++ b/testsuite/Testsrc/Testlib/TestServer/TestPlugin/Testinterfaces.py @@ -0,0 +1,342 @@ +import os +import sys +import lxml.etree +import Bcfg2.Server +from mock import Mock, MagicMock, patch +from Bcfg2.Server.Plugin.interfaces import * + +# add all parent testsuite directories to sys.path to allow (most) +# relative imports in python 2.4 +path = os.path.dirname(__file__) +while path != '/': + if os.path.basename(path).lower().startswith("test"): + sys.path.append(path) + if os.path.basename(path) == "testsuite": + break + path = os.path.dirname(path) +from common import call, builtins, skip, skipIf, skipUnless, Bcfg2TestCase, \ + patchIf, datastore +from Testbase import TestPlugin + +class TestGenerator(Bcfg2TestCase): + test_obj = Generator + + def test_HandlesEntry(self): + pass + + def test_HandleEntry(self): + pass + + +class TestStructure(Bcfg2TestCase): + test_obj = Structure + + def get_obj(self): + return self.test_obj() + + def test_BuildStructures(self): + s = self.get_obj() + self.assertRaises(NotImplementedError, + s.BuildStructures, None) + + +class TestMetadata(Bcfg2TestCase): + test_obj = Metadata + + def get_obj(self): + return self.test_obj() + + def test_AuthenticateConnection(self): + m = self.get_obj() + self.assertRaises(NotImplementedError, + m.AuthenticateConnection, + None, None, None, (None, None)) + + def test_get_initial_metadata(self): + m = self.get_obj() + self.assertRaises(NotImplementedError, + m.get_initial_metadata, None) + + def 
test_merge_additional_data(self): + m = self.get_obj() + self.assertRaises(NotImplementedError, + m.merge_additional_data, None, None, None) + + def test_merge_additional_groups(self): + m = self.get_obj() + self.assertRaises(NotImplementedError, + m.merge_additional_groups, None, None) + + +class TestConnector(Bcfg2TestCase): + """ placeholder """ + def test_get_additional_groups(self): + pass + + def test_get_additional_data(self): + pass + + +class TestProbing(Bcfg2TestCase): + test_obj = Probing + + def get_obj(self): + return self.test_obj() + + def test_GetProbes(self): + p = self.get_obj() + self.assertRaises(NotImplementedError, + p.GetProbes, None) + + def test_ReceiveData(self): + p = self.get_obj() + self.assertRaises(NotImplementedError, + p.ReceiveData, None, None) + + +class TestStatistics(TestPlugin): + test_obj = Statistics + + def get_obj(self, core=None): + if core is None: + core = Mock() + return self.test_obj(core, datastore) + + def test_process_statistics(self): + s = self.get_obj() + self.assertRaises(NotImplementedError, + s.process_statistics, None, None) + + +class TestThreadedStatistics(TestStatistics): + test_obj = ThreadedStatistics + data = [("foo.example.com", ""), + ("bar.example.com", "")] + + @patch("threading.Thread.start") + def test__init(self, mock_start): + core = Mock() + ts = self.get_obj(core) + mock_start.assert_any_call() + + @patch("%s.open" % builtins) + @patch("%s.dump" % cPickle.__name__) + @patch("Bcfg2.Server.Plugin.interfaces.ThreadedStatistics.run", Mock()) + def test_save(self, mock_dump, mock_open): + core = Mock() + ts = self.get_obj(core) + queue = Mock() + queue.empty = Mock(side_effect=Empty) + ts.work_queue = queue + + mock_open.side_effect = OSError + # test that save does _not_ raise an exception even when + # everything goes pear-shaped + ts._save() + queue.empty.assert_any_call() + mock_open.assert_called_with(ts.pending_file, 'w') + + queue.reset_mock() + mock_open.reset_mock() + + queue.data = [] + 
for hostname, xml in self.data: + md = Mock() + md.hostname = hostname + queue.data.append((md, lxml.etree.XML(xml))) + queue.empty.side_effect = lambda: len(queue.data) == 0 + queue.get_nowait = Mock(side_effect=lambda: queue.data.pop()) + mock_open.side_effect = None + + ts._save() + queue.empty.assert_any_call() + queue.get_nowait.assert_any_call() + mock_open.assert_called_with(ts.pending_file, 'w') + mock_open.return_value.close.assert_any_call() + # the order of the queue data gets changed, so we have to + # verify this call in an ugly way + self.assertItemsEqual(mock_dump.call_args[0][0], self.data) + self.assertEqual(mock_dump.call_args[0][1], mock_open.return_value) + + @patch("os.unlink") + @patch("os.path.exists") + @patch("%s.open" % builtins) + @patch("lxml.etree.XML") + @patch("%s.load" % cPickle.__name__) + @patch("Bcfg2.Server.Plugin.interfaces.ThreadedStatistics.run", Mock()) + def test_load(self, mock_load, mock_XML, mock_open, mock_exists, + mock_unlink): + core = Mock() + core.terminate.isSet.return_value = False + ts = self.get_obj(core) + + ts.work_queue = Mock() + ts.work_queue.data = [] + def reset(): + core.reset_mock() + mock_open.reset_mock() + mock_exists.reset_mock() + mock_unlink.reset_mock() + mock_load.reset_mock() + mock_XML.reset_mock() + ts.work_queue.reset_mock() + ts.work_queue.data = [] + + mock_exists.return_value = False + self.assertTrue(ts._load()) + mock_exists.assert_called_with(ts.pending_file) + + reset() + mock_exists.return_value = True + mock_open.side_effect = OSError + self.assertFalse(ts._load()) + mock_exists.assert_called_with(ts.pending_file) + mock_open.assert_called_with(ts.pending_file, 'r') + + reset() + mock_open.side_effect = None + mock_load.return_value = self.data + ts.work_queue.put_nowait.side_effect = Full + self.assertTrue(ts._load()) + mock_exists.assert_called_with(ts.pending_file) + mock_open.assert_called_with(ts.pending_file, 'r') + mock_open.return_value.close.assert_any_call() + 
mock_load.assert_called_with(mock_open.return_value) + + reset() + core.build_metadata.side_effect = lambda x: x + mock_XML.side_effect = lambda x, parser=None: x + ts.work_queue.put_nowait.side_effect = None + self.assertTrue(ts._load()) + mock_exists.assert_called_with(ts.pending_file) + mock_open.assert_called_with(ts.pending_file, 'r') + mock_open.return_value.close.assert_any_call() + mock_load.assert_called_with(mock_open.return_value) + self.assertItemsEqual(mock_XML.call_args_list, + [call(x, parser=Bcfg2.Server.XMLParser) + for h, x in self.data]) + self.assertItemsEqual(ts.work_queue.put_nowait.call_args_list, + [call((h, x)) for h, x in self.data]) + mock_unlink.assert_called_with(ts.pending_file) + + @patch("threading.Thread.start", Mock()) + @patch("Bcfg2.Server.Plugin.interfaces.ThreadedStatistics._load") + @patch("Bcfg2.Server.Plugin.interfaces.ThreadedStatistics._save") + @patch("Bcfg2.Server.Plugin.interfaces.ThreadedStatistics.handle_statistic") + def test_run(self, mock_handle, mock_save, mock_load): + core = Mock() + ts = self.get_obj(core) + mock_load.return_value = True + ts.work_queue = Mock() + + def reset(): + mock_handle.reset_mock() + mock_save.reset_mock() + mock_load.reset_mock() + core.reset_mock() + ts.work_queue.reset_mock() + ts.work_queue.data = self.data[:] + ts.work_queue.get_calls = 0 + + reset() + + def get_rv(**kwargs): + ts.work_queue.get_calls += 1 + try: + return ts.work_queue.data.pop() + except: + raise Empty + ts.work_queue.get.side_effect = get_rv + def terminate_isset(): + # this lets the loop go on a few iterations with an empty + # queue to test that it doesn't error out + return ts.work_queue.get_calls > 3 + core.terminate.isSet.side_effect = terminate_isset + + ts.work_queue.empty.return_value = False + ts.run() + mock_load.assert_any_call() + self.assertGreaterEqual(ts.work_queue.get.call_count, len(self.data)) + self.assertItemsEqual(mock_handle.call_args_list, + [call(h, x) for h, x in self.data]) + 
mock_save.assert_any_call() + + @patch("copy.copy", Mock(side_effect=lambda x: x)) + @patch("Bcfg2.Server.Plugin.interfaces.ThreadedStatistics.run", Mock()) + def test_process_statistics(self): + core = Mock() + ts = self.get_obj(core) + ts.work_queue = Mock() + ts.process_statistics(*self.data[0]) + ts.work_queue.put_nowait.assert_called_with(self.data[0]) + + ts.work_queue.reset_mock() + ts.work_queue.put_nowait.side_effect = Full + # test that no exception is thrown + ts.process_statistics(*self.data[0]) + + def test_handle_statistic(self): + ts = self.get_obj() + self.assertRaises(NotImplementedError, + ts.handle_statistic, None, None) + + +class TestPullSource(Bcfg2TestCase): + def test_GetCurrentEntry(self): + ps = PullSource() + self.assertRaises(NotImplementedError, + ps.GetCurrentEntry, None, None, None) + + +class TestPullTarget(Bcfg2TestCase): + def test_AcceptChoices(self): + pt = PullTarget() + self.assertRaises(NotImplementedError, + pt.AcceptChoices, None, None) + + def test_AcceptPullData(self): + pt = PullTarget() + self.assertRaises(NotImplementedError, + pt.AcceptPullData, None, None, None) + + +class TestDecision(Bcfg2TestCase): + test_obj = Decision + + def get_obj(self): + return self.test_obj() + + def test_GetDecisions(self): + d = self.get_obj() + self.assertRaises(NotImplementedError, + d.GetDecisions, None, None) + + +class TestStructureValidator(Bcfg2TestCase): + def test_validate_structures(self): + sv = StructureValidator() + self.assertRaises(NotImplementedError, + sv.validate_structures, None, None) + + +class TestGoalValidator(Bcfg2TestCase): + def test_validate_goals(self): + gv = GoalValidator() + self.assertRaises(NotImplementedError, + gv.validate_goals, None, None) + + +class TestVersion(Bcfg2TestCase): + test_obj = Version + + def get_obj(self): + return self.test_obj() + + def test_get_revision(self): + d = self.get_obj() + self.assertRaises(NotImplementedError, d.get_revision) + + +class TestClientRunHooks(Bcfg2TestCase): + 
""" placeholder for future tests """ + pass diff --git a/testsuite/Testsrc/Testlib/TestServer/TestPlugin/__init__.py b/testsuite/Testsrc/Testlib/TestServer/TestPlugin/__init__.py new file mode 100644 index 000000000..d86cf1079 --- /dev/null +++ b/testsuite/Testsrc/Testlib/TestServer/TestPlugin/__init__.py @@ -0,0 +1,17 @@ +import os +import sys + +# add all parent testsuite directories to sys.path to allow (most) +# relative imports in python 2.4 +path = os.path.dirname(__file__) +while path != "/": + if os.path.basename(path).lower().startswith("test"): + sys.path.append(path) + if os.path.basename(path) == "testsuite": + break + path = os.path.dirname(path) + +from Testbase import * +from Testinterfaces import * +from Testhelpers import * +from Testexceptions import * -- cgit v1.2.3-1-g7c22