summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorChris St. Pierre <chris.a.st.pierre@gmail.com>2012-08-15 08:21:45 -0400
committerChris St. Pierre <chris.a.st.pierre@gmail.com>2012-08-15 08:21:45 -0400
commitb455dafd90b9710020f798cd73d63cd049685695 (patch)
tree0456007832aa19bc5d8bea84cbb647492926c7e2
parent06a2efea2f666b94eaf7e74bda798ed261fc47de (diff)
parente911b57eb38dfa0fc06d19e70e02e121ae721e57 (diff)
downloadbcfg2-b455dafd90b9710020f798cd73d63cd049685695.tar.gz
bcfg2-b455dafd90b9710020f798cd73d63cd049685695.tar.bz2
bcfg2-b455dafd90b9710020f798cd73d63cd049685695.zip
Merge branch 'tests'
-rw-r--r--src/lib/Bcfg2/Bcfg2Py3k.py5
-rw-r--r--src/lib/Bcfg2/Server/Admin/Reports.py4
-rw-r--r--src/lib/Bcfg2/Server/Admin/Syncdb.py2
-rw-r--r--src/lib/Bcfg2/Server/Core.py25
-rw-r--r--src/lib/Bcfg2/Server/Plugin.py395
-rw-r--r--src/lib/Bcfg2/Server/Plugins/DBStats.py4
-rw-r--r--src/lib/Bcfg2/Server/Plugins/Decisions.py6
-rw-r--r--src/lib/Bcfg2/Server/Plugins/Deps.py21
-rw-r--r--src/lib/Bcfg2/Server/Plugins/FileProbes.py4
-rw-r--r--src/lib/Bcfg2/Server/Plugins/Metadata.py81
-rw-r--r--src/lib/Bcfg2/Server/Plugins/Packages/Apt.py7
-rw-r--r--src/lib/Bcfg2/Server/Plugins/Packages/Pac.py7
-rw-r--r--src/lib/Bcfg2/Server/Plugins/Packages/PackagesSources.py11
-rw-r--r--src/lib/Bcfg2/Server/Plugins/Probes.py11
-rw-r--r--src/lib/Bcfg2/Server/Plugins/Snapshots.py6
-rw-r--r--src/lib/Bcfg2/Server/Plugins/Statistics.py4
-rw-r--r--src/lib/Bcfg2/Server/Plugins/TemplateHelper.py7
-rwxr-xr-xsrc/sbin/bcfg2-info2
-rwxr-xr-xsrc/sbin/bcfg2-yum-helper16
-rw-r--r--testsuite/Testlib/TestOptions.py9
-rw-r--r--testsuite/Testlib/TestServer/TestPlugin.py2183
-rw-r--r--testsuite/Testlib/TestServer/TestPlugins/TestMetadata.py355
-rw-r--r--testsuite/Testlib/TestServer/TestPlugins/TestProbes.py116
-rw-r--r--testsuite/Testlib/TestServer/TestPlugins/__init__.py0
-rw-r--r--testsuite/Testlib/TestServer/__init__.py0
-rw-r--r--testsuite/Testlib/__init__.py0
-rw-r--r--testsuite/__init__.py0
-rw-r--r--testsuite/common.py77
28 files changed, 2786 insertions, 572 deletions
diff --git a/src/lib/Bcfg2/Bcfg2Py3k.py b/src/lib/Bcfg2/Bcfg2Py3k.py
index 7b0c6a2b5..7fce94789 100644
--- a/src/lib/Bcfg2/Bcfg2Py3k.py
+++ b/src/lib/Bcfg2/Bcfg2Py3k.py
@@ -88,11 +88,6 @@ except NameError:
import functools
reduce = functools.reduce
-if sys.hexversion >= 0x03000000:
- from io import FileIO as file
-else:
- file = file
-
try:
from collections import MutableMapping
except ImportError:
diff --git a/src/lib/Bcfg2/Server/Admin/Reports.py b/src/lib/Bcfg2/Server/Admin/Reports.py
index 175b99d1d..335d6a1e7 100644
--- a/src/lib/Bcfg2/Server/Admin/Reports.py
+++ b/src/lib/Bcfg2/Server/Admin/Reports.py
@@ -109,7 +109,7 @@ class Reports(Bcfg2.Server.Admin.Mode):
try:
update_database()
except UpdaterError:
- print "Update failed"
+ print("Update failed")
raise SystemExit(-1)
elif args[0] == 'load_stats':
quick = '-O3' in args
@@ -121,7 +121,7 @@ class Reports(Bcfg2.Server.Admin.Mode):
if stats_file[0] == '-':
self.errExit("Invalid statistics file: %s" % stats_file)
elif args[i] == '-c' or args[i] == '--clients-file':
- print "DeprecationWarning: %s is no longer used" % args[i]
+ print("DeprecationWarning: %s is no longer used" % args[i])
i = i + 1
self.load_stats(stats_file, self.log.getEffectiveLevel() > logging.WARNING, quick)
elif args[0] == 'purge':
diff --git a/src/lib/Bcfg2/Server/Admin/Syncdb.py b/src/lib/Bcfg2/Server/Admin/Syncdb.py
index c9bba0810..72d3d469e 100644
--- a/src/lib/Bcfg2/Server/Admin/Syncdb.py
+++ b/src/lib/Bcfg2/Server/Admin/Syncdb.py
@@ -35,5 +35,5 @@ class Syncdb(Bcfg2.Server.Admin.Mode):
try:
update_database()
except UpdaterError:
- print "Update failed"
+ print("Update failed")
raise SystemExit(-1)
diff --git a/src/lib/Bcfg2/Server/Core.py b/src/lib/Bcfg2/Server/Core.py
index 0676b1d8f..660d2c3ef 100644
--- a/src/lib/Bcfg2/Server/Core.py
+++ b/src/lib/Bcfg2/Server/Core.py
@@ -10,15 +10,10 @@ import time
import inspect
import lxml.etree
from traceback import format_exc
-
-# this must be set before we import the Metadata plugin
-os.environ['DJANGO_SETTINGS_MODULE'] = 'Bcfg2.settings'
-
import Bcfg2.settings
import Bcfg2.Server
import Bcfg2.Logger
import Bcfg2.Server.FileMonitor
-import Bcfg2.Server.Plugins.Metadata
from Bcfg2.Bcfg2Py3k import xmlrpclib
from Bcfg2.Server.Plugin import PluginInitError, PluginExecutionError
@@ -31,6 +26,8 @@ try:
except:
pass
+os.environ['DJANGO_SETTINGS_MODULE'] = 'Bcfg2.settings'
+
def exposed(func):
func.exposed = True
return func
@@ -357,7 +354,7 @@ class BaseCore(object):
revision=self.revision)
try:
meta = self.build_metadata(client)
- except Bcfg2.Server.Plugins.Metadata.MetadataConsistencyError:
+ except Bcfg2.Server.Plugin.MetadataConsistencyError:
self.logger.error("Metadata consistency error for client %s" %
client)
return lxml.etree.Element("error", type='metadata error')
@@ -448,7 +445,7 @@ class BaseCore(object):
"""Build the metadata structure."""
if not hasattr(self, 'metadata'):
# some threads start before metadata is even loaded
- raise Bcfg2.Server.Plugins.Metadata.MetadataRuntimeError
+ raise Bcfg2.Server.Plugin.MetadataRuntimeError
imd = self.metadata.get_initial_metadata(client_name)
for conn in self.connectors:
grps = conn.get_additional_groups(imd)
@@ -484,11 +481,11 @@ class BaseCore(object):
meta = self.build_metadata(client)
else:
meta = None
- except Bcfg2.Server.Plugins.Metadata.MetadataConsistencyError:
+ except Bcfg2.Server.Plugin.MetadataConsistencyError:
err = sys.exc_info()[1]
self.critical_error("Client metadata resolution error for %s: %s" %
(address[0], err))
- except Bcfg2.Server.Plugins.Metadata.MetadataRuntimeError:
+ except Bcfg2.Server.Plugin.MetadataRuntimeError:
err = sys.exc_info()[1]
self.critical_error('Metadata system runtime failure for %s: %s' %
(address[0], err))
@@ -527,8 +524,8 @@ class BaseCore(object):
client, metadata = self.resolve_client(address)
try:
self.metadata.set_version(client, version)
- except (Bcfg2.Server.Plugins.Metadata.MetadataConsistencyError,
- Bcfg2.Server.Plugins.Metadata.MetadataRuntimeError):
+ except (Bcfg2.Server.Plugin.MetadataConsistencyError,
+ Bcfg2.Server.Plugin.MetadataRuntimeError):
err = sys.exc_info()[1]
self.critical_error("Unable to set version for %s: %s" %
(client, err))
@@ -585,8 +582,8 @@ class BaseCore(object):
client = self.resolve_client(address, metadata=False)[0]
try:
self.metadata.set_profile(client, profile, address)
- except (Bcfg2.Server.Plugins.Metadata.MetadataConsistencyError,
- Bcfg2.Server.Plugins.Metadata.MetadataRuntimeError):
+ except (Bcfg2.Server.Plugin.MetadataConsistencyError,
+ Bcfg2.Server.Plugin.MetadataRuntimeError):
err = sys.exc_info()[1]
self.critical_error("Unable to assert profile for %s: %s" %
(client, err))
@@ -600,7 +597,7 @@ class BaseCore(object):
config = self.BuildConfiguration(client)
return lxml.etree.tostring(config, encoding='UTF-8',
xml_declaration=True)
- except Bcfg2.Server.Plugins.Metadata.MetadataConsistencyError:
+ except Bcfg2.Server.Plugin.MetadataConsistencyError:
self.critical_error("Metadata consistency failure for %s" % client)
@exposed
diff --git a/src/lib/Bcfg2/Server/Plugin.py b/src/lib/Bcfg2/Server/Plugin.py
index 7fe98ea92..7e7f7db18 100644
--- a/src/lib/Bcfg2/Server/Plugin.py
+++ b/src/lib/Bcfg2/Server/Plugin.py
@@ -4,8 +4,6 @@ import copy
import logging
import lxml.etree
import os
-import pickle
-import posixpath
import re
import sys
import threading
@@ -22,12 +20,7 @@ except ImportError:
# py3k compatibility
if sys.hexversion >= 0x03000000:
from functools import reduce
- from io import FileIO as BUILTIN_FILE_TYPE
-else:
- BUILTIN_FILE_TYPE = file
-from Bcfg2.Bcfg2Py3k import Queue
-from Bcfg2.Bcfg2Py3k import Empty
-from Bcfg2.Bcfg2Py3k import Full
+from Bcfg2.Bcfg2Py3k import Queue, Empty, Full, cPickle
# grab default metadata info from bcfg2.conf
opts = {'owner': Bcfg2.Options.MDATA_OWNER,
@@ -37,24 +30,21 @@ opts = {'owner': Bcfg2.Options.MDATA_OWNER,
'important': Bcfg2.Options.MDATA_IMPORTANT,
'paranoid': Bcfg2.Options.MDATA_PARANOID,
'sensitive': Bcfg2.Options.MDATA_SENSITIVE}
-mdata_setup = Bcfg2.Options.OptionParser(opts)
-mdata_setup.parse([])
-del mdata_setup['args']
+default_file_metadata = Bcfg2.Options.OptionParser(opts)
+default_file_metadata.parse([])
+del default_file_metadata['args']
logger = logging.getLogger('Bcfg2.Server.Plugin')
-default_file_metadata = mdata_setup
-
-info_regex = re.compile( \
- 'encoding:(\s)*(?P<encoding>\w+)|' +
- 'group:(\s)*(?P<group>\S+)|' +
- 'important:(\s)*(?P<important>\S+)|' +
- 'mtime:(\s)*(?P<mtime>\w+)|' +
- 'owner:(\s)*(?P<owner>\S+)|' +
- 'paranoid:(\s)*(?P<paranoid>\S+)|' +
- 'perms:(\s)*(?P<perms>\w+)|' +
- 'secontext:(\s)*(?P<secontext>\S+)|' +
- 'sensitive:(\s)*(?P<sensitive>\S+)|')
+info_regex = re.compile('owner:(\s)*(?P<owner>\S+)|' +
+ 'group:(\s)*(?P<group>\S+)|' +
+ 'perms:(\s)*(?P<perms>\w+)|' +
+ 'secontext:(\s)*(?P<secontext>\S+)|' +
+ 'paranoid:(\s)*(?P<paranoid>\S+)|' +
+ 'sensitive:(\s)*(?P<sensitive>\S+)|' +
+ 'encoding:(\s)*(?P<encoding>\w+)|' +
+ 'important:(\s)*(?P<important>\S+)|' +
+ 'mtime:(\s)*(?P<mtime>\w+)|')
def bind_info(entry, metadata, infoxml=None, default=default_file_metadata):
for attr, val in list(default.items()):
@@ -80,6 +70,18 @@ class PluginExecutionError(Exception):
pass
+class MetadataConsistencyError(Exception):
+ """This error gets raised when metadata is internally inconsistent."""
+ pass
+
+
+class MetadataRuntimeError(Exception):
+ """This error is raised when the metadata engine
+ is called prior to reading enough data.
+ """
+ pass
+
+
class Debuggable(object):
__rmi__ = ['toggle_debug']
@@ -102,30 +104,6 @@ class Debuggable(object):
self.logger.error(message)
-class DatabaseBacked(object):
- def __init__(self):
- pass
-
- @property
- def _use_db(self):
- use_db = self.core.setup.cfp.getboolean(self.name.lower(),
- "use_database",
- default=False)
- if use_db and has_django:
- return True
- elif not use_db:
- return False
- else:
- self.logger.error("use_database is true but django not found")
- return False
-
-
-
-class PluginDatabaseModel(object):
- class Meta:
- app_label = "Server"
-
-
class Plugin(Debuggable):
"""This is the base class for all Bcfg2 Server plugins.
Several attributes must be defined in the subclass:
@@ -172,6 +150,26 @@ class Plugin(Debuggable):
return "%s Plugin" % self.__class__.__name__
+class DatabaseBacked(Plugin):
+ @property
+ def _use_db(self):
+ use_db = self.core.setup.cfp.getboolean(self.name.lower(),
+ "use_database",
+ default=False)
+ if use_db and has_django:
+ return True
+ elif not use_db:
+ return False
+ else:
+ self.logger.error("use_database is true but django not found")
+ return False
+
+
+class PluginDatabaseModel(object):
+ class Meta:
+ app_label = "Server"
+
+
class Generator(object):
"""Generator plugins contribute to literal client configurations."""
def HandlesEntry(self, entry, metadata):
@@ -180,14 +178,14 @@ class Generator(object):
def HandleEntry(self, entry, metadata):
"""This is the slow-path handler for configuration entry binding."""
- raise PluginExecutionError
+ return entry
class Structure(object):
"""Structure Plugins contribute to abstract client configurations."""
def BuildStructures(self, metadata):
"""Return a list of abstract goal structures for client."""
- raise PluginExecutionError
+ raise NotImplementedError
class Metadata(object):
@@ -208,10 +206,13 @@ class Metadata(object):
pass
def get_initial_metadata(self, client_name):
- raise PluginExecutionError
+ raise NotImplementedError
+
+ def merge_additional_data(self, imd, source, data):
+ raise NotImplementedError
- def merge_additional_data(self, imd, source, groups, data):
- raise PluginExecutionError
+ def merge_additional_groups(self, imd, groups):
+ raise NotImplementedError
class Connector(object):
@@ -236,22 +237,22 @@ class Probing(object):
pass
-class Statistics(object):
+class Statistics(Plugin):
"""Signal statistics handling capability."""
def process_statistics(self, client, xdata):
pass
-class ThreadedStatistics(Statistics,
- threading.Thread):
+class ThreadedStatistics(Statistics, threading.Thread):
"""Threaded statistics handling capability."""
def __init__(self, core, datastore):
- Statistics.__init__(self)
+ Statistics.__init__(self, core, datastore)
threading.Thread.__init__(self)
# Event from the core signaling an exit
self.terminate = core.terminate
self.work_queue = Queue(100000)
- self.pending_file = "%s/etc/%s.pending" % (datastore, self.__class__.__name__)
+ self.pending_file = os.path.join(datastore, "etc",
+ "%s.pending" % self.name)
self.daemon = False
self.start()
@@ -262,32 +263,37 @@ class ThreadedStatistics(Statistics,
while not self.work_queue.empty():
(metadata, data) = self.work_queue.get_nowait()
try:
- pending_data.append((metadata.hostname, lxml.etree.tostring(data)))
+ pending_data.append((metadata.hostname,
+ lxml.etree.tostring(data)))
except:
- self.logger.warning("Dropping interaction for %s" % metadata.hostname)
+ err = sys.exc_info()[1]
+ self.logger.warning("Dropping interaction for %s: %s" %
+ (metadata.hostname, err))
except Empty:
pass
try:
savefile = open(self.pending_file, 'w')
- pickle.dump(pending_data, savefile)
+ cPickle.dump(pending_data, savefile)
savefile.close()
- self.logger.info("Saved pending %s data" % self.__class__.__name__)
+ self.logger.info("Saved pending %s data" % self.name)
except:
- self.logger.warning("Failed to save pending data")
+ err = sys.exc_info()[1]
+ self.logger.warning("Failed to save pending data: %s" % err)
def load(self):
- """Load any pending data to a file."""
+ """Load any pending data from a file."""
if not os.path.exists(self.pending_file):
return True
pending_data = []
try:
savefile = open(self.pending_file, 'r')
- pending_data = pickle.load(savefile)
+ pending_data = cPickle.load(savefile)
savefile.close()
except Exception:
e = sys.exc_info()[1]
self.logger.warning("Failed to load pending data: %s" % e)
+ return False
for (pmetadata, pdata) in pending_data:
# check that shutdown wasnt called early
if self.terminate.isSet():
@@ -298,7 +304,7 @@ class ThreadedStatistics(Statistics,
try:
metadata = self.core.build_metadata(pmetadata)
break
- except Bcfg2.Server.Plugins.Metadata.MetadataRuntimeError:
+ except MetadataRuntimeError:
pass
self.terminate.wait(5)
@@ -313,14 +319,17 @@ class ThreadedStatistics(Statistics,
break
except lxml.etree.LxmlError:
lxml_error = sys.exc_info()[1]
- self.logger.error("Unable to load save interaction: %s" % lxml_error)
- except Bcfg2.Server.Plugins.Metadata.MetadataConsistencyError:
- self.logger.error("Unable to load metadata for save interaction: %s" % pmetadata)
+ self.logger.error("Unable to load saved interaction: %s" %
+ lxml_error)
+ except MetadataConsistencyError:
+ self.logger.error("Unable to load metadata for save "
+ "interaction: %s" % pmetadata)
try:
os.unlink(self.pending_file)
except:
- self.logger.error("Failed to unlink save file: %s" % self.pending_file)
- self.logger.info("Loaded pending %s data" % self.__class__.__name__)
+ self.logger.error("Failed to unlink save file: %s" %
+ self.pending_file)
+ self.logger.info("Loaded pending %s data" % self.name)
return True
def run(self):
@@ -328,28 +337,25 @@ class ThreadedStatistics(Statistics,
return
while not self.terminate.isSet() and self.work_queue != None:
try:
- (xdata, client) = self.work_queue.get(block=True, timeout=2)
+ (client, xdata) = self.work_queue.get(block=True, timeout=2)
except Empty:
continue
except Exception:
e = sys.exc_info()[1]
self.logger.error("ThreadedStatistics: %s" % e)
continue
- self.handle_statistic(xdata, client)
+ self.handle_statistic(client, xdata)
if self.work_queue != None and not self.work_queue.empty():
self.save()
def process_statistics(self, metadata, data):
- warned = False
try:
self.work_queue.put_nowait((metadata, copy.copy(data)))
- warned = False
except Full:
- if not warned:
- self.logger.warning("%s: Queue is full. Dropping interactions." % self.__class__.__name__)
- warned = True
+ self.logger.warning("%s: Queue is full. Dropping interactions." %
+ self.name)
- def handle_statistics(self, metadata, data):
+ def handle_statistic(self, metadata, data):
"""Handle stats here."""
pass
@@ -359,17 +365,17 @@ class PullSource(object):
return []
def GetCurrentEntry(self, client, e_type, e_name):
- raise PluginExecutionError
+ raise NotImplementedError
class PullTarget(object):
def AcceptChoices(self, entry, metadata):
- raise PluginExecutionError
+ raise NotImplementedError
def AcceptPullData(self, specific, new_entry, verbose):
"""This is the null per-plugin implementation
of bcfg2-admin pull."""
- raise PluginExecutionError
+ raise NotImplementedError
class Decision(object):
@@ -385,13 +391,13 @@ class ValidationError(Exception):
class StructureValidator(object):
"""Validate/modify goal structures."""
def validate_structures(self, metadata, structures):
- raise ValidationError("not implemented")
+ raise NotImplementedError
class GoalValidator(object):
"""Validate/modify configuration goals."""
def validate_goals(self, metadata, goals):
- raise ValidationError("not implemented")
+ raise NotImplementedError
class Version(object):
@@ -434,7 +440,7 @@ class FileBacked(object):
if event and event.code2str() not in ['exists', 'changed', 'created']:
return
try:
- self.data = BUILTIN_FILE_TYPE(self.name).read()
+ self.data = open(self.name).read()
self.Index()
except IOError:
err = sys.exc_info()[1]
@@ -498,8 +504,8 @@ class DirectoryBacked(object):
"""
dirpathname = os.path.join(self.data, relative)
if relative not in self.handles.values():
- if not posixpath.isdir(dirpathname):
- logger.error("Failed to open directory %s" % (dirpathname))
+ if not os.path.isdir(dirpathname):
+ logger.error("%s is not a directory" % dirpathname)
return
reqid = self.fam.AddMonitor(dirpathname, self)
self.handles[reqid] = relative
@@ -531,11 +537,6 @@ class DirectoryBacked(object):
"""
action = event.code2str()
- # Clean up the absolute path names passed in
- event.filename = os.path.normpath(event.filename)
- if event.filename.startswith(self.data):
- event.filename = event.filename[len(self.data)+1:]
-
# Exclude events for actions we don't care about
if action == 'endExist':
return
@@ -545,10 +546,14 @@ class DirectoryBacked(object):
(action, event.requestID, event.filename))
return
+ # Clean up path names
+ event.filename = os.path.normpath(event.filename.lstrip('/'))
+
# Calculate the absolute and relative paths this event refers to
abspath = os.path.join(self.data, self.handles[event.requestID],
event.filename)
- relpath = os.path.join(self.handles[event.requestID], event.filename)
+ relpath = os.path.join(self.handles[event.requestID],
+ event.filename).lstrip('/')
if action == 'deleted':
for key in self.entries.keys():
@@ -559,7 +564,7 @@ class DirectoryBacked(object):
# watching a directory just because it gets deleted. If it
# is recreated, we will start getting notifications for it
# again without having to add a new monitor.
- elif posixpath.isdir(abspath):
+ elif os.path.isdir(abspath):
# Deal with events for directories
if action in ['exists', 'created']:
self.add_directory_monitor(relpath)
@@ -637,14 +642,18 @@ class XMLFileBacked(FileBacked):
Bcfg2.Server.XI_NAMESPACE)]
for el in included:
name = el.get("href")
- if name not in self.extras:
- if name.startswith("/"):
- fpath = name
+ if name.startswith("/"):
+ fpath = name
+ else:
+ if fname:
+ rel = fname
else:
- fpath = os.path.join(os.path.dirname(self.name), name)
+ rel = self.name
+ fpath = os.path.join(os.path.dirname(rel), name)
+ if fpath not in self.extras:
if os.path.exists(fpath):
self._follow_xincludes(fname=fpath)
- self.add_monitor(fpath, name)
+ self.add_monitor(fpath)
else:
msg = "%s: %s does not exist, skipping" % (self.name, name)
if el.findall('./%sfallback' % Bcfg2.Server.XI_NAMESPACE):
@@ -658,9 +667,9 @@ class XMLFileBacked(FileBacked):
self.xdata = lxml.etree.XML(self.data, base_url=self.name,
parser=Bcfg2.Server.XMLParser)
except lxml.etree.XMLSyntaxError:
- err = sys.exc_info()[1]
- logger.error("Failed to parse %s: %s" % (self.name, err))
- raise Bcfg2.Server.Plugin.PluginInitError
+ msg = "Failed to parse %s: %s" % (self.name, sys.exc_info()[1])
+ logger.error(msg)
+ raise PluginInitError(msg)
self._follow_xincludes()
if self.extras:
@@ -674,8 +683,8 @@ class XMLFileBacked(FileBacked):
if self.__identifier__ is not None:
self.label = self.xdata.attrib[self.__identifier__]
- def add_monitor(self, fpath, fname):
- self.extras.append(fname)
+ def add_monitor(self, fpath):
+ self.extras.append(fpath)
if self.fam and self.should_monitor:
self.fam.AddMonitor(fpath, self)
@@ -696,11 +705,9 @@ class StructFile(XMLFileBacked):
return False
negate = item.get('negate', 'false').lower() == 'true'
if item.tag == 'Group':
- return ((negate and item.get('name') not in metadata.groups) or
- (not negate and item.get('name') in metadata.groups))
+ return negate == (item.get('name') not in metadata.groups)
elif item.tag == 'Client':
- return ((negate and item.get('name') != metadata.hostname) or
- (not negate and item.get('name') == metadata.hostname))
+ return negate == (item.get('name') != metadata.hostname)
else:
return True
@@ -714,7 +721,7 @@ class StructFile(XMLFileBacked):
rv.extend(self._match(child, metadata))
return rv
else:
- rv = copy.copy(item)
+ rv = copy.deepcopy(item)
for child in rv.iterchildren():
rv.remove(child)
for child in item.iterchildren():
@@ -754,26 +761,28 @@ class StructFile(XMLFileBacked):
return rv
-class INode:
+class INode(object):
"""
LNodes provide lists of things available at a particular
group intersection.
"""
- raw = {'Client': "lambda m, e:'%(name)s' == m.hostname and predicate(m, e)",
- 'Group': "lambda m, e:'%(name)s' in m.groups and predicate(m, e)"}
- nraw = {'Client': "lambda m, e:'%(name)s' != m.hostname and predicate(m, e)",
- 'Group': "lambda m, e:'%(name)s' not in m.groups and predicate(m, e)"}
+ raw = dict(
+ Client="lambda m, e:'%(name)s' == m.hostname and predicate(m, e)",
+ Group="lambda m, e:'%(name)s' in m.groups and predicate(m, e)")
+ nraw = dict(
+ Client="lambda m, e:'%(name)s' != m.hostname and predicate(m, e)",
+ Group="lambda m, e:'%(name)s' not in m.groups and predicate(m, e)")
containers = ['Group', 'Client']
ignore = []
def __init__(self, data, idict, parent=None):
self.data = data
self.contents = {}
- if parent == None:
- self.predicate = lambda m, d: True
+ if parent is None:
+ self.predicate = lambda m, e: True
else:
predicate = parent.predicate
- if data.get('negate', 'false') in ['true', 'True']:
+ if data.get('negate', 'false').lower() == 'true':
psrc = self.nraw
else:
psrc = self.raw
@@ -782,20 +791,23 @@ class INode:
{'name': data.get('name')},
{'predicate': predicate})
else:
- raise Exception
- mytype = self.__class__
+ raise PluginExecutionError("Unknown tag: %s" % data.tag)
self.children = []
+ self._load_children(data, idict)
+
+ def _load_children(self, data, idict):
for item in data.getchildren():
if item.tag in self.ignore:
continue
elif item.tag in self.containers:
- self.children.append(mytype(item, idict, self))
+ self.children.append(self.__class__(item, idict, self))
else:
try:
self.contents[item.tag][item.get('name')] = \
dict(item.attrib)
except KeyError:
- self.contents[item.tag] = {item.get('name'): dict(item.attrib)}
+ self.contents[item.tag] = \
+ {item.get('name'): dict(item.attrib)}
if item.text:
self.contents[item.tag][item.get('name')]['__text__'] = \
item.text
@@ -847,31 +859,36 @@ class XMLSrc(XMLFileBacked):
def HandleEvent(self, _=None):
"""Read file upon update."""
try:
- data = BUILTIN_FILE_TYPE(self.name).read()
+ data = open(self.name).read()
except IOError:
- logger.error("Failed to read file %s" % (self.name))
- return
+ msg = "Failed to read file %s: %s" % (self.name, sys.exc_info()[1])
+ logger.error(msg)
+ raise PluginExecutionError(msg)
self.items = {}
try:
xdata = lxml.etree.XML(data, parser=Bcfg2.Server.XMLParser)
except lxml.etree.XMLSyntaxError:
- logger.error("Failed to parse file %s" % (self.name))
- return
+ msg = "Failed to parse file %s" % (self.name, sys.exc_info()[1])
+ logger.error(msg)
+ raise PluginExecutionError(msg)
self.pnode = self.__node__(xdata, self.items)
self.cache = None
try:
self.priority = int(xdata.get('priority'))
except (ValueError, TypeError):
if not self.noprio:
- logger.error("Got bogus priority %s for file %s" %
- (xdata.get('priority'), self.name))
+ msg = "Got bogus priority %s for file %s" % \
+ (xdata.get('priority'), self.name)
+ logger.error(msg)
+ raise PluginExecutionError(msg)
+
del xdata, data
def Cache(self, metadata):
"""Build a package dict for a given host."""
- if self.cache == None or self.cache[0] != metadata:
+ if self.cache is None or self.cache[0] != metadata:
cache = (metadata, self.__cacheobj__())
- if self.pnode == None:
+ if self.pnode is None:
logger.error("Cache method called early for %s; forcing data load" % (self.name))
self.HandleEvent()
return
@@ -900,11 +917,7 @@ class PrioDir(Plugin, Generator, XMLDirectoryBacked):
def __init__(self, core, datastore):
Plugin.__init__(self, core, datastore)
Generator.__init__(self)
- try:
- XMLDirectoryBacked.__init__(self, self.data, self.core.fam)
- except OSError:
- self.logger.error("Failed to load %s indices" % (self.name))
- raise PluginInitError
+ XMLDirectoryBacked.__init__(self, self.data, self.core.fam)
def HandleEvent(self, event):
"""Handle events and update dispatch table."""
@@ -943,13 +956,13 @@ class PrioDir(Plugin, Generator, XMLDirectoryBacked):
else:
prio = [int(src.priority) for src in matching]
if prio.count(max(prio)) > 1:
- self.logger.error("Found conflicting sources with "
- "same priority for %s, %s %s" %
- (metadata.hostname,
- entry.tag.lower(), entry.get('name')))
+ msg = "Found conflicting sources with same priority for " + \
+ "%s:%s for %s" % (entry.tag, entry.get("name"),
+ metadata.hostname)
+ self.logger.error(msg)
self.logger.error([item.name for item in matching])
self.logger.error("Priority was %s" % max(prio))
- raise PluginExecutionError
+ raise PluginExecutionError(msg)
index = prio.index(max(prio))
for rname in list(matching[index].cache[1][entry.tag].keys()):
@@ -975,9 +988,9 @@ class SpecificityError(Exception):
pass
-class Specificity:
-
- def __init__(self, all=False, group=False, hostname=False, prio=0, delta=False):
+class Specificity(object):
+ def __init__(self, all=False, group=False, hostname=False, prio=0,
+ delta=False):
self.hostname = hostname
self.all = all
self.group = group
@@ -987,6 +1000,12 @@ class Specificity:
def __lt__(self, other):
return self.__cmp__(other) < 0
+ def __gt__(self, other):
+ return self.__cmp__(other) > 0
+
+ def __eq__(self, other):
+ return self.__cmp__(other) == 0
+
def matches(self, metadata):
return self.all or \
self.hostname == metadata.hostname or \
@@ -995,26 +1014,36 @@ class Specificity:
def __cmp__(self, other):
"""Sort most to least specific."""
if self.all:
- return 1
- if self.group:
+ if other.all:
+ return 0
+ else:
+ return 1
+ elif other.all:
+ return -1
+ elif self.group:
if other.hostname:
return 1
if other.group and other.prio > self.prio:
return 1
if other.group and other.prio == self.prio:
return 0
+ elif other.group:
+ return -1
+ elif self.hostname and other.hostname:
+ return 0
return -1
- def more_specific(self, other):
- """Test if self is more specific than other."""
+ def __str__(self):
+ rv = [self.__class__.__name__, ': ']
if self.all:
- True
+ rv.append("all")
elif self.group:
- if other.hostname:
- return True
- elif other.group and other.prio > self.prio:
- return True
- return False
+ rv.append("Group %s, priority %s" % (self.group, self.prio))
+ elif self.hostname:
+ rv.append("Host %s" % self.hostname)
+ if self.delta:
+ rv.append(", delta=%s" % self.delta)
+ return "".join(rv)
class SpecificData(object):
@@ -1037,7 +1066,7 @@ class EntrySet(Debuggable):
"""Entry sets deal with the host- and group-specific entries."""
ignore = re.compile("^(\.#.*|.*~|\\..*\\.(sw[px])|.*\\.genshi_include)$")
- def __init__(self, basename, path, entry_type, encoding):
+ def __init__(self, basename, path, entry_type, encoding, is_regex=False):
Debuggable.__init__(self, name=basename)
self.path = path
self.entry_type = entry_type
@@ -1045,7 +1074,12 @@ class EntrySet(Debuggable):
self.metadata = default_file_metadata.copy()
self.infoxml = None
self.encoding = encoding
- pattern = '(.*/)?%s(\.((H_(?P<hostname>\S+))|' % basename
+
+ if is_regex:
+ base_pat = basename
+ else:
+ base_pat = re.escape(basename)
+ pattern = '(.*/)?%s(\.((H_(?P<hostname>\S+))|' % base_pat
pattern += '(G(?P<prio>\d+)_(?P<group>\S+))))?$'
self.specific = re.compile(pattern)
@@ -1062,20 +1096,13 @@ class EntrySet(Debuggable):
if matching is None:
matching = self.get_matching(metadata)
- hspec = [ent for ent in matching if ent.specific.hostname]
- if hspec:
- return hspec[0]
-
- gspec = [ent for ent in matching if ent.specific.group]
- if gspec:
- gspec.sort()
- return gspec[-1]
-
- aspec = [ent for ent in matching if ent.specific.all]
- if aspec:
- return aspec[0]
-
- raise PluginExecutionError
+ if matching:
+ matching.sort()
+ return matching[0]
+ else:
+ raise PluginExecutionError("No matching entries available for %s "
+ "for %s" % (self.path,
+ metadata.hostname))
def handle_event(self, event):
"""Handle FAM events for the TemplateSet."""
@@ -1164,8 +1191,7 @@ class EntrySet(Debuggable):
if value:
self.metadata[key] = value
if len(self.metadata['perms']) == 3:
- self.metadata['perms'] = "0%s" % \
- (self.metadata['perms'])
+ self.metadata['perms'] = "0%s" % self.metadata['perms']
def reset_metadata(self, event):
"""Reset metadata to defaults if info or info.xml removed."""
@@ -1178,7 +1204,8 @@ class EntrySet(Debuggable):
bind_info(entry, metadata, infoxml=self.infoxml, default=self.metadata)
def bind_entry(self, entry, metadata):
- """Return the appropriate interpreted template from the set of available templates."""
+ """Return the appropriate interpreted template from the set of
+ available templates."""
self.bind_info_to_entry(entry, metadata)
return self.best_matching(metadata).bind_entry(entry, metadata)
@@ -1206,36 +1233,38 @@ class GroupSpool(Plugin, Generator):
def add_entry(self, event):
epath = self.event_path(event)
ident = self.event_id(event)
- if posixpath.isdir(epath):
+ if os.path.isdir(epath):
self.AddDirectoryMonitor(epath[len(self.data):])
- if ident not in self.entries and posixpath.isfile(epath):
- dirpath = "".join([self.data, ident])
+ if ident not in self.entries and os.path.isfile(epath):
+ dirpath = self.data + ident
self.entries[ident] = self.es_cls(self.filename_pattern,
dirpath,
self.es_child_cls,
self.encoding)
self.Entries[self.entry_type][ident] = \
self.entries[ident].bind_entry
- if not posixpath.isdir(epath):
+ if not os.path.isdir(epath):
# do not pass through directory events
self.entries[ident].handle_event(event)
def event_path(self, event):
- return "".join([self.data, self.handles[event.requestID],
- event.filename])
+ return os.path.join(self.data,
+ self.handles[event.requestID].lstrip("/"),
+ event.filename)
def event_id(self, event):
epath = self.event_path(event)
- if posixpath.isdir(epath):
- return self.handles[event.requestID] + event.filename
+ if os.path.isdir(epath):
+ return os.path.join(self.handles[event.requestID].lstrip("/"),
+ event.filename)
else:
- return self.handles[event.requestID][:-1]
+ return self.handles[event.requestID].rstrip("/")
def toggle_debug(self):
for entry in self.entries.values():
if hasattr(entry, "toggle_debug"):
entry.toggle_debug()
- return Plugin.toggle_debug()
+ return Plugin.toggle_debug(self)
def HandleEvent(self, event):
"""Unified FAM event handler for GroupSpool."""
@@ -1246,7 +1275,7 @@ class GroupSpool(Plugin, Generator):
if action in ['exists', 'created']:
self.add_entry(event)
- if action == 'changed':
+ elif action == 'changed':
if ident in self.entries:
self.entries[ident].handle_event(event)
else:
@@ -1274,7 +1303,7 @@ class GroupSpool(Plugin, Generator):
relative += '/'
name = self.data + relative
if relative not in list(self.handles.values()):
- if not posixpath.isdir(name):
+ if not os.path.isdir(name):
self.logger.error("Failed to open directory %s" % name)
return
reqid = self.core.fam.AddMonitor(name, self)
diff --git a/src/lib/Bcfg2/Server/Plugins/DBStats.py b/src/lib/Bcfg2/Server/Plugins/DBStats.py
index 40ab11208..fa1f45a39 100644
--- a/src/lib/Bcfg2/Server/Plugins/DBStats.py
+++ b/src/lib/Bcfg2/Server/Plugins/DBStats.py
@@ -18,13 +18,11 @@ from Bcfg2.Server.Reports.reports.models import Client
logger = logging.getLogger('Bcfg2.Plugins.DBStats')
-class DBStats(Bcfg2.Server.Plugin.Plugin,
- Bcfg2.Server.Plugin.ThreadedStatistics,
+class DBStats(Bcfg2.Server.Plugin.ThreadedStatistics,
Bcfg2.Server.Plugin.PullSource):
name = 'DBStats'
def __init__(self, core, datastore):
- Bcfg2.Server.Plugin.Plugin.__init__(self, core, datastore)
Bcfg2.Server.Plugin.ThreadedStatistics.__init__(self, core, datastore)
Bcfg2.Server.Plugin.PullSource.__init__(self)
self.cpath = "%s/Metadata/clients.xml" % datastore
diff --git a/src/lib/Bcfg2/Server/Plugins/Decisions.py b/src/lib/Bcfg2/Server/Plugins/Decisions.py
index b432474f2..78b549c2c 100644
--- a/src/lib/Bcfg2/Server/Plugins/Decisions.py
+++ b/src/lib/Bcfg2/Server/Plugins/Decisions.py
@@ -23,9 +23,9 @@ class DecisionSet(Bcfg2.Server.Plugin.EntrySet):
- `encoding`: XML character encoding
"""
- pattern = '(white|black)list'
- Bcfg2.Server.Plugin.EntrySet.__init__(self, pattern, path, \
- DecisionFile, encoding)
+ Bcfg2.Server.Plugin.EntrySet.__init__(self, '(white|black)list', path,
+ DecisionFile, encoding,
+ is_regex=True)
try:
fam.AddMonitor(path, self)
except OSError:
diff --git a/src/lib/Bcfg2/Server/Plugins/Deps.py b/src/lib/Bcfg2/Server/Plugins/Deps.py
index 9b848baae..d3a1ee871 100644
--- a/src/lib/Bcfg2/Server/Plugins/Deps.py
+++ b/src/lib/Bcfg2/Server/Plugins/Deps.py
@@ -7,27 +7,10 @@ import Bcfg2.Server.Plugin
class DNode(Bcfg2.Server.Plugin.INode):
"""DNode provides supports for single predicate types for dependencies."""
- raw = {'Group': "lambda m, e:'%(name)s' in m.groups and predicate(m, e)"}
- containers = ['Group']
-
- def __init__(self, data, idict, parent=None):
- self.data = data
- self.contents = {}
- if parent == None:
- self.predicate = lambda x, d: True
- else:
- predicate = parent.predicate
- if data.tag in list(self.raw.keys()):
- self.predicate = eval(self.raw[data.tag] %
- {'name': data.get('name')},
- {'predicate': predicate})
- else:
- raise Exception
- mytype = self.__class__
- self.children = []
+ def _load_children(self, data, idict):
for item in data.getchildren():
if item.tag in self.containers:
- self.children.append(mytype(item, idict, self))
+ self.children.append(self.__class__(item, idict, self))
else:
data = [(child.tag, child.get('name'))
for child in item.getchildren()]
diff --git a/src/lib/Bcfg2/Server/Plugins/FileProbes.py b/src/lib/Bcfg2/Server/Plugins/FileProbes.py
index f95c05d42..a403c78d7 100644
--- a/src/lib/Bcfg2/Server/Plugins/FileProbes.py
+++ b/src/lib/Bcfg2/Server/Plugins/FileProbes.py
@@ -24,7 +24,7 @@ import lxml.etree
path = "%s"
if not os.path.exists(path):
- print "%%s does not exist" %% path
+ print("%%s does not exist" %% path)
raise SystemExit(1)
stat = os.stat(path)
@@ -34,7 +34,7 @@ data = lxml.etree.Element("ProbedFileData",
group=grp.getgrgid(stat[5])[0],
perms=oct(stat[0] & 07777))
data.text = binascii.b2a_base64(open(path).read())
-print lxml.etree.tostring(data)
+print(lxml.etree.tostring(data))
"""
class FileProbes(Bcfg2.Server.Plugin.Plugin,
diff --git a/src/lib/Bcfg2/Server/Plugins/Metadata.py b/src/lib/Bcfg2/Server/Plugins/Metadata.py
index e7be7c227..3c2c3701a 100644
--- a/src/lib/Bcfg2/Server/Plugins/Metadata.py
+++ b/src/lib/Bcfg2/Server/Plugins/Metadata.py
@@ -81,18 +81,6 @@ if has_django:
return False
-class MetadataConsistencyError(Exception):
- """This error gets raised when metadata is internally inconsistent."""
- pass
-
-
-class MetadataRuntimeError(Exception):
- """This error is raised when the metadata engine
- is called prior to reading enough data.
- """
- pass
-
-
class XMLMetadataConfig(Bcfg2.Server.Plugin.XMLFileBacked):
"""Handles xml config files and all XInclude statements"""
def __init__(self, metadata, watch_clients, basefile):
@@ -117,13 +105,15 @@ class XMLMetadataConfig(Bcfg2.Server.Plugin.XMLFileBacked):
@property
def xdata(self):
if not self.data:
- raise MetadataRuntimeError("%s has no data" % self.basefile)
+ raise Bcfg2.Server.Plugin.MetadataRuntimeError("%s has no data" %
+ self.basefile)
return self.data
@property
def base_xdata(self):
if not self.basedata:
- raise MetadataRuntimeError("%s has no data" % self.basefile)
+ raise Bcfg2.Server.Plugin.MetadataRuntimeError("%s has no data" %
+ self.basefile)
return self.basedata
def load_xml(self):
@@ -158,7 +148,7 @@ class XMLMetadataConfig(Bcfg2.Server.Plugin.XMLFileBacked):
except IOError:
msg = "Failed to write %s: %s" % (tmpfile, sys.exc_info()[1])
self.logger.error(msg)
- raise MetadataRuntimeError(msg)
+ raise Bcfg2.Server.Plugin.MetadataRuntimeError(msg)
# prep data
dataroot = xmltree.getroot()
newcontents = lxml.etree.tostring(dataroot, pretty_print=True)
@@ -174,7 +164,7 @@ class XMLMetadataConfig(Bcfg2.Server.Plugin.XMLFileBacked):
(tmpfile, sys.exc_info()[1])
self.logger.error(msg, exc_info=1)
os.unlink(tmpfile)
- raise MetadataRuntimeError(msg)
+ raise Bcfg2.Server.Plugin.MetadataRuntimeError(msg)
datafile.close()
# check if clients.xml is a symlink
if os.path.islink(fname):
@@ -187,7 +177,7 @@ class XMLMetadataConfig(Bcfg2.Server.Plugin.XMLFileBacked):
msg = "Metadata: Failed to rename %s: %s" % (tmpfile,
sys.exc_info()[1])
self.logger.error(msg)
- raise MetadataRuntimeError(msg)
+ raise Bcfg2.Server.Plugin.MetadataRuntimeError(msg)
def find_xml_for_xpath(self, xpath):
"""Find and load xml file containing the xpath query"""
@@ -203,28 +193,26 @@ class XMLMetadataConfig(Bcfg2.Server.Plugin.XMLFileBacked):
"""Try to find the data in included files"""
for included in self.extras:
try:
- xdata = lxml.etree.parse(os.path.join(self.basedir,
- included),
+ xdata = lxml.etree.parse(included,
parser=Bcfg2.Server.XMLParser)
cli = xdata.xpath(xpath)
if len(cli) > 0:
- return {'filename': os.path.join(self.basedir,
- included),
+ return {'filename': included,
'xmltree': xdata,
'xquery': cli}
except lxml.etree.XMLSyntaxError:
- self.logger.error('Failed to parse %s' % (included))
+ self.logger.error('Failed to parse %s' % included)
return {}
- def add_monitor(self, fpath, fname):
- self.extras.append(fname)
+ def add_monitor(self, fpath):
+ self.extras.append(fpath)
if self.fam and self.should_monitor:
self.fam.AddMonitor(fpath, self.metadata)
def HandleEvent(self, event):
"""Handle fam events"""
filename = os.path.basename(event.filename)
- if filename in self.extras:
+ if event.filename in self.extras:
if event.code2str() == 'exists':
return False
elif filename != self.basefile:
@@ -318,8 +306,7 @@ class MetadataGroup(tuple):
def __hash__(self):
return hash(self.name)
-class Metadata(Bcfg2.Server.Plugin.Plugin,
- Bcfg2.Server.Plugin.Metadata,
+class Metadata(Bcfg2.Server.Plugin.Metadata,
Bcfg2.Server.Plugin.Statistics,
Bcfg2.Server.Plugin.DatabaseBacked):
"""This class contains data for bcfg2 server metadata."""
@@ -328,10 +315,9 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
sort_order = 500
def __init__(self, core, datastore, watch_clients=True):
- Bcfg2.Server.Plugin.Plugin.__init__(self, core, datastore)
Bcfg2.Server.Plugin.Metadata.__init__(self)
- Bcfg2.Server.Plugin.Statistics.__init__(self)
- Bcfg2.Server.Plugin.DatabaseBacked.__init__(self)
+ Bcfg2.Server.Plugin.Statistics.__init__(self, core, datastore)
+ Bcfg2.Server.Plugin.DatabaseBacked.__init__(self, core, datastore)
self.watch_clients = watch_clients
self.states = dict()
self.extra = dict()
@@ -436,7 +422,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
node = self._search_xdata(tag, name, config.xdata, alias=alias)
if node != None:
self.logger.error("%s \"%s\" already exists" % (tag, name))
- raise MetadataConsistencyError
+ raise Bcfg2.Server.Plugin.MetadataConsistencyError
element = lxml.etree.SubElement(config.base_xdata.getroot(),
tag, name=name)
if attribs:
@@ -480,13 +466,13 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
node = self._search_xdata(tag, name, config.xdata, alias=alias)
if node == None:
self.logger.error("%s \"%s\" does not exist" % (tag, name))
- raise MetadataConsistencyError
+ raise Bcfg2.Server.Plugin.MetadataConsistencyError
xdict = config.find_xml_for_xpath('.//%s[@name="%s"]' %
(tag, node.get('name')))
if not xdict:
self.logger.error("Unexpected error finding %s \"%s\"" %
(tag, name))
- raise MetadataConsistencyError
+ raise Bcfg2.Server.Plugin.MetadataConsistencyError
for key, val in list(attribs.items()):
xdict['xquery'][0].set(key, val)
config.write_xml(xdict['filename'], xdict['xmltree'])
@@ -522,13 +508,13 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
node = self._search_xdata(tag, name, config.xdata)
if node == None:
self.logger.error("%s \"%s\" does not exist" % (tag, name))
- raise MetadataConsistencyError
+ raise Bcfg2.Server.Plugin.MetadataConsistencyError
xdict = config.find_xml_for_xpath('.//%s[@name="%s"]' %
(tag, node.get('name')))
if not xdict:
self.logger.error("Unexpected error finding %s \"%s\"" %
(tag, name))
- raise MetadataConsistencyError
+ raise Bcfg2.Server.Plugin.MetadataConsistencyError
xdict['xquery'][0].getparent().remove(xdict['xquery'][0])
config.write_xml(xdict['filename'], xdict['xmltree'])
@@ -558,7 +544,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
except MetadataClientModel.DoesNotExist:
msg = "Client %s does not exist" % client_name
self.logger.warning(msg)
- raise MetadataConsistencyError(msg)
+ raise Bcfg2.Server.Plugin.MetadataConsistencyError(msg)
client.delete()
self.clients = self.list_clients()
else:
@@ -741,16 +727,16 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
self.logger.info("Asserting client %s profile to %s" %
(client, profile))
if False in list(self.states.values()):
- raise MetadataRuntimeError
+ raise Bcfg2.Server.Plugin.MetadataRuntimeError("Metadata has not been read yet")
if not force and profile not in self.groups:
msg = "Profile group %s does not exist" % profile
self.logger.error(msg)
- raise MetadataConsistencyError(msg)
+ raise Bcfg2.Server.Plugin.MetadataConsistencyError(msg)
group = self.groups[profile]
if not force and not group.is_public:
msg = "Cannot set client %s to private group %s" % (client, profile)
self.logger.error(msg)
- raise MetadataConsistencyError(msg)
+ raise Bcfg2.Server.Plugin.MetadataConsistencyError(msg)
if client in self.clients:
if self._use_db:
@@ -797,7 +783,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
else:
msg = "Cannot set version on non-existent client %s" % client
self.logger.error(msg)
- raise MetadataConsistencyError(msg)
+ raise Bcfg2.Server.Plugin.MetadataConsistencyError(msg)
self.versions[client] = version
self.clients_xml.write()
@@ -831,7 +817,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
if len(self.addresses[address]) != 1:
err = "Address %s has multiple reverse assignments; a uuid must be used" % address
self.logger.error(err)
- raise MetadataConsistencyError(err)
+ raise Bcfg2.Server.Plugin.MetadataConsistencyError(err)
return self.addresses[address][0]
try:
cname = socket.gethostbyaddr(address)[0].lower()
@@ -841,7 +827,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
except socket.herror:
warning = "address resolution error for %s" % address
self.logger.warning(warning)
- raise MetadataConsistencyError(warning)
+ raise Bcfg2.Server.Plugin.MetadataConsistencyError(warning)
def _merge_groups(self, client, groups, categories=None):
""" set group membership based on the contents of groups.xml
@@ -871,7 +857,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
def get_initial_metadata(self, client):
"""Return the metadata for a given client."""
if False in list(self.states.values()):
- raise MetadataRuntimeError("Metadata has not been read yet")
+ raise Bcfg2.Server.Plugin.MetadataRuntimeError("Metadata has not been read yet")
client = client.lower()
if client in self.aliases:
client = self.aliases[client]
@@ -898,7 +884,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
else:
msg = "Cannot add new client %s; no default group set" % client
self.logger.error(msg)
- raise MetadataConsistencyError(msg)
+ raise Bcfg2.Server.Plugin.MetadataConsistencyError(msg)
if client in self.clientgroups:
for cgroup in self.clientgroups[client]:
@@ -1052,9 +1038,10 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
id_method = 'address'
try:
client = self.resolve_client(address)
- except MetadataConsistencyError:
- self.logger.error("Client %s failed to resolve; metadata problem"
- % address[0])
+ except Bcfg2.Server.Plugin.MetadataConsistencyError:
+ err = sys.exc_info()[1]
+ self.logger.error("Client %s failed to resolve: %s" %
+ (address[0], err))
return False
else:
id_method = 'uuid'
diff --git a/src/lib/Bcfg2/Server/Plugins/Packages/Apt.py b/src/lib/Bcfg2/Server/Plugins/Packages/Apt.py
index 83f7bbd10..685cd5c1d 100644
--- a/src/lib/Bcfg2/Server/Plugins/Packages/Apt.py
+++ b/src/lib/Bcfg2/Server/Plugins/Packages/Apt.py
@@ -2,8 +2,7 @@ import re
import gzip
from Bcfg2.Server.Plugins.Packages.Collection import Collection
from Bcfg2.Server.Plugins.Packages.Source import Source
-from Bcfg2.Bcfg2Py3k import cPickle, file
-
+from Bcfg2.Bcfg2Py3k import cPickle
class AptCollection(Collection):
def get_group(self, group):
@@ -25,13 +24,13 @@ class AptSource(Source):
'components': self.components, 'arches': self.arches}]
def save_state(self):
- cache = file(self.cachefile, 'wb')
+ cache = open(self.cachefile, 'wb')
cPickle.dump((self.pkgnames, self.deps, self.provides,
self.essentialpkgs), cache, 2)
cache.close()
def load_state(self):
- data = file(self.cachefile)
+ data = open(self.cachefile)
(self.pkgnames, self.deps, self.provides,
self.essentialpkgs) = cPickle.load(data)
diff --git a/src/lib/Bcfg2/Server/Plugins/Packages/Pac.py b/src/lib/Bcfg2/Server/Plugins/Packages/Pac.py
index 99a090739..34c7b42c1 100644
--- a/src/lib/Bcfg2/Server/Plugins/Packages/Pac.py
+++ b/src/lib/Bcfg2/Server/Plugins/Packages/Pac.py
@@ -1,6 +1,6 @@
import gzip
import tarfile
-from Bcfg2.Bcfg2Py3k import cPickle, file
+from Bcfg2.Bcfg2Py3k import cPickle
from Bcfg2.Server.Plugins.Packages.Collection import Collection
from Bcfg2.Server.Plugins.Packages.Source import Source
@@ -9,6 +9,7 @@ class PacCollection(Collection):
self.logger.warning("Packages: Package groups are not supported by Pacman")
return []
+
class PacSource(Source):
basegroups = ['arch', 'parabola']
ptype = 'pacman'
@@ -22,13 +23,13 @@ class PacSource(Source):
'components': self.components, 'arches': self.arches}]
def save_state(self):
- cache = file(self.cachefile, 'wb')
+ cache = open(self.cachefile, 'wb')
cPickle.dump((self.pkgnames, self.deps, self.provides),
cache, 2)
cache.close()
def load_state(self):
- data = file(self.cachefile)
+ data = open(self.cachefile)
self.pkgnames, self.deps, self.provides = cPickle.load(data)
def filter_unknown(self, unknown):
diff --git a/src/lib/Bcfg2/Server/Plugins/Packages/PackagesSources.py b/src/lib/Bcfg2/Server/Plugins/Packages/PackagesSources.py
index 3ca96c0a4..0d565be31 100644
--- a/src/lib/Bcfg2/Server/Plugins/Packages/PackagesSources.py
+++ b/src/lib/Bcfg2/Server/Plugins/Packages/PackagesSources.py
@@ -41,16 +41,9 @@ class PackagesSources(Bcfg2.Server.Plugin.StructFile,
def HandleEvent(self, event=None):
Bcfg2.Server.Plugin.XMLFileBacked.HandleEvent(self, event=event)
if event and event.filename != self.name:
- for fname in self.extras:
- fpath = None
- if fname.startswith("/"):
- fpath = os.path.abspath(fname)
- else:
- fpath = \
- os.path.abspath(os.path.join(os.path.dirname(self.name),
- fname))
+ for fpath in self.extras:
if fpath == os.path.abspath(event.filename):
- self.parsed.add(fname)
+ self.parsed.add(fpath)
break
if self.loaded:
diff --git a/src/lib/Bcfg2/Server/Plugins/Probes.py b/src/lib/Bcfg2/Server/Plugins/Probes.py
index 114a9bbd8..e08f52a28 100644
--- a/src/lib/Bcfg2/Server/Plugins/Probes.py
+++ b/src/lib/Bcfg2/Server/Plugins/Probes.py
@@ -115,11 +115,10 @@ class ProbeSet(Bcfg2.Server.Plugin.EntrySet):
bangline = re.compile('^#!\s*(?P<interpreter>.*)$')
def __init__(self, path, fam, encoding, plugin_name):
- fpattern = '[0-9A-Za-z_\-]+'
self.plugin_name = plugin_name
- Bcfg2.Server.Plugin.EntrySet.__init__(self, fpattern, path,
+ Bcfg2.Server.Plugin.EntrySet.__init__(self, '[0-9A-Za-z_\-]+', path,
Bcfg2.Server.Plugin.SpecificData,
- encoding)
+ encoding, is_regex=True)
fam.AddMonitor(path, self)
def HandleEvent(self, event):
@@ -152,8 +151,7 @@ class ProbeSet(Bcfg2.Server.Plugin.EntrySet):
return ret
-class Probes(Bcfg2.Server.Plugin.Plugin,
- Bcfg2.Server.Plugin.Probing,
+class Probes(Bcfg2.Server.Plugin.Probing,
Bcfg2.Server.Plugin.Connector,
Bcfg2.Server.Plugin.DatabaseBacked):
"""A plugin to gather information from a client machine."""
@@ -161,10 +159,9 @@ class Probes(Bcfg2.Server.Plugin.Plugin,
__author__ = 'bcfg-dev@mcs.anl.gov'
def __init__(self, core, datastore):
- Bcfg2.Server.Plugin.Plugin.__init__(self, core, datastore)
Bcfg2.Server.Plugin.Connector.__init__(self)
Bcfg2.Server.Plugin.Probing.__init__(self)
- Bcfg2.Server.Plugin.DatabaseBacked.__init__(self)
+ Bcfg2.Server.Plugin.DatabaseBacked.__init__(self, core, datastore)
try:
self.probes = ProbeSet(self.data, core.fam, core.encoding,
diff --git a/src/lib/Bcfg2/Server/Plugins/Snapshots.py b/src/lib/Bcfg2/Server/Plugins/Snapshots.py
index 232dbb0c3..666beef21 100644
--- a/src/lib/Bcfg2/Server/Plugins/Snapshots.py
+++ b/src/lib/Bcfg2/Server/Plugins/Snapshots.py
@@ -62,14 +62,12 @@ def build_snap_ent(entry):
return [desired, state]
-class Snapshots(Bcfg2.Server.Plugin.Statistics,
- Bcfg2.Server.Plugin.Plugin):
+class Snapshots(Bcfg2.Server.Plugin.Statistics):
name = 'Snapshots'
experimental = True
def __init__(self, core, datastore):
- Bcfg2.Server.Plugin.Plugin.__init__(self, core, datastore)
- Bcfg2.Server.Plugin.Statistics.__init__(self)
+ Bcfg2.Server.Plugin.Statistics.__init__(self, core, datastore)
self.session = Bcfg2.Server.Snapshots.setup_session(core.cfile)
self.work_queue = Queue()
self.loader = threading.Thread(target=self.load_snapshot)
diff --git a/src/lib/Bcfg2/Server/Plugins/Statistics.py b/src/lib/Bcfg2/Server/Plugins/Statistics.py
index 9af7549ff..ce8d085cc 100644
--- a/src/lib/Bcfg2/Server/Plugins/Statistics.py
+++ b/src/lib/Bcfg2/Server/Plugins/Statistics.py
@@ -113,13 +113,11 @@ class StatisticsStore(object):
return (now-utime) > secondsPerDay
-class Statistics(Bcfg2.Server.Plugin.Plugin,
- Bcfg2.Server.Plugin.ThreadedStatistics,
+class Statistics(Bcfg2.Server.Plugin.ThreadedStatistics,
Bcfg2.Server.Plugin.PullSource):
name = 'Statistics'
def __init__(self, core, datastore):
- Bcfg2.Server.Plugin.Plugin.__init__(self, core, datastore)
Bcfg2.Server.Plugin.ThreadedStatistics.__init__(self, core, datastore)
Bcfg2.Server.Plugin.PullSource.__init__(self)
fpath = "%s/etc/statistics.xml" % datastore
diff --git a/src/lib/Bcfg2/Server/Plugins/TemplateHelper.py b/src/lib/Bcfg2/Server/Plugins/TemplateHelper.py
index 3712506d6..2b3aa6bc5 100644
--- a/src/lib/Bcfg2/Server/Plugins/TemplateHelper.py
+++ b/src/lib/Bcfg2/Server/Plugins/TemplateHelper.py
@@ -50,12 +50,13 @@ class HelperModule(Bcfg2.Server.Plugin.SpecificData):
class HelperSet(Bcfg2.Server.Plugin.EntrySet):
ignore = re.compile("^(\.#.*|.*~|\\..*\\.(sw[px])|.*\.py[co])$")
+ fpattern = '[0-9A-Za-z_\-]+\.py'
def __init__(self, path, fam, encoding, plugin_name):
- fpattern = '[0-9A-Za-z_\-]+\.py'
self.plugin_name = plugin_name
- Bcfg2.Server.Plugin.EntrySet.__init__(self, fpattern, path,
- HelperModule, encoding)
+ Bcfg2.Server.Plugin.EntrySet.__init__(self, self.fpattern, path,
+ HelperModule, encoding,
+ is_regex=True)
fam.AddMonitor(path, self)
def HandleEvent(self, event):
diff --git a/src/sbin/bcfg2-info b/src/sbin/bcfg2-info
index a8cc17a7b..bba8eca7c 100755
--- a/src/sbin/bcfg2-info
+++ b/src/sbin/bcfg2-info
@@ -643,7 +643,7 @@ class infoCore(cmd.Cmd, Bcfg2.Server.Core.BaseCore):
print("Unable to build metadata for host %s" % args)
return
collection = self.plugins['Packages']._get_collection(metadata)
- print collection.sourcelist()
+ print(collection.sourcelist())
def do_profile(self, arg):
"""."""
diff --git a/src/sbin/bcfg2-yum-helper b/src/sbin/bcfg2-yum-helper
index 53784518b..a0698cc90 100755
--- a/src/sbin/bcfg2-yum-helper
+++ b/src/sbin/bcfg2-yum-helper
@@ -286,27 +286,27 @@ def main():
depsolver = DepSolver(options.config, options.verbose)
if cmd == "clean":
depsolver.clean_cache()
- print json.dumps(True)
+ print(json.dumps(True))
elif cmd == "complete":
data = json.loads(sys.stdin.read())
depsolver.groups = data['groups']
(packages, unknown) = depsolver.complete([pkg_to_tuple(p)
for p in data['packages']])
- print json.dumps(dict(packages=list(packages),
- unknown=list(unknown)))
+ print(json.dumps(dict(packages=list(packages),
+ unknown=list(unknown))))
elif cmd == "is_virtual_package":
package = pkg_to_tuple(json.loads(sys.stdin.read()))
- print json.dumps(bool(depsolver.get_provides(package, silent=True)))
+ print(json.dumps(bool(depsolver.get_provides(package, silent=True))))
elif cmd == "get_deps" or cmd == "get_provides":
package = pkg_to_tuple(json.loads(sys.stdin.read()))
- print json.dumps([p.name for p in getattr(depsolver, cmd)(package)])
+ print(json.dumps([p.name for p in getattr(depsolver, cmd)(package)]))
elif cmd == "get_group":
data = json.loads(sys.stdin.read())
if "type" in data:
packages = depsolver.get_group(data['group'], ptype=data['type'])
else:
packages = depsolver.get_group(data['group'])
- print json.dumps(list(packages))
+ print(json.dumps(list(packages)))
elif cmd == "get_groups":
data = json.loads(sys.stdin.read())
rv = dict()
@@ -317,10 +317,10 @@ def main():
else:
packages = depsolver.get_group(gdata['group'])
rv[gdata['group']] = list(packages)
- print json.dumps(rv)
+ print(json.dumps(rv))
elif cmd == "is_package":
package = pkg_to_tuple(json.loads(sys.stdin.read()))
- print json.dumps(getattr(depsolver, cmd)(package))
+ print(json.dumps(getattr(depsolver, cmd)(package)))
if __name__ == '__main__':
diff --git a/testsuite/Testlib/TestOptions.py b/testsuite/Testlib/TestOptions.py
index 2129b9287..e3bba787d 100644
--- a/testsuite/Testlib/TestOptions.py
+++ b/testsuite/Testlib/TestOptions.py
@@ -3,9 +3,9 @@ import sys
import unittest
from mock import Mock, patch
import Bcfg2.Options
+from ..common import *
-
-class TestOption(unittest.TestCase):
+class TestOption(Bcfg2TestCase):
def test__init(self):
self.assertRaises(Bcfg2.Options.OptionFailure,
Bcfg2.Options.Option,
@@ -30,7 +30,6 @@ class TestOption(unittest.TestCase):
c.get('foo', False, cmd='-f')
mock_cp.assert_any_call()
mock_open.assert_any_call(Bcfg2.Options.DEFAULT_CONFIG_LOCATION)
- print(mock_cp.return_value.get.called)
self.assertTrue(mock_cp.return_value.get.called)
@patch('Bcfg2.Options.DefaultConfigParser')
@@ -68,7 +67,7 @@ class TestOption(unittest.TestCase):
assert o2.value == True
-class TestOptionSet(unittest.TestCase):
+class TestOptionSet(Bcfg2TestCase):
def test_buildGetopt(self):
opts = [('foo', Bcfg2.Options.Option('foo', 'test1', cmd='-G')),
('bar', Bcfg2.Options.Option('foo', 'test2')),
@@ -108,7 +107,7 @@ class TestOptionSet(unittest.TestCase):
self.assertTrue(oset3['foo'])
-class TestOptionParser(unittest.TestCase):
+class TestOptionParser(Bcfg2TestCase):
def test__init(self):
opts = [('foo', Bcfg2.Options.Option('foo', 'test1', cmd='-h')),
('bar', Bcfg2.Options.Option('foo', 'test2')),
diff --git a/testsuite/Testlib/TestServer/TestPlugin.py b/testsuite/Testlib/TestServer/TestPlugin.py
new file mode 100644
index 000000000..3cfe0ca42
--- /dev/null
+++ b/testsuite/Testlib/TestServer/TestPlugin.py
@@ -0,0 +1,2183 @@
+import os
+import re
+import copy
+import logging
+import unittest
+import lxml.etree
+from mock import Mock, MagicMock, patch
+from Bcfg2.Server.Plugin import *
+import Bcfg2.Server
+from ...common import *
+
+class FakeElementTree(lxml.etree._ElementTree):
+ xinclude = Mock()
+
+
+class TestFunctions(Bcfg2TestCase):
+ def test_bind_info(self):
+ entry = lxml.etree.Element("Path", name="/test")
+ metadata = Mock()
+ default = dict(test1="test1", test2="test2")
+ # test without infoxml
+ bind_info(entry, metadata, default=default)
+ self.assertItemsEqual(entry.attrib,
+ dict(test1="test1",
+ test2="test2",
+ name="/test"))
+
+ # test with bogus infoxml
+ entry = lxml.etree.Element("Path", name="/test")
+ infoxml = Mock()
+ self.assertRaises(PluginExecutionError,
+ bind_info,
+ entry, metadata, infoxml=infoxml)
+ infoxml.pnode.Match.assert_called_with(metadata, dict(), entry=entry)
+
+ # test with valid infoxml
+ entry = lxml.etree.Element("Path", name="/test")
+ infoxml.reset_mock()
+ infodata = {None: {"test3": "test3", "test4": "test4"}}
+ def infoxml_rv(metadata, rv, entry=None):
+ rv['Info'] = infodata
+ infoxml.pnode.Match.side_effect = infoxml_rv
+ bind_info(entry, metadata, infoxml=infoxml, default=default)
+ # mock objects don't properly track the called-with value of
+ # arguments whose value is changed by the function, so it
+ # thinks Match() was called with the final value of the mdata
+ # arg, not the initial value. makes this test a little less
+ # worthwhile, TBH.
+ infoxml.pnode.Match.assert_called_with(metadata, dict(Info=infodata),
+ entry=entry)
+ self.assertItemsEqual(entry.attrib,
+ dict(test1="test1",
+ test2="test2",
+ test3="test3",
+ test4="test4",
+ name="/test"))
+
+
+class TestPluginInitError(Bcfg2TestCase):
+ """ placeholder for future tests """
+ pass
+
+
+class TestPluginExecutionError(Bcfg2TestCase):
+ """ placeholder for future tests """
+ pass
+
+
+class TestDebuggable(Bcfg2TestCase):
+ test_obj = Debuggable
+
+ def get_obj(self):
+ return self.test_obj()
+
+ def test__init(self):
+ d = self.get_obj()
+ self.assertIsInstance(d.logger, logging.Logger)
+ self.assertFalse(d.debug_flag)
+
+ @patch("Bcfg2.Server.Plugin.%s.debug_log" % test_obj.__name__)
+ def test_toggle_debug(self, mock_debug):
+ d = self.get_obj()
+ orig = d.debug_flag
+ d.toggle_debug()
+ self.assertNotEqual(orig, d.debug_flag)
+ self.assertTrue(mock_debug.called)
+
+ mock_debug.reset_mock()
+
+ changed = d.debug_flag
+ d.toggle_debug()
+ self.assertNotEqual(changed, d.debug_flag)
+ self.assertEqual(orig, d.debug_flag)
+ self.assertTrue(mock_debug.called)
+
+ def test_debug_log(self):
+ d = self.get_obj()
+ d.logger = Mock()
+ d.debug_flag = False
+ d.debug_log("test")
+ self.assertFalse(d.logger.error.called)
+
+ d.logger.reset_mock()
+ d.debug_log("test", flag=True)
+ self.assertTrue(d.logger.error.called)
+
+ d.logger.reset_mock()
+ d.debug_flag = True
+ d.debug_log("test")
+ self.assertTrue(d.logger.error.called)
+
+
+class TestPlugin(TestDebuggable):
+ test_obj = Plugin
+
+ def get_obj(self, core=None):
+ if core is None:
+ core = Mock()
+ return self.test_obj(core, datastore)
+
+ def test__init(self):
+ core = Mock()
+ p = self.get_obj(core=core)
+ self.assertEqual(p.data, os.path.join(datastore, p.name))
+ self.assertEqual(p.core, core)
+ self.assertIsInstance(p, Debuggable)
+
+ @patch("os.makedirs")
+ def test_init_repo(self, mock_makedirs):
+ self.test_obj.init_repo(datastore)
+ mock_makedirs.assert_called_with(os.path.join(datastore,
+ self.test_obj.name))
+
+
+class TestDatabaseBacked(TestPlugin):
+ test_obj = DatabaseBacked
+
+ @unittest.skipUnless(has_django, "Django not found")
+ def test__use_db(self):
+ core = Mock()
+ core.setup.cfp.getboolean.return_value = True
+ db = self.get_obj(core)
+ self.assertTrue(db._use_db)
+
+ core = Mock()
+ core.setup.cfp.getboolean.return_value = False
+ db = self.get_obj(core)
+ self.assertFalse(db._use_db)
+
+ Bcfg2.Server.Plugin.has_django = False
+ core = Mock()
+ db = self.get_obj(core)
+ self.assertFalse(db._use_db)
+
+ core = Mock()
+ core.setup.cfp.getboolean.return_value = True
+ db = self.get_obj(core)
+ self.assertFalse(db._use_db)
+ Bcfg2.Server.Plugin.has_django = True
+
+
+class TestPluginDatabaseModel(Bcfg2TestCase):
+ """ placeholder for future tests """
+ pass
+
+
+class TestGenerator(Bcfg2TestCase):
+ test_obj = Generator
+
+
+class TestStructure(Bcfg2TestCase):
+ test_obj = Structure
+
+ def get_obj(self):
+ return self.test_obj()
+
+ def test_BuildStructures(self):
+ s = self.get_obj()
+ self.assertRaises(NotImplementedError,
+ s.BuildStructures, None)
+
+
+class TestMetadata(Bcfg2TestCase):
+ test_obj = Metadata
+
+ def get_obj(self):
+ return self.test_obj()
+
+ def test_get_initial_metadata(self):
+ m = self.get_obj()
+ self.assertRaises(NotImplementedError,
+ m.get_initial_metadata, None)
+
+ def test_merge_additional_data(self):
+ m = self.get_obj()
+ self.assertRaises(NotImplementedError,
+ m.merge_additional_data, None, None, None)
+
+ def test_merge_additional_groups(self):
+ m = self.get_obj()
+ self.assertRaises(NotImplementedError,
+ m.merge_additional_groups, None, None)
+
+
+class TestConnector(Bcfg2TestCase):
+ """ placeholder """
+ pass
+
+
+class TestProbing(Bcfg2TestCase):
+ """ placeholder """
+ pass
+
+
+class TestStatistics(TestPlugin):
+ test_obj = Statistics
+
+
+class TestThreadedStatistics(TestStatistics):
+ test_obj = ThreadedStatistics
+ data = [("foo.example.com", "<foo/>"),
+ ("bar.example.com", "<bar/>")]
+
+ @patch("threading.Thread.start")
+ def test__init(self, mock_start):
+ core = Mock()
+ ts = self.get_obj(core)
+ mock_start.assert_any_call()
+
+ @patch("__builtin__.open")
+ @patch("Bcfg2.Server.Plugin.ThreadedStatistics.run", Mock())
+ def test_save(self, mock_open):
+ core = Mock()
+ ts = self.get_obj(core)
+ queue = Mock()
+ queue.empty = Mock(side_effect=Empty)
+ ts.work_queue = queue
+
+ mock_open.side_effect = OSError
+ # test that save does _not_ raise an exception even when
+ # everything goes pear-shaped
+ ts.save()
+ queue.empty.assert_any_call()
+ mock_open.assert_called_with(ts.pending_file, 'w')
+
+ queue.reset_mock()
+ mock_open.reset_mock()
+
+ queue.data = []
+ for hostname, xml in self.data:
+ md = Mock()
+ md.hostname = hostname
+ queue.data.append((md, lxml.etree.XML(xml)))
+ queue.empty.side_effect = lambda: len(queue.data) == 0
+ queue.get_nowait = Mock(side_effect=lambda: queue.data.pop())
+ mock_open.side_effect = None
+
+ # oh, the joy of working around different package names in
+ # py3k...
+ with patch("%s.dump" % cPickle.__name__) as mock_dump:
+ ts.save()
+ queue.empty.assert_any_call()
+ queue.get_nowait.assert_any_call()
+ mock_open.assert_called_with(ts.pending_file, 'w')
+ mock_open.return_value.close.assert_any_call()
+ # the order of the queue data gets changed, so we have to
+ # verify this call in an ugly way
+ self.assertItemsEqual(mock_dump.call_args[0][0], self.data)
+ self.assertEqual(mock_dump.call_args[0][1], mock_open.return_value)
+
+ @patch("os.unlink")
+ @patch("os.path.exists")
+ @patch("__builtin__.open")
+ @patch("lxml.etree.XML")
+ @patch("Bcfg2.Server.Plugin.ThreadedStatistics.run", Mock())
+ def test_load(self, mock_XML, mock_open, mock_exists, mock_unlink):
+ core = Mock()
+ core.terminate.isSet.return_value = False
+ ts = self.get_obj(core)
+
+ with patch("%s.load" % cPickle.__name__) as mock_load:
+ ts.work_queue = Mock()
+ ts.work_queue.data = []
+ def reset():
+ core.reset_mock()
+ mock_open.reset_mock()
+ mock_exists.reset_mock()
+ mock_unlink.reset_mock()
+ mock_load.reset_mock()
+ mock_XML.reset_mock()
+ ts.work_queue.reset_mock()
+ ts.work_queue.data = []
+
+ mock_exists.return_value = False
+ self.assertTrue(ts.load())
+ mock_exists.assert_called_with(ts.pending_file)
+
+ reset()
+ mock_exists.return_value = True
+ mock_open.side_effect = OSError
+ self.assertFalse(ts.load())
+ mock_exists.assert_called_with(ts.pending_file)
+ mock_open.assert_called_with(ts.pending_file, 'r')
+
+ reset()
+ mock_open.side_effect = None
+ mock_load.return_value = self.data
+ ts.work_queue.put_nowait.side_effect = Full
+ self.assertTrue(ts.load())
+ mock_exists.assert_called_with(ts.pending_file)
+ mock_open.assert_called_with(ts.pending_file, 'r')
+ mock_open.return_value.close.assert_any_call()
+ mock_load.assert_called_with(mock_open.return_value)
+
+ reset()
+ core.build_metadata.side_effect = lambda x: x
+ mock_XML.side_effect = lambda x, parser=None: x
+ ts.work_queue.put_nowait.side_effect = None
+ self.assertTrue(ts.load())
+ mock_exists.assert_called_with(ts.pending_file)
+ mock_open.assert_called_with(ts.pending_file, 'r')
+ mock_open.return_value.close.assert_any_call()
+ mock_load.assert_called_with(mock_open.return_value)
+ self.assertItemsEqual(mock_XML.call_args_list,
+ [call(x, parser=Bcfg2.Server.XMLParser)
+ for h, x in self.data])
+ self.assertItemsEqual(ts.work_queue.put_nowait.call_args_list,
+ [call((h, x)) for h, x in self.data])
+ mock_unlink.assert_called_with(ts.pending_file)
+
+ @patch("threading.Thread.start", Mock())
+ @patch("Bcfg2.Server.Plugin.ThreadedStatistics.load")
+ @patch("Bcfg2.Server.Plugin.ThreadedStatistics.save")
+ @patch("Bcfg2.Server.Plugin.ThreadedStatistics.handle_statistic")
+ def test_run(self, mock_handle, mock_save, mock_load):
+ core = Mock()
+ ts = self.get_obj(core)
+ mock_load.return_value = True
+ ts.work_queue = Mock()
+
+ def reset():
+ mock_handle.reset_mock()
+ mock_save.reset_mock()
+ mock_load.reset_mock()
+ core.reset_mock()
+ ts.work_queue.reset_mock()
+ ts.work_queue.data = self.data[:]
+ ts.work_queue.get_calls = 0
+
+ reset()
+
+ def get_rv(**kwargs):
+ ts.work_queue.get_calls += 1
+ try:
+ return ts.work_queue.data.pop()
+ except:
+ raise Empty
+ ts.work_queue.get.side_effect = get_rv
+ def terminate_isset():
+ # this lets the loop go on a few iterations with an empty
+ # queue to test that it doesn't error out
+ return ts.work_queue.get_calls > 3
+ core.terminate.isSet.side_effect = terminate_isset
+
+ ts.run()
+ mock_load.assert_any_call()
+ self.assertGreaterEqual(ts.work_queue.get.call_count, len(self.data))
+ self.assertItemsEqual(mock_handle.call_args_list,
+ [call(h, x) for h, x in self.data])
+ mock_save.assert_any_call()
+
+ @patch("copy.copy", Mock(side_effect=lambda x: x))
+ @patch("Bcfg2.Server.Plugin.ThreadedStatistics.run", Mock())
+ def test_process_statistics(self):
+ core = Mock()
+ ts = self.get_obj(core)
+ ts.work_queue = Mock()
+ ts.process_statistics(*self.data[0])
+ ts.work_queue.put_nowait.assert_called_with(self.data[0])
+
+ ts.work_queue.reset_mock()
+ ts.work_queue.put_nowait.side_effect = Full
+ # test that no exception is thrown
+ ts.process_statistics(*self.data[0])
+
+
class TestPullSource(Bcfg2TestCase):
    """ The abstract PullSource interface must refuse to serve
    entries until a subclass implements GetCurrentEntry(). """

    def test_GetCurrentEntry(self):
        source = PullSource()
        with self.assertRaises(NotImplementedError):
            source.GetCurrentEntry(None, None, None)
+
+
class TestPullTarget(Bcfg2TestCase):
    """ The abstract PullTarget interface must raise on both of its
    entry points until a subclass implements them. """

    def test_AcceptChoices(self):
        target = PullTarget()
        with self.assertRaises(NotImplementedError):
            target.AcceptChoices(None, None)

    def test_AcceptPullData(self):
        target = PullTarget()
        with self.assertRaises(NotImplementedError):
            target.AcceptPullData(None, None, None)
+
+
class TestDecision(Bcfg2TestCase):
    """ placeholder for future tests """
    pass  # TODO: add tests for the Decision interface
+
+
class TestValidationError(Bcfg2TestCase):
    """ placeholder for future tests """
    pass  # TODO: add tests for the ValidationError exception
+
+
class TestStructureValidator(Bcfg2TestCase):
    """ The abstract StructureValidator interface must raise until a
    subclass implements validate_structures(). """

    def test_validate_structures(self):
        validator = StructureValidator()
        with self.assertRaises(NotImplementedError):
            validator.validate_structures(None, None)
+
+
class TestGoalValidator(Bcfg2TestCase):
    """ The abstract GoalValidator interface must raise until a
    subclass implements validate_goals(). """

    def test_validate_goals(self):
        validator = GoalValidator()
        with self.assertRaises(NotImplementedError):
            validator.validate_goals(None, None)
+
+
class TestVersion(Bcfg2TestCase):
    """ placeholder for future tests """
    pass  # TODO: add tests for the Version interface
+
+
class TestClientRunHooks(Bcfg2TestCase):
    """ placeholder for future tests """
    pass  # TODO: add tests for the ClientRunHooks interface
+
+
class TestFileBacked(Bcfg2TestCase):
    """ Tests for FileBacked, the base class for server-side objects
    backed by a single monitored file. """
    # the class under test; subclasses override this to reuse the tests
    test_obj = FileBacked

    def get_obj(self, path=datastore, fam=None):
        # factory hook so subclasses can construct their own test_obj
        return self.test_obj(path, fam=fam)

    @patch("__builtin__.open")
    def test_HandleEvent(self, mock_open):
        """ exists/changed/created events must re-read the file and
        call Index(); endExist events must be ignored entirely. """
        path = "/test"
        fb = self.get_obj(path)
        fb.Index = Mock()

        def reset():
            fb.Index.reset_mock()
            mock_open.reset_mock()

        for evt in ["exists", "changed", "created"]:
            reset()
            event = Mock()
            event.code2str.return_value = evt
            fb.HandleEvent(event)
            # each of these event types reads the file and reindexes
            mock_open.assert_called_with(path)
            mock_open.return_value.read.assert_any_call()
            fb.Index.assert_any_call()

        # endExist must neither open the file nor reindex
        reset()
        event = Mock()
        event.code2str.return_value = "endExist"
        fb.HandleEvent(event)
        self.assertFalse(mock_open.called)
        self.assertFalse(fb.Index.called)
+
+
class TestDirectoryBacked(Bcfg2TestCase):
    """ Tests for DirectoryBacked, which monitors a directory tree via
    FAM and creates a __child__ object for each file found in it. """
    # the class under test; subclasses override this to reuse the tests
    test_obj = DirectoryBacked
    # sample paths covering the empty path, leading/trailing slashes and
    # several nesting depths, keyed by fake FAM request ID
    testpaths = {1: '',
                 2: '/foo',
                 3: '/foo/bar',
                 4: '/foo/bar/baz',
                 5: 'quux',
                 6: 'xyzzy/',
                 7: 'xyzzy/plugh/'}

    def test_child_interface(self):
        # ensure that the child object has the correct interface
        self.assertTrue(hasattr(self.test_obj.__child__, "HandleEvent"))

    @patch("Bcfg2.Server.Plugin.%s.add_directory_monitor" % test_obj.__name__,
           Mock())
    def get_obj(self, fam=None):
        # factory; monitor registration is stubbed out so construction
        # never touches a real FAM
        if fam is None:
            fam = Mock()
        return self.test_obj(datastore, fam)

    @patch("Bcfg2.Server.Plugin.%s.add_directory_monitor" % test_obj.__name__)
    def test__init(self, mock_add_monitor):
        """ construction must register a monitor on the root ('') """
        db = self.test_obj(datastore, Mock())
        mock_add_monitor.assert_called_with('')

    def test__getitem(self):
        """ item access must proxy to the entries dict """
        db = self.get_obj()
        db.entries.update(dict(a=1, b=2, c=3))
        self.assertEqual(db['a'], 1)
        self.assertEqual(db['b'], 2)
        with self.assertRaises(KeyError):
            db['d']

    def test__iter(self):
        """ iteration must yield the (key, value) pairs of entries """
        db = self.get_obj()
        db.entries.update(dict(a=1, b=2, c=3))
        self.assertEqual([i for i in db],
                         [i for i in db.entries.items()])

    @patch("os.path.isdir")
    def test_add_directory_monitor(self, mock_isdir):
        """ add_directory_monitor() must register each directory once
        and record the FAM handle; non-directories are ignored """
        db = self.get_obj()
        db.fam = Mock()
        db.fam.rv = 0

        def reset():
            # hand out a fresh, unique fake FAM handle each time
            db.fam.rv += 1
            db.fam.AddMonitor.return_value = db.fam.rv
            db.fam.reset_mock()
            mock_isdir.reset_mock()

        mock_isdir.return_value = True
        for path in self.testpaths.values():
            reset()
            db.add_directory_monitor(path)
            db.fam.AddMonitor.assert_called_with(os.path.join(db.data, path),
                                                 db)
            self.assertIn(db.fam.rv, db.handles)
            self.assertEqual(db.handles[db.fam.rv], path)

        reset()
        # test duplicate adds
        for path in self.testpaths.values():
            reset()
            db.add_directory_monitor(path)
            self.assertFalse(db.fam.AddMonitor.called)

        reset()
        mock_isdir.return_value = False
        db.add_directory_monitor('bogus')
        self.assertFalse(db.fam.AddMonitor.called)
        self.assertNotIn(db.fam.rv, db.handles)

    def test_add_entry(self):
        """ add_entry() must create a __child__ with the absolute path
        and the FAM, and deliver the triggering event to it """
        db = self.get_obj()
        db.fam = Mock()
        # a stand-in child class that records its constructor arguments
        class MockChild(Mock):
            def __init__(self, path, fam, **kwargs):
                Mock.__init__(self, **kwargs)
                self.path = path
                self.fam = fam
                self.HandleEvent = Mock()
        db.__child__ = MockChild

        for path in self.testpaths.values():
            event = Mock()
            db.add_entry(path, event)
            self.assertIn(path, db.entries)
            self.assertEqual(db.entries[path].path,
                             os.path.join(db.data, path))
            self.assertEqual(db.entries[path].fam, db.fam)
            db.entries[path].HandleEvent.assert_called_with(event)

    @patch("os.path.isdir")
    @patch("Bcfg2.Server.Plugin.%s.add_entry" % test_obj.__name__)
    @patch("Bcfg2.Server.Plugin.%s.add_directory_monitor" % test_obj.__name__)
    def test_HandleEvent(self, mock_add_monitor, mock_add_entry, mock_isdir):
        """ HandleEvent() must dispatch FAM events correctly: new
        directories get monitors, new files get entries, changed files
        are re-indexed (or added if unknown), deletions are pruned, and
        unknown request IDs and endExist events are ignored. """
        db = self.get_obj()
        # a path with a leading / should never get into
        # DirectoryBacked.handles, so strip that test case
        for rid, path in self.testpaths.items():
            path = path.lstrip('/')
            db.handles[rid] = path

        def reset():
            mock_isdir.reset_mock()
            mock_add_entry.reset_mock()
            mock_add_monitor.reset_mock()

        def get_event(filename, action, requestID):
            # build a fake FAM event
            event = Mock()
            event.code2str.return_value = action
            event.filename = filename
            event.requestID = requestID
            return event

        # test that events on paths that aren't handled fail properly
        reset()
        event = get_event('/foo', 'created', max(self.testpaths.keys()) + 1)
        db.HandleEvent(event)
        self.assertFalse(mock_add_monitor.called)
        self.assertFalse(mock_add_entry.called)

        for req_id, path in self.testpaths.items():
            # a path with a leading / should never get into
            # DirectoryBacked.handles, so strip that test case
            path = path.lstrip('/')
            basepath = os.path.join(datastore, path)
            for fname in ['foo', 'bar/baz.txt', 'plugh.py']:
                relpath = os.path.join(path, fname)
                abspath = os.path.join(basepath, fname)

                # test endExist does nothing
                reset()
                event = get_event(fname, 'endExist', req_id)
                db.HandleEvent(event)
                self.assertFalse(mock_add_monitor.called)
                self.assertFalse(mock_add_entry.called)

                mock_isdir.return_value = True
                for evt in ["created", "exists", "changed"]:
                    # test that creating or changing a directory works
                    reset()
                    event = get_event(fname, evt, req_id)
                    db.HandleEvent(event)
                    mock_add_monitor.assert_called_with(relpath)
                    self.assertFalse(mock_add_entry.called)

                mock_isdir.return_value = False
                for evt in ["created", "exists"]:
                    # test that creating a file works
                    reset()
                    event = get_event(fname, evt, req_id)
                    db.HandleEvent(event)
                    mock_add_entry.assert_called_with(relpath, event)
                    self.assertFalse(mock_add_monitor.called)
                    db.entries[relpath] = MagicMock()

                # test that changing a file that already exists works
                reset()
                event = get_event(fname, "changed", req_id)
                db.HandleEvent(event)
                db.entries[relpath].HandleEvent.assert_called_with(event)
                self.assertFalse(mock_add_monitor.called)
                self.assertFalse(mock_add_entry.called)

                # test that deleting an entry works
                reset()
                event = get_event(fname, "deleted", req_id)
                db.HandleEvent(event)
                self.assertNotIn(relpath, db.entries)

                # test that changing a file that doesn't exist works
                reset()
                event = get_event(fname, "changed", req_id)
                db.HandleEvent(event)
                mock_add_entry.assert_called_with(relpath, event)
                self.assertFalse(mock_add_monitor.called)
                db.entries[relpath] = MagicMock()

        # test that deleting a directory works. this is a little
        # strange because the _parent_ directory has to handle the
        # deletion
        reset()
        event = get_event('quux', "deleted", 1)
        db.HandleEvent(event)
        for key in db.entries.keys():
            self.assertFalse(key.startswith('quux'))
+
+
class TestXMLFileBacked(TestFileBacked):
    """ Tests for XMLFileBacked: a FileBacked object whose contents are
    parsed as XML, whose XIncludes are followed (and optionally
    monitored), and whose top-level children become entries. """
    test_obj = XMLFileBacked

    def get_obj(self, path=datastore, fam=None, should_monitor=False):
        # factory hook so subclasses can construct their own test_obj
        return self.test_obj(path, fam=fam, should_monitor=should_monitor)

    def test__init(self):
        """ a FAM monitor must be added only when should_monitor=True """
        fam = Mock()
        fname = "/test"
        xfb = self.get_obj(fname)
        self.assertIsNone(xfb.fam)

        xfb = self.get_obj(fname, fam=fam)
        self.assertFalse(fam.AddMonitor.called)

        fam.reset_mock()
        xfb = self.get_obj(fname, fam=fam, should_monitor=True)
        fam.AddMonitor.assert_called_with(fname, xfb)

    @patch("os.path.exists")
    @patch("lxml.etree.parse")
    def test_follow_xincludes(self, mock_parse, mock_exists):
        """ _follow_xincludes() must recursively monitor every
        xincluded file, resolving both absolute and relative hrefs """
        fname = "/test/test1.xml"
        xfb = self.get_obj(fname)
        xfb.add_monitor = Mock()

        def reset():
            xfb.add_monitor.reset_mock()
            mock_parse.reset_mock()
            mock_exists.reset_mock()
            xfb.extras = []

        mock_exists.return_value = True
        xdata = dict()
        # serve canned parse trees keyed by file name
        mock_parse.side_effect = lambda p: xdata[p]

        # basic functionality: a document with no includes adds nothing
        xdata['/test/test2.xml'] = lxml.etree.Element("Test").getroottree()
        xfb._follow_xincludes(xdata=xdata['/test/test2.xml'])
        self.assertFalse(xfb.add_monitor.called)

        if (not hasattr(self.test_obj, "xdata") or
            not isinstance(self.test_obj.xdata, property)):
            # if xdata is settable, test that method of getting data
            # to _follow_xincludes
            reset()
            xfb.xdata = xdata['/test/test2.xml'].getroot()
            xfb._follow_xincludes()
            self.assertFalse(xfb.add_monitor.called)
            xfb.xdata = None

        reset()
        xfb._follow_xincludes(fname="/test/test2.xml")
        self.assertFalse(xfb.add_monitor.called)

        # test one level of xinclude
        xdata[fname] = lxml.etree.Element("Test").getroottree()
        lxml.etree.SubElement(xdata[fname].getroot(),
                              Bcfg2.Server.XI_NAMESPACE + "include",
                              href="/test/test2.xml")
        reset()
        xfb._follow_xincludes(fname=fname)
        xfb.add_monitor.assert_called_with("/test/test2.xml")
        self.assertItemsEqual(mock_parse.call_args_list,
                              [call(f) for f in xdata.keys()])
        mock_exists.assert_called_with("/test/test2.xml")

        reset()
        xfb._follow_xincludes(xdata=xdata[fname])
        xfb.add_monitor.assert_called_with("/test/test2.xml")
        self.assertItemsEqual(mock_parse.call_args_list,
                              [call(f) for f in xdata.keys()
                               if f != fname])
        mock_exists.assert_called_with("/test/test2.xml")

        # test two-deep level of xinclude, with some files in another
        # directory
        xdata["/test/test3.xml"] = \
            lxml.etree.Element("Test").getroottree()
        lxml.etree.SubElement(xdata["/test/test3.xml"].getroot(),
                              Bcfg2.Server.XI_NAMESPACE + "include",
                              href="/test/test_dir/test4.xml")
        xdata["/test/test_dir/test4.xml"] = \
            lxml.etree.Element("Test").getroottree()
        lxml.etree.SubElement(xdata["/test/test_dir/test4.xml"].getroot(),
                              Bcfg2.Server.XI_NAMESPACE + "include",
                              href="/test/test_dir/test5.xml")
        xdata['/test/test_dir/test5.xml'] = \
            lxml.etree.Element("Test").getroottree()
        xdata['/test/test_dir/test6.xml'] = \
            lxml.etree.Element("Test").getroottree()
        # relative includes
        lxml.etree.SubElement(xdata[fname].getroot(),
                              Bcfg2.Server.XI_NAMESPACE + "include",
                              href="test3.xml")
        lxml.etree.SubElement(xdata["/test/test3.xml"].getroot(),
                              Bcfg2.Server.XI_NAMESPACE + "include",
                              href="test_dir/test6.xml")

        reset()
        xfb._follow_xincludes(fname=fname)
        self.assertItemsEqual(xfb.add_monitor.call_args_list,
                              [call(f) for f in xdata.keys() if f != fname])
        self.assertItemsEqual(mock_parse.call_args_list,
                              [call(f) for f in xdata.keys()])
        self.assertItemsEqual(mock_exists.call_args_list,
                              [call(f) for f in xdata.keys() if f != fname])

        reset()
        xfb._follow_xincludes(xdata=xdata[fname])
        self.assertItemsEqual(xfb.add_monitor.call_args_list,
                              [call(f) for f in xdata.keys() if f != fname])
        self.assertItemsEqual(mock_parse.call_args_list,
                              [call(f) for f in xdata.keys() if f != fname])
        self.assertItemsEqual(mock_exists.call_args_list,
                              [call(f) for f in xdata.keys() if f != fname])

    @patch("lxml.etree._ElementTree", FakeElementTree)
    @patch("Bcfg2.Server.Plugin.%s._follow_xincludes" % test_obj.__name__)
    def test_Index(self, mock_follow):
        """ Index() must parse self.data, follow xincludes, run
        xinclude processing, and populate entries; malformed XML must
        raise PluginInitError """
        fname = "/test/test1.xml"
        xfb = self.get_obj(fname)

        def reset():
            mock_follow.reset_mock()
            FakeElementTree.xinclude.reset_mock()
            xfb.extras = []
            xfb.xdata = None

        # syntax error
        xfb.data = "<"
        self.assertRaises(PluginInitError, xfb.Index)

        # no xinclude
        reset()
        xdata = lxml.etree.Element("Test", name="test")
        children = [lxml.etree.SubElement(xdata, "Foo"),
                    lxml.etree.SubElement(xdata, "Bar", name="bar")]
        xfb.data = lxml.etree.tostring(xdata)
        xfb.Index()
        mock_follow.assert_any_call()
        self.assertEqual(xfb.xdata.base, fname)
        self.assertItemsEqual([lxml.etree.tostring(e) for e in xfb.entries],
                              [lxml.etree.tostring(e) for e in children])

        # with xincludes
        reset()
        mock_follow.side_effect = \
            lambda: xfb.extras.extend(["/test/test2.xml",
                                       "/test/test_dir/test3.xml"])
        children.extend([
                lxml.etree.SubElement(xdata,
                                      Bcfg2.Server.XI_NAMESPACE + "include",
                                      href="/test/test2.xml"),
                lxml.etree.SubElement(xdata,
                                      Bcfg2.Server.XI_NAMESPACE + "include",
                                      href="/test/test_dir/test3.xml")])
        test2 = lxml.etree.Element("Test", name="test2")
        lxml.etree.SubElement(test2, "Baz")
        test3 = lxml.etree.Element("Test", name="test3")
        replacements = {"/test/test2.xml": test2,
                        "/test/test_dir/test3.xml": test3}
        def xinclude():
            # simulate xinclude processing: swap each include element
            # for its target document
            for el in xfb.xdata.findall('//%sinclude' %
                                        Bcfg2.Server.XI_NAMESPACE):
                xfb.xdata.replace(el, replacements[el.get("href")])
        FakeElementTree.xinclude.side_effect = xinclude

        xfb.data = lxml.etree.tostring(xdata)
        xfb.Index()
        mock_follow.assert_any_call()
        # bug fix: this previously read ``assert_any_call`` with no
        # parentheses -- a bare attribute access that never asserted
        # anything.  xinclude() must actually have been invoked.
        FakeElementTree.xinclude.assert_any_call()
        self.assertEqual(xfb.xdata.base, fname)
        self.assertItemsEqual([lxml.etree.tostring(e) for e in xfb.entries],
                              [lxml.etree.tostring(e) for e in children])

    def test_add_monitor(self):
        """ add_monitor() must always record the file in extras, and
        must register a FAM monitor only when monitoring is enabled """
        fname = "/test/test1.xml"
        xfb = self.get_obj(fname)
        xfb.add_monitor("/test/test2.xml")
        self.assertIn("/test/test2.xml", xfb.extras)

        fam = Mock()
        xfb = self.get_obj(fname, fam=fam)
        fam.reset_mock()
        xfb.add_monitor("/test/test3.xml")
        self.assertFalse(fam.AddMonitor.called)
        self.assertIn("/test/test3.xml", xfb.extras)

        fam.reset_mock()
        xfb = self.get_obj(fname, fam=fam, should_monitor=True)
        xfb.add_monitor("/test/test4.xml")
        fam.AddMonitor.assert_called_with("/test/test4.xml", xfb)
        self.assertIn("/test/test4.xml", xfb.extras)
+
+
class TestStructFile(TestXMLFileBacked):
    """ Tests for StructFile, which filters an XML document through
    client metadata via Group and Client predicate elements. """
    test_obj = StructFile

    def _get_test_data(self):
        """ build a very complex set of test data """
        # top-level group and client elements
        groups = dict()
        # group and client elements that are descendents of other group or
        # client elements
        subgroups = dict()
        # children of elements in `groups' that should be included in
        # match results
        children = dict()
        # children of elements in `subgroups' that should be included in
        # match results
        subchildren = dict()
        # top-level tags that are not group elements
        standalone = []
        xdata = lxml.etree.Element("Test", name="test")
        groups[0] = lxml.etree.SubElement(xdata, "Group", name="group1",
                                          include="true")
        children[0] = [lxml.etree.SubElement(groups[0], "Child", name="c1"),
                       lxml.etree.SubElement(groups[0], "Child", name="c2")]
        subgroups[0] = [lxml.etree.SubElement(groups[0], "Group",
                                              name="subgroup1", include="true"),
                        lxml.etree.SubElement(groups[0],
                                              "Client", name="client1",
                                              include="false")]
        subchildren[0] = \
            [lxml.etree.SubElement(subgroups[0][0], "Child", name="sc1"),
             lxml.etree.SubElement(subgroups[0][0], "Child", name="sc2",
                                   attr="some attr"),
             lxml.etree.SubElement(subgroups[0][0], "Child", name="sc3")]
        lxml.etree.SubElement(subchildren[0][-1], "SubChild", name="subchild")
        lxml.etree.SubElement(subgroups[0][1], "Child", name="sc4")

        groups[1] = lxml.etree.SubElement(xdata, "Group", name="group2",
                                          include="false")
        children[1] = []
        subgroups[1] = []
        subchildren[1] = []
        lxml.etree.SubElement(groups[1], "Child", name="c3")
        lxml.etree.SubElement(groups[1], "Child", name="c4")

        standalone.append(lxml.etree.SubElement(xdata, "Standalone", name="s1"))

        groups[2] = lxml.etree.SubElement(xdata, "Client", name="client2",
                                          include="false")
        children[2] = []
        subgroups[2] = []
        subchildren[2] = []
        lxml.etree.SubElement(groups[2], "Child", name="c5")
        lxml.etree.SubElement(groups[2], "Child", name="c6")

        standalone.append(lxml.etree.SubElement(xdata, "Standalone", name="s2",
                                                attr="some attr"))

        groups[3] = lxml.etree.SubElement(xdata, "Client", name="client3",
                                          include="true")
        children[3] = [lxml.etree.SubElement(groups[3], "Child", name="c7",
                                             attr="some_attr"),
                       lxml.etree.SubElement(groups[3], "Child", name="c8")]
        subgroups[3] = []
        subchildren[3] = []
        lxml.etree.SubElement(children[3][-1], "SubChild", name="subchild")

        standalone.append(lxml.etree.SubElement(xdata, "Standalone", name="s3"))
        lxml.etree.SubElement(standalone[-1], "SubStandalone", name="sub1")

        children[4] = standalone
        return (xdata, groups, subgroups, children, subchildren, standalone)

    def test_include_element(self):
        """ _include_element() must honor group/client membership and a
        case-insensitive negate attribute; comments are never included,
        and non-predicate tags always are """
        sf = self.get_obj("/test/test.xml")
        metadata = Mock()
        metadata.groups = ["group1", "group2"]
        metadata.hostname = "foo.example.com"

        inc = lambda tag, **attrs: \
            sf._include_element(lxml.etree.Element(tag, **attrs), metadata)

        self.assertFalse(sf._include_element(lxml.etree.Comment("test"),
                                             metadata))

        self.assertFalse(inc("Group", name="group3"))
        self.assertFalse(inc("Group", name="group2", negate="true"))
        self.assertFalse(inc("Group", name="group2", negate="tRuE"))
        self.assertTrue(inc("Group", name="group2"))
        self.assertTrue(inc("Group", name="group2", negate="false"))
        self.assertTrue(inc("Group", name="group2", negate="faLSe"))
        self.assertTrue(inc("Group", name="group3", negate="true"))
        self.assertTrue(inc("Group", name="group3", negate="tRUe"))

        self.assertFalse(inc("Client", name="bogus.example.com"))
        self.assertFalse(inc("Client", name="foo.example.com", negate="true"))
        self.assertFalse(inc("Client", name="foo.example.com", negate="tRuE"))
        self.assertTrue(inc("Client", name="foo.example.com"))
        self.assertTrue(inc("Client", name="foo.example.com", negate="false"))
        self.assertTrue(inc("Client", name="foo.example.com", negate="faLSe"))
        self.assertTrue(inc("Client", name="bogus.example.com", negate="true"))
        self.assertTrue(inc("Client", name="bogus.example.com", negate="tRUe"))

        self.assertTrue(inc("Other"))

    @patch("Bcfg2.Server.Plugin.%s._include_element" % test_obj.__name__)
    def test__match(self, mock_include):
        """ _match() must return the matching descendants of a single
        element, recursing through included predicate elements """
        sf = self.get_obj("/test/test.xml")
        metadata = Mock()

        (xdata, groups, subgroups, children, subchildren, standalone) = \
            self._get_test_data()

        mock_include.side_effect = \
            lambda x, _: (x.tag not in ['Client', 'Group'] or
                          x.get("include") == "true")

        for i, group in groups.items():
            actual = sf._match(group, metadata)
            expected = children[i] + subchildren[i]
            self.assertEqual(len(actual), len(expected))
            # easiest way to compare the values is actually to make
            # them into an XML document and let assertXMLEqual compare
            # them
            xactual = lxml.etree.Element("Container")
            xactual.extend(actual)
            xexpected = lxml.etree.Element("Container")
            xexpected.extend(expected)
            self.assertXMLEqual(xactual, xexpected)

        for el in standalone:
            self.assertXMLEqual(el, sf._match(el, metadata)[0])

    @patch("Bcfg2.Server.Plugin.%s._match" % test_obj.__name__)
    def test_Match(self, mock_match):
        """ Match() must aggregate _match() results over all top-level
        entries """
        sf = self.get_obj("/test/test.xml")
        metadata = Mock()

        (xdata, groups, subgroups, children, subchildren, standalone) = \
            self._get_test_data()
        sf.entries.extend(copy.deepcopy(xdata).getchildren())

        def match_rv(el, _):
            if el.tag not in ['Client', 'Group']:
                return [el]
            # bug fix: this branch previously tested ``x.get(...)`` --
            # an undefined name -- which raised NameError for every
            # Client/Group element and broke the whole test
            elif el.get("include") == "true":
                return el.getchildren()
            else:
                return []
        mock_match.side_effect = match_rv
        actual = sf.Match(metadata)
        expected = reduce(lambda x, y: x + y,
                          children.values() + subgroups.values())
        self.assertEqual(len(actual), len(expected))
        # easiest way to compare the values is actually to make
        # them into an XML document and let assertXMLEqual compare
        # them
        xactual = lxml.etree.Element("Container")
        xactual.extend(actual)
        xexpected = lxml.etree.Element("Container")
        xexpected.extend(expected)
        self.assertXMLEqual(xactual, xexpected)

    @patch("Bcfg2.Server.Plugin.%s._include_element" % test_obj.__name__)
    def test__xml_match(self, mock_include):
        """ _xml_match() must prune excluded predicate elements and
        splice the children of included ones into the document """
        sf = self.get_obj("/test/test.xml")
        metadata = Mock()

        (xdata, groups, subgroups, children, subchildren, standalone) = \
            self._get_test_data()

        mock_include.side_effect = \
            lambda x, _: (x.tag not in ['Client', 'Group'] or
                          x.get("include") == "true")

        actual = copy.deepcopy(xdata)
        for el in actual.getchildren():
            sf._xml_match(el, metadata)
        expected = lxml.etree.Element(xdata.tag, **xdata.attrib)
        expected.text = xdata.text
        expected.extend(reduce(lambda x, y: x + y,
                               children.values() + subchildren.values()))
        expected.extend(standalone)
        self.assertXMLEqual(actual, expected)

    @patch("Bcfg2.Server.Plugin.%s._xml_match" % test_obj.__name__)
    def test_XMLMatch(self, mock_xml_match):
        """ XMLMatch() must invoke _xml_match() on every top-level
        element with the given metadata.  (bug fix: this method was
        also named test_Match, silently shadowing the Match() test
        above so that it never ran.) """
        sf = self.get_obj("/test/test.xml")
        metadata = Mock()

        (sf.xdata, groups, subgroups, children, subchildren, standalone) = \
            self._get_test_data()

        sf.XMLMatch(metadata)
        actual = []
        # renamed from ``call`` to avoid shadowing mock.call
        for mock_call in mock_xml_match.call_args_list:
            actual.append(mock_call[0][0])
            self.assertEqual(mock_call[0][1], metadata)
        expected = groups.values() + standalone
        # easiest way to compare the values is actually to make
        # them into an XML document and let assertXMLEqual compare
        # them
        xactual = lxml.etree.Element("Container")
        xactual.extend(actual)
        xexpected = lxml.etree.Element("Container")
        xexpected.extend(expected)
        self.assertXMLEqual(xactual, xexpected)
+
+
+# INode.__init__ and INode._load_children() call each other
+# recursively, which makes this class kind of a nightmare to test. we
+# have to first patch INode._load_children so that we can create an
+# INode object with no children loaded, then we unpatch
+# INode._load_children and patch INode.__init__ so that child objects
+# aren't actually created. but in order to test things atomically, we
+# do this umpteen times in order to test with different data. we
+# write our own context manager to make this a little easier. fun fun
+# fun.
class patch_inode(object):
    """ Context manager that builds an INode (or subclass) and loads
    its children exactly once: the object itself is constructed with
    _load_children patched out, then _load_children is run with
    __init__ patched so that child elements are recorded without
    recursing further.  Yields (inode, mock_init) so tests can inspect
    how children would have been constructed. """
    def __init__(self, test_obj, data, idict):
        # class under test, its XML data, and the shared index dict
        self.test_obj = test_obj
        self.data = data
        self.idict = idict
        self.patch_init = None
        self.inode = None

    def __enter__(self):
        # construct without recursing into children...
        with patch("Bcfg2.Server.Plugin.%s._load_children" %
                   self.test_obj.__name__):
            self.inode = self.test_obj(self.data, self.idict)
        # ...then load children with __init__ stubbed to a no-op so
        # grandchildren are never created
        self.patch_init = patch("Bcfg2.Server.Plugin.%s.__init__" %
                                self.inode.__class__.__name__,
                                new=Mock(return_value=None))
        self.patch_init.start()
        self.inode._load_children(self.data, self.idict)
        # patch_init.new is the Mock standing in for __init__
        return (self.inode, self.patch_init.new)

    def __exit__(self, type, value, traceback):
        self.patch_init.stop()
        del self.patch_init
        del self.inode
+
+
class TestINode(Bcfg2TestCase):
    """ Tests for INode, the building block of XMLSrc-style files:
    each node wraps a predicate (eval'd from the raw/nraw templates)
    plus the data and child nodes beneath it. """
    # the class under test; subclasses override this to reuse the tests
    test_obj = INode

    def test_raw_predicates(self):
        """ the raw (positive) and nraw (negated) predicate templates
        must evaluate correctly for Client and Group, AND'ed with the
        parent predicate """
        metadata = Mock()
        metadata.groups = ["group1", "group2"]
        metadata.hostname = "foo.example.com"
        entry = None

        # with a parent predicate that always matches
        parent_predicate = lambda m, e: True
        pred = eval(self.test_obj.raw['Client'] % dict(name="foo.example.com"),
                    dict(predicate=parent_predicate))
        self.assertTrue(pred(metadata, entry))
        pred = eval(self.test_obj.raw['Client'] % dict(name="bar.example.com"),
                    dict(predicate=parent_predicate))
        self.assertFalse(pred(metadata, entry))

        pred = eval(self.test_obj.raw['Group'] % dict(name="group1"),
                    dict(predicate=parent_predicate))
        self.assertTrue(pred(metadata, entry))
        pred = eval(self.test_obj.raw['Group'] % dict(name="group3"),
                    dict(predicate=parent_predicate))
        self.assertFalse(pred(metadata, entry))

        # negated templates must invert the above results
        pred = eval(self.test_obj.nraw['Client'] % dict(name="foo.example.com"),
                    dict(predicate=parent_predicate))
        self.assertFalse(pred(metadata, entry))
        pred = eval(self.test_obj.nraw['Client'] % dict(name="bar.example.com"),
                    dict(predicate=parent_predicate))
        self.assertTrue(pred(metadata, entry))

        pred = eval(self.test_obj.nraw['Group'] % dict(name="group1"),
                    dict(predicate=parent_predicate))
        self.assertFalse(pred(metadata, entry))
        pred = eval(self.test_obj.nraw['Group'] % dict(name="group3"),
                    dict(predicate=parent_predicate))
        self.assertTrue(pred(metadata, entry))

        # a false parent predicate must veto everything
        parent_predicate = lambda m, e: False
        pred = eval(self.test_obj.raw['Client'] % dict(name="foo.example.com"),
                    dict(predicate=parent_predicate))
        self.assertFalse(pred(metadata, entry))
        pred = eval(self.test_obj.raw['Group'] % dict(name="group1"),
                    dict(predicate=parent_predicate))
        self.assertFalse(pred(metadata, entry))
        pred = eval(self.test_obj.nraw['Client'] % dict(name="bar.example.com"),
                    dict(predicate=parent_predicate))
        self.assertFalse(pred(metadata, entry))
        pred = eval(self.test_obj.nraw['Group'] % dict(name="group3"),
                    dict(predicate=parent_predicate))
        self.assertFalse(pred(metadata, entry))

        # every declared container tag must have both templates
        self.assertItemsEqual(self.test_obj.containers,
                              self.test_obj.raw.keys())
        self.assertItemsEqual(self.test_obj.containers,
                              self.test_obj.nraw.keys())

    # NOTE(review): this test uses INode directly rather than
    # self.test_obj, so subclasses inheriting it re-test the base
    # class -- confirm whether that is intended
    @patch("Bcfg2.Server.Plugin.INode._load_children")
    def test__init(self, mock_load_children):
        data = lxml.etree.Element("Bogus")
        # called with no parent, should not raise an exception; it's a
        # top-level tag in an XML file and so is not expected to be a
        # proper predicate
        INode(data, dict())
        self.assertRaises(PluginExecutionError,
                          INode, data, dict(), Mock())

        data = lxml.etree.Element("Client", name="foo.example.com")
        idict = dict()
        inode = INode(data, idict)
        mock_load_children.assert_called_with(data, idict)
        # a top-level node's predicate is always true
        self.assertTrue(inode.predicate(Mock(), Mock()))

        parent = Mock()
        parent.predicate = lambda m, e: True
        metadata = Mock()
        metadata.groups = ["group1", "group2"]
        metadata.hostname = "foo.example.com"
        entry = None

        # test setting predicate with parent object
        mock_load_children.reset_mock()
        inode = INode(data, idict, parent=parent)
        mock_load_children.assert_called_with(data, idict)
        self.assertTrue(inode.predicate(metadata, entry))

        # test negation
        data = lxml.etree.Element("Client", name="foo.example.com",
                                  negate="true")
        mock_load_children.reset_mock()
        inode = INode(data, idict, parent=parent)
        mock_load_children.assert_called_with(data, idict)
        self.assertFalse(inode.predicate(metadata, entry))

        # test failure of a matching predicate (client names do not match)
        data = lxml.etree.Element("Client", name="foo.example.com")
        metadata.hostname = "bar.example.com"
        mock_load_children.reset_mock()
        inode = INode(data, idict, parent=parent)
        mock_load_children.assert_called_with(data, idict)
        self.assertFalse(inode.predicate(metadata, entry))

        # test that parent predicate is AND'ed in correctly
        parent.predicate = lambda m, e: False
        metadata.hostname = "foo.example.com"
        mock_load_children.reset_mock()
        inode = INode(data, idict, parent=parent)
        mock_load_children.assert_called_with(data, idict)
        self.assertFalse(inode.predicate(metadata, entry))

    def test_load_children(self):
        """ _load_children() must create child INodes for container
        tags, record data tags in contents and the index dict, and
        honor the class's ignore list """
        data = lxml.etree.Element("Parent")
        child1 = lxml.etree.SubElement(data, "Client", name="foo.example.com")
        child2 = lxml.etree.SubElement(data, "Group", name="bar", negate="true")
        idict = dict()
        with patch_inode(self.test_obj, data, idict) as (inode, mock_init):
            # both container children become child INodes; nothing is
            # recorded as plain data
            self.assertItemsEqual(mock_init.call_args_list,
                                  [call(child1, idict, inode),
                                   call(child2, idict, inode)])
            self.assertEqual(idict, dict())
            self.assertItemsEqual(inode.contents, dict())

        data = lxml.etree.Element("Parent")
        child1 = lxml.etree.SubElement(data, "Data", name="child1",
                                       attr="some attr")
        child1.text = "text"
        subchild1 = lxml.etree.SubElement(child1, "SubChild", name="subchild")
        child2 = lxml.etree.SubElement(data, "Group", name="bar", negate="true")
        idict = dict()
        with patch_inode(self.test_obj, data, idict) as (inode, mock_init):
            # only the Group child becomes an INode; the Data child is
            # indexed with its attributes, text and grandchildren
            mock_init.assert_called_with(child2, idict, inode)
            tag = child1.tag
            name = child1.get("name")
            self.assertEqual(idict, dict(Data=[name]))
            self.assertIn(tag, inode.contents)
            self.assertIn(name, inode.contents[tag])
            self.assertItemsEqual(inode.contents[tag][name],
                                  dict(name=name,
                                       attr=child1.get('attr'),
                                       __text__=child1.text,
                                       __children__=[subchild1]))

        # test ignore. no ignore is set on INode by default, so we
        # have to set one
        old_ignore = copy.copy(self.test_obj.ignore)
        self.test_obj.ignore.append("Data")
        idict = dict()
        with patch_inode(self.test_obj, data, idict) as (inode, mock_init):
            mock_init.assert_called_with(child2, idict, inode)
            self.assertEqual(idict, dict())
            self.assertItemsEqual(inode.contents, dict())
        self.test_obj.ignore = old_ignore

    def test_Match(self):
        """ Match() must only merge this node's contents into the data
        dict when the predicate accepts the metadata/entry pair """
        idata = lxml.etree.Element("Parent")
        contents = lxml.etree.SubElement(idata, "Data", name="contents",
                                         attr="some attr")
        child = lxml.etree.SubElement(idata, "Group", name="bar", negate="true")

        inode = INode(idata, dict())
        inode.predicate = Mock()
        inode.predicate.return_value = False

        metadata = Mock()
        metadata.groups = ['foo']
        data = dict()
        entry = child

        # predicate rejects: data stays empty
        inode.Match(metadata, data, entry=child)
        self.assertEqual(data, dict())
        inode.predicate.assert_called_with(metadata, child)

        inode.predicate.reset_mock()
        inode.Match(metadata, data)
        self.assertEqual(data, dict())
        # can't easily compare XML args without the original
        # object, and we're testing that Match() works without an
        # XML object passed in, so...
        self.assertEqual(inode.predicate.call_args[0][0],
                         metadata)
        self.assertXMLEqual(inode.predicate.call_args[0][1],
                            lxml.etree.Element("None"))

        # predicate accepts: contents are merged into data
        inode.predicate.reset_mock()
        inode.predicate.return_value = True
        inode.Match(metadata, data, entry=child)
        self.assertEqual(data, inode.contents)
        inode.predicate.assert_called_with(metadata, child)
+
+
class TestInfoNode(TestINode):
    """ Tests for InfoNode, which extends INode predicates to also
    match on an entry's realname attribute. """
    __test__ = True
    test_obj = InfoNode

    def test_raw_predicates(self):
        """ Path predicates match on both name and realname.
        Consistently uses self.test_obj (the original hard-coded
        InfoNode in several places, defeating the subclassable
        test_obj pattern used throughout this module). """
        TestINode.test_raw_predicates(self)
        metadata = Mock()
        entry = lxml.etree.Element("Path", name="/tmp/foo",
                                   realname="/tmp/bar")

        # with a true parent predicate, raw matches name or realname
        parent_predicate = lambda m, d: True
        pred = eval(self.test_obj.raw['Path'] % dict(name="/tmp/foo"),
                    dict(predicate=parent_predicate))
        self.assertTrue(pred(metadata, entry))
        pred = eval(self.test_obj.raw['Path'] % dict(name="/tmp/bar"),
                    dict(predicate=parent_predicate))
        self.assertTrue(pred(metadata, entry))
        pred = eval(self.test_obj.raw['Path'] % dict(name="/tmp/bogus"),
                    dict(predicate=parent_predicate))
        self.assertFalse(pred(metadata, entry))

        # nraw (negated) predicates invert those results
        pred = eval(self.test_obj.nraw['Path'] % dict(name="/tmp/foo"),
                    dict(predicate=parent_predicate))
        self.assertFalse(pred(metadata, entry))
        pred = eval(self.test_obj.nraw['Path'] % dict(name="/tmp/bar"),
                    dict(predicate=parent_predicate))
        self.assertFalse(pred(metadata, entry))
        pred = eval(self.test_obj.nraw['Path'] % dict(name="/tmp/bogus"),
                    dict(predicate=parent_predicate))
        self.assertTrue(pred(metadata, entry))

        # with a false parent predicate nothing matches
        parent_predicate = lambda m, d: False
        pred = eval(self.test_obj.raw['Path'] % dict(name="/tmp/foo"),
                    dict(predicate=parent_predicate))
        self.assertFalse(pred(metadata, entry))
        pred = eval(self.test_obj.raw['Path'] % dict(name="/tmp/bar"),
                    dict(predicate=parent_predicate))
        self.assertFalse(pred(metadata, entry))
        pred = eval(self.test_obj.nraw['Path'] % dict(name="/tmp/bogus"),
                    dict(predicate=parent_predicate))
        self.assertFalse(pred(metadata, entry))
+
+
class TestXMLSrc(TestXMLFileBacked):
    """ Tests for XMLSrc, an XML file parsed into a tree of __node__
    objects with a required integer priority. """
    test_obj = XMLSrc

    def test_node_interface(self):
        # ensure that the node object has the necessary interface
        self.assertTrue(hasattr(self.test_obj.__node__, "Match"))

    @patch("__builtin__.open")
    def test_HandleEvent(self, mock_open):
        """ HandleEvent() re-reads the file and requires a valid
        integer priority attribute on the top-level element """
        xdata = lxml.etree.Element("Test")
        lxml.etree.SubElement(xdata, "Path", name="path", attr="whatever")

        xsrc = self.get_obj("/test/foo.xml")
        xsrc.__node__ = Mock()
        mock_open.return_value.read.return_value = lxml.etree.tostring(xdata)

        # missing priority attribute is an error
        self.assertRaises(PluginExecutionError,
                          xsrc.HandleEvent, Mock())

        # non-integer priority is an error
        xdata.set("priority", "cow")
        mock_open.return_value.read.return_value = lxml.etree.tostring(xdata)
        self.assertRaises(PluginExecutionError,
                          xsrc.HandleEvent, Mock())

        xdata.set("priority", "10")
        mock_open.return_value.read.return_value = lxml.etree.tostring(xdata)

        mock_open.reset_mock()
        xsrc = self.get_obj("/test/foo.xml")
        xsrc.__node__ = Mock()
        xsrc.HandleEvent(Mock())
        mock_open.assert_called_with("/test/foo.xml")
        mock_open.return_value.read.assert_any_call()
        self.assertXMLEqual(xsrc.__node__.call_args[0][0], xdata)
        self.assertEqual(xsrc.__node__.call_args[0][1], dict())
        self.assertEqual(xsrc.pnode, xsrc.__node__.return_value)
        self.assertEqual(xsrc.cache, None)

    @patch("Bcfg2.Server.Plugin.XMLSrc.HandleEvent")
    def test_Cache(self, mock_HandleEvent):
        """ Cache() lazily parses, then only re-runs Match() when the
        cached metadata does not match """
        xsrc = self.get_obj("/test/foo.xml")
        metadata = Mock()
        xsrc.Cache(metadata)
        mock_HandleEvent.assert_any_call()

        xsrc.pnode = Mock()
        xsrc.Cache(metadata)
        xsrc.pnode.Match.assert_called_with(metadata, xsrc.__cacheobj__())
        self.assertEqual(xsrc.cache[0], metadata)

        xsrc.pnode.reset_mock()
        xsrc.Cache(metadata)
        # cache is still valid, so Match() must not run again.  (The
        # original checked pnode.Mock.called, which auto-creates a
        # fresh child mock and is therefore always False -- vacuous.)
        self.assertFalse(xsrc.pnode.Match.called)
        self.assertEqual(xsrc.cache[0], metadata)

        # an invalid cache forces a re-match; use a real one-element
        # tuple -- ("bogus") is just a parenthesized string
        xsrc.cache = ("bogus",)
        xsrc.Cache(metadata)
        xsrc.pnode.Match.assert_called_with(metadata, xsrc.__cacheobj__())
        self.assertEqual(xsrc.cache[0], metadata)
+
+
class TestInfoXML(TestXMLSrc):
    """ InfoXML inherits all XMLSrc tests unchanged; only the class
    under test differs. """
    test_obj = InfoXML
+
+
class TestXMLDirectoryBacked(TestDirectoryBacked):
    """ XMLDirectoryBacked inherits all DirectoryBacked tests
    unchanged; only the class under test differs. """
    test_obj = XMLDirectoryBacked
+
+
class TestPrioDir(TestPlugin, TestGenerator, TestXMLDirectoryBacked):
    """ Tests for PrioDir, a Generator backed by a directory of
    priority-annotated XML files. """
    test_obj = PrioDir

    @patch("Bcfg2.Server.Plugin.%s.add_directory_monitor" % test_obj.__name__,
           Mock())
    def get_obj(self, core=None):
        # build a PrioDir without registering a real FAM monitor
        if core is None:
            core = Mock()
        return self.test_obj(core, datastore)

    def test_HandleEvent(self):
        """ HandleEvent() rebuilds Entries from every source's items """
        TestXMLDirectoryBacked.test_HandleEvent(self)
        with patch("Bcfg2.Server.Plugin.XMLDirectoryBacked.HandleEvent"):
            pd = self.get_obj()
            test1 = Mock()
            test1.items = dict(Path=["/etc/foo.conf", "/etc/bar.conf"])
            test2 = Mock()
            test2.items = dict(Path=["/etc/baz.conf"],
                               Package=["quux", "xyzzy"])
            pd.entries = {"/test1.xml": test1,
                          "/test2.xml": test2}
            pd.HandleEvent(Mock())
            # every item from every source should be bound to BindEntry
            self.assertItemsEqual(pd.Entries,
                                  dict(Path={"/etc/foo.conf": pd.BindEntry,
                                             "/etc/bar.conf": pd.BindEntry,
                                             "/etc/baz.conf": pd.BindEntry},
                                       Package={"quux": pd.BindEntry,
                                                "xyzzy": pd.BindEntry}))

    def test__matches(self):
        """ _matches() checks the entry's name against the rules dict """
        pd = self.get_obj()
        self.assertTrue(pd._matches(lxml.etree.Element("Test",
                                                       name="/etc/foo.conf"),
                                    Mock(),
                                    {"/etc/foo.conf": pd.BindEntry,
                                     "/etc/bar.conf": pd.BindEntry}))
        self.assertFalse(pd._matches(lxml.etree.Element("Test",
                                                        name="/etc/baz.conf"),
                                     Mock(),
                                     {"/etc/foo.conf": pd.BindEntry,
                                      "/etc/bar.conf": pd.BindEntry}))

    def test_BindEntry(self):
        """ BindEntry() overwrites entry attributes with those from
        get_attrs() """
        pd = self.get_obj()
        pd.get_attrs = Mock(return_value=dict(test1="test1", test2="test2"))
        entry = lxml.etree.Element("Path", name="/etc/foo.conf", test1="bogus")
        metadata = Mock()
        pd.BindEntry(entry, metadata)
        pd.get_attrs.assert_called_with(entry, metadata)
        # get_attrs() values win over the entry's pre-existing attrs
        self.assertItemsEqual(entry.attrib,
                              dict(name="/etc/foo.conf",
                                   test1="test1", test2="test2"))

    def test_get_attrs(self):
        """ get_attrs() picks the highest-priority matching source and
        applies its attrs, __text__ and __children__; ties and misses
        raise PluginExecutionError """
        pd = self.get_obj()
        entry = lxml.etree.Element("Path", name="/etc/foo.conf")
        children = [lxml.etree.Element("Child")]
        metadata = Mock()
        pd.entries = dict()

        def reset():
            # clear mock state and invalidate every source's cache
            metadata.reset_mock()
            for src in pd.entries.values():
                src.reset_mock()
                src.cache = None

        # test with no matches
        self.assertRaises(PluginExecutionError,
                          pd.get_attrs, entry, metadata)

        def add_entry(name, data, prio=10):
            # register a mock XMLSrc whose Cache() installs the given data
            path = os.path.join(pd.data, name)
            pd.entries[path] = Mock()
            pd.entries[path].priority = prio
            def do_Cache(metadata):
                pd.entries[path].cache = (metadata, data)
            pd.entries[path].Cache.side_effect = do_Cache

        add_entry('test1.xml',
                  dict(Path={'/etc/foo.conf': dict(attr="attr1",
                                                   __children__=children),
                             '/etc/bar.conf': dict()}))
        add_entry('test2.xml',
                  dict(Path={'/etc/bar.conf': dict(__text__="text",
                                                   attr="attr1")},
                       Package={'quux': dict(),
                                'xyzzy': dict()}),
                  prio=20)
        add_entry('test3.xml',
                  dict(Path={'/etc/baz.conf': dict()},
                       Package={'xyzzy': dict()}),
                  prio=20)

        # test with exactly one match, __children__
        reset()
        self.assertItemsEqual(pd.get_attrs(entry, metadata),
                              dict(attr="attr1"))
        for src in pd.entries.values():
            src.Cache.assert_called_with(metadata)
        self.assertEqual(len(entry.getchildren()), 1)
        self.assertXMLEqual(entry.getchildren()[0], children[0])

        # test with multiple matches with different priorities, __text__
        reset()
        entry = lxml.etree.Element("Path", name="/etc/bar.conf")
        self.assertItemsEqual(pd.get_attrs(entry, metadata),
                              dict(attr="attr1"))
        for src in pd.entries.values():
            src.Cache.assert_called_with(metadata)
        self.assertEqual(entry.text, "text")

        # test with multiple matches with identical priorities
        reset()
        entry = lxml.etree.Element("Package", name="xyzzy")
        self.assertRaises(PluginExecutionError,
                          pd.get_attrs, entry, metadata)
+
+
class TestSpecificityError(Bcfg2TestCase):
    """ placeholder for future tests of the SpecificityError exception """
    pass
+
+
class TestSpecificity(Bcfg2TestCase):
    """ Tests for Specificity, which describes whether a file applies
    to all clients, a group (with priority), or a single host. """
    test_obj = Specificity

    def get_obj(self, **kwargs):
        return self.test_obj(**kwargs)

    def test_matches(self):
        """ matches() is true for all=True, a group the client is in,
        or the client's own hostname """
        metadata = Mock()
        metadata.hostname = "foo.example.com"
        metadata.groups = ["group1", "group2"]
        self.assertTrue(self.get_obj(all=True).matches(metadata))
        self.assertTrue(self.get_obj(group="group1").matches(metadata))
        self.assertTrue(self.get_obj(hostname="foo.example.com").matches(metadata))
        self.assertFalse(self.get_obj().matches(metadata))
        self.assertFalse(self.get_obj(group="group3").matches(metadata))
        self.assertFalse(self.get_obj(hostname="bar.example.com").matches(metadata))

    def test__cmp(self):
        # specs runs from least to most specific: all < group(prio 10)
        # < group(prio 20) < host.  NOTE(review): per these assertions
        # a *more* specific entry compares as __cmp__ == -1 against a
        # less specific one -- confirm against Specificity.__cmp__
        specs = [self.get_obj(all=True),
                 self.get_obj(group="group1", prio=10),
                 self.get_obj(group="group1", prio=20),
                 self.get_obj(hostname="foo.example.com")]

        for i in range(len(specs)):
            for j in range(len(specs)):
                if i == j:
                    self.assertEqual(0, specs[i].__cmp__(specs[j]))
                    self.assertEqual(0, specs[j].__cmp__(specs[i]))
                elif i > j:
                    self.assertEqual(-1, specs[i].__cmp__(specs[j]))
                    self.assertEqual(1, specs[j].__cmp__(specs[i]))
                elif i < j:
                    self.assertEqual(1, specs[i].__cmp__(specs[j]))
                    self.assertEqual(-1, specs[j].__cmp__(specs[i]))

    def test_cmp(self):
        """ test __lt__/__gt__/__eq__ """
        # same ordering as test__cmp, exercised via the rich
        # comparison operators instead of __cmp__ directly
        specs = [self.get_obj(all=True),
                 self.get_obj(group="group1", prio=10),
                 self.get_obj(group="group1", prio=20),
                 self.get_obj(hostname="foo.example.com")]

        for i in range(len(specs)):
            for j in range(len(specs)):
                if i < j:
                    self.assertGreater(specs[i], specs[j])
                    self.assertLess(specs[j], specs[i])
                    self.assertGreaterEqual(specs[i], specs[j])
                    self.assertLessEqual(specs[j], specs[i])
                elif i == j:
                    self.assertEqual(specs[i], specs[j])
                    self.assertEqual(specs[j], specs[i])
                    self.assertLessEqual(specs[i], specs[j])
                    self.assertGreaterEqual(specs[j], specs[i])
                elif i > j:
                    self.assertLess(specs[i], specs[j])
                    self.assertGreater(specs[j], specs[i])
                    self.assertLessEqual(specs[i], specs[j])
                    self.assertGreaterEqual(specs[j], specs[i])
+
+
class TestSpecificData(Bcfg2TestCase):
    """ Tests for SpecificData, a file whose applicability is governed
    by a Specificity. """
    test_obj = SpecificData

    def get_obj(self, name="/test.txt", specific=None, encoding=None):
        if specific is None:
            specific = Mock()
        return self.test_obj(name, specific, encoding)

    @patch("__builtin__.open")
    def test_handle_event(self, mock_open):
        """ handle_event() skips 'deleted' events and otherwise reads
        the file into self.data """
        event = Mock()
        event.code2str.return_value = 'deleted'
        sd = self.get_obj()
        sd.handle_event(event)
        self.assertFalse(mock_open.called)
        # 'data' may simply never have been set on a fresh object
        if hasattr(sd, 'data'):
            self.assertIsNone(sd.data)
        else:
            self.assertFalse(hasattr(sd, 'data'))

        # any non-deleted event re-reads the file contents
        event = Mock()
        mock_open.return_value.read.return_value = "test"
        sd.handle_event(event)
        mock_open.assert_called_with("/test.txt")
        mock_open.return_value.read.assert_any_call()
        self.assertEqual(sd.data, "test")
+
+
class TestEntrySet(TestDebuggable):
    """ Tests for EntrySet, a group of specificity-differentiated
    files sharing one basename. """
    test_obj = EntrySet
    # filenames that should be matched successfully by the EntrySet
    # 'specific' regex. these are filenames alone -- a specificity
    # will be added to these
    basenames = ["test", "test.py", "test with spaces.txt",
                 "test.multiple.dots.py", "test_underscores.and.dots",
                 "really_misleading.G10_test",
                 "name$with*regex(special){chars}",
                 "misleading.H_hostname.test.com"]
    # filenames that do not match any of the basenames (or the
    # basename regex, if applicable)
    bogus_names = ["bogus"]
    # filenames that should be ignored
    ignore = ["foo~", ".#foo", ".foo.swp", ".foo.swx",
              "test.txt.genshi_include", "test.G_foo.genshi_include"]

    def get_obj(self, basename="test", path=datastore, entry_type=None,
                encoding=None):
        """ Construct an EntrySet.  entry_type defaults to a fresh
        MagicMock per call; the original used a mutable default
        (entry_type=MagicMock()), which silently shared a single mock
        -- including its recorded calls -- across every test. """
        if entry_type is None:
            entry_type = MagicMock()
        return self.test_obj(basename, path, entry_type, encoding)
+
    def test__init(self):
        """ the 'specific' regex accepts each basename with valid
        all/group/host specificities and rejects malformed ones; the
        'ignore' regex matches editor droppings only """
        for basename in self.basenames:
            eset = self.get_obj(basename=basename)
            # NOTE(review): re._pattern_type is a private name in the
            # re module (CPython-specific)
            self.assertIsInstance(eset.specific, re._pattern_type)
            self.assertTrue(eset.specific.match(os.path.join(datastore,
                                                             basename)))
            ppath = os.path.join(datastore, "Plugin", basename)
            self.assertTrue(eset.specific.match(ppath))
            self.assertTrue(eset.specific.match(ppath + ".G20_foo"))
            self.assertTrue(eset.specific.match(ppath + ".G1_foo"))
            self.assertTrue(eset.specific.match(ppath + ".G32768_foo"))
            # a group named '_'
            self.assertTrue(eset.specific.match(ppath + ".G10__"))
            self.assertTrue(eset.specific.match(ppath + ".H_hostname"))
            self.assertTrue(eset.specific.match(ppath + ".H_fqdn.subdomain.example.com"))
            self.assertTrue(eset.specific.match(ppath + ".G20_group_with_underscores"))

            # malformed specificities must not match
            self.assertFalse(eset.specific.match(ppath + ".G20_group with spaces"))
            self.assertFalse(eset.specific.match(ppath + ".G_foo"))
            self.assertFalse(eset.specific.match(ppath + ".G_"))
            self.assertFalse(eset.specific.match(ppath + ".G20_"))
            self.assertFalse(eset.specific.match(ppath + ".H_"))

            for bogus in self.bogus_names:
                self.assertFalse(eset.specific.match(os.path.join(datastore,
                                                                  "Plugin",
                                                                  bogus)))

            for ignore in self.ignore:
                self.assertTrue(eset.ignore.match(ignore))

            # real entry files are never ignored
            self.assertFalse(eset.ignore.match(basename))
            self.assertFalse(eset.ignore.match(basename + ".G20_foo"))
            self.assertFalse(eset.ignore.match(basename + ".G1_foo"))
            self.assertFalse(eset.ignore.match(basename + ".G32768_foo"))
            self.assertFalse(eset.ignore.match(basename + ".G10__"))
            self.assertFalse(eset.ignore.match(basename + ".H_hostname"))
            self.assertFalse(eset.ignore.match(basename + ".H_fqdn.subdomain.example.com"))
            self.assertFalse(eset.ignore.match(basename + ".G20_group_with_underscores"))
+
+ def test_get_matching(self):
+ items = {0: Mock(), 1: Mock(), 2: Mock(), 3: Mock(), 4: Mock(),
+ 5: Mock()}
+ items[0].specific.matches.return_value = False
+ items[1].specific.matches.return_value = True
+ items[2].specific.matches.return_value = False
+ items[3].specific.matches.return_value = False
+ items[4].specific.matches.return_value = True
+ items[5].specific.matches.return_value = True
+ metadata = Mock()
+ eset = self.get_obj()
+ eset.entries = items
+ self.assertItemsEqual(eset.get_matching(metadata),
+ [items[1], items[4], items[5]])
+ for i in items.values():
+ i.specific.matches.assert_called_with(metadata)
+
    @patch("Bcfg2.Server.Plugin.%s.get_matching" % test_obj.__name__)
    def test_best_matching(self, mock_get_matching):
        """ best_matching() returns the most specific matching entry
        (host > high-prio group > low-prio group > all) and raises
        PluginExecutionError when nothing matches """
        eset = self.get_obj()
        metadata = Mock()
        matching = []

        def reset():
            mock_get_matching.reset_mock()
            metadata.reset_mock()
            for m in matching:
                m.reset_mock()

        def specific(all=False, group=False, prio=None, host=False):
            # build a mock entry carrying a Specificity-like object
            spec = MagicMock()
            spec.all = all
            spec.group = group
            spec.prio = prio
            spec.host = host
            if prio:
                # NOTE(review): assigned on the instance, so it is
                # invoked as spec.__cmp__(other); 'o' is the other spec
                spec.__cmp__ = lambda o: cmp(spec.prio, o.prio)
            return spec

        # explicit empty matching list -> error
        self.assertRaises(PluginExecutionError,
                          eset.best_matching, metadata, matching=[])

        # get_matching() returning nothing -> error
        reset()
        mock_get_matching.return_value = matching
        self.assertRaises(PluginExecutionError,
                          eset.best_matching, metadata)
        mock_get_matching.assert_called_with(metadata)

        # test with a single file for all
        reset()
        matching.insert(0, specific(all=True))
        mock_get_matching.return_value = matching
        self.assertEqual(eset.best_matching(metadata),
                         matching[0])
        mock_get_matching.assert_called_with(metadata)

        # test with a single group-specific file
        reset()
        matching.insert(0, specific(group=True, prio=10))
        mock_get_matching.return_value = matching
        self.assertEqual(eset.best_matching(metadata),
                         matching[0])
        mock_get_matching.assert_called_with(metadata)

        # test with multiple group-specific files
        reset()
        matching.insert(0, specific(group=True, prio=20))
        mock_get_matching.return_value = matching
        self.assertEqual(eset.best_matching(metadata),
                         matching[0])
        mock_get_matching.assert_called_with(metadata)

        # test with host-specific file
        reset()
        matching.insert(0, specific(host=True))
        mock_get_matching.return_value = matching
        self.assertEqual(eset.best_matching(metadata),
                         matching[0])
        mock_get_matching.assert_called_with(metadata)
+
    @patch("Bcfg2.Server.Plugin.%s.entry_init" % test_obj.__name__)
    @patch("Bcfg2.Server.Plugin.%s.reset_metadata" % test_obj.__name__)
    @patch("Bcfg2.Server.Plugin.%s.update_metadata" % test_obj.__name__)
    def test_handle_event(self, mock_update_md, mock_reset_md, mock_init):
        """ handle_event() routes info-file events to
        update_metadata()/reset_metadata() and entry-file events to
        entry_init() or the existing entry's handle_event() """
        def reset():
            mock_update_md.reset_mock()
            mock_reset_md.reset_mock()
            mock_init.reset_mock()

        eset = self.get_obj()
        # info files: create/change -> update_metadata(); delete ->
        # reset_metadata()
        for fname in ["info", "info.xml", ":info"]:
            for evt in ["exists", "created", "changed"]:
                reset()
                event = Mock()
                event.code2str.return_value = evt
                event.filename = fname
                eset.handle_event(event)
                mock_update_md.assert_called_with(event)
                self.assertFalse(mock_init.called)
                self.assertFalse(mock_reset_md.called)

            reset()
            event = Mock()
            event.code2str.return_value = "deleted"
            event.filename = fname
            eset.handle_event(event)
            mock_reset_md.assert_called_with(event)
            self.assertFalse(mock_init.called)
            self.assertFalse(mock_update_md.called)

        # a non-info file not yet in entries -> entry_init()
        for evt in ["exists", "created", "changed"]:
            reset()
            event = Mock()
            event.code2str.return_value = evt
            event.filename = "test.txt"
            eset.handle_event(event)
            mock_init.assert_called_with(event)
            self.assertFalse(mock_reset_md.called)
            self.assertFalse(mock_update_md.called)

        # a changed file already in entries -> its own handle_event()
        reset()
        entry = Mock()
        eset.entries["test.txt"] = entry
        event = Mock()
        event.code2str.return_value = "changed"
        event.filename = "test.txt"
        eset.handle_event(event)
        entry.handle_event.assert_called_with(event)
        self.assertFalse(mock_init.called)
        self.assertFalse(mock_reset_md.called)
        self.assertFalse(mock_update_md.called)

        # a deleted file in entries is removed from the set
        reset()
        entry = Mock()
        eset.entries["test.txt"] = entry
        event = Mock()
        event.code2str.return_value = "deleted"
        event.filename = "test.txt"
        eset.handle_event(event)
        self.assertNotIn("test.txt", eset.entries)
+
    @patch("Bcfg2.Server.Plugin.%s.specificity_from_filename" %
           test_obj.__name__)
    def test_entry_init(self, mock_spec):
        """ entry_init() creates an entry of the proper type keyed by
        filename, honors entry_type/specific overrides, and skips
        files whose specificity cannot be parsed """
        eset = self.get_obj()

        def reset():
            eset.entry_type.reset_mock()
            mock_spec.reset_mock()

        event = Mock()
        event.code2str.return_value = "created"
        event.filename = "test.txt"
        eset.entry_init(event)
        mock_spec.assert_called_with("test.txt", specific=None)
        eset.entry_type.assert_called_with(os.path.join(eset.path, "test.txt"),
                                           mock_spec.return_value, None)
        eset.entry_type.return_value.handle_event.assert_called_with(event)
        self.assertIn("test.txt", eset.entries)

        # test duplicate add
        reset()
        eset.entry_init(event)
        self.assertFalse(mock_spec.called)
        self.assertFalse(eset.entry_type.called)
        eset.entries["test.txt"].handle_event.assert_called_with(event)

        # test keyword args
        etype = Mock()
        specific = Mock()
        event = Mock()
        event.code2str.return_value = "created"
        event.filename = "test2.txt"
        eset.entry_init(event, entry_type=etype, specific=specific)
        mock_spec.assert_called_with("test2.txt", specific=specific)
        etype.assert_called_with(os.path.join(eset.path, "test2.txt"),
                                 mock_spec.return_value, None)
        etype.return_value.handle_event.assert_called_with(event)
        self.assertIn("test2.txt", eset.entries)

        # test specificity error: caught, entry never constructed
        event = Mock()
        event.code2str.return_value = "created"
        event.filename = "test3.txt"
        mock_spec.side_effect = SpecificityError
        eset.entry_init(event)
        mock_spec.assert_called_with("test3.txt", specific=None)
        self.assertFalse(eset.entry_type.called)
+
    @patch("Bcfg2.Server.Plugin.Specificity")
    def test_specificity_from_filename(self, mock_spec):
        """ specificity_from_filename() parses all/group/host
        specificity out of a filename and raises SpecificityError on
        malformed names """
        def test(eset, fname, **kwargs):
            # parsing fname must construct Specificity(**kwargs)
            mock_spec.reset_mock()
            if "specific" in kwargs:
                specific = kwargs['specific']
                del kwargs['specific']
            else:
                specific = None
            self.assertEqual(eset.specificity_from_filename(fname,
                                                            specific=specific),
                             mock_spec.return_value)
            mock_spec.assert_called_with(**kwargs)

        def fails(eset, fname, specific=None):
            # parsing fname must raise SpecificityError
            mock_spec.reset_mock()
            self.assertRaises(SpecificityError,
                              eset.specificity_from_filename, fname,
                              specific=specific)

        for basename in self.basenames:
            eset = self.get_obj(basename=basename)
            ppath = os.path.join(datastore, "Plugin", basename)
            test(eset, ppath, all=True)
            test(eset, ppath + ".G20_foo", group="foo", prio=20)
            test(eset, ppath + ".G1_foo", group="foo", prio=1)
            test(eset, ppath + ".G32768_foo", group="foo", prio=32768)
            test(eset, ppath + ".G10__", group="_", prio=10)
            test(eset, ppath + ".H_hostname", hostname="hostname")
            test(eset, ppath + ".H_fqdn.subdomain.example.com",
                 hostname="fqdn.subdomain.example.com")
            test(eset, ppath + ".G20_group_with_underscores",
                 group="group_with_underscores", prio=20)

            for bogus in self.bogus_names:
                fails(eset, bogus)
            fails(eset, ppath + ".G_group with spaces")
            fails(eset, ppath + ".G_foo")
            fails(eset, ppath + ".G_")
            fails(eset, ppath + ".G20_")
            fails(eset, ppath + ".H_")
+
    @patch("__builtin__.open")
    @patch("Bcfg2.Server.Plugin.InfoXML")
    def test_update_metadata(self, mock_InfoXML, mock_open):
        """ update_metadata() parses info.xml into an InfoXML object
        (once), and legacy :info/info files into the metadata dict """
        eset = self.get_obj()

        # add info.xml
        event = Mock()
        event.filename = "info.xml"
        eset.update_metadata(event)
        mock_InfoXML.assert_called_with(os.path.join(eset.path, "info.xml"),
                                        True)
        mock_InfoXML.return_value.HandleEvent.assert_called_with(event)
        self.assertEqual(eset.infoxml, mock_InfoXML.return_value)

        # modify info.xml: the existing object is reused
        mock_InfoXML.reset_mock()
        eset.update_metadata(event)
        self.assertFalse(mock_InfoXML.called)
        eset.infoxml.HandleEvent.assert_called_with(event)

        for fname in [':info', 'info']:
            event = Mock()
            event.filename = fname

            idata = ["owner:owner",
                     "group: GROUP",
                     "perms: 775",
                     "important: true",
                     "bogus: line"]
            mock_open.return_value.readlines.return_value = idata
            eset.update_metadata(event)
            expected = default_file_metadata.copy()
            expected['owner'] = 'owner'
            expected['group'] = 'GROUP'
            # perms are normalized to four octal digits; unrecognized
            # keys ("bogus") are ignored
            expected['perms'] = '0775'
            expected['important'] = 'true'
            self.assertItemsEqual(eset.metadata,
                                  expected)
+
+ def test_reset_metadata(self):
+ eset = self.get_obj()
+
+ # test info.xml
+ event = Mock()
+ event.filename = "info.xml"
+ eset.infoxml = Mock()
+ eset.reset_metadata(event)
+ self.assertIsNone(eset.infoxml)
+
+ for fname in [':info', 'info']:
+ event = Mock()
+ event.filename = fname
+ eset.metadata = Mock()
+ eset.reset_metadata(event)
+ self.assertItemsEqual(eset.metadata, default_file_metadata)
+
+ @patch("Bcfg2.Server.Plugin.bind_info")
+ def test_bind_info_to_entry(self, mock_bind_info):
+ eset = self.get_obj()
+ entry = Mock()
+ metadata = Mock()
+ eset.bind_info_to_entry(entry, metadata)
+ mock_bind_info.assert_called_with(entry, metadata,
+ infoxml=eset.infoxml,
+ default=eset.metadata)
+
+ @patch("Bcfg2.Server.Plugin.%s.best_matching" % test_obj.__name__)
+ @patch("Bcfg2.Server.Plugin.%s.bind_info_to_entry" % test_obj.__name__)
+ def test_bind_entry(self, mock_bind_info, mock_best_matching):
+ eset = self.get_obj()
+ entry = Mock()
+ metadata = Mock()
+ eset.bind_entry(entry, metadata)
+ mock_bind_info.assert_called_with(entry, metadata)
+ mock_best_matching.assert_called_with(metadata)
+ mock_best_matching.return_value.bind_entry.assert_called_with(entry,
+ metadata)
+
+
class TestGroupSpool(TestPlugin, TestGenerator):
    """ Tests for GroupSpool, a Generator backed by a spool directory
    of per-entry EntrySets. """
    test_obj = GroupSpool

    # pass Mock() as the patch's 'new' argument so the decorator does
    # NOT inject the mock as a positional argument.  (The original
    # omitted it, so the injected mock bound to the 'core' parameter
    # and every GroupSpool was built with the AddDirectoryMonitor mock
    # as its core -- and get_obj(core=...) would raise TypeError.)
    @patch("Bcfg2.Server.Plugin.%s.AddDirectoryMonitor" % test_obj.__name__,
           Mock())
    def get_obj(self, core=None):
        return TestPlugin.get_obj(self, core=core)
+
    @patch("Bcfg2.Server.Plugin.%s.AddDirectoryMonitor" % test_obj.__name__)
    def test__init(self, mock_Add):
        """ constructing a GroupSpool monitors its top-level directory
        ('') and seeds Entries with its entry type """
        core = Mock()
        gs = self.test_obj(core, datastore)
        mock_Add.assert_called_with('')
        self.assertItemsEqual(gs.Entries, {gs.entry_type: {}})
+
    @patch("os.path.isdir")
    @patch("os.path.isfile")
    @patch("Bcfg2.Server.Plugin.%s.event_id" % test_obj.__name__)
    @patch("Bcfg2.Server.Plugin.%s.event_path" % test_obj.__name__)
    @patch("Bcfg2.Server.Plugin.%s.AddDirectoryMonitor" % test_obj.__name__)
    def test_add_entry(self, mock_Add, mock_event_path, mock_event_id,
                       mock_isfile, mock_isdir):
        """ add_entry() monitors new directories, builds an es_cls
        entry set for new files, and reuses existing entry sets """
        gs = self.get_obj()
        gs.es_cls = Mock()
        gs.es_child_cls = Mock()

        def reset():
            gs.es_cls.reset_mock()
            gs.es_child_cls.reset_mock()
            mock_Add.reset_mock()
            mock_event_path.reset_mock()
            mock_event_id.reset_mock()
            mock_isfile.reset_mock()
            mock_isdir.reset_mock()

        # directory: a new monitor is added, no entry set is created
        event = Mock()
        event.filename = "foo"
        basedir = "test"
        epath = os.path.join(gs.data, basedir, event.filename)
        ident = os.path.join(basedir, event.filename)
        mock_event_path.return_value = epath
        mock_event_id.return_value = ident
        mock_isdir.return_value = True
        mock_isfile.return_value = False
        gs.add_entry(event)
        mock_Add.assert_called_with(os.path.join("/" + basedir, event.filename))
        self.assertNotIn(ident, gs.entries)
        mock_isdir.assert_called_with(epath)

        # file that is not in self.entries: a new es_cls is created
        # and registered in both entries and Entries
        reset()
        event = Mock()
        event.filename = "foo"
        basedir = "test/foo/"
        epath = os.path.join(gs.data, basedir, event.filename)
        ident = basedir[:-1]
        mock_event_path.return_value = epath
        mock_event_id.return_value = ident
        mock_isdir.return_value = False
        mock_isfile.return_value = True
        gs.add_entry(event)
        self.assertFalse(mock_Add.called)
        gs.es_cls.assert_called_with(gs.filename_pattern,
                                     gs.data + ident,
                                     gs.es_child_cls,
                                     gs.encoding)
        self.assertIn(ident, gs.entries)
        self.assertEqual(gs.entries[ident], gs.es_cls.return_value)
        self.assertIn(ident, gs.Entries[gs.entry_type])
        self.assertEqual(gs.Entries[gs.entry_type][ident],
                         gs.es_cls.return_value.bind_entry)
        gs.entries[ident].handle_event.assert_called_with(event)
        mock_isfile.assert_called_with(epath)

        # file that is in self.entries: existing entry set is reused
        reset()
        gs.add_entry(event)
        self.assertFalse(mock_Add.called)
        self.assertFalse(gs.es_cls.called)
        gs.entries[ident].handle_event.assert_called_with(event)
+
+ def test_event_path(self):
+ gs = self.get_obj()
+ gs.handles[1] = "/var/lib/foo/"
+ gs.handles[2] = "/etc/foo/"
+ gs.handles[3] = "/usr/share/foo/"
+ event = Mock()
+ event.filename = "foo"
+ for i in range(1, 4):
+ event.requestID = i
+ self.assertEqual(gs.event_path(event),
+ os.path.join(datastore, gs.name,
+ gs.handles[event.requestID].lstrip('/'),
+ event.filename))
+
    @patch("os.path.isdir")
    @patch("Bcfg2.Server.Plugin.%s.event_path" % test_obj.__name__)
    def test_event_id(self, mock_event_path, mock_isdir):
        """ event_id() returns handle + filename for directories, and
        the handle (sans trailing slash) for files """
        gs = self.get_obj()

        def reset():
            mock_event_path.reset_mock()
            mock_isdir.reset_mock()

        gs.handles[1] = "/var/lib/foo/"
        gs.handles[2] = "/etc/foo/"
        gs.handles[3] = "/usr/share/foo/"
        event = Mock()
        event.filename = "foo"
        for i in range(1, 4):
            event.requestID = i
            reset()
            # directory: id is the relative handle path plus filename
            mock_isdir.return_value = True
            self.assertEqual(gs.event_id(event),
                             os.path.join(gs.handles[event.requestID].lstrip('/'),
                                          event.filename))
            mock_isdir.assert_called_with(mock_event_path.return_value)

            reset()
            # file: id is the handle itself without the trailing slash
            mock_isdir.return_value = False
            self.assertEqual(gs.event_id(event),
                             gs.handles[event.requestID].rstrip('/'))
            mock_isdir.assert_called_with(mock_event_path.return_value)
+
    def test_toggle_debug(self):
        """ toggle_debug() toggles the plugin flag and propagates to
        every entry set """
        gs = self.get_obj()
        gs.entries = {"/foo": Mock(),
                      "/bar": Mock(),
                      "/baz/quux": Mock()}
        with patch("Bcfg2.Server.Plugin.Plugin.toggle_debug") as mock_debug:
            gs.toggle_debug()
            mock_debug.assert_called_with(gs)
            for entry in gs.entries.values():
                entry.toggle_debug.assert_any_call()

        # also run the generic Plugin toggle_debug assertions
        TestPlugin.test_toggle_debug(self)
+
+ @patch("Bcfg2.Server.Plugin.%s.event_id" % test_obj.__name__)
+ @patch("Bcfg2.Server.Plugin.%s.add_entry" % test_obj.__name__)
+ def test_HandleEvent(self, mock_add_entry, mock_event_id):
+ gs = self.get_obj()
+ gs.entries = {"/foo": Mock(),
+ "/bar": Mock(),
+ "/baz": Mock(),
+ "/baz/quux": Mock()}
+ for path in gs.entries.keys():
+ gs.Entries[gs.entry_type] = {path: Mock()}
+ gs.handles = {1: "/foo/",
+ 2: "/bar/",
+ 3: "/baz/",
+ 4: "/baz/quux"}
+
+ def reset():
+ mock_add_entry.reset_mock()
+ mock_event_id.reset_mock()
+ for entry in gs.entries.values():
+ entry.reset_mock()
+
+ # test event creation, changing entry that doesn't exist
+ for evt in ["exists", "created", "changed"]:
+ reset()
+ event = Mock()
+ event.filename = "foo"
+ event.code2str.return_value = evt
+ gs.HandleEvent(event)
+ mock_event_id.assert_called_with(event)
+ mock_add_entry.assert_called_with(event)
+
+ # test deleting entry, changing entry that does exist
+ for evt in ["changed", "deleted"]:
+ reset()
+ event = Mock()
+ event.filename = "quux"
+ event.requestID = 4
+ event.code2str.return_value = evt
+ mock_event_id.return_value = "/baz/quux"
+ gs.HandleEvent(event)
+ mock_event_id.assert_called_with(event)
+ self.assertIn(mock_event_id.return_value, gs.entries)
+ gs.entries[mock_event_id.return_value].handle_event.assert_called_with(event)
+ self.assertFalse(mock_add_entry.called)
+
+ # test deleting directory
+ reset()
+ event = Mock()
+ event.filename = "quux"
+ event.requestID = 3
+ event.code2str.return_value = "deleted"
+ mock_event_id.return_value = "/baz/quux"
+ gs.HandleEvent(event)
+ mock_event_id.assert_called_with(event)
+ self.assertNotIn("/baz/quux", gs.entries)
+ self.assertNotIn("/baz/quux", gs.Entries[gs.entry_type])
+
+
+
diff --git a/testsuite/Testlib/TestServer/TestPlugins/TestMetadata.py b/testsuite/Testlib/TestServer/TestPlugins/TestMetadata.py
index 381bd978e..6aab04a89 100644
--- a/testsuite/Testlib/TestServer/TestPlugins/TestMetadata.py
+++ b/testsuite/Testlib/TestServer/TestPlugins/TestMetadata.py
@@ -6,26 +6,12 @@ import socket
import unittest
import lxml.etree
from mock import Mock, patch
-
-try:
- from django.core.management import setup_environ
- has_django = True
-
- os.environ['DJANGO_SETTINGS_MODULE'] = "Bcfg2.settings"
-
- import Bcfg2.settings
- Bcfg2.settings.DATABASE_NAME = \
- os.path.join(os.path.dirname(os.path.abspath(__file__)), "test.sqlite")
- Bcfg2.settings.DATABASES['default']['NAME'] = Bcfg2.settings.DATABASE_NAME
-except ImportError:
- has_django = False
-
+from ....common import *
import Bcfg2.Server
import Bcfg2.Server.Plugin
from Bcfg2.Server.Plugins.Metadata import *
-
-XI_NAMESPACE = "http://www.w3.org/2001/XInclude"
-XI = "{%s}" % XI_NAMESPACE
+from ..TestPlugin import TestXMLFileBacked, TestMetadata as _TestMetadata, \
+ TestStatistics, TestDatabaseBacked
clients_test_tree = lxml.etree.XML('''
<Clients>
@@ -87,33 +73,19 @@ groups_test_tree = lxml.etree.XML('''
</Group>
</Groups>''').getroottree()
-datastore = "/"
-
-def test_syncdb():
- if not has_django:
- raise unittest.SkipTest("Django not found, skipping")
-
- # create the test database
- setup_environ(Bcfg2.settings)
- from django.core.management.commands import syncdb
- cmd = syncdb.Command()
- cmd.handle_noargs(interactive=False)
- assert os.path.exists(Bcfg2.settings.DATABASE_NAME)
-
- # ensure that we a) can connect to the database; b) start with a
- # clean database
- MetadataClientModel.objects.all().delete()
- assert list(MetadataClientModel.objects.all()) == []
def get_metadata_object(core=None, watch_clients=False, use_db=False):
if core is None:
core = Mock()
- core.setup.cfp.getboolean = Mock()
- core.setup.cfp.getboolean.return_value = use_db
+ core.setup.cfp.getboolean = Mock(return_value=use_db)
return Metadata(core, datastore, watch_clients=watch_clients)
-class TestClientVersions(unittest.TestCase):
+class TestMetadataDB(DBModelTestCase):
+ models = [MetadataClientModel]
+
+
+class TestClientVersions(Bcfg2TestCase):
test_clients = dict(client1="1.2.0",
client2="1.2.2",
client3="1.3.0pre1",
@@ -122,7 +94,7 @@ class TestClientVersions(unittest.TestCase):
client6=None)
def setUp(self):
- test_syncdb()
+ syncdb(TestMetadataDB)
for client, version in self.test_clients.items():
MetadataClientModel(hostname=client, version=version).save()
@@ -169,15 +141,9 @@ class TestClientVersions(unittest.TestCase):
self.assertEqual(v['client2'], "1.2.2")
self.assertIsNone(v['client5'])
- # test exception on nonexistent client. can't use assertRaises
- # for this because assertRaises requires a callable
- try:
+ # test exception on nonexistent client
+ with self.assertRaises(KeyError):
v['clients__getitem']
- assert False
- except KeyError:
- assert True
- except:
- assert False
def test__len(self):
v = ClientVersions()
@@ -199,63 +165,61 @@ class TestClientVersions(unittest.TestCase):
self.assertIsNone(v[new])
-class TestXMLMetadataConfig(unittest.TestCase):
+class TestXMLMetadataConfig(TestXMLFileBacked):
+ test_obj = XMLMetadataConfig
groups_test_tree = groups_test_tree
clients_test_tree = clients_test_tree
- def get_config_object(self, basefile="clients.xml", core=None,
- watch_clients=False):
+ def get_obj(self, basefile="clients.xml", core=None, watch_clients=False):
self.metadata = get_metadata_object(core=core,
watch_clients=watch_clients)
return XMLMetadataConfig(self.metadata, watch_clients, basefile)
+ def test__init(self):
+ xmc = self.get_obj()
+ self.assertEqual(self.metadata.core.fam, xmc.fam)
+ self.assertFalse(xmc.fam.AddMonitor.called)
+
def test_xdata(self):
- config = self.get_config_object()
- # we can't use assertRaises here because xdata is a property
- try:
+ config = self.get_obj()
+ with self.assertRaises(Bcfg2.Server.Plugin.MetadataRuntimeError):
config.xdata
- assert False
- except MetadataRuntimeError:
- assert True
- except:
- assert False
config.data = "<test/>"
self.assertEqual(config.xdata, "<test/>")
def test_base_xdata(self):
- config = self.get_config_object()
+ config = self.get_obj()
# we can't use assertRaises here because base_xdata is a property
- try:
+ with self.assertRaises(Bcfg2.Server.Plugin.MetadataRuntimeError):
config.base_xdata
- assert False
- except MetadataRuntimeError:
- assert True
- except:
- assert False
config.basedata = "<test/>"
self.assertEqual(config.base_xdata, "<test/>")
def test_add_monitor(self):
core = Mock()
- config = self.get_config_object(core=core)
+ config = self.get_obj(core=core)
fname = "test.xml"
fpath = os.path.join(self.metadata.data, fname)
config.extras = []
- config.add_monitor(fpath, fname)
+ config.add_monitor(fpath)
self.assertFalse(core.fam.AddMonitor.called)
- self.assertEqual(config.extras, [fname])
+ self.assertEqual(config.extras, [fpath])
- config = self.get_config_object(core=core, watch_clients=True)
- config.add_monitor(fpath, fname)
+ config = self.get_obj(core=core, watch_clients=True)
+ config.add_monitor(fpath)
core.fam.AddMonitor.assert_called_with(fpath, config.metadata)
- self.assertItemsEqual(config.extras, [fname])
+ self.assertItemsEqual(config.extras, [fpath])
+
+ def test_Index(self):
+ # Index() isn't used on XMLMetadataConfig objects
+ pass
@patch("Bcfg2.Server.Plugins.Metadata.XMLMetadataConfig.add_monitor")
@patch("lxml.etree.parse")
def test_load_xml(self, mock_parse, mock_add_monitor):
- config = self.get_config_object("clients.xml")
+ config = self.get_obj("clients.xml")
mock_parse.side_effect = lxml.etree.XMLSyntaxError(None, None, None,
None)
config.load_xml()
@@ -287,7 +251,7 @@ class TestXMLMetadataConfig(unittest.TestCase):
@patch("Bcfg2.Server.Plugins.Metadata.XMLMetadataConfig.write_xml")
def test_write(self, mock_write_xml):
- config = self.get_config_object("clients.xml")
+ config = self.get_obj("clients.xml")
config.basedata = "<test/>"
config.write()
mock_write_xml.assert_called_with(os.path.join(self.metadata.data,
@@ -304,7 +268,7 @@ class TestXMLMetadataConfig(unittest.TestCase):
def test_write_xml(self, mock_readlink, mock_islink, mock_rename,
mock_unlink, mock_open):
fname = "clients.xml"
- config = self.get_config_object(fname)
+ config = self.get_obj(fname)
fpath = os.path.join(self.metadata.data, fname)
tmpfile = "%s.new" % fpath
linkdest = os.path.join(self.metadata.data, "client-link.xml")
@@ -323,22 +287,22 @@ class TestXMLMetadataConfig(unittest.TestCase):
mock_rename.assert_called_with(tmpfile, linkdest)
mock_rename.side_effect = OSError
- self.assertRaises(MetadataRuntimeError,
+ self.assertRaises(Bcfg2.Server.Plugin.MetadataRuntimeError,
config.write_xml, fpath, self.clients_test_tree)
mock_open.return_value.write.side_effect = IOError
- self.assertRaises(MetadataRuntimeError,
+ self.assertRaises(Bcfg2.Server.Plugin.MetadataRuntimeError,
config.write_xml, fpath, self.clients_test_tree)
mock_unlink.assert_called_with(tmpfile)
mock_open.side_effect = IOError
- self.assertRaises(MetadataRuntimeError,
+ self.assertRaises(Bcfg2.Server.Plugin.MetadataRuntimeError,
config.write_xml, fpath, self.clients_test_tree)
@patch("Bcfg2.Server.Plugins.Metadata.XMLMetadataConfig.load_xml", Mock())
@patch('lxml.etree.parse')
def test_find_xml_for_xpath(self, mock_parse):
- config = self.get_config_object("groups.xml")
+ config = self.get_obj("groups.xml")
config.basedata = self.groups_test_tree
xpath = "//Group[@name='group1']"
self.assertItemsEqual(config.find_xml_for_xpath(xpath),
@@ -349,7 +313,8 @@ class TestXMLMetadataConfig(unittest.TestCase):
self.assertEqual(config.find_xml_for_xpath("//boguselement"), dict())
- config.extras = ["foo.xml", "bar.xml", "clients.xml"]
+ config.extras = [os.path.join(self.metadata.data, p)
+ for p in ["foo.xml", "bar.xml", "clients.xml"]]
def parse_side_effect(fname, parser=Bcfg2.Server.XMLParser):
if fname == os.path.join(self.metadata.data, "clients.xml"):
@@ -367,7 +332,7 @@ class TestXMLMetadataConfig(unittest.TestCase):
@patch("Bcfg2.Server.Plugins.Metadata.XMLMetadataConfig.load_xml")
def test_HandleEvent(self, mock_load_xml):
- config = self.get_config_object("groups.xml")
+ config = self.get_obj("groups.xml")
evt = Mock()
evt.filename = os.path.join(self.metadata.data, "groups.xml")
evt.code2str = Mock(return_value="changed")
@@ -375,7 +340,7 @@ class TestXMLMetadataConfig(unittest.TestCase):
mock_load_xml.assert_called_with()
-class TestClientMetadata(unittest.TestCase):
+class TestClientMetadata(Bcfg2TestCase):
def test_inGroup(self):
cm = ClientMetadata("client1", "group1", ["group1", "group2"],
["bundle1"], [], [], [], None, None, None, None)
@@ -383,15 +348,24 @@ class TestClientMetadata(unittest.TestCase):
self.assertFalse(cm.inGroup("group3"))
-class TestMetadata(unittest.TestCase):
+class TestMetadata(_TestMetadata, TestStatistics, TestDatabaseBacked):
+ test_obj = Metadata
groups_test_tree = groups_test_tree
clients_test_tree = clients_test_tree
use_db = False
- def get_metadata_object(self, core=None, watch_clients=False):
+ def get_obj(self, core=None, watch_clients=False):
return get_metadata_object(core=core, watch_clients=watch_clients,
use_db=self.use_db)
+ @unittest.skipUnless(has_django, "Django not found")
+ def test__use_db(self):
+        # with the way we've set up our metadata tests, it's unwieldy
+ # to test _use_db. however, given the way get_obj works, if
+ # there was a bug in _use_db it'd be almost certain to shake
+ # out in the rest of the testing.
+ pass
+
def get_nonexistent_client(self, metadata, prefix="client"):
if metadata is None:
metadata = self.load_clients_data()
@@ -405,7 +379,7 @@ class TestMetadata(unittest.TestCase):
def test__init(self):
# test with watch_clients=False
core = Mock()
- metadata = self.get_metadata_object(core=core)
+ metadata = self.get_obj(core=core)
self.assertIsInstance(metadata, Bcfg2.Server.Plugin.Plugin)
self.assertIsInstance(metadata, Bcfg2.Server.Plugin.Metadata)
self.assertIsInstance(metadata, Bcfg2.Server.Plugin.Statistics)
@@ -416,7 +390,7 @@ class TestMetadata(unittest.TestCase):
# test with watch_clients=True
core.fam = Mock()
- metadata = self.get_metadata_object(core=core, watch_clients=True)
+ metadata = self.get_obj(core=core, watch_clients=True)
self.assertEqual(len(metadata.states), 2)
core.fam.AddMonitor.assert_any_call(os.path.join(metadata.data,
"groups.xml"),
@@ -428,8 +402,7 @@ class TestMetadata(unittest.TestCase):
core.fam.reset_mock()
core.fam.AddMonitor = Mock(side_effect=IOError)
self.assertRaises(Bcfg2.Server.Plugin.PluginInitError,
- self.get_metadata_object,
- core=core, watch_clients=True)
+ self.get_obj, core=core, watch_clients=True)
@patch('os.makedirs', Mock())
@patch('__builtin__.open')
@@ -443,21 +416,21 @@ class TestMetadata(unittest.TestCase):
def test_search_xdata(self):
# test finding a node with the proper name
- metadata = self.get_metadata_object()
+ metadata = self.get_obj()
tree = self.groups_test_tree
res = metadata._search_xdata("Group", "group1", tree)
self.assertIsInstance(res, lxml.etree._Element)
self.assertEqual(res.get("name"), "group1")
# test finding a node with the wrong name but correct alias
- metadata = self.get_metadata_object()
+ metadata = self.get_obj()
tree = self.clients_test_tree
res = metadata._search_xdata("Client", "alias3", tree, alias=True)
self.assertIsInstance(res, lxml.etree._Element)
self.assertNotEqual(res.get("name"), "alias3")
# test failure finding a node
- metadata = self.get_metadata_object()
+ metadata = self.get_obj()
tree = self.clients_test_tree
res = metadata._search_xdata("Client",
self.get_nonexistent_client(metadata),
@@ -465,7 +438,7 @@ class TestMetadata(unittest.TestCase):
self.assertIsNone(res)
def search_xdata(self, tag, name, tree, alias=False):
- metadata = self.get_metadata_object()
+ metadata = self.get_obj()
res = metadata._search_xdata(tag, name, tree, alias=alias)
self.assertIsInstance(res, lxml.etree._Element)
if not alias:
@@ -488,7 +461,7 @@ class TestMetadata(unittest.TestCase):
self.search_xdata("Client", "alias1", tree, alias=True)
def test_add_group(self):
- metadata = self.get_metadata_object()
+ metadata = self.get_obj()
metadata.groups_xml.write = Mock()
metadata.groups_xml.data = lxml.etree.XML('<Groups/>').getroottree()
metadata.groups_xml.basedata = copy.copy(metadata.groups_xml.data)
@@ -514,13 +487,13 @@ class TestMetadata(unittest.TestCase):
metadata.groups_xml.basedata = copy.copy(metadata.groups_xml.data)
metadata.groups_xml.write.reset_mock()
- self.assertRaises(MetadataConsistencyError,
+ self.assertRaises(Bcfg2.Server.Plugin.MetadataConsistencyError,
metadata.add_group,
"test1", dict())
self.assertFalse(metadata.groups_xml.write.called)
def test_update_group(self):
- metadata = self.get_metadata_object()
+ metadata = self.get_obj()
metadata.groups_xml.write_xml = Mock()
metadata.groups_xml.data = copy.deepcopy(self.groups_test_tree)
metadata.groups_xml.basedata = copy.copy(metadata.groups_xml.data)
@@ -532,12 +505,12 @@ class TestMetadata(unittest.TestCase):
self.assertEqual(grp.get("foo"), "bar")
self.assertTrue(metadata.groups_xml.write_xml.called)
- self.assertRaises(MetadataConsistencyError,
+ self.assertRaises(Bcfg2.Server.Plugin.MetadataConsistencyError,
metadata.update_group,
"bogus_group", dict())
def test_remove_group(self):
- metadata = self.get_metadata_object()
+ metadata = self.get_obj()
metadata.groups_xml.write_xml = Mock()
metadata.groups_xml.data = copy.deepcopy(self.groups_test_tree)
metadata.groups_xml.basedata = copy.copy(metadata.groups_xml.data)
@@ -547,12 +520,12 @@ class TestMetadata(unittest.TestCase):
self.assertIsNone(grp)
self.assertTrue(metadata.groups_xml.write_xml.called)
- self.assertRaises(MetadataConsistencyError,
+ self.assertRaises(Bcfg2.Server.Plugin.MetadataConsistencyError,
metadata.remove_group,
"bogus_group")
def test_add_bundle(self):
- metadata = self.get_metadata_object()
+ metadata = self.get_obj()
metadata.groups_xml.write = Mock()
metadata.groups_xml.data = lxml.etree.XML('<Groups/>').getroottree()
metadata.groups_xml.basedata = copy.copy(metadata.groups_xml.data)
@@ -569,13 +542,13 @@ class TestMetadata(unittest.TestCase):
metadata.groups_xml.basedata = copy.copy(metadata.groups_xml.data)
metadata.groups_xml.write.reset_mock()
- self.assertRaises(MetadataConsistencyError,
+ self.assertRaises(Bcfg2.Server.Plugin.MetadataConsistencyError,
metadata.add_bundle,
"bundle1")
self.assertFalse(metadata.groups_xml.write.called)
def test_remove_bundle(self):
- metadata = self.get_metadata_object()
+ metadata = self.get_obj()
metadata.groups_xml.write_xml = Mock()
metadata.groups_xml.data = copy.deepcopy(self.groups_test_tree)
metadata.groups_xml.basedata = copy.copy(metadata.groups_xml.data)
@@ -585,12 +558,12 @@ class TestMetadata(unittest.TestCase):
self.assertIsNone(grp)
self.assertTrue(metadata.groups_xml.write_xml.called)
- self.assertRaises(MetadataConsistencyError,
+ self.assertRaises(Bcfg2.Server.Plugin.MetadataConsistencyError,
metadata.remove_bundle,
"bogus_bundle")
def test_add_client(self):
- metadata = self.get_metadata_object()
+ metadata = self.get_obj()
metadata.clients_xml.write = Mock()
metadata.clients_xml.data = lxml.etree.XML('<Clients/>').getroottree()
metadata.clients_xml.basedata = copy.copy(metadata.clients_xml.data)
@@ -619,13 +592,13 @@ class TestMetadata(unittest.TestCase):
metadata.clients_xml.basedata = copy.copy(metadata.clients_xml.data)
metadata.clients_xml.write.reset_mock()
- self.assertRaises(MetadataConsistencyError,
+ self.assertRaises(Bcfg2.Server.Plugin.MetadataConsistencyError,
metadata.add_client,
new1, dict())
self.assertFalse(metadata.clients_xml.write.called)
def test_update_client(self):
- metadata = self.get_metadata_object()
+ metadata = self.get_obj()
metadata.clients_xml.write_xml = Mock()
metadata.clients_xml.data = copy.deepcopy(self.clients_test_tree)
metadata.clients_xml.basedata = copy.copy(metadata.clients_xml.data)
@@ -638,14 +611,15 @@ class TestMetadata(unittest.TestCase):
self.assertTrue(metadata.clients_xml.write_xml.called)
new = self.get_nonexistent_client(metadata)
- self.assertRaises(MetadataConsistencyError,
+ self.assertRaises(Bcfg2.Server.Plugin.MetadataConsistencyError,
metadata.update_client,
new, dict())
def load_clients_data(self, metadata=None, xdata=None):
if metadata is None:
- metadata = self.get_metadata_object()
- metadata.clients_xml.data = xdata or copy.deepcopy(self.clients_test_tree)
+ metadata = self.get_obj()
+ metadata.clients_xml.data = \
+ xdata or copy.deepcopy(self.clients_test_tree)
metadata.clients_xml.basedata = copy.copy(metadata.clients_xml.data)
evt = Mock()
evt.filename = os.path.join(datastore, "Metadata", "clients.xml")
@@ -655,7 +629,7 @@ class TestMetadata(unittest.TestCase):
@patch("Bcfg2.Server.Plugins.Metadata.XMLMetadataConfig.load_xml")
def test_clients_xml_event(self, mock_load_xml):
- metadata = self.get_metadata_object()
+ metadata = self.get_obj()
metadata.profiles = ["group1", "group2"]
self.load_clients_data(metadata=metadata)
mock_load_xml.assert_any_call()
@@ -699,7 +673,7 @@ class TestMetadata(unittest.TestCase):
def load_groups_data(self, metadata=None, xdata=None):
if metadata is None:
- metadata = self.get_metadata_object()
+ metadata = self.get_obj()
metadata.groups_xml.data = xdata or copy.deepcopy(self.groups_test_tree)
metadata.groups_xml.basedata = copy.copy(metadata.groups_xml.data)
evt = Mock()
@@ -752,21 +726,21 @@ class TestMetadata(unittest.TestCase):
@patch("Bcfg2.Server.Plugins.Metadata.XMLMetadataConfig.load_xml", Mock())
def test_set_profile(self):
- metadata = self.get_metadata_object()
+ metadata = self.get_obj()
if 'clients.xml' in metadata.states:
metadata.states['clients.xml'] = False
- self.assertRaises(MetadataRuntimeError,
+ self.assertRaises(Bcfg2.Server.Plugin.MetadataRuntimeError,
metadata.set_profile,
None, None, None)
self.load_groups_data(metadata=metadata)
self.load_clients_data(metadata=metadata)
- self.assertRaises(MetadataConsistencyError,
+ self.assertRaises(Bcfg2.Server.Plugin.MetadataConsistencyError,
metadata.set_profile,
"client1", "group5", None)
- self.assertRaises(MetadataConsistencyError,
+ self.assertRaises(Bcfg2.Server.Plugin.MetadataConsistencyError,
metadata.set_profile,
"client1", "group3", None)
@@ -820,7 +794,7 @@ class TestMetadata(unittest.TestCase):
metadata.session_cache[('1.2.3.3', None)] = (time.time(), 'client3')
self.assertEqual(metadata.resolve_client(('1.2.3.3', None)), 'client3')
- self.assertRaises(MetadataConsistencyError,
+ self.assertRaises(Bcfg2.Server.Plugin.MetadataConsistencyError,
metadata.resolve_client,
('1.2.3.2', None))
self.assertEqual(metadata.resolve_client(('1.2.3.1', None)), 'client1')
@@ -844,7 +818,7 @@ class TestMetadata(unittest.TestCase):
mock_gethostbyaddr.reset_mock()
mock_gethostbyaddr.return_value = None
mock_gethostbyaddr.side_effect = socket.herror
- self.assertRaises(MetadataConsistencyError,
+ self.assertRaises(Bcfg2.Server.Plugin.MetadataConsistencyError,
metadata.resolve_client,
('1.2.3.8', None))
mock_gethostbyaddr.assert_called_with('1.2.3.8')
@@ -853,10 +827,10 @@ class TestMetadata(unittest.TestCase):
@patch("Bcfg2.Server.Plugins.Metadata.XMLMetadataConfig.write_xml", Mock())
@patch("Bcfg2.Server.Plugins.Metadata.ClientMetadata")
def test_get_initial_metadata(self, mock_clientmetadata):
- metadata = self.get_metadata_object()
+ metadata = self.get_obj()
if 'clients.xml' in metadata.states:
metadata.states['clients.xml'] = False
- self.assertRaises(MetadataRuntimeError,
+ self.assertRaises(Bcfg2.Server.Plugin.MetadataRuntimeError,
metadata.get_initial_metadata, None)
self.load_groups_data(metadata=metadata)
@@ -864,58 +838,71 @@ class TestMetadata(unittest.TestCase):
# test address, password
metadata.get_initial_metadata("client1")
- self.assertEqual(mock_clientmetadata.call_args[0][:9],
- ("client1", "group1", set(["group1"]), set(), set(),
- set(["1.2.3.1"]), dict(category1='group1'), None,
- 'password2'))
+ mock_clientmetadata.assert_called_with("client1", "group1",
+ set(["group1"]), set(), set(),
+ set(["1.2.3.1"]),
+ dict(category1='group1'), None,
+ 'password2', None,
+ metadata.query)
# test address, bundles, category suppression
metadata.get_initial_metadata("client2")
- self.assertEqual(mock_clientmetadata.call_args[0][:9],
- ("client2", "group2", set(["group2"]),
- set(["bundle1", "bundle2"]), set(),
- set(["1.2.3.2"]), dict(category1="group2"),
- None, None))
+ mock_clientmetadata.assert_called_with("client2", "group2",
+ set(["group2"]),
+ set(["bundle1", "bundle2"]),
+ set(), set(["1.2.3.2"]),
+ dict(category1="group2"),
+ None, None, None,
+ metadata.query)
# test aliases, address, uuid, password
imd = metadata.get_initial_metadata("alias1")
- self.assertEqual(mock_clientmetadata.call_args[0][:9],
- ("client3", "group1", set(["group1"]), set(),
- set(['alias1']), set(["1.2.3.3"]),
- dict(category1="group1"), 'uuid1', 'password2'))
+ mock_clientmetadata.assert_called_with("client3", "group1",
+ set(["group1"]), set(),
+ set(['alias1']),
+ set(["1.2.3.3"]),
+ dict(category1="group1"),
+ 'uuid1', 'password2', None,
+ metadata.query)
# test new client creation
new1 = self.get_nonexistent_client(metadata)
imd = metadata.get_initial_metadata(new1)
- self.assertEqual(mock_clientmetadata.call_args[0][:9],
- (new1, "group1", set(["group1"]), set(),
- set(), set(), dict(category1="group1"), None, None))
+ mock_clientmetadata.assert_called_with(new1, "group1", set(["group1"]),
+ set(), set(), set(),
+ dict(category1="group1"), None,
+ None, None, metadata.query)
# test nested groups, address, per-client groups
imd = metadata.get_initial_metadata("client8")
- self.assertEqual(mock_clientmetadata.call_args[0][:9],
- ("client8", "group1",
- set(["group1", "group8", "group9", "group10"]), set(),
- set(), set(["1.2.3.5"]), dict(category1="group1"),
- None, None))
+ mock_clientmetadata.assert_called_with("client8", "group1",
+ set(["group1", "group8",
+ "group9", "group10"]),
+ set(),
+ set(), set(["1.2.3.5"]),
+ dict(category1="group1"),
+ None, None, None, metadata.query)
# test setting per-client groups, group negation, nested groups
imd = metadata.get_initial_metadata("client9")
- self.assertEqual(mock_clientmetadata.call_args[0][:9],
- ("client9", "group2",
- set(["group2", "group8", "group11"]),
- set(["bundle1", "bundle2"]), set(), set(),
- dict(category1="group2"), None, "password3"))
+ mock_clientmetadata.assert_called_with("client9", "group2",
+ set(["group2", "group8",
+ "group11"]),
+ set(["bundle1", "bundle2"]),
+ set(), set(),
+ dict(category1="group2"), None,
+ "password3", None,
+ metadata.query)
# test new client with no default profile
metadata.default = None
new2 = self.get_nonexistent_client(metadata)
- self.assertRaises(MetadataConsistencyError,
+ self.assertRaises(Bcfg2.Server.Plugin.MetadataConsistencyError,
metadata.get_initial_metadata, new2)
@patch("Bcfg2.Server.Plugins.Metadata.XMLMetadataConfig.load_xml", Mock())
def test_merge_groups(self):
- metadata = self.get_metadata_object()
+ metadata = self.get_obj()
self.load_groups_data(metadata=metadata)
self.load_clients_data(metadata=metadata)
@@ -1076,7 +1063,8 @@ class TestMetadata(unittest.TestCase):
self.assertTrue(metadata.AuthenticateConnection(None, "root",
"password1", "1.2.3.8"))
- mock_resolve_client.side_effect = MetadataConsistencyError
+ mock_resolve_client.side_effect = \
+ Bcfg2.Server.Plugin.MetadataConsistencyError
self.assertFalse(metadata.AuthenticateConnection(None, "root",
"password1",
"1.2.3.8"))
@@ -1133,7 +1121,7 @@ class TestMetadataBase(TestMetadata):
def __init__(self, *args, **kwargs):
TestMetadata.__init__(self, *args, **kwargs)
- test_syncdb()
+ syncdb(TestMetadataDB)
def setUp(self):
if not has_django:
@@ -1141,7 +1129,7 @@ class TestMetadataBase(TestMetadata):
def load_clients_data(self, metadata=None, xdata=None):
if metadata is None:
- metadata = get_metadata_object()
+            metadata = self.get_obj()
for client in clients_test_tree.findall("Client"):
metadata.add_client(client.get("name"))
return metadata
@@ -1160,7 +1148,7 @@ class TestMetadataBase(TestMetadata):
core = Mock()
core.fam = Mock()
mock_exists.return_value = False
- metadata = self.get_metadata_object(core=core, watch_clients=True)
+ metadata = self.get_obj(core=core, watch_clients=True)
self.assertIsInstance(metadata, Bcfg2.Server.Plugin.DatabaseBacked)
core.fam.AddMonitor.assert_called_once_with(os.path.join(metadata.data,
"groups.xml"),
@@ -1168,7 +1156,7 @@ class TestMetadataBase(TestMetadata):
mock_exists.return_value = True
core.fam.reset_mock()
- metadata = self.get_metadata_object(core=core, watch_clients=True)
+ metadata = self.get_obj(core=core, watch_clients=True)
core.fam.AddMonitor.assert_any_call(os.path.join(metadata.data,
"groups.xml"),
metadata)
@@ -1183,7 +1171,7 @@ class TestMetadataBase(TestMetadata):
pass
def test_add_client(self):
- metadata = self.get_metadata_object()
+ metadata = self.get_obj()
hostname = self.get_nonexistent_client(metadata)
client = metadata.add_client(hostname)
self.assertIsInstance(client, MetadataClientModel)
@@ -1204,7 +1192,7 @@ class TestMetadataBase(TestMetadata):
pass
def test_list_clients(self):
- metadata = self.get_metadata_object()
+ metadata = self.get_obj()
self.assertItemsEqual(metadata.list_clients(),
[c.hostname
for c in MetadataClientModel.objects.all()])
@@ -1216,10 +1204,10 @@ class TestMetadataBase(TestMetadata):
pass
def test_remove_client(self):
- metadata = self.get_metadata_object()
+ metadata = self.get_obj()
client_name = self.get_nonexistent_client(metadata)
- self.assertRaises(MetadataConsistencyError,
+ self.assertRaises(Bcfg2.Server.Plugin.MetadataConsistencyError,
metadata.remove_client,
client_name)
@@ -1253,10 +1241,10 @@ class TestMetadata_NoClientsXML(TestMetadataBase):
@patch("Bcfg2.Server.Plugins.Metadata.XMLMetadataConfig.write_xml", Mock())
@patch("Bcfg2.Server.Plugins.Metadata.ClientMetadata")
def test_get_initial_metadata(self, mock_clientmetadata):
- metadata = self.get_metadata_object()
+ metadata = self.get_obj()
if 'clients.xml' in metadata.states:
metadata.states['clients.xml'] = False
- self.assertRaises(MetadataRuntimeError,
+ self.assertRaises(Bcfg2.Server.Plugin.MetadataRuntimeError,
metadata.get_initial_metadata, None)
self.load_groups_data(metadata=metadata)
@@ -1264,43 +1252,51 @@ class TestMetadata_NoClientsXML(TestMetadataBase):
# test basic client metadata
metadata.get_initial_metadata("client1")
- self.assertEqual(mock_clientmetadata.call_args[0][:9],
- ("client1", "group1", set(["group1"]), set(), set(),
- set(), dict(category1='group1'), None, None))
+ mock_clientmetadata.assert_called_with("client1", "group1",
+ set(["group1"]), set(), set(),
+ set(), dict(category1='group1'),
+ None, None, None, metadata.query)
# test bundles, category suppression
metadata.get_initial_metadata("client2")
- self.assertEqual(mock_clientmetadata.call_args[0][:9],
- ("client2", "group2", set(["group2"]),
- set(["bundle1", "bundle2"]), set(), set(),
- dict(category1="group2"), None, None))
+ mock_clientmetadata.assert_called_with("client2", "group2",
+ set(["group2"]),
+ set(["bundle1", "bundle2"]),
+ set(), set(),
+ dict(category1="group2"), None,
+ None, None, metadata.query)
# test new client creation
new1 = self.get_nonexistent_client(metadata)
imd = metadata.get_initial_metadata(new1)
- self.assertEqual(mock_clientmetadata.call_args[0][:9],
- (new1, "group1", set(["group1"]), set(), set(), set(),
- dict(category1="group1"), None, None))
+ mock_clientmetadata.assert_called_with(new1, "group1", set(["group1"]),
+ set(), set(), set(),
+ dict(category1="group1"), None,
+ None, None, metadata.query)
# test nested groups, per-client groups
imd = metadata.get_initial_metadata("client8")
- self.assertEqual(mock_clientmetadata.call_args[0][:9],
- ("client8", "group1",
- set(["group1", "group8", "group9", "group10"]), set(),
- set(), set(), dict(category1="group1"), None, None))
+ mock_clientmetadata.assert_called_with("client8", "group1",
+ set(["group1", "group8",
+ "group9", "group10"]),
+ set(), set(), set(),
+ dict(category1="group1"), None,
+ None, None, metadata.query)
# test per-client groups, group negation, nested groups
imd = metadata.get_initial_metadata("client9")
- self.assertEqual(mock_clientmetadata.call_args[0][:9],
- ("client9", "group2",
- set(["group2", "group8", "group11"]),
- set(["bundle1", "bundle2"]), set(), set(),
- dict(category1="group2"), None, None))
+ mock_clientmetadata.assert_called_with("client9", "group2",
+ set(["group2", "group8",
+ "group11"]),
+ set(["bundle1", "bundle2"]),
+ set(), set(),
+ dict(category1="group2"), None,
+ None, None, metadata.query)
# test exception on new client with no default profile
metadata.default = None
new2 = self.get_nonexistent_client(metadata)
- self.assertRaises(MetadataConsistencyError,
+ self.assertRaises(Bcfg2.Server.Plugin.MetadataConsistencyError,
metadata.get_initial_metadata,
new2)
@@ -1342,7 +1338,8 @@ class TestMetadata_NoClientsXML(TestMetadataBase):
self.assertTrue(metadata.AuthenticateConnection(None, "root",
"password1", "1.2.3.8"))
- mock_resolve_client.side_effect = MetadataConsistencyError
+ mock_resolve_client.side_effect = \
+ Bcfg2.Server.Plugin.MetadataConsistencyError
self.assertFalse(metadata.AuthenticateConnection(None, "root",
"password1",
"1.2.3.8"))
@@ -1368,7 +1365,7 @@ class TestMetadata_NoClientsXML(TestMetadataBase):
mock_gethostbyaddr.reset_mock()
mock_gethostbyaddr.return_value = None
mock_gethostbyaddr.side_effect = socket.herror
- self.assertRaises(MetadataConsistencyError,
+ self.assertRaises(Bcfg2.Server.Plugin.MetadataConsistencyError,
metadata.resolve_client,
('1.2.3.8', None))
mock_gethostbyaddr.assert_called_with('1.2.3.8')
@@ -1383,7 +1380,7 @@ class TestMetadata_ClientsXML(TestMetadataBase):
def load_clients_data(self, metadata=None, xdata=None):
if metadata is None:
- metadata = self.get_metadata_object()
+ metadata = self.get_obj()
metadata.core.fam = Mock()
metadata._handle_file("clients.xml")
metadata = TestMetadata.load_clients_data(self, metadata=metadata,
@@ -1396,7 +1393,7 @@ class TestMetadata_ClientsXML(TestMetadataBase):
@patch("Bcfg2.Server.Plugins.Metadata.Metadata.list_clients")
def test_clients_xml_event(self, mock_list_clients, mock_handle_event,
mock_load_xml):
- metadata = self.get_metadata_object()
+ metadata = self.get_obj()
metadata.profiles = ["group1", "group2"]
evt = Mock()
evt.filename = os.path.join(datastore, "Metadata", "clients.xml")
diff --git a/testsuite/Testlib/TestServer/TestPlugins/TestProbes.py b/testsuite/Testlib/TestServer/TestPlugins/TestProbes.py
index 92e0037f3..0bcb65dc4 100644
--- a/testsuite/Testlib/TestServer/TestPlugins/TestProbes.py
+++ b/testsuite/Testlib/TestServer/TestPlugins/TestProbes.py
@@ -3,53 +3,26 @@ import sys
import time
import unittest
import lxml.etree
-from mock import Mock, patch
-
-try:
- from django.core.management import setup_environ
- has_django = True
-
- os.environ['DJANGO_SETTINGS_MODULE'] = "Bcfg2.settings"
-
- import Bcfg2.settings
- Bcfg2.settings.DATABASE_NAME = \
- os.path.join(os.path.dirname(os.path.abspath(__file__)), "test.sqlite")
- Bcfg2.settings.DATABASES['default']['NAME'] = Bcfg2.settings.DATABASE_NAME
-except ImportError:
- has_django = False
-
+from mock import Mock, MagicMock, patch
+from ....common import *
import Bcfg2.Server
import Bcfg2.Server.Plugin
from Bcfg2.Server.Plugins.Probes import *
-
-datastore = "/"
+from ..TestPlugin import TestEntrySet, TestProbing, TestConnector, \
+ TestDatabaseBacked
# test data for JSON and YAML tests
test_data = dict(a=1, b=[1, 2, 3], c="test")
-def test_syncdb():
- if not has_django:
- raise unittest.SkipTest("Django not found, skipping")
-
- # create the test database
- setup_environ(Bcfg2.settings)
- from django.core.management.commands import syncdb
- cmd = syncdb.Command()
- cmd.handle_noargs(interactive=False)
- assert os.path.exists(Bcfg2.settings.DATABASE_NAME)
-
- # ensure that we a) can connect to the database; b) start with a
- # clean database
- ProbesDataModel.objects.all().delete()
- ProbesGroupsModel.objects.all().delete()
- assert list(ProbesDataModel.objects.all()) == []
-
-
class FakeList(list):
sort = Mock()
-class TestClientProbeDataSet(unittest.TestCase):
+class TestProbesDB(DBModelTestCase):
+ models = [ProbesGroupsModel, ProbesDataModel]
+
+
+class TestClientProbeDataSet(Bcfg2TestCase):
def test__init(self):
ds = ClientProbeDataSet()
self.assertLessEqual(ds.timestamp, time.time())
@@ -60,7 +33,7 @@ class TestClientProbeDataSet(unittest.TestCase):
self.assertEqual(ds.timestamp, 123)
self.assertNotIn("timestamp", ds)
-class TestProbeData(unittest.TestCase):
+class TestProbeData(Bcfg2TestCase):
def test_str(self):
# a value that is not valid XML, JSON, or YAML
val = "'test"
@@ -100,20 +73,32 @@ class TestProbeData(unittest.TestCase):
self.assertItemsEqual(test_data, data.yaml)
-class TestProbeSet(unittest.TestCase):
- def get_probeset_object(self, fam=None):
+class TestProbeSet(TestEntrySet):
+ test_obj = ProbeSet
+ basenames = ["test", "_test", "test-test"]
+ ignore = ["foo~", ".#foo", ".foo.swp", ".foo.swx", "probed.xml"]
+ bogus_names = ["test.py"]
+
+ def get_obj(self, path=datastore, fam=None, encoding=None,
+ plugin_name="Probes", basename=None):
+        # get_obj() accepts the basename argument (as the parent
+        # get_obj() does) but simply discards it, since ProbeSet
+        # uses a regex for the "basename"
if fam is None:
fam = Mock()
- return ProbeSet(datastore, fam, None, "Probes")
+ rv = self.test_obj(path, fam, encoding, plugin_name)
+ rv.entry_type = MagicMock()
+ return rv
def test__init(self):
fam = Mock()
- ps = self.get_probeset_object(fam)
+ ps = self.get_obj(fam=fam)
self.assertEqual(ps.plugin_name, "Probes")
fam.AddMonitor.assert_called_with(datastore, ps)
+ TestEntrySet.test__init(self)
def test_HandleEvent(self):
- ps = self.get_probeset_object()
+ ps = self.get_obj()
ps.handle_event = Mock()
# test that events on the data store itself are skipped
@@ -136,7 +121,7 @@ class TestProbeSet(unittest.TestCase):
@patch("__builtin__.list", FakeList)
def test_get_probe_data(self):
- ps = self.get_probeset_object()
+ ps = self.get_obj()
# build some fairly complex test data for this. in the end,
# we want the probe data to include only the most specific
@@ -196,7 +181,9 @@ group-specific"""
assert False, "Strange probe found in get_probe_data() return"
-class TestProbes(unittest.TestCase):
+class TestProbes(TestProbing, TestConnector, TestDatabaseBacked):
+ test_obj = Probes
+
def get_test_probedata(self):
test_xdata = lxml.etree.Element("test")
lxml.etree.SubElement(test_xdata, "test", foo="foo")
@@ -249,19 +236,20 @@ text
"use_database",
default=False)
+ @unittest.skipUnless(has_django, "Django not found, skipping")
@patch("Bcfg2.Server.Plugins.Probes.Probes._write_data_db", Mock())
@patch("Bcfg2.Server.Plugins.Probes.Probes._write_data_xml", Mock())
- def test_write_data(self):
+ def test_write_data_xml(self):
probes = self.get_probes_object(use_db=False)
probes.write_data("test")
probes._write_data_xml.assert_called_with("test")
self.assertFalse(probes._write_data_db.called)
- if not has_django:
- self.skipTest("Django not found, skipping")
+ @unittest.skipUnless(has_django, "Django not found, skipping")
+ @patch("Bcfg2.Server.Plugins.Probes.Probes._write_data_db", Mock())
+ @patch("Bcfg2.Server.Plugins.Probes.Probes._write_data_xml", Mock())
+ def test_write_data_db(self):
probes = self.get_probes_object(use_db=True)
- probes._write_data_xml.reset_mock()
- probes._write_data_db.reset_mock()
probes.write_data("test")
probes._write_data_db.assert_called_with("test")
self.assertFalse(probes._write_data_xml.called)
@@ -322,10 +310,9 @@ text
self.assertIsNotNone(jdata.get("value"))
self.assertItemsEqual(test_data, json.loads(jdata.get("value")))
+ @unittest.skipUnless(has_django, "Django not found, skipping")
def test__write_data_db(self):
- if not has_django:
- self.skipTest("Django not found, skipping")
- test_syncdb()
+ syncdb(TestProbesDB)
probes = self.get_probes_object(use_db=True)
probes.probedata = self.get_test_probedata()
probes.cgroups = self.get_test_cgroups()
@@ -375,23 +362,20 @@ text
pgroups = ProbesGroupsModel.objects.filter(hostname=cname).all()
self.assertEqual(len(pgroups), len(probes.cgroups[cname]))
+ @unittest.skipUnless(has_django, "Django not found, skipping")
@patch("Bcfg2.Server.Plugins.Probes.Probes._load_data_db", Mock())
@patch("Bcfg2.Server.Plugins.Probes.Probes._load_data_xml", Mock())
- def test_load_data(self):
+ def test_load_data_xml(self):
probes = self.get_probes_object(use_db=False)
- probes._load_data_xml.reset_mock()
- probes._load_data_db.reset_mock()
-
probes.load_data()
probes._load_data_xml.assert_any_call()
self.assertFalse(probes._load_data_db.called)
- if not has_django:
- self.skipTest("Django not found, skipping")
-
+ @unittest.skipUnless(has_django, "Django not found, skipping")
+ @patch("Bcfg2.Server.Plugins.Probes.Probes._load_data_db", Mock())
+ @patch("Bcfg2.Server.Plugins.Probes.Probes._load_data_xml", Mock())
+ def test_load_data_db(self):
probes = self.get_probes_object(use_db=True)
- probes._load_data_xml.reset_mock()
- probes._load_data_db.reset_mock()
probes.load_data()
probes._load_data_db.assert_any_call()
self.assertFalse(probes._load_data_xml.called)
@@ -408,7 +392,6 @@ text
probes._write_data_xml(None)
xdata = \
lxml.etree.XML(str(mock_open.return_value.write.call_args[0][0]))
- print "rv = %s" % lxml.etree.tostring(xdata)
mock_parse.return_value = xdata.getroottree()
probes.probedata = dict()
probes.cgroups = dict()
@@ -420,10 +403,9 @@ text
self.assertItemsEqual(probes.probedata, self.get_test_probedata())
self.assertItemsEqual(probes.cgroups, self.get_test_cgroups())
+ @unittest.skipUnless(has_django, "Django not found, skipping")
def test__load_data_db(self):
- if not has_django:
- self.skipTest("Django not found, skipping")
- test_syncdb()
+ syncdb(TestProbesDB)
probes = self.get_probes_object(use_db=True)
probes.probedata = self.get_test_probedata()
probes.cgroups = self.get_test_cgroups()
@@ -467,8 +449,8 @@ text
probes.ReceiveData(client, datalist)
self.assertItemsEqual(mock_ReceiveDataItem.call_args_list,
- [((client, "a"), {}), ((client, "b"), {}),
- ((client, "c"), {})])
+ [call(client, "a"), call(client, "b"),
+ call(client, "c")])
mock_write_data.assert_called_with(client)
def test_ReceiveDataItem(self):
diff --git a/testsuite/Testlib/TestServer/TestPlugins/__init__.py b/testsuite/Testlib/TestServer/TestPlugins/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/testsuite/Testlib/TestServer/TestPlugins/__init__.py
diff --git a/testsuite/Testlib/TestServer/__init__.py b/testsuite/Testlib/TestServer/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/testsuite/Testlib/TestServer/__init__.py
diff --git a/testsuite/Testlib/__init__.py b/testsuite/Testlib/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/testsuite/Testlib/__init__.py
diff --git a/testsuite/__init__.py b/testsuite/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/testsuite/__init__.py
diff --git a/testsuite/common.py b/testsuite/common.py
new file mode 100644
index 000000000..006b45970
--- /dev/null
+++ b/testsuite/common.py
@@ -0,0 +1,77 @@
+import os
+import unittest
+
+__all__ = ['call', 'datastore', 'Bcfg2TestCase', 'DBModelTestCase', 'syncdb',
+ 'XI', 'XI_NAMESPACE']
+
+datastore = "/"
+
+XI_NAMESPACE = "http://www.w3.org/2001/XInclude"
+XI = "{%s}" % XI_NAMESPACE
+
+try:
+ from django.core.management import setup_environ
+ has_django = True
+
+ os.environ['DJANGO_SETTINGS_MODULE'] = "Bcfg2.settings"
+
+ import Bcfg2.settings
+ Bcfg2.settings.DATABASE_NAME = \
+ os.path.join(os.path.dirname(os.path.abspath(__file__)), "test.sqlite")
+ Bcfg2.settings.DATABASES['default']['NAME'] = Bcfg2.settings.DATABASE_NAME
+except ImportError:
+ has_django = False
+
+try:
+ from mock import call
+except ImportError:
+ def call(*args, **kwargs):
+ """ the Mock call object is a fairly recent addition, but it's
+ very very useful, so we create our own function to create Mock
+ calls """
+ return (args, kwargs)
+
+
+class Bcfg2TestCase(unittest.TestCase):
+ def assertXMLEqual(self, el1, el2, msg=None):
+ self.assertEqual(el1.tag, el2.tag, msg=msg)
+ self.assertEqual(el1.text, el2.text, msg=msg)
+ self.assertItemsEqual(el1.attrib, el2.attrib, msg=msg)
+ self.assertEqual(len(el1.getchildren()),
+ len(el2.getchildren()))
+ for child1 in el1.getchildren():
+ cname = child1.get("name")
+ self.assertIsNotNone(cname,
+ msg="Element %s has no 'name' attribute" %
+ child1.tag)
+ children2 = el2.xpath("*[@name='%s']" % cname)
+ self.assertEqual(len(children2), 1,
+ msg="More than one element named %s" % cname)
+ self.assertXMLEqual(child1, children2[0], msg=msg)
+
+
+class DBModelTestCase(Bcfg2TestCase):
+ models = []
+
+ @unittest.skipUnless(has_django, "Django not found, skipping")
+ def test_syncdb(self):
+ # create the test database
+ setup_environ(Bcfg2.settings)
+ from django.core.management.commands import syncdb
+ cmd = syncdb.Command()
+ cmd.handle_noargs(interactive=False)
+ self.assertTrue(os.path.exists(Bcfg2.settings.DATABASE_NAME))
+
+ @unittest.skipUnless(has_django, "Django not found, skipping")
+ def test_cleandb(self):
+ """ ensure that we a) can connect to the database; b) start with a
+ clean database """
+ for model in self.models:
+ model.objects.all().delete()
+ self.assertItemsEqual(list(model.objects.all()), [])
+
+
+def syncdb(modeltest):
+ inst = modeltest(methodName='test_syncdb')
+ inst.test_syncdb()
+ inst.test_cleandb()