author     Jason Kincl <kincljc@ornl.gov>   2012-11-27 14:28:01 -0500
committer  Jason Kincl <kincljc@ornl.gov>   2012-11-27 14:28:01 -0500
commit     5e0265f837f0eb72123be0b5150451aebdf8b031 (patch)
tree       dbd5fdbe4ec93c48cbba6fec3f608ffefb26eac5 /src
parent     894299b01b6138c54a99fd41f166554d175d6106 (diff)
parent     4c70626094248495bf2c11c09bf2f2f60917187d (diff)
download   bcfg2-5e0265f837f0eb72123be0b5150451aebdf8b031.tar.gz
           bcfg2-5e0265f837f0eb72123be0b5150451aebdf8b031.tar.bz2
           bcfg2-5e0265f837f0eb72123be0b5150451aebdf8b031.zip
Merge remote branch 'upstream/master' into jasons-hacking
Diffstat (limited to 'src')
-rw-r--r--  src/lib/Bcfg2/Client/Tools/YUM.py                        8
-rw-r--r--  src/lib/Bcfg2/Logger.py                                 13
-rw-r--r--  src/lib/Bcfg2/Reporting/Transport/DirectStore.py        21
-rw-r--r--  src/lib/Bcfg2/Reporting/Transport/LocalFilesystem.py    22
-rw-r--r--  src/lib/Bcfg2/Reporting/Transport/base.py               22
-rw-r--r--  src/lib/Bcfg2/Server/BuiltinCore.py                     12
-rw-r--r--  src/lib/Bcfg2/Server/Core.py                            48
-rw-r--r--  src/lib/Bcfg2/Server/Plugin/interfaces.py               27
-rw-r--r--  src/lib/Bcfg2/Server/Plugins/Bundler.py                 15
-rw-r--r--  src/lib/Bcfg2/Server/Plugins/Cfg/CfgCheetahGenerator.py  4
-rw-r--r--  src/lib/Bcfg2/Server/Plugins/Cfg/CfgGenshiGenerator.py  16
-rw-r--r--  src/lib/Bcfg2/Server/Plugins/Packages/Yum.py            35
-rw-r--r--  src/lib/Bcfg2/Server/Plugins/Probes.py                   3
-rw-r--r--  src/lib/Bcfg2/Server/Plugins/Reporting.py               26
-rw-r--r--  src/lib/Bcfg2/Server/Plugins/SSLCA.py                    5
-rw-r--r--  src/lib/Bcfg2/Server/Plugins/Snapshots.py                2
-rw-r--r--  src/lib/Bcfg2/Server/Plugins/Svn.py                      3
-rwxr-xr-x  src/sbin/bcfg2-info                                     44
18 files changed, 224 insertions, 102 deletions
diff --git a/src/lib/Bcfg2/Client/Tools/YUM.py b/src/lib/Bcfg2/Client/Tools/YUM.py
index 5d20c0462..928aba1e1 100644
--- a/src/lib/Bcfg2/Client/Tools/YUM.py
+++ b/src/lib/Bcfg2/Client/Tools/YUM.py
@@ -751,7 +751,7 @@ class YUM(Bcfg2.Client.Tools.PkgTool):
rescode, restring = self.yumbase.buildTransaction()
except yum.Errors.YumBaseError:
err = sys.exc_info()[1]
- self.logger.error("Yum transaction error: %s" % err)
+ self.logger.error("Error building Yum transaction: %s" % err)
cleanup()
return
@@ -767,7 +767,7 @@ class YUM(Bcfg2.Client.Tools.PkgTool):
self.logger.info("Single Pass for Install Succeeded")
except yum.Errors.YumBaseError:
err = sys.exc_info()[1]
- self.logger.error("Yum transaction error: %s" % err)
+ self.logger.error("Error processing Yum transaction: %s" % err)
cleanup()
return
else:
@@ -788,7 +788,7 @@ class YUM(Bcfg2.Client.Tools.PkgTool):
self.logger.debug(" %s" % restring)
except yum.Errors.YumBaseError:
err = sys.exc_info()[1]
- self.logger.error("Yum transaction error: %s" % err)
+ self.logger.error("Error rerunning Yum transaction: %s" % err)
self.yumbase.conf.skip_broken = skip_broken
@@ -850,7 +850,7 @@ class YUM(Bcfg2.Client.Tools.PkgTool):
if inst not in self.instance_status:
self.logger.warning(
" Asked to install/update package never "
- "verified: %s" %
+ "verified: %s" %
nevra2string(build_yname(pkg.get('name'), inst)))
continue
status = self.instance_status[inst]
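
The YUM.py hunks only reword the error messages so each one names the transaction phase that failed (building, processing, rerunning). The surrounding code captures the exception with sys.exc_info()[1], a pattern used throughout Bcfg2, presumably for compatibility with Python versions that predate the "except E as err" syntax. A minimal, self-contained sketch of that pattern (names below are illustrative, not Bcfg2 API):

import logging
import sys

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("yum-example")

def run_phase(phase_name, func):
    """Run one transaction phase and log which phase failed."""
    try:
        return func()
    except Exception:
        # sys.exc_info()[1] retrieves the active exception without the
        # 'except E as err' syntax, which very old Pythons lacked.
        err = sys.exc_info()[1]
        logger.error("Error %s Yum transaction: %s" % (phase_name, err))
        return None

run_phase("building", lambda: 1 / 0)   # logs "Error building Yum transaction: ..."
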
diff --git a/src/lib/Bcfg2/Logger.py b/src/lib/Bcfg2/Logger.py
index a06d6e79e..24028f71c 100644
--- a/src/lib/Bcfg2/Logger.py
+++ b/src/lib/Bcfg2/Logger.py
@@ -158,7 +158,7 @@ def add_syslog_handler(procname, syslog_facility, level=logging.DEBUG):
logging.Formatter('%(name)s[%(process)d]: %(message)s'))
logging.root.addHandler(syslog)
except socket.error:
- logging.root.error("failed to activate syslogging")
+ logging.root.error("Failed to activate syslogging")
except:
print("Failed to activate syslogging")
@@ -178,17 +178,22 @@ def setup_logging(procname, to_console=True, to_syslog=True,
if hasattr(logging, 'already_setup'):
return
+ params = []
+
if to_console:
if to_console == True:
- clvl = min(logging.WARNING, level)
- else:
- clvl = min(to_console, level)
+ to_console = logging.WARNING
+ clvl = min(to_console, level)
+ params.append("%s to console" % logging.getLevelName(clvl))
add_console_handler(clvl)
if to_syslog:
slvl = min(level, logging.INFO)
+ params.append("%s to syslog" % logging.getLevelName(slvl))
add_syslog_handler(procname, syslog_facility, level=slvl)
if to_file is not None:
+ params.append("%s to %s" % (logging.getLevelName(level), to_file))
add_file_handler(to_file, level=level)
logging.root.setLevel(logging.DEBUG)
+ logging.root.debug("Configured logging: %s" % "; ".join(params))
logging.already_setup = True
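
The setup_logging change lets to_console be either a boolean (True now meaning WARNING) or an explicit level, clamps each handler to min(handler_level, level), and records what was configured. A minimal sketch of that level arithmetic with the standard logging module; the function and handler below are illustrative, not the Bcfg2 helpers:

import logging

def setup_logging_example(level=logging.INFO, to_console=True):
    """Mirror the level selection: True means WARNING, otherwise use the
    given value; the handler is never less verbose than 'level'."""
    params = []
    if to_console:
        if to_console is True:
            to_console = logging.WARNING
        clvl = min(to_console, level)        # lower number == more verbose
        handler = logging.StreamHandler()
        handler.setLevel(clvl)
        logging.root.addHandler(handler)
        params.append("%s to console" % logging.getLevelName(clvl))
    logging.root.setLevel(logging.DEBUG)
    logging.root.debug("Configured logging: %s" % "; ".join(params))

setup_logging_example(level=logging.DEBUG)   # console handler ends up at DEBUG
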
diff --git a/src/lib/Bcfg2/Reporting/Transport/DirectStore.py b/src/lib/Bcfg2/Reporting/Transport/DirectStore.py
index 8677efb5f..79d1b5aba 100644
--- a/src/lib/Bcfg2/Reporting/Transport/DirectStore.py
+++ b/src/lib/Bcfg2/Reporting/Transport/DirectStore.py
@@ -15,9 +15,14 @@ class DirectStore(TransportBase, threading.Thread):
TransportBase.__init__(self, setup)
threading.Thread.__init__(self)
self.save_file = os.path.join(self.data, ".saved")
+
self.storage = load_storage_from_config(setup)
+ self.storage.validate()
+
self.queue = Queue(100000)
self.terminate = threading.Event()
+ self.debug_log("Reporting: Starting %s thread" %
+ self.__class__.__name__)
self.start()
def shutdown(self):
@@ -35,6 +40,8 @@ class DirectStore(TransportBase, threading.Thread):
def run(self):
if not self._load():
+ self.logger.warning("Reporting: Failed to load saved data, "
+ "DirectStore thread exiting")
return
while not self.terminate.isSet() and self.queue is not None:
try:
@@ -42,16 +49,19 @@ class DirectStore(TransportBase, threading.Thread):
timeout=self.timeout)
start = time.time()
self.storage.import_interaction(interaction)
- self.logger.info("Imported data for %s in %s seconds" \
- % (interaction.get('hostname', '<unknown>'), \
- time.time() - start))
+ self.logger.info("Imported data for %s in %s seconds" %
+ (interaction.get('hostname', '<unknown>'),
+ time.time() - start))
except Empty:
+ self.debug_log("Reporting: Queue is empty")
continue
except:
err = sys.exc_info()[1]
self.logger.error("Reporting: Could not import interaction: %s"
% err)
continue
+ self.debug_log("Reporting: Stopping %s thread" %
+ self.__class__.__name__)
if self.queue is not None and not self.queue.empty():
self._save()
@@ -74,6 +84,8 @@ class DirectStore(TransportBase, threading.Thread):
def _save(self):
""" Save any saved data to a file """
+ self.debug_log("Reporting: Saving pending data to %s" %
+ self.save_file)
saved_data = []
try:
while not self.queue.empty():
@@ -93,6 +105,7 @@ class DirectStore(TransportBase, threading.Thread):
def _load(self):
""" Load any saved data from a file """
if not os.path.exists(self.save_file):
+ self.debug_log("Reporting: No saved data to load")
return True
saved_data = []
try:
@@ -106,6 +119,8 @@ class DirectStore(TransportBase, threading.Thread):
for interaction in saved_data:
# check that shutdown wasnt called early
if self.terminate.isSet():
+ self.logger.warning("Reporting: Shutdown called while loading "
+ " saved data")
return False
try:
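
DirectStore now validates its storage backend at construction time and logs the life cycle of its consumer thread. The core of that thread is a bounded queue drained with a timeout, so the Empty exception doubles as a periodic check of the terminate event. A stripped-down, standard-library-only sketch of the same consumer pattern (the class and messages are illustrative; the patched code uses the older isSet() alias where this sketch uses is_set()):

import threading
import time
try:
    from queue import Queue, Empty        # Python 3
except ImportError:
    from Queue import Queue, Empty        # Python 2

class QueueConsumer(threading.Thread):
    """Drain a bounded queue until asked to terminate."""
    def __init__(self, timeout=2):
        threading.Thread.__init__(self)
        self.queue = Queue(100000)
        self.timeout = timeout
        self.terminate = threading.Event()
        self.start()

    def run(self):
        while not self.terminate.is_set():
            try:
                item = self.queue.get(block=True, timeout=self.timeout)
            except Empty:
                continue                   # wake up periodically to check terminate
            start = time.time()
            print("Imported %s in %s seconds" % (item, time.time() - start))

consumer = QueueConsumer()
consumer.queue.put("example-interaction")
time.sleep(0.1)
consumer.terminate.set()
consumer.join()
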
diff --git a/src/lib/Bcfg2/Reporting/Transport/LocalFilesystem.py b/src/lib/Bcfg2/Reporting/Transport/LocalFilesystem.py
index 8ccb9ed56..30ea39263 100644
--- a/src/lib/Bcfg2/Reporting/Transport/LocalFilesystem.py
+++ b/src/lib/Bcfg2/Reporting/Transport/LocalFilesystem.py
@@ -20,7 +20,7 @@ class LocalFilesystem(TransportBase):
super(LocalFilesystem, self).__init__(setup)
self.work_path = "%s/work" % self.data
- self.logger.debug("LocalFilesystem: work path %s" % self.work_path)
+ self.debug_log("LocalFilesystem: work path %s" % self.work_path)
self.fmon = None
self._phony_collector = None
@@ -34,6 +34,11 @@ class LocalFilesystem(TransportBase):
traceback.format_exc().splitlines()[-1]))
raise TransportError
+ def set_debug(self, debug):
+ rv = TransportBase.set_debug(self, debug)
+ self.fmon.set_debug(debug)
+ return rv
+
def start_monitor(self, collector):
"""Start the file monitor. Most of this comes from BaseCore"""
setup = self.setup
@@ -44,12 +49,13 @@ class LocalFilesystem(TransportBase):
"forcing to default" % setup['filemonitor'])
fmon = Bcfg2.Server.FileMonitor.available['default']
- fmdebug = setup.get('debug', False)
try:
- self.fmon = fmon(debug=fmdebug)
- self.logger.info("Using the %s file monitor" % self.fmon.__class__.__name__)
+ self.fmon = fmon(debug=self.debug_flag)
+ self.logger.info("Using the %s file monitor" %
+ self.fmon.__class__.__name__)
except IOError:
- msg = "Failed to instantiate file monitor %s" % setup['filemonitor']
+ msg = "Failed to instantiate file monitor %s" % \
+ setup['filemonitor']
self.logger.error(msg, exc_info=1)
raise TransportError(msg)
self.fmon.start()
@@ -108,11 +114,11 @@ class LocalFilesystem(TransportBase):
#deviate from the normal routines here we only want one event
etype = event.code2str()
- self.logger.debug("Recieved event %s for %s" % (etype, event.filename))
+ self.debug_log("Recieved event %s for %s" % (etype, event.filename))
if os.path.basename(event.filename)[0] == '.':
return None
if etype in ('created', 'exists'):
- self.logger.debug("Handling event %s" % event.filename)
+ self.debug_log("Handling event %s" % event.filename)
payload = os.path.join(self.work_path, event.filename)
try:
payloadfd = open(payload, "r")
@@ -150,7 +156,7 @@ class LocalFilesystem(TransportBase):
except ReportingError:
raise TransportError
except:
- self.logger.error("Failed to load collector: %s" %
+ self.logger.error("Failed to load collector: %s" %
traceback.format_exc().splitlines()[-1])
raise TransportError
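
LocalFilesystem now logs through debug_log and forwards set_debug to the file monitor it owns, so toggling debug on the transport also toggles it on the monitor. A generic sketch of that delegation pattern; the classes below are stand-ins, not the real Bcfg2 Debuggable:

class Debuggable(object):
    """Minimal stand-in for an object with a toggleable debug flag."""
    def __init__(self):
        self.debug_flag = False

    def set_debug(self, debug):
        self.debug_flag = debug
        return debug

    def debug_log(self, msg):
        if self.debug_flag:
            print(msg)

class Transport(Debuggable):
    def __init__(self, monitor):
        Debuggable.__init__(self)
        self.fmon = monitor

    def set_debug(self, debug):
        # Forward the toggle to the owned component so both stay in sync.
        rv = Debuggable.set_debug(self, debug)
        self.fmon.set_debug(debug)
        return rv

fmon = Debuggable()
transport = Transport(fmon)
transport.set_debug(True)
transport.debug_log("debug enabled on transport and file monitor")
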
diff --git a/src/lib/Bcfg2/Reporting/Transport/base.py b/src/lib/Bcfg2/Reporting/Transport/base.py
index cca7beda0..530011e47 100644
--- a/src/lib/Bcfg2/Reporting/Transport/base.py
+++ b/src/lib/Bcfg2/Reporting/Transport/base.py
@@ -2,26 +2,38 @@
The base for all server -> collector Transports
"""
-import os.path
-import logging
+import os
+import sys
+from Bcfg2.Server.Plugin import Debuggable
+
class TransportError(Exception):
"""Generic TransportError"""
pass
+
class TransportImportError(TransportError):
"""Raised when a transport fails to import"""
pass
-class TransportBase(object):
+
+class TransportBase(Debuggable):
"""The base for all transports"""
def __init__(self, setup):
"""Do something here"""
clsname = self.__class__.__name__
- self.logger = logging.getLogger(clsname)
- self.logger.debug("Loading %s transport" % clsname)
+ Debuggable.__init__(self, name=clsname)
+ self.debug_log("Loading %s transport" % clsname)
self.data = os.path.join(setup['repo'], 'Reporting', clsname)
+ if not os.path.exists(self.data):
+ self.logger.info("%s does not exist, creating" % self.data)
+ try:
+ os.makedirs(self.data)
+ except OSError:
+ self.logger.warning("Could not create %s: %s" %
+ (self.data, sys.exc_info()[1]))
+ self.logger.warning("The transport may not function properly")
self.setup = setup
self.timeout = 2
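
TransportBase becomes a Debuggable and now creates its per-transport data directory on first use, downgrading a failed os.makedirs to warnings instead of aborting. A self-contained sketch of that create-if-missing step (paths below are examples):

import os
import sys
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("transport-example")

def ensure_data_dir(repo, name):
    """Create <repo>/Reporting/<name> if it is missing; warn on failure."""
    data = os.path.join(repo, "Reporting", name)
    if not os.path.exists(data):
        logger.info("%s does not exist, creating" % data)
        try:
            os.makedirs(data)
        except OSError:
            logger.warning("Could not create %s: %s" % (data, sys.exc_info()[1]))
            logger.warning("The transport may not function properly")
    return data

ensure_data_dir("/tmp/bcfg2-example-repo", "LocalFilesystem")
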
diff --git a/src/lib/Bcfg2/Server/BuiltinCore.py b/src/lib/Bcfg2/Server/BuiltinCore.py
index 63149c15e..4d7453840 100644
--- a/src/lib/Bcfg2/Server/BuiltinCore.py
+++ b/src/lib/Bcfg2/Server/BuiltinCore.py
@@ -9,6 +9,7 @@ from Bcfg2.Server.Core import BaseCore, NoExposedMethod
from Bcfg2.Compat import xmlrpclib, urlparse
from Bcfg2.SSLServer import XMLRPCServer
+from lockfile import LockFailed
# pylint: disable=E0611
try:
from daemon.pidfile import PIDLockFile
@@ -80,9 +81,14 @@ class Core(BaseCore):
def _daemonize(self):
""" Open :attr:`context` to drop privileges, write the PID
file, and daemonize the server core. """
- self.context.open()
- self.logger.info("%s daemonized" % self.name)
- return True
+ try:
+ self.context.open()
+ self.logger.info("%s daemonized" % self.name)
+ return True
+ except LockFailed:
+ err = sys.exc_info()[1]
+ self.logger.error("Failed to daemonize %s: %s" % (self.name, err))
+ return False
def _run(self):
""" Create :attr:`server` to start the server listening. """
diff --git a/src/lib/Bcfg2/Server/Core.py b/src/lib/Bcfg2/Server/Core.py
index ee875a7e8..040036fb2 100644
--- a/src/lib/Bcfg2/Server/Core.py
+++ b/src/lib/Bcfg2/Server/Core.py
@@ -10,7 +10,6 @@ import threading
import time
import inspect
import lxml.etree
-from traceback import format_exc
import Bcfg2.settings
import Bcfg2.Server
import Bcfg2.Logger
@@ -181,6 +180,19 @@ class BaseCore(object):
#: backend for plugins that have that capability
self._database_available = False
if Bcfg2.settings.HAS_DJANGO:
+ db_settings = Bcfg2.settings.DATABASES['default']
+ if ('daemon' in self.setup and 'daemon_uid' in self.setup and
+ self.setup['daemon'] and self.setup['daemon_uid'] and
+ db_settings['ENGINE'].endswith(".sqlite3") and
+ not os.path.exists(db_settings['NAME'])):
+ # syncdb will create the sqlite database, and we're
+ # going to daemonize, dropping privs to a non-root
+ # user, so we need to chown the database after
+ # creating it
+ do_chown = True
+ else:
+ do_chown = False
+
from django.core.exceptions import ImproperlyConfigured
from django.core import management
try:
@@ -188,11 +200,21 @@ class BaseCore(object):
verbosity=0)
self._database_available = True
except ImproperlyConfigured:
- self.logger.error("Django configuration problem: %s" %
- format_exc().splitlines()[-1])
+ err = sys.exc_info()[1]
+ self.logger.error("Django configuration problem: %s" % err)
except:
- self.logger.error("Database update failed: %s" %
- format_exc().splitlines()[-1])
+ err = sys.exc_info()[1]
+ self.logger.error("Database update failed: %s" % err)
+
+ if do_chown and self._database_available:
+ try:
+ os.chown(db_settings['NAME'],
+ self.setup['daemon_uid'],
+ self.setup['daemon_gid'])
+ except OSError:
+ err = sys.exc_info()[1]
+ self.logger.error("Failed to set ownership of database "
+ "at %s: %s" % (db_settings['NAME'], err))
if '' in setup['plugins']:
setup['plugins'].remove('')
@@ -207,20 +229,23 @@ class BaseCore(object):
"Unloading %s" % (plugin, blacklist))
for plug in blacklist:
del self.plugins[plug]
- # This section logs the experimental plugins
+
+ # Log experimental plugins
expl = [plug for plug in list(self.plugins.values())
if plug.experimental]
if expl:
self.logger.info("Loading experimental plugin(s): %s" %
(" ".join([x.name for x in expl])))
self.logger.info("NOTE: Interfaces subject to change")
- # This section logs the deprecated plugins
+
+ # Log deprecated plugins
depr = [plug for plug in list(self.plugins.values())
if plug.deprecated]
if depr:
self.logger.info("Loading deprecated plugin(s): %s" %
(" ".join([x.name for x in depr])))
+ # Find the metadata plugin and set self.metadata
mlist = self.plugins_by_type(Bcfg2.Server.Plugin.Metadata)
if len(mlist) >= 1:
#: The Metadata plugin
@@ -522,7 +547,7 @@ class BaseCore(object):
except Exception:
exc = sys.exc_info()[1]
if 'failure' not in entry.attrib:
- entry.set('failure', 'bind error: %s' % format_exc())
+ entry.set('failure', 'bind error: %s' % exc)
self.logger.error("Unexpected failure in BindStructure: %s %s"
% (entry.tag, entry.get('name')), exc_info=1)
@@ -599,7 +624,7 @@ class BaseCore(object):
try:
structures = self.GetStructures(meta)
except:
- self.logger.error("error in GetStructures", exc_info=1)
+ self.logger.error("Error in GetStructures", exc_info=1)
return lxml.etree.Element("error", type='structure error')
self.validate_structures(meta, structures)
@@ -662,7 +687,7 @@ class BaseCore(object):
os.chown(piddir,
self.setup['daemon_uid'],
self.setup['daemon_gid'])
- os.chmod(piddir, 420) # 0644
+ os.chmod(piddir, 493) # 0775
if not self._daemonize():
return False
else:
@@ -676,6 +701,9 @@ class BaseCore(object):
self.fam.start()
self.fam_thread.start()
self.fam.AddMonitor(self.cfile, self)
+
+ for plug in self.plugins_by_type(Bcfg2.Server.Plugin.Threaded):
+ plug.start_threads()
except:
self.shutdown()
raise
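
The comment in the Core.py hunk explains the ordering problem being solved: syncdb creates the SQLite database while the server still runs as root, and the process is about to daemonize and drop to an unprivileged user, so the newly created file must be chowned to that user. A minimal sketch of the check-then-chown step (the uid/gid and path in the usage comment are placeholders):

import os
import sys

def fix_sqlite_ownership(db_path, daemon_uid, daemon_gid, logger):
    """Hand a root-created SQLite database over to the daemon user."""
    try:
        os.chown(db_path, daemon_uid, daemon_gid)
    except OSError:
        err = sys.exc_info()[1]
        logger.error("Failed to set ownership of database at %s: %s"
                     % (db_path, err))

# Only needed when the database did not exist before syncdb ran and the
# server will drop privileges afterwards, e.g.:
# fix_sqlite_ownership("/var/lib/bcfg2/etc/bcfg2.sqlite", 105, 105, some_logger)
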
diff --git a/src/lib/Bcfg2/Server/Plugin/interfaces.py b/src/lib/Bcfg2/Server/Plugin/interfaces.py
index cba3e8145..f42ada773 100644
--- a/src/lib/Bcfg2/Server/Plugin/interfaces.py
+++ b/src/lib/Bcfg2/Server/Plugin/interfaces.py
@@ -299,12 +299,27 @@ class Statistics(Plugin):
raise NotImplementedError
-class ThreadedStatistics(Statistics, threading.Thread):
+class Threaded(object):
+ """ Threaded plugins use threads in any way. The thread must be
+ started after daemonization, so this class implements a single
+ method, :func:`start_threads`, that can be used to start threads
+ after daemonization of the server core. """
+
+ def start_threads(self):
+ """ Start this plugin's threads after daemonization.
+
+ :return: None
+ :raises: :class:`Bcfg2.Server.Plugin.exceptions.PluginInitError`
+ """
+ raise NotImplementedError
+
+class ThreadedStatistics(Statistics, Threaded, threading.Thread):
""" ThreadedStatistics plugins process client statistics in a
separate thread. """
def __init__(self, core, datastore):
Statistics.__init__(self, core, datastore)
+ Threaded.__init__(self)
threading.Thread.__init__(self)
# Event from the core signaling an exit
self.terminate = core.terminate
@@ -312,6 +327,8 @@ class ThreadedStatistics(Statistics, threading.Thread):
self.pending_file = os.path.join(datastore, "etc",
"%s.pending" % self.name)
self.daemon = False
+
+ def start_threads(self):
self.start()
def _save(self):
@@ -517,11 +534,11 @@ class Version(Plugin):
def __init__(self, core, datastore):
Plugin.__init__(self, core, datastore)
+ if core.setup['vcs_root']:
+ self.vcs_root = core.setup['vcs_root']
+ else:
+ self.vcs_root = datastore
if self.__vcs_metadata_path__:
- if core.setup['vcs_root']:
- self.vcs_root = core.setup['vcs_root']
- else:
- self.vcs_root = datastore
self.vcs_path = os.path.join(self.vcs_root,
self.__vcs_metadata_path__)
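
The new Threaded mixin formalizes the rule stated in its docstring: plugins that own threads must not start them until after the core has daemonized, and they advertise that by implementing start_threads. A hypothetical plugin following the contract (illustrative only, not a real Bcfg2 plugin):

import threading

class Threaded(object):
    """Mixin: plugins that use threads start them via start_threads()."""
    def start_threads(self):
        raise NotImplementedError

class ExampleWorkerPlugin(Threaded, threading.Thread):
    """Hypothetical plugin whose worker thread starts only after the fork."""
    def __init__(self):
        Threaded.__init__(self)
        threading.Thread.__init__(self)
        self.daemon = True

    def run(self):
        print("worker running in the daemonized process")

    def start_threads(self):
        # Called by the core after _daemonize(), so the thread is not
        # lost when the parent process forks and exits.
        self.start()

plugin = ExampleWorkerPlugin()
# ... core daemonizes here ...
plugin.start_threads()
plugin.join()
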
diff --git a/src/lib/Bcfg2/Server/Plugins/Bundler.py b/src/lib/Bcfg2/Server/Plugins/Bundler.py
index 7933fe9be..b200346bc 100644
--- a/src/lib/Bcfg2/Server/Plugins/Bundler.py
+++ b/src/lib/Bcfg2/Server/Plugins/Bundler.py
@@ -19,6 +19,9 @@ except ImportError:
HAS_GENSHI = False
+SETUP = None
+
+
class BundleFile(Bcfg2.Server.Plugin.StructFile):
""" Representation of a bundle XML file """
def get_xml_value(self, metadata):
@@ -49,7 +52,8 @@ if HAS_GENSHI:
msg = "No parsed template information for %s" % self.name
self.logger.error(msg)
raise Bcfg2.Server.Plugin.PluginExecutionError(msg)
- stream = self.template.generate(metadata=metadata).filter(
+ stream = self.template.generate(metadata=metadata,
+ repo=SETUP['repo']).filter(
Bcfg2.Server.Plugins.TGenshi.removecomment)
data = lxml.etree.XML(stream.render('xml',
strip_whitespace=False),
@@ -93,8 +97,13 @@ class Bundler(Bcfg2.Server.Plugin.Plugin,
self.data,
self.core.fam)
except OSError:
- self.logger.error("Failed to load Bundle repository")
- raise Bcfg2.Server.Plugin.PluginInitError
+ err = sys.exc_info()[1]
+ msg = "Failed to load Bundle repository %s: %s" % (self.data, err)
+ self.logger.error(msg)
+ raise Bcfg2.Server.Plugin.PluginInitError(msg)
+
+ global SETUP
+ SETUP = core.setup
def template_dispatch(self, name, _):
""" Add the correct child entry type to Bundler depending on
diff --git a/src/lib/Bcfg2/Server/Plugins/Cfg/CfgCheetahGenerator.py b/src/lib/Bcfg2/Server/Plugins/Cfg/CfgCheetahGenerator.py
index 8ebd8d921..724164cf5 100644
--- a/src/lib/Bcfg2/Server/Plugins/Cfg/CfgCheetahGenerator.py
+++ b/src/lib/Bcfg2/Server/Plugins/Cfg/CfgCheetahGenerator.py
@@ -3,7 +3,7 @@
:ref:`server-plugins-generators-cfg` files. """
from Bcfg2.Server.Plugin import PluginExecutionError
-from Bcfg2.Server.Plugins.Cfg import CfgGenerator
+from Bcfg2.Server.Plugins.Cfg import CfgGenerator, SETUP
try:
from Cheetah.Template import Template
@@ -37,7 +37,9 @@ class CfgCheetahGenerator(CfgGenerator):
template = Template(self.data.decode(self.encoding),
compilerSettings=self.settings)
template.metadata = metadata
+ template.name = entry.get('realname', entry.get('name'))
template.path = entry.get('realname', entry.get('name'))
template.source_path = self.name
+ template.repo = SETUP['repo']
return template.respond()
get_data.__doc__ = CfgGenerator.get_data.__doc__
diff --git a/src/lib/Bcfg2/Server/Plugins/Cfg/CfgGenshiGenerator.py b/src/lib/Bcfg2/Server/Plugins/Cfg/CfgGenshiGenerator.py
index df0c30c09..3a78b4847 100644
--- a/src/lib/Bcfg2/Server/Plugins/Cfg/CfgGenshiGenerator.py
+++ b/src/lib/Bcfg2/Server/Plugins/Cfg/CfgGenshiGenerator.py
@@ -6,7 +6,7 @@ import re
import sys
import traceback
from Bcfg2.Server.Plugin import PluginExecutionError
-from Bcfg2.Server.Plugins.Cfg import CfgGenerator
+from Bcfg2.Server.Plugins.Cfg import CfgGenerator, SETUP
try:
import genshi.core
@@ -74,7 +74,9 @@ class CfgGenshiGenerator(CfgGenerator):
stream = \
self.template.generate(name=fname,
metadata=metadata,
- path=self.name).filter(removecomment)
+ path=self.name,
+ source_path=self.name,
+ repo=SETUP['repo']).filter(removecomment)
try:
try:
return stream.render('text', encoding=self.encoding,
@@ -135,9 +137,13 @@ class CfgGenshiGenerator(CfgGenerator):
# single line break)
real_lineno = lineno - contents.code.co_firstlineno
src = re.sub(r'\n\n+', '\n', contents.source).splitlines()
- raise PluginExecutionError("%s: %s at '%s'" %
- (err.__class__.__name__, err,
- src[real_lineno]))
+ try:
+ raise PluginExecutionError("%s: %s at '%s'" %
+ (err.__class__.__name__, err,
+ src[real_lineno]))
+ except IndexError:
+ raise PluginExecutionError("%s: %s" %
+ (err.__class__.__name__, err))
raise
def handle_event(self, event):
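
The new try/except around src[real_lineno] covers the case where the recomputed template line number falls outside the deduplicated source listing, so the error report degrades to the bare exception instead of raising IndexError from inside the error handler. A small self-contained sketch of that guarded lookup (the exception class here is a local stand-in):

class PluginExecutionError(Exception):
    pass

def report_template_error(err, src, real_lineno):
    """Prefer quoting the offending template line; fall back to just the
    exception if the computed line number is out of range."""
    try:
        raise PluginExecutionError("%s: %s at '%s'"
                                   % (err.__class__.__name__, err,
                                      src[real_lineno]))
    except IndexError:
        raise PluginExecutionError("%s: %s" % (err.__class__.__name__, err))

try:
    report_template_error(ValueError("bad value"), ["line one"], 5)
except PluginExecutionError as perr:
    print(perr)            # "ValueError: bad value" -- no line quote available
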
diff --git a/src/lib/Bcfg2/Server/Plugins/Packages/Yum.py b/src/lib/Bcfg2/Server/Plugins/Packages/Yum.py
index 220146100..37171e1b1 100644
--- a/src/lib/Bcfg2/Server/Plugins/Packages/Yum.py
+++ b/src/lib/Bcfg2/Server/Plugins/Packages/Yum.py
@@ -536,22 +536,37 @@ class YumCollection(Collection):
consumerapi = ConsumerAPI()
consumer = self._get_pulp_consumer(consumerapi=consumerapi)
if consumer is None:
- consumer = consumerapi.create(self.metadata.hostname,
- self.metadata.hostname,
- capabilities=dict(bind=False))
- lxml.etree.SubElement(independent, "BoundAction",
- name="pulp-update", timing="pre",
- when="always", status="check",
- command="pulp-consumer consumer update")
- self.pulp_cert_set.write_data(consumer['certificate'],
- self.metadata)
+ try:
+ consumer = \
+ consumerapi.create(self.metadata.hostname,
+ self.metadata.hostname,
+ capabilities=dict(bind=False))
+ lxml.etree.SubElement(
+ independent, "BoundAction", name="pulp-update",
+ timing="pre", when="always", status="check",
+ command="pulp-consumer consumer update")
+ self.pulp_cert_set.write_data(consumer['certificate'],
+ self.metadata)
+ except server.ServerRequestError:
+ err = sys.exc_info()[1]
+ self.logger.error("Packages: Could not create Pulp "
+ "consumer %s: %s" %
+ (self.metadata.hostname, err))
for source in self:
# each pulp source can only have one arch, so we don't
# have to check the arch in url_map
if (source.pulp_id and
source.pulp_id not in consumer['repoids']):
- consumerapi.bind(self.metadata.hostname, source.pulp_id)
+ try:
+ consumerapi.bind(self.metadata.hostname,
+ source.pulp_id)
+ except server.ServerRequestError:
+ err = sys.exc_info()[1]
+ self.logger.error("Packages: Could not bind %s to "
+ "Pulp repo %s: %s" %
+ (self.metadata.hostname,
+ source.pulp_id, err))
crt = lxml.etree.SubElement(independent, "BoundPath",
name=self.pulp_cert_set.certpath)
diff --git a/src/lib/Bcfg2/Server/Plugins/Probes.py b/src/lib/Bcfg2/Server/Plugins/Probes.py
index 90dff4a66..f106b75a4 100644
--- a/src/lib/Bcfg2/Server/Plugins/Probes.py
+++ b/src/lib/Bcfg2/Server/Plugins/Probes.py
@@ -162,6 +162,9 @@ class ProbeSet(Bcfg2.Server.Plugin.EntrySet):
ret.append(probe)
return ret
+ def __str__(self):
+ return "ProbeSet for %s" % self.plugin_name
+
class Probes(Bcfg2.Server.Plugin.Probing,
Bcfg2.Server.Plugin.Connector,
diff --git a/src/lib/Bcfg2/Server/Plugins/Reporting.py b/src/lib/Bcfg2/Server/Plugins/Reporting.py
index 1a8c3d941..60f5b1e09 100644
--- a/src/lib/Bcfg2/Server/Plugins/Reporting.py
+++ b/src/lib/Bcfg2/Server/Plugins/Reporting.py
@@ -7,8 +7,8 @@ import lxml.etree
from Bcfg2.Reporting.Transport import load_transport_from_config, \
TransportError
from Bcfg2.Options import REPORTING_COMMON_OPTIONS
-from Bcfg2.Server.Plugin import Statistics, PullSource, PluginInitError, \
- PluginExecutionError
+from Bcfg2.Server.Plugin import Statistics, PullSource, Threaded, \
+ Debuggable, PluginInitError, PluginExecutionError
# required for reporting
try:
@@ -31,9 +31,10 @@ def _rpc_call(method):
return _real_rpc_call
-class Reporting(Statistics, PullSource): # pylint: disable=W0223
+# pylint: disable=W0223
+class Reporting(Statistics, Threaded, PullSource, Debuggable):
""" Unified statistics and reporting plugin """
- __rmi__ = ['Ping', 'GetExtra', 'GetCurrentEntry']
+ __rmi__ = Debuggable.__rmi__ + ['Ping', 'GetExtra', 'GetCurrentEntry']
CLIENT_METADATA_FIELDS = ('profile', 'bundles', 'aliases', 'addresses',
'groups', 'categories', 'uuid', 'version')
@@ -41,7 +42,8 @@ class Reporting(Statistics, PullSource): # pylint: disable=W0223
def __init__(self, core, datastore):
Statistics.__init__(self, core, datastore)
PullSource.__init__(self)
- self.core = core
+ Threaded.__init__(self)
+ Debuggable.__init__(self)
self.whoami = platform.node()
self.transport = None
@@ -54,14 +56,20 @@ class Reporting(Statistics, PullSource): # pylint: disable=W0223
self.logger.error(msg)
raise PluginInitError(msg)
+ def start_threads(self):
try:
- self.transport = load_transport_from_config(core.setup)
+ self.transport = load_transport_from_config(self.core.setup)
except TransportError:
msg = "%s: Failed to load transport: %s" % \
(self.name, traceback.format_exc().splitlines()[-1])
self.logger.error(msg)
raise PluginInitError(msg)
+ def set_debug(self, debug):
+ rv = Debuggable.set_debug(self, debug)
+ self.transport.set_debug(debug)
+ return rv
+
def process_statistics(self, client, xdata):
stats = xdata.find("Statistics")
stats.set('time', time.asctime(time.localtime()))
@@ -84,8 +92,8 @@ class Reporting(Statistics, PullSource): # pylint: disable=W0223
lxml.etree.tostring(
stats,
xml_declaration=False).decode('UTF-8'))
- self.logger.debug("%s: Queued statistics data for %s" %
- (self.__class__.__name__, client.hostname))
+ self.debug_log("%s: Queued statistics data for %s" %
+ (self.__class__.__name__, client.hostname))
return
except TransportError:
continue
@@ -94,7 +102,7 @@ class Reporting(Statistics, PullSource): # pylint: disable=W0223
% (self.__class__.__name__, i,
traceback.format_exc().splitlines()[-1]))
self.logger.error("%s: Retry limit reached for %s" %
- (self.__class__.__name__, client.hostname))
+ (self.__class__.__name__, client.hostname))
def shutdown(self):
super(Reporting, self).shutdown()
diff --git a/src/lib/Bcfg2/Server/Plugins/SSLCA.py b/src/lib/Bcfg2/Server/Plugins/SSLCA.py
index 62396f860..b3a49c047 100644
--- a/src/lib/Bcfg2/Server/Plugins/SSLCA.py
+++ b/src/lib/Bcfg2/Server/Plugins/SSLCA.py
@@ -73,9 +73,8 @@ class SSLCA(Bcfg2.Server.Plugin.GroupSpool):
cert_spec.get('append_chain',
'false').lower() == 'true',
}
- cfp = ConfigParser.ConfigParser()
- cfp.read(self.core.cfile)
- self.CAs[ca] = dict(cfp.items('sslca_' + ca))
+ self.CAs[ca] = dict(self.core.setup.cfp.items('sslca_%s' %
+ ca))
self.Entries['Path'][ident] = self.get_cert
elif event.filename.endswith("info.xml"):
self.infoxml[ident] = Bcfg2.Server.Plugin.InfoXML(epath)
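
SSLCA stops re-reading the config file with a fresh ConfigParser on every event and instead pulls the already-parsed options from self.core.setup.cfp. A tiny standard-library sketch of the items-to-dict step it relies on (the section and option names are examples):

try:
    import configparser                     # Python 3
except ImportError:
    import ConfigParser as configparser     # Python 2

cfp = configparser.ConfigParser()
cfp.add_section('sslca_default')
cfp.set('sslca_default', 'config', '/etc/pki/tls/openssl.cnf')
cfp.set('sslca_default', 'passphrase', 'example')

# dict(cfp.items(section)) turns one CA's options into a plain dict,
# which is what the plugin stores in self.CAs[ca].
ca_options = dict(cfp.items('sslca_default'))
print(ca_options['config'])
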
diff --git a/src/lib/Bcfg2/Server/Plugins/Snapshots.py b/src/lib/Bcfg2/Server/Plugins/Snapshots.py
index 1956af4ad..cc5946bb2 100644
--- a/src/lib/Bcfg2/Server/Plugins/Snapshots.py
+++ b/src/lib/Bcfg2/Server/Plugins/Snapshots.py
@@ -65,6 +65,8 @@ class Snapshots(Bcfg2.Server.Plugin.Statistics):
self.session = Bcfg2.Server.Snapshots.setup_session(core.cfile)
self.work_queue = Queue()
self.loader = threading.Thread(target=self.load_snapshot)
+
+ def start_threads(self):
self.loader.start()
def load_snapshot(self):
diff --git a/src/lib/Bcfg2/Server/Plugins/Svn.py b/src/lib/Bcfg2/Server/Plugins/Svn.py
index 17a275340..bc585570d 100644
--- a/src/lib/Bcfg2/Server/Plugins/Svn.py
+++ b/src/lib/Bcfg2/Server/Plugins/Svn.py
@@ -17,10 +17,9 @@ except ImportError:
class Svn(Bcfg2.Server.Plugin.Version):
"""Svn is a version plugin for dealing with Bcfg2 repos."""
__author__ = 'bcfg-dev@mcs.anl.gov'
+ __vcs_metadata_path__ = ".svn"
if HAS_SVN:
__rmi__ = Bcfg2.Server.Plugin.Version.__rmi__ + ['Update', 'Commit']
- else:
- __vcs_metadata_path__ = ".svn"
def callback_conflict_resolver(self):
"""PySvn callback function to resolve conflicts"""
diff --git a/src/sbin/bcfg2-info b/src/sbin/bcfg2-info
index acb9e4f44..fa8c89b46 100755
--- a/src/sbin/bcfg2-info
+++ b/src/sbin/bcfg2-info
@@ -27,12 +27,6 @@ try:
except ImportError:
HAS_PROFILE = False
-try:
- from Bcfg2.Server.Plugins.Bundler import BundleTemplateFile
- HAS_GENSHI = True
-except ImportError:
- HAS_GENSHI = False
-
class MockLog(object):
""" Fake logger that just discards all messages in order to mask
@@ -401,28 +395,24 @@ Bcfg2 client itself.""")
def do_buildbundle(self, args):
""" buildbundle <bundle> <hostname> - Render a templated
bundle for hostname (not written to disk) """
- if len(args.split()) == 2:
- bname, client = args.split()
- try:
- metadata = self.build_metadata(client)
- if bname in self.plugins['Bundler'].entries:
- bundle = self.plugins['Bundler'].entries[bname]
- if (HAS_GENSHI and
- isinstance(bundle,
- BundleTemplateFile)):
- stream = bundle.template.generate(metadata=metadata)
- print(stream.render("xml"))
- else:
- print(bundle.data)
- else:
- print("No such bundle %s" % bname)
- except: # pylint: disable=W0702
- err = sys.exc_info()[1]
- print("Failed to render bundle %s for host %s: %s" % (bname,
- client,
- err))
- else:
+ if len(args.split()) != 2:
print(self._get_usage(self.do_buildbundle))
+ return 1
+
+ bname, client = args.split()
+ try:
+ metadata = self.build_metadata(client)
+ bundle = self.plugins['Bundler'].entries[bname]
+ print(lxml.etree.tostring(bundle.get_xml_value(metadata),
+ xml_declaration=False,
+ pretty_print=True).decode('UTF-8'))
+ except KeyError:
+ print("No such bundle %s" % bname)
+ except: # pylint: disable=W0702
+ err = sys.exc_info()[1]
+ print("Failed to render bundle %s for host %s: %s" % (bname,
+ client,
+ err))
def do_automatch(self, args):
""" automatch [-f] <propertyfile> <hostname> - Perform automatch on