author     Narayan Desai <desai@mcs.anl.gov>  2011-05-10 11:24:28 -0500
committer  Narayan Desai <desai@mcs.anl.gov>  2011-05-10 11:24:28 -0500
commit     0e75875e9bd9900a6a3c7ab118c448e48829eaef (patch)
tree       391204747f48598c4e978d3724afbd5b8aa1d12c /src
parent     f2d218ccd2de93ef639347933ba127ef081b4401 (diff)
parent     91634f9a3b888eee3cd5f9a777fcb075fc666c9a (diff)
download   bcfg2-0e75875e9bd9900a6a3c7ab118c448e48829eaef.tar.gz
           bcfg2-0e75875e9bd9900a6a3c7ab118c448e48829eaef.tar.bz2
           bcfg2-0e75875e9bd9900a6a3c7ab118c448e48829eaef.zip
Merge branch 'master' of git.mcs.anl.gov:bcfg2
Diffstat (limited to 'src')
-rw-r--r--  src/lib/Bcfg2Py3Incompat.py | 2
-rw-r--r--  src/lib/Bcfg2Py3k.py | 81
-rw-r--r--  src/lib/Client/Frame.py | 33
-rw-r--r--  src/lib/Client/Tools/APT.py | 6
-rw-r--r--  src/lib/Client/Tools/Action.py | 7
-rw-r--r--  src/lib/Client/Tools/Chkconfig.py | 5
-rw-r--r--  src/lib/Client/Tools/DebInit.py | 9
-rw-r--r--  src/lib/Client/Tools/POSIX.py | 29
-rw-r--r--  src/lib/Client/Tools/Pacman.py | 2
-rw-r--r--  src/lib/Client/Tools/RcUpdate.py | 5
-rw-r--r--  src/lib/Client/Tools/SMF.py | 5
-rw-r--r--  src/lib/Client/Tools/Systemd.py | 59
-rw-r--r--  src/lib/Client/Tools/Upstart.py | 9
-rw-r--r--  src/lib/Client/Tools/VCS.py | 137
-rw-r--r--  src/lib/Client/Tools/YUM24.py | 54
-rw-r--r--  src/lib/Client/Tools/YUMng.py | 35
-rw-r--r--  src/lib/Client/Tools/__init__.py | 73
-rw-r--r--  src/lib/Client/Tools/launchd.py | 5
-rw-r--r--  src/lib/Component.py | 61
-rw-r--r--  src/lib/Logger.py | 73
-rw-r--r--  src/lib/Options.py | 14
-rw-r--r--  src/lib/Proxy.py | 86
-rw-r--r--  src/lib/SSLServer.py | 52
-rw-r--r--  src/lib/Server/Admin/Backup.py | 3
-rw-r--r--  src/lib/Server/Admin/Bundle.py | 50
-rw-r--r--  src/lib/Server/Admin/Client.py | 15
-rw-r--r--  src/lib/Server/Admin/Compare.py | 59
-rw-r--r--  src/lib/Server/Admin/Group.py | 15
-rw-r--r--  src/lib/Server/Admin/Init.py | 120
-rw-r--r--  src/lib/Server/Admin/Perf.py | 11
-rw-r--r--  src/lib/Server/Admin/Pull.py | 46
-rw-r--r--  src/lib/Server/Admin/Query.py | 18
-rw-r--r--  src/lib/Server/Admin/Reports.py | 109
-rw-r--r--  src/lib/Server/Admin/Snapshots.py | 8
-rw-r--r--  src/lib/Server/Admin/Tidy.py | 14
-rw-r--r--  src/lib/Server/Admin/Viz.py | 16
-rw-r--r--  src/lib/Server/Admin/Xcmd.py | 25
-rw-r--r--  src/lib/Server/Admin/__init__.py | 31
-rw-r--r--  src/lib/Server/Core.py | 81
-rw-r--r--  src/lib/Server/FileMonitor.py | 18
-rw-r--r--  src/lib/Server/Hostbase/ldapauth.py | 82
-rw-r--r--  src/lib/Server/Hostbase/media/base.css | 10
-rw-r--r--  src/lib/Server/Hostbase/media/global.css | 16
-rw-r--r--  src/lib/Server/Hostbase/media/layout.css | 124
-rw-r--r--  src/lib/Server/Hostbase/settings.py | 2
-rw-r--r--  src/lib/Server/Lint/Bundles.py | 64
-rw-r--r--  src/lib/Server/Lint/Comments.py | 188
-rw-r--r--  src/lib/Server/Lint/Duplicates.py | 82
-rw-r--r--  src/lib/Server/Lint/InfoXML.py | 43
-rw-r--r--  src/lib/Server/Lint/Pkgmgr.py | 38
-rw-r--r--  src/lib/Server/Lint/RequiredAttrs.py | 72
-rw-r--r--  src/lib/Server/Lint/Validate.py | 186
-rw-r--r--  src/lib/Server/Lint/__init__.py | 155
-rw-r--r--  src/lib/Server/Plugin.py | 169
-rw-r--r--  src/lib/Server/Plugins/Account.py | 38
-rw-r--r--  src/lib/Server/Plugins/Base.py | 12
-rw-r--r--  src/lib/Server/Plugins/Bundler.py | 6
-rw-r--r--  src/lib/Server/Plugins/Cfg.py | 63
-rw-r--r--  src/lib/Server/Plugins/DBStats.py | 6
-rw-r--r--  src/lib/Server/Plugins/Decisions.py | 3
-rw-r--r--  src/lib/Server/Plugins/Deps.py | 21
-rw-r--r--  src/lib/Server/Plugins/Editor.py | 20
-rw-r--r--  src/lib/Server/Plugins/GroupPatterns.py | 16
-rw-r--r--  src/lib/Server/Plugins/Hostbase.py | 104
-rw-r--r--  src/lib/Server/Plugins/Ldap.py | 14
-rw-r--r--  src/lib/Server/Plugins/Metadata.py | 79
-rw-r--r--  src/lib/Server/Plugins/NagiosGen.py | 209
-rw-r--r--  src/lib/Server/Plugins/Ohai.py | 8
-rw-r--r--  src/lib/Server/Plugins/Packages.py | 83
-rw-r--r--  src/lib/Server/Plugins/Pkgmgr.py | 49
-rw-r--r--  src/lib/Server/Plugins/Probes.py | 16
-rw-r--r--  src/lib/Server/Plugins/Properties.py | 49
-rw-r--r--  src/lib/Server/Plugins/SGenshi.py | 10
-rw-r--r--  src/lib/Server/Plugins/SSHbase.py | 38
-rw-r--r--  src/lib/Server/Plugins/SSLCA.py | 49
-rw-r--r--  src/lib/Server/Plugins/Snapshots.py | 45
-rw-r--r--  src/lib/Server/Plugins/Statistics.py | 4
-rw-r--r--  src/lib/Server/Plugins/Svn2.py | 44
-rw-r--r--  src/lib/Server/Plugins/TCheetah.py | 8
-rw-r--r--  src/lib/Server/Plugins/TGenshi.py | 21
-rw-r--r--  src/lib/Server/Reports/backends.py | 27
-rwxr-xr-x  src/lib/Server/Reports/importscript.py | 138
-rwxr-xr-x  src/lib/Server/Reports/manage.py | 2
-rw-r--r--  src/lib/Server/Reports/nisauth.py | 19
-rw-r--r--  src/lib/Server/Reports/reports/models.py | 108
-rw-r--r--  src/lib/Server/Reports/reports/templates/base.html | 2
-rw-r--r--  src/lib/Server/Reports/reports/templatetags/bcfg2_tags.py | 20
-rw-r--r--  src/lib/Server/Reports/reports/templatetags/syntax_coloring.py | 13
-rw-r--r--  src/lib/Server/Reports/reports/views.py | 168
-rw-r--r--  src/lib/Server/Reports/settings.py | 54
-rw-r--r--  src/lib/Server/Reports/updatefix.py | 33
-rwxr-xr-x  src/lib/Server/Reports/utils.py | 22
-rw-r--r--  src/lib/Server/Snapshots/__init__.py | 5
-rw-r--r--  src/lib/Server/Snapshots/model.py | 112
-rw-r--r--  src/lib/Statistics.py | 2
-rwxr-xr-x  src/sbin/bcfg2 | 29
-rwxr-xr-x  src/sbin/bcfg2-admin | 10
-rwxr-xr-x  src/sbin/bcfg2-build-reports | 62
-rwxr-xr-x  src/sbin/bcfg2-info | 108
-rwxr-xr-x  src/sbin/bcfg2-lint | 194
-rwxr-xr-x  src/sbin/bcfg2-ping-sweep | 23
l--------- [-rwxr-xr-x]  src/sbin/bcfg2-repo-validate | 228
-rwxr-xr-x  src/sbin/bcfg2-reports | 24
-rwxr-xr-x  src/sbin/bcfg2-server | 3
104 files changed, 3500 insertions, 1633 deletions
diff --git a/src/lib/Bcfg2Py3Incompat.py b/src/lib/Bcfg2Py3Incompat.py
new file mode 100644
index 000000000..6b66e72b0
--- /dev/null
+++ b/src/lib/Bcfg2Py3Incompat.py
@@ -0,0 +1,2 @@
+def fprint(s, f):
+ print(s, file=f)
diff --git a/src/lib/Bcfg2Py3k.py b/src/lib/Bcfg2Py3k.py
new file mode 100644
index 000000000..c9e48a49b
--- /dev/null
+++ b/src/lib/Bcfg2Py3k.py
@@ -0,0 +1,81 @@
+import sys
+
+try:
+ from email.Utils import formatdate
+except ImportError:
+ from email.utils import formatdate
+
+# urllib imports
+try:
+ from urlparse import urljoin, urlparse
+ from urllib2 import HTTPBasicAuthHandler
+ from urllib2 import HTTPPasswordMgrWithDefaultRealm
+ from urllib2 import build_opener
+ from urllib2 import install_opener
+ from urllib import urlopen
+ from urllib2 import HTTPError
+except ImportError:
+ from urllib.parse import urljoin, urlparse
+ from urllib.request import HTTPBasicAuthHandler
+ from urllib.request import HTTPPasswordMgrWithDefaultRealm
+ from urllib.request import build_opener
+ from urllib.request import install_opener
+ from urllib.request import urlopen
+ from urllib.error import HTTPError
+
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from io import StringIO
+
+try:
+ import ConfigParser
+except ImportError:
+ import configparser as ConfigParser
+
+try:
+ import cPickle
+except ImportError:
+ import pickle as cPickle
+
+try:
+ from Queue import Queue, Empty, Full
+except ImportError:
+ from queue import Queue, Empty, Full
+
+# xmlrpc imports
+try:
+ import xmlrpclib, SimpleXMLRPCServer
+except ImportError:
+ import xmlrpc.client as xmlrpclib
+ import xmlrpc.server as SimpleXMLRPCServer
+
+# socketserver import
+try:
+ import SocketServer
+except ImportError:
+ import socketserver as SocketServer
+
+# httplib imports
+try:
+ import httplib
+except ImportError:
+ import http.client as httplib
+
+# print to file compatibility
+def u_str(string):
+ if sys.hexversion >= 0x03000000:
+ return string
+ else:
+ return unicode(string)
+
+"""
+In order to use the new syntax for printing to a file, we need to do
+a conditional import because there is a syntax incompatibility between
+the two versions of python.
+"""
+if sys.hexversion >= 0x03000000:
+ from Bcfg2.Bcfg2Py3Incompat import fprint
+else:
+ def fprint(s, f):
+ print >> f, s
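Note (illustrative, not part of the commit): Bcfg2Py3k.py centralizes the try/except imports for stdlib modules that were renamed between Python 2 and Python 3. The rest of this merge switches callers to import through the shim, roughly along these lines (the config path below is only an example):

from Bcfg2.Bcfg2Py3k import ConfigParser, fprint

# ConfigParser resolves to the Python 2 ConfigParser module or the Python 3
# configparser module; fprint writes one line to an open file on both.
config = ConfigParser.ConfigParser()
config.read('/etc/bcfg2.conf')  # example path; missing files are ignored

out = open('/tmp/bcfg2-example.log', 'w')
fprint("read %d sections" % len(config.sections()), out)
out.close()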
diff --git a/src/lib/Client/Frame.py b/src/lib/Client/Frame.py
index 545d4b584..60d158eb1 100644
--- a/src/lib/Client/Frame.py
+++ b/src/lib/Client/Frame.py
@@ -8,6 +8,7 @@ import logging
import time
import Bcfg2.Client.Tools
+
def cmpent(ent1, ent2):
"""Sort entries."""
if ent1.tag != ent2.tag:
@@ -15,6 +16,7 @@ def cmpent(ent1, ent2):
else:
return cmp(ent1.get('name'), ent2.get('name'))
+
def promptFilter(prompt, entries):
"""Filter a supplied list based on user input."""
ret = []
@@ -25,7 +27,12 @@ def promptFilter(prompt, entries):
else:
iprompt = prompt % (entry.tag, entry.get('name'))
try:
- if raw_input(iprompt) in ['y', 'Y']:
+ # py3k compatibility
+ try:
+ ans = raw_input(iprompt)
+ except NameError:
+ ans = input(iprompt)
+ if ans in ['y', 'Y']:
ret.append(entry)
except EOFError:
# python 2.4.3 on CentOS doesn't like ^C for some reason
@@ -35,6 +42,7 @@ def promptFilter(prompt, entries):
continue
return ret
+
def matches_entry(entryspec, entry):
# both are (tag, name)
if entryspec == entry:
@@ -52,11 +60,16 @@ def matches_entry(entryspec, entry):
return False
return True
+
def matches_white_list(entry, whitelist):
- return True in [matches_entry(we, (entry.tag, entry.get('name'))) for we in whitelist]
+ return True in [matches_entry(we, (entry.tag, entry.get('name')))
+ for we in whitelist]
+
def passes_black_list(entry, blacklist):
- return True not in [matches_entry(be, (entry.tag, entry.get('name'))) for be in blacklist]
+ return True not in [matches_entry(be, (entry.tag, entry.get('name')))
+ for be in blacklist]
+
class Frame:
"""Frame is the container for all Tool objects and state information."""
@@ -134,8 +147,10 @@ class Frame:
self.logger.error(["%s:%s:%s" % (entry.tag, entry.get('type'), \
entry.get('name')) for entry in problems])
self.logger.error("")
- entries = [(entry.tag, entry.get('name')) for struct in config for entry in struct]
- pkgs = [(entry.get('name'), entry.get('origin')) for struct in config for entry in struct if entry.tag == 'Package']
+ entries = [(entry.tag, entry.get('name'))
+ for struct in config for entry in struct]
+ pkgs = [(entry.get('name'), entry.get('origin'))
+ for struct in config for entry in struct if entry.tag == 'Package']
multi = []
for entry in entries[:]:
if entries.count(entry) > 1:
@@ -151,7 +166,6 @@ class Frame:
self.logger.debug("The following packages are prereqs added by Packages:")
self.logger.debug([pkg[0] for pkg in pkgs if pkg[1] == 'Packages'])
-
def __getattr__(self, name):
if name in ['extra', 'handled', 'modified', '__important__']:
ret = []
@@ -186,10 +200,10 @@ class Frame:
if self.setup['remove']:
if self.setup['remove'] == 'all':
self.removal = self.extra
- elif self.setup['remove'] == 'services':
+ elif self.setup['remove'] in ['services', 'Services']:
self.removal = [entry for entry in self.extra \
if entry.tag == 'Service']
- elif self.setup['remove'] == 'packages':
+ elif self.setup['remove'] in ['packages', 'Packages']:
self.removal = [entry for entry in self.extra \
if entry.tag == 'Package']
@@ -268,7 +282,8 @@ class Frame:
if b_to_remv:
self.logger.info("Not installing entries from Bundle %s" % \
(bundle.get('name')))
- self.logger.info(["%s:%s" % (e.tag, e.get('name')) for e in b_to_remv])
+ self.logger.info(["%s:%s" % (e.tag, e.get('name'))
+ for e in b_to_remv])
[self.whitelist.remove(ent) for ent in b_to_remv]
if self.setup['interactive']:
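Note (illustrative, not part of the commit): the raw_input()/input() fallback added to promptFilter() above recurs in Action.py and Tools/__init__.py below. A minimal standalone sketch of the pattern:

def confirm(prompt):
    """Ask a y/N question in a way that works on Python 2 and 3."""
    try:
        ans = raw_input(prompt)   # Python 2
    except NameError:
        ans = input(prompt)       # Python 3 renamed raw_input() to input()
    return ans in ['y', 'Y']

if confirm('Install this entry? (y/N): '):
    print("installing")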
diff --git a/src/lib/Client/Tools/APT.py b/src/lib/Client/Tools/APT.py
index fe1ef6fdd..a838f5e27 100644
--- a/src/lib/Client/Tools/APT.py
+++ b/src/lib/Client/Tools/APT.py
@@ -69,7 +69,11 @@ class APT(Bcfg2.Client.Tools.Tool):
if self.setup['kevlar'] and not self.setup['dryrun']:
self.cmd.run("%s --force-confold --configure --pending" % DPKG)
self.cmd.run("%s clean" % APTGET)
- self.pkg_cache = apt.cache.Cache()
+ try:
+ self.pkg_cache = apt.cache.Cache()
+ except SystemError, e:
+ self.logger.info("Failed to initialize APT cache: %s" % e)
+ raise Bcfg2.Client.Tools.toolInstantiationError
self.pkg_cache.update()
self.pkg_cache = apt.cache.Cache()
diff --git a/src/lib/Client/Tools/Action.py b/src/lib/Client/Tools/Action.py
index 452788f94..bc57a0e27 100644
--- a/src/lib/Client/Tools/Action.py
+++ b/src/lib/Client/Tools/Action.py
@@ -31,7 +31,12 @@ class Action(Bcfg2.Client.Tools.Tool):
if self.setup['interactive']:
prompt = ('Run Action %s, %s: (y/N): ' %
(entry.get('name'), entry.get('command')))
- if raw_input(prompt) not in ['y', 'Y']:
+ # py3k compatibility
+ try:
+ ans = raw_input(prompt)
+ except NameError:
+ ans = input(prompt)
+ if ans not in ['y', 'Y']:
return False
if self.setup['servicemode'] == 'build':
if entry.get('build', 'true') == 'false':
diff --git a/src/lib/Client/Tools/Chkconfig.py b/src/lib/Client/Tools/Chkconfig.py
index bf2a2c1e1..0c78b0fb5 100644
--- a/src/lib/Client/Tools/Chkconfig.py
+++ b/src/lib/Client/Tools/Chkconfig.py
@@ -76,6 +76,11 @@ class Chkconfig(Bcfg2.Client.Tools.SvcTool):
def InstallService(self, entry):
"""Install Service entry."""
+ # don't take any actions for mode='manual'
+ if entry.get('mode', 'default') == 'manual':
+ self.logger.info("Service %s mode set to manual. Skipping "
+ "installation." % (entry.get('name')))
+ return False
rcmd = "/sbin/chkconfig %s %s"
self.cmd.run("/sbin/chkconfig --add %s"%(entry.attrib['name']))
self.logger.info("Installing Service %s" % (entry.get('name')))
diff --git a/src/lib/Client/Tools/DebInit.py b/src/lib/Client/Tools/DebInit.py
index 119036b32..d6ce16c52 100644
--- a/src/lib/Client/Tools/DebInit.py
+++ b/src/lib/Client/Tools/DebInit.py
@@ -21,6 +21,10 @@ class DebInit(Bcfg2.Client.Tools.SvcTool):
# implement entry (Verify|Install) ops
def VerifyService(self, entry, _):
"""Verify Service status for entry."""
+
+ if entry.get('status') == 'ignore':
+ return True
+
rawfiles = glob.glob("/etc/rc*.d/[SK]*%s" % (entry.get('name')))
files = []
@@ -71,6 +75,11 @@ class DebInit(Bcfg2.Client.Tools.SvcTool):
def InstallService(self, entry):
"""Install Service for entry."""
+ # don't take any actions for mode='manual'
+ if entry.get('mode', 'default') == 'manual':
+ self.logger.info("Service %s mode set to manual. Skipping "
+ "installation." % (entry.get('name')))
+ return False
self.logger.info("Installing Service %s" % (entry.get('name')))
try:
os.stat('/etc/init.d/%s' % entry.get('name'))
diff --git a/src/lib/Client/Tools/POSIX.py b/src/lib/Client/Tools/POSIX.py
index c883fc17a..af3d1a473 100644
--- a/src/lib/Client/Tools/POSIX.py
+++ b/src/lib/Client/Tools/POSIX.py
@@ -137,14 +137,14 @@ class POSIX(Bcfg2.Client.Tools.Tool):
entry.get('owner') == None or \
entry.get('group') == None:
self.logger.error('Entry %s not completely specified. '
- 'Try running bcfg2-repo-validate.' % (entry.get('name')))
+ 'Try running bcfg2-lint.' % (entry.get('name')))
return False
if entry.get('dev_type') in ['block', 'char']:
# check if major/minor are properly specified
if entry.get('major') == None or \
entry.get('minor') == None:
self.logger.error('Entry %s not completely specified. '
- 'Try running bcfg2-repo-validate.' % (entry.get('name')))
+ 'Try running bcfg2-lint.' % (entry.get('name')))
return False
try:
# check for file existence
@@ -167,7 +167,7 @@ class POSIX(Bcfg2.Client.Tools.Tool):
if entry.get('major') == None or \
entry.get('minor') == None:
self.logger.error('Entry %s not completely specified. '
- 'Try running bcfg2-repo-validate.' % (entry.get('name')))
+ 'Try running bcfg2-lint.' % (entry.get('name')))
return False
major = int(entry.get('major'))
minor = int(entry.get('minor'))
@@ -218,7 +218,7 @@ class POSIX(Bcfg2.Client.Tools.Tool):
if entry.get('major') == None or \
entry.get('minor') == None:
self.logger.error('Entry %s not completely specified. '
- 'Try running bcfg2-repo-validate.' % (entry.get('name')))
+ 'Try running bcfg2-lint.' % (entry.get('name')))
return False
major = int(entry.get('major'))
minor = int(entry.get('minor'))
@@ -240,7 +240,7 @@ class POSIX(Bcfg2.Client.Tools.Tool):
entry.get('owner') == None or \
entry.get('group') == None:
self.logger.error('Entry %s not completely specified. '
- 'Try running bcfg2-repo-validate.' % (entry.get('name')))
+ 'Try running bcfg2-lint.' % (entry.get('name')))
return False
while len(entry.get('perms', '')) < 4:
entry.set('perms', '0' + entry.get('perms', ''))
@@ -348,7 +348,7 @@ class POSIX(Bcfg2.Client.Tools.Tool):
entry.get('owner') == None or \
entry.get('group') == None:
self.logger.error('Entry %s not completely specified. '
- 'Try running bcfg2-repo-validate.' % \
+ 'Try running bcfg2-lint.' % \
(entry.get('name')))
return False
self.logger.info("Installing directory %s" % (entry.get('name')))
@@ -542,7 +542,7 @@ class POSIX(Bcfg2.Client.Tools.Tool):
return False
# If we get here, then the parent directory should exist
- if (entry.get("paranoid", False) == 'true') and \
+ if (entry.get("paranoid", False) in ['true', 'True']) and \
self.setup.get("paranoid", False) and not \
(entry.get('current_exists', 'true') == 'false'):
bkupnam = entry.get('name').replace('/', '_')
@@ -550,7 +550,7 @@ class POSIX(Bcfg2.Client.Tools.Tool):
bkuplist = [f for f in os.listdir(self.ppath) if
f.startswith(bkupnam)]
bkuplist.sort()
- if len(bkuplist) == int(self.max_copies):
+ while len(bkuplist) >= int(self.max_copies):
# remove the oldest backup available
oldest = bkuplist.pop(0)
self.logger.info("Removing %s" % oldest)
@@ -563,7 +563,8 @@ class POSIX(Bcfg2.Client.Tools.Tool):
try:
# backup existing file
shutil.copy(entry.get('name'),
- "%s/%s_%s" % (self.ppath, bkupnam, datetime.now()))
+ "%s/%s_%s" % (self.ppath, bkupnam,
+ datetime.isoformat(datetime.now())))
self.logger.info("Backup of %s saved to %s" %
(entry.get('name'), self.ppath))
except IOError, e:
@@ -613,7 +614,7 @@ class POSIX(Bcfg2.Client.Tools.Tool):
"""Verify HardLink entry."""
if entry.get('to') == None:
self.logger.error('Entry %s not completely specified. '
- 'Try running bcfg2-repo-validate.' % \
+ 'Try running bcfg2-lint.' % \
(entry.get('name')))
return False
try:
@@ -636,7 +637,7 @@ class POSIX(Bcfg2.Client.Tools.Tool):
"""Install HardLink entry."""
if entry.get('to') == None:
self.logger.error('Entry %s not completely specified. '
- 'Try running bcfg2-repo-validate.' % \
+ 'Try running bcfg2-lint.' % \
(entry.get('name')))
return False
self.logger.info("Installing Hardlink %s" % (entry.get('name')))
@@ -712,7 +713,7 @@ class POSIX(Bcfg2.Client.Tools.Tool):
entry.get('owner') == None or \
entry.get('group') == None:
self.logger.error('Entry %s not completely specified. '
- 'Try running bcfg2-repo-validate.' % (entry.get('name')))
+ 'Try running bcfg2-lint.' % (entry.get('name')))
return False
try:
os.chown(entry.get('name'), normUid(entry), normGid(entry))
@@ -727,7 +728,7 @@ class POSIX(Bcfg2.Client.Tools.Tool):
"""Verify Path type='symlink' entry."""
if entry.get('to') == None:
self.logger.error('Entry %s not completely specified. '
- 'Try running bcfg2-repo-validate.' % \
+ 'Try running bcfg2-lint.' % \
(entry.get('name')))
return False
try:
@@ -750,7 +751,7 @@ class POSIX(Bcfg2.Client.Tools.Tool):
"""Install Path type='symlink' entry."""
if entry.get('to') == None:
self.logger.error('Entry %s not completely specified. '
- 'Try running bcfg2-repo-validate.' % \
+ 'Try running bcfg2-lint.' % \
(entry.get('name')))
return False
self.logger.info("Installing symlink %s" % (entry.get('name')))
diff --git a/src/lib/Client/Tools/Pacman.py b/src/lib/Client/Tools/Pacman.py
index be3fb0c94..082897934 100644
--- a/src/lib/Client/Tools/Pacman.py
+++ b/src/lib/Client/Tools/Pacman.py
@@ -73,7 +73,7 @@ class Pacman(Bcfg2.Client.Tools.PkgTool):
for pkg in packages:
pkgline += " " + pkg.get('name')
- print "packages : " + pkgline
+ self.logger.info("packages : " + pkgline)
try:
self.logger.debug("Running : %s -S %s" % (self.pkgtool, pkgline))
diff --git a/src/lib/Client/Tools/RcUpdate.py b/src/lib/Client/Tools/RcUpdate.py
index 159172b78..d832d98a8 100644
--- a/src/lib/Client/Tools/RcUpdate.py
+++ b/src/lib/Client/Tools/RcUpdate.py
@@ -57,6 +57,11 @@ class RcUpdate(Bcfg2.Client.Tools.SvcTool):
In supervised mode we also take care it's (not) running.
"""
+ # don't take any actions for mode='manual'
+ if entry.get('mode', 'default') == 'manual':
+ self.logger.info("Service %s mode set to manual. Skipping "
+ "installation." % (entry.get('name')))
+ return False
self.logger.info('Installing Service %s' % entry.get('name'))
if entry.get('status') == 'on':
# make sure it's running if in supervised mode
diff --git a/src/lib/Client/Tools/SMF.py b/src/lib/Client/Tools/SMF.py
index 96c7d9d28..944408326 100644
--- a/src/lib/Client/Tools/SMF.py
+++ b/src/lib/Client/Tools/SMF.py
@@ -74,6 +74,11 @@ class SMF(Bcfg2.Client.Tools.SvcTool):
def InstallService(self, entry):
"""Install SMF Service entry."""
+ # don't take any actions for mode='manual'
+ if entry.get('mode', 'default') == 'manual':
+ self.logger.info("Service %s mode set to manual. Skipping "
+ "installation." % (entry.get('name')))
+ return False
self.logger.info("Installing Service %s" % (entry.get('name')))
if entry.get('status') == 'off':
if entry.get("FMRI").startswith('lrc'):
diff --git a/src/lib/Client/Tools/Systemd.py b/src/lib/Client/Tools/Systemd.py
new file mode 100644
index 000000000..e3f6a4169
--- /dev/null
+++ b/src/lib/Client/Tools/Systemd.py
@@ -0,0 +1,59 @@
+# This is the bcfg2 support for systemd
+
+"""This is systemd support."""
+
+import Bcfg2.Client.Tools
+import Bcfg2.Client.XML
+
+class Systemd(Bcfg2.Client.Tools.SvcTool):
+ """Systemd support for Bcfg2."""
+ name = 'Systemd'
+ __execs__ = ['/bin/systemctl']
+ __handles__ = [('Service', 'systemd')]
+ __req__ = {'Service': ['name', 'status']}
+
+ def get_svc_command(self, service, action):
+ return "/bin/systemctl %s %s.service" % (action, service.get('name'))
+
+ def VerifyService(self, entry, _):
+ """Verify Service status for entry."""
+ cmd = "/bin/systemctl status %s.service " % (entry.get('name'))
+ raw = ''.join(self.cmd.run(cmd)[1])
+
+ if raw.find('Loaded: error') >= 0:
+ entry.set('current_status', 'off')
+ status = False
+
+ elif raw.find('Active: active') >= 0:
+ entry.set('current_status', 'on')
+ if entry.get('status') == 'off':
+ status = False
+ else:
+ status = True
+
+ else:
+ entry.set('current_status', 'off')
+ if entry.get('status') == 'on':
+ status = False
+ else:
+ status = True
+
+ return status
+
+ def InstallService(self, entry):
+ """Install Service entry."""
+ # don't take any actions for mode = 'manual'
+ if entry.get('mode', 'default') == 'manual':
+ self.logger.info("Service %s mode set to manual. Skipping "
+ "installation." % (entry.get('name')))
+ return True
+
+ if entry.get('status') == 'on':
+ pstatus = self.cmd.run(self.get_svc_command(entry, 'enable'))[0]
+ pstatus = self.cmd.run(self.get_svc_command(entry, 'start'))[0]
+
+ else:
+ pstatus = self.cmd.run(self.get_svc_command(entry, 'stop'))[0]
+ pstatus = self.cmd.run(self.get_svc_command(entry, 'disable'))[0]
+
+ return not pstatus
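Note (illustrative, not part of the commit): the new Systemd tool drives services entirely through systemctl. A tiny sketch of the command strings its get_svc_command() produces, using a plain dict in place of an XML Service entry:

def get_svc_command(service, action):
    # mirrors Systemd.get_svc_command() above
    return "/bin/systemctl %s %s.service" % (action, service.get('name'))

entry = {'name': 'sshd', 'status': 'on'}  # hypothetical Service entry
print(get_svc_command(entry, 'enable'))   # /bin/systemctl enable sshd.service
print(get_svc_command(entry, 'start'))    # /bin/systemctl start sshd.service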
diff --git a/src/lib/Client/Tools/Upstart.py b/src/lib/Client/Tools/Upstart.py
index b75b0927e..41a585c23 100644
--- a/src/lib/Client/Tools/Upstart.py
+++ b/src/lib/Client/Tools/Upstart.py
@@ -29,6 +29,10 @@ class Upstart(Bcfg2.Client.Tools.SvcTool):
/etc/init/servicename.conf. All we need to do is make sure
the service is running when it should be.
"""
+
+ if entry.get('status') == 'ignore':
+ return True
+
if entry.get('parameters'):
params = entry.get('parameters')
else:
@@ -66,6 +70,11 @@ class Upstart(Bcfg2.Client.Tools.SvcTool):
def InstallService(self, entry):
"""Install Service for entry."""
+ # don't take any actions for mode='manual'
+ if entry.get('mode', 'default') == 'manual':
+ self.logger.info("Service %s mode set to manual. Skipping "
+ "installation." % (entry.get('name')))
+ return False
if entry.get('status') == 'on':
pstatus = self.cmd.run(self.get_svc_command(entry, 'start'))[0]
elif entry.get('status') == 'off':
diff --git a/src/lib/Client/Tools/VCS.py b/src/lib/Client/Tools/VCS.py
new file mode 100644
index 000000000..fa7748574
--- /dev/null
+++ b/src/lib/Client/Tools/VCS.py
@@ -0,0 +1,137 @@
+"""VCS support."""
+
+# TODO:
+# * git_write_index
+# * add svn support
+# * integrate properly with reports
+missing = []
+
+import os
+import sys
+# python-dulwich git imports
+try:
+ import dulwich
+ import dulwich.index
+ from dulwich.errors import NotGitRepository
+except:
+ missing.append('git')
+# subversion import
+try:
+ import pysvn
+except:
+ missing.append('svn')
+
+import Bcfg2.Client.Tools
+
+
+class VCS(Bcfg2.Client.Tools.Tool):
+ """VCS support."""
+ name = 'VCS'
+ __handles__ = [('Path', 'vcs')]
+ __req__ = {'Path': ['name',
+ 'type',
+ 'vcstype',
+ 'sourceurl',
+ 'revision']}
+
+ def __init__(self, logger, cfg, setup):
+ Bcfg2.Client.Tools.Tool.__init__(self, logger, cfg, setup)
+ self.cfg = cfg
+
+ def git_write_index(self, entry):
+ """Write the git index"""
+ pass
+
+ def Verifygit(self, entry, _):
+ """Verify git repositories"""
+ try:
+ repo = dulwich.repo.Repo(entry.get('name'))
+ except NotGitRepository:
+ self.logger.info("Repository %s does not exist" %
+ entry.get('name'))
+ return False
+ cur_rev = repo.head()
+
+ if cur_rev != entry.get('revision'):
+ self.logger.info("At revision %s need to go to revision %s" %
+ (cur_rev, entry.get('revision')))
+ return False
+
+ return True
+
+ def Installgit(self, entry):
+ """Checkout contents from a git repository"""
+ destname = entry.get('name')
+ destr = dulwich.repo.Repo.init(destname, mkdir=True)
+ cl, host_path = dulwich.client.get_transport_and_path(entry.get('sourceurl'))
+ remote_refs = cl.fetch(host_path,
+ destr,
+ determine_wants=destr.object_store.determine_wants_all,
+ progress=sys.stdout.write)
+ destr.refs['refs/heads/master'] = entry.get('revision')
+ dtree = destr[entry.get('revision')].tree
+ obj_store = destr.object_store
+ for fname, mode, sha in obj_store.iter_tree_contents(dtree):
+ fullpath = os.path.join(destname, fname)
+ try:
+ f = open(os.path.join(destname, fname), 'wb')
+ except IOError:
+ dir = os.path.split(fullpath)[0]
+ os.makedirs(dir)
+ f = open(os.path.join(destname, fname), 'wb')
+ f.write(destr[sha].data)
+ f.close()
+ os.chmod(os.path.join(destname, fname), mode)
+ return True
+ # FIXME: figure out how to write the git index properly
+ #iname = "%s/.git/index" % entry.get('name')
+ #f = open(iname, 'w+')
+ #entries = obj_store[sha].iteritems()
+ #try:
+ # dulwich.index.write_index(f, entries)
+ #finally:
+ # f.close()
+
+ def Verifysvn(self, entry, _):
+ """Verify svn repositories"""
+ client = pysvn.Client()
+ try:
+ cur_rev = str(client.info(entry.get('name')).revision.number)
+ except:
+ self.logger.info("Repository %s does not exist" % entry.get('name'))
+ return False
+
+ if cur_rev != entry.get('revision'):
+ self.logger.info("At revision %s need to go to revision %s" %
+ (cur_rev, entry.get('revision')))
+ return False
+
+ return True
+
+ def Installsvn(self, entry):
+ """Checkout contents from a svn repository"""
+ try:
+ client = pysvn.Client.update(entry.get('name'), recurse=True)
+ except:
+ self.logger.error("Failed to update repository", exc_info=1)
+ return False
+
+ return True
+
+ def VerifyPath(self, entry, _):
+ vcs = entry.get('vcstype')
+ if vcs in missing:
+ self.logger.error("Missing %s python libraries. Cannot verify" %
+ vcs)
+ return False
+ ret = getattr(self, 'Verify%s' % vcs)
+ return ret(entry, _)
+
+ def InstallPath(self, entry):
+ vcs = entry.get('vcstype')
+ if vcs in missing:
+ self.logger.error("Missing %s python libraries. "
+ "Unable to install" % vcs)
+ return False
+ ret = getattr(self, 'Install%s' % vcs)
+ return ret(entry)
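Note (illustrative, not part of the commit): VerifyPath() and InstallPath() dispatch on the entry's vcstype attribute with getattr(), so supporting another VCS only requires adding Verify<type>/Install<type> methods. A self-contained sketch of that dispatch, with hypothetical names:

class VCSDispatch(object):
    def Verifygit(self, entry):
        return "would verify git checkout at %s" % entry['name']

    def Verifysvn(self, entry):
        return "would verify svn checkout at %s" % entry['name']

    def VerifyPath(self, entry):
        # 'vcstype' selects the handler method at runtime
        return getattr(self, 'Verify%s' % entry['vcstype'])(entry)

print(VCSDispatch().VerifyPath({'name': '/srv/repo', 'vcstype': 'git'}))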
diff --git a/src/lib/Client/Tools/YUM24.py b/src/lib/Client/Tools/YUM24.py
index efe92a059..04d9f5c07 100644
--- a/src/lib/Client/Tools/YUM24.py
+++ b/src/lib/Client/Tools/YUM24.py
@@ -30,6 +30,7 @@ except:
if not hasattr(Bcfg2.Client.Tools.RPMng, 'RPMng'):
raise ImportError
+
def build_yname(pkgname, inst):
"""Build yum appropriate package name."""
ypname = pkgname
@@ -45,6 +46,7 @@ def build_yname(pkgname, inst):
ypname += ".%s" % (inst.get('arch'))
return ypname
+
class YUM24(Bcfg2.Client.Tools.RPMng.RPMng):
"""Support for Yum packages."""
pkgtype = 'yum'
@@ -59,7 +61,8 @@ class YUM24(Bcfg2.Client.Tools.RPMng.RPMng):
__ireq__ = {'Package': ['name']}
#__ireq__ = {'Package': ['name', 'version']}
- __new_req__ = {'Package': ['name'], 'Instance': ['version', 'release', 'arch']}
+ __new_req__ = {'Package': ['name'],
+ 'Instance': ['version', 'release', 'arch']}
__new_ireq__ = {'Package': ['name'], \
'Instance': []}
#__new_ireq__ = {'Package': ['name', 'uri'], \
@@ -68,8 +71,10 @@ class YUM24(Bcfg2.Client.Tools.RPMng.RPMng):
__gpg_req__ = {'Package': ['name', 'version']}
__gpg_ireq__ = {'Package': ['name', 'version']}
- __new_gpg_req__ = {'Package': ['name'], 'Instance': ['version', 'release']}
- __new_gpg_ireq__ = {'Package': ['name'], 'Instance': ['version', 'release']}
+ __new_gpg_req__ = {'Package': ['name'],
+ 'Instance': ['version', 'release']}
+ __new_gpg_ireq__ = {'Package': ['name'],
+ 'Instance': ['version', 'release']}
conflicts = ['YUMng', 'RPMng']
@@ -101,10 +106,14 @@ class YUM24(Bcfg2.Client.Tools.RPMng.RPMng):
data = {pkg.arch: (pkg.epoch, pkg.version, pkg.release)}
else:
pname = pkg[0]
- if pkg[1] is None: a = 'noarch'
- else: a = pkg[1]
- if pkg[2] is None: e = '0'
- else: e = pkg[2]
+ if pkg[1] is None:
+ a = 'noarch'
+ else:
+ a = pkg[1]
+ if pkg[2] is None:
+ e = '0'
+ else:
+ e = pkg[2]
data = {a: (e, pkg[3], pkg[4])}
if pname in dest:
dest[pname].update(data)
@@ -137,24 +146,24 @@ class YUM24(Bcfg2.Client.Tools.RPMng.RPMng):
if entry.get('type', False) == 'yum':
# Check for virtual provides or packages. If we don't have
# this package use Yum to resolve it to a real package name
- knownPkgs = self.yum_installed.keys() + self.yum_avail.keys()
+ knownPkgs = list(self.yum_installed.keys()) + list(self.yum_avail.keys())
if entry.get('name') not in knownPkgs:
# If the package name matches something installed
# or available the that's the correct package.
try:
- pkgDict = dict( [ (i.name, i) for i in \
- self.yb.returnPackagesByDep(entry.get('name')) ] )
+ pkgDict = dict([(i.name, i) for i in \
+ self.yb.returnPackagesByDep(entry.get('name'))])
except yum.Errors.YumBaseError, e:
self.logger.error('Yum Error Depsolving for %s: %s' % \
(entry.get('name'), str(e)))
pkgDict = {}
if len(pkgDict) > 1:
- # What do we do with multiple packages?
+ # What do we do with multiple packages?
s = "YUMng: returnPackagesByDep(%s) returned many packages"
self.logger.info(s % entry.get('name'))
s = "YUMng: matching packages: %s"
- self.logger.info(s % str(pkgDict.keys()))
+ self.logger.info(s % str(list(pkgDict.keys())))
pkgs = set(pkgDict.keys()) & set(self.yum_installed.keys())
if len(pkgs) > 0:
# Virtual packages matches an installed real package
@@ -166,7 +175,7 @@ class YUM24(Bcfg2.Client.Tools.RPMng.RPMng):
# and Yum should Do The Right Thing on package install
pkg = None
elif len(pkgDict) == 1:
- pkg = pkgDict.values()[0]
+ pkg = list(pkgDict.values())[0]
else: # len(pkgDict) == 0
s = "YUMng: returnPackagesByDep(%s) returned no results"
self.logger.info(s % entry.get('name'))
@@ -252,16 +261,16 @@ class YUM24(Bcfg2.Client.Tools.RPMng.RPMng):
self.logger.error("GPG key has no simplefile attribute")
continue
key_arg = os.path.join(self.instance_status[inst].get('pkg').get('uri'), \
- inst.get('simplefile'))
+ inst.get('simplefile'))
cmdrc, output = self.cmd.run("rpm --import %s" % key_arg)
if cmdrc != 0:
self.logger.debug("Unable to install %s-%s" % \
- (self.instance_status[inst].get('pkg').get('name'), \
- self.str_evra(inst)))
+ (self.instance_status[inst].get('pkg').get('name'), \
+ self.str_evra(inst)))
else:
self.logger.debug("Installed %s-%s-%s" % \
- (self.instance_status[inst].get('pkg').get('name'), \
- inst.get('version'), inst.get('release')))
+ (self.instance_status[inst].get('pkg').get('name'), \
+ inst.get('version'), inst.get('release')))
self.RefreshPackages()
self.gpg_keyids = self.getinstalledgpg()
pkg = self.instance_status[gpg_keys[0]].get('pkg')
@@ -374,9 +383,9 @@ class YUM24(Bcfg2.Client.Tools.RPMng.RPMng):
pkg_arg = pkg_arg + '.' + inst.get('arch')
erase_args.append(pkg_arg)
else:
- pkgspec = { 'name':pkg.get('name'),
- 'version':inst.get('version'),
- 'release':inst.get('release')}
+ pkgspec = {'name': pkg.get('name'),
+ 'version': inst.get('version'),
+ 'release': inst.get('release')}
self.logger.info("WARNING: gpg-pubkey package not in configuration %s %s"\
% (pkgspec.get('name'), self.str_evra(pkgspec)))
self.logger.info(" This package will be deleted in a future version of the RPMng driver.")
@@ -395,7 +404,7 @@ class YUM24(Bcfg2.Client.Tools.RPMng.RPMng):
for inst in pkg:
if pkg.get('name') != 'gpg-pubkey':
pkg_arg = pkg.get('name') + '-'
- if inst.attrib.has_key('epoch'):
+ if 'epoch' in inst.attrib:
pkg_arg = pkg_arg + inst.get('epoch') + ':'
pkg_arg = pkg_arg + inst.get('version') + '-' + inst.get('release')
if 'arch' in inst.attrib:
@@ -416,6 +425,5 @@ class YUM24(Bcfg2.Client.Tools.RPMng.RPMng):
if pkg_modified == True:
self.modified.append(pkg)
-
self.RefreshPackages()
self.extra = self.FindExtraPackages()
diff --git a/src/lib/Client/Tools/YUMng.py b/src/lib/Client/Tools/YUMng.py
index 44d56ff9f..c9e7aa15e 100644
--- a/src/lib/Client/Tools/YUMng.py
+++ b/src/lib/Client/Tools/YUMng.py
@@ -300,7 +300,7 @@ class YUMng(Bcfg2.Client.Tools.PkgTool):
# Okay deal with a buggy yum multilib and verify
packages = self.yb.rpmdb.searchNevra(name=po.name, epoch=po.epoch,
- ver=po.version, rel=po.release) # find all arches of pkg
+ ver=po.version, rel=po.release) # find all arches of pkg
if len(packages) == 1:
return results # No mathcing multilib packages
@@ -319,13 +319,13 @@ class YUMng(Bcfg2.Client.Tools.PkgTool):
v = verify(p)
self.verifyCache[k] = v
- for fn, probs in v.items():
+ for fn, probs in list(v.items()):
# file problems must exist in ALL multilib packages to be real
if fn in files:
common[fn] = common.get(fn, 0) + 1
flag = len(packages) - 1
- for fn, i in common.items():
+ for fn, i in list(common.items()):
if i == flag:
# this fn had verify problems in all but one of the multilib
# packages. That means its correct in the package that's
@@ -463,6 +463,14 @@ class YUMng(Bcfg2.Client.Tools.PkgTool):
self.logger.debug(" Not checking version for virtual package")
_POs = [po for po in POs] # Make a copy
elif entry.get('name') == 'gpg-pubkey':
+ if 'version' not in nevra:
+ m = "Skipping verify: gpg-pubkey without an RPM version."
+ self.logger.warning(m)
+ continue
+ if 'release' not in nevra:
+ m = "Skipping verify: gpg-pubkey without an RPM release."
+ self.logger.warning(m)
+ continue
_POs = [p for p in POs if p.version == nevra['version'] \
and p.release == nevra['release']]
else:
@@ -504,7 +512,7 @@ class YUMng(Bcfg2.Client.Tools.PkgTool):
ignores = [ig.get('name') for ig in entry.findall('Ignore')] + \
[ig.get('name') for ig in inst.findall('Ignore')] + \
self.ignores
- for fn, probs in vResult.items():
+ for fn, probs in list(vResult.items()):
if fn in modlist:
self.logger.debug(" %s in modlist, skipping" % fn)
continue
@@ -529,7 +537,7 @@ class YUMng(Bcfg2.Client.Tools.PkgTool):
"these files, revert the changes, or ignore "
"false failures:")
self.logger.debug(" Verify Problems:")
- for fn, probs in stat['verify'].items():
+ for fn, probs in list(stat['verify'].items()):
self.logger.debug(" %s" % fn)
for p in probs:
self.logger.debug(" %s: %s" % p)
@@ -569,7 +577,7 @@ class YUMng(Bcfg2.Client.Tools.PkgTool):
extra_entry = Bcfg2.Client.XML.Element('Package', name=name,
type=self.pkgtype)
instances = self._buildInstances(entry)
- _POs = [p for p in POs] # Shallow copy
+ _POs = [p for p in POs] # Shallow copy
# Algorythm is sensitive to duplicates, check for them
checked = []
@@ -580,7 +588,7 @@ class YUMng(Bcfg2.Client.Tools.PkgTool):
flag = True
if len(pkgs) > 0:
if pkgs[0] in checked:
- continue # We've already taken care of this Instance
+ continue # We've already taken care of this Instance
else:
checked.append(pkgs[0])
_POs.remove(pkgs[0])
@@ -601,16 +609,17 @@ class YUMng(Bcfg2.Client.Tools.PkgTool):
packages = [e.get('name') for e in self.getSupportedEntries()]
extras = []
- for p in self.installed.keys():
+ for p in list(self.installed.keys()):
if p not in packages:
entry = Bcfg2.Client.XML.Element('Package', name=p,
type=self.pkgtype)
for i in self.installed[p]:
- inst = Bcfg2.Client.XML.SubElement(entry, 'Instance', \
- epoch = i['epoch'],
- version = i['version'],
- release = i['release'],
- arch = i['arch'])
+ inst = Bcfg2.Client.XML.SubElement(entry,
+ 'Instance',
+ epoch=i['epoch'],
+ version=i['version'],
+ release=i['release'],
+ arch=i['arch'])
extras.append(entry)
diff --git a/src/lib/Client/Tools/__init__.py b/src/lib/Client/Tools/__init__.py
index b5120db71..88609c2f6 100644
--- a/src/lib/Client/Tools/__init__.py
+++ b/src/lib/Client/Tools/__init__.py
@@ -4,9 +4,9 @@ import warnings
warnings.filterwarnings("ignore", "The popen2 module is deprecated.*",
DeprecationWarning)
import os
-import popen2
import stat
import sys
+from subprocess import Popen, PIPE
import time
import Bcfg2.Client.XML
@@ -25,26 +25,6 @@ class toolInstantiationError(Exception):
pass
-class readonlypipe(popen2.Popen4):
- """This pipe sets up stdin --> /dev/null."""
-
- def __init__(self, cmd, bufsize=-1):
- popen2._cleanup()
- c2pread, c2pwrite = os.pipe()
- null = open('/dev/null', 'w+')
- self.pid = os.fork()
- if self.pid == 0:
- # Child
- os.dup2(null.fileno(), sys.__stdin__.fileno())
- #os.dup2(p2cread, 0)
- os.dup2(c2pwrite, 1)
- os.dup2(c2pwrite, 2)
- self._run_child(cmd)
- os.close(c2pwrite)
- self.fromchild = os.fdopen(c2pread, 'r', bufsize)
- popen2._active.append(self)
-
-
class executor:
"""This class runs stuff for us"""
@@ -53,30 +33,12 @@ class executor:
def run(self, command):
"""Run a command in a pipe dealing with stdout buffer overloads."""
- self.logger.debug('> %s' % command)
-
- runpipe = readonlypipe(command, bufsize=16384)
- output = []
- try:#macosx doesn't like this
- runpipe.fromchild.flush()
- except IOError:
- pass
- line = runpipe.fromchild.readline()
- cmdstat = -1
- while cmdstat == -1:
- while line:
- if len(line) > 0:
- self.logger.debug('< %s' % line[:-1])
- output.append(line[:-1])
- line = runpipe.fromchild.readline()
- time.sleep(0.1)
- cmdstat = runpipe.poll()
- output += [line[:-1] for line in runpipe.fromchild.readlines() \
- if line]
- # The exit code from the program is in the upper byte of the
- # value returned by cmdstat. Shift it down for tools looking at
- # the value.
- return ((cmdstat >> 8), output)
+ p = Popen(command, shell=True, bufsize=16384,
+ stdin=PIPE, stdout=PIPE, close_fds=True)
+ output = p.communicate()[0]
+ for line in output.splitlines():
+ self.logger.debug('< %s' % line)
+ return (p.returncode, output.splitlines())
class Tool:
@@ -185,7 +147,9 @@ class Tool:
if 'failure' in entry.attrib:
self.logger.error("Entry %s:%s reports bind failure: %s" % \
- (entry.tag, entry.get('name'), entry.get('failure')))
+ (entry.tag,
+ entry.get('name'),
+ entry.get('failure')))
return False
missing = [attr for attr in self.__req__[entry.tag] \
@@ -198,7 +162,8 @@ class Tool:
try:
self.gatherCurrentData(entry)
except:
- self.logger.error("Unexpected error in gatherCurrentData", exc_info=1)
+ self.logger.error("Unexpected error in gatherCurrentData",
+ exc_info=1)
return False
return True
@@ -255,7 +220,8 @@ class PkgTool(Tool):
self.logger.info("Trying single pass package install for pkgtype %s" % \
self.pkgtype)
- data = [tuple([pkg.get(field) for field in self.pkgtool[1][1]]) for pkg in packages]
+ data = [tuple([pkg.get(field) for field in self.pkgtool[1][1]])
+ for pkg in packages]
pkgargs = " ".join([self.pkgtool[1][0] % datum for datum in data])
self.logger.debug("Installing packages: :%s:" % pkgargs)
@@ -348,7 +314,9 @@ class SvcTool(Tool):
return
for entry in [ent for ent in bundle if self.handlesEntry(ent)]:
- if entry.get('mode', 'default') == 'manual':
+ mode = entry.get('mode', 'default')
+ if mode == 'manual' or \
+ (mode == 'interactive_only' and not self.setup['interactive']):
continue
# need to handle servicemode = (build|default)
# need to handle mode = (default|supervised)
@@ -358,7 +326,12 @@ class SvcTool(Tool):
else:
if self.setup['interactive']:
prompt = 'Restart service %s?: (y/N): ' % entry.get('name')
- if raw_input(prompt) not in ['y', 'Y']:
+ # py3k compatibility
+ try:
+ ans = raw_input(prompt)
+ except NameError:
+ ans = input(prompt)
+ if ans not in ['y', 'Y']:
continue
rc = self.restart_service(entry)
else:
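Note (illustrative, not part of the commit): the executor.run() rewrite above drops the popen2-based readonlypipe in favor of subprocess. A rough standalone equivalent of the new behavior:

from subprocess import Popen, PIPE

def run(command):
    """Run a shell command; return (returncode, list of output lines)."""
    p = Popen(command, shell=True, bufsize=16384,
              stdin=PIPE, stdout=PIPE, close_fds=True)
    output = p.communicate()[0]   # str on Python 2, bytes on Python 3
    return (p.returncode, output.splitlines())

rc, lines = run("echo hello; echo world")
print(rc, lines)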
diff --git a/src/lib/Client/Tools/launchd.py b/src/lib/Client/Tools/launchd.py
index db6d94c1b..03dd97e71 100644
--- a/src/lib/Client/Tools/launchd.py
+++ b/src/lib/Client/Tools/launchd.py
@@ -82,6 +82,11 @@ class launchd(Bcfg2.Client.Tools.Tool):
def InstallService(self, entry):
"""Enable or disable launchd item."""
+ # don't take any actions for mode='manual'
+ if entry.get('mode', 'default') == 'manual':
+ self.logger.info("Service %s mode set to manual. Skipping "
+ "installation." % (entry.get('name')))
+ return False
name = entry.get('name')
if entry.get('status') == 'on':
self.logger.error("Installing service %s" % name)
diff --git a/src/lib/Component.py b/src/lib/Component.py
index 33b1c9289..88dce906e 100644
--- a/src/lib/Component.py
+++ b/src/lib/Component.py
@@ -11,12 +11,12 @@ import pydoc
import sys
import time
import threading
-import urlparse
-import xmlrpclib
import Bcfg2.Logger
from Bcfg2.Statistics import Statistics
from Bcfg2.SSLServer import XMLRPCServer
+# Compatibility import
+from Bcfg2.Bcfg2Py3k import xmlrpclib, urlparse, fprint
logger = logging.getLogger()
@@ -56,11 +56,11 @@ def run_component(component_cls, location, daemon, pidfile_name, to_file,
os.chdir(os.sep)
pidfile = open(pidfile_name or "/dev/null", "w")
- print >> pidfile, os.getpid()
+ fprint(os.getpid(), pidfile)
pidfile.close()
component = component_cls(cfile=cfile, **cls_kwargs)
- up = urlparse.urlparse(location)
+ up = urlparse(location)
port = tuple(up[1].split(':'))
port = (port[0], int(port[1]))
try:
@@ -153,30 +153,31 @@ class Component (object):
automatic == True.
"""
- for name, func in inspect.getmembers(self, callable):
- if getattr(func, "automatic", False):
- need_to_lock = not getattr(func, 'locking', False)
- if (time.time() - func.automatic_ts) > \
- func.automatic_period:
- if need_to_lock:
- t1 = time.time()
- self.lock.acquire()
- t2 = time.time()
- self.instance_statistics.add_value('component_lock', t2-t1)
- try:
- mt1 = time.time()
+ for name, func in inspect.getmembers(self):
+ if name == '__call__':
+ if getattr(func, "automatic", False):
+ need_to_lock = not getattr(func, 'locking', False)
+ if (time.time() - func.automatic_ts) > \
+ func.automatic_period:
+ if need_to_lock:
+ t1 = time.time()
+ self.lock.acquire()
+ t2 = time.time()
+ self.instance_statistics.add_value('component_lock', t2-t1)
try:
- func()
- except:
- self.logger.error("Automatic method %s failed" \
- % (name), exc_info=1)
- finally:
- mt2 = time.time()
-
- if need_to_lock:
- self.lock.release()
- self.instance_statistics.add_value(name, mt2-mt1)
- func.__dict__['automatic_ts'] = time.time()
+ mt1 = time.time()
+ try:
+ func()
+ except:
+ self.logger.error("Automatic method %s failed" \
+ % (name), exc_info=1)
+ finally:
+ mt2 = time.time()
+
+ if need_to_lock:
+ self.lock.release()
+ self.instance_statistics.add_value(name, mt2-mt1)
+ func.__dict__['automatic_ts'] = time.time()
def _resolve_exposed_method(self, method_name):
"""Resolve an exposed method.
@@ -209,7 +210,8 @@ class Component (object):
except NoExposedMethod:
self.logger.error("Unknown method %s" % (method))
raise xmlrpclib.Fault(7, "Unknown method %s" % method)
- except Exception, e:
+ except Exception:
+ e = sys.exc_info()[1]
if getattr(e, "log", True):
self.logger.error(e, exc_info=True)
raise xmlrpclib.Fault(getattr(e, "fault_code", 1), str(e))
@@ -233,7 +235,8 @@ class Component (object):
self.instance_statistics.add_value(method, method_done - method_start)
except xmlrpclib.Fault:
raise
- except Exception, e:
+ except Exception:
+ e = sys.exc_info()[1]
if getattr(e, "log", True):
self.logger.error(e, exc_info=True)
raise xmlrpclib.Fault(getattr(e, "fault_code", 1), str(e))
diff --git a/src/lib/Logger.py b/src/lib/Logger.py
index e8cdd492d..9fe81f47e 100644
--- a/src/lib/Logger.py
+++ b/src/lib/Logger.py
@@ -10,18 +10,25 @@ import socket
import struct
import sys
import termios
+# Compatibility import
+from Bcfg2.Bcfg2Py3k import fprint
logging.raiseExceptions = 0
+
def print_attributes(attrib):
"""Add the attributes for an element."""
return ' '.join(['%s="%s"' % data for data in list(attrib.items())])
+
def print_text(text):
"""Add text to the output (which will need normalising."""
- charmap = {'<':'&lt;', '>':'&gt;', '&':'&amp;'}
+ charmap = {'<': '&lt;',
+ '>': '&gt;',
+ '&': '&amp;'}
return ''.join([charmap.get(char, char) for char in text]) + '\n'
+
def xml_print(element, running_indent=0, indent=4):
"""Add an element and its children to the return string."""
if (len(element.getchildren()) == 0) and (not element.text):
@@ -32,7 +39,7 @@ def xml_print(element, running_indent=0, indent=4):
ret = (' ' * running_indent)
ret += '<%s%s>\n' % (element.tag, print_attributes(element))
if element.text:
- ret += (' '* child_indent) + print_text(element.text)
+ ret += (' ' * child_indent) + print_text(element.text)
for child in element.getchildren():
ret += xml_print(child, child_indent, indent)
ret += (' ' * running_indent) + '</%s>\n' % (element.tag)
@@ -40,16 +47,21 @@ def xml_print(element, running_indent=0, indent=4):
ret += (' ' * child_indent) + print_text(element.tail)
return ret
+
class TermiosFormatter(logging.Formatter):
- """The termios formatter displays output in a terminal-sensitive fashion."""
+ """The termios formatter displays output
+ in a terminal-sensitive fashion.
+ """
def __init__(self, fmt=None, datefmt=None):
logging.Formatter.__init__(self, fmt, datefmt)
if sys.stdout.isatty():
# now get termios info
try:
- self.width = struct.unpack('hhhh', fcntl.ioctl(0, termios.TIOCGWINSZ,
- "\000"*8))[1]
+ self.width = struct.unpack('hhhh',
+ fcntl.ioctl(0,
+ termios.TIOCGWINSZ,
+ "\000" * 8))[1]
if self.width == 0:
self.width = 80
except:
@@ -67,16 +79,16 @@ class TermiosFormatter(logging.Formatter):
if len(line) <= line_len:
returns.append(line)
else:
- inner_lines = int(math.floor(float(len(line)) / line_len))+1
+ inner_lines = int(math.floor(float(len(line)) / line_len)) + 1
for i in range(inner_lines):
- returns.append("%s" % (line[i*line_len:(i+1)*line_len]))
+ returns.append("%s" % (line[i * line_len:(i + 1) * line_len]))
elif isinstance(record.msg, list):
if not record.msg:
return ''
record.msg.sort()
msgwidth = self.width
columnWidth = max([len(item) for item in record.msg])
- columns = int(math.floor(float(msgwidth) / (columnWidth+2)))
+ columns = int(math.floor(float(msgwidth) / (columnWidth + 2)))
lines = int(math.ceil(float(len(record.msg)) / columns))
for lineNumber in range(lines):
indices = [idx for idx in [(colNum * lines) + lineNumber
@@ -91,6 +103,7 @@ class TermiosFormatter(logging.Formatter):
returns.append(self.formatException(record.exc_info))
return '\n'.join(returns)
+
class FragmentingSysLogHandler(logging.handlers.SysLogHandler):
"""
This handler fragments messages into
@@ -105,7 +118,7 @@ class FragmentingSysLogHandler(logging.handlers.SysLogHandler):
def emit(self, record):
"""Chunk and deliver records."""
record.name = self.procname
- if str(record.msg) > 250:
+ if isinstance(record.msg, str):
msgs = []
error = record.exc_info
record.exc_info = None
@@ -120,14 +133,16 @@ class FragmentingSysLogHandler(logging.handlers.SysLogHandler):
msgs = [record]
for newrec in msgs:
msg = self.log_format_string % (self.encodePriority(self.facility,
- newrec.levelname.lower()), self.format(newrec))
+ newrec.levelname.lower()),
+ self.format(newrec))
try:
- self.socket.send(msg)
+ self.socket.send(msg.encode('ascii'))
except socket.error:
- for i in xrange(10):
+ for i in range(10):
try:
if isinstance(self.address, tuple):
- self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+ self.socket = socket.socket(socket.AF_INET,
+ socket.SOCK_DGRAM)
self.socket.connect(self.address)
else:
self._connect_unixsocket(self.address)
@@ -144,23 +159,34 @@ class FragmentingSysLogHandler(logging.handlers.SysLogHandler):
"""
pass
-def setup_logging(procname, to_console=True, to_syslog=True, syslog_facility='daemon', level=0, to_file=None):
+
+def setup_logging(procname, to_console=True, to_syslog=True,
+ syslog_facility='daemon', level=0, to_file=None):
"""Setup logging for Bcfg2 software."""
if hasattr(logging, 'already_setup'):
return
+
# add the handler to the root logger
if to_console:
console = logging.StreamHandler(sys.stdout)
- console.setLevel(logging.DEBUG)
+ if to_console is True:
+ console.setLevel(logging.DEBUG)
+ else:
+ console.setLevel(to_console)
# tell the handler to use this format
console.setFormatter(TermiosFormatter())
logging.root.addHandler(console)
+
if to_syslog:
try:
try:
- syslog = FragmentingSysLogHandler(procname, '/dev/log', syslog_facility)
+ syslog = FragmentingSysLogHandler(procname,
+ '/dev/log',
+ syslog_facility)
except socket.error:
- syslog = FragmentingSysLogHandler(procname, ('localhost', 514), syslog_facility)
+ syslog = FragmentingSysLogHandler(procname,
+ ('localhost', 514),
+ syslog_facility)
syslog.setLevel(logging.DEBUG)
syslog.setFormatter(logging.Formatter('%(name)s[%(process)d]: %(message)s'))
logging.root.addHandler(syslog)
@@ -168,14 +194,17 @@ def setup_logging(procname, to_console=True, to_syslog=True, syslog_facility='da
logging.root.error("failed to activate syslogging")
except:
print("Failed to activate syslogging")
+
if not to_file == None:
filelog = logging.FileHandler(to_file)
filelog.setLevel(logging.DEBUG)
filelog.setFormatter(logging.Formatter('%(asctime)s %(name)s[%(process)d]: %(message)s'))
logging.root.addHandler(filelog)
+
logging.root.setLevel(level)
logging.already_setup = True
+
def trace_process(**kwargs):
"""Literally log every line of python code as it runs.
@@ -202,11 +231,12 @@ def trace_process(**kwargs):
filename = filename[:-1]
name = frame.f_globals["__name__"]
line = linecache.getline(filename, lineno)
- print >> log_file, "%s:%s: %s" % (name, lineno, line.rstrip())
+ fprint("%s:%s: %s" % (name, lineno, line.rstrip()), log_file)
return traceit
sys.settrace(traceit)
+
def log_to_stderr(logger_name, level=logging.INFO):
"""Set up console logging."""
try:
@@ -214,11 +244,12 @@ def log_to_stderr(logger_name, level=logging.INFO):
except:
# assume logger_name is already a logger
logger = logger_name
- handler = logging.StreamHandler() # sys.stderr is the default stream
+ handler = logging.StreamHandler() # sys.stderr is the default stream
handler.setLevel(level)
- handler.setFormatter(TermiosFormatter()) # investigate this formatter
+ handler.setFormatter(TermiosFormatter()) # investigate this formatter
logger.addHandler(handler)
+
def log_to_syslog(logger_name, level=logging.INFO, format='%(name)s[%(process)d]: %(message)s'):
"""Set up syslog logging."""
try:
@@ -227,7 +258,7 @@ def log_to_syslog(logger_name, level=logging.INFO, format='%(name)s[%(process)d]
# assume logger_name is already a logger
logger = logger_name
# anticipate an exception somewhere below
- handler = logging.handlers.SysLogHandler() # investigate FragmentingSysLogHandler
+ handler = logging.handlers.SysLogHandler() # investigate FragmentingSysLogHandler
handler.setLevel(level)
handler.setFormatter(logging.Formatter(format))
logger.addHandler(handler)
diff --git a/src/lib/Options.py b/src/lib/Options.py
index 4041ccf78..d5304e696 100644
--- a/src/lib/Options.py
+++ b/src/lib/Options.py
@@ -1,11 +1,12 @@
"""Option parsing library for utilities."""
__revision__ = '$Revision$'
-import ConfigParser
import getopt
import os
import sys
import Bcfg2.Client.Tools
+# Compatibility imports
+from Bcfg2.Bcfg2Py3k import ConfigParser
def bool_cook(x):
if x:
@@ -146,7 +147,8 @@ class OptionSet(dict):
try:
opts, args = getopt.getopt(argv, self.buildGetopt(),
self.buildLongGetopt())
- except getopt.GetoptError, err:
+ except getopt.GetoptError:
+ err = sys.exc_info()[1]
self.helpExit(err)
if '-h' in argv:
self.helpExit('', 0)
@@ -201,6 +203,14 @@ PARANOID_MAX_COPIES = Option('Specify the number of paranoid copies you want',
OMIT_LOCK_CHECK = Option('Omit lock check', default=False, cmd='-O')
CORE_PROFILE = Option('profile',
default=False, cmd='-p', )
+FILES_ON_STDIN = Option('Operate on a list of files supplied on stdin',
+ cmd='--stdin', default=False, long_arg=True)
+SCHEMA_PATH = Option('Path to XML Schema files', cmd='--schema',
+ odesc='<schema path>',
+ default="%s/share/bcfg2/schemas" % DEFAULT_INSTALL_PREFIX,
+ long_arg=True)
+REQUIRE_SCHEMA = Option("Require property files to have matching schema files",
+ cmd="--require-schema", default=False, long_arg=True)
# Metadata options
MDATA_OWNER = Option('Default Path owner',
diff --git a/src/lib/Proxy.py b/src/lib/Proxy.py
index 275405faf..8a1ad683e 100644
--- a/src/lib/Proxy.py
+++ b/src/lib/Proxy.py
@@ -11,9 +11,6 @@ load_config -- read configuration files
__revision__ = '$Revision: $'
-from xmlrpclib import _Method
-
-import httplib
import logging
import re
import socket
@@ -25,50 +22,59 @@ import socket
try:
import ssl
SSL_LIB = 'py26_ssl'
-except ImportError, e:
+except ImportError:
from M2Crypto import SSL
import M2Crypto.SSL.Checker
SSL_LIB = 'm2crypto'
-import string
import sys
import time
-import urlparse
-import xmlrpclib
+
+# Compatibility imports
+from Bcfg2.Bcfg2Py3k import httplib, xmlrpclib, urlparse
version = sys.version_info[:2]
-has_py23 = map(int, version) >= [2, 3]
-has_py26 = map(int, version) >= [2, 6]
+has_py23 = version >= (2, 3)
+has_py26 = version >= (2, 6)
+
+__all__ = ["ComponentProxy",
+ "RetryMethod",
+ "SSLHTTPConnection",
+ "XMLRPCTransport"]
-__all__ = ["ComponentProxy", "RetryMethod", "SSLHTTPConnection", "XMLRPCTransport"]
class CertificateError(Exception):
def __init__(self, commonName):
self.commonName = commonName
-class RetryMethod(_Method):
+
+class RetryMethod(xmlrpclib._Method):
"""Method with error handling and retries built in."""
log = logging.getLogger('xmlrpc')
max_retries = 4
+
def __call__(self, *args):
for retry in range(self.max_retries):
try:
- return _Method.__call__(self, *args)
- except xmlrpclib.ProtocolError, err:
+ return xmlrpclib._Method.__call__(self, *args)
+ except xmlrpclib.ProtocolError:
+ err = sys.exc_info()[1]
self.log.error("Server failure: Protocol Error: %s %s" % \
(err.errcode, err.errmsg))
raise xmlrpclib.Fault(20, "Server Failure")
except xmlrpclib.Fault:
raise
- except socket.error, err:
+ except socket.error:
+ err = sys.exc_info()[1]
if hasattr(err, 'errno') and err.errno == 336265218:
self.log.error("SSL Key error")
break
if retry == 3:
self.log.error("Server failure: %s" % err)
raise xmlrpclib.Fault(20, err)
- except CertificateError, ce:
+ except CertificateError:
+ ce = sys.exc_info()[1]
self.log.error("Got unallowed commonName %s from server" \
% ce.commonName)
break
@@ -82,10 +88,13 @@ class RetryMethod(_Method):
raise xmlrpclib.Fault(20, "Server Failure")
# sorry jon
-xmlrpclib._Method = RetryMethod
+_Method = RetryMethod
+
class SSLHTTPConnection(httplib.HTTPConnection):
- """Extension of HTTPConnection that implements SSL and related behaviors."""
+ """Extension of HTTPConnection that
+ implements SSL and related behaviors.
+ """
logger = logging.getLogger('Bcfg2.Proxy.SSLHTTPConnection')
@@ -154,8 +163,7 @@ class SSLHTTPConnection(httplib.HTTPConnection):
elif SSL_LIB == 'm2crypto':
self._connect_m2crypto()
else:
- raise Exception, "No SSL module support"
-
+ raise Exception("No SSL module support")
def _connect_py26ssl(self):
"""Initiates a connection using the ssl module."""
@@ -166,7 +174,7 @@ class SSLHTTPConnection(httplib.HTTPConnection):
ssl_protocol_ver = ssl.PROTOCOL_TLSv1
else:
self.logger.error("Unknown protocol %s" % (self.protocol))
- raise Exception, "unknown protocol %s" % self.protocol
+ raise Exception("unknown protocol %s" % self.protocol)
if self.ca:
other_side_required = ssl.CERT_REQUIRED
else:
@@ -190,7 +198,7 @@ class SSLHTTPConnection(httplib.HTTPConnection):
if peer_cert and self.scns:
scn = [x[0][1] for x in peer_cert['subject'] if x[0][0] == 'commonName'][0]
if scn not in self.scns:
- raise CertificateError, scn
+ raise CertificateError(scn)
self.sock.closeSocket = True
def _connect_m2crypto(self):
@@ -202,7 +210,7 @@ class SSLHTTPConnection(httplib.HTTPConnection):
ctx = SSL.Context('tlsv1')
else:
self.logger.error("Unknown protocol %s" % (self.protocol))
- raise Exception, "unknown protocol %s" % self.protocol
+ raise Exception("unknown protocol %s" % self.protocol)
if self.ca:
# Use the certificate authority to validate the cert
@@ -235,12 +243,14 @@ class SSLHTTPConnection(httplib.HTTPConnection):
try:
self.sock.connect((hostname, self.port))
# automatically checks cert matches host
- except M2Crypto.SSL.Checker.WrongHost, wr:
- raise CertificateError, wr
+ except M2Crypto.SSL.Checker.WrongHost:
+ wr = sys.exc_info()[1]
+ raise CertificateError(wr)
class XMLRPCTransport(xmlrpclib.Transport):
- def __init__(self, key=None, cert=None, ca=None, scns=None, use_datetime=0, timeout=90):
+ def __init__(self, key=None, cert=None, ca=None,
+ scns=None, use_datetime=0, timeout=90):
if hasattr(xmlrpclib.Transport, '__init__'):
xmlrpclib.Transport.__init__(self, use_datetime)
self.key = key
@@ -250,9 +260,13 @@ class XMLRPCTransport(xmlrpclib.Transport):
self.timeout = timeout
def make_connection(self, host):
- host = self.get_host_info(host)[0]
- http = SSLHTTPConnection(host, key=self.key, cert=self.cert, ca=self.ca,
- scns=self.scns, timeout=self.timeout)
+ host, self._extra_headers = self.get_host_info(host)[0:2]
+ http = SSLHTTPConnection(host,
+ key=self.key,
+ cert=self.cert,
+ ca=self.ca,
+ scns=self.scns,
+ timeout=self.timeout)
https = httplib.HTTP()
https._setup(http)
return https
@@ -268,7 +282,10 @@ class XMLRPCTransport(xmlrpclib.Transport):
errcode, errmsg, headers = h.getreply()
if errcode != 200:
- raise xmlrpclib.ProtocolError(host + handler, errcode, errmsg, headers)
+ raise xmlrpclib.ProtocolError(host + handler,
+ errcode,
+ errmsg,
+ headers)
self.verbose = verbose
msglen = int(headers.dict['content-length'])
@@ -287,7 +304,7 @@ class XMLRPCTransport(xmlrpclib.Transport):
if not response:
break
if self.verbose:
- print "body:", repr(response), len(response)
+ print("body:", repr(response), len(response))
p.feed(response)
fd.close()
@@ -295,7 +312,9 @@ class XMLRPCTransport(xmlrpclib.Transport):
return u.close()
-def ComponentProxy(url, user=None, password=None, key=None, cert=None, ca=None,
+
+def ComponentProxy(url, user=None, password=None,
+ key=None, cert=None, ca=None,
allowedServerCNs=None, timeout=90):
"""Constructs proxies to components.
@@ -308,9 +327,10 @@ def ComponentProxy(url, user=None, password=None, key=None, cert=None, ca=None,
"""
if user and password:
- method, path = urlparse.urlparse(url)[:2]
+ method, path = urlparse(url)[:2]
newurl = "%s://%s:%s@%s" % (method, user, password, path)
else:
newurl = url
- ssl_trans = XMLRPCTransport(key, cert, ca, allowedServerCNs, timeout=timeout)
+ ssl_trans = XMLRPCTransport(key, cert, ca,
+ allowedServerCNs, timeout=timeout)
return xmlrpclib.ServerProxy(newurl, allow_none=True, transport=ssl_trans)
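The exception-handling rewrite above recurs throughout this patch: the "except Exc, err" form is a syntax error on Python 3, while "except Exc as err" only exists from Python 2.6, so the error object is recovered from sys.exc_info() instead. A minimal standalone sketch of the idiom (the function and values are illustrative, not taken from Bcfg2):

    import sys
    import socket

    def fetch(host, port):
        try:
            return socket.create_connection((host, port), timeout=5)
        except socket.error:
            # portable on Python 2 and 3; the comma form would not parse on 3
            err = sys.exc_info()[1]
            print("connection failed: %s" % err)
            return None
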
diff --git a/src/lib/SSLServer.py b/src/lib/SSLServer.py
index 1f4c1c8e4..a89beabbb 100644
--- a/src/lib/SSLServer.py
+++ b/src/lib/SSLServer.py
@@ -8,10 +8,7 @@ __all__ = [
import os
import sys
-import xmlrpclib
import socket
-import SocketServer
-import SimpleXMLRPCServer
import base64
import select
import signal
@@ -19,12 +16,17 @@ import logging
import ssl
import threading
import time
+# Compatibility imports
+from Bcfg2.Bcfg2Py3k import xmlrpclib, SimpleXMLRPCServer, SocketServer
+
class ForkedChild(Exception):
pass
+
class XMLRPCDispatcher (SimpleXMLRPCServer.SimpleXMLRPCDispatcher):
logger = logging.getLogger("Cobalt.Server.XMLRPCDispatcher")
+
def __init__(self, allow_none, encoding):
try:
SimpleXMLRPCServer.SimpleXMLRPCDispatcher.__init__(self,
@@ -48,7 +50,8 @@ class XMLRPCDispatcher (SimpleXMLRPCServer.SimpleXMLRPCDispatcher):
raw_response = xmlrpclib.dumps(response, methodresponse=1,
allow_none=self.allow_none,
encoding=self.encoding)
- except xmlrpclib.Fault, fault:
+ except xmlrpclib.Fault:
+ fault = sys.exc_info()[1]
raw_response = xmlrpclib.dumps(fault,
allow_none=self.allow_none,
encoding=self.encoding)
@@ -60,6 +63,7 @@ class XMLRPCDispatcher (SimpleXMLRPCServer.SimpleXMLRPCDispatcher):
allow_none=self.allow_none, encoding=self.encoding)
return raw_response
+
class SSLServer (SocketServer.TCPServer, object):
"""TCP server supporting SSL encryption.
@@ -76,7 +80,8 @@ class SSLServer (SocketServer.TCPServer, object):
logger = logging.getLogger("Cobalt.Server.TCPServer")
def __init__(self, server_address, RequestHandlerClass, keyfile=None,
- certfile=None, reqCert=False, ca=None, timeout=None, protocol='xmlrpc/ssl'):
+ certfile=None, reqCert=False, ca=None, timeout=None,
+ protocol='xmlrpc/ssl'):
"""Initialize the SSL-TCP server.
@@ -106,17 +111,17 @@ class SSLServer (SocketServer.TCPServer, object):
if keyfile != None:
if keyfile == False or not os.path.exists(keyfile):
self.logger.error("Keyfile %s does not exist" % keyfile)
- raise Exception, "keyfile doesn't exist"
+ raise Exception("keyfile doesn't exist")
self.certfile = certfile
if certfile != None:
if certfile == False or not os.path.exists(certfile):
self.logger.error("Certfile %s does not exist" % certfile)
- raise Exception, "certfile doesn't exist"
+ raise Exception("certfile doesn't exist")
self.ca = ca
if ca != None:
if ca == False or not os.path.exists(ca):
self.logger.error("CA %s does not exist" % ca)
- raise Exception, "ca doesn't exist"
+ raise Exception("ca doesn't exist")
self.reqCert = reqCert
if ca and certfile:
self.mode = ssl.CERT_OPTIONAL
@@ -128,14 +133,18 @@ class SSLServer (SocketServer.TCPServer, object):
self.ssl_protocol = ssl.PROTOCOL_TLSv1
else:
self.logger.error("Unknown protocol %s" % (protocol))
- raise Exception, "unknown protocol %s" % protocol
+ raise Exception("unknown protocol %s" % protocol)
def get_request(self):
(sock, sockinfo) = self.socket.accept()
sock.settimeout(self.timeout)
- sslsock = ssl.wrap_socket(sock, server_side=True, certfile=self.certfile,
- keyfile=self.keyfile, cert_reqs=self.mode,
- ca_certs=self.ca, ssl_version=self.ssl_protocol)
+ sslsock = ssl.wrap_socket(sock,
+ server_side=True,
+ certfile=self.certfile,
+ keyfile=self.keyfile,
+ cert_reqs=self.mode,
+ ca_certs=self.ca,
+ ssl_version=self.ssl_protocol)
return sslsock, sockinfo
def close_request(self, request):
@@ -212,20 +221,21 @@ class XMLRPCRequestHandler (SimpleXMLRPCServer.SimpleXMLRPCRequestHandler):
### need to override do_POST here
def do_POST(self):
try:
- max_chunk_size = 10*1024*1024
+ max_chunk_size = 10 * 1024 * 1024
size_remaining = int(self.headers["content-length"])
L = []
while size_remaining:
try:
select.select([self.rfile.fileno()], [], [], 3)
except select.error:
- print "got select timeout"
+ print("got select timeout")
raise
chunk_size = min(size_remaining, max_chunk_size)
L.append(self.rfile.read(chunk_size))
size_remaining -= len(L[-1])
data = ''.join(L)
- response = self.server._marshaled_dispatch(self.client_address, data)
+ response = self.server._marshaled_dispatch(self.client_address,
+ data)
except:
try:
self.send_response(500)
@@ -233,7 +243,7 @@ class XMLRPCRequestHandler (SimpleXMLRPCServer.SimpleXMLRPCRequestHandler):
except:
(type, msg) = sys.exc_info()[:2]
self.logger.error("Error sending 500 response (%s): %s" % \
- (type, msg))
+ (type, msg))
raise
else:
# got a valid XML RPC response
@@ -248,7 +258,8 @@ class XMLRPCRequestHandler (SimpleXMLRPCServer.SimpleXMLRPCRequestHandler):
# If we hit SSL3_WRITE_PENDING here try to resend.
self.wfile.write(response)
break
- except ssl.SSLError, e:
+ except ssl.SSLError:
+ e = sys.exc_info()[1]
if str(e).find("SSL3_WRITE_PENDING") < 0:
raise
self.logger.error("SSL3_WRITE_PENDING")
@@ -267,7 +278,7 @@ class XMLRPCRequestHandler (SimpleXMLRPCServer.SimpleXMLRPCRequestHandler):
(self.client_address[0], msg))
else:
self.logger.error("Error sending response (%s): %s" % \
- (type, msg))
+ (type, msg))
def finish(self):
# shut down the connection
@@ -276,6 +287,7 @@ class XMLRPCRequestHandler (SimpleXMLRPCServer.SimpleXMLRPCRequestHandler):
self.wfile.close()
self.rfile.close()
+
class XMLRPCServer (SocketServer.ThreadingMixIn, SSLServer,
XMLRPCDispatcher, object):
@@ -355,6 +367,7 @@ class XMLRPCServer (SocketServer.ThreadingMixIn, SSLServer,
def _get_require_auth(self):
return getattr(self.RequestHandlerClass, "require_auth", False)
+
def _set_require_auth(self, value):
self.RequestHandlerClass.require_auth = value
require_auth = property(_get_require_auth, _set_require_auth)
@@ -364,6 +377,7 @@ class XMLRPCServer (SocketServer.ThreadingMixIn, SSLServer,
return self.RequestHandlerClass.credentials
except AttributeError:
return dict()
+
def _set_credentials(self, value):
self.RequestHandlerClass.credentials = value
credentials = property(_get_credentials, _set_credentials)
@@ -375,7 +389,7 @@ class XMLRPCServer (SocketServer.ThreadingMixIn, SSLServer,
except AttributeError:
name = "unknown"
if hasattr(instance, 'plugins'):
- for pname, pinst in instance.plugins.iteritems():
+ for pname, pinst in list(instance.plugins.items()):
for mname in pinst.__rmi__:
xmname = "%s.%s" % (pname, mname)
fn = getattr(pinst, mname)
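The SSLServer changes lean on a single compatibility module for the standard-library renames (xmlrpclib, SimpleXMLRPCServer, and SocketServer became xmlrpc.client, xmlrpc.server, and socketserver). Bcfg2.Bcfg2Py3k itself is not shown in this diff; a shim along these lines would supply the names imported above (hypothetical sketch):

    # hypothetical sketch of a Python 2/3 module-rename shim
    try:
        import xmlrpclib
        import SimpleXMLRPCServer
        import SocketServer
    except ImportError:
        import xmlrpc.client as xmlrpclib
        import xmlrpc.server as SimpleXMLRPCServer
        import socketserver as SocketServer
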
diff --git a/src/lib/Server/Admin/Backup.py b/src/lib/Server/Admin/Backup.py
index fefc9fc9e..9bd644ff9 100644
--- a/src/lib/Server/Admin/Backup.py
+++ b/src/lib/Server/Admin/Backup.py
@@ -5,6 +5,7 @@ import tarfile
import Bcfg2.Server.Admin
import Bcfg2.Options
+
class Backup(Bcfg2.Server.Admin.MetadataCore):
__shorthelp__ = "Make a backup of the Bcfg2 repository"
__longhelp__ = (__shorthelp__ + "\n\nbcfg2-admin backup\n")
@@ -29,4 +30,4 @@ class Backup(Bcfg2.Server.Admin.MetadataCore):
out = tarfile.open(self.datastore + '/' + filename, mode=mode)
out.add(self.datastore, os.path.basename(self.datastore))
out.close()
- print "Archive %s was stored under %s" % (filename, self.datastore)
+ print("Archive %s was stored under %s" % (filename, self.datastore))
diff --git a/src/lib/Server/Admin/Bundle.py b/src/lib/Server/Admin/Bundle.py
index 96a7ba59d..9b2a71783 100644
--- a/src/lib/Server/Admin/Bundle.py
+++ b/src/lib/Server/Admin/Bundle.py
@@ -6,11 +6,11 @@ import Bcfg2.Server.Admin
import Bcfg2.Options
from Bcfg2.Server.Plugins.Metadata import MetadataConsistencyError
+
class Bundle(Bcfg2.Server.Admin.MetadataCore):
__shorthelp__ = "Create or delete bundle entries"
- __longhelp__ = (__shorthelp__ + #"\n\nbcfg2-admin bundle add <bundle> "
- #"\n\nbcfg2-admin bundle del <bundle>"
- "\n\nbcfg2-admin bundle list-xml"
+ # TODO: add/del functions
+ __longhelp__ = (__shorthelp__ + "\n\nbcfg2-admin bundle list-xml"
"\nbcfg2-admin bundle list-genshi"
"\nbcfg2-admin bundle show\n")
__usage__ = ("bcfg2-admin bundle [options] [add|del] [group]")
@@ -21,7 +21,7 @@ class Bundle(Bcfg2.Server.Admin.MetadataCore):
def __call__(self, args):
Bcfg2.Server.Admin.MetadataCore.__call__(self, args)
- reg='((?:[a-z][a-z\\.\\d\\-]+)\\.(?:[a-z][a-z\\-]+))(?![\\w\\.])'
+ reg = '((?:[a-z][a-z\\.\\d\\-]+)\\.(?:[a-z][a-z\\-]+))(?![\\w\\.])'
# Get all bundles out of the Bundle/ directory
opts = {'repo': Bcfg2.Options.SERVER_REPOSITORY}
@@ -38,31 +38,31 @@ class Bundle(Bcfg2.Server.Admin.MetadataCore):
# try:
# self.metadata.add_bundle(args[1])
# except MetadataConsistencyError:
-# print "Error in adding bundle."
+# print("Error in adding bundle.")
# raise SystemExit(1)
# elif args[0] in ['delete', 'remove', 'del', 'rm']:
# try:
# self.metadata.remove_bundle(args[1])
# except MetadataConsistencyError:
-# print "Error in deleting bundle."
+# print("Error in deleting bundle.")
# raise SystemExit(1)
# Lists all available xml bundles
elif args[0] in ['list-xml', 'ls-xml']:
bundle_name = []
for bundle_path in xml_list:
- rg = re.compile(reg,re.IGNORECASE|re.DOTALL)
+ rg = re.compile(reg, re.IGNORECASE | re.DOTALL)
bundle_name.append(rg.search(bundle_path).group(1))
for bundle in bundle_name:
- print bundle.split('.')[0]
+ print(bundle.split('.')[0])
# Lists all available genshi bundles
elif args[0] in ['list-genshi', 'ls-gen']:
bundle_name = []
for bundle_path in genshi_list:
- rg = re.compile(reg,re.IGNORECASE|re.DOTALL)
+ rg = re.compile(reg, re.IGNORECASE | re.DOTALL)
bundle_name.append(rg.search(bundle_path).group(1))
for bundle in bundle_name:
- print bundle.split('.')[0]
- # Shows a list of all available bundles and prints bundle
+ print(bundle.split('.')[0])
+ # Shows a list of all available bundles and prints bundle
# details after the user choose one bundle.
# FIXME: Add support for detailed output of genshi bundles
# FIXME: This functionality is almost identical with
@@ -71,32 +71,34 @@ class Bundle(Bcfg2.Server.Admin.MetadataCore):
bundle_name = []
bundle_list = xml_list + genshi_list
for bundle_path in bundle_list:
- rg = re.compile(reg,re.IGNORECASE|re.DOTALL)
+ rg = re.compile(reg, re.IGNORECASE | re.DOTALL)
bundle_name.append(rg.search(bundle_path).group(1))
text = "Available bundles (Number of bundles: %s)" % \
(len(bundle_list))
- print text
- print "%s" % (len(text) * "-")
+ print(text)
+ print("%s" % (len(text) * "-"))
for i in range(len(bundle_list)):
- print "[%i]\t%s" % (i, bundle_name[i])
- print "Enter the line number of a bundle for details:",
- lineno = raw_input()
+ print("[%i]\t%s" % (i, bundle_name[i]))
+ try:
+ lineno = raw_input("Enter the line number of a bundle for details: ")
+ except NameError:
+ lineno = input("Enter the line number of a bundle for details: ")
if int(lineno) >= int(len(bundle_list)):
- print "No line with this number."
+ print("No line with this number.")
else:
if '%s/Bundler/%s' % \
(repo, bundle_name[int(lineno)]) in genshi_list:
- print "Detailed output for *.genshi bundles is not supported."
+ print("Detailed output for *.genshi bundles is not supported.")
else:
- print 'Details for the "%s" bundle:' % \
- (bundle_name[int(lineno)].split('.')[0])
+ print('Details for the "%s" bundle:' % \
+ (bundle_name[int(lineno)].split('.')[0]))
tree = lxml.etree.parse(bundle_list[int(lineno)])
#Prints bundle content
- #print lxml.etree.tostring(tree)
+ #print(lxml.etree.tostring(tree))
names = ['Action', 'Package', 'Path', 'Service']
for name in names:
for node in tree.findall("//" + name):
- print "%s:\t%s" % (name, node.attrib["name"])
+ print("%s:\t%s" % (name, node.attrib["name"]))
else:
- print "No command specified"
+ print("No command specified")
raise SystemExit(1)
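Bundle.py introduces the raw_input/input fallback used for every interactive prompt in this patch: raw_input is gone on Python 3, so the NameError is caught and input() is used instead. A sketch of the same idiom wrapped in a helper (the prompt() helper is hypothetical, not part of Bcfg2):

    def prompt(msg):
        """Illustrative helper: read a line of input on Python 2 or 3."""
        try:
            return raw_input(msg)    # Python 2
        except NameError:
            return input(msg)        # Python 3

    lineno = prompt("Enter the line number of a bundle for details: ")
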
diff --git a/src/lib/Server/Admin/Client.py b/src/lib/Server/Admin/Client.py
index 08bd34151..3af25b15a 100644
--- a/src/lib/Server/Admin/Client.py
+++ b/src/lib/Server/Admin/Client.py
@@ -2,6 +2,7 @@ import lxml.etree
import Bcfg2.Server.Admin
from Bcfg2.Server.Plugins.Metadata import MetadataConsistencyError
+
class Client(Bcfg2.Server.Admin.MetadataCore):
__shorthelp__ = "Create, delete, or modify client entries"
__longhelp__ = (__shorthelp__ + "\n\nbcfg2-admin client add <client> "
@@ -27,13 +28,13 @@ class Client(Bcfg2.Server.Admin.MetadataCore):
attr, val = i.split('=', 1)
if attr not in ['profile', 'uuid', 'password',
'location', 'secure', 'address']:
- print "Attribute %s unknown" % attr
+ print("Attribute %s unknown" % attr)
raise SystemExit(1)
attr_d[attr] = val
try:
self.metadata.add_client(args[1], attr_d)
except MetadataConsistencyError:
- print "Error in adding client"
+ print("Error in adding client")
raise SystemExit(1)
elif args[0] in ['update', 'up']:
attr_d = {}
@@ -41,24 +42,24 @@ class Client(Bcfg2.Server.Admin.MetadataCore):
attr, val = i.split('=', 1)
if attr not in ['profile', 'uuid', 'password',
'location', 'secure', 'address']:
- print "Attribute %s unknown" % attr
+ print("Attribute %s unknown" % attr)
raise SystemExit(1)
attr_d[attr] = val
try:
self.metadata.update_client(args[1], attr_d)
except MetadataConsistencyError:
- print "Error in updating client"
+ print("Error in updating client")
raise SystemExit(1)
elif args[0] in ['delete', 'remove', 'del', 'rm']:
try:
self.metadata.remove_client(args[1])
except MetadataConsistencyError:
- print "Error in deleting client"
+ print("Error in deleting client")
raise SystemExit(1)
elif args[0] in ['list', 'ls']:
tree = lxml.etree.parse(self.metadata.data + "/clients.xml")
for node in tree.findall("//Client"):
- print node.attrib["name"]
+ print(node.attrib["name"])
else:
- print "No command specified"
+ print("No command specified")
raise SystemExit(1)
diff --git a/src/lib/Server/Admin/Compare.py b/src/lib/Server/Admin/Compare.py
index f97233b0e..4c751b55a 100644
--- a/src/lib/Server/Admin/Compare.py
+++ b/src/lib/Server/Admin/Compare.py
@@ -1,6 +1,9 @@
-import lxml.etree, os
+import lxml.etree
+import os
+
import Bcfg2.Server.Admin
+
class Compare(Bcfg2.Server.Admin.Mode):
__shorthelp__ = ("Determine differences between files or "
"directories of client specification instances")
@@ -11,30 +14,30 @@ class Compare(Bcfg2.Server.Admin.Mode):
def __init__(self, configfile):
Bcfg2.Server.Admin.Mode.__init__(self, configfile)
- self.important = {'Package':['name', 'version'],
- 'Service':['name', 'status'],
- 'Directory':['name', 'owner', 'group', 'perms'],
- 'SymLink':['name', 'to'],
- 'ConfigFile':['name', 'owner', 'group', 'perms'],
- 'Permissions':['name', 'perms'],
- 'PostInstall':['name']}
+ self.important = {'Package': ['name', 'version'],
+ 'Service': ['name', 'status'],
+ 'Directory': ['name', 'owner', 'group', 'perms'],
+ 'SymLink': ['name', 'to'],
+ 'ConfigFile': ['name', 'owner', 'group', 'perms'],
+ 'Permissions': ['name', 'perms'],
+ 'PostInstall': ['name']}
def compareStructures(self, new, old):
for child in new.getchildren():
equiv = old.xpath('%s[@name="%s"]' %
(child.tag, child.get('name')))
if child.tag in self.important:
- print "tag type %s not handled" % (child.tag)
+ print("tag type %s not handled" % (child.tag))
continue
if len(equiv) == 0:
- print ("didn't find matching %s %s" %
- (child.tag, child.get('name')))
+ print("didn't find matching %s %s" %
+ (child.tag, child.get('name')))
continue
elif len(equiv) >= 1:
if child.tag == 'ConfigFile':
if child.text != equiv[0].text:
- print " %s %s contents differ" \
- % (child.tag, child.get('name'))
+ print(" %s %s contents differ" \
+ % (child.tag, child.get('name')))
continue
noattrmatch = [field for field in self.important[child.tag] if \
child.get(field) != equiv[0].get(field)]
@@ -42,8 +45,8 @@ class Compare(Bcfg2.Server.Admin.Mode):
new.remove(child)
old.remove(equiv[0])
else:
- print " %s %s attributes %s do not match" % \
- (child.tag, child.get('name'), noattrmatch)
+ print(" %s %s attributes %s do not match" % \
+ (child.tag, child.get('name'), noattrmatch))
if len(old.getchildren()) == 0 and len(new.getchildren()) == 0:
return True
if new.tag == 'Independent':
@@ -59,24 +62,26 @@ class Compare(Bcfg2.Server.Admin.Mode):
newl.remove(entry)
oldl.remove(entry)
for entry in both:
- print " %s differs (in bundle %s)" % (entry, name)
+ print(" %s differs (in bundle %s)" % (entry, name))
for entry in oldl:
- print " %s only in old configuration (in bundle %s)" % (entry, name)
+ print(" %s only in old configuration (in bundle %s)" % (entry,
+ name))
for entry in newl:
- print " %s only in new configuration (in bundle %s)" % (entry, name)
+ print(" %s only in new configuration (in bundle %s)" % (entry,
+ name))
return False
def compareSpecifications(self, path1, path2):
try:
new = lxml.etree.parse(path1).getroot()
except IOError:
- print "Failed to read %s" % (path1)
+ print("Failed to read %s" % (path1))
raise SystemExit(1)
try:
old = lxml.etree.parse(path2).getroot()
except IOError:
- print "Failed to read %s" % (path2)
+ print("Failed to read %s" % (path2))
raise SystemExit(1)
for src in [new, old]:
@@ -88,7 +93,7 @@ class Compare(Bcfg2.Server.Admin.Mode):
for bundle in new.findall('./Bundle'):
equiv = old.xpath('Bundle[@name="%s"]' % (bundle.get('name')))
if len(equiv) == 0:
- print "couldnt find matching bundle for %s" % bundle.get('name')
+ print("couldnt find matching bundle for %s" % bundle.get('name'))
continue
if len(equiv) == 1:
if self.compareStructures(bundle, equiv[0]):
@@ -98,7 +103,7 @@ class Compare(Bcfg2.Server.Admin.Mode):
else:
rcs.append(False)
else:
- print "Unmatched bundle %s" % (bundle.get('name'))
+ print("Unmatched bundle %s" % (bundle.get('name')))
rcs.append(False)
i1 = new.find('./Independent')
i2 = old.find('./Independent')
@@ -120,18 +125,18 @@ class Compare(Bcfg2.Server.Admin.Mode):
(oldd, newd) = args
(old, new) = [os.listdir(spot) for spot in args]
for item in old:
- print "Entry:", item
+ print("Entry:", item)
state = self.__call__([oldd + '/' + item, newd + '/' + item])
new.remove(item)
if state:
- print "Entry:", item, "good"
+ print("Entry:", item, "good")
else:
- print "Entry:", item, "bad"
+ print("Entry:", item, "bad")
if new:
- print "new has extra entries", new
+ print("new has extra entries", new)
return
try:
(old, new) = args
except IndexError:
- print self.__call__.__doc__
+ print(self.__call__.__doc__)
raise SystemExit(1)
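compareStructures matches entries between the old and new specifications by XPath-ing on tag and name and then diffing the attributes it considers important. A reduced, self-contained sketch of that comparison approach (the XML snippets are invented for illustration):

    import lxml.etree

    new = lxml.etree.fromstring('<Bundle name="base"><Package name="ssh" version="5.3"/></Bundle>')
    old = lxml.etree.fromstring('<Bundle name="base"><Package name="ssh" version="5.1"/></Bundle>')
    for child in new:                        # iterate child elements
        equiv = old.xpath('%s[@name="%s"]' % (child.tag, child.get('name')))
        if not equiv:
            print("didn't find matching %s %s" % (child.tag, child.get('name')))
        elif child.get('version') != equiv[0].get('version'):
            print("%s %s version differs" % (child.tag, child.get('name')))
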
diff --git a/src/lib/Server/Admin/Group.py b/src/lib/Server/Admin/Group.py
index 4b2db28ec..1c5d0c12f 100644
--- a/src/lib/Server/Admin/Group.py
+++ b/src/lib/Server/Admin/Group.py
@@ -2,6 +2,7 @@ import lxml.etree
import Bcfg2.Server.Admin
from Bcfg2.Server.Plugins.Metadata import MetadataConsistencyError
+
class Group(Bcfg2.Server.Admin.MetadataCore):
__shorthelp__ = "Create, delete, or modify group entries"
__longhelp__ = (__shorthelp__ + "\n\nbcfg2-admin group add <group> "
@@ -28,13 +29,13 @@ class Group(Bcfg2.Server.Admin.MetadataCore):
if attr not in ['profile', 'public', 'default',
'name', 'auth', 'toolset', 'category',
'comment']:
- print "Attribute %s unknown" % attr
+ print("Attribute %s unknown" % attr)
raise SystemExit(1)
attr_d[attr] = val
try:
self.metadata.add_group(args[1], attr_d)
except MetadataConsistencyError:
- print "Error in adding group"
+ print("Error in adding group")
raise SystemExit(1)
elif args[0] in ['update', 'up']:
attr_d = {}
@@ -43,24 +44,24 @@ class Group(Bcfg2.Server.Admin.MetadataCore):
if attr not in ['profile', 'public', 'default',
'name', 'auth', 'toolset', 'category',
'comment']:
- print "Attribute %s unknown" % attr
+ print("Attribute %s unknown" % attr)
raise SystemExit(1)
attr_d[attr] = val
try:
self.metadata.update_group(args[1], attr_d)
except MetadataConsistencyError:
- print "Error in updating group"
+ print("Error in updating group")
raise SystemExit(1)
elif args[0] in ['delete', 'remove', 'del', 'rm']:
try:
self.metadata.remove_group(args[1])
except MetadataConsistencyError:
- print "Error in deleting group"
+ print("Error in deleting group")
raise SystemExit(1)
elif args[0] in ['list', 'ls']:
tree = lxml.etree.parse(self.metadata.data + "/groups.xml")
for node in tree.findall("//Group"):
- print node.attrib["name"]
+ print(node.attrib["name"])
else:
- print "No command specified"
+ print("No command specified")
raise SystemExit(1)
diff --git a/src/lib/Server/Admin/Init.py b/src/lib/Server/Admin/Init.py
index 8f54b836e..fff8bcd1c 100644
--- a/src/lib/Server/Admin/Init.py
+++ b/src/lib/Server/Admin/Init.py
@@ -2,7 +2,9 @@ import getpass
import os
import random
import socket
+import stat
import string
+import sys
import subprocess
import Bcfg2.Server.Admin
import Bcfg2.Server.Plugin
@@ -137,21 +139,27 @@ def create_key(hostname, keypath, certpath, country, state, location):
keypath,
certpath))
subprocess.call((ccstr), shell=True)
- os.chmod(keypath, 0600)
+ os.chmod(keypath, stat.S_IRUSR | stat.S_IWUSR) # 0600
-def create_conf(confpath, confdata):
+def create_conf(confpath, confdata, keypath):
# Don't overwrite existing bcfg2.conf file
if os.path.exists(confpath):
- result = raw_input("\nWarning: %s already exists. "
- "Overwrite? [y/N]: " % confpath)
+ # py3k compatibility
+ try:
+ result = raw_input("\nWarning: %s already exists. "
+ "Overwrite? [y/N]: " % confpath)
+ except NameError:
+ result = input("\nWarning: %s already exists. "
+ "Overwrite? [y/N]: " % confpath)
if result not in ['Y', 'y']:
print("Leaving %s unchanged" % confpath)
return
try:
open(confpath, "w").write(confdata)
- os.chmod(confpath, 0600)
- except Exception, e:
+ os.chmod(keypath, stat.S_IRUSR | stat.S_IWUSR) # 0600
+ except Exception:
+ e = sys.exc_info()[1]
print("Error %s occured while trying to write configuration "
"file to '%s'.\n" %
(e, confpath))
@@ -204,7 +212,12 @@ class Init(Bcfg2.Server.Admin.Mode):
def _prompt_hostname(self):
"""Ask for the server hostname."""
- data = raw_input("What is the server's hostname [%s]: " %
+ # py3k compatibility
+ try:
+ data = raw_input("What is the server's hostname [%s]: " %
+ socket.getfqdn())
+ except NameError:
+ data = input("What is the server's hostname [%s]: " %
socket.getfqdn())
if data != '':
self.shostname = data
@@ -213,21 +226,36 @@ class Init(Bcfg2.Server.Admin.Mode):
def _prompt_config(self):
"""Ask for the configuration file path."""
- newconfig = raw_input("Store Bcfg2 configuration in [%s]: " %
- self.configfile)
+ # py3k compatibility
+ try:
+ newconfig = raw_input("Store Bcfg2 configuration in [%s]: " %
+ self.configfile)
+ except NameError:
+ newconfig = input("Store Bcfg2 configuration in [%s]: " %
+ self.configfile)
if newconfig != '':
self.configfile = newconfig
def _prompt_repopath(self):
"""Ask for the repository path."""
while True:
- newrepo = raw_input("Location of Bcfg2 repository [%s]: " %
- self.repopath)
+ # py3k compatibility
+ try:
+ newrepo = raw_input("Location of Bcfg2 repository [%s]: " %
+ self.repopath)
+ except NameError:
+ newrepo = input("Location of Bcfg2 repository [%s]: " %
+ self.repopath)
if newrepo != '':
self.repopath = newrepo
if os.path.isdir(self.repopath):
- response = raw_input("Directory %s exists. Overwrite? [y/N]:" \
- % self.repopath)
+ # py3k compatibility
+ try:
+ response = raw_input("Directory %s exists. Overwrite? [y/N]:" \
+ % self.repopath)
+ except NameError:
+ response = input("Directory %s exists. Overwrite? [y/N]:" \
+ % self.repopath)
if response.lower().strip() == 'y':
break
else:
@@ -243,8 +271,13 @@ class Init(Bcfg2.Server.Admin.Mode):
def _prompt_server(self):
"""Ask for the server name."""
- newserver = raw_input("Input the server location [%s]: " %
- self.server_uri)
+ # py3k compatibility
+ try:
+ newserver = raw_input("Input the server location [%s]: " %
+ self.server_uri)
+ except NameError:
+ newserver = input("Input the server location [%s]: " %
+ self.server_uri)
if newserver != '':
self.server_uri = newserver
@@ -256,51 +289,81 @@ class Init(Bcfg2.Server.Admin.Mode):
prompt += ': '
while True:
try:
- self.os_sel = os_list[int(raw_input(prompt))-1][1]
+ # py3k compatibility
+ try:
+ osidx = int(raw_input(prompt))
+ except NameError:
+ osidx = int(input(prompt))
+ self.os_sel = os_list[osidx - 1][1]
break
except ValueError:
continue
def _prompt_plugins(self):
- default = raw_input("Use default plugins? (%s) [Y/n]: " %
+ # py3k compatibility
+ try:
+ default = raw_input("Use default plugins? (%s) [Y/n]: " %
+ ''.join(default_plugins)).lower()
+ except NameError:
+ default = input("Use default plugins? (%s) [Y/n]: " %
''.join(default_plugins)).lower()
if default != 'y' or default != '':
while True:
plugins_are_valid = True
- plug_str = raw_input("Specify plugins: ")
+ # py3k compatibility
+ try:
+ plug_str = raw_input("Specify plugins: ")
+ except NameError:
+ plug_str = input("Specify plugins: ")
plugins = plug_str.split(',')
for plugin in plugins:
plugin = plugin.strip()
if not plugin in plugin_list:
plugins_are_valid = False
- print "ERROR: Plugin %s not recognized" % plugin
+ print("ERROR: Plugin %s not recognized" % plugin)
if plugins_are_valid:
break
def _prompt_certificate(self):
"""Ask for the key details (country, state, and location)."""
- print "The following questions affect SSL certificate generation."
- print "If no data is provided, the default values are used."
- newcountry = raw_input("Country name (2 letter code) for certificate: ")
+ print("The following questions affect SSL certificate generation.")
+ print("If no data is provided, the default values are used.")
+ # py3k compatibility
+ try:
+ newcountry = raw_input("Country name (2 letter code) for certificate: ")
+ except NameError:
+ newcountry = input("Country name (2 letter code) for certificate: ")
if newcountry != '':
if len(newcountry) == 2:
self.country = newcountry
else:
while len(newcountry) != 2:
- newcountry = raw_input("2 letter country code (eg. US): ")
+ # py3k compatibility
+ try:
+ newcountry = raw_input("2 letter country code (eg. US): ")
+ except NameError:
+ newcountry = input("2 letter country code (eg. US): ")
if len(newcountry) == 2:
self.country = newcountry
break
else:
self.country = 'US'
- newstate = raw_input("State or Province Name (full name) for certificate: ")
+ # py3k compatibility
+ try:
+ newstate = raw_input("State or Province Name (full name) for certificate: ")
+ except NameError:
+ newstate = input("State or Province Name (full name) for certificate: ")
if newstate != '':
self.state = newstate
else:
self.state = 'Illinois'
- newlocation = raw_input("Locality Name (eg, city) for certificate: ")
+ # py3k compatibility
+ try:
+ newlocation = raw_input("Locality Name (eg, city) for certificate: ")
+ except NameError:
+ newlocation = input("Locality Name (eg, city) for certificate: ")
if newlocation != '':
self.location = newlocation
else:
@@ -320,7 +383,8 @@ class Init(Bcfg2.Server.Admin.Mode):
'', ["Bcfg2.Server.Plugins"])
cls = getattr(module, plugin)
cls.init_repo(self.repopath)
- except Exception, e:
+ except Exception:
+ e = sys.exc_info()[1]
print("Plugin setup for %s failed: %s\n"
"Check that dependencies are installed?" % (plugin, e))
@@ -338,7 +402,7 @@ class Init(Bcfg2.Server.Admin.Mode):
self.server_uri)
# Create the configuration file and SSL key
- create_conf(self.configfile, confdata)
+ create_conf(self.configfile, confdata, keypath)
kpath = keypath + '/bcfg2.key'
cpath = keypath + '/bcfg2.crt'
create_key(self.shostname, kpath, cpath, self.country,
@@ -349,6 +413,6 @@ class Init(Bcfg2.Server.Admin.Mode):
try:
os.makedirs(path)
self._init_plugins()
- print "Repository created successfuly in %s" % (self.repopath)
+ print("Repository created successfuly in %s" % (self.repopath))
except OSError:
print("Failed to create %s." % path)
diff --git a/src/lib/Server/Admin/Perf.py b/src/lib/Server/Admin/Perf.py
index 095180592..af1c83072 100644
--- a/src/lib/Server/Admin/Perf.py
+++ b/src/lib/Server/Admin/Perf.py
@@ -1,8 +1,9 @@
+import sys
+
import Bcfg2.Options
import Bcfg2.Proxy
import Bcfg2.Server.Admin
-import sys
class Perf(Bcfg2.Server.Admin.Mode):
__shorthelp__ = ("Query server for performance data")
@@ -27,11 +28,11 @@ class Perf(Bcfg2.Server.Admin.Mode):
proxy = Bcfg2.Proxy.ComponentProxy(setup['server'],
setup['user'],
setup['password'],
- key = setup['key'],
- cert = setup['certificate'],
- ca = setup['ca'])
+ key=setup['key'],
+ cert=setup['certificate'],
+ ca=setup['ca'])
data = proxy.get_statistics()
- for key, value in data.iteritems():
+ for key, value in list(data.items()):
data = tuple(["%.06f" % (item) for item in value[:-1]] + [value[-1]])
output.append((key, ) + data)
self.print_table(output)
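Perf.py replaces dict.iteritems(), which no longer exists on Python 3, with list(dict.items()), which works on both versions at the cost of materializing the items once. For example (timing values invented):

    data = {"probe": (0.013, 2), "config": (0.105, 7)}   # illustrative timings
    for key, value in list(data.items()):    # iteritems() is Python 2 only
        print("%s: %s" % (key, value))
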
diff --git a/src/lib/Server/Admin/Pull.py b/src/lib/Server/Admin/Pull.py
index 926eda1b3..47a8be253 100644
--- a/src/lib/Server/Admin/Pull.py
+++ b/src/lib/Server/Admin/Pull.py
@@ -1,7 +1,9 @@
import getopt
import sys
+
import Bcfg2.Server.Admin
+
class Pull(Bcfg2.Server.Admin.MetadataCore):
"""Pull mode retrieves entries from clients and
integrates the information into the repository.
@@ -38,7 +40,7 @@ class Pull(Bcfg2.Server.Admin.MetadataCore):
try:
opts, gargs = getopt.getopt(args, 'vfIs')
except:
- print self.__shorthelp__
+ print(self.__shorthelp__)
raise SystemExit(1)
for opt in opts:
if opt[0] == '-v':
@@ -55,18 +57,20 @@ class Pull(Bcfg2.Server.Admin.MetadataCore):
try:
self.PullEntry(*line.split(None, 3))
except SystemExit:
- print " for %s" % line
+ print(" for %s" % line)
except:
- print "Bad entry: %s" % line.strip()
+ print("Bad entry: %s" % line.strip())
elif len(gargs) < 3:
- print self.__longhelp__
+ print(self.__longhelp__)
raise SystemExit(1)
else:
self.PullEntry(gargs[0], gargs[1], gargs[2])
def BuildNewEntry(self, client, etype, ename):
- """Construct a new full entry for given client/entry from statistics."""
- new_entry = {'type':etype, 'name':ename}
+ """Construct a new full entry for
+ given client/entry from statistics.
+ """
+ new_entry = {'type': etype, 'name': ename}
for plugin in self.bcore.pull_sources:
try:
(owner, group, perms, contents) = \
@@ -74,16 +78,19 @@ class Pull(Bcfg2.Server.Admin.MetadataCore):
break
except Bcfg2.Server.Plugin.PluginExecutionError:
if plugin == self.bcore.pull_sources[-1]:
- print "Pull Source failure; could not fetch current state"
+ print("Pull Source failure; could not fetch current state")
raise SystemExit(1)
try:
- data = {'owner':owner, 'group':group, 'perms':perms, 'text':contents}
+ data = {'owner': owner,
+ 'group': group,
+ 'perms': perms,
+ 'text': contents}
except UnboundLocalError:
print("Unable to build entry. "
"Do you have a statistics plugin enabled?")
raise SystemExit(1)
- for k, v in data.iteritems():
+ for k, v in list(data.items()):
if v:
new_entry[k] = v
#print new_entry
@@ -93,17 +100,22 @@ class Pull(Bcfg2.Server.Admin.MetadataCore):
"""Determine where to put pull data."""
if self.mode == 'interactive':
for choice in choices:
- print "Plugin returned choice:"
+ print("Plugin returned choice:")
if id(choice) == id(choices[0]):
- print "(current entry)",
+ print("(current entry) ")
if choice.all:
- print " => global entry"
+ print(" => global entry")
elif choice.group:
- print (" => group entry: %s (prio %d)" %
- (choice.group, choice.prio))
+ print(" => group entry: %s (prio %d)" %
+ (choice.group, choice.prio))
else:
- print " => host entry: %s" % (choice.hostname)
- if raw_input("Use this entry? [yN]: ") in ['y', 'Y']:
+ print(" => host entry: %s" % (choice.hostname))
+ # py3k compatibility
+ try:
+ ans = raw_input("Use this entry? [yN]: ") in ['y', 'Y']
+ except NameError:
+ ans = input("Use this entry? [yN]: ") in ['y', 'Y']
+ if ans:
return choice
return False
else:
@@ -136,7 +148,7 @@ class Pull(Bcfg2.Server.Admin.MetadataCore):
self.errExit("Configuration upload not supported by plugin %s" \
% (plugin.name))
# Commit if running under a VCS
- for vcsplugin in self.bcore.plugins.values():
+ for vcsplugin in list(self.bcore.plugins.values()):
if isinstance(vcsplugin, Bcfg2.Server.Plugin.Version):
files = "%s/%s" % (plugin.data, ename)
comment = 'file "%s" pulled from host %s' % (files, client)
diff --git a/src/lib/Server/Admin/Query.py b/src/lib/Server/Admin/Query.py
index b5af9bad2..9e1d7cc88 100644
--- a/src/lib/Server/Admin/Query.py
+++ b/src/lib/Server/Admin/Query.py
@@ -2,6 +2,7 @@ import logging
import Bcfg2.Logger
import Bcfg2.Server.Admin
+
class Query(Bcfg2.Server.Admin.Mode):
__shorthelp__ = "Query clients"
__longhelp__ = (__shorthelp__ + "\n\nbcfg2-admin query [-n] [-c] "
@@ -25,14 +26,15 @@ class Query(Bcfg2.Server.Admin.Mode):
self.bcore = Bcfg2.Server.Core.Core(self.get_repo_path(),
['Metadata', 'Probes'],
'foo', False, 'UTF-8')
- except Bcfg2.Server.Core.CoreInitError, msg:
+ except Bcfg2.Server.Core.CoreInitError:
+ msg = sys.exc_info()[1]
self.errExit("Core load failed because %s" % msg)
self.bcore.fam.handle_events_in_interval(1)
self.meta = self.bcore.metadata
def __call__(self, args):
Bcfg2.Server.Admin.Mode.__call__(self, args)
- clients = self.meta.clients.keys()
+ clients = list(self.meta.clients.keys())
filename_arg = False
filename = None
for arg in args:
@@ -48,7 +50,7 @@ class Query(Bcfg2.Server.Admin.Mode):
try:
k, v = arg.split('=')
except:
- print "Unknown argument %s" % arg
+ print("Unknown argument %s" % arg)
continue
if k == 'p':
nc = self.meta.get_client_names_by_profiles(v.split(','))
@@ -57,22 +59,22 @@ class Query(Bcfg2.Server.Admin.Mode):
# add probed groups (if present)
for conn in self.bcore.connectors:
if isinstance(conn, Bcfg2.Server.Plugins.Probes.Probes):
- for c, glist in conn.cgroups.items():
+ for c, glist in list(conn.cgroups.items()):
for g in glist:
if g in v.split(','):
nc.append(c)
else:
- print "One of g= or p= must be specified"
+ print("One of g= or p= must be specified")
raise SystemExit(1)
clients = [c for c in clients if c in nc]
if '-n' in args:
for client in clients:
- print client
+ print(client)
else:
- print ','.join(clients)
+ print(','.join(clients))
if '-f' in args:
f = open(filename, "w")
for client in clients:
f.write(client + "\n")
f.close()
- print "Wrote results to %s" % (filename)
+ print("Wrote results to %s" % (filename))
diff --git a/src/lib/Server/Admin/Reports.py b/src/lib/Server/Admin/Reports.py
index a4dd19064..942477a49 100644
--- a/src/lib/Server/Admin/Reports.py
+++ b/src/lib/Server/Admin/Reports.py
@@ -1,7 +1,6 @@
'''Admin interface for dynamic reports'''
import Bcfg2.Logger
import Bcfg2.Server.Admin
-import ConfigParser
import datetime
import os
import logging
@@ -14,6 +13,9 @@ from Bcfg2.Server.Reports.updatefix import update_database
from Bcfg2.Server.Reports.utils import *
from lxml.etree import XML, XMLSyntaxError
+# Compatibility import
+from Bcfg2.Bcfg2Py3k import ConfigParser
+
# FIXME: Remove when server python dep is 2.5 or greater
if sys.version_info >= (2, 5):
from hashlib import md5
@@ -26,7 +28,8 @@ import django.core.management
# FIXME - settings file uses a hardcoded path for /etc/bcfg2.conf
try:
import Bcfg2.Server.Reports.settings
-except Exception, e:
+except Exception:
+ e = sys.exc_info()[1]
sys.stderr.write("Failed to load configuration settings. %s\n" % e)
sys.exit(1)
@@ -42,7 +45,8 @@ from django.db import connection, transaction
from Bcfg2.Server.Reports.reports.models import Client, Interaction, Entries, \
Entries_interactions, Performance, \
- Reason, Ping, TYPE_CHOICES, InternalDatabaseVersion
+ Reason, Ping
+
def printStats(fn):
"""
@@ -50,24 +54,29 @@ def printStats(fn):
Decorator for purging. Prints database statistics after a run.
"""
- def print_stats(*data):
+ def print_stats(self, *data):
start_client = Client.objects.count()
start_i = Interaction.objects.count()
start_ei = Entries_interactions.objects.count()
start_perf = Performance.objects.count()
start_ping = Ping.objects.count()
- fn(*data)
+ fn(self, *data)
- print "Clients removed: %s" % (start_client - Client.objects.count())
- print "Interactions removed: %s" % (start_i - Interaction.objects.count())
- print "Interactions->Entries removed: %s" % \
- (start_ei - Entries_interactions.objects.count())
- print "Metrics removed: %s" % (start_perf - Performance.objects.count())
- print "Ping metrics removed: %s" % (start_ping - Ping.objects.count())
+ self.log.info("Clients removed: %s" %
+ (start_client - Client.objects.count()))
+ self.log.info("Interactions removed: %s" %
+ (start_i - Interaction.objects.count()))
+ self.log.info("Interactions->Entries removed: %s" %
+ (start_ei - Entries_interactions.objects.count()))
+ self.log.info("Metrics removed: %s" %
+ (start_perf - Performance.objects.count()))
+ self.log.info("Ping metrics removed: %s" %
+ (start_ping - Ping.objects.count()))
return print_stats
+
class Reports(Bcfg2.Server.Admin.Mode):
'''Admin interface for dynamic reports'''
__shorthelp__ = "Manage dynamic reports"
@@ -93,7 +102,7 @@ class Reports(Bcfg2.Server.Admin.Mode):
def __init__(self, cfile):
Bcfg2.Server.Admin.Mode.__init__(self, cfile)
self.log.setLevel(logging.INFO)
- self.django_commands = [ 'syncdb', 'sqlall', 'validate' ]
+ self.django_commands = ['syncdb', 'sqlall', 'validate']
self.__usage__ = self.__usage__ + " Django commands:\n " + \
"\n ".join(self.django_commands)
@@ -123,54 +132,54 @@ class Reports(Bcfg2.Server.Admin.Mode):
update_database()
elif args[0] == 'load_stats':
quick = '-O3' in args
- stats_file=None
- clients_file=None
- i=1
+ stats_file = None
+ clients_file = None
+ i = 1
while i < len(args):
if args[i] == '-s' or args[i] == '--stats':
- stats_file = args[i+1]
+ stats_file = args[i + 1]
if stats_file[0] == '-':
self.errExit("Invalid statistics file: %s" % stats_file)
elif args[i] == '-c' or args[i] == '--clients-file':
- clients_file = args[i+1]
+ clients_file = args[i + 1]
if clients_file[0] == '-':
self.errExit("Invalid clients file: %s" % clients_file)
i = i + 1
self.load_stats(stats_file, clients_file, verb, quick)
elif args[0] == 'purge':
- expired=False
- client=None
- maxdate=None
- state=None
- i=1
+ expired = False
+ client = None
+ maxdate = None
+ state = None
+ i = 1
while i < len(args):
if args[i] == '-c' or args[i] == '--client':
if client:
self.errExit("Only one client per run")
- client = args[i+1]
- print client
+ client = args[i + 1]
+ print(client)
i = i + 1
elif args[i] == '--days':
if maxdate:
self.errExit("Max date specified multiple times")
try:
- maxdate = datetime.datetime.now() - datetime.timedelta(days=int(args[i+1]))
+ maxdate = datetime.datetime.now() - datetime.timedelta(days=int(args[i + 1]))
except:
- self.log.error("Invalid number of days: %s" % args[i+1])
- raise SystemExit, -1
+ self.log.error("Invalid number of days: %s" % args[i + 1])
+ raise SystemExit(-1)
i = i + 1
elif args[i] == '--expired':
- expired=True
+ expired = True
i = i + 1
if expired:
if state:
self.log.error("--state is not valid with --expired")
- raise SystemExit, -1
+ raise SystemExit(-1)
self.purge_expired(maxdate)
else:
self.purge(client, maxdate, state)
else:
- print "Unknown command: %s" % args[0]
+ print("Unknown command: %s" % args[0])
@transaction.commit_on_success
def scrub(self):
@@ -179,11 +188,12 @@ class Reports(Bcfg2.Server.Admin.Mode):
# Currently only reasons are a problem
try:
start_count = Reason.objects.count()
- except Exception, e:
+ except Exception:
+ e = sys.exc_info()[1]
self.log.error("Failed to load reason objects: %s" % e)
return
dup_reasons = []
-
+
cmp_reasons = dict()
batch_update = []
for reason in BatchFetch(Reason.objects):
@@ -192,7 +202,7 @@ class Reports(Bcfg2.Server.Admin.Mode):
comparisons '''
id = reason.id
reason.id = None
- key=md5(pickle.dumps(reason)).hexdigest()
+ key = md5(pickle.dumps(reason)).hexdigest()
reason.id = id
if key in cmp_reasons:
@@ -203,14 +213,15 @@ class Reports(Bcfg2.Server.Admin.Mode):
else:
cmp_reasons[key] = reason.id
self.log.debug("key %d" % reason.id)
-
+
self.log.debug("Done with updates, deleting dupes")
try:
cursor = connection.cursor()
cursor.executemany('update reports_entries_interactions set reason_id=%s where reason_id=%s', batch_update)
cursor.executemany('delete from reports_reason where id = %s', dup_reasons)
transaction.set_dirty()
- except Exception, ex:
+ except Exception:
+ ex = sys.exc_info()[1]
self.log.error("Failed to delete reasons: %s" % ex)
raise
@@ -244,7 +255,7 @@ class Reports(Bcfg2.Server.Admin.Mode):
try:
statsdata = XML(open(stats_file).read())
except (IOError, XMLSyntaxError):
- self.errExit("StatReports: Failed to parse %s"%(stats_file))
+ self.errExit("StatReports: Failed to parse %s" % (stats_file))
if not clientspath:
try:
@@ -255,10 +266,15 @@ class Reports(Bcfg2.Server.Admin.Mode):
try:
clientsdata = XML(open(clientspath).read())
except (IOError, XMLSyntaxError):
- self.errExit("StatReports: Failed to parse %s"%(clientspath))
+ self.errExit("StatReports: Failed to parse %s" % (clientspath))
try:
- load_stats(clientsdata, statsdata, verb, self.log, quick=quick, location=platform.node())
+ load_stats(clientsdata,
+ statsdata,
+ verb,
+ self.log,
+ quick=quick,
+ location=platform.node())
except:
pass
@@ -266,7 +282,7 @@ class Reports(Bcfg2.Server.Admin.Mode):
def purge(self, client=None, maxdate=None, state=None):
'''Purge historical data from the database'''
- filtered = False # indicates whether or not a client should be deleted
+ filtered = False # indicates whether or not a client should be deleted
if not client and not maxdate and not state:
self.errExit("Reports.prune: Refusing to prune all data")
@@ -278,13 +294,13 @@ class Reports(Bcfg2.Server.Admin.Mode):
ipurge = ipurge.filter(client=cobj)
except Client.DoesNotExist:
self.log.error("Client %s not in database" % client)
- raise SystemExit, -1
+ raise SystemExit(-1)
self.log.debug("Filtering by client: %s" % client)
if maxdate:
filtered = True
if not isinstance(maxdate, datetime.datetime):
- raise TypeError, "maxdate is not a DateTime object"
+ raise TypeError("maxdate is not a DateTime object")
self.log.debug("Filtering by maxdate: %s" % maxdate)
ipurge = ipurge.filter(timestamp__lt=maxdate)
@@ -296,9 +312,9 @@ class Reports(Bcfg2.Server.Admin.Mode):
if state:
filtered = True
- if state not in ('dirty','clean','modified'):
- raise TypeError, "state is not one of the following values " + \
- "('dirty','clean','modified')"
+ if state not in ('dirty', 'clean', 'modified'):
+ raise TypeError("state is not one of the following values " + \
+ "('dirty','clean','modified')")
self.log.debug("Filtering by state: %s" % state)
ipurge = ipurge.filter(state=state)
@@ -323,6 +339,8 @@ class Reports(Bcfg2.Server.Admin.Mode):
# bulk operations bypass the Interaction.delete method
self.log.debug("Pruning orphan Performance objects")
Performance.prune_orphans()
+ self.log.debug("Pruning orphan Reason objects")
+ Reason.prune_orphans()
if client and not filtered:
'''Delete the client, ping data is automatic'''
@@ -342,7 +360,7 @@ class Reports(Bcfg2.Server.Admin.Mode):
if maxdate:
if not isinstance(maxdate, datetime.datetime):
- raise TypeError, "maxdate is not a DateTime object"
+ raise TypeError("maxdate is not a DateTime object")
self.log.debug("Filtering by maxdate: %s" % maxdate)
clients = Client.objects.filter(expiration__lt=maxdate)
else:
@@ -354,4 +372,3 @@ class Reports(Bcfg2.Server.Admin.Mode):
client.delete()
self.log.debug("Pruning orphan Performance objects")
Performance.prune_orphans()
-
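The printStats decorator in Reports.py now threads self through to the wrapped method so the wrapper can report through the instance's logger instead of bare print statements. A self-contained sketch of that decorator shape (class and method names are hypothetical):

    import logging

    def log_call(fn):
        """Illustrative decorator: the wrapper takes self explicitly so it
        can reach instance attributes such as the logger."""
        def wrapper(self, *args):
            result = fn(self, *args)
            self.log.info("%s finished" % fn.__name__)
            return result
        return wrapper

    class Purger(object):
        log = logging.getLogger("example")

        @log_call
        def purge(self, client=None):
            return client

    logging.basicConfig(level=logging.INFO)
    Purger().purge("host.example.com")
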
diff --git a/src/lib/Server/Admin/Snapshots.py b/src/lib/Server/Admin/Snapshots.py
index d58873174..052545b61 100644
--- a/src/lib/Server/Admin/Snapshots.py
+++ b/src/lib/Server/Admin/Snapshots.py
@@ -8,6 +8,8 @@ import Bcfg2.Server.Snapshots
import Bcfg2.Server.Snapshots.model
from Bcfg2.Server.Snapshots.model import Snapshot, Client, Metadata, Base, \
File, Group, Package, Service
+# Compatibility import
+from Bcfg2.Bcfg2Py3k import u_str
class Snapshots(Bcfg2.Server.Admin.Mode):
__shorthelp__ = "Interact with the Snapshots system"
@@ -71,7 +73,7 @@ class Snapshots(Bcfg2.Server.Admin.Mode):
session.commit()
elif args[0] == 'dump':
client = args[1]
- snap = Snapshot.get_current(self.session, unicode(client))
+ snap = Snapshot.get_current(self.session, u_str(client))
if not snap:
print("Current snapshot for %s not found" % client)
sys.exit(1)
@@ -105,7 +107,7 @@ class Snapshots(Bcfg2.Server.Admin.Mode):
print("Usage: bcfg2-admin snapshots -b <client>")
return
client = args[2]
- snap = Snapshot.get_current(self.session, unicode(client))
+ snap = Snapshot.get_current(self.session, u_str(client))
if not snap:
print("Current snapshot for %s not found" % client)
sys.exit(1)
@@ -128,7 +130,7 @@ class Snapshots(Bcfg2.Server.Admin.Mode):
elif '-e' in args[1:]:
# Query a single host for extra entries
client = args[2]
- snap = Snapshot.get_current(self.session, unicode(client))
+ snap = Snapshot.get_current(self.session, u_str(client))
if not snap:
print("Current snapshot for %s not found" % client)
sys.exit(1)
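Snapshots.py swaps the unicode() builtin, which is absent on Python 3, for a u_str helper from the compatibility module. The helper itself is not shown in this diff; a plausible stand-in would be:

    # hypothetical stand-in for Bcfg2.Bcfg2Py3k.u_str
    try:
        u_str = unicode          # Python 2
    except NameError:
        u_str = str              # Python 3: str is already unicode

    client = u_str("host.example.com")
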
diff --git a/src/lib/Server/Admin/Tidy.py b/src/lib/Server/Admin/Tidy.py
index cc8ab4f5e..f79991fd9 100644
--- a/src/lib/Server/Admin/Tidy.py
+++ b/src/lib/Server/Admin/Tidy.py
@@ -4,6 +4,7 @@ import socket
import Bcfg2.Server.Admin
+
class Tidy(Bcfg2.Server.Admin.Mode):
__shorthelp__ = "Clean up useless files in the repo"
__longhelp__ = __shorthelp__ + "\n\nbcfg2-admin tidy [-f] [-I]\n"
@@ -24,17 +25,21 @@ class Tidy(Bcfg2.Server.Admin.Mode):
if '-f' in args or '-I' in args:
if '-I' in args:
for name in badfiles[:]:
- answer = raw_input("Unlink file %s? [yN] " % name)
+ # py3k compatibility
+ try:
+ answer = raw_input("Unlink file %s? [yN] " % name)
+ except NameError:
+ answer = input("Unlink file %s? [yN] " % name)
if answer not in ['y', 'Y']:
badfiles.remove(name)
for name in badfiles:
try:
os.unlink(name)
except IOError:
- print "Failed to unlink %s" % name
+ print("Failed to unlink %s" % name)
else:
for name in badfiles:
- print name
+ print(name)
def buildTidyList(self):
"""Clean up unused or unusable files from the repository."""
@@ -56,7 +61,8 @@ class Tidy(Bcfg2.Server.Admin.Mode):
bad.append(hostname)
for name in os.listdir("%s/SSHbase" % (self.get_repo_path())):
if not hostmatcher.match(name):
- to_remove.append("%s/SSHbase/%s" % (self.get_repo_path(), name))
+ to_remove.append("%s/SSHbase/%s" % (self.get_repo_path(),
+ name))
else:
if hostmatcher.match(name).group(1) in bad:
to_remove.append("%s/SSHbase/%s" %
diff --git a/src/lib/Server/Admin/Viz.py b/src/lib/Server/Admin/Viz.py
index e3daea84b..f39e6d7a8 100644
--- a/src/lib/Server/Admin/Viz.py
+++ b/src/lib/Server/Admin/Viz.py
@@ -1,7 +1,9 @@
import getopt
from subprocess import Popen, PIPE
+
import Bcfg2.Server.Admin
+
class Viz(Bcfg2.Server.Admin.MetadataCore):
__shorthelp__ = "Produce graphviz diagrams of metadata structures"
__longhelp__ = (__shorthelp__ + "\n\nbcfg2-admin viz [--includehosts] "
@@ -27,7 +29,8 @@ class Viz(Bcfg2.Server.Admin.MetadataCore):
plugin_blacklist = ['DBStats', 'Snapshots', 'Cfg', 'Pkgmgr', 'Packages',
'Rules', 'Account', 'Decisions', 'Deps', 'Git', 'Svn',
- 'Fossil', 'Bzr', 'Bundler', 'TGenshi', 'SGenshi', 'Base']
+ 'Fossil', 'Bzr', 'Bundler', 'TGenshi', 'SGenshi',
+ 'Base']
def __init__(self, cfile):
@@ -42,8 +45,9 @@ class Viz(Bcfg2.Server.Admin.MetadataCore):
opts, args = getopt.getopt(args, 'Hbko:',
['includehosts', 'includebundles',
'includekey', 'outfile='])
- except getopt.GetoptError, msg:
- print msg
+ except getopt.GetoptError:
+ msg = sys.exc_info()[1]
+ print(msg)
#FIXME: is this for --raw?
#rset = False
@@ -63,8 +67,8 @@ class Viz(Bcfg2.Server.Admin.MetadataCore):
data = self.Visualize(self.get_repo_path(), hset, bset,
kset, outputfile)
- print data
- raise SystemExit, 0
+ print(data)
+ raise SystemExit(0)
def Visualize(self, repopath, hosts=False,
bundles=False, key=False, output=False):
@@ -82,7 +86,7 @@ class Viz(Bcfg2.Server.Admin.MetadataCore):
try:
dotpipe.stdin.write("digraph groups {\n")
except:
- print "write to dot process failed. Is graphviz installed?"
+ print("write to dot process failed. Is graphviz installed?")
raise SystemExit(1)
dotpipe.stdin.write('\trankdir="LR";\n')
dotpipe.stdin.write(self.metadata.viz(hosts, bundles,
diff --git a/src/lib/Server/Admin/Xcmd.py b/src/lib/Server/Admin/Xcmd.py
index 8ea98b79c..fd5794f88 100644
--- a/src/lib/Server/Admin/Xcmd.py
+++ b/src/lib/Server/Admin/Xcmd.py
@@ -1,9 +1,12 @@
+import sys
+
import Bcfg2.Options
import Bcfg2.Proxy
import Bcfg2.Server.Admin
-import sys
-import xmlrpclib
+# Compatibility import
+from Bcfg2.Bcfg2Py3k import xmlrpclib
+
class Xcmd(Bcfg2.Server.Admin.Mode):
__shorthelp__ = ("XML-RPC Command Interface")
@@ -16,8 +19,8 @@ class Xcmd(Bcfg2.Server.Admin.Mode):
'user': Bcfg2.Options.CLIENT_USER,
'password': Bcfg2.Options.SERVER_PASSWORD,
'key': Bcfg2.Options.SERVER_KEY,
- 'certificate' : Bcfg2.Options.CLIENT_CERT,
- 'ca' : Bcfg2.Options.CLIENT_CA
+ 'certificate': Bcfg2.Options.CLIENT_CERT,
+ 'ca': Bcfg2.Options.CLIENT_CA
}
setup = Bcfg2.Options.OptionParser(optinfo)
setup.parse(sys.argv[2:])
@@ -25,9 +28,10 @@ class Xcmd(Bcfg2.Server.Admin.Mode):
proxy = Bcfg2.Proxy.ComponentProxy(setup['server'],
setup['user'],
setup['password'],
- key = setup['key'],
- cert = setup['certificate'],
- ca = setup['ca'], timeout=180)
+ key=setup['key'],
+ cert=setup['certificate'],
+ ca=setup['ca'],
+ timeout=180)
if len(setup['args']) == 0:
print("Usage: xcmd <xmlrpc method> <optional arguments>")
return
@@ -36,8 +40,9 @@ class Xcmd(Bcfg2.Server.Admin.Mode):
if len(setup['args']) > 1:
args = tuple(setup['args'][1:])
try:
- data = apply(getattr(proxy, cmd), args)
- except xmlrpclib.Fault, flt:
+ data = getattr(proxy, cmd)(*args)
+ except xmlrpclib.Fault:
+ flt = sys.exc_info()[1]
if flt.faultCode == 7:
print("Unknown method %s" % cmd)
return
@@ -46,4 +51,4 @@ class Xcmd(Bcfg2.Server.Admin.Mode):
else:
raise
if data != None:
- print data
+ print(data)
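Xcmd.py drops apply(), which was removed in Python 3, in favor of argument unpacking, which both versions accept. For example:

    def call(fn, args):
        # apply(fn, args) is gone on Python 3; unpacking works on both
        return fn(*args)

    print(call(max, (3, 1, 2)))              # -> 3
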
diff --git a/src/lib/Server/Admin/__init__.py b/src/lib/Server/Admin/__init__.py
index dc3dc8c01..8915492a3 100644
--- a/src/lib/Server/Admin/__init__.py
+++ b/src/lib/Server/Admin/__init__.py
@@ -19,22 +19,26 @@ __all__ = [
'Xcmd'
]
-import ConfigParser
import logging
import lxml.etree
import sys
import Bcfg2.Server.Core
import Bcfg2.Options
+# Compatibility import
+from Bcfg2.Bcfg2Py3k import ConfigParser
+
class ModeOperationError(Exception):
pass
+
class Mode(object):
"""Help message has not yet been added for mode."""
__shorthelp__ = 'Shorthelp not defined yet'
__longhelp__ = 'Longhelp not defined yet'
__args__ = []
+
def __init__(self, configfile):
self.configfile = configfile
self.__cfp = False
@@ -50,11 +54,11 @@ class Mode(object):
def __call__(self, args):
if len(args) > 0 and args[0] == 'help':
- print self.__longhelp__
+ print(self.__longhelp__)
raise SystemExit(0)
def errExit(self, emsg):
- print emsg
+ print(emsg)
raise SystemExit(1)
def get_repo_path(self):
@@ -80,9 +84,9 @@ class Mode(object):
"""
hdelim = "="
- justify = {'left':str.ljust,
- 'center':str.center,
- 'right':str.rjust}[justify.lower()]
+ justify = {'left': str.ljust,
+ 'center': str.center,
+ 'right': str.rjust}[justify.lower()]
"""
Calculate column widths (longest item in each column
@@ -90,9 +94,9 @@ class Mode(object):
"""
cols = list(zip(*rows))
- colWidths = [max([len(str(item))+2*padding for \
+ colWidths = [max([len(str(item)) + 2 * padding for \
item in col]) for col in cols]
- borderline = vdelim.join([w*hdelim for w in colWidths])
+ borderline = vdelim.join([w * hdelim for w in colWidths])
# Print out the table
print(borderline)
@@ -103,6 +107,7 @@ class Mode(object):
print(borderline)
hdr = False
+
class MetadataCore(Mode):
"""Base class for admin-modes that handle metadata."""
def __init__(self, configfile, usage, pwhitelist=None, pblacklist=None):
@@ -113,17 +118,21 @@ class MetadataCore(Mode):
setup.hm = usage
setup.parse(sys.argv[1:])
if pwhitelist is not None:
- setup['plugins'] = [x for x in setup['plugins'] if x in pwhitelist]
+ setup['plugins'] = [x for x in setup['plugins']
+ if x in pwhitelist]
elif pblacklist is not None:
- setup['plugins'] = [x for x in setup['plugins'] if x not in pblacklist]
+ setup['plugins'] = [x for x in setup['plugins']
+ if x not in pblacklist]
try:
self.bcore = Bcfg2.Server.Core.Core(self.get_repo_path(),
setup['plugins'],
'foo', 'UTF-8')
- except Bcfg2.Server.Core.CoreInitError, msg:
+ except Bcfg2.Server.Core.CoreInitError:
+ msg = sys.exc_info()[1]
self.errExit("Core load failed because %s" % msg)
self.bcore.fam.handle_events_in_interval(5)
self.metadata = self.bcore.metadata
+
class StructureMode(MetadataCore):
pass
diff --git a/src/lib/Server/Core.py b/src/lib/Server/Core.py
index ac67b8a69..8f9d3e746 100644
--- a/src/lib/Server/Core.py
+++ b/src/lib/Server/Core.py
@@ -3,19 +3,28 @@ __revision__ = '$Revision$'
import atexit
import logging
-import lxml.etree
import select
+import sys
import threading
import time
-import xmlrpclib
+try:
+ import lxml.etree
+except ImportError:
+ print("Failed to import lxml dependency. Shutting down server.")
+ raise SystemExit(1)
from Bcfg2.Component import Component, exposed
from Bcfg2.Server.Plugin import PluginInitError, PluginExecutionError
import Bcfg2.Server.FileMonitor
import Bcfg2.Server.Plugins.Metadata
+# Compatibility imports
+from Bcfg2.Bcfg2Py3k import xmlrpclib
+if sys.hexversion >= 0x03000000:
+ from functools import reduce
logger = logging.getLogger('Bcfg2.Server.Core')
+
def critical_error(operation):
"""Log and err, traceback and return an xmlrpc fault to client."""
logger.error(operation, exc_info=1)
@@ -27,12 +36,16 @@ try:
except:
pass
+
class CoreInitError(Exception):
"""This error is raised when the core cannot be initialized."""
pass
+
class Core(Component):
- """The Core object is the container for all Bcfg2 Server logic and modules."""
+ """The Core object is the container for all
+ Bcfg2 Server logic and modules.
+ """
name = 'bcfg2-server'
implementation = 'bcfg2-server'
@@ -42,15 +55,16 @@ class Core(Component):
Component.__init__(self)
self.datastore = repo
if filemonitor not in Bcfg2.Server.FileMonitor.available:
- logger.error("File monitor driver %s not available; forcing to default" % filemonitor)
+ logger.error("File monitor driver %s not available; "
+ "forcing to default" % filemonitor)
filemonitor = 'default'
try:
self.fam = Bcfg2.Server.FileMonitor.available[filemonitor]()
except IOError:
logger.error("Failed to instantiate fam driver %s" % filemonitor,
exc_info=1)
- raise CoreInitError, "failed to instantiate fam driver (used %s)" % \
- filemonitor
+ raise CoreInitError("failed to instantiate fam driver (used %s)" % \
+ filemonitor)
self.pubspace = {}
self.cfile = cfile
self.cron = {}
@@ -70,44 +84,43 @@ class Core(Component):
if not plugin in self.plugins:
self.init_plugins(plugin)
# Remove blacklisted plugins
- for p, bl in self.plugin_blacklist.items():
+ for p, bl in list(self.plugin_blacklist.items()):
if len(bl) > 0:
logger.error("The following plugins conflict with %s;"
"Unloading %s" % (p, bl))
for plug in bl:
del self.plugins[plug]
# This section loads the experimental plugins
- expl = [plug for (name, plug) in self.plugins.iteritems()
+ expl = [plug for (name, plug) in list(self.plugins.items())
if plug.experimental]
if expl:
logger.info("Loading experimental plugin(s): %s" % \
(" ".join([x.name for x in expl])))
logger.info("NOTE: Interfaces subject to change")
- depr = [plug for (name, plug) in self.plugins.iteritems()
+ depr = [plug for (name, plug) in list(self.plugins.items())
if plug.deprecated]
# This section loads the deprecated plugins
if depr:
logger.info("Loading deprecated plugin(s): %s" % \
(" ".join([x.name for x in depr])))
-
- mlist = [p for p in self.plugins.values() if \
+ mlist = [p for p in list(self.plugins.values()) if \
isinstance(p, Bcfg2.Server.Plugin.Metadata)]
if len(mlist) == 1:
self.metadata = mlist[0]
else:
logger.error("No Metadata Plugin loaded; failed to instantiate Core")
- raise CoreInitError, "No Metadata Plugin"
- self.statistics = [plugin for plugin in self.plugins.values() if \
- isinstance(plugin, Bcfg2.Server.Plugin.Statistics)]
- self.pull_sources = [plugin for plugin in self.statistics if \
- isinstance(plugin, Bcfg2.Server.Plugin.PullSource)]
- self.generators = [plugin for plugin in self.plugins.values() if \
- isinstance(plugin, Bcfg2.Server.Plugin.Generator)]
- self.structures = [plugin for plugin in self.plugins.values() if \
- isinstance(plugin, Bcfg2.Server.Plugin.Structure)]
- self.connectors = [plugin for plugin in self.plugins.values() if \
- isinstance(plugin, Bcfg2.Server.Plugin.Connector)]
+ raise CoreInitError("No Metadata Plugin")
+ self.statistics = [plugin for plugin in list(self.plugins.values())
+ if isinstance(plugin, Bcfg2.Server.Plugin.Statistics)]
+ self.pull_sources = [plugin for plugin in self.statistics
+ if isinstance(plugin, Bcfg2.Server.Plugin.PullSource)]
+ self.generators = [plugin for plugin in list(self.plugins.values())
+ if isinstance(plugin, Bcfg2.Server.Plugin.Generator)]
+ self.structures = [plugin for plugin in list(self.plugins.values())
+ if isinstance(plugin, Bcfg2.Server.Plugin.Structure)]
+ self.connectors = [plugin for plugin in list(self.plugins.values())
+ if isinstance(plugin, Bcfg2.Server.Plugin.Connector)]
self.ca = ca
self.fam_thread = threading.Thread(target=self._file_monitor_thread)
if start_fam_thread:
@@ -128,7 +141,7 @@ class Core(Component):
except:
continue
# VCS plugin periodic updates
- for plugin in self.plugins.values():
+ for plugin in list(self.plugins.values()):
if isinstance(plugin, Bcfg2.Server.Plugin.Version):
self.revision = plugin.get_revision()
@@ -137,7 +150,7 @@ class Core(Component):
try:
mod = getattr(__import__("Bcfg2.Server.Plugins.%s" %
(plugin)).Server.Plugins, plugin)
- except ImportError, e:
+ except ImportError:
try:
mod = __import__(plugin)
except:
@@ -157,22 +170,23 @@ class Core(Component):
(plugin), exc_info=1)
def shutdown(self):
- """Shuting down the plugins."""
+ """Shutting down the plugins."""
if not self.terminate.isSet():
self.terminate.set()
- for plugin in self.plugins.values():
+ for plugin in list(self.plugins.values()):
plugin.shutdown()
def validate_data(self, metadata, data, base_cls):
"""Checks the data structure."""
- for plugin in self.plugins.values():
+ for plugin in list(self.plugins.values()):
if isinstance(plugin, base_cls):
try:
if base_cls == Bcfg2.Server.Plugin.StructureValidator:
plugin.validate_structures(metadata, data)
elif base_cls == Bcfg2.Server.Plugin.GoalValidator:
plugin.validate_goals(metadata, data)
- except Bcfg2.Server.Plugin.ValidationError, err:
+ except Bcfg2.Server.Plugin.ValidationError:
+ err = sys.exc_info()[1]
logger.error("Plugin %s structure validation failed: %s" \
% (plugin.name, err.message))
raise
@@ -182,7 +196,7 @@ class Core(Component):
def GetStructures(self, metadata):
"""Get all structures for client specified by metadata."""
- structures = reduce(lambda x, y:x+y,
+ structures = reduce(lambda x, y: x + y,
[struct.BuildStructures(metadata) for struct \
in self.structures], [])
sbundles = [b.get('name') for b in structures if b.tag == 'Bundle']
@@ -232,7 +246,8 @@ class Core(Component):
glist = [gen for gen in self.generators if
entry.get('name') in gen.Entries.get(entry.tag, {})]
if len(glist) == 1:
- return glist[0].Entries[entry.tag][entry.get('name')](entry, metadata)
+ return glist[0].Entries[entry.tag][entry.get('name')](entry,
+ metadata)
elif len(glist) > 1:
generators = ", ".join([gen.name for gen in glist])
logger.error("%s %s served by multiple generators: %s" % \
@@ -242,7 +257,7 @@ class Core(Component):
if len(g2list) == 1:
return g2list[0].HandleEntry(entry, metadata)
entry.set('failure', 'no matching generator')
- raise PluginExecutionError, (entry.tag, entry.get('name'))
+ raise PluginExecutionError(entry.tag, entry.get('name'))
def BuildConfiguration(self, client):
"""Build configuration for clients."""
@@ -290,7 +305,7 @@ class Core(Component):
def GetDecisions(self, metadata, mode):
"""Get data for the decision list."""
result = []
- for plugin in self.plugins.values():
+ for plugin in list(self.plugins.values()):
try:
if isinstance(plugin, Bcfg2.Server.Plugin.Decision):
result += plugin.GetDecisions(metadata, mode)
@@ -300,7 +315,7 @@ class Core(Component):
return result
def build_metadata(self, client_name):
- """Build the metadata structure."""
+ """Build the metadata structure."""
if not hasattr(self, 'metadata'):
# some threads start before metadata is even loaded
raise Bcfg2.Server.Plugins.Metadata.MetadataRuntimeError
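
The Core.py hunks above consistently wrap dict views in list(), e.g. list(self.plugins.items()). A small sketch, not part of this commit, of why that matters once the code also runs on Python 3, where items()/values() return live views:

plugins = {'Metadata': 1, 'Cfg': 2, 'Probes': 3}

# Iterating over a list() snapshot keeps the old Python 2 semantics and stays
# safe when entries are removed mid-loop, as in the blacklist handling above.
for name, plug in list(plugins.items()):
    if name == 'Probes':
        del plugins[name]
print(sorted(plugins))   # ['Cfg', 'Metadata']
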
diff --git a/src/lib/Server/FileMonitor.py b/src/lib/Server/FileMonitor.py
index 0f09f7751..d6b313e6b 100644
--- a/src/lib/Server/FileMonitor.py
+++ b/src/lib/Server/FileMonitor.py
@@ -7,6 +7,7 @@ from time import sleep, time
logger = logging.getLogger('Bcfg2.Server.FileMonitor')
+
def ShouldIgnore(event):
"""Test if the event should be suppresed."""
# FIXME should move event suppression out of the core
@@ -18,6 +19,7 @@ def ShouldIgnore(event):
return True
return False
+
class Event(object):
def __init__(self, request_id, filename, code):
self.requestID = request_id
@@ -29,6 +31,8 @@ class Event(object):
return self.action
available = {}
+
+
class FileMonitor(object):
"""File Monitor baseclass."""
def __init__(self, debug=False):
@@ -78,7 +82,7 @@ class FileMonitor(object):
if lock:
lock.release()
end = time()
- logger.info("Handled %d events in %.03fs" % (count, (end-start)))
+ logger.info("Handled %d events in %.03fs" % (count, (end - start)))
def handle_events_in_interval(self, interval):
end = time() + interval
@@ -91,7 +95,9 @@ class FileMonitor(object):
class FamFam(object):
- """The fam object is a set of callbacks for file alteration events (FAM support)."""
+ """The fam object is a set of callbacks for
+ file alteration events (FAM support).
+ """
def __init__(self):
object.__init__(self)
@@ -164,7 +170,6 @@ class FamFam(object):
return count
-
class Fam(FileMonitor):
"""
The fam object is a set of callbacks for
@@ -195,6 +200,7 @@ class Fam(FileMonitor):
def get_event(self):
return self.fm.nextEvent()
+
class Pseudo(FileMonitor):
"""
The fam object is a set of callbacks for
@@ -213,14 +219,16 @@ class Pseudo(FileMonitor):
def AddMonitor(self, path, obj):
"""add a monitor to path, installing a callback to obj.HandleEvent"""
- handleID = len(self.handles.keys())
+ handleID = len(list(self.handles.keys()))
mode = os.stat(path)[stat.ST_MODE]
handle = Event(handleID, path, 'exists')
if stat.S_ISDIR(mode):
dirList = os.listdir(path)
self.pending_events.append(handle)
for includedFile in dirList:
- self.pending_events.append(Event(handleID, includedFile, 'exists'))
+ self.pending_events.append(Event(handleID,
+ includedFile,
+ 'exists'))
self.pending_events.append(Event(handleID, path, 'endExist'))
else:
self.pending_events.append(Event(handleID, path, 'exists'))
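
For reference, a freestanding sketch (assuming only a readable path, nothing Bcfg2-specific) of what the Pseudo driver's AddMonitor shown above does: it fakes FAM-style notifications by queueing synthetic 'exists'/'endExist' events built from a directory listing.

import os
import stat

class Event(object):
    def __init__(self, request_id, filename, code):
        self.requestID = request_id
        self.filename = filename
        self.action = code

def pseudo_add_monitor(path, pending_events, handles):
    # one handle per monitored path, numbered sequentially
    handleID = len(handles)
    mode = os.stat(path)[stat.ST_MODE]
    pending_events.append(Event(handleID, path, 'exists'))
    if stat.S_ISDIR(mode):
        for name in os.listdir(path):
            pending_events.append(Event(handleID, name, 'exists'))
        pending_events.append(Event(handleID, path, 'endExist'))
    handles[handleID] = path
    return handleID
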
diff --git a/src/lib/Server/Hostbase/ldapauth.py b/src/lib/Server/Hostbase/ldapauth.py
index f2148181f..21b462c86 100644
--- a/src/lib/Server/Hostbase/ldapauth.py
+++ b/src/lib/Server/Hostbase/ldapauth.py
@@ -1,16 +1,18 @@
-"""Checks with LDAP (ActiveDirectory) to see if the current user is an LDAP(AD) user,
-and returns a subset of the user's profile that is needed by Argonne/CIS to
-to set user level privleges in Django"""
-
-__revision__ = '$Revision: 2456 $'
+"""
+Checks with LDAP (ActiveDirectory) to see if the current user is an LDAP(AD)
+user, and returns a subset of the user's profile that is needed by Argonne/CIS
+to set user level privileges in Django
+"""
import os
import ldap
+
class LDAPAUTHError(Exception):
"""LDAPAUTHError is raised when somehting goes boom."""
pass
+
class ldapauth(object):
group_test = False
check_member_of = os.environ['LDAP_CHECK_MBR_OF_GRP']
@@ -20,35 +22,35 @@ class ldapauth(object):
telephoneNumber = None
title = None
memberOf = None
- department = None #this will be a list
+ department = None # this will be a list
mail = None
- extensionAttribute1 = None #badgenumber
+ extensionAttribute1 = None # badgenumber
badge_no = None
- def __init__(self,login,passwd):
+ def __init__(self, login, passwd):
"""get username (if using ldap as auth the
apache env var REMOTE_USER should be used)
from username get user profile from AD/LDAP
"""
#p = self.user_profile(login,passwd)
- d = self.user_dn(login) #success, distname
- print d[1]
+ d = self.user_dn(login) # success, distname
+ print(d[1])
if d[0] == 'success':
pass
- p = self.user_bind(d[1],passwd)
+ p = self.user_bind(d[1], passwd)
if p[0] == 'success':
#parse results
parsed = self.parse_results(p[2])
- print self.department
+ print(self.department)
self.group_test = self.member_of()
securitylevel = self.security_level()
- print "ACCESS LEVEL: " + str(securitylevel)
+ print("ACCESS LEVEL: " + str(securitylevel))
else:
raise LDAPAUTHError(p[2])
else:
raise LDAPAUTHError(p[2])
- def user_profile(self,login,passwd=None):
+ def user_profile(self, login, passwd=None):
"""NOT USED RIGHT NOW"""
ldap_login = "CN=%s" % login
svc_acct = os.environ['LDAP_SVC_ACCT_NAME']
@@ -60,33 +62,35 @@ class ldapauth(object):
try:
conn = ldap.initialize(os.environ['LDAP_URI'])
- conn.bind(svc_acct,svc_pass,ldap.AUTH_SIMPLE)
+ conn.bind(svc_acct, svc_pass, ldap.AUTH_SIMPLE)
result_id = conn.search(search_pth,
- ldap.SCOPE_SUBTREE,
- ldap_login,None)
- result_type,result_data = conn.result(result_id,0)
- return ('success','User profile found',result_data,)
- except ldap.LDAPError,e:
+ ldap.SCOPE_SUBTREE,
+ ldap_login,
+ None)
+ result_type, result_data = conn.result(result_id, 0)
+ return ('success', 'User profile found', result_data,)
+ except ldap.LDAPError, e:
#connection failed
- return ('error','LDAP connect failed',e,)
+ return ('error', 'LDAP connect failed', e,)
- def user_bind(self,distinguishedName,passwd):
+ def user_bind(self, distinguishedName, passwd):
"""Binds to LDAP Server"""
search_pth = os.environ['LDAP_SEARCH_PTH']
try:
conn = ldap.initialize(os.environ['LDAP_URI'])
- conn.bind(distinguishedName,passwd,ldap.AUTH_SIMPLE)
+ conn.bind(distinguishedName, passwd, ldap.AUTH_SIMPLE)
cn = distinguishedName.split(",")
result_id = conn.search(search_pth,
- ldap.SCOPE_SUBTREE,
- cn[0],None)
- result_type,result_data = conn.result(result_id,0)
- return ('success','User profile found',result_data,)
- except ldap.LDAPError,e:
+ ldap.SCOPE_SUBTREE,
+ cn[0],
+ None)
+ result_type, result_data = conn.result(result_id, 0)
+ return ('success', 'User profile found', result_data,)
+ except ldap.LDAPError, e:
#connection failed
- return ('error','LDAP connect failed',e,)
+ return ('error', 'LDAP connect failed', e,)
- def user_dn(self,cn):
+ def user_dn(self, cn):
"""Uses Service Account to get distinguishedName"""
ldap_login = "CN=%s" % cn
svc_acct = os.environ['LDAP_SVC_ACCT_NAME']
@@ -95,19 +99,20 @@ class ldapauth(object):
try:
conn = ldap.initialize(os.environ['LDAP_URI'])
- conn.bind(svc_acct,svc_pass,ldap.AUTH_SIMPLE)
+ conn.bind(svc_acct, svc_pass, ldap.AUTH_SIMPLE)
result_id = conn.search(search_pth,
- ldap.SCOPE_SUBTREE,
- ldap_login,None)
- result_type,result_data = conn.result(result_id,0)
+ ldap.SCOPE_SUBTREE,
+ ldap_login,
+ None)
+ result_type, result_data = conn.result(result_id, 0)
raw_obj = result_data[0][1]
distinguishedName = raw_obj['distinguishedName']
- return ('success',distinguishedName[0],)
- except ldap.LDAPError,e:
+ return ('success', distinguishedName[0],)
+ except ldap.LDAPError, e:
#connection failed
- return ('error','LDAP connect failed',e,)
+ return ('error', 'LDAP connect failed', e,)
- def parse_results(self,user_obj):
+ def parse_results(self, user_obj):
"""Clean up the huge ugly object handed to us in the LDAP query"""
#user_obj is a list formatted like this:
#[('LDAP_DN',{user_dict},),]
@@ -169,4 +174,3 @@ class ldapauth(object):
level = 4
return level
-
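
Note that, unlike Core.py and Plugin.py, the reformatted handlers above keep the Python 2-only "except ldap.LDAPError, e:" form. A sketch (not in this commit) of the same handler written with the sys.exc_info() idiom used elsewhere in this change; python-ldap is assumed:

import sys
import ldap

def bind_service_account(uri, svc_acct, svc_pass):
    try:
        conn = ldap.initialize(uri)
        conn.bind(svc_acct, svc_pass, ldap.AUTH_SIMPLE)
        return ('success', conn)
    except ldap.LDAPError:
        e = sys.exc_info()[1]
        return ('error', 'LDAP connect failed', e)
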
diff --git a/src/lib/Server/Hostbase/media/base.css b/src/lib/Server/Hostbase/media/base.css
index 9196c7d51..ddbf02165 100644
--- a/src/lib/Server/Hostbase/media/base.css
+++ b/src/lib/Server/Hostbase/media/base.css
@@ -1,5 +1,5 @@
-
-/* Import other styles */
-@import url('global.css');
-@import url('layout.css');
-@import url('boxypastel.css');
+
+/* Import other styles */
+@import url('global.css');
+@import url('layout.css');
+@import url('boxypastel.css');
diff --git a/src/lib/Server/Hostbase/media/global.css b/src/lib/Server/Hostbase/media/global.css
index 92d7ce0a3..73451e1bc 100644
--- a/src/lib/Server/Hostbase/media/global.css
+++ b/src/lib/Server/Hostbase/media/global.css
@@ -1,8 +1,8 @@
-body {
- margin:0;
- padding:0;
- font-size:12px;
- font-family:"Lucida Grande","Bitstream Vera Sans",Verdana,Arial,sans-serif;
- color:#000;
- background:#fff;
- }
+body {
+ margin:0;
+ padding:0;
+ font-size:12px;
+ font-family:"Lucida Grande","Bitstream Vera Sans",Verdana,Arial,sans-serif;
+ color:#000;
+ background:#fff;
+ }
diff --git a/src/lib/Server/Hostbase/media/layout.css b/src/lib/Server/Hostbase/media/layout.css
index 99f61da8f..9085cc220 100644
--- a/src/lib/Server/Hostbase/media/layout.css
+++ b/src/lib/Server/Hostbase/media/layout.css
@@ -1,62 +1,62 @@
-/* Page Structure */
-#container { position:absolute; top: 3em; margin-left:1em; margin-right:2em; padding:0; margin-top:1.5em; min-width:
- 650px; }
-#header { width:100%; }
-#content-main { float:left; }
-
-/* HEADER */
-#header {
-background:#000;
-color:#ffc;
-position:absolute;
-}
-#header a:link, #header a:visited { color:white; }
-#header a:hover { text-decoration:underline; }
-#branding h1 { padding:0 10px; font-size:18px; margin:8px 0; font-weight:normal; color:#f4f379; }
-#branding h2 { padding:0 10px; font-size:14px; margin:-8px 0 8px 0; font-weight:normal; color:#ffc; }
-#user-tools { position:absolute; top:0; right:0; padding:1.2em 10px; font-size:11px; text-align:right; }
-
-/*SIDEBAR*/
-#sidebar {
- float:left;
- position: relative;
- width: auto;
- height: 100%;
- margin-top: 3em;
- padding-right: 1.5em;
- padding-left: 1.5em;
- padding-top: 1em;
- padding-bottom:3em;
- background: #000;
- color:ffc;
-}
-
-a.sidebar:link {color: #fff;}
-a.sidebar:active {color: #fff;}
-a.sidebar:visited {color: #fff;}
-a.sidebar:hover {color: #fff;}
-
-ul.sidebar {
- color: #ffc;
- text-decoration: none;
- list-style-type: none;
- text-indent: -1em;
-}
-ul.sidebar-level2 {
- text-indent: -2em;
- list-style-type: none;
- font-size: 11px;
-}
-
-/* ALIGNED FIELDSETS */
-.aligned label { display:block; padding:0 1em 3px 0; float:left; width:8em; }
-.aligned label.inline { display:inline; float:none; }
-.colMS .aligned .vLargeTextField, .colMS .aligned .vXMLLargeTextField { width:350px; }
-form .aligned p, form .aligned ul { margin-left:7em; padding-left:30px; }
-form .aligned table p { margin-left:0; padding-left:0; }
-form .aligned p.help { padding-left:38px; }
-.aligned .vCheckboxLabel { float:none !important; display:inline; padding-left:4px; }
-.colM .aligned .vLargeTextField, colM .aligned .vXMLLargeTextField { width:610px; }
-.checkbox-row p.help { margin-left:0; padding-left:0 !important; }
-
-
+/* Page Structure */
+#container { position:absolute; top: 3em; margin-left:1em; margin-right:2em; padding:0; margin-top:1.5em; min-width:
+ 650px; }
+#header { width:100%; }
+#content-main { float:left; }
+
+/* HEADER */
+#header {
+background:#000;
+color:#ffc;
+position:absolute;
+}
+#header a:link, #header a:visited { color:white; }
+#header a:hover { text-decoration:underline; }
+#branding h1 { padding:0 10px; font-size:18px; margin:8px 0; font-weight:normal; color:#f4f379; }
+#branding h2 { padding:0 10px; font-size:14px; margin:-8px 0 8px 0; font-weight:normal; color:#ffc; }
+#user-tools { position:absolute; top:0; right:0; padding:1.2em 10px; font-size:11px; text-align:right; }
+
+/*SIDEBAR*/
+#sidebar {
+ float:left;
+ position: relative;
+ width: auto;
+ height: 100%;
+ margin-top: 3em;
+ padding-right: 1.5em;
+ padding-left: 1.5em;
+ padding-top: 1em;
+ padding-bottom:3em;
+ background: #000;
+ color:ffc;
+}
+
+a.sidebar:link {color: #fff;}
+a.sidebar:active {color: #fff;}
+a.sidebar:visited {color: #fff;}
+a.sidebar:hover {color: #fff;}
+
+ul.sidebar {
+ color: #ffc;
+ text-decoration: none;
+ list-style-type: none;
+ text-indent: -1em;
+}
+ul.sidebar-level2 {
+ text-indent: -2em;
+ list-style-type: none;
+ font-size: 11px;
+}
+
+/* ALIGNED FIELDSETS */
+.aligned label { display:block; padding:0 1em 3px 0; float:left; width:8em; }
+.aligned label.inline { display:inline; float:none; }
+.colMS .aligned .vLargeTextField, .colMS .aligned .vXMLLargeTextField { width:350px; }
+form .aligned p, form .aligned ul { margin-left:7em; padding-left:30px; }
+form .aligned table p { margin-left:0; padding-left:0; }
+form .aligned p.help { padding-left:38px; }
+.aligned .vCheckboxLabel { float:none !important; display:inline; padding-left:4px; }
+.colM .aligned .vLargeTextField, colM .aligned .vXMLLargeTextField { width:610px; }
+.checkbox-row p.help { margin-left:0; padding-left:0 !important; }
+
+
diff --git a/src/lib/Server/Hostbase/settings.py b/src/lib/Server/Hostbase/settings.py
index a42fd5b2e..c44c7bf16 100644
--- a/src/lib/Server/Hostbase/settings.py
+++ b/src/lib/Server/Hostbase/settings.py
@@ -27,7 +27,7 @@ else:
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
- # ('Your Name', 'your_email@domain.com'),
+ ('Root', 'root'),
)
MANAGERS = ADMINS
diff --git a/src/lib/Server/Lint/Bundles.py b/src/lib/Server/Lint/Bundles.py
new file mode 100644
index 000000000..e90159f7c
--- /dev/null
+++ b/src/lib/Server/Lint/Bundles.py
@@ -0,0 +1,64 @@
+import lxml.etree
+import Bcfg2.Server.Lint
+
+class Bundles(Bcfg2.Server.Lint.ServerPlugin):
+ """ Perform various bundle checks """
+
+ @Bcfg2.Server.Lint.returnErrors
+ def Run(self):
+ """ run plugin """
+ if 'Bundler' in self.core.plugins:
+ self.missing_bundles()
+ for bundle in self.core.plugins['Bundler'].entries.values():
+ if self.HandlesFile(bundle.name):
+ if (Bcfg2.Server.Plugins.Bundler.have_genshi and
+ type(bundle) is
+ Bcfg2.Server.Plugins.SGenshi.SGenshiTemplateFile):
+ self.sgenshi_groups(bundle)
+ else:
+ self.bundle_names(bundle)
+
+ def missing_bundles(self):
+ """ find bundles listed in Metadata but not implemented in Bundler """
+ if self.files is None:
+ # when given a list of files on stdin, this check is
+ # useless, so skip it
+ groupdata = self.metadata.groups_xml.xdata
+ ref_bundles = set([b.get("name")
+ for b in groupdata.findall("//Bundle")])
+
+ allbundles = self.core.plugins['Bundler'].entries.keys()
+ for bundle in ref_bundles:
+ xmlbundle = "%s.xml" % bundle
+ genshibundle = "%s.genshi" % bundle
+ if (xmlbundle not in allbundles and
+ genshibundle not in allbundles):
+ self.LintError("bundle-not-found",
+ "Bundle %s referenced, but does not exist" %
+ bundle)
+
+ def bundle_names(self, bundle):
+ """ verify bundle name attribute matches filename """
+ try:
+ xdata = lxml.etree.XML(bundle.data)
+ except AttributeError:
+ # genshi template
+ xdata = lxml.etree.parse(bundle.template.filepath).getroot()
+
+ fname = bundle.name.split('Bundler/')[1].split('.')[0]
+ bname = xdata.get('name')
+ if fname != bname:
+ self.LintError("inconsistent-bundle-name",
+ "Inconsistent bundle name: filename is %s, bundle name is %s" %
+ (fname, bname))
+
+ def sgenshi_groups(self, bundle):
+ """ ensure that Genshi Bundles do not include <Group> tags,
+ which are not supported """
+ xdata = lxml.etree.parse(bundle.name)
+ groups = [self.RenderXML(g)
+ for g in xdata.getroottree().findall("//Group")]
+ if groups:
+ self.LintError("group-tag-not-allowed",
+ "<Group> tag is not allowed in SGenshi Bundle:\n%s" %
+ "\n".join(groups))
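
A compact standalone restatement (file path and XML layout assumed, no Bcfg2 internals) of the bundle_names() check above: the name attribute on the top-level Bundle element should match the file's basename.

import os.path
import lxml.etree

def bundle_name_matches(path):
    xdata = lxml.etree.parse(path).getroot()
    fname = os.path.splitext(os.path.basename(path))[0]
    bname = xdata.get('name')
    # e.g. "ntp.xml" whose root element is <Bundle name="ntp"> is consistent
    return fname == bname
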
diff --git a/src/lib/Server/Lint/Comments.py b/src/lib/Server/Lint/Comments.py
new file mode 100644
index 000000000..8e86cc564
--- /dev/null
+++ b/src/lib/Server/Lint/Comments.py
@@ -0,0 +1,188 @@
+import os.path
+import lxml.etree
+import Bcfg2.Server.Lint
+
+class Comments(Bcfg2.Server.Lint.ServerPlugin):
+ """ check files for various required headers """
+ def __init__(self, *args, **kwargs):
+ Bcfg2.Server.Lint.ServerPlugin.__init__(self, *args, **kwargs)
+ self.config_cache = {}
+
+ @Bcfg2.Server.Lint.returnErrors
+ def Run(self):
+ self.check_bundles()
+ self.check_properties()
+ self.check_metadata()
+ self.check_cfg()
+ self.check_infoxml()
+ self.check_probes()
+
+ def required_keywords(self, rtype):
+ """ given a file type, fetch the list of required VCS keywords
+ from the bcfg2-lint config """
+ return self.required_items(rtype, "keyword")
+
+ def required_comments(self, rtype):
+ """ given a file type, fetch the list of required comments
+ from the bcfg2-lint config """
+ return self.required_items(rtype, "comment")
+
+ def required_items(self, rtype, itype):
+ """ given a file type and item type (comment or keyword),
+ fetch the list of required items from the bcfg2-lint config """
+ if itype not in self.config_cache:
+ self.config_cache[itype] = {}
+
+ if rtype not in self.config_cache[itype]:
+ rv = []
+ global_item = "global_%ss" % itype
+ if global_item in self.config:
+ rv.extend(self.config[global_item].split(","))
+
+ item = "%s_%ss" % (rtype.lower(), itype)
+ if item in self.config:
+ if self.config[item]:
+ rv.extend(self.config[item].split(","))
+ else:
+ # config explicitly specifies nothing
+ rv = []
+ self.config_cache[itype][rtype] = rv
+ return self.config_cache[itype][rtype]
+
+ def check_bundles(self):
+ """ check bundle files for required headers """
+ if 'Bundler' in self.core.plugins:
+ for bundle in self.core.plugins['Bundler'].entries.values():
+ xdata = None
+ rtype = ""
+ try:
+ xdata = lxml.etree.XML(bundle.data)
+ rtype = "bundler"
+ except AttributeError:
+ xdata = lxml.etree.parse(bundle.template.filepath).getroot()
+ rtype = "sgenshi"
+
+ self.check_xml(bundle.name, xdata, rtype)
+
+ def check_properties(self):
+ """ check properties files for required headers """
+ if 'Properties' in self.core.plugins:
+ props = self.core.plugins['Properties']
+ for propfile, pdata in props.store.entries.items():
+ if os.path.splitext(propfile)[1] == ".xml":
+ self.check_xml(pdata.name, pdata.data, 'properties')
+
+ def check_metadata(self):
+ """ check metadata files for required headers """
+ if self.has_all_xincludes("groups.xml"):
+ self.check_xml(os.path.join(self.metadata.data, "groups.xml"),
+ self.metadata.groups_xml.data,
+ "metadata")
+ if self.has_all_xincludes("clients.xml"):
+ self.check_xml(os.path.join(self.metadata.data, "clients.xml"),
+ self.metadata.clients_xml.data,
+ "metadata")
+
+ def check_cfg(self):
+ """ check Cfg files for required headers """
+ if 'Cfg' in self.core.plugins:
+ for entryset in self.core.plugins['Cfg'].entries.values():
+ for entry in entryset.entries.values():
+ if entry.name.endswith(".genshi"):
+ rtype = "tgenshi"
+ else:
+ rtype = "cfg"
+ self.check_plaintext(entry.name, entry.data, rtype)
+
+ def check_infoxml(self):
+ """ check info.xml files for required headers """
+ if 'Cfg' in self.core.plugins:
+ for entryset in self.core.plugins['Cfg'].entries.items():
+ if (hasattr(entryset, "infoxml") and
+ entryset.infoxml is not None):
+ self.check_xml(entryset.infoxml.name,
+ entryset.infoxml.pnode.data,
+ "infoxml")
+
+ def check_probes(self):
+ """ check probes for required headers """
+ if 'Probes' in self.core.plugins:
+ for probe in self.core.plugins['Probes'].probes.entries.values():
+ self.check_plaintext(probe.name, probe.data, "probes")
+
+ def check_xml(self, filename, xdata, rtype):
+ """ check generic XML files for required headers """
+ self.check_lines(filename,
+ [str(el)
+ for el in xdata.getiterator(lxml.etree.Comment)],
+ rtype)
+
+ def check_plaintext(self, filename, data, rtype):
+ """ check generic plaintext files for required headers """
+ self.check_lines(filename, data.splitlines(), rtype)
+
+ def check_lines(self, filename, lines, rtype):
+ """ generic header check for a set of lines """
+ if self.HandlesFile(filename):
+ # found is trivalent:
+ # False == not found
+ # None == found but not expanded
+ # True == found and expanded
+ found = dict((k, False) for k in self.required_keywords(rtype))
+
+ for line in lines:
+ # we check for both '$<keyword>:' and '$<keyword>$' to see
+ # if the keyword just hasn't been expanded
+ for (keyword, status) in found.items():
+ if not status:
+ if '$%s:' % keyword in line:
+ found[keyword] = True
+ elif '$%s$' % keyword in line:
+ found[keyword] = None
+
+ unexpanded = [keyword for (keyword, status) in found.items()
+ if status is None]
+ if unexpanded:
+ self.LintError("unexpanded-keywords",
+ "%s: Required keyword(s) found but not expanded: %s" %
+ (filename, ", ".join(unexpanded)))
+ missing = [keyword for (keyword, status) in found.items()
+ if status is False]
+ if missing:
+ self.LintError("keywords-not-found",
+ "%s: Required keyword(s) not found: $%s$" %
+ (filename, "$, $".join(missing)))
+
+ # next, check for required comments. found is just
+ # boolean
+ found = dict((k, False) for k in self.required_comments(rtype))
+
+ for line in lines:
+ for (comment, status) in found.items():
+ if not status:
+ found[comment] = comment in line
+
+ missing = [comment for (comment, status) in found.items()
+ if status is False]
+ if missing:
+ self.LintError("comments-not-found",
+ "%s: Required comment(s) not found: %s" %
+ (filename, ", ".join(missing)))
+
+ def has_all_xincludes(self, mfile):
+ """ return true if self.files includes all XIncludes listed in
+ the specified metadata type, false otherwise"""
+ if self.files is None:
+ return True
+ else:
+ path = os.path.join(self.metadata.data, mfile)
+ if path in self.files:
+ xdata = lxml.etree.parse(path)
+ for el in xdata.findall('./{http://www.w3.org/2001/XInclude}include'):
+ if not self.has_all_xincludes(el.get('href')):
+ self.LintError("broken-xinclude-chain",
+ "Broken XInclude chain: could not include %s" % path)
+ return False
+
+ return True
+
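
The keyword scan above uses a trivalent "found" dict; a self-contained sketch of the same logic (the sample lines are made up):

def scan_keywords(lines, keywords):
    # False = not found, None = found but unexpanded ($Rev$),
    # True = found and expanded ($Rev: 1234 $)
    found = dict((k, False) for k in keywords)
    for line in lines:
        for keyword, status in found.items():
            if not status:
                if '$%s:' % keyword in line:
                    found[keyword] = True
                elif '$%s$' % keyword in line:
                    found[keyword] = None
    unexpanded = [k for k, s in found.items() if s is None]
    missing = [k for k, s in found.items() if s is False]
    return unexpanded, missing

print(scan_keywords(["# $Id$", "# $Revision: 5678 $"], ["Id", "Revision"]))
# -> (['Id'], [])
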
diff --git a/src/lib/Server/Lint/Duplicates.py b/src/lib/Server/Lint/Duplicates.py
new file mode 100644
index 000000000..517f0dd7b
--- /dev/null
+++ b/src/lib/Server/Lint/Duplicates.py
@@ -0,0 +1,82 @@
+import os.path
+import lxml.etree
+import Bcfg2.Server.Lint
+
+class Duplicates(Bcfg2.Server.Lint.ServerPlugin):
+ """ Find duplicate clients, groups, etc. """
+ def __init__(self, *args, **kwargs):
+ Bcfg2.Server.Lint.ServerPlugin.__init__(self, *args, **kwargs)
+ self.groups_xdata = None
+ self.clients_xdata = None
+ self.load_xdata()
+
+ @Bcfg2.Server.Lint.returnErrors
+ def Run(self):
+ """ run plugin """
+ # only run this plugin if we were not given a list of files.
+ # not only is it marginally silly to run this plugin with a
+ # partial list of files, it turns out to be really freaking
+ # hard to get only a fragment of group or client metadata
+ if self.groups_xdata is not None:
+ self.duplicate_groups()
+ self.duplicate_defaults()
+ if self.clients_xdata is not None:
+ self.duplicate_clients()
+
+ def load_xdata(self):
+ """ attempt to load XML data for groups and clients. only
+ actually load data if all documents reference in XIncludes can
+ be found in self.files"""
+ if self.has_all_xincludes("groups.xml"):
+ self.groups_xdata = self.metadata.groups_xml.xdata
+ if self.has_all_xincludes("clients.xml"):
+ self.clients_xdata = self.metadata.clients_xml.xdata
+
+ def duplicate_groups(self):
+ """ find duplicate groups """
+ self.duplicate_entries(self.groups_xdata.xpath('//Groups/Group'),
+ 'group')
+
+ def duplicate_clients(self):
+ """ find duplicate clients """
+ self.duplicate_entries(self.clients_xdata.xpath('//Clients/Client'),
+ 'client')
+
+ def duplicate_entries(self, data, etype):
+ """ generic duplicate entry finder """
+ seen = {}
+ for el in data:
+ if el.get('name') not in seen:
+ seen[el.get('name')] = el
+ else:
+ self.LintError("duplicate-%s" % etype,
+ "Duplicate %s '%s':\n%s\n%s" %
+ (etype, el.get('name'),
+ self.RenderXML(seen[el.get('name')]),
+ self.RenderXML(el)))
+
+ def duplicate_defaults(self):
+ """ check for multiple default group definitions """
+ default_groups = [g for g in self.groups_xdata.findall('.//Group')
+ if g.get('default') == 'true']
+ if len(default_groups) > 1:
+ self.LintError("multiple-default-groups",
+ "Multiple default groups defined: %s" %
+ ",".join([g.get('name') for g in default_groups]))
+
+ def has_all_xincludes(self, mfile):
+ """ return true if self.files includes all XIncludes listed in
+ the specified metadata type, false otherwise"""
+ if self.files is None:
+ return True
+ else:
+ path = os.path.join(self.metadata.data, mfile)
+ if path in self.files:
+ xdata = lxml.etree.parse(path)
+ for el in xdata.findall('./{http://www.w3.org/2001/XInclude}include'):
+ if not self.has_all_xincludes(el.get('href')):
+ self.LintError("broken-xinclude-chain",
+ "Broken XInclude chain: could not include %s" % path)
+ return False
+
+ return True
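
A minimal, self-contained version (toy XML, not real metadata) of the duplicate_entries() pattern above:

import lxml.etree

def find_duplicates(elements):
    seen = {}
    dups = []
    for el in elements:
        name = el.get('name')
        if name in seen:
            dups.append(name)
        else:
            seen[name] = el
    return dups

xdata = lxml.etree.XML('<Clients><Client name="a"/><Client name="a"/></Clients>')
print(find_duplicates(xdata.findall('Client')))   # ['a']
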
diff --git a/src/lib/Server/Lint/InfoXML.py b/src/lib/Server/Lint/InfoXML.py
new file mode 100644
index 000000000..7725ad748
--- /dev/null
+++ b/src/lib/Server/Lint/InfoXML.py
@@ -0,0 +1,43 @@
+import os.path
+import Bcfg2.Options
+import Bcfg2.Server.Lint
+
+class InfoXML(Bcfg2.Server.Lint.ServerPlugin):
+ """ ensure that all config files have an info.xml file"""
+
+ @Bcfg2.Server.Lint.returnErrors
+ def Run(self):
+ if 'Cfg' in self.core.plugins:
+ for filename, entryset in self.core.plugins['Cfg'].entries.items():
+ infoxml_fname = os.path.join(entryset.path, "info.xml")
+ if self.HandlesFile(infoxml_fname):
+ if (hasattr(entryset, "infoxml") and
+ entryset.infoxml is not None):
+ self.check_infoxml(infoxml_fname, entryset.infoxml.pnode.data)
+ else:
+ self.LintError("no-infoxml",
+ "No info.xml found for %s" % filename)
+
+ def check_infoxml(self, fname, xdata):
+ for info in xdata.getroottree().findall("//Info"):
+ required = []
+ if "required_attrs" in self.config:
+ required = self.config["required_attrs"].split(",")
+
+ missing = [attr for attr in required if info.get(attr) is None]
+ if missing:
+ self.LintError("required-infoxml-attrs-missing",
+ "Required attribute(s) %s not found in %s:%s" %
+ (",".join(missing), fname,
+ self.RenderXML(info)))
+
+ if ((Bcfg2.Options.MDATA_PARANOID.value and
+ info.get("paranoid") is not None and
+ info.get("paranoid").lower() == "false") or
+ (not Bcfg2.Options.MDATA_PARANOID.value and
+ (info.get("paranoid") is None or
+ info.get("paranoid").lower() != "true"))):
+ self.LintError("paranoid-false",
+ "Paranoid must be true in %s:%s" %
+ (fname, self.RenderXML(info)))
+
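
The paranoid-false test above is easiest to read as a truth table; a standalone sketch, where global_paranoid stands in for Bcfg2.Options.MDATA_PARANOID.value:

def paranoid_ok(global_paranoid, info_paranoid):
    # When paranoid is the global default, only an explicit "false" is flagged;
    # otherwise, anything short of an explicit "true" is flagged.
    if global_paranoid:
        return not (info_paranoid is not None and
                    info_paranoid.lower() == "false")
    return info_paranoid is not None and info_paranoid.lower() == "true"

# paranoid_ok(True, None)     -> True   (no lint error)
# paranoid_ok(True, "false")  -> False  (lint error)
# paranoid_ok(False, None)    -> False  (lint error)
# paranoid_ok(False, "true")  -> True   (no lint error)
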
diff --git a/src/lib/Server/Lint/Pkgmgr.py b/src/lib/Server/Lint/Pkgmgr.py
new file mode 100644
index 000000000..39c601617
--- /dev/null
+++ b/src/lib/Server/Lint/Pkgmgr.py
@@ -0,0 +1,38 @@
+import Bcfg2.Server.Lint
+
+class Pkgmgr(Bcfg2.Server.Lint.ServerPlugin):
+ """ find duplicate Pkgmgr entries with the same priority """
+
+ @Bcfg2.Server.Lint.returnErrors
+ def Run(self):
+ if 'Pkgmgr' not in self.core.plugins:
+ self.logger.info("Pkgmgr server plugin is not enabled, skipping Pkgmgr lint checks")
+ return
+
+ pset = set()
+ for plist in self.core.plugins['Pkgmgr'].entries.values():
+ if self.HandlesFile(plist.name):
+ xdata = plist.data
+ # get priority, type, group
+ priority = xdata.getroot().get('priority')
+ ptype = xdata.getroot().get('type')
+ for pkg in xdata.findall("//Package"):
+ if pkg.getparent().tag == 'Group':
+ grp = pkg.getparent().get('name')
+ if (type(grp) is not str and
+ grp.getparent().tag == 'Group'):
+ pgrp = grp.getparent().get('name')
+ else:
+ pgrp = 'none'
+ else:
+ grp = 'none'
+ pgrp = 'none'
+ ptuple = (pkg.get('name'), priority, ptype, grp, pgrp)
+ # check if package is already listed with same
+ # priority, type, grp
+ if ptuple in pset:
+ self.LintError("duplicate-package",
+ "Duplicate Package %s, priority:%s, type:%s" %
+ (pkg.get('name'), priority, ptype))
+ else:
+ pset.add(ptuple)
diff --git a/src/lib/Server/Lint/RequiredAttrs.py b/src/lib/Server/Lint/RequiredAttrs.py
new file mode 100644
index 000000000..cbb4395c4
--- /dev/null
+++ b/src/lib/Server/Lint/RequiredAttrs.py
@@ -0,0 +1,72 @@
+import os.path
+import lxml.etree
+import Bcfg2.Server.Lint
+
+class RequiredAttrs(Bcfg2.Server.Lint.ServerPlugin):
+ """ verify attributes for configuration entries (as defined in
+ doc/server/configurationentries) """
+
+ def __init__(self, *args, **kwargs):
+ Bcfg2.Server.Lint.ServerPlugin.__init__(self, *args, **kwargs)
+ self.required_attrs = {
+ 'device': ['name', 'owner', 'group', 'dev_type'],
+ 'directory': ['name', 'owner', 'group', 'perms'],
+ 'file': ['name', 'owner', 'group', 'perms'],
+ 'hardlink': ['name', 'to'],
+ 'symlink': ['name', 'to'],
+ 'ignore': ['name'],
+ 'nonexistent': ['name'],
+ 'permissions': ['name', 'owner', 'group', 'perms']}
+
+ @Bcfg2.Server.Lint.returnErrors
+ def Run(self):
+ self.check_rules()
+ self.check_bundles()
+
+ def check_rules(self):
+ """ check Rules for Path entries with missing attrs """
+ if 'Rules' in self.core.plugins:
+ for rules in self.core.plugins['Rules'].entries.values():
+ xdata = rules.pnode.data
+ for path in xdata.xpath("//Path"):
+ self.check_entry(path, os.path.join(self.config['repo'],
+ rules.name))
+
+ def check_bundles(self):
+ """ check bundles for BoundPath entries with missing attrs """
+ if 'Bundler' in self.core.plugins:
+ for bundle in self.core.plugins['Bundler'].entries.values():
+ try:
+ xdata = lxml.etree.XML(bundle.data)
+ except AttributeError:
+ xdata = lxml.etree.parse(bundle.template.filepath).getroot()
+
+ for path in xdata.xpath("//BoundPath"):
+ self.check_entry(path, bundle.name)
+
+ def check_entry(self, entry, filename):
+ """ generic entry check """
+ if self.HandlesFile(filename):
+ pathname = entry.get('name')
+ pathtype = entry.get('type')
+ pathset = set(entry.attrib.keys())
+ try:
+ required_attrs = set(self.required_attrs[pathtype] + ['type'])
+ except KeyError:
+ self.LintError("unknown-path-type",
+ "Unknown path type %s: %s" %
+ (pathtype, self.RenderXML(entry)))
+
+ if 'dev_type' in required_attrs:
+ dev_type = entry.get('dev_type')
+ if dev_type in ['block', 'char']:
+ # check if major/minor are specified
+ required_attrs |= set(['major', 'minor'])
+ if not pathset.issuperset(required_attrs):
+ self.LintError("required-attrs-missing",
+ "The required attributes %s are missing for %s %sin %s:\n%s" %
+ (",".join([attr
+ for attr in
+ required_attrs.difference(pathset)]),
+ entry.tag, pathname, filename,
+ self.RenderXML(entry)))
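
A trimmed-down, runnable restatement of the check_entry() set arithmetic above; REQUIRED is a two-entry excerpt of the table built in __init__:

import lxml.etree

REQUIRED = {'file': ['name', 'owner', 'group', 'perms'],
            'symlink': ['name', 'to']}

def missing_attrs(entry):
    required = set(REQUIRED[entry.get('type')] + ['type'])
    if entry.get('dev_type') in ['block', 'char']:
        required |= set(['major', 'minor'])
    return required.difference(entry.attrib.keys())

path = lxml.etree.XML('<Path type="file" name="/etc/motd" owner="root"/>')
print(sorted(missing_attrs(path)))   # ['group', 'perms']
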
diff --git a/src/lib/Server/Lint/Validate.py b/src/lib/Server/Lint/Validate.py
new file mode 100644
index 000000000..c87c55ee9
--- /dev/null
+++ b/src/lib/Server/Lint/Validate.py
@@ -0,0 +1,186 @@
+import glob
+import lxml.etree
+import os
+import fnmatch
+import Bcfg2.Options
+import Bcfg2.Server.Lint
+from subprocess import Popen, PIPE, STDOUT
+
+class Validate(Bcfg2.Server.Lint.ServerlessPlugin):
+ """ Ensure that the repo validates """
+
+ def __init__(self, *args, **kwargs):
+ Bcfg2.Server.Lint.ServerlessPlugin.__init__(self, *args, **kwargs)
+ self.filesets = {"metadata:groups":"%s/metadata.xsd",
+ "metadata:clients":"%s/clients.xsd",
+ "info":"%s/info.xsd",
+ "%s/Bundler/*.{xml,genshi}":"%s/bundle.xsd",
+ "%s/Pkgmgr/*.xml":"%s/pkglist.xsd",
+ "%s/Base/*.xml":"%s/base.xsd",
+ "%s/Rules/*.xml":"%s/rules.xsd",
+ "%s/etc/report-configuration.xml":"%s/report-configuration.xsd",
+ "%s/Svcmgr/*.xml":"%s/services.xsd",
+ "%s/Deps/*.xml":"%s/deps.xsd",
+ "%s/Decisions/*.xml":"%s/decisions.xsd",
+ "%s/Packages/config.xml":"%s/packages.xsd",
+ "%s/GroupPatterns/config.xml":"%s/grouppatterns.xsd",
+ "%s/NagiosGen/config.xml":"%s/nagiosgen.xsd"}
+
+ self.filelists = {}
+ self.get_filelists()
+
+ @Bcfg2.Server.Lint.returnErrors
+ def Run(self):
+ schemadir = self.config['schema']
+
+ for path, schemaname in self.filesets.items():
+ try:
+ filelist = self.filelists[path]
+ except KeyError:
+ filelist = []
+
+ if filelist:
+ # avoid loading schemas for empty file lists
+ try:
+ schema = lxml.etree.XMLSchema(lxml.etree.parse(schemaname %
+ schemadir))
+ except:
+ self.LintError("schema-failed-to-parse",
+ "Failed to process schema %s" %
+ (schemaname % schemadir))
+ continue
+ for filename in filelist:
+ self.validate(filename, schemaname % schemadir,
+ schema=schema)
+
+ self.check_properties()
+
+ def check_properties(self):
+ """ check Properties files against their schemas """
+ for filename in self.filelists['props']:
+ schemafile = "%s.xsd" % os.path.splitext(filename)[0]
+ if os.path.exists(schemafile):
+ self.validate(filename, schemafile)
+ else:
+ self.LintError("properties-schema-not-found",
+ "No schema found for %s" % filename)
+
+ def validate(self, filename, schemafile, schema=None):
+ """validate a file against the given lxml.etree.Schema.
+ return True on success, False on failure """
+ if schema is None:
+ # if no schema object was provided, instantiate one
+ try:
+ schema = lxml.etree.XMLSchema(lxml.etree.parse(schemafile))
+ except:
+ self.LintError("schema-failed-to-parse",
+ "Failed to process schema %s" % schemafile)
+ return False
+
+ try:
+ datafile = lxml.etree.parse(filename)
+ except SyntaxError:
+ lint = Popen(["xmllint", filename], stdout=PIPE, stderr=STDOUT)
+ self.LintError("xml-failed-to-parse",
+ "%s fails to parse:\n%s" % (filename,
+ lint.communicate()[0]))
+ lint.wait()
+ return False
+ except IOError:
+ self.LintError("xml-failed-to-read",
+ "Failed to open file %s" % filename)
+ return False
+
+ if not schema.validate(datafile):
+ cmd = ["xmllint"]
+ if self.files is None:
+ cmd.append("--xinclude")
+ cmd.extend(["--noout", "--schema", schemafile, filename])
+ lint = Popen(cmd, stdout=PIPE, stderr=STDOUT)
+ output = lint.communicate()[0]
+ if lint.wait():
+ self.LintError("xml-failed-to-verify",
+ "%s fails to verify:\n%s" % (filename, output))
+ return False
+ return True
+
+ def get_filelists(self):
+ """ get lists of different kinds of files to validate """
+ if self.files is not None:
+ listfiles = lambda p: fnmatch.filter(self.files, p % "*")
+ else:
+ listfiles = lambda p: glob.glob(p % self.config['repo'])
+
+ for path in self.filesets.keys():
+ if path.startswith("metadata:"):
+ mtype = path.split(":")[1]
+ self.filelists[path] = self.get_metadata_list(mtype)
+ elif path == "info":
+ if self.files is not None:
+ self.filelists[path] = \
+ [f for f in self.files
+ if os.path.basename(f) == 'info.xml']
+ else: # self.files is None
+ self.filelists[path] = []
+ for infodir in ['Cfg', 'TGenshi', 'TCheetah']:
+ for root, dirs, files in os.walk('%s/%s' %
+ (self.config['repo'],
+ infodir)):
+ self.filelists[path].extend([os.path.join(root, f)
+ for f in files
+ if f == 'info.xml'])
+ else:
+ self.filelists[path] = listfiles(path)
+
+ self.filelists['props'] = listfiles("%s/Properties/*.xml")
+ all_metadata = listfiles("%s/Metadata/*.xml")
+
+ # if there are other files in Metadata that aren't xincluded
+ # from clients.xml or groups.xml, we can't verify them. warn
+ # about those.
+ for fname in all_metadata:
+ if (fname not in self.filelists['metadata:groups'] and
+ fname not in self.filelists['metadata:clients']):
+ self.LintError("broken-xinclude-chain",
+ "Broken XInclude chain: Could not determine file type of %s" % fname)
+
+ def get_metadata_list(self, mtype):
+ """ get all metadata files for the specified type (clients or
+ group) """
+ if self.files is not None:
+ rv = fnmatch.filter(self.files, "*/Metadata/%s.xml" % mtype)
+ else:
+ rv = glob.glob("%s/Metadata/%s.xml" % (self.config['repo'], mtype))
+
+ # attempt to follow XIncludes. if the top-level files aren't
+ # listed in self.files, though, there's really nothing we can
+ # do to guess what a file in Metadata is
+ if rv:
+ rv.extend(self.follow_xinclude(rv[0]))
+
+ return rv
+
+ def follow_xinclude(self, xfile):
+ """ follow xincludes in the given file """
+ xdata = lxml.etree.parse(xfile)
+ included = set([ent.get('href') for ent in
+ xdata.findall('./{http://www.w3.org/2001/XInclude}include')])
+ rv = []
+
+ while included:
+ try:
+ filename = included.pop()
+ except KeyError:
+ continue
+
+ path = os.path.join(os.path.dirname(xfile), filename)
+ if self.HandlesFile(path):
+ rv.append(path)
+ groupdata = lxml.etree.parse(path)
+ [included.add(el.get('href'))
+ for el in
+ groupdata.findall('./{http://www.w3.org/2001/XInclude}include')]
+ included.discard(filename)
+
+ return rv
+
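
The core of Validate.validate() minus the xmllint fallback, as a standalone sketch; filename and schemafile are whatever XML/XSD pair you point it at:

import lxml.etree

def validate_xml(filename, schemafile):
    schema = lxml.etree.XMLSchema(lxml.etree.parse(schemafile))
    datafile = lxml.etree.parse(filename)
    if schema.validate(datafile):
        return True
    for err in schema.error_log:
        # report each schema violation with its line number
        print("%s:%s: %s" % (filename, err.line, err.message))
    return False
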
diff --git a/src/lib/Server/Lint/__init__.py b/src/lib/Server/Lint/__init__.py
new file mode 100644
index 000000000..3b89d1f9e
--- /dev/null
+++ b/src/lib/Server/Lint/__init__.py
@@ -0,0 +1,155 @@
+__revision__ = '$Revision$'
+
+__all__ = ['Bundles',
+ 'Comments',
+ 'Duplicates',
+ 'InfoXML',
+ 'Pkgmgr',
+ 'RequiredAttrs',
+ 'Validate']
+
+import logging
+import os.path
+from copy import copy
+import lxml.etree
+import Bcfg2.Logger
+
+def returnErrors(fn):
+ """ Decorator for Run method that returns error counts """
+ return fn
+
+class Plugin (object):
+ """ base class for ServerlessPlugin and ServerPlugin """
+
+ def __init__(self, config, errorhandler=None, files=None):
+ self.files = files
+ self.config = config
+ self.logger = logging.getLogger('bcfg2-lint')
+ if errorhandler is None:
+ self.errorHandler = ErrorHandler()
+ else:
+ self.errorHandler = errorhandler
+
+ def Run(self):
+ """ run the plugin. must be overloaded by child classes """
+ pass
+
+ def HandlesFile(self, fname):
+ """ returns true if the given file should be handled by the
+ plugin according to the files list, false otherwise """
+ return (self.files is None or
+ fname in self.files or
+ os.path.join(self.config['repo'], fname) in self.files or
+ os.path.abspath(fname) in self.files or
+ os.path.abspath(os.path.join(self.config['repo'],
+ fname)) in self.files)
+
+ def LintError(self, err, msg):
+ self.errorHandler.dispatch(err, msg)
+
+ def RenderXML(self, element):
+ """render an XML element for error output -- line number
+ prefixed, no children"""
+ xml = None
+ if len(element) or element.text:
+ el = copy(element)
+ if el.text:
+ el.text = '...'
+ [el.remove(c) for c in el.iterchildren()]
+ xml = lxml.etree.tostring(el).strip()
+ else:
+ xml = lxml.etree.tostring(element).strip()
+ return " line %s: %s" % (element.sourceline, xml)
+
+
+class ErrorHandler (object):
+ # how to handle different errors by default
+ _errors = {"no-infoxml":"warning",
+ "paranoid-false":"warning",
+ "bundle-not-found":"error",
+ "inconsistent-bundle-name":"warning",
+ "group-tag-not-allowed":"error",
+ "unexpanded-keywords":"warning",
+ "keywords-not-found":"warning",
+ "comments-not-found":"warning",
+ "broken-xinclude-chain":"warning",
+ "duplicate-client":"error",
+ "duplicate-group":"error",
+ "duplicate-package":"error",
+ "multiple-default-groups":"error",
+ "required-infoxml-attrs-missing":"error",
+ "unknown-path-type":"error",
+ "required-attrs-missing":"error",
+ "schema-failed-to-parse":"warning",
+ "properties-schema-not-found":"warning",
+ "xml-failed-to-parse":"error",
+ "xml-failed-to-read":"error",
+ "xml-failed-to-verify":"error",}
+
+ def __init__(self, config=None):
+ self.errors = 0
+ self.warnings = 0
+
+ self.logger = logging.getLogger('bcfg2-lint')
+
+ self._handlers = {}
+ if config is not None:
+ for err, action in config.items():
+ if "warn" in action:
+ self._handlers[err] = self.warn
+ elif "err" in action:
+ self._handlers[err] = self.error
+ else:
+ self._handlers[err] = self.debug
+
+ for err, action in self._errors.items():
+ if err not in self._handlers:
+ if "warn" in action:
+ self._handlers[err] = self.warn
+ elif "err" in action:
+ self._handlers[err] = self.error
+ else:
+ self._handlers[err] = self.debug
+
+ def dispatch(self, err, msg):
+ if err in self._handlers:
+ self._handlers[err](msg)
+ self.logger.debug(" (%s)" % err)
+ else:
+ self.logger.info("Unknown error %s" % err)
+
+ def error(self, msg):
+ """ log an error condition """
+ self.errors += 1
+ lines = msg.splitlines()
+ self.logger.error("ERROR: %s" % lines.pop())
+ [self.logger.error(" %s" % l) for l in lines]
+
+ def warn(self, msg):
+ """ log a warning condition """
+ self.warnings += 1
+ lines = msg.splitlines()
+ self.logger.warning("WARNING: %s" % lines.pop())
+ [self.logger.warning(" %s" % l) for l in lines]
+
+ def debug(self, msg):
+ """ log a silent/debug condition """
+ lines = msg.splitlines()
+ [self.logger.debug("%s" % l) for l in lines]
+
+
+class ServerlessPlugin (Plugin):
+ """ base class for plugins that are run before the server starts
+ up (i.e., plugins that check things that may prevent the server
+ from starting up) """
+ pass
+
+
+class ServerPlugin (Plugin):
+ """ base class for plugins that check things that require the
+ running Bcfg2 server """
+ def __init__(self, lintCore, config, **kwargs):
+ Plugin.__init__(self, config, **kwargs)
+ self.core = lintCore
+ self.logger = self.core.logger
+ self.metadata = self.core.metadata
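
How the pieces above fit together, as a hypothetical plugin (NoFoo, its check, and the file list are invented for illustration): a plugin calls LintError(), and the ErrorHandler maps the error key to warn/error/debug.

import Bcfg2.Server.Lint

class NoFoo(Bcfg2.Server.Lint.ServerlessPlugin):
    """ hypothetical check: complain about any file literally named foo """

    @Bcfg2.Server.Lint.returnErrors
    def Run(self):
        for fname in (self.files or []):
            if fname.endswith("/foo"):
                # "no-infoxml" is reused here only because it is a key the
                # default ErrorHandler table above already knows about
                self.LintError("no-infoxml",
                               "Suspicious file in repository: %s" % fname)

handler = Bcfg2.Server.Lint.ErrorHandler()
NoFoo({'repo': '/var/lib/bcfg2'}, errorhandler=handler,
      files=['/var/lib/bcfg2/Cfg/foo']).Run()
print(handler.warnings)   # 1 -- "no-infoxml" defaults to a warning
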
diff --git a/src/lib/Server/Plugin.py b/src/lib/Server/Plugin.py
index 73d054409..cd2b63656 100644
--- a/src/lib/Server/Plugin.py
+++ b/src/lib/Server/Plugin.py
@@ -8,13 +8,23 @@ import os
import pickle
import posixpath
import re
-import Queue
+import sys
import threading
from lxml.etree import XML, XMLSyntaxError
import Bcfg2.Options
+# py3k compatibility
+if sys.hexversion >= 0x03000000:
+ from functools import reduce
+ from io import FileIO as BUILTIN_FILE_TYPE
+else:
+ BUILTIN_FILE_TYPE = file
+from Bcfg2.Bcfg2Py3k import Queue
+from Bcfg2.Bcfg2Py3k import Empty
+from Bcfg2.Bcfg2Py3k import Full
+
# grab default metadata info from bcfg2.conf
opts = {'owner': Bcfg2.Options.MDATA_OWNER,
'group': Bcfg2.Options.MDATA_GROUP,
@@ -38,14 +48,17 @@ info_regex = re.compile( \
'paranoid:(\s)*(?P<paranoid>\S+)|' +
'perms:(\s)*(?P<perms>\w+)|')
+
class PluginInitError(Exception):
"""Error raised in cases of Plugin initialization errors."""
pass
+
class PluginExecutionError(Exception):
"""Error raised in case of Plugin execution errors."""
pass
+
class Plugin(object):
"""This is the base class for all Bcfg2 Server plugins.
Several attributes must be defined in the subclass:
@@ -90,6 +103,7 @@ class Plugin(object):
def shutdown(self):
self.running = False
+
class Generator(object):
"""Generator plugins contribute to literal client configurations."""
def HandlesEntry(self, entry, metadata):
@@ -100,20 +114,24 @@ class Generator(object):
"""This is the slow-path handler for configuration entry binding."""
raise PluginExecutionError
+
class Structure(object):
"""Structure Plugins contribute to abstract client configurations."""
def BuildStructures(self, metadata):
"""Return a list of abstract goal structures for client."""
raise PluginExecutionError
+
class Metadata(object):
"""Signal metadata capabilities for this plugin"""
def add_client(self, client_name, attribs):
"""Add client."""
pass
+
def remove_client(self, client_name):
"""Remove client."""
pass
+
def viz(self, hosts, bundles, key, colors):
"""Create viz str for viz admin mode."""
pass
@@ -124,6 +142,7 @@ class Metadata(object):
def merge_additional_data(self, imd, source, groups, data):
raise PluginExecutionError
+
class Connector(object):
"""Connector Plugins augment client metadata instances."""
def get_additional_groups(self, metadata):
@@ -134,6 +153,7 @@ class Connector(object):
"""Determine additional data for metadata instances."""
return dict()
+
class Probing(object):
"""Signal probe capability for this plugin."""
def GetProbes(self, _):
@@ -144,11 +164,13 @@ class Probing(object):
"""Receive probe results pertaining to client."""
pass
+
class Statistics(object):
"""Signal statistics handling capability."""
def process_statistics(self, client, xdata):
pass
+
class ThreadedStatistics(Statistics,
threading.Thread):
"""Threaded statistics handling capability."""
@@ -157,7 +179,7 @@ class ThreadedStatistics(Statistics,
threading.Thread.__init__(self)
# Event from the core signaling an exit
self.terminate = core.terminate
- self.work_queue = Queue.Queue(100000)
+ self.work_queue = Queue(100000)
self.pending_file = "%s/etc/%s.pending" % (datastore, self.__class__.__name__)
self.daemon = True
self.start()
@@ -169,10 +191,10 @@ class ThreadedStatistics(Statistics,
while not self.work_queue.empty():
(metadata, data) = self.work_queue.get_nowait()
try:
- pending_data.append( ( metadata.hostname, lxml.etree.tostring(data) ) )
+ pending_data.append((metadata.hostname, lxml.etree.tostring(data)))
except:
self.logger.warning("Dropping interaction for %s" % metadata.hostname)
- except Queue.Empty:
+ except Empty:
pass
try:
@@ -192,7 +214,8 @@ class ThreadedStatistics(Statistics,
savefile = open(self.pending_file, 'r')
pending_data = pickle.load(savefile)
savefile.close()
- except Exception, e:
+ except Exception:
+ e = sys.exc_info()[1]
self.logger.warning("Failed to load pending data: %s" % e)
for (pmetadata, pdata) in pending_data:
# check that shutdown wasnt called early
@@ -202,7 +225,7 @@ class ThreadedStatistics(Statistics,
try:
while True:
try:
- metadata = self.core.build_metadata(pmetadata)
+ metadata = self.core.build_metadata(pmetadata)
break
except Bcfg2.Server.Plugins.Metadata.MetadataRuntimeError:
pass
@@ -211,11 +234,12 @@ class ThreadedStatistics(Statistics,
if self.terminate.isSet():
return False
- self.work_queue.put_nowait( (metadata, lxml.etree.fromstring(pdata)) )
- except Queue.Full:
+ self.work_queue.put_nowait((metadata, lxml.etree.fromstring(pdata)))
+ except Full:
self.logger.warning("Queue.Full: Failed to load queue data")
break
- except lxml.etree.LxmlError, lxml_error:
+ except lxml.etree.LxmlError:
+ lxml_error = sys.exc_info()[1]
self.logger.error("Unable to load save interaction: %s" % lxml_error)
except Bcfg2.Server.Plugins.Metadata.MetadataConsistencyError:
self.logger.error("Unable to load metadata for save interaction: %s" % pmetadata)
@@ -232,9 +256,10 @@ class ThreadedStatistics(Statistics,
while not self.terminate.isSet():
try:
(xdata, client) = self.work_queue.get(block=True, timeout=2)
- except Queue.Empty:
+ except Empty:
continue
- except Exception, e:
+ except Exception:
+ e = sys.exc_info()[1]
self.logger.error("ThreadedStatistics: %s" % e)
continue
self.handle_statistic(xdata, client)
@@ -246,7 +271,7 @@ class ThreadedStatistics(Statistics,
try:
self.work_queue.put_nowait((metadata, copy.deepcopy(data)))
warned = False
- except Queue.Full:
+ except Full:
if not warned:
self.logger.warning("%s: Queue is full. Dropping interactions." % self.__class__.__name__)
warned = True
@@ -255,6 +280,7 @@ class ThreadedStatistics(Statistics,
"""Handle stats here."""
pass
+
class PullSource(object):
def GetExtra(self, client):
return []
@@ -262,6 +288,7 @@ class PullSource(object):
def GetCurrentEntry(self, client, e_type, e_name):
raise PluginExecutionError
+
class PullTarget(object):
def AcceptChoices(self, entry, metadata):
raise PluginExecutionError
@@ -271,31 +298,38 @@ class PullTarget(object):
of bcfg2-admin pull."""
raise PluginExecutionError
+
class Decision(object):
"""Signal decision handling capability."""
def GetDecisions(self, metadata, mode):
return []
+
class ValidationError(Exception):
pass
+
class StructureValidator(object):
"""Validate/modify goal structures."""
def validate_structures(self, metadata, structures):
- raise ValidationError, "not implemented"
+ raise ValidationError("not implemented")
+
class GoalValidator(object):
"""Validate/modify configuration goals."""
def validate_goals(self, metadata, goals):
- raise ValidationError, "not implemented"
+ raise ValidationError("not implemented")
+
class Version(object):
"""Interact with various version control systems."""
def get_revision(self):
return []
+
def commit_data(self, file_list, comment=None):
pass
+
# the rest of the file contains classes for coherent file caching
class FileBacked(object):
@@ -315,7 +349,7 @@ class FileBacked(object):
if event and event.code2str() not in ['exists', 'changed', 'created']:
return
try:
- self.data = file(self.name).read()
+ self.data = BUILTIN_FILE_TYPE(self.name).read()
self.Index()
except IOError:
logger.error("Failed to read file %s" % (self.name))
@@ -324,6 +358,7 @@ class FileBacked(object):
"""Update local data structures based on current file state"""
pass
+
class DirectoryBacked(object):
"""This object is a coherent cache for a filesystem hierarchy of files."""
__child__ = FileBacked
@@ -341,7 +376,7 @@ class DirectoryBacked(object):
return self.entries[key]
def __iter__(self):
- return self.entries.iteritems()
+ return iter(list(self.entries.items()))
def AddEntry(self, name):
"""Add new entry to data structures upon file creation."""
@@ -380,9 +415,10 @@ class DirectoryBacked(object):
elif action in ['endExist']:
pass
else:
- print "Got unknown event %s %s %s" % (event.requestID,
+ print("Got unknown event %s %s %s" % (event.requestID,
event.code2str(),
- event.filename)
+ event.filename))
+
class XMLFileBacked(FileBacked):
"""
@@ -401,7 +437,7 @@ class XMLFileBacked(FileBacked):
try:
xdata = XML(self.data)
except XMLSyntaxError:
- logger.error("Failed to parse %s"%(self.name))
+ logger.error("Failed to parse %s" % (self.name))
return
self.label = xdata.attrib[self.__identifier__]
self.entries = xdata.getchildren()
@@ -409,12 +445,14 @@ class XMLFileBacked(FileBacked):
def __iter__(self):
return iter(self.entries)
+
class SingleXMLFileBacked(XMLFileBacked):
"""This object is a coherent cache for an independent XML file."""
def __init__(self, filename, fam):
XMLFileBacked.__init__(self, filename)
fam.AddMonitor(filename, self)
+
class StructFile(XMLFileBacked):
"""This file contains a set of structure file formatting logic."""
def __init__(self, name):
@@ -429,38 +467,52 @@ class StructFile(XMLFileBacked):
logger.error("Failed to parse file %s" % self.name)
return
self.fragments = {}
- work = {lambda x:True: xdata.getchildren()}
+ work = {lambda x: True: xdata.getchildren()}
while work:
(predicate, worklist) = work.popitem()
- self.fragments[predicate] = [item for item in worklist if item.tag != 'Group'
- and not isinstance(item, lxml.etree._Comment)]
- for group in [item for item in worklist if item.tag == 'Group']:
- # if only python had forceable early-binding
- if group.get('negate', 'false') in ['true', 'True']:
- cmd = "lambda x:'%s' not in x.groups and predicate(x)"
- else:
- cmd = "lambda x:'%s' in x.groups and predicate(x)"
-
- newpred = eval(cmd % (group.get('name')), {'predicate':predicate})
- work[newpred] = group.getchildren()
+ self.fragments[predicate] = \
+ [item for item in worklist
+ if (item.tag != 'Group' and
+ item.tag != 'Client' and
+ not isinstance(item,
+ lxml.etree._Comment))]
+ for item in worklist:
+ cmd = None
+ if item.tag == 'Group':
+ if item.get('negate', 'false').lower() == 'true':
+ cmd = "lambda x:'%s' not in x.groups and predicate(x)"
+ else:
+ cmd = "lambda x:'%s' in x.groups and predicate(x)"
+ elif item.tag == 'Client':
+ if item.get('negate', 'false').lower() == 'true':
+ cmd = "lambda x:x.hostname != '%s' and predicate(x)"
+ else:
+ cmd = "lambda x:x.hostname == '%s' and predicate(x)"
+ # else, ignore item
+ if cmd is not None:
+ newpred = eval(cmd % item.get('name'),
+ {'predicate':predicate})
+ work[newpred] = item.getchildren()
def Match(self, metadata):
"""Return matching fragments of independent."""
- matching = [frag for (pred, frag) in self.fragments.iteritems() if pred(metadata)]
+ matching = [frag for (pred, frag) in list(self.fragments.items())
+ if pred(metadata)]
if matching:
- return reduce(lambda x, y:x+y, matching)
+ return reduce(lambda x, y: x + y, matching)
logger.error("File %s got null match" % (self.name))
return []
+
class INode:
"""
LNodes provide lists of things available at a particular
group intersection.
"""
- raw = {'Client':"lambda x:'%s' == x.hostname and predicate(x)",
- 'Group':"lambda x:'%s' in x.groups and predicate(x)"}
- nraw = {'Client':"lambda x:'%s' != x.hostname and predicate(x)",
- 'Group':"lambda x:'%s' not in x.groups and predicate(x)"}
+ raw = {'Client': "lambda x:'%s' == x.hostname and predicate(x)",
+ 'Group': "lambda x:'%s' in x.groups and predicate(x)"}
+ nraw = {'Client': "lambda x:'%s' != x.hostname and predicate(x)",
+ 'Group': "lambda x:'%s' not in x.groups and predicate(x)"}
containers = ['Group', 'Client']
ignore = []
@@ -468,16 +520,16 @@ class INode:
self.data = data
self.contents = {}
if parent == None:
- self.predicate = lambda x:True
+ self.predicate = lambda x: True
else:
predicate = parent.predicate
if data.get('negate', 'false') in ['true', 'True']:
psrc = self.nraw
else:
psrc = self.raw
- if data.tag in psrc.keys():
+ if data.tag in list(psrc.keys()):
self.predicate = eval(psrc[data.tag] % (data.get('name')),
- {'predicate':predicate})
+ {'predicate': predicate})
else:
raise Exception
mytype = self.__class__
@@ -491,7 +543,7 @@ class INode:
try:
self.contents[item.tag][item.get('name')] = item.attrib
except KeyError:
- self.contents[item.tag] = {item.get('name'):item.attrib}
+ self.contents[item.tag] = {item.get('name'): item.attrib}
if item.text:
self.contents[item.tag]['__text__'] = item.text
try:
@@ -511,6 +563,7 @@ class INode:
for child in self.children:
child.Match(metadata, data)
+
class XMLSrc(XMLFileBacked):
"""XMLSrc files contain a LNode hierarchy that returns matching entries."""
__node__ = INode
@@ -527,7 +580,7 @@ class XMLSrc(XMLFileBacked):
def HandleEvent(self, _=None):
"""Read file upon update."""
try:
- data = file(self.name).read()
+ data = BUILTIN_FILE_TYPE(self.name).read()
except IOError:
logger.error("Failed to read file %s" % (self.name))
return
@@ -557,10 +610,12 @@ class XMLSrc(XMLFileBacked):
self.pnode.Match(metadata, cache[1])
self.cache = cache
+
class XMLDirectoryBacked(DirectoryBacked):
"""Directorybacked for *.xml."""
patterns = re.compile('.*\.xml')
+
class PrioDir(Plugin, Generator, XMLDirectoryBacked):
"""This is a generator that handles package assignments."""
name = 'PrioDir'
@@ -579,8 +634,8 @@ class PrioDir(Plugin, Generator, XMLDirectoryBacked):
"""Handle events and update dispatch table."""
XMLDirectoryBacked.HandleEvent(self, event)
self.Entries = {}
- for src in self.entries.values():
- for itype, children in src.items.iteritems():
+ for src in list(self.entries.values()):
+ for itype, children in list(src.items.items()):
for child in children:
try:
self.Entries[itype][child] = self.BindEntry
@@ -589,14 +644,14 @@ class PrioDir(Plugin, Generator, XMLDirectoryBacked):
def BindEntry(self, entry, metadata):
"""Check package lists of package entries."""
- [src.Cache(metadata) for src in self.entries.values()]
+ [src.Cache(metadata) for src in list(self.entries.values())]
name = entry.get('name')
if not src.cache:
self.logger.error("Called before data loaded")
raise PluginExecutionError
- matching = [src for src in self.entries.values()
+ matching = [src for src in list(self.entries.values())
if src.cache and entry.tag in src.cache[1]
- and src.cache[1][entry.tag].has_key(name)]
+ and name in src.cache[1][entry.tag]]
if len(matching) == 0:
raise PluginExecutionError
elif len(matching) == 1:
@@ -618,15 +673,17 @@ class PrioDir(Plugin, Generator, XMLDirectoryBacked):
entry.text = data['__text__']
if '__children__' in data:
[entry.append(copy.deepcopy(item)) for item in data['__children__']]
- [entry.attrib.__setitem__(key, data[key]) for key in data.keys() \
+ [entry.attrib.__setitem__(key, data[key]) for key in list(data.keys()) \
if not key.startswith('__')]
+
# new unified EntrySet backend
class SpecificityError(Exception):
"""Thrown in case of filename parse failure."""
pass
+
class Specificity:
def __init__(self, all=False, group=False, hostname=False, prio=0, delta=False):
@@ -665,6 +722,7 @@ class Specificity:
return True
return False
+
class SpecificData(object):
def __init__(self, name, specific, encoding):
self.name = name
@@ -678,9 +736,11 @@ class SpecificData(object):
except:
logger.error("Failed to read file %s" % self.name)
+
class EntrySet:
"""Entry sets deal with the host- and group-specific entries."""
ignore = re.compile("^(\.#.*|.*~|\\..*\\.(sw[px])|.*\\.genshi_include)$")
+
def __init__(self, basename, path, entry_type, encoding):
self.path = path
self.entry_type = entry_type
@@ -693,7 +753,7 @@ class EntrySet:
self.specific = re.compile(pattern)
def get_matching(self, metadata):
- return [item for item in self.entries.values() \
+ return [item for item in list(self.entries.values()) \
if item.specific.matches(metadata)]
def handle_event(self, event):
@@ -761,11 +821,11 @@ class EntrySet:
for line in open(fpath).readlines():
match = info_regex.match(line)
if not match:
- logger.warning("Failed to match line: %s"%line)
+ logger.warning("Failed to match line: %s" % line)
continue
else:
mgd = match.groupdict()
- for key, value in mgd.iteritems():
+ for key, value in list(mgd.items()):
if value:
self.metadata[key] = value
if len(self.metadata['perms']) == 3:
@@ -795,7 +855,7 @@ class EntrySet:
(entry.get('name')))
raise PluginExecutionError
[entry.attrib.__setitem__(key, value) \
- for (key, value) in mdata['Info'][None].iteritems()]
+ for (key, value) in list(mdata['Info'][None].items())]
def bind_entry(self, entry, metadata):
"""Return the appropriate interpreted template from the set of available templates."""
@@ -817,6 +877,7 @@ class EntrySet:
raise PluginExecutionError
+
class GroupSpool(Plugin, Generator):
"""Unified interface for handling group-specific data (e.g. .G## files)."""
name = 'GroupSpool'
@@ -878,9 +939,9 @@ class GroupSpool(Plugin, Generator):
if not relative.endswith('/'):
relative += '/'
name = self.data + relative
- if relative not in self.handles.values():
+ if relative not in list(self.handles.values()):
if not posixpath.isdir(name):
- print "Failed to open directory %s" % (name)
+ print("Failed to open directory %s" % (name))
return
reqid = self.core.fam.AddMonitor(name, self)
self.handles[reqid] = relative
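
Note: the StructFile.HandleEvent hunk above now recognizes Client tags alongside Group tags, and builds each nested predicate by eval'ing a lambda string that closes over the parent predicate. A rough closure-based equivalent of that composition, shown only to clarify the logic (this is not code from the commit):

def compose(parent, tag, name, negate=False):
    # One predicate per nested Group/Client element; each one also
    # requires the parent predicate to hold, so nesting intersects.
    if tag == 'Group':
        test = lambda meta: (name not in meta.groups) if negate \
            else (name in meta.groups)
    elif tag == 'Client':
        test = lambda meta: (meta.hostname != name) if negate \
            else (meta.hostname == name)
    else:
        return None
    return lambda meta: test(meta) and parent(meta)

Match() then concatenates every fragment list whose predicate accepts the client metadata.
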
diff --git a/src/lib/Server/Plugins/Account.py b/src/lib/Server/Plugins/Account.py
index e3ea58761..f67819b9d 100644
--- a/src/lib/Server/Plugins/Account.py
+++ b/src/lib/Server/Plugins/Account.py
@@ -3,6 +3,7 @@ __revision__ = '$Revision$'
import Bcfg2.Server.Plugin
+
class Account(Bcfg2.Server.Plugin.Plugin,
Bcfg2.Server.Plugin.Generator):
"""This module generates account config files,
@@ -21,13 +22,14 @@ class Account(Bcfg2.Server.Plugin.Plugin,
def __init__(self, core, datastore):
Bcfg2.Server.Plugin.Plugin.__init__(self, core, datastore)
Bcfg2.Server.Plugin.Generator.__init__(self)
- self.Entries = {'ConfigFile':{'/etc/passwd':self.from_yp_cb,
- '/etc/group':self.from_yp_cb,
- '/etc/security/limits.conf':self.gen_limits_cb,
- '/root/.ssh/authorized_keys':self.gen_root_keys_cb,
- '/etc/sudoers':self.gen_sudoers}}
+ self.Entries = {'ConfigFile': {'/etc/passwd': self.from_yp_cb,
+ '/etc/group': self.from_yp_cb,
+ '/etc/security/limits.conf': self.gen_limits_cb,
+ '/root/.ssh/authorized_keys': self.gen_root_keys_cb,
+ '/etc/sudoers': self.gen_sudoers}}
try:
- self.repository = Bcfg2.Server.Plugin.DirectoryBacked(self.data, self.core.fam)
+ self.repository = Bcfg2.Server.Plugin.DirectoryBacked(self.data,
+ self.core.fam)
except:
self.logger.error("Failed to load repos: %s, %s" % \
(self.data, "%s/ssh" % (self.data)))
@@ -38,9 +40,11 @@ class Account(Bcfg2.Server.Plugin.Plugin,
fname = entry.attrib['name'].split('/')[-1]
entry.text = self.repository.entries["static.%s" % (fname)].data
entry.text += self.repository.entries["dyn.%s" % (fname)].data
- perms = {'owner':'root', 'group':'root', 'perms':'0644'}
+ perms = {'owner': 'root',
+ 'group': 'root',
+ 'perms': '0644'}
[entry.attrib.__setitem__(key, value) for (key, value) in \
- perms.iteritems()]
+ list(perms.items())]
def gen_limits_cb(self, entry, metadata):
"""Build limits entries based on current ACLs."""
@@ -50,9 +54,11 @@ class Account(Bcfg2.Server.Plugin.Plugin,
self.repository.entries["useraccess"].data.split()]
users = [user for (user, host) in \
useraccess if host == metadata.hostname.split('.')[0]]
- perms = {'owner':'root', 'group':'root', 'perms':'0600'}
+ perms = {'owner': 'root',
+ 'group': 'root',
+ 'perms': '0600'}
[entry.attrib.__setitem__(key, value) for (key, value) in \
- perms.iteritems()]
+ list(perms.items())]
entry.text += "".join(["%s hard maxlogins 1024\n" % uname for uname in superusers + users])
if "*" not in users:
entry.text += "* hard maxlogins 0\n"
@@ -71,9 +77,11 @@ class Account(Bcfg2.Server.Plugin.Plugin,
entry.text = "".join([rdata["%s.key" % user].data for user \
in superusers if \
("%s.key" % user) in rdata])
- perms = {'owner':'root', 'group':'root', 'perms':'0600'}
+ perms = {'owner': 'root',
+ 'group': 'root',
+ 'perms': '0600'}
[entry.attrib.__setitem__(key, value) for (key, value) \
- in perms.iteritems()]
+ in list(perms.items())]
def gen_sudoers(self, entry, metadata):
"""Build root authorized keys file based on current ACLs."""
@@ -88,6 +96,8 @@ class Account(Bcfg2.Server.Plugin.Plugin,
entry.text = self.repository.entries['static.sudoers'].data
entry.text += "".join(["%s ALL=(ALL) ALL\n" % uname \
for uname in superusers])
- perms = {'owner':'root', 'group':'root', 'perms':'0440'}
+ perms = {'owner': 'root',
+ 'group': 'root',
+ 'perms': '0440'}
[entry.attrib.__setitem__(key, value) for (key, value) \
- in perms.iteritems()]
+ in list(perms.items())]
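
Note: Account.py, like several plugins below, assigns entry attributes with a list comprehension evaluated only for its side effects. A plain loop over the lxml element API is equivalent and somewhat easier to read; this is a sketch only, the commit keeps the comprehension style:

perms = {'owner': 'root', 'group': 'root', 'perms': '0644'}
for key, value in perms.items():
    entry.set(key, value)    # entry is the lxml element passed to the callback
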
diff --git a/src/lib/Server/Plugins/Base.py b/src/lib/Server/Plugins/Base.py
index 8e5ca1cd9..5e7d89727 100644
--- a/src/lib/Server/Plugins/Base.py
+++ b/src/lib/Server/Plugins/Base.py
@@ -1,9 +1,15 @@
"""This module sets up a base list of configuration entries."""
__revision__ = '$Revision$'
-import Bcfg2.Server.Plugin
import copy
import lxml.etree
+import sys
+# py3k compatibility
+if sys.hexversion >= 0x03000000:
+ from functools import reduce
+
+import Bcfg2.Server.Plugin
+
class Base(Bcfg2.Server.Plugin.Plugin,
Bcfg2.Server.Plugin.Structure,
@@ -31,8 +37,8 @@ class Base(Bcfg2.Server.Plugin.Plugin,
def BuildStructures(self, metadata):
"""Build structures for client described by metadata."""
ret = lxml.etree.Element("Independent", version='2.0')
- fragments = reduce(lambda x, y: x+y,
+ fragments = reduce(lambda x, y: x + y,
[base.Match(metadata) for base
- in self.entries.values()], [])
+ in list(self.entries.values())], [])
[ret.append(copy.deepcopy(frag)) for frag in fragments]
return [ret]
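
Note: Base.py gains the conditional functools import because reduce is no longer a builtin on Python 3. The flattening it performs in BuildStructures reduces a list of fragment lists into a single list, with [] as the initial value so an empty plugin still yields an empty result; in isolation:

import sys
if sys.hexversion >= 0x03000000:
    from functools import reduce

# concatenate sub-lists; the trailing [] seeds the reduction
fragments = reduce(lambda x, y: x + y, [[1, 2], [], [3]], [])
assert fragments == [1, 2, 3]
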
diff --git a/src/lib/Server/Plugins/Bundler.py b/src/lib/Server/Plugins/Bundler.py
index 3f88fe26b..01ad3c78b 100644
--- a/src/lib/Server/Plugins/Bundler.py
+++ b/src/lib/Server/Plugins/Bundler.py
@@ -4,6 +4,7 @@ __revision__ = '$Revision$'
import copy
import lxml.etree
import re
+import sys
import Bcfg2.Server.Plugin
@@ -73,14 +74,15 @@ class Bundler(Bcfg2.Server.Plugin.Plugin,
"""Build all structures for client (metadata)."""
bundleset = []
for bundlename in metadata.bundles:
- entries = [item for (key, item) in self.entries.iteritems() if \
+ entries = [item for (key, item) in list(self.entries.items()) if \
self.patterns.match(key).group('name') == bundlename]
if len(entries) == 0:
continue
elif len(entries) == 1:
try:
bundleset.append(entries[0].get_xml_value(metadata))
- except genshi.template.base.TemplateError, t:
+ except genshi.template.base.TemplateError:
+ t = sys.exc_info()[1]
self.logger.error("Bundler: Failed to template genshi bundle %s" \
% (bundlename))
self.logger.error(t)
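
Note: the Bundler hunk shows the exception-handling idiom used throughout this commit. The Python 2 only form "except SomeError, err" is replaced by a bare except clause plus sys.exc_info()[1], which works from Python 2.4 through 3.x without requiring the 2.6+ "as" syntax. The pattern in isolation (risky_operation is a hypothetical stand-in):

import sys

def risky_operation():
    raise ValueError("bad input")

try:
    risky_operation()
except ValueError:
    err = sys.exc_info()[1]    # portable way to grab the active exception
    print("operation failed: %s" % err)
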
diff --git a/src/lib/Server/Plugins/Cfg.py b/src/lib/Server/Plugins/Cfg.py
index f851b7914..41cf6c9c1 100644
--- a/src/lib/Server/Plugins/Cfg.py
+++ b/src/lib/Server/Plugins/Cfg.py
@@ -6,6 +6,7 @@ import logging
import lxml
import os
import re
+import sys
import tempfile
import Bcfg2.Server.Plugin
@@ -13,15 +14,21 @@ import Bcfg2.Server.Plugin
try:
import genshi.core
import genshi.input
- from genshi.template import TemplateLoader, \
- TextTemplate, MarkupTemplate, TemplateError
- from genshi.template import NewTextTemplate
+ from genshi.template import TemplateLoader, NewTextTemplate
have_genshi = True
except:
have_genshi = False
logger = logging.getLogger('Bcfg2.Plugins.Cfg')
+
+def u_str(string, encoding):
+ if sys.hexversion >= 0x03000000:
+ return str(string, encoding)
+ else:
+ return unicode(string, encoding)
+
+
# snipped from TGenshi
def removecomment(stream):
"""A genshi filter that removes comments from the stream."""
@@ -30,6 +37,7 @@ def removecomment(stream):
continue
yield kind, data, pos
+
def process_delta(data, delta):
if not delta.specific.delta:
return data
@@ -60,13 +68,15 @@ def process_delta(data, delta):
output = open(basefile.name, 'r').read()
[os.unlink(fname) for fname in [basefile.name, dfile.name]]
if ret >> 8 != 0:
- raise Bcfg2.Server.Plugin.PluginExecutionError, ('delta', delta)
+ raise Bcfg2.Server.Plugin.PluginExecutionError('delta', delta)
return output
+
class CfgMatcher:
+
def __init__(self, fname):
name = re.escape(fname)
- self.basefile_reg = re.compile('^(?P<basename>%s)(|\\.H_(?P<hostname>\S+)|.G(?P<prio>\d+)_(?P<group>\S+))(?P<genshi>\\.genshi)?$' % name)
+ self.basefile_reg = re.compile('^(?P<basename>%s)(|\\.H_(?P<hostname>\S+?)|.G(?P<prio>\d+)_(?P<group>\S+?))(?P<genshi>\\.genshi)?$' % name)
self.delta_reg = re.compile('^(?P<basename>%s)(|\\.H_(?P<hostname>\S+)|\\.G(?P<prio>\d+)_(?P<group>\S+))\\.(?P<delta>(cat|diff))$' % name)
self.cat_count = fname.count(".cat")
self.diff_count = fname.count(".diff")
@@ -77,7 +87,9 @@ class CfgMatcher:
return self.delta_reg.match(fname)
return self.basefile_reg.match(fname)
+
class CfgEntrySet(Bcfg2.Server.Plugin.EntrySet):
+
def __init__(self, basename, path, entry_type, encoding):
Bcfg2.Server.Plugin.EntrySet.__init__(self, basename, path,
entry_type, encoding)
@@ -87,15 +99,18 @@ class CfgEntrySet(Bcfg2.Server.Plugin.EntrySet):
return cmp(one.specific, other.specific)
def get_pertinent_entries(self, metadata):
- '''return a list of all entries pertinent to a client => [base, delta1, delta2]'''
- matching = [ent for ent in self.entries.values() if \
+ """return a list of all entries pertinent
+ to a client => [base, delta1, delta2]
+ """
+ matching = [ent for ent in list(self.entries.values()) if \
ent.specific.matches(metadata)]
matching.sort(self.sort_by_specific)
- non_delta = [matching.index(m) for m in matching if not m.specific.delta]
+ non_delta = [matching.index(m) for m in matching
+ if not m.specific.delta]
if not non_delta:
raise Bcfg2.Server.Plugin.PluginExecutionError
base = min(non_delta)
- used = matching[:base+1]
+ used = matching[:base + 1]
used.reverse()
return used
@@ -113,17 +128,19 @@ class CfgEntrySet(Bcfg2.Server.Plugin.EntrySet):
template_cls = NewTextTemplate
loader = TemplateLoader()
template = loader.load(basefile.name, cls=template_cls,
- encoding=self.encoding)
- stream = template.generate( \
- name=entry.get('name'), metadata=metadata,
- path=basefile.name).filter(removecomment)
+ encoding=self.encoding)
+ fname = entry.get('realname', entry.get('name'))
+ stream = template.generate(name=fname,
+ metadata=metadata,
+ path=basefile.name).filter(removecomment)
try:
data = stream.render('text', strip_whitespace=False)
except TypeError:
data = stream.render('text')
if data == '':
entry.set('empty', 'true')
- except Exception, e:
+ except Exception:
+ e = sys.exc_info()[1]
logger.error("Cfg: genshi exception: %s" % e)
raise Bcfg2.Server.Plugin.PluginExecutionError
else:
@@ -136,7 +153,13 @@ class CfgEntrySet(Bcfg2.Server.Plugin.EntrySet):
if entry.get('encoding') == 'base64':
entry.text = binascii.b2a_base64(data)
else:
- entry.text = unicode(data, self.encoding)
+ try:
+ entry.text = u_str(data, self.encoding)
+ except UnicodeDecodeError:
+ e = sys.exc_info()[1]
+ logger.error("Failed to decode %s: %s" % (entry.get('name'), e))
+ logger.error("Please verify you are using the proper encoding.")
+ raise Bcfg2.Server.Plugin.PluginExecutionError
if entry.text in ['', None]:
entry.set('empty', 'true')
@@ -168,7 +191,8 @@ class CfgEntrySet(Bcfg2.Server.Plugin.EntrySet):
open(name, 'w').write(new_entry['text'])
if log:
logger.info("Wrote file %s" % name)
- badattr = [attr for attr in ['owner', 'group', 'perms'] if attr in new_entry]
+ badattr = [attr for attr in ['owner', 'group', 'perms']
+ if attr in new_entry]
if badattr:
metadata_updates = {}
metadata_updates.update(self.metadata)
@@ -178,12 +202,13 @@ class CfgEntrySet(Bcfg2.Server.Plugin.EntrySet):
infotag = lxml.etree.SubElement(infoxml, 'Info')
[infotag.attrib.__setitem__(attr, metadata_updates[attr]) \
for attr in metadata_updates]
- ofile = open(self.path + "/info.xml","w")
+ ofile = open(self.path + "/info.xml", "w")
ofile.write(lxml.etree.tostring(infoxml, pretty_print=True))
ofile.close()
if log:
logger.info("Wrote file %s" % (self.path + "/info.xml"))
+
class Cfg(Bcfg2.Server.Plugin.GroupSpool,
Bcfg2.Server.Plugin.PullTarget):
"""This generator in the configuration file repository for Bcfg2."""
@@ -197,4 +222,6 @@ class Cfg(Bcfg2.Server.Plugin.GroupSpool,
return self.entries[entry.get('name')].list_accept_choices(metadata)
def AcceptPullData(self, specific, new_entry, log):
- return self.entries[new_entry.get('name')].write_update(specific, new_entry, log)
+ return self.entries[new_entry.get('name')].write_update(specific,
+ new_entry,
+ log)
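
Note: Cfg.py's new u_str helper hides the bytes/text split between Python 2 and 3: Python 3 decodes with str(data, encoding), Python 2 with unicode(data, encoding). bind_entry now routes file data through it and reports a clear error when the configured encoding cannot decode the file. Self-contained illustration of the helper:

import sys

def u_str(string, encoding):
    # same shape as the helper added to Cfg.py above
    if sys.hexversion >= 0x03000000:
        return str(string, encoding)
    return unicode(string, encoding)

print(u_str(b'caf\xc3\xa9', 'utf-8'))    # prints "cafe" with an accent on either line
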
diff --git a/src/lib/Server/Plugins/DBStats.py b/src/lib/Server/Plugins/DBStats.py
index 27696a978..5ef1920e1 100644
--- a/src/lib/Server/Plugins/DBStats.py
+++ b/src/lib/Server/Plugins/DBStats.py
@@ -33,7 +33,8 @@ class DBStats(Bcfg2.Server.Plugin.Plugin,
logger.debug("Searching for new models to add to the statistics database")
try:
update_database()
- except Exception, inst:
+ except Exception:
+ inst = sys.exc_info()[1]
logger.debug(str(inst))
logger.debug(str(type(inst)))
@@ -61,7 +62,8 @@ class DBStats(Bcfg2.Server.Plugin.Plugin,
logger.info("Imported data for %s in %s seconds" \
% (metadata.hostname, time.time() - start))
return
- except MultipleObjectsReturned, e:
+ except MultipleObjectsReturned:
+ e = sys.exc_info()[1]
logger.error("DBStats: MultipleObjectsReturned while handling %s: %s" % \
(metadata.hostname, e))
logger.error("DBStats: Data is inconsistent")
diff --git a/src/lib/Server/Plugins/Decisions.py b/src/lib/Server/Plugins/Decisions.py
index 1f9525a0e..e239be5ee 100644
--- a/src/lib/Server/Plugins/Decisions.py
+++ b/src/lib/Server/Plugins/Decisions.py
@@ -26,7 +26,8 @@ class DecisionSet(Bcfg2.Server.Plugin.EntrySet):
DecisionFile, encoding)
try:
fam.AddMonitor(path, self)
- except OSError, e:
+ except OSError:
+ e = sys.exc_info()[1]
logger.error('Adding filemonitor for %s failed. '
'Make sure directory exists' % path)
raise Bcfg2.Server.Plugin.PluginInitError(e)
diff --git a/src/lib/Server/Plugins/Deps.py b/src/lib/Server/Plugins/Deps.py
index 088f8cdad..b186258cb 100644
--- a/src/lib/Server/Plugins/Deps.py
+++ b/src/lib/Server/Plugins/Deps.py
@@ -5,20 +5,22 @@ import lxml.etree
import Bcfg2.Server.Plugin
+
class DNode(Bcfg2.Server.Plugin.INode):
"""DNode provides supports for single predicate types for dependencies."""
- raw = {'Group':"lambda x:'%s' in x.groups and predicate(x)"}
+ raw = {'Group': "lambda x:'%s' in x.groups and predicate(x)"}
containers = ['Group']
def __init__(self, data, idict, parent=None):
self.data = data
self.contents = {}
if parent == None:
- self.predicate = lambda x:True
+ self.predicate = lambda x: True
else:
predicate = parent.predicate
- if data.tag in self.raw.keys():
- self.predicate = eval(self.raw[data.tag] % (data.get('name')), {'predicate':predicate})
+ if data.tag in list(self.raw.keys()):
+ self.predicate = eval(self.raw[data.tag] % (data.get('name')),
+ {'predicate': predicate})
else:
raise Exception
mytype = self.__class__
@@ -27,15 +29,18 @@ class DNode(Bcfg2.Server.Plugin.INode):
if item.tag in self.containers:
self.children.append(mytype(item, idict, self))
else:
- data = [(child.tag, child.get('name')) for child in item.getchildren()]
+ data = [(child.tag, child.get('name'))
+ for child in item.getchildren()]
try:
self.contents[item.tag][item.get('name')] = data
except KeyError:
- self.contents[item.tag] = {item.get('name'):data}
+ self.contents[item.tag] = {item.get('name'): data}
+
class DepXMLSrc(Bcfg2.Server.Plugin.XMLSrc):
__node__ = DNode
+
class Deps(Bcfg2.Server.Plugin.PrioDir,
Bcfg2.Server.Plugin.StructureValidator):
name = 'Deps'
@@ -68,12 +73,12 @@ class Deps(Bcfg2.Server.Plugin.PrioDir,
if (entries, gdata) in self.cache:
prereqs = self.cache[(entries, gdata)]
else:
- [src.Cache(metadata) for src in self.entries.values()]
+ [src.Cache(metadata) for src in list(self.entries.values())]
toexamine = list(entries[:])
while toexamine:
entry = toexamine.pop()
- matching = [src for src in self.entries.values()
+ matching = [src for src in list(self.entries.values())
if src.cache and entry[0] in src.cache[1]
and entry[1] in src.cache[1][entry[0]]]
if len(matching) > 1:
diff --git a/src/lib/Server/Plugins/Editor.py b/src/lib/Server/Plugins/Editor.py
index bfd4d6e93..76a03a325 100644
--- a/src/lib/Server/Plugins/Editor.py
+++ b/src/lib/Server/Plugins/Editor.py
@@ -2,6 +2,7 @@ import Bcfg2.Server.Plugin
import re
import lxml.etree
+
def linesub(pattern, repl, filestring):
"""Substitutes instances of pattern with repl in filestring."""
if filestring == None:
@@ -12,6 +13,7 @@ def linesub(pattern, repl, filestring):
output.append(re.sub(pattern, repl, filestring))
return '\n'.join(output)
+
class EditDirectives(Bcfg2.Server.Plugin.SpecificData):
"""This object handles the editing directives."""
def ProcessDirectives(self, input):
@@ -22,23 +24,29 @@ class EditDirectives(Bcfg2.Server.Plugin.SpecificData):
temp = linesub(directive[0], directive[1], temp)
return temp
+
class EditEntrySet(Bcfg2.Server.Plugin.EntrySet):
def __init__(self, basename, path, entry_type, encoding):
- self.ignore = re.compile("^(\.#.*|.*~|\\..*\\.(tmp|sw[px])|%s\.H_.*)$" %path.split('/')[-1])
- Bcfg2.Server.Plugin.EntrySet.__init__(self, basename, path, entry_type, encoding)
+ self.ignore = re.compile("^(\.#.*|.*~|\\..*\\.(tmp|sw[px])|%s\.H_.*)$" % path.split('/')[-1])
+ Bcfg2.Server.Plugin.EntrySet.__init__(self,
+ basename,
+ path,
+ entry_type,
+ encoding)
self.inputs = dict()
def bind_entry(self, entry, metadata):
client = metadata.hostname
filename = entry.get('name')
- permdata = {'owner':'root', 'group':'root'}
- permdata['perms'] = '0644'
+ permdata = {'owner': 'root',
+ 'group': 'root',
+ 'perms': '0644'}
[entry.attrib.__setitem__(key, permdata[key]) for key in permdata]
entry.text = self.entries['edits'].ProcessDirectives(self.get_client_data(client))
if not entry.text:
entry.set('empty', 'true')
try:
- f = open('%s/%s.H_%s' %(self.path, filename.split('/')[-1], client), 'w')
+ f = open('%s/%s.H_%s' % (self.path, filename.split('/')[-1], client), 'w')
f.write(entry.text)
f.close()
except:
@@ -60,7 +68,7 @@ class Editor(Bcfg2.Server.Plugin.GroupSpool,
def GetProbes(self, _):
'''Return a set of probes for execution on client'''
probelist = list()
- for name in self.entries.keys():
+ for name in list(self.entries.keys()):
probe = lxml.etree.Element('probe')
probe.set('name', name)
probe.set('source', "Editor")
diff --git a/src/lib/Server/Plugins/GroupPatterns.py b/src/lib/Server/Plugins/GroupPatterns.py
index 553f9d286..7faead39a 100644
--- a/src/lib/Server/Plugins/GroupPatterns.py
+++ b/src/lib/Server/Plugins/GroupPatterns.py
@@ -3,6 +3,7 @@ import re
import Bcfg2.Server.Plugin
+
class PackedDigitRange(object):
def __init__(self, digit_range):
self.sparse = list()
@@ -18,12 +19,14 @@ class PackedDigitRange(object):
if iother in self.sparse:
return True
for (start, end) in self.ranges:
- if iother in xrange(start, end+1):
+ if iother in range(start, end + 1):
return True
return False
+
class PatternMap(object):
range_finder = '\\[\\[[\d\-,]+\\]\\]'
+
def __init__(self, pattern, rangestr, groups):
self.pattern = pattern
self.rangestr = rangestr
@@ -33,8 +36,11 @@ class PatternMap(object):
self.process = self.process_re
elif rangestr != None:
self.process = self.process_range
- self.re = re.compile('^' + re.subn(self.range_finder, '(\d+)', rangestr)[0])
- dmatcher = re.compile(re.subn(self.range_finder, '\\[\\[([\d\-,]+)\\]\\]', rangestr)[0])
+ self.re = re.compile('^' + re.subn(self.range_finder, '(\d+)',
+ rangestr)[0])
+ dmatcher = re.compile(re.subn(self.range_finder,
+ '\\[\\[([\d\-,]+)\\]\\]',
+ rangestr)[0])
self.dranges = [PackedDigitRange(x) for x in dmatcher.match(rangestr).groups()]
else:
raise Exception
@@ -58,10 +64,11 @@ class PatternMap(object):
for group in self.groups:
newg = group
for idx in range(len(sub)):
- newg = newg.replace('$%s' % (idx+1), sub[idx])
+ newg = newg.replace('$%s' % (idx + 1), sub[idx])
ret.append(newg)
return ret
+
class PatternFile(Bcfg2.Server.Plugin.SingleXMLFileBacked):
def __init__(self, filename, fam):
Bcfg2.Server.Plugin.SingleXMLFileBacked.__init__(self, filename, fam)
@@ -101,6 +108,7 @@ class PatternFile(Bcfg2.Server.Plugin.SingleXMLFileBacked):
(pattern.pattern, hostname), exc_info=1)
return ret
+
class GroupPatterns(Bcfg2.Server.Plugin.Plugin,
Bcfg2.Server.Plugin.Connector):
name = "GroupPatterns"
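
Note: in GroupPatterns, PackedDigitRange.includes now tests membership with range() instead of xrange(), which does not exist on Python 3; the upper bound stays inclusive via end + 1. Minimal check of the semantics:

start, end = 10, 15
assert 12 in range(start, end + 1)      # inclusive upper bound
assert 16 not in range(start, end + 1)

On Python 2 this builds a small list, on Python 3 range answers the membership test directly; host-number ranges are short either way, so the cost is negligible.
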
diff --git a/src/lib/Server/Plugins/Hostbase.py b/src/lib/Server/Plugins/Hostbase.py
index 65992596d..4180fd716 100644
--- a/src/lib/Server/Plugins/Hostbase.py
+++ b/src/lib/Server/Plugins/Hostbase.py
@@ -1,4 +1,7 @@
-'''This file provides the Hostbase plugin. It manages dns/dhcp/nis host information'''
+"""
+This file provides the Hostbase plugin.
+It manages dns/dhcp/nis host information
+"""
__revision__ = '$Revision$'
import os
@@ -11,7 +14,9 @@ from sets import Set
from django.template import Context, loader
from django.db import connection
import re
-import cStringIO
+# Compatibility imports
+from Bcfg2.Bcfg2Py3k import StringIO
+
class Hostbase(Bcfg2.Server.Plugin.Plugin,
Bcfg2.Server.Plugin.Structure,
@@ -23,24 +28,29 @@ class Hostbase(Bcfg2.Server.Plugin.Plugin,
filepath = '/my/adm/hostbase/files/bind'
def __init__(self, core, datastore):
-
+
self.ready = False
Bcfg2.Server.Plugin.Plugin.__init__(self, core, datastore)
Bcfg2.Server.Plugin.Structure.__init__(self)
Bcfg2.Server.Plugin.Generator.__init__(self)
- files = ['zone.tmpl', 'reversesoa.tmpl', 'named.tmpl', 'reverseappend.tmpl',
- 'dhcpd.tmpl', 'hosts.tmpl', 'hostsappend.tmpl']
+ files = ['zone.tmpl',
+ 'reversesoa.tmpl',
+ 'named.tmpl',
+ 'reverseappend.tmpl',
+ 'dhcpd.tmpl',
+ 'hosts.tmpl',
+ 'hostsappend.tmpl']
self.filedata = {}
self.dnsservers = []
self.dhcpservers = []
- self.templates = {'zone':loader.get_template('zone.tmpl'),
- 'reversesoa':loader.get_template('reversesoa.tmpl'),
- 'named':loader.get_template('named.tmpl'),
- 'namedviews':loader.get_template('namedviews.tmpl'),
- 'reverseapp':loader.get_template('reverseappend.tmpl'),
- 'dhcp':loader.get_template('dhcpd.tmpl'),
- 'hosts':loader.get_template('hosts.tmpl'),
- 'hostsapp':loader.get_template('hostsappend.tmpl'),
+ self.templates = {'zone': loader.get_template('zone.tmpl'),
+ 'reversesoa': loader.get_template('reversesoa.tmpl'),
+ 'named': loader.get_template('named.tmpl'),
+ 'namedviews': loader.get_template('namedviews.tmpl'),
+ 'reverseapp': loader.get_template('reverseappend.tmpl'),
+ 'dhcp': loader.get_template('dhcpd.tmpl'),
+ 'hosts': loader.get_template('hosts.tmpl'),
+ 'hostsapp': loader.get_template('hostsappend.tmpl'),
}
self.Entries['ConfigFile'] = {}
self.__rmi__ = ['rebuildState']
@@ -48,14 +58,17 @@ class Hostbase(Bcfg2.Server.Plugin.Plugin,
self.rebuildState(None)
except:
raise PluginInitError
-
+
def FetchFile(self, entry, metadata):
"""Return prebuilt file data."""
fname = entry.get('name').split('/')[-1]
if not fname in self.filedata:
raise PluginExecutionError
- perms = {'owner':'root', 'group':'root', 'perms':'644'}
- [entry.attrib.__setitem__(key, value) for (key, value) in perms.iteritems()]
+ perms = {'owner': 'root',
+ 'group': 'root',
+ 'perms': '644'}
+ [entry.attrib.__setitem__(key, value)
+ for (key, value) in list(perms.items())]
entry.text = self.filedata[fname]
def BuildStructures(self, metadata):
@@ -110,8 +123,8 @@ class Hostbase(Bcfg2.Server.Plugin.Plugin,
hosts = {}
for zone in zones:
- zonefile = cStringIO.StringIO()
- externalzonefile = cStringIO.StringIO()
+ zonefile = StringIO()
+ externalzonefile = StringIO()
cursor.execute("""SELECT n.name FROM hostbase_zone_nameservers z
INNER JOIN hostbase_nameserver n ON z.nameserver_id = n.id
WHERE z.zone_id = \'%s\'""" % zone[0])
@@ -148,20 +161,20 @@ class Hostbase(Bcfg2.Server.Plugin.Plugin,
cursor.execute(querystring)
zonehosts = cursor.fetchall()
prevhost = (None, None, None, None)
- cnames = cStringIO.StringIO()
- cnamesexternal = cStringIO.StringIO()
+ cnames = StringIO()
+ cnamesexternal = StringIO()
for host in zonehosts:
if not host[2].split(".", 1)[1] == zone[1]:
zonefile.write(cnames.getvalue())
externalzonefile.write(cnamesexternal.getvalue())
- cnames = cStringIO.StringIO()
- cnamesexternal = cStringIO.StringIO()
+ cnames = StringIO()
+ cnamesexternal = StringIO()
continue
if not prevhost[1] == host[1] or not prevhost[2] == host[2]:
zonefile.write(cnames.getvalue())
externalzonefile.write(cnamesexternal.getvalue())
- cnames = cStringIO.StringIO()
- cnamesexternal = cStringIO.StringIO()
+ cnames = StringIO()
+ cnamesexternal = StringIO()
zonefile.write("%-32s%-10s%-32s\n" %
(host[2].split(".", 1)[0], 'A', host[1]))
zonefile.write("%-32s%-10s%-3s%s.\n" %
@@ -173,29 +186,29 @@ class Hostbase(Bcfg2.Server.Plugin.Plugin,
('', 'MX', host[4], host[5]))
elif not prevhost[5] == host[5]:
zonefile.write("%-32s%-10s%-3s%s.\n" %
- ('', 'MX', host[4], host[5]))
+ ('', 'MX', host[4], host[5]))
if host[6] == 'global':
externalzonefile.write("%-32s%-10s%-3s%s.\n" %
('', 'MX', host[4], host[5]))
-
+
if host[3]:
try:
if host[3].split(".", 1)[1] == zone[1]:
cnames.write("%-32s%-10s%-32s\n" %
(host[3].split(".", 1)[0],
- 'CNAME',host[2].split(".", 1)[0]))
+ 'CNAME', host[2].split(".", 1)[0]))
if host[6] == 'global':
cnamesexternal.write("%-32s%-10s%-32s\n" %
(host[3].split(".", 1)[0],
- 'CNAME',host[2].split(".", 1)[0]))
+ 'CNAME', host[2].split(".", 1)[0]))
else:
cnames.write("%-32s%-10s%-32s\n" %
- (host[3]+".",
+ (host[3] + ".",
'CNAME',
host[2].split(".", 1)[0]))
if host[6] == 'global':
cnamesexternal.write("%-32s%-10s%-32s\n" %
- (host[3]+".",
+ (host[3] + ".",
'CNAME',
host[2].split(".", 1)[0]))
@@ -215,9 +228,9 @@ class Hostbase(Bcfg2.Server.Plugin.Plugin,
cursor.execute("SELECT * FROM hostbase_zone WHERE zone LIKE \'%%.rev\' AND zone <> \'.rev\'")
reversezones = cursor.fetchall()
-
+
reversenames = []
- for reversezone in reversezones:
+ for reversezone in reversezones:
cursor.execute("""SELECT n.name FROM hostbase_zone_nameservers z
INNER JOIN hostbase_nameserver n ON z.nameserver_id = n.id
WHERE z.zone_id = \'%s\'""" % reversezone[0])
@@ -236,7 +249,7 @@ class Hostbase(Bcfg2.Server.Plugin.Plugin,
subnet = reversezone[1].split(".")
subnet.reverse()
- reversenames.append((reversezone[1].rstrip('.rev'),".".join(subnet[1:])))
+ reversenames.append((reversezone[1].rstrip('.rev'), ".".join(subnet[1:])))
for filename in reversenames:
cursor.execute("""
@@ -247,8 +260,8 @@ class Hostbase(Bcfg2.Server.Plugin.Plugin,
WHERE p.ip_addr LIKE '%s%%%%' AND h.status = 'active' ORDER BY p.ip_addr
""" % filename[1])
reversehosts = cursor.fetchall()
- zonefile = cStringIO.StringIO()
- externalzonefile = cStringIO.StringIO()
+ zonefile = StringIO()
+ externalzonefile = StringIO()
if len(filename[0].split(".")) == 2:
originlist = []
[originlist.append((".".join([ip[1].split(".")[2], filename[0]]),
@@ -268,13 +281,13 @@ class Hostbase(Bcfg2.Server.Plugin.Plugin,
'hosts': hosts,
'inaddr': origin[0],
'fileorigin': filename[0],
- })
+ })
zonefile.write(self.templates['reverseapp'].render(context))
context = Context({
'hosts': hosts_external,
'inaddr': origin[0],
'fileorigin': filename[0],
- })
+ })
externalzonefile.write(self.templates['reverseapp'].render(context))
else:
originlist = [filename[0]]
@@ -289,7 +302,7 @@ class Hostbase(Bcfg2.Server.Plugin.Plugin,
'hosts': hosts,
'inaddr': filename[0],
'fileorigin': None,
- })
+ })
zonefile.write(self.templates['reverseapp'].render(context))
context = Context({
'hosts': hosts_external,
@@ -308,13 +321,12 @@ class Hostbase(Bcfg2.Server.Plugin.Plugin,
context = Context({
'zones': zones,
'reverses': reversenames,
- })
+ })
self.filedata['named.conf'] = self.templates['named'].render(context)
self.Entries['ConfigFile']['/my/adm/hostbase/files/named.conf'] = self.FetchFile
self.filedata['named.conf.views'] = self.templates['namedviews'].render(context)
self.Entries['ConfigFile']['/my/adm/hostbase/files/named.conf.views'] = self.FetchFile
-
def buildDHCP(self):
"""Pre-build dhcpd.conf and stash in the filedata table."""
@@ -362,7 +374,6 @@ class Hostbase(Bcfg2.Server.Plugin.Plugin,
self.filedata['dhcpd.conf'] = self.templates['dhcp'].render(context)
self.Entries['ConfigFile']['/my/adm/hostbase/files/dhcpd.conf'] = self.FetchFile
-
def buildHosts(self):
"""Pre-build and stash /etc/hosts file."""
@@ -490,7 +501,7 @@ Name Room User Type
def buildHostsLPD(self):
"""Creates the /mcs/etc/hosts.lpd file"""
-
+
# this header needs to be changed to be more generic
header = """+@machines
+@all-machines
@@ -503,7 +514,7 @@ delphi.esh.anl.gov
anlcv1.ctd.anl.gov
anlvms.ctd.anl.gov
olivia.ctd.anl.gov\n\n"""
-
+
cursor = connection.cursor()
cursor.execute("""
SELECT hostname FROM hostbase_host WHERE netgroup=\"red\" AND status = 'active'
@@ -534,7 +545,6 @@ olivia.ctd.anl.gov\n\n"""
self.filedata['hosts.lpd'] = hostslpdfile
self.Entries['ConfigFile']['/mcs/etc/hosts.lpd'] = self.FetchFile
-
def buildNetgroups(self):
"""Makes the *-machine files"""
header = """###################################################################
@@ -557,11 +567,11 @@ olivia.ctd.anl.gov\n\n"""
nameslist = cursor.fetchall()
# gets the first host and initializes the hash
hostdata = nameslist[0]
- netgroups = {hostdata[2]:[hostdata[0]]}
+ netgroups = {hostdata[2]: [hostdata[0]]}
for row in nameslist:
# if new netgroup, create it
if row[2] not in netgroups:
- netgroups.update({row[2]:[]})
+ netgroups.update({row[2]: []})
# if it belongs in the netgroup and has multiple interfaces, put them in
if hostdata[0] == row[0] and row[3]:
netgroups[row[2]].append(row[1])
@@ -572,7 +582,7 @@ olivia.ctd.anl.gov\n\n"""
hostdata = row
for netgroup in netgroups:
- fileoutput = cStringIO.StringIO()
+ fileoutput = StringIO()
fileoutput.write(header % (netgroup, netgroup, len(netgroups[netgroup])))
for each in netgroups[netgroup]:
fileoutput.write(each + "\n")
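
Note: Hostbase replaces cStringIO, which was dropped in Python 3, with a StringIO name imported from the Bcfg2.Bcfg2Py3k compatibility module. That module is not part of this hunk; a shim of this kind typically amounts to something like the following (an assumption for illustration, not the actual Bcfg2Py3k code):

import sys
if sys.hexversion >= 0x03000000:
    from io import StringIO
else:
    from cStringIO import StringIO
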
diff --git a/src/lib/Server/Plugins/Ldap.py b/src/lib/Server/Plugins/Ldap.py
index 4f10d8ca6..06ecaed7b 100644
--- a/src/lib/Server/Plugins/Ldap.py
+++ b/src/lib/Server/Plugins/Ldap.py
@@ -1,9 +1,18 @@
import imp
+import logging
+import sys
import time
-import ldap
import Bcfg2.Options
import Bcfg2.Server.Plugin
+logger = logging.getLogger('Bcfg2.Plugins.Ldap')
+
+try:
+ import ldap
+except:
+ logger.error("Unable to load ldap module. Is python-ldap installed?")
+ raise ImportError
+
# time in seconds between retries after failed LDAP connection
RETRY_DELAY = 5
# how many times to try reaching the LDAP server if a connection is broken
@@ -81,7 +90,8 @@ class Ldap(Bcfg2.Server.Plugin.Plugin, Bcfg2.Server.Plugin.Connector):
self.debug_log("LdapPlugin debug: query '" + query.name +
"' not applicable to host '" + metadata.hostname + "'")
return data
- except Exception, error_msg:
+ except Exception:
+ error_msg = sys.exc_info()[1]
if self.debug_flag:
raise
else:
diff --git a/src/lib/Server/Plugins/Metadata.py b/src/lib/Server/Plugins/Metadata.py
index aa482e7ed..ca6e43851 100644
--- a/src/lib/Server/Plugins/Metadata.py
+++ b/src/lib/Server/Plugins/Metadata.py
@@ -1,4 +1,6 @@
-"""This file stores persistent metadata for the Bcfg2 Configuration Repository."""
+"""
+This file stores persistent metadata for the Bcfg2 Configuration Repository.
+"""
__revision__ = '$Revision$'
@@ -12,6 +14,7 @@ import time
import Bcfg2.Server.FileMonitor
import Bcfg2.Server.Plugin
+
def locked(fd):
"""Aquire a lock on a file"""
try:
@@ -20,14 +23,19 @@ def locked(fd):
return True
return False
+
class MetadataConsistencyError(Exception):
"""This error gets raised when metadata is internally inconsistent."""
pass
+
class MetadataRuntimeError(Exception):
- """This error is raised when the metadata engine is called prior to reading enough data."""
+ """This error is raised when the metadata engine
+ is called prior to reading enough data.
+ """
pass
+
class XMLMetadataConfig(object):
"""Handles xml config files and all XInclude statements"""
def __init__(self, metadata, watch_clients, basefile):
@@ -39,7 +47,8 @@ class XMLMetadataConfig(object):
self.basedata = None
self.basedir = metadata.data
self.logger = metadata.logger
- self.pseudo_monitor = isinstance(metadata.core.fam, Bcfg2.Server.FileMonitor.Pseudo)
+ self.pseudo_monitor = isinstance(metadata.core.fam,
+ Bcfg2.Server.FileMonitor.Pseudo)
@property
def xdata(self):
@@ -56,7 +65,8 @@ class XMLMetadataConfig(object):
def add_monitor(self, fname):
"""Add a fam monitor for an included file"""
if self.should_monitor:
- self.metadata.core.fam.AddMonitor("%s/%s" % (self.basedir, fname), self.metadata)
+ self.metadata.core.fam.AddMonitor("%s/%s" % (self.basedir, fname),
+ self.metadata)
self.extras.append(fname)
def load_xml(self):
@@ -81,14 +91,16 @@ class XMLMetadataConfig(object):
def write(self):
"""Write changes to xml back to disk."""
- self.write_xml("%s/%s" % (self.basedir, self.basefile), self.basedata)
+ self.write_xml("%s/%s" % (self.basedir, self.basefile),
+ self.basedata)
def write_xml(self, fname, xmltree):
"""Write changes to xml back to disk."""
tmpfile = "%s.new" % fname
try:
datafile = open("%s" % tmpfile, 'w')
- except IOError, e:
+ except IOError:
+ e = sys.exc_info()[1]
self.logger.error("Failed to write %s: %s" % (tmpfile, e))
raise MetadataRuntimeError
# prep data
@@ -182,6 +194,7 @@ class ClientMetadata(object):
return grp
return ''
+
class MetadataQuery(object):
def __init__(self, by_name, get_clients, by_groups, by_profiles, all_groups, all_groups_in_category):
# resolver is set later
@@ -201,6 +214,7 @@ class MetadataQuery(object):
def all(self):
return [self.by_name(name) for name in self.all_clients()]
+
class Metadata(Bcfg2.Server.Plugin.Plugin,
Bcfg2.Server.Plugin.Metadata,
Bcfg2.Server.Plugin.Statistics):
@@ -220,12 +234,13 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
except:
print("Unable to add file monitor for groups.xml or clients.xml")
raise Bcfg2.Server.Plugin.PluginInitError
-
+
self.clients_xml = XMLMetadataConfig(self, watch_clients, 'clients.xml')
self.groups_xml = XMLMetadataConfig(self, watch_clients, 'groups.xml')
self.states = {}
if watch_clients:
- self.states = {"groups.xml":False, "clients.xml":False}
+ self.states = {"groups.xml": False,
+ "clients.xml": False}
self.addresses = {}
self.auth = dict()
self.clients = {}
@@ -244,10 +259,11 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
self.session_cache = {}
self.default = None
self.pdirty = False
- self.extra = {'groups.xml':[], 'clients.xml':[]}
+ self.extra = {'groups.xml': [],
+ 'clients.xml': []}
self.password = core.password
self.query = MetadataQuery(core.build_metadata,
- lambda:self.clients.keys(),
+ lambda: list(self.clients.keys()),
self.get_client_names_by_groups,
self.get_client_names_by_profiles,
self.get_all_group_names,
@@ -288,7 +304,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
element = lxml.etree.SubElement(self.groups_xml.base_xdata.getroot(),
"Group", name=group_name)
- for key, val in attribs.iteritems():
+ for key, val in list(attribs.items()):
element.set(key, val)
self.groups_xml.write()
@@ -303,7 +319,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
self.logger.error("Unexpected error finding group")
raise MetadataConsistencyError
- for key, val in attribs.iteritems():
+ for key, val in list(attribs.items()):
xdict['xquery'][0].set(key, val)
self.groups_xml.write_xml(xdict['filename'], xdict['xmltree'])
@@ -330,7 +346,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
self.logger.error("Bundle \"%s\" already exists" % (bundle_name))
raise MetadataConsistencyError
root.append(element)
- group_tree = open(self.data + "/groups.xml","w")
+ group_tree = open(self.data + "/groups.xml", "w")
fd = group_tree.fileno()
while True:
try:
@@ -352,7 +368,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
self.logger.error("Bundle \"%s\" not found" % (bundle_name))
raise MetadataConsistencyError
root.remove(node)
- group_tree = open(self.data + "/groups.xml","w")
+ group_tree = open(self.data + "/groups.xml", "w")
fd = group_tree.fileno()
while True:
try:
@@ -384,7 +400,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
element = lxml.etree.SubElement(self.clients_xml.base_xdata.getroot(),
"Client", name=client_name)
- for key, val in attribs.iteritems():
+ for key, val in list(attribs.items()):
element.set(key, val)
self.clients_xml.write()
@@ -401,7 +417,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
raise MetadataConsistencyError
node = xdict['xquery'][0]
- [node.set(key, value) for key, value in attribs.items()]
+ [node.set(key, value) for key, value in list(attribs.items())]
self.clients_xml.write_xml(xdict['filename'], xdict['xmltree'])
def HandleEvent(self, event):
@@ -463,8 +479,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
grouptmp = {}
self.categories = {}
groupseen = list()
- for group in xdata.xpath('//Groups/Group') \
- + xdata.xpath('Group'):
+ for group in xdata.xpath('//Groups/Group'):
if group.get('name') not in groupseen:
groupseen.append(group.get('name'))
else:
@@ -506,17 +521,17 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
ggg))
[self.groups[group][0].add(bund) for bund in bundles]
self.states['groups.xml'] = True
- if False not in self.states.values():
+ if False not in list(self.states.values()):
# check that all client groups are real and complete
- real = self.groups.keys()
- for client in self.clients.keys():
+ real = list(self.groups.keys())
+ for client in list(self.clients.keys()):
if self.clients[client] not in self.profiles:
self.logger.error("Client %s set as nonexistent or incomplete group %s" \
% (client, self.clients[client]))
self.logger.error("Removing client mapping for %s" % (client))
self.bad_clients[client] = self.clients[client]
del self.clients[client]
- for bclient in self.bad_clients.keys():
+ for bclient in list(self.bad_clients.keys()):
if self.bad_clients[bclient] in self.profiles:
self.logger.info("Restored profile mapping for client %s" % bclient)
self.clients[bclient] = self.bad_clients[bclient]
@@ -525,7 +540,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
def set_profile(self, client, profile, addresspair):
"""Set group parameter for provided client."""
self.logger.info("Asserting client %s profile to %s" % (client, profile))
- if False in self.states.values():
+ if False in list(self.states.values()):
raise MetadataRuntimeError
if profile not in self.public:
self.logger.error("Failed to set client %s to private group %s" % (client, profile))
@@ -579,7 +594,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
def get_initial_metadata(self, client):
"""Return the metadata for a given client."""
- if False in self.states.values():
+ if False in list(self.states.values()):
raise MetadataRuntimeError
client = client.lower()
if client in self.aliases:
@@ -604,7 +619,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
password = self.passwords[client]
else:
password = None
- uuids = [item for item, value in self.uuid.iteritems() if value == client]
+ uuids = [item for item, value in list(self.uuid.items()) if value == client]
if uuids:
uuid = uuids[0]
else:
@@ -622,7 +637,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
def get_all_group_names(self):
all_groups = set()
- [all_groups.update(g[1]) for g in self.groups.values()]
+ [all_groups.update(g[1]) for g in list(self.groups.values())]
return all_groups
def get_all_groups_in_category(self, category):
@@ -632,11 +647,12 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
return all_groups
def get_client_names_by_profiles(self, profiles):
- return [client for client, profile in self.clients.iteritems() \
+ return [client for client, profile in list(self.clients.items()) \
if profile in profiles]
def get_client_names_by_groups(self, groups):
- mdata = [self.core.build_metadata(client) for client in self.clients.keys()]
+ mdata = [self.core.build_metadata(client)
+ for client in list(self.clients.keys())]
return [md.hostname for md in mdata if md.groups.issuperset(groups)]
def merge_additional_groups(self, imd, groups):
@@ -766,7 +782,6 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
xdict['xquery'][0].set('auth', 'cert')
self.clients_xml.write_xml(xdict['filename'], xdict['xmltree'])
-
def viz(self, hosts, bundles, key, colors):
"""Admin mode viz support."""
groups_tree = lxml.etree.parse(self.data + "/groups.xml")
@@ -775,7 +790,7 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
except lxml.etree.XIncludeError:
self.logger.error("Failed to process XInclude for file %s" % dest)
groups = groups_tree.getroot()
- categories = {'default':'grey83'}
+ categories = {'default': 'grey83'}
instances = {}
viz_str = ""
egroups = groups.findall("Group") + groups.findall('.//Groups/Group')
@@ -787,12 +802,12 @@ class Metadata(Bcfg2.Server.Plugin.Plugin,
del categories[None]
if hosts:
clients = self.clients
- for client, profile in clients.iteritems():
+ for client, profile in list(clients.items()):
if profile in instances:
instances[profile].append(client)
else:
instances[profile] = [client]
- for profile, clist in instances.iteritems():
+ for profile, clist in list(instances.items()):
clist.sort()
viz_str += '''\t"%s-instances" [ label="%s", shape="record" ];\n''' \
% (profile, '|'.join(clist))
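
Note: most of the Metadata changes wrap dictionary views in list() so the same loops run on Python 2, where iteritems()/keys()/values() were used, and Python 3, where items()/keys()/values() return views. Taking a list snapshot also keeps loops that mutate the dict safe, as in the bad-client cleanup above; minimal illustration:

clients = {'web1': 'webserver', 'ghost1': 'missing-profile'}
for client in list(clients.keys()):     # snapshot, so deletion below is safe
    if clients[client] == 'missing-profile':
        del clients[client]
assert list(clients) == ['web1']
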
diff --git a/src/lib/Server/Plugins/NagiosGen.py b/src/lib/Server/Plugins/NagiosGen.py
index 14277b63d..8a76c130d 100644
--- a/src/lib/Server/Plugins/NagiosGen.py
+++ b/src/lib/Server/Plugins/NagiosGen.py
@@ -1,149 +1,152 @@
'''This module implements a Nagios configuration generator'''
-import glob
-import logging
-import lxml.etree
import os
import re
+import sys
+import glob
import socket
+import logging
+import lxml.etree
import Bcfg2.Server.Plugin
LOGGER = logging.getLogger('Bcfg2.Plugins.NagiosGen')
-host_config_fmt = \
-'''
-define host{
- host_name %s
- alias %s
- address %s
-'''
+line_fmt = '\t%-32s %s'
+
+class NagiosGenConfig(Bcfg2.Server.Plugin.SingleXMLFileBacked,
+ Bcfg2.Server.Plugin.StructFile):
+ def __init__(self, filename, fam):
+ Bcfg2.Server.Plugin.SingleXMLFileBacked.__init__(self, filename, fam)
+ Bcfg2.Server.Plugin.StructFile.__init__(self, filename)
+
class NagiosGen(Bcfg2.Server.Plugin.Plugin,
Bcfg2.Server.Plugin.Generator):
"""NagiosGen is a Bcfg2 plugin that dynamically generates
Nagios configuration file based on Bcfg2 data.
"""
name = 'NagiosGen'
- __version__ = '0.6'
+ __version__ = '0.7'
__author__ = 'bcfg-dev@mcs.anl.gov'
def __init__(self, core, datastore):
Bcfg2.Server.Plugin.Plugin.__init__(self, core, datastore)
Bcfg2.Server.Plugin.Generator.__init__(self)
+ self.config = NagiosGenConfig(os.path.join(self.data, 'config.xml'),
+ core.fam)
self.Entries = {'Path':
- {'/etc/nagiosgen.status' : self.createhostconfig,
- '/etc/nagios/nagiosgen.cfg': self.createserverconfig}}
-
- self.client_attrib = {'encoding':'ascii',
- 'owner':'root',
- 'group':'root',
- 'type':'file',
- 'perms':'0400'}
- self.server_attrib = {'encoding':'ascii',
- 'owner':'nagios',
- 'group':'nagios',
- 'type':'file',
- 'perms':'0440'}
-
- def getparents(self, hostname):
- """Return parents for given hostname."""
- depends=[]
- if not os.path.isfile('%s/parents.xml' % (self.data)):
- return depends
-
- tree = lxml.etree.parse('%s/parents.xml' % (self.data))
- for entry in tree.findall('.//Depend'):
- if entry.attrib['name'] == hostname:
- depends.append(entry.attrib['on'])
- return depends
+ {'/etc/nagiosgen.status': self.createhostconfig,
+ '/etc/nagios/nagiosgen.cfg': self.createserverconfig}}
+
+ self.client_attrib = {'encoding': 'ascii',
+ 'owner': 'root',
+ 'group': 'root',
+ 'type': 'file',
+ 'perms': '0400'}
+ self.server_attrib = {'encoding': 'ascii',
+ 'owner': 'nagios',
+ 'group': 'nagios',
+ 'type': 'file',
+ 'perms': '0440'}
def createhostconfig(self, entry, metadata):
"""Build host specific configuration file."""
host_address = socket.gethostbyname(metadata.hostname)
- host_groups = [grp for grp in metadata.groups if \
- os.path.isfile('%s/%s-group.cfg' % (self.data, grp))]
- host_config = host_config_fmt % \
- (metadata.hostname, metadata.hostname, host_address)
+ host_groups = [grp for grp in metadata.groups
+ if os.path.isfile('%s/%s-group.cfg' % (self.data, grp))]
+ host_config = ['define host {',
+ line_fmt % ('host_name', metadata.hostname),
+ line_fmt % ('alias', metadata.hostname),
+ line_fmt % ('address', host_address)]
if host_groups:
- host_config += ' hostgroups %s\n' % (",".join(host_groups))
-
- xtra = None
- if hasattr(metadata, 'Properties') and \
- 'NagiosGen.xml' in metadata.Properties:
- for q in (metadata.hostname, 'default'):
- xtra = metadata.Properties['NagiosGen.xml'].data.find(q)
- if xtra is not None:
- break
-
- if xtra is not None:
- directives = list(xtra)
- for item in directives:
- host_config += ' %-32s %s\n' % (item.tag, item.text)
-
+ host_config.append(line_fmt % ("hostgroups",
+ ",".join(host_groups)))
+
+ # read the old-style Properties config, but emit a warning.
+ xtra = dict()
+ props = None
+ if (hasattr(metadata, 'Properties') and
+ 'NagiosGen.xml' in metadata.Properties):
+ props = metadata.Properties['NagiosGen.xml'].data
+ if props is not None:
+ LOGGER.warn("Parsing deprecated Properties/NagiosGen.xml. "
+ "Update to the new-style config with "
+ "nagiosgen-convert.py.")
+ xtra = dict((el.tag, el.text)
+ for el in props.find(metadata.hostname))
+ # hold off on parsing the defaults until we've checked for
+ # a new-style config
+
+ # read the old-style parents.xml, but emit a warning
+ pfile = os.path.join(self.data, "parents.xml")
+ if os.path.exists(pfile):
+ LOGGER.warn("Parsing deprecated NagiosGen/parents.xml. "
+ "Update to the new-style config with "
+ "nagiosgen-convert.py.")
+ parents = lxml.etree.parse(pfile)
+ for el in parents.xpath("//Depend[@name='%s']" % metadata.hostname):
+ if 'parent' in xtra:
+ xtra['parent'] += "," + el.get("on")
+ else:
+ xtra['parent'] = el.get("on")
+
+ # read the new-style config and overwrite the old-style config
+ for el in self.config.Match(metadata):
+ if el.tag == 'Option':
+ xtra[el.get("name")] = el.text
+
+ # if we haven't found anything in the new- or old-style
+ # configs, finally read defaults from old-style config
+ if not xtra and props is not None:
+ xtra = dict((el.tag, el.text) for el in props.find('default'))
+
+ if xtra:
+ host_config.extend([line_fmt % (opt, val)
+ for opt, val in list(xtra.items())])
else:
- host_config += ' use default\n'
+ host_config.append(line_fmt % ('use', 'default'))
- host_config += '}\n'
- entry.text = host_config
- [entry.attrib.__setitem__(key, value) for \
- (key, value) in self.client_attrib.iteritems()]
+ host_config.append('}')
+ entry.text = "%s\n" % "\n".join(host_config)
+ [entry.attrib.__setitem__(key, value)
+ for (key, value) in list(self.client_attrib.items())]
try:
- fileh = open("%s/%s-host.cfg" % \
- (self.data, metadata.hostname), 'w')
- fileh.write(host_config)
+ fileh = open("%s/%s-host.cfg" %
+ (self.data, metadata.hostname), 'w')
+ fileh.write(entry.text)
fileh.close()
- except OSError, ioerr:
- LOGGER.error("Failed to write %s/%s-host.cfg" % \
- (self.data, metadata.hostname))
+ except OSError:
+ ioerr = sys.exc_info()[1]
+ LOGGER.error("Failed to write %s/%s-host.cfg" %
+ (self.data, metadata.hostname))
LOGGER.error(ioerr)
def createserverconfig(self, entry, _):
"""Build monolithic server configuration file."""
- host_configs = glob.glob('%s/*-host.cfg' % self.data)
+ host_configs = glob.glob('%s/*-host.cfg' % self.data)
group_configs = glob.glob('%s/*-group.cfg' % self.data)
- host_data = ""
- group_data = ""
+ host_data = []
+ group_data = []
for host in host_configs:
- hostfile = open(host, 'r')
- hostname=host.split('/')[-1].replace('-host.cfg','')
- parents=self.getparents(hostname)
- if parents:
- hostlines = hostfile.readlines()
- else:
- hostdata = hostfile.read()
- hostfile.close()
-
- if parents:
- hostdata=''
- addparents=True
- for line in hostlines:
- line=line.replace('\n','')
- if 'parents' in line:
- line+=','+','.join(parents)
- addparents=False
- if '}' in line:
- line=''
- hostdata+="%s\n" % line
- if addparents:
- hostdata+=" parents %s\n" % ','.join(parents)
- hostdata+="}\n"
-
- host_data += hostdata
+ host_data.append(open(host, 'r').read())
+
for group in group_configs:
group_name = re.sub("(-group.cfg|.*/(?=[^/]+))", "", group)
- if host_data.find(group_name) != -1:
+ if "\n".join(host_data).find(group_name) != -1:
groupfile = open(group, 'r')
- group_data += groupfile.read()
+ group_data.append(groupfile.read())
groupfile.close()
- entry.text = group_data + host_data
- [entry.attrib.__setitem__(key, value) for \
- (key, value) in self.server_attrib.iteritems()]
+
+ entry.text = "%s\n\n%s" % ("\n".join(group_data), "\n".join(host_data))
+ [entry.attrib.__setitem__(key, value)
+ for (key, value) in list(self.server_attrib.items())]
try:
- fileh = open("%s/nagiosgen.cfg" % (self.data), 'w')
- fileh.write(group_data + host_data)
+ fileh = open("%s/nagiosgen.cfg" % self.data, 'w')
+ fileh.write(entry.text)
fileh.close()
- except OSError, ioerr:
- LOGGER.error("Failed to write %s/nagiosgen.cfg" % (self.data))
+ except OSError:
+ ioerr = sys.exc_info()[1]
+ LOGGER.error("Failed to write %s/nagiosgen.cfg" % self.data)
LOGGER.error(ioerr)
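
Note: the rewritten NagiosGen builds the host block from formatted lines and merges host options from three places, with later sources overriding earlier ones: the deprecated Properties/NagiosGen.xml entry for the host, the deprecated parents.xml dependencies folded into a parent option, and Option elements from the new config.xml selected through StructFile.Match; the Properties defaults are only consulted when nothing else matched. The merge order, reduced to a sketch with hypothetical placeholder values:

props_host_options = {'check_command': 'check-host-alive'}   # hypothetical
parents_from_xml = ['core-router']                            # hypothetical
new_config_options = {'notification_period': '24x7'}          # hypothetical
props_default_options = {'use': 'default'}                    # hypothetical

xtra = {}
xtra.update(props_host_options)            # deprecated Properties data, if any
for parent in parents_from_xml:            # deprecated parents.xml entries
    if 'parent' in xtra:
        xtra['parent'] += "," + parent
    else:
        xtra['parent'] = parent
xtra.update(new_config_options)            # new-style config.xml Options win
if not xtra:
    xtra.update(props_default_options)     # last resort: Properties defaults
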
diff --git a/src/lib/Server/Plugins/Ohai.py b/src/lib/Server/Plugins/Ohai.py
index 0f7c7187f..6bd3edc34 100644
--- a/src/lib/Server/Plugins/Ohai.py
+++ b/src/lib/Server/Plugins/Ohai.py
@@ -37,12 +37,12 @@ class OhaiCache(object):
try:
data = open("%s/%s.json" % (self.dirname, item)).read()
except:
- raise KeyError, item
+ raise KeyError(item)
self.cache[item] = json.loads(data)
return self.cache[item]
def __iter__(self):
- data = self.cache.keys()
+ data = list(self.cache.keys())
data.extend([x[:-5] for x in os.listdir(self.dirname)])
return data.__iter__()
@@ -50,7 +50,9 @@ class OhaiCache(object):
class Ohai(Bcfg2.Server.Plugin.Plugin,
Bcfg2.Server.Plugin.Probing,
Bcfg2.Server.Plugin.Connector):
- """The Ohai plugin is used to detect information about the client operating system."""
+ """The Ohai plugin is used to detect information
+ about the client operating system.
+ """
name = 'Ohai'
experimental = True
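
Note: Ohai's cache switches from the statement form "raise KeyError, item", which is a syntax error on Python 3, to the call form raise KeyError(item), and snapshots its cache keys with list() before extending them with on-disk entries. The raise form in isolation, using the same portable exception access as the rest of the commit:

import sys

try:
    raise KeyError('network')    # call form parses on Python 2 and 3 alike
except KeyError:
    print("missing probe data: %s" % sys.exc_info()[1])
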
diff --git a/src/lib/Server/Plugins/Packages.py b/src/lib/Server/Plugins/Packages.py
index 438c1d5c0..4e47f8549 100644
--- a/src/lib/Server/Plugins/Packages.py
+++ b/src/lib/Server/Plugins/Packages.py
@@ -1,4 +1,3 @@
-import cPickle
import copy
import gzip
import tarfile
@@ -8,7 +7,21 @@ import lxml.etree
import os
import re
import sys
-import urllib2
+
+# Compatibility imports
+from Bcfg2.Bcfg2Py3k import cPickle
+from Bcfg2.Bcfg2Py3k import HTTPBasicAuthHandler
+from Bcfg2.Bcfg2Py3k import HTTPPasswordMgrWithDefaultRealm
+from Bcfg2.Bcfg2Py3k import HTTPError
+from Bcfg2.Bcfg2Py3k import install_opener
+from Bcfg2.Bcfg2Py3k import build_opener
+from Bcfg2.Bcfg2Py3k import urlopen
+
+# py3k compatibility
+if sys.hexversion >= 0x03000000:
+ from io import FileIO as BUILTIN_FILE_TYPE
+else:
+ BUILTIN_FILE_TYPE = file
# FIXME: Remove when server python dep is 2.5 or greater
if sys.version_info >= (2, 5):
@@ -22,14 +35,17 @@ import Bcfg2.Server.Plugin
# build sources.list?
# caching for yum
+
class NoData(Exception):
pass
+
class SomeData(Exception):
pass
logger = logging.getLogger('Packages')
+
def source_from_xml(xsource):
ret = dict([('rawurl', False), ('url', False)])
for key, tag in [('groups', 'Group'), ('components', 'Component'),
@@ -60,6 +76,7 @@ def source_from_xml(xsource):
ret['url'] += '/'
return ret
+
def _fetch_url(url):
if '@' in url:
mobj = re.match('(\w+://)([^:]+):([^@]+)@(.*)$', url)
@@ -68,10 +85,11 @@ def _fetch_url(url):
user = mobj.group(2)
passwd = mobj.group(3)
url = mobj.group(1) + mobj.group(4)
- auth = urllib2.HTTPBasicAuthHandler(urllib2.HTTPPasswordMgrWithDefaultRealm())
+ auth = HTTPBasicAuthHandler(HTTPPasswordMgrWithDefaultRealm())
auth.add_password(None, url, user, passwd)
- urllib2.install_opener(urllib2.build_opener(auth))
- return urllib2.urlopen(url).read()
+ install_opener(build_opener(auth))
+ return urlopen(url).read()
+
class Source(object):
basegroups = []
@@ -135,7 +153,7 @@ class Source(object):
agroups = ['global'] + [a for a in self.arches if a in meta.groups]
vdict = dict()
for agrp in agroups:
- for key, value in self.provides[agrp].iteritems():
+ for key, value in list(self.provides[agrp].items()):
if key not in vdict:
vdict[key] = set(value)
else:
@@ -160,11 +178,12 @@ class Source(object):
except ValueError:
logger.error("Packages: Bad url string %s" % url)
continue
- except urllib2.HTTPError, h:
+ except HTTPError:
+ h = sys.exc_info()[1]
logger.error("Packages: Failed to fetch url %s. code=%s" \
% (url, h.code))
continue
- file(fname, 'w').write(data)
+ BUILTIN_FILE_TYPE(fname, 'w').write(data)
def applies(self, metadata):
return len([g for g in self.basegroups if g in metadata.groups]) != 0 and \
@@ -193,6 +212,7 @@ class Source(object):
return {'groups': copy.copy(self.groups), \
'urls': [copy.deepcopy(url) for url in self.url_map]}
+
class YUMSource(Source):
xp = '{http://linux.duke.edu/metadata/common}'
rp = '{http://linux.duke.edu/metadata/rpm}'
@@ -217,13 +237,13 @@ class YUMSource(Source):
self.file_to_arch = dict()
def save_state(self):
- cache = file(self.cachefile, 'wb')
+ cache = BUILTIN_FILE_TYPE(self.cachefile, 'wb')
cPickle.dump((self.packages, self.deps, self.provides,
self.filemap, self.url_map), cache, 2)
cache.close()
def load_state(self):
- data = file(self.cachefile)
+ data = BUILTIN_FILE_TYPE(self.cachefile)
(self.packages, self.deps, self.provides, \
self.filemap, self.url_map) = cPickle.load(data)
@@ -250,7 +270,8 @@ class YUMSource(Source):
except ValueError:
logger.error("Packages: Bad url string %s" % rmdurl)
continue
- except urllib2.HTTPError, h:
+ except HTTPError:
+ h = sys.exc_info()[1]
logger.error("Packages: Failed to fetch url %s. code=%s" \
% (rmdurl, h.code))
continue
@@ -277,7 +298,7 @@ class YUMSource(Source):
fdata = lxml.etree.parse(fname).getroot()
self.parse_filelist(fdata, farch)
# merge data
- sdata = self.packages.values()
+ sdata = list(self.packages.values())
self.packages['global'] = copy.deepcopy(sdata.pop())
while sdata:
self.packages['global'] = self.packages['global'].intersection(sdata.pop())
@@ -337,10 +358,10 @@ class YUMSource(Source):
def get_vpkgs(self, metadata):
rv = Source.get_vpkgs(self, metadata)
- for arch, fmdata in self.filemap.iteritems():
+ for arch, fmdata in list(self.filemap.items()):
if arch not in metadata.groups and arch != 'global':
continue
- for filename, pkgs in fmdata.iteritems():
+ for filename, pkgs in list(fmdata.items()):
rv[filename] = pkgs
return rv
@@ -348,6 +369,7 @@ class YUMSource(Source):
filtered = set([u for u in unknown if u.startswith('rpmlib')])
unknown.difference_update(filtered)
+
class APTSource(Source):
basegroups = ['apt', 'debian', 'ubuntu', 'nexenta']
ptype = 'deb'
@@ -362,13 +384,13 @@ class APTSource(Source):
'components': self.components, 'arches': self.arches, 'groups': self.groups}]
def save_state(self):
- cache = file(self.cachefile, 'wb')
+ cache = BUILTIN_FILE_TYPE(self.cachefile, 'wb')
cPickle.dump((self.pkgnames, self.deps, self.provides),
cache, 2)
cache.close()
def load_state(self):
- data = file(self.cachefile)
+ data = BUILTIN_FILE_TYPE(self.cachefile)
self.pkgnames, self.deps, self.provides = cPickle.load(data)
def filter_unknown(self, unknown):
@@ -407,7 +429,7 @@ class APTSource(Source):
print("Failed to read file %s" % fname)
raise
for line in reader.readlines():
- words = line.strip().split(':', 1)
+ words = str(line.strip()).split(':', 1)
if words[0] == 'Package':
pkgname = words[1].strip().rstrip()
self.pkgnames.add(pkgname)
@@ -449,7 +471,7 @@ class APTSource(Source):
for barch in bdeps:
self.deps[barch][pkgname] = bdeps[barch][pkgname]
provided = set()
- for bprovided in bprov.values():
+ for bprovided in list(bprov.values()):
provided.update(set(bprovided))
for prov in provided:
prset = set()
@@ -469,6 +491,7 @@ class APTSource(Source):
pkg not in self.blacklist and \
(len(self.whitelist) == 0 or pkg in self.whitelist)
+
class PACSource(Source):
basegroups = ['arch', 'parabola']
ptype = 'pacman'
@@ -483,13 +506,13 @@ class PACSource(Source):
'components': self.components, 'arches': self.arches, 'groups': self.groups}]
def save_state(self):
- cache = file(self.cachefile, 'wb')
+ cache = BUILTIN_FILE_TYPE(self.cachefile, 'wb')
cPickle.dump((self.pkgnames, self.deps, self.provides),
cache, 2)
cache.close()
def load_state(self):
- data = file(self.cachefile)
+ data = BUILTIN_FILE_TYPE(self.cachefile)
self.pkgnames, self.deps, self.provides = cPickle.load(data)
def filter_unknown(self, unknown):
@@ -526,7 +549,7 @@ class PACSource(Source):
bdeps[barch] = dict()
bprov[barch] = dict()
try:
- print "try to read : " + fname
+ print("try to read : " + fname)
tar = tarfile.open(fname, "r")
reader = gzip.GzipFile(fname)
except:
@@ -536,7 +559,7 @@ class PACSource(Source):
for tarinfo in tar:
if tarinfo.isdir():
self.pkgnames.add(tarinfo.name.rsplit("-", 2)[0])
- print "added : " + tarinfo.name.rsplit("-", 2)[0]
+ print("added : " + tarinfo.name.rsplit("-", 2)[0])
tar.close()
self.deps['global'] = dict()
@@ -556,7 +579,7 @@ class PACSource(Source):
for barch in bdeps:
self.deps[barch][pkgname] = bdeps[barch][pkgname]
provided = set()
- for bprovided in bprov.values():
+ for bprovided in list(bprov.values()):
provided.update(set(bprovided))
for prov in provided:
prset = set()
@@ -576,6 +599,7 @@ class PACSource(Source):
pkg not in self.blacklist and \
(len(self.whitelist) == 0 or pkg in self.whitelist)
+
class Packages(Bcfg2.Server.Plugin.Plugin,
Bcfg2.Server.Plugin.StructureValidator,
Bcfg2.Server.Plugin.Generator,
@@ -614,7 +638,7 @@ class Packages(Bcfg2.Server.Plugin.Plugin,
vpkgs = dict()
for source in self.get_matching_sources(meta):
s_vpkgs = source.get_vpkgs(meta)
- for name, prov_set in s_vpkgs.iteritems():
+ for name, prov_set in list(s_vpkgs.items()):
if name not in vpkgs:
vpkgs[name] = set(prov_set)
else:
@@ -726,7 +750,9 @@ class Packages(Bcfg2.Server.Plugin.Plugin,
satisfied_vpkgs.add(current)
elif [item for item in vpkg_cache[current] if item in packages]:
if debug:
- self.logger.debug("Packages: requirement %s satisfied by %s" % (current, [item for item in vpkg_cache[current] if item in packages]))
+ self.logger.debug("Packages: requirement %s satisfied by %s" % (current,
+ [item for item in vpkg_cache[current]
+ if item in packages]))
satisfied_vpkgs.add(current)
vpkgs.difference_update(satisfied_vpkgs)
@@ -736,7 +762,9 @@ class Packages(Bcfg2.Server.Plugin.Plugin,
# allow use of virt through explicit specification, then fall back to forcing current on last pass
if [item for item in vpkg_cache[current] if item in packages]:
if debug:
- self.logger.debug("Packages: requirement %s satisfied by %s" % (current, [item for item in vpkg_cache[current] if item in packages]))
+ self.logger.debug("Packages: requirement %s satisfied by %s" % (current,
+ [item for item in vpkg_cache[current]
+ if item in packages]))
satisfied_both.add(current)
elif current in input_requirements or final_pass:
pkgs.add(current)
@@ -828,7 +856,8 @@ class Packages(Bcfg2.Server.Plugin.Plugin,
xdata.xinclude()
xdata = xdata.getroot()
except (lxml.etree.XIncludeError, \
- lxml.etree.XMLSyntaxError), xmlerr:
+ lxml.etree.XMLSyntaxError):
+ xmlerr = sys.exc_info()[1]
self.logger.error("Package: Error processing xml: %s" % xmlerr)
raise Bcfg2.Server.Plugin.PluginInitError
except IOError:
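
The Packages.py hunks above apply two mechanical changes: modules whose names moved between Python 2 and 3 (cPickle, the urllib2 handlers, the file builtin) are now resolved through the Bcfg2.Bcfg2Py3k shim or a BUILTIN_FILE_TYPE alias, and the Python 2-only "except HTTPError, h:" form is rewritten so the exception object is recovered from sys.exc_info(). A minimal standalone sketch of the exception idiom follows; the stdlib fallback import and the fetch() helper are illustrative guesses, not taken from the patch or from Bcfg2Py3k itself.

    import sys

    # Approximation of what the Bcfg2Py3k shim resolves for this hunk:
    # the same names become importable on either interpreter.
    if sys.hexversion >= 0x03000000:
        from urllib.request import urlopen
        from urllib.error import HTTPError
    else:
        from urllib2 import urlopen, HTTPError

    def fetch(url):
        """Illustrative helper, not part of the patch."""
        try:
            return urlopen(url).read()
        except HTTPError:
            # "except HTTPError, h:" is a syntax error on Python 3, and
            # "except HTTPError as h:" does not parse on the oldest
            # Python 2 releases the server still supports, so the
            # exception object is pulled out of sys.exc_info() instead.
            h = sys.exc_info()[1]
            print("Failed to fetch url %s. code=%s" % (url, h.code))
            return None
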
diff --git a/src/lib/Server/Plugins/Pkgmgr.py b/src/lib/Server/Plugins/Pkgmgr.py
index b58a7c91d..b96e7ea7d 100644
--- a/src/lib/Server/Plugins/Pkgmgr.py
+++ b/src/lib/Server/Plugins/Pkgmgr.py
@@ -7,15 +7,17 @@ import Bcfg2.Server.Plugin
logger = logging.getLogger('Bcfg2.Plugins.Pkgmgr')
+
class FuzzyDict(dict):
fuzzy = re.compile('(?P<name>.*):(?P<alist>\S+(,\S+)*)')
+
def __getitem__(self, key):
if isinstance(key, str):
mdata = self.fuzzy.match(key)
if mdata:
return dict.__getitem__(self, mdata.groupdict()['name'])
else:
- print "got non-string key %s" % str(key)
+ print("got non-string key %s" % str(key))
return dict.__getitem__(self, key)
def has_key(self, key):
@@ -33,11 +35,14 @@ class FuzzyDict(dict):
return default
raise
+
class PNode(Bcfg2.Server.Plugin.INode):
- """PNode has a list of packages available at a particular group intersection."""
- splitters = {'rpm':re.compile('^(.*/)?(?P<name>[\w\+\d\.]+(-[\w\+\d\.]+)*)-' + \
+ """PNode has a list of packages available at a
+ particular group intersection.
+ """
+ splitters = {'rpm': re.compile('^(.*/)?(?P<name>[\w\+\d\.]+(-[\w\+\d\.]+)*)-' + \
'(?P<version>[\w\d\.]+-([\w\d\.]+))\.(?P<arch>\S+)\.rpm$'),
- 'encap':re.compile('^(?P<name>[\w-]+)-(?P<version>[\w\d\.+-]+).encap.*$')}
+ 'encap': re.compile('^(?P<name>[\w-]+)-(?P<version>[\w\d\.+-]+).encap.*$')}
ignore = ['Package']
def Match(self, metadata, data):
@@ -54,41 +59,44 @@ class PNode(Bcfg2.Server.Plugin.INode):
def __init__(self, data, pdict, parent=None):
# copy local attributes to all child nodes if no local attribute exists
- if not pdict.has_key('Package'):
+ if 'Package' not in pdict:
pdict['Package'] = set()
for child in data.getchildren():
- for attr in [key for key in data.attrib.keys() \
- if key != 'name' and not child.attrib.has_key(key)]:
+ for attr in [key for key in list(data.attrib.keys())
+ if key != 'name' and key not in child.attrib]:
try:
child.set(attr, data.get(attr))
except:
# don't fail on things like comments and other immutable elements
pass
Bcfg2.Server.Plugin.INode.__init__(self, data, pdict, parent)
- if not self.contents.has_key('Package'):
+ if 'Package' not in self.contents:
self.contents['Package'] = FuzzyDict()
for pkg in data.findall('./Package'):
- if pkg.attrib.has_key('name') and pkg.get('name') not in pdict['Package']:
+ if 'name' in pkg.attrib and pkg.get('name') not in pdict['Package']:
pdict['Package'].add(pkg.get('name'))
if pkg.get('name') != None:
self.contents['Package'][pkg.get('name')] = {}
if pkg.getchildren():
self.contents['Package'][pkg.get('name')]['__children__'] \
= pkg.getchildren()
- if pkg.attrib.has_key('simplefile'):
+ if 'simplefile' in pkg.attrib:
pkg.set('url', "%s/%s" % (pkg.get('uri'), pkg.get('simplefile')))
self.contents['Package'][pkg.get('name')].update(pkg.attrib)
else:
- if pkg.attrib.has_key('file'):
- if pkg.attrib.has_key('multiarch'):
+ if 'file' in pkg.attrib:
+ if 'multiarch' in pkg.attrib:
archs = pkg.get('multiarch').split()
srcs = pkg.get('srcs', pkg.get('multiarch')).split()
- url = ' '.join(["%s/%s" % (pkg.get('uri'), pkg.get('file') % {'src':srcs[idx], 'arch':archs[idx]})
+ url = ' '.join(["%s/%s" % (pkg.get('uri'),
+ pkg.get('file') % {'src':srcs[idx],
+ 'arch':archs[idx]})
for idx in range(len(archs))])
pkg.set('url', url)
else:
- pkg.set('url', '%s/%s' % (pkg.get('uri'), pkg.get('file')))
- if self.splitters.has_key(pkg.get('type')) and pkg.get('file') != None:
+ pkg.set('url', '%s/%s' % (pkg.get('uri'),
+ pkg.get('file')))
+ if pkg.get('type') in self.splitters and pkg.get('file') != None:
mdata = self.splitters[pkg.get('type')].match(pkg.get('file'))
if not mdata:
logger.error("Failed to match pkg %s" % pkg.get('file'))
@@ -112,10 +120,13 @@ class PNode(Bcfg2.Server.Plugin.INode):
class PkgSrc(Bcfg2.Server.Plugin.XMLSrc):
- """PkgSrc files contain a PNode hierarchy that returns matching package entries."""
+ """PkgSrc files contain a PNode hierarchy that
+ returns matching package entries.
+ """
__node__ = PNode
__cacheobj__ = FuzzyDict
+
class Pkgmgr(Bcfg2.Server.Plugin.PrioDir):
"""This is a generator that handles package assignments."""
name = 'Pkgmgr'
@@ -127,8 +138,8 @@ class Pkgmgr(Bcfg2.Server.Plugin.PrioDir):
def HandleEvent(self, event):
'''Handle events and update dispatch table'''
Bcfg2.Server.Plugin.XMLDirectoryBacked.HandleEvent(self, event)
- for src in self.entries.values():
- for itype, children in src.items.iteritems():
+ for src in list(self.entries.values()):
+ for itype, children in list(src.items.items()):
for child in children:
try:
self.Entries[itype][child] = self.BindEntry
@@ -149,7 +160,7 @@ class Pkgmgr(Bcfg2.Server.Plugin.PrioDir):
if inst.get('arch') not in arches]
def HandlesEntry(self, entry, metadata):
- return entry.tag == 'Package' and entry.get('name').split(':')[0] in self.Entries['Package'].keys()
+ return entry.tag == 'Package' and entry.get('name').split(':')[0] in list(self.Entries['Package'].keys())
def HandleEntry(self, entry, metadata):
self.BindEntry(entry, metadata)
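
Pkgmgr.py gets the same treatment for dictionaries: has_key() and iteritems() exist only on Python 2, so membership tests become "in" and iteration goes through list(d.items()), which snapshots the lazy Python 3 view before the loop runs. A throwaway example of the two spellings (the dict contents are made up):

    pkgs = {'openssh': '5.8p1', 'bcfg2': '1.2.0'}

    # Python 2 only:
    #     if pkgs.has_key('bcfg2'): ...
    #     for name, ver in pkgs.iteritems(): ...

    # Portable form used throughout this patch; list() forces a copy,
    # which also keeps the loop safe if the dict is modified inside it.
    if 'bcfg2' in pkgs:
        for name, ver in list(pkgs.items()):
            print("%s-%s" % (name, ver))
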
diff --git a/src/lib/Server/Plugins/Probes.py b/src/lib/Server/Plugins/Probes.py
index 57dd4f698..ea2e79ccc 100644
--- a/src/lib/Server/Plugins/Probes.py
+++ b/src/lib/Server/Plugins/Probes.py
@@ -6,8 +6,10 @@ import Bcfg2.Server.Plugin
specific_probe_matcher = re.compile("(.*/)?(?P<basename>\S+)(.(?P<mode>[GH](\d\d)?)_\S+)")
probe_matcher = re.compile("(.*/)?(?P<basename>\S+)")
+
class ProbeSet(Bcfg2.Server.Plugin.EntrySet):
ignore = re.compile("^(\.#.*|.*~|\\..*\\.(tmp|sw[px])|probed\\.xml)$")
+
def __init__(self, path, fam, encoding, plugin_name):
fpattern = '[0-9A-Za-z_\-]+'
self.plugin_name = plugin_name
@@ -34,7 +36,7 @@ class ProbeSet(Bcfg2.Server.Plugin.EntrySet):
if pname not in build:
build[pname] = entry
- for (name, entry) in build.iteritems():
+ for (name, entry) in list(build.items()):
probe = lxml.etree.Element('probe')
probe.set('name', name.split('/')[-1])
probe.set('source', self.plugin_name)
@@ -47,6 +49,7 @@ class ProbeSet(Bcfg2.Server.Plugin.EntrySet):
ret.append(probe)
return ret
+
class Probes(Bcfg2.Server.Plugin.Plugin,
Bcfg2.Server.Plugin.Probing,
Bcfg2.Server.Plugin.Connector):
@@ -80,7 +83,8 @@ class Probes(Bcfg2.Server.Plugin.Plugin,
value=self.probedata[client][probe])
for group in sorted(self.cgroups[client]):
lxml.etree.SubElement(cx, "Group", name=group)
- data = lxml.etree.tostring(top, encoding='UTF-8', xml_declaration=True,
+ data = lxml.etree.tostring(top, encoding='UTF-8',
+ xml_declaration=True,
pretty_print='true')
try:
datafile = open("%s/%s" % (self.data, 'probed.xml'), 'w')
@@ -98,7 +102,7 @@ class Probes(Bcfg2.Server.Plugin.Plugin,
self.cgroups = {}
for client in data.getchildren():
self.probedata[client.get('name')] = {}
- self.cgroups[client.get('name')]=[]
+ self.cgroups[client.get('name')] = []
for pdata in client:
if (pdata.tag == 'Probe'):
self.probedata[client.get('name')][pdata.get('name')] = pdata.get('value')
@@ -118,7 +122,7 @@ class Probes(Bcfg2.Server.Plugin.Plugin,
def ReceiveDataItem(self, client, data):
"""Receive probe results pertaining to client."""
- if not self.cgroups.has_key(client.hostname):
+ if client.hostname not in self.cgroups:
self.cgroups[client.hostname] = []
if data.text == None:
self.logger.error("Got null response to probe %s from %s" % \
@@ -139,9 +143,9 @@ class Probes(Bcfg2.Server.Plugin.Plugin,
dlines.remove(line)
dtext = "\n".join(dlines)
try:
- self.probedata[client.hostname].update({data.get('name'):dtext})
+ self.probedata[client.hostname].update({data.get('name'): dtext})
except KeyError:
- self.probedata[client.hostname] = {data.get('name'):dtext}
+ self.probedata[client.hostname] = {data.get('name'): dtext}
def get_additional_groups(self, meta):
return self.cgroups.get(meta.hostname, list())
diff --git a/src/lib/Server/Plugins/Properties.py b/src/lib/Server/Plugins/Properties.py
index 2888ef1d1..dea797a10 100644
--- a/src/lib/Server/Plugins/Properties.py
+++ b/src/lib/Server/Plugins/Properties.py
@@ -4,15 +4,45 @@ import lxml.etree
import Bcfg2.Server.Plugin
-class PropertyFile(Bcfg2.Server.Plugin.XMLFileBacked):
+class PropertyFile(Bcfg2.Server.Plugin.StructFile):
"""Class for properties files."""
-
def Index(self):
- """Build data into an xml object."""
- try:
- self.data = lxml.etree.XML(self.data)
- except lxml.etree.XMLSyntaxError:
- Bcfg2.Server.Plugin.logger.error("Failed to parse %s" % self.name)
+ """Build internal data structures."""
+ if type(self.data) is not lxml.etree._Element:
+ try:
+ self.data = lxml.etree.XML(self.data)
+ except lxml.etree.XMLSyntaxError:
+ Bcfg2.Server.Plugin.logger.error("Failed to parse %s" %
+ self.name)
+
+ self.fragments = {}
+ work = {lambda x: True: self.data.getchildren()}
+ while work:
+ (predicate, worklist) = work.popitem()
+ self.fragments[predicate] = \
+ [item for item in worklist
+ if (item.tag != 'Group' and
+ item.tag != 'Client' and
+ not isinstance(item,
+ lxml.etree._Comment))]
+ for item in worklist:
+ cmd = None
+ if item.tag == 'Group':
+ if item.get('negate', 'false').lower() == 'true':
+ cmd = "lambda x:'%s' not in x.groups and predicate(x)"
+ else:
+ cmd = "lambda x:'%s' in x.groups and predicate(x)"
+ elif item.tag == 'Client':
+ if item.get('negate', 'false').lower() == 'true':
+ cmd = "lambda x:x.hostname != '%s' and predicate(x)"
+ else:
+ cmd = "lambda x:x.hostname == '%s' and predicate(x)"
+ # else, ignore item
+ if cmd is not None:
+ newpred = eval(cmd % item.get('name'),
+ {'predicate':predicate})
+ work[newpred] = item.getchildren()
+
class PropDirectoryBacked(Bcfg2.Server.Plugin.DirectoryBacked):
@@ -33,9 +63,10 @@ class Properties(Bcfg2.Server.Plugin.Plugin,
Bcfg2.Server.Plugin.Connector.__init__(self)
try:
self.store = PropDirectoryBacked(self.data, core.fam)
- except OSError, e:
+ except OSError:
+ e = sys.exc_info()[1]
Bcfg2.Server.Plugin.logger.error("Error while creating Properties "
- "store: %s %s" % (e.strerror,e.filename))
+ "store: %s %s" % (e.strerror, e.filename))
raise Bcfg2.Server.Plugin.PluginInitError
def get_additional_data(self, _):
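
The Properties.py change is more than a syntax cleanup: PropertyFile now inherits from StructFile, and Index() builds self.fragments, a mapping from predicate functions to the plain (non-Group, non-Client) children that apply when a client's metadata satisfies that predicate. Nested Group and Client elements tighten the enclosing predicate by eval'ing a lambda string that calls the parent predicate. A standalone sketch of that chaining, using a stand-in metadata class rather than the plugin's real objects:

    # FakeMetadata mimics the two attributes the generated predicates use.
    class FakeMetadata(object):
        def __init__(self, hostname, groups):
            self.hostname = hostname
            self.groups = groups

    # The root worklist starts from a predicate that matches everyone.
    root = lambda x: True

    # A <Group name="web"> child narrows the parent predicate: the client
    # must be in the named group *and* still satisfy the parent.
    cmd = "lambda x: '%s' in x.groups and predicate(x)"
    web_only = eval(cmd % 'web', {'predicate': root})

    print(web_only(FakeMetadata('foo.example.com', ['web'])))   # True
    print(web_only(FakeMetadata('bar.example.com', ['db'])))    # False
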
diff --git a/src/lib/Server/Plugins/SGenshi.py b/src/lib/Server/Plugins/SGenshi.py
index cead06e34..efd981956 100644
--- a/src/lib/Server/Plugins/SGenshi.py
+++ b/src/lib/Server/Plugins/SGenshi.py
@@ -5,6 +5,7 @@ import genshi.input
import genshi.template
import lxml.etree
import logging
+import sys
import Bcfg2.Server.Plugin
import Bcfg2.Server.Plugins.TGenshi
@@ -23,11 +24,14 @@ class SGenshiTemplateFile(Bcfg2.Server.Plugins.TGenshi.TemplateFile):
Bcfg2.Server.Plugins.TGenshi.removecomment)
data = stream.render('xml', strip_whitespace=False)
return lxml.etree.XML(data)
- except LookupError, lerror:
+ except LookupError:
+ lerror = sys.exc_info()[1]
logger.error('Genshi lookup error: %s' % lerror)
- except genshi.template.TemplateError, terror:
+ except genshi.template.TemplateError:
+ terror = sys.exc_info()[1]
logger.error('Genshi template error: %s' % terror)
- except genshi.input.ParseError, perror:
+ except genshi.input.ParseError:
+ perror = sys.exc_info()[1]
logger.error('Genshi parse error: %s' % perror)
raise
diff --git a/src/lib/Server/Plugins/SSHbase.py b/src/lib/Server/Plugins/SSHbase.py
index 96a444875..cf0998aaa 100644
--- a/src/lib/Server/Plugins/SSHbase.py
+++ b/src/lib/Server/Plugins/SSHbase.py
@@ -5,6 +5,7 @@ import binascii
import os
import socket
import shutil
+import sys
import tempfile
from subprocess import Popen, PIPE
import Bcfg2.Server.Plugin
@@ -52,7 +53,8 @@ class SSHbase(Bcfg2.Server.Plugin.Plugin,
try:
Bcfg2.Server.Plugin.DirectoryBacked.__init__(self, self.data,
self.core.fam)
- except OSError, ioerr:
+ except OSError:
+ ioerr = sys.exc_info()[1]
self.logger.error("Failed to load SSHbase repository from %s" \
% (self.data))
self.logger.error(ioerr)
@@ -72,8 +74,8 @@ class SSHbase(Bcfg2.Server.Plugin.Plugin,
def get_skn(self):
"""Build memory cache of the ssh known hosts file."""
if not self.__skn:
- self.__skn = "\n".join([value.data for key, value in \
- self.entries.iteritems() if \
+ self.__skn = "\n".join([str(value.data) for key, value in \
+ list(self.entries.items()) if \
key.endswith('.static')])
names = dict()
# if no metadata is registered yet, defer
@@ -103,7 +105,7 @@ class SSHbase(Bcfg2.Server.Plugin.Plugin,
continue
names[cmeta.hostname] = sorted(names[cmeta.hostname])
# now we have our name cache
- pubkeys = [pubk for pubk in self.entries.keys() \
+ pubkeys = [pubk for pubk in list(self.entries.keys()) \
if pubk.find('.pub.H_') != -1]
pubkeys.sort()
badnames = set()
@@ -131,7 +133,7 @@ class SSHbase(Bcfg2.Server.Plugin.Plugin,
if event and event.filename.endswith('.static'):
self.skn = False
if not self.__skn:
- if (len(self.entries.keys())) >= (len(os.listdir(self.data))-1):
+ if (len(list(self.entries.keys()))) >= (len(os.listdir(self.data)) - 1):
_ = self.skn
def HandlesEntry(self, entry, _):
@@ -205,26 +207,26 @@ class SSHbase(Bcfg2.Server.Plugin.Plugin,
for hostkey in hostkeys:
entry.text += "localhost,localhost.localdomain,127.0.0.1 %s" % (
self.entries[hostkey].data)
- permdata = {'owner':'root',
- 'group':'root',
- 'type':'file',
- 'perms':'0644'}
+ permdata = {'owner': 'root',
+ 'group': 'root',
+ 'type': 'file',
+ 'perms': '0644'}
[entry.attrib.__setitem__(key, permdata[key]) for key in permdata]
def build_hk(self, entry, metadata):
"""This binds host key data into entries."""
client = metadata.hostname
filename = "%s.H_%s" % (entry.get('name').split('/')[-1], client)
- if filename not in self.entries.keys():
+ if filename not in list(self.entries.keys()):
self.GenerateHostKeys(client)
if not filename in self.entries:
self.logger.error("%s still not registered" % filename)
raise Bcfg2.Server.Plugin.PluginExecutionError
keydata = self.entries[filename].data
- permdata = {'owner':'root',
- 'group':'root',
- 'type':'file',
- 'perms':'0600'}
+ permdata = {'owner': 'root',
+ 'group': 'root',
+ 'type': 'file',
+ 'perms': '0600'}
if entry.get('name')[-4:] == '.pub':
permdata['perms'] = '0644'
[entry.attrib.__setitem__(key, permdata[key]) for key in permdata]
@@ -245,7 +247,7 @@ class SSHbase(Bcfg2.Server.Plugin.Plugin,
else:
keytype = 'rsa1'
- if hostkey not in self.entries.keys():
+ if hostkey not in list(self.entries.keys()):
fileloc = "%s/%s" % (self.data, hostkey)
publoc = self.data + '/' + ".".join([hostkey.split('.')[0],
'pub',
@@ -257,8 +259,8 @@ class SSHbase(Bcfg2.Server.Plugin.Plugin,
shutil.copy(temploc, fileloc)
shutil.copy("%s.pub" % temploc, publoc)
self.AddEntry(hostkey)
- self.AddEntry(".".join([hostkey.split('.')[0]]+['pub', "H_%s" \
- % client]))
+ self.AddEntry(".".join([hostkey.split('.')[0]] + ['pub', "H_%s" \
+ % client]))
try:
os.unlink(temploc)
os.unlink("%s.pub" % temploc)
@@ -277,7 +279,7 @@ class SSHbase(Bcfg2.Server.Plugin.Plugin,
try:
open(filename, 'w').write(entry['text'])
if log:
- print "Wrote file %s" % filename
+ print("Wrote file %s" % filename)
except KeyError:
self.logger.error("Failed to pull %s. This file does not currently "
"exist on the client" % entry.get('name'))
diff --git a/src/lib/Server/Plugins/SSLCA.py b/src/lib/Server/Plugins/SSLCA.py
index 1c9e1b59d..baaa14ba9 100644
--- a/src/lib/Server/Plugins/SSLCA.py
+++ b/src/lib/Server/Plugins/SSLCA.py
@@ -5,7 +5,8 @@ import posixpath
import tempfile
import os
from subprocess import Popen, PIPE, STDOUT
-from ConfigParser import ConfigParser
+# Compatibility import
+from Bcfg2.Bcfg2Py3k import ConfigParser
class SSLCA(Bcfg2.Server.Plugin.GroupSpool):
@@ -41,14 +42,14 @@ class SSLCA(Bcfg2.Server.Plugin.GroupSpool):
if event.filename.endswith('.xml'):
if action in ['exists', 'created', 'changed']:
if event.filename.endswith('key.xml'):
- key_spec = dict(lxml.etree.parse(epath).find('Key').items())
+ key_spec = dict(list(lxml.etree.parse(epath).find('Key').items()))
self.key_specs[ident] = {
'bits': key_spec.get('bits', 2048),
'type': key_spec.get('type', 'rsa')
}
self.Entries['Path'][ident] = self.get_key
elif event.filename.endswith('cert.xml'):
- cert_spec = dict(lxml.etree.parse(epath).find('Cert').items())
+ cert_spec = dict(list(lxml.etree.parse(epath).find('Cert').items()))
ca = cert_spec.get('ca', 'default')
self.cert_specs[ident] = {
'ca': ca,
@@ -64,7 +65,7 @@ class SSLCA(Bcfg2.Server.Plugin.GroupSpool):
}
cp = ConfigParser()
cp.read(self.core.cfile)
- self.CAs[ca] = dict(cp.items('sslca_'+ca))
+ self.CAs[ca] = dict(cp.items('sslca_' + ca))
self.Entries['Path'][ident] = self.get_cert
if action == 'deleted':
if ident in self.Entries['Path']:
@@ -99,12 +100,14 @@ class SSLCA(Bcfg2.Server.Plugin.GroupSpool):
# check if we already have a hostfile, or need to generate a new key
# TODO: verify key fits the specs
path = entry.get('name')
- filename = "".join([path, '/', path.rsplit('/', 1)[1], '.H_', metadata.hostname])
- if filename not in self.entries.keys():
+ filename = "".join([path, '/', path.rsplit('/', 1)[1],
+ '.H_', metadata.hostname])
+ if filename not in list(self.entries.keys()):
key = self.build_key(filename, entry, metadata)
open(self.data + filename, 'w').write(key)
entry.text = key
- self.entries[filename] = self.__child__("%s%s" % (self.data, filename))
+ self.entries[filename] = self.__child__("%s%s" % (self.data,
+ filename))
self.entries[filename].HandleEvent()
else:
entry.text = self.entries[filename].data
@@ -135,23 +138,28 @@ class SSLCA(Bcfg2.Server.Plugin.GroupSpool):
[entry.attrib.__setitem__(key, permdata[key]) for key in permdata]
path = entry.get('name')
- filename = "".join([path, '/', path.rsplit('/', 1)[1], '.H_', metadata.hostname])
+ filename = "".join([path, '/', path.rsplit('/', 1)[1],
+ '.H_', metadata.hostname])
# first - ensure we have a key to work with
key = self.cert_specs[entry.get('name')].get('key')
- key_filename = "".join([key, '/', key.rsplit('/', 1)[1], '.H_', metadata.hostname])
+ key_filename = "".join([key, '/', key.rsplit('/', 1)[1],
+ '.H_', metadata.hostname])
if key_filename not in self.entries:
e = lxml.etree.Element('Path')
e.attrib['name'] = key
self.core.Bind(e, metadata)
# check if we have a valid hostfile
- if filename in self.entries.keys() and self.verify_cert(filename, key_filename, entry):
+ if filename in list(self.entries.keys()) and self.verify_cert(filename,
+ key_filename,
+ entry):
entry.text = self.entries[filename].data
else:
cert = self.build_cert(key_filename, entry, metadata)
open(self.data + filename, 'w').write(cert)
- self.entries[filename] = self.__child__("%s%s" % (self.data, filename))
+ self.entries[filename] = self.__child__("%s%s" % (self.data,
+ filename))
self.entries[filename].HandleEvent()
entry.text = cert
@@ -188,7 +196,6 @@ class SSLCA(Bcfg2.Server.Plugin.GroupSpool):
return True
return False
-
def build_cert(self, key_filename, entry, metadata):
"""
creates a new certificate according to the specification
@@ -200,9 +207,14 @@ class SSLCA(Bcfg2.Server.Plugin.GroupSpool):
days = self.cert_specs[entry.get('name')]['days']
passphrase = self.CAs[ca].get('passphrase')
if passphrase:
- cmd = "openssl ca -config %s -in %s -days %s -batch -passin pass:%s" % (ca_config, req, days, passphrase)
+ cmd = "openssl ca -config %s -in %s -days %s -batch -passin pass:%s" % (ca_config,
+ req,
+ days,
+ passphrase)
else:
- cmd = "openssl ca -config %s -in %s -days %s -batch" % (ca_config, req, days)
+ cmd = "openssl ca -config %s -in %s -days %s -batch" % (ca_config,
+ req,
+ days)
cert = Popen(cmd, shell=True, stdout=PIPE).stdout.read()
try:
os.unlink(req_config)
@@ -234,7 +246,7 @@ class SSLCA(Bcfg2.Server.Plugin.GroupSpool):
},
'alt_names': {}
}
- for section in defaults.keys():
+ for section in list(defaults.keys()):
cp.add_section(section)
for key in defaults[section]:
cp.set(section, key, defaults[section][key])
@@ -242,7 +254,7 @@ class SSLCA(Bcfg2.Server.Plugin.GroupSpool):
altnames = list(metadata.aliases)
altnames.append(metadata.hostname)
for altname in altnames:
- cp.set('alt_names', 'DNS.'+str(x), altname)
+ cp.set('alt_names', 'DNS.' + str(x), altname)
x += 1
for item in ['C', 'L', 'ST', 'O', 'OU', 'emailAddress']:
if self.cert_specs[entry.get('name')][item]:
@@ -259,6 +271,9 @@ class SSLCA(Bcfg2.Server.Plugin.GroupSpool):
req = tempfile.mkstemp()[1]
days = self.cert_specs[entry.get('name')]['days']
key = self.data + key_filename
- cmd = "openssl req -new -config %s -days %s -key %s -text -out %s" % (req_config, days, key, req)
+ cmd = "openssl req -new -config %s -days %s -key %s -text -out %s" % (req_config,
+ days,
+ key,
+ req)
res = Popen(cmd, shell=True, stdout=PIPE).stdout.read()
return req
diff --git a/src/lib/Server/Plugins/Snapshots.py b/src/lib/Server/Plugins/Snapshots.py
index a4489ae95..8b6bad574 100644
--- a/src/lib/Server/Plugins/Snapshots.py
+++ b/src/lib/Server/Plugins/Snapshots.py
@@ -8,10 +8,13 @@ import Bcfg2.Server.Plugin
import Bcfg2.Server.Snapshots
import Bcfg2.Logger
from Bcfg2.Server.Snapshots.model import Snapshot
-import Queue
+import sys
import time
import threading
+# Compatibility import
+from Bcfg2.Bcfg2Py3k import Queue
+
logger = logging.getLogger('Snapshots')
ftypes = ['ConfigFile', 'SymLink', 'Directory']
@@ -24,13 +27,21 @@ datafields = {
'SymLink': ['to'],
}
+
+def u_str(string):
+ if sys.hexversion >= 0x03000000:
+ return string
+ else:
+ return unicode(string)
+
+
def build_snap_ent(entry):
basefields = []
if entry.tag in ['Package', 'Service']:
basefields += ['type']
- desired = dict([(key, unicode(entry.get(key))) for key in basefields])
- state = dict([(key, unicode(entry.get(key))) for key in basefields])
- desired.update([(key, unicode(entry.get(key))) for key in \
+ desired = dict([(key, u_str(entry.get(key))) for key in basefields])
+ state = dict([(key, u_str(entry.get(key))) for key in basefields])
+ desired.update([(key, u_str(entry.get(key))) for key in \
datafields[entry.tag]])
if entry.tag == 'ConfigFile' or \
((entry.tag == 'Path') and (entry.get('type') == 'file')):
@@ -38,19 +49,19 @@ def build_snap_ent(entry):
desired['contents'] = None
else:
if entry.get('encoding', 'ascii') == 'ascii':
- desired['contents'] = unicode(entry.text)
+ desired['contents'] = u_str(entry.text)
else:
- desired['contents'] = unicode(binascii.a2b_base64(entry.text))
+ desired['contents'] = u_str(binascii.a2b_base64(entry.text))
if 'current_bfile' in entry.attrib:
- state['contents'] = unicode(binascii.a2b_base64( \
+ state['contents'] = u_str(binascii.a2b_base64( \
entry.get('current_bfile')))
elif 'current_bdiff' in entry.attrib:
diff = binascii.a2b_base64(entry.get('current_bdiff'))
- state['contents'] = unicode( \
+ state['contents'] = u_str( \
'\n'.join(difflib.restore(diff.split('\n'), 1)))
- state.update([(key, unicode(entry.get('current_' + key, entry.get(key)))) \
+ state.update([(key, u_str(entry.get('current_' + key, entry.get(key)))) \
for key in datafields[entry.tag]])
if entry.tag in ['ConfigFile', 'Path'] and entry.get('exists', 'true') == 'false':
state = None
@@ -66,7 +77,7 @@ class Snapshots(Bcfg2.Server.Plugin.Statistics,
Bcfg2.Server.Plugin.Plugin.__init__(self, core, datastore)
Bcfg2.Server.Plugin.Statistics.__init__(self)
self.session = Bcfg2.Server.Snapshots.setup_session(core.cfile)
- self.work_queue = Queue.Queue()
+ self.work_queue = Queue()
self.loader = threading.Thread(target=self.load_snapshot)
self.loader.start()
@@ -92,9 +103,9 @@ class Snapshots(Bcfg2.Server.Plugin.Statistics,
bad = []
state = xdata.find('.//Statistics')
correct = state.get('state') == 'clean'
- revision = unicode(state.get('revision', '-1'))
+ revision = u_str(state.get('revision', '-1'))
for entry in state.find('.//Bad'):
- data = [False, False, unicode(entry.get('name'))] \
+ data = [False, False, u_str(entry.get('name'))] \
+ build_snap_ent(entry)
if entry.tag in ftypes:
etag = 'Path'
@@ -107,24 +118,24 @@ class Snapshots(Bcfg2.Server.Plugin.Statistics,
else:
etag = entry.tag
if entry.get('name') in entries[etag]:
- data = [True, False, unicode(entry.get('name'))] + \
+ data = [True, False, u_str(entry.get('name'))] + \
build_snap_ent(entry)
else:
- data = [True, False, unicode(entry.get('name'))] + \
+ data = [True, False, u_str(entry.get('name'))] + \
build_snap_ent(entry)
for entry in state.find('.//Extra'):
if entry.tag in datafields:
data = build_snap_ent(entry)[1]
- ename = unicode(entry.get('name'))
+ ename = u_str(entry.get('name'))
data['name'] = ename
extra[entry.tag][ename] = data
else:
- print "extra", entry.tag, entry.get('name')
+ print("extra", entry.tag, entry.get('name'))
t2 = time.time()
snap = Snapshot.from_data(self.session, correct, revision,
metadata, entries, extra)
self.session.add(snap)
self.session.commit()
t3 = time.time()
- logger.info("Snapshot storage took %fs" % (t3-t2))
+ logger.info("Snapshot storage took %fs" % (t3 - t2))
return True
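
Snapshots.py wraps every former unicode() call in a small u_str() helper because the unicode builtin no longer exists on Python 3, where str is already text. The helper appears verbatim in the hunk above; the snippet below simply applies it the way build_snap_ent does to an entry's attributes (the attribute dict is invented for the example):

    import sys

    def u_str(string):
        # str is already unicode text on Python 3; promote explicitly on 2.
        if sys.hexversion >= 0x03000000:
            return string
        else:
            return unicode(string)

    entry_attrib = {'owner': 'root', 'group': 'root', 'perms': '0644'}
    desired = dict([(key, u_str(value))
                    for key, value in list(entry_attrib.items())])
    print(desired)
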
diff --git a/src/lib/Server/Plugins/Statistics.py b/src/lib/Server/Plugins/Statistics.py
index c7fa0e534..f4f4c7175 100644
--- a/src/lib/Server/Plugins/Statistics.py
+++ b/src/lib/Server/Plugins/Statistics.py
@@ -8,7 +8,6 @@ import logging
from lxml.etree import XML, SubElement, Element, XMLSyntaxError
import lxml.etree
import os
-import Queue
from time import asctime, localtime, time, strptime, mktime
import threading
@@ -33,7 +32,8 @@ class StatisticsStore(object):
or force:
try:
fout = open(self.filename + '.new', 'w')
- except IOError, ioerr:
+ except IOError:
+ ioerr = sys.exc_info()[1]
self.logger.error("Failed to open %s for writing: %s" % (self.filename + '.new', ioerr))
else:
fout.write(lxml.etree.tostring(self.element, encoding='UTF-8', xml_declaration=True))
diff --git a/src/lib/Server/Plugins/Svn2.py b/src/lib/Server/Plugins/Svn2.py
index 875e9e6a6..35f555294 100644
--- a/src/lib/Server/Plugins/Svn2.py
+++ b/src/lib/Server/Plugins/Svn2.py
@@ -1,4 +1,3 @@
-import os
try:
import pysvn
missing = False
@@ -7,7 +6,7 @@ except:
import Bcfg2.Server.Plugin
class Svn2(Bcfg2.Server.Plugin.Plugin,
- Bcfg2.Server.Plugin.Version):
+ Bcfg2.Server.Plugin.Version):
"""Svn is a version plugin for dealing with Bcfg2 repos."""
name = 'Svn2'
__version__ = '$Id$'
@@ -36,7 +35,7 @@ class Svn2(Bcfg2.Server.Plugin.Plugin,
if not self.revision:
raise Bcfg2.Server.Plugin.PluginInitError
- self.logger.debug("Initialized svn plugin with svn root %s at revision %s" \
+ self.logger.debug("Initialized svn plugin with svn root %s at revision %s"
% (self.svn_root, revision))
def get_revision(self):
@@ -63,25 +62,50 @@ class Svn2(Bcfg2.Server.Plugin.Plugin,
#FIXME - look for conflicts?
- for file in file_list:
- stat = self.client.status(file)
+ for fname in file_list:
+ stat = self.client.status(fname)
self.client.add([f.path for f in stat \
if f.text_status == pysvn.wc_status_kind.unversioned])
try:
self.revision = self.client.checkin([self.datastore], comment,
recurse=True)
self.revision = self.client.update(self.datastore, recurse=True)[0]
- self.logger.info("Svn2: Commited changes. At %s" % self.revision.number)
- except:
- self.logger.error("Svn2: Failed to commit changes", exc_info=1)
+ self.logger.info("Svn2: Commited changes. At %s" %
+ self.revision.number)
+ except Exception, err:
+ # try to be smart about the error we got back
+ details = None
+ if "callback_ssl_server_trust_prompt" in err.message:
+ details = "SVN server certificate is not trusted"
+ elif "callback_get_login" in err.message:
+ details = "SVN credentials not cached"
+
+ if details is None:
+ self.logger.error("Svn2: Failed to commit changes",
+ exc_info=1)
+ else:
+ self.logger.error("Svn2: Failed to commit changes: %s" %
+ details)
def Update(self):
'''Svn2.Update() => True|False\nUpdate svn working copy\n'''
try:
old_revision = self.revision.number
self.revision = self.client.update(self.datastore, recurse=True)[0]
- except:
- self.logger.error("Svn2: Failed to update server repository", exc_info=1)
+ except Exception, err:
+ # try to be smart about the error we got back
+ details = None
+ if "callback_ssl_server_trust_prompt" in err.message:
+ details = "SVN server certificate is not trusted"
+ elif "callback_get_login" in err.message:
+ details = "SVN credentials not cached"
+
+ if details is None:
+ self.logger.error("Svn2: Failed to update server repository",
+ exc_info=1)
+ else:
+ self.logger.error("Svn2: Failed to update server repository: %s" %
+ details)
return False
if old_revision == self.revision.number:
diff --git a/src/lib/Server/Plugins/TCheetah.py b/src/lib/Server/Plugins/TCheetah.py
index d40f4baf3..49be88881 100644
--- a/src/lib/Server/Plugins/TCheetah.py
+++ b/src/lib/Server/Plugins/TCheetah.py
@@ -6,6 +6,9 @@ import logging
import sys
import traceback
import Bcfg2.Server.Plugin
+# py3k compatibility
+if sys.hexversion >= 0x03000000:
+ unicode = str
logger = logging.getLogger('Bcfg2.Plugins.TCheetah')
@@ -36,7 +39,8 @@ class TemplateFile:
self.template = Cheetah.Template.Template(open(self.name).read(),
compilerSettings=s,
searchList=self.searchlist)
- except Cheetah.Parser.ParseError, perror:
+ except Cheetah.Parser.ParseError:
+ perror = sys.exc_info()[1]
logger.error("Cheetah parse error for file %s" % (self.name))
logger.error(perror.report())
@@ -56,7 +60,7 @@ class TemplateFile:
entry.text = self.template
else:
if entry.get('encoding') == 'base64':
- # take care of case where file needs base64 encoding
+ # take care of case where file needs base64 encoding
entry.text = binascii.b2a_base64(self.template)
else:
entry.text = unicode(str(self.template), self.encoding)
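
TCheetah (and TGenshi just below) keep their unicode(rendered_output, encoding) calls but alias the missing builtin once, under the same hexversion guard used elsewhere in this series, instead of branching at every call site. An illustration with made-up template output:

    import sys

    # On Python 3, str(bytes, encoding) performs the decode that
    # unicode(bytes, encoding) did on Python 2.
    if sys.hexversion >= 0x03000000:
        unicode = str

    rendered = b"caf\xc3\xa9"              # pretend template output
    print(unicode(rendered, "utf-8"))      # text on either interpreter
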
diff --git a/src/lib/Server/Plugins/TGenshi.py b/src/lib/Server/Plugins/TGenshi.py
index 2a12672cc..bc5e00400 100644
--- a/src/lib/Server/Plugins/TGenshi.py
+++ b/src/lib/Server/Plugins/TGenshi.py
@@ -3,7 +3,11 @@ __revision__ = '$Revision$'
import binascii
import logging
+import sys
import Bcfg2.Server.Plugin
+# py3k compatibility
+if sys.hexversion >= 0x03000000:
+ unicode = str
logger = logging.getLogger('Bcfg2.Plugins.TGenshi')
@@ -63,11 +67,14 @@ class TemplateFile:
try:
self.template = loader.load(self.name, cls=self.template_cls,
encoding=self.encoding)
- except LookupError, lerror:
+ except LookupError:
+ lerror = sys.exc_info()[1]
logger.error('Genshi lookup error: %s' % lerror)
- except TemplateError, terror:
+ except TemplateError:
+ terror = sys.exc_info()[1]
logger.error('Genshi template error: %s' % terror)
- except genshi.input.ParseError, perror:
+ except genshi.input.ParseError:
+ perror = sys.exc_info()[1]
logger.error('Genshi parse error: %s' % perror)
def bind_entry(self, entry, metadata):
@@ -92,7 +99,7 @@ class TemplateFile:
entry.text = textdata
else:
if entry.get('encoding') == 'base64':
- # take care of case where file needs base64 encoding
+ # take care of case where file needs base64 encoding
entry.text = binascii.b2a_base64(textdata)
else:
entry.text = unicode(textdata, self.encoding)
@@ -107,10 +114,12 @@ class TemplateFile:
entry.text = unicode(xmldata, self.encoding)
if entry.text == '':
entry.set('empty', 'true')
- except TemplateError, terror:
+ except TemplateError:
+ terror = sys.exc_info()[1]
logger.error('Genshi template error: %s' % terror)
raise Bcfg2.Server.Plugin.PluginExecutionError
- except AttributeError, err:
+ except AttributeError:
+ err = sys.exc_info()[1]
logger.error('Genshi template loading error: %s' % err)
raise Bcfg2.Server.Plugin.PluginExecutionError
diff --git a/src/lib/Server/Reports/backends.py b/src/lib/Server/Reports/backends.py
index 9207038ed..85241932f 100644
--- a/src/lib/Server/Reports/backends.py
+++ b/src/lib/Server/Reports/backends.py
@@ -1,35 +1,34 @@
from django.contrib.auth.models import User
from nisauth import *
+
class NISBackend(object):
def authenticate(self, username=None, password=None):
try:
- print "start nis authenticate"
+ print("start nis authenticate")
n = nisauth(username, password)
temp_pass = User.objects.make_random_password(100)
nis_user = dict(username=username,
)
- user_session_obj = dict(
- email = username,
- first_name = None,
- last_name = None,
- uid = n.uid
- )
+ user_session_obj = dict(email=username,
+ first_name=None,
+ last_name=None,
+ uid=n.uid)
user, created = User.objects.get_or_create(username=username)
-
+
return user
- except NISAUTHError, e:
- print str(e)
+ except NISAUTHError:
+ e = sys.exc_info()[1]
+ print(e)
return None
-
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
- except User.DoesNotExist, e:
- print str(e)
+ except User.DoesNotExist:
+ e = sys.exc_info()[1]
+ print(e)
return None
-
diff --git a/src/lib/Server/Reports/importscript.py b/src/lib/Server/Reports/importscript.py
index cdfd8079c..b6a3c2599 100755
--- a/src/lib/Server/Reports/importscript.py
+++ b/src/lib/Server/Reports/importscript.py
@@ -1,11 +1,17 @@
#! /usr/bin/env python
-'''Imports statistics.xml and clients.xml files in to database backend for new statistics engine'''
+"""
+Imports statistics.xml and clients.xml files in to database backend for
+new statistics engine
+"""
__revision__ = '$Revision$'
-import os, sys, binascii
+import binascii
+import os
+import sys
try:
import Bcfg2.Server.Reports.settings
-except Exception, e:
+except Exception:
+ e = sys.exc_info()[1]
sys.stderr.write("Failed to load configuration settings. %s\n" % e)
sys.exit(1)
@@ -24,21 +30,24 @@ from datetime import datetime
from time import strptime
from django.db import connection
from Bcfg2.Server.Reports.updatefix import update_database
-import ConfigParser
import logging
import Bcfg2.Logger
import platform
+# Compatibility import
+from Bcfg2.Bcfg2Py3k import ConfigParser
+
+
def build_reason_kwargs(r_ent):
- binary_file=False
+ binary_file = False
if r_ent.get('current_bfile', False):
- binary_file=True
+ binary_file = True
rc_diff = r_ent.get('current_bfile')
- if len(rc_diff) > 1024*1024:
+ if len(rc_diff) > 1024 * 1024:
rc_diff = ''
elif len(rc_diff) == 0:
# No point in flagging binary if we have no data
- binary_file=False
+ binary_file = False
elif r_ent.get('current_bdiff', False):
rc_diff = binascii.a2b_base64(r_ent.get('current_bdiff'))
elif r_ent.get('current_diff', False):
@@ -57,7 +66,7 @@ def build_reason_kwargs(r_ent):
current_to=r_ent.get('current_to', default=""),
version=r_ent.get('version', default=""),
current_version=r_ent.get('current_version', default=""),
- current_exists=r_ent.get('current_exists', default="True").capitalize()=="True",
+ current_exists=r_ent.get('current_exists', default="True").capitalize() == "True",
current_diff=rc_diff,
is_binary=binary_file)
@@ -75,7 +84,7 @@ def load_stats(cdata, sdata, vlevel, logger, quick=False, location=''):
name = node.get('name')
c_inst, created = Client.objects.get_or_create(name=name)
if vlevel > 0:
- logger.info("Client %s added to db" % name)
+ logger.info("Client %s added to db" % name)
clients[name] = c_inst
try:
pingability[name]
@@ -93,24 +102,30 @@ def load_stats(cdata, sdata, vlevel, logger, quick=False, location=''):
continue
else:
newint = Interaction(client=c_inst,
- timestamp = timestamp,
- state = statistics.get('state', default="unknown"),
- repo_rev_code = statistics.get('revision',default="unknown"),
- client_version = statistics.get('client_version',default="unknown"),
- goodcount = statistics.get('good',default="0"),
- totalcount = statistics.get('total',default="0"),
- server = location)
+ timestamp=timestamp,
+ state=statistics.get('state',
+ default="unknown"),
+ repo_rev_code=statistics.get('revision',
+ default="unknown"),
+ client_version=statistics.get('client_version',
+ default="unknown"),
+ goodcount=statistics.get('good',
+ default="0"),
+ totalcount=statistics.get('total',
+ default="0"),
+ server=location)
newint.save()
current_interaction = newint
if vlevel > 0:
- logger.info("Interaction for %s at %s with id %s INSERTED in to db"%(c_inst.id,
+ logger.info("Interaction for %s at %s with id %s INSERTED in to db" % (c_inst.id,
timestamp, current_interaction.id))
-
- counter_fields = { TYPE_CHOICES[0]: 0, TYPE_CHOICES[1]: 0, TYPE_CHOICES[2]: 0 }
+ counter_fields = {TYPE_CHOICES[0]: 0,
+ TYPE_CHOICES[1]: 0,
+ TYPE_CHOICES[2]: 0}
pattern = [('Bad/*', TYPE_CHOICES[0]),
('Extra/*', TYPE_CHOICES[2]),
- ('Modified/*', TYPE_CHOICES[1]),]
+ ('Modified/*', TYPE_CHOICES[1])]
for (xpath, type) in pattern:
for x in statistics.findall(xpath):
counter_fields[type] = counter_fields[type] + 1
@@ -118,25 +133,23 @@ def load_stats(cdata, sdata, vlevel, logger, quick=False, location=''):
try:
rr = None
- if not quick:
- try:
- rr = Reason.objects.filter(**kargs)[0]
- except IndexError:
- pass
- if not rr:
+ try:
+ rr = Reason.objects.filter(**kargs)[0]
+ except IndexError:
rr = Reason(**kargs)
rr.save()
if vlevel > 0:
logger.info("Created reason: %s" % rr.id)
- except Exception, ex:
+ except Exception:
+ ex = sys.exc_info()[1]
logger.error("Failed to create reason for %s: %s" % (x.get('name'), ex))
rr = Reason(current_exists=x.get('current_exists',
- default="True").capitalize()=="True")
+ default="True").capitalize() == "True")
rr.save()
entry, created = Entries.objects.get_or_create(\
name=x.get('name'), kind=x.tag)
-
+
Entries_interactions(entry=entry, reason=rr,
interaction=current_interaction,
type=type[0]).save()
@@ -151,7 +164,7 @@ def load_stats(cdata, sdata, vlevel, logger, quick=False, location=''):
mperfs = []
for times in statistics.findall('OpStamps'):
- for metric, value in times.items():
+ for metric, value in list(times.items()):
mmatch = []
if not quick:
mmatch = Performance.objects.filter(metric=metric, value=value)
@@ -164,7 +177,7 @@ def load_stats(cdata, sdata, vlevel, logger, quick=False, location=''):
mperfs.append(mperf)
current_interaction.performance_items.add(*mperfs)
- for key in pingability.keys():
+ for key in list(pingability.keys()):
if key not in clients:
continue
try:
@@ -191,27 +204,33 @@ if __name__ == '__main__':
clientpath = False
statpath = False
syslog = False
-
+
try:
- opts, args = getopt(argv[1:], "hvudc:s:CS", ["help", "verbose", "updates" ,
- "debug", "clients=", "stats=",
- "config=", "syslog"])
- except GetoptError, mesg:
+ opts, args = getopt(argv[1:], "hvudc:s:CS", ["help",
+ "verbose",
+ "updates",
+ "debug",
+ "clients=",
+ "stats=",
+ "config=",
+ "syslog"])
+ except GetoptError:
+ mesg = sys.exc_info()[1]
# print help information and exit:
- print "%s\nUsage:\nimportscript.py [-h] [-v] [-u] [-d] [-S] [-C bcfg2 config file] [-c clients-file] [-s statistics-file]" % (mesg)
- raise SystemExit, 2
+ print("%s\nUsage:\nimportscript.py [-h] [-v] [-u] [-d] [-S] [-C bcfg2 config file] [-c clients-file] [-s statistics-file]" % (mesg))
+ raise SystemExit(2)
for o, a in opts:
if o in ("-h", "--help"):
- print "Usage:\nimportscript.py [-h] [-v] -c <clients-file> -s <statistics-file> \n"
- print "h : help; this message"
- print "v : verbose; print messages on record insertion/skip"
- print "u : updates; print status messages as items inserted semi-verbose"
- print "d : debug; print most SQL used to manipulate database"
- print "C : path to bcfg2.conf config file."
- print "c : clients.xml file"
- print "s : statistics.xml file"
- print "S : syslog; output to syslog"
+ print("Usage:\nimportscript.py [-h] [-v] -c <clients-file> -s <statistics-file> \n")
+ print("h : help; this message")
+ print("v : verbose; print messages on record insertion/skip")
+ print("u : updates; print status messages as items inserted semi-verbose")
+ print("d : debug; print most SQL used to manipulate database")
+ print("C : path to bcfg2.conf config file.")
+ print("c : clients.xml file")
+ print("s : statistics.xml file")
+ print("S : syslog; output to syslog")
raise SystemExit
if o in ["-C", "--config"]:
cpath = a
@@ -243,28 +262,33 @@ if __name__ == '__main__':
try:
statpath = "%s/etc/statistics.xml" % cf.get('server', 'repository')
except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
- print "Could not read bcfg2.conf; exiting"
- raise SystemExit, 1
+ print("Could not read bcfg2.conf; exiting")
+ raise SystemExit(1)
try:
statsdata = XML(open(statpath).read())
except (IOError, XMLSyntaxError):
- print("StatReports: Failed to parse %s"%(statpath))
- raise SystemExit, 1
+ print("StatReports: Failed to parse %s" % (statpath))
+ raise SystemExit(1)
if not clientpath:
try:
clientspath = "%s/Metadata/clients.xml" % \
cf.get('server', 'repository')
except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
- print "Could not read bcfg2.conf; exiting"
- raise SystemExit, 1
+ print("Could not read bcfg2.conf; exiting")
+ raise SystemExit(1)
try:
clientsdata = XML(open(clientspath).read())
except (IOError, XMLSyntaxError):
- print("StatReports: Failed to parse %s"%(clientspath))
- raise SystemExit, 1
+ print("StatReports: Failed to parse %s" % (clientspath))
+ raise SystemExit(1)
q = '-O3' in sys.argv
# Be sure the database is ready for new schema
update_database()
- load_stats(clientsdata, statsdata, verb, logger, quick=q, location=platform.node())
+ load_stats(clientsdata,
+ statsdata,
+ verb,
+ logger,
+ quick=q,
+ location=platform.node())
diff --git a/src/lib/Server/Reports/manage.py b/src/lib/Server/Reports/manage.py
index 5e78ea979..858bddeca 100755
--- a/src/lib/Server/Reports/manage.py
+++ b/src/lib/Server/Reports/manage.py
@@ -1,7 +1,7 @@
#!/usr/bin/env python
from django.core.management import execute_manager
try:
- import settings # Assumed to be in the same directory.
+ import settings # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
diff --git a/src/lib/Server/Reports/nisauth.py b/src/lib/Server/Reports/nisauth.py
index b4be0e391..6fc346f1e 100644
--- a/src/lib/Server/Reports/nisauth.py
+++ b/src/lib/Server/Reports/nisauth.py
@@ -1,15 +1,17 @@
-import os
-import crypt, nis
+import crypt
+import nis
from Bcfg2.Server.Reports.settings import AUTHORIZED_GROUP
"""Checks with NIS to see if the current user is in the support group"""
__revision__ = "$Revision: $"
+
class NISAUTHError(Exception):
"""NISAUTHError is raised when somehting goes boom."""
pass
+
class nisauth(object):
group_test = False
samAcctName = None
@@ -18,26 +20,27 @@ class nisauth(object):
telephoneNumber = None
title = None
memberOf = None
- department = None #this will be a list
+ department = None # this will be a list
mail = None
- extensionAttribute1 = None #badgenumber
+ extensionAttribute1 = None # badgenumber
badge_no = None
uid = None
- def __init__(self,login,passwd=None):
+ def __init__(self, login, passwd=None):
"""get user profile from NIS"""
try:
p = nis.match(login, 'passwd.byname').split(":")
- print p
+ print(p)
except:
raise NISAUTHError('username')
# check user password using crypt and 2 character salt from passwd file
if p[1] == crypt.crypt(passwd, p[1][:2]):
# check to see if user is in valid support groups
# will have to include these groups in a settings file eventually
- if not login in nis.match(AUTHORIZED_GROUP, 'group.byname').split(':')[-1].split(','):
+ if not login in nis.match(AUTHORIZED_GROUP,
+ 'group.byname').split(':')[-1].split(','):
raise NISAUTHError('group')
self.uid = p[2]
- print self.uid
+ print(self.uid)
else:
raise NISAUTHError('password')
diff --git a/src/lib/Server/Reports/reports/models.py b/src/lib/Server/Reports/reports/models.py
index 1963a9090..d94b2e1ba 100644
--- a/src/lib/Server/Reports/reports/models.py
+++ b/src/lib/Server/Reports/reports/models.py
@@ -29,6 +29,7 @@ TYPE_CHOICES = (
(TYPE_EXTRA, 'Extra'),
)
+
def convert_entry_type_to_id(type_name):
"""Convert a entry type to its entry id"""
for e_id, e_name in TYPE_CHOICES:
@@ -36,23 +37,25 @@ def convert_entry_type_to_id(type_name):
return e_id
return -1
+
class ClientManager(models.Manager):
"""Extended client manager functions."""
def active(self, timestamp=None):
- """returns a set of clients that have been created and have not yet been
- expired as of optional timestmamp argument. Timestamp should be a
- datetime object."""
-
+ """returns a set of clients that have been created and have not
+ yet been expired as of optional timestmamp argument. Timestamp
+ should be a datetime object."""
+
if timestamp == None:
timestamp = datetime.now()
elif not isinstance(timestamp, datetime):
- raise ValueError, 'Expected a datetime object'
+ raise ValueError('Expected a datetime object')
else:
try:
- timestamp = datetime(*strptime(timestamp, "%Y-%m-%d %H:%M:%S")[0:6])
+ timestamp = datetime(*strptime(timestamp,
+ "%Y-%m-%d %H:%M:%S")[0:6])
except ValueError:
return self.none()
-
+
return self.filter(Q(expiration__gt=timestamp) | Q(expiration__isnull=True),
creation__lt=timestamp)
@@ -65,25 +68,27 @@ class Client(models.Model):
null=True, blank=True,
related_name="parent_client")
expiration = models.DateTimeField(blank=True, null=True)
-
+
def __str__(self):
return self.name
objects = ClientManager()
-
+
class Admin:
pass
+
class Ping(models.Model):
"""Represents a ping of a client (sparsely)."""
client = models.ForeignKey(Client, related_name="pings")
starttime = models.DateTimeField()
endtime = models.DateTimeField()
- status = models.CharField(max_length=4, choices=PING_CHOICES)#up/down
+ status = models.CharField(max_length=4, choices=PING_CHOICES) # up/down
class Meta:
get_latest_by = 'endtime'
-
+
+
class InteractiveManager(models.Manager):
"""Manages interactions objects."""
@@ -94,31 +99,31 @@ class InteractiveManager(models.Manager):
This method uses aggregated queries to return a ValuesQueryDict object.
Faster then raw sql since this is executed as a single query.
"""
-
- return self.values('client').annotate(max_timestamp=Max('timestamp')).values()
- def interaction_per_client(self, maxdate = None, active_only=True):
+ return list(self.values('client').annotate(max_timestamp=Max('timestamp')).values())
+
+ def interaction_per_client(self, maxdate=None, active_only=True):
"""
Returns the most recent interactions for clients as of a date
Arguments:
maxdate -- datetime object. Most recent date to pull. (dafault None)
active_only -- Include only active clients (default True)
-
+
"""
- if maxdate and not isinstance(maxdate,datetime):
- raise ValueError, 'Expected a datetime object'
- return self.filter(id__in = self.get_interaction_per_client_ids(maxdate, active_only))
+ if maxdate and not isinstance(maxdate, datetime):
+ raise ValueError('Expected a datetime object')
+ return self.filter(id__in=self.get_interaction_per_client_ids(maxdate, active_only))
- def get_interaction_per_client_ids(self, maxdate = None, active_only=True):
+ def get_interaction_per_client_ids(self, maxdate=None, active_only=True):
"""
Returns the ids of most recent interactions for clients as of a date.
Arguments:
maxdate -- datetime object. Most recent date to pull. (dafault None)
active_only -- Include only active clients (default True)
-
+
"""
from django.db import connection
cursor = connection.cursor()
@@ -127,10 +132,10 @@ class InteractiveManager(models.Manager):
sql = 'select reports_interaction.id, x.client_id from (select client_id, MAX(timestamp) ' + \
'as timer from reports_interaction'
if maxdate:
- if not isinstance(maxdate,datetime):
- raise ValueError, 'Expected a datetime object'
+ if not isinstance(maxdate, datetime):
+ raise ValueError('Expected a datetime object')
sql = sql + " where timestamp <= '%s' " % maxdate
- cfilter = "(expiration is null or expiration > '%s') and creation <= '%s'" % (maxdate,maxdate)
+ cfilter = "(expiration is null or expiration > '%s') and creation <= '%s'" % (maxdate, maxdate)
sql = sql + ' GROUP BY client_id) x, reports_interaction where ' + \
'reports_interaction.client_id = x.client_id AND reports_interaction.timestamp = x.timer'
if active_only:
@@ -144,16 +149,17 @@ class InteractiveManager(models.Manager):
pass
return []
+
class Interaction(models.Model):
"""Models each reconfiguration operation interaction between client and server."""
client = models.ForeignKey(Client, related_name="interactions",)
- timestamp = models.DateTimeField()#Timestamp for this record
- state = models.CharField(max_length=32)#good/bad/modified/etc
- repo_rev_code = models.CharField(max_length=64)#repo revision at time of interaction
- client_version = models.CharField(max_length=32)#Client Version
- goodcount = models.IntegerField()#of good config-items
- totalcount = models.IntegerField()#of total config-items
- server = models.CharField(max_length=256) # Name of the server used for the interaction
+ timestamp = models.DateTimeField() # Timestamp for this record
+ state = models.CharField(max_length=32) # good/bad/modified/etc
+ repo_rev_code = models.CharField(max_length=64) # repo revision at time of interaction
+ client_version = models.CharField(max_length=32) # Client Version
+ goodcount = models.IntegerField() # of good config-items
+ totalcount = models.IntegerField() # of total config-items
+ server = models.CharField(max_length=256) # Name of the server used for the interaction
bad_entries = models.IntegerField(default=-1)
modified_entries = models.IntegerField(default=-1)
extra_entries = models.IntegerField(default=-1)
@@ -163,25 +169,25 @@ class Interaction(models.Model):
def percentgood(self):
if not self.totalcount == 0:
- return (self.goodcount/float(self.totalcount))*100
+ return (self.goodcount / float(self.totalcount)) * 100
else:
return 0
def percentbad(self):
if not self.totalcount == 0:
- return ((self.totalcount-self.goodcount)/(float(self.totalcount)))*100
+ return ((self.totalcount - self.goodcount) / (float(self.totalcount))) * 100
else:
return 0
-
+
def isclean(self):
if (self.bad_entry_count() == 0 and self.goodcount == self.totalcount):
return True
else:
return False
-
+
def isstale(self):
- if (self == self.client.current_interaction):#Is Mostrecent
- if(datetime.now()-self.timestamp > timedelta(hours=25) ):
+ if (self == self.client.current_interaction): # Is Mostrecent
+ if(datetime.now() - self.timestamp > timedelta(hours=25)):
return True
else:
return False
@@ -194,10 +200,11 @@ class Interaction(models.Model):
return True
else:
return False
+
def save(self):
- super(Interaction, self).save() #call the real save...
+ super(Interaction, self).save() # call the real save...
self.client.current_interaction = self.client.interactions.latest()
- self.client.save()#save again post update
+ self.client.save() # save again post update
def delete(self):
'''Override the default delete. Allows us to remove Performance items'''
@@ -239,35 +246,38 @@ class Interaction(models.Model):
self.extra_entries = Entries_interactions.objects.filter(interaction=self, type=TYPE_EXTRA).count()
self.save()
return self.extra_entries
-
+
objects = InteractiveManager()
class Admin:
list_display = ('client', 'timestamp', 'state')
list_filter = ['client', 'timestamp']
pass
+
class Meta:
get_latest_by = 'timestamp'
ordering = ['-timestamp']
unique_together = ("client", "timestamp")
+
class Reason(models.Model):
"""reason why modified or bad entry did not verify, or changed."""
owner = models.TextField(max_length=128, blank=True)
current_owner = models.TextField(max_length=128, blank=True)
group = models.TextField(max_length=128, blank=True)
current_group = models.TextField(max_length=128, blank=True)
- perms = models.TextField(max_length=4, blank=True)#txt fixes typing issue
+ perms = models.TextField(max_length=4, blank=True) # txt fixes typing issue
current_perms = models.TextField(max_length=4, blank=True)
- status = models.TextField(max_length=3, blank=True)#on/off/(None)
- current_status = models.TextField(max_length=1, blank=True)#on/off/(None)
+ status = models.TextField(max_length=3, blank=True) # on/off/(None)
+ current_status = models.TextField(max_length=1, blank=True) # on/off/(None)
to = models.TextField(max_length=256, blank=True)
current_to = models.TextField(max_length=256, blank=True)
version = models.TextField(max_length=128, blank=True)
current_version = models.TextField(max_length=128, blank=True)
- current_exists = models.BooleanField()#False means its missing. Default True
+ current_exists = models.BooleanField() # False means its missing. Default True
current_diff = models.TextField(max_length=1280, blank=True)
is_binary = models.BooleanField(default=False)
+
def _str_(self):
return "Reason"
@@ -278,7 +288,7 @@ class Reason(models.Model):
cursor = connection.cursor()
cursor.execute('delete from reports_reason where not exists (select rei.id from reports_entries_interactions rei where rei.reason_id = reports_reason.id)')
transaction.set_dirty()
-
+
class Entries(models.Model):
"""Contains all the entries feed by the client."""
@@ -295,19 +305,22 @@ class Entries(models.Model):
cursor = connection.cursor()
cursor.execute('delete from reports_entries where not exists (select rei.id from reports_entries_interactions rei where rei.entry_id = reports_entries.id)')
transaction.set_dirty()
-
+
+
class Entries_interactions(models.Model):
"""Define the relation between the reason, the interaction and the entry."""
entry = models.ForeignKey(Entries)
reason = models.ForeignKey(Reason)
interaction = models.ForeignKey(Interaction)
type = models.IntegerField(choices=TYPE_CHOICES)
-
+
+
class Performance(models.Model):
"""Object representing performance data for any interaction."""
interaction = models.ManyToManyField(Interaction, related_name="performance_items")
metric = models.CharField(max_length=128)
value = models.DecimalField(max_digits=32, decimal_places=16)
+
def __str__(self):
return self.metric
@@ -318,7 +331,8 @@ class Performance(models.Model):
cursor = connection.cursor()
cursor.execute('delete from reports_performance where not exists (select ri.id from reports_performance_interaction ri where ri.performance_id = reports_performance.id)')
transaction.set_dirty()
-
+
+
class InternalDatabaseVersion(models.Model):
"""Object that tell us to witch version is the database."""
version = models.IntegerField()
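The model changes above are mostly comment and whitespace normalization, but the Interaction.save() override in an earlier hunk is the one behavioral pattern worth calling out: it performs the real save and then refreshes the denormalized current_interaction pointer on the related Client. A minimal stand-in sketch of that idea in plain Python rather than Django (class and attribute names here are illustrative only, not the real models):

# Simplified stand-in for the Django pattern above: after saving an
# interaction, refresh the client's cached "current_interaction" pointer.
class Client:
    def __init__(self, name):
        self.name = name
        self.interactions = []          # stands in for the reverse FK manager
        self.current_interaction = None

class Interaction:
    def __init__(self, client, timestamp, state):
        self.client = client
        self.timestamp = timestamp
        self.state = state

    def save(self):
        # "real" save: record the interaction on the client
        self.client.interactions.append(self)
        # keep the denormalized pointer in sync (latest by timestamp)
        self.client.current_interaction = max(self.client.interactions,
                                              key=lambda i: i.timestamp)

if __name__ == "__main__":
    c = Client("node1.example.com")
    Interaction(c, 1, "clean").save()
    Interaction(c, 2, "dirty").save()
    print(c.current_interaction.state)  # -> dirty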
diff --git a/src/lib/Server/Reports/reports/templates/base.html b/src/lib/Server/Reports/reports/templates/base.html
index a64f1c76a..6ef4c9aff 100644
--- a/src/lib/Server/Reports/reports/templates/base.html
+++ b/src/lib/Server/Reports/reports/templates/base.html
@@ -87,7 +87,7 @@
<div style='clear:both'></div>
</div><!-- document -->
<div id="footer">
- <span>Bcfg2 Version 1.2.0pre1</span>
+ <span>Bcfg2 Version 1.2.0pre2</span>
</div>
<div id="calendar_div" style='position:absolute; visibility:hidden; background-color:white; layer-background-color:white;'></div>
diff --git a/src/lib/Server/Reports/reports/templatetags/bcfg2_tags.py b/src/lib/Server/Reports/reports/templatetags/bcfg2_tags.py
index 7fffe289d..629984f26 100644
--- a/src/lib/Server/Reports/reports/templatetags/bcfg2_tags.py
+++ b/src/lib/Server/Reports/reports/templatetags/bcfg2_tags.py
@@ -21,9 +21,9 @@ def page_navigator(context):
path = context['request'].META['PATH_INFO']
total_pages = int(context['total_pages'])
records_per_page = int(context['records_per_page'])
- except KeyError, e:
+ except KeyError:
return fragment
- except ValueError, e:
+ except ValueError:
return fragment
if total_pages < 2:
@@ -84,7 +84,8 @@ def page_navigator(context):
except Resolver404:
path = "404"
- except NoReverseMatch, nr:
+ except NoReverseMatch:
+ nr = sys.exc_info()[1]
path = "NoReverseMatch: %s" % nr
except ValueError:
path = "ValueError"
@@ -193,12 +194,13 @@ class AddUrlFilter(template.Node):
del kwargs['server']
try:
link = reverse(view, args=args, kwargs=kwargs)
- except NoReverseMatch, rm:
+ except NoReverseMatch:
link = reverse(self.fallback_view, args=None,
kwargs={ filter_name: filter_value })
- except NoReverseMatch, rm:
+ except NoReverseMatch:
+ rm = sys.exc_info()[1]
raise rm
- except (Resolver404, ValueError), e:
+ except (Resolver404, ValueError):
pass
return link
@@ -219,9 +221,9 @@ def add_url_filter(parser, token):
filter_name = filter_name.strip()
filter_value = parser.compile_filter(filter_value)
except ValueError:
- raise template.TemplateSyntaxError, "%r tag requires exactly one argument" % token.contents.split()[0]
+ raise template.TemplateSyntaxError("%r tag requires exactly one argument" % token.contents.split()[0])
if not filter_name or not filter_value:
- raise template.TemplateSyntaxError, "argument should be a filter=value pair"
+ raise template.TemplateSyntaxError("argument should be a filter=value pair")
return AddUrlFilter(filter_name, filter_value)
@@ -268,7 +270,7 @@ def to_media_url(parser, token):
tag_name, filter_value = token.split_contents()
filter_value = parser.compile_filter(filter_value)
except ValueError:
- raise template.TemplateSyntaxError, "%r tag requires exactly one argument" % token.contents.split()[0]
+ raise template.TemplateSyntaxError("%r tag requires exactly one argument" % token.contents.split()[0])
return MediaTag(filter_value)
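The recurring change in this file, and in most of the files below, is the exception-capture idiom: Python 2-only syntax such as "except KeyError, e:" is rewritten as a bare "except KeyError:" followed by "e = sys.exc_info()[1]", which parses on both Python 2 and Python 3. A small self-contained sketch of the idiom, assuming nothing beyond the standard library:

import sys

def parse_port(value):
    """Return value as an int, reporting failures on Python 2 and 3 alike."""
    try:
        return int(value)
    except ValueError:
        # "except ValueError, e" is a syntax error on Python 3, and
        # "except ValueError as e" needs Python >= 2.6, so the commit
        # captures the active exception via sys.exc_info() instead.
        err = sys.exc_info()[1]
        print("bad port %r: %s" % (value, err))
        return None

if __name__ == "__main__":
    print(parse_port("8080"))   # -> 8080
    print(parse_port("oops"))   # -> None (after printing the error)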
diff --git a/src/lib/Server/Reports/reports/templatetags/syntax_coloring.py b/src/lib/Server/Reports/reports/templatetags/syntax_coloring.py
index 43dafb262..291528e2e 100644
--- a/src/lib/Server/Reports/reports/templatetags/syntax_coloring.py
+++ b/src/lib/Server/Reports/reports/templatetags/syntax_coloring.py
@@ -1,3 +1,4 @@
+import sys
from django import template
from django.utils.encoding import smart_unicode, smart_str
from django.utils.html import conditional_escape
@@ -14,6 +15,12 @@ try:
except:
colorize = False
+def u_str(string):
+ if sys.hexversion >= 0x03000000:
+ return string
+ else:
+ return unicode(string)
+
@register.filter
def syntaxhilight(value, arg="diff", autoescape=None):
"""
@@ -26,9 +33,9 @@ def syntaxhilight(value, arg="diff", autoescape=None):
if colorize:
try:
- output = u'<style type="text/css">' \
+ output = u_str('<style type="text/css">') \
+ smart_unicode(HtmlFormatter().get_style_defs('.highlight')) \
- + u'</style>'
+ + u_str('</style>')
lexer = get_lexer_by_name(arg)
output += highlight(value, lexer, HtmlFormatter())
@@ -36,6 +43,6 @@ def syntaxhilight(value, arg="diff", autoescape=None):
except:
return value
else:
- return mark_safe(u'<div class="note-box">Tip: Install pygments for highlighting</div><pre>%s</pre>' % value)
+ return mark_safe(u_str('<div class="note-box">Tip: Install pygments for highlighting</div><pre>%s</pre>') % value)
syntaxhilight.needs_autoescape = True
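The new u_str() helper added above exists because the unicode builtin disappears in Python 3, where every str is already unicode. A standalone sketch of the same helper; it is safe to run on either interpreter because the Python 2 branch is only reached when unicode actually exists:

import sys

def u_str(string):
    """Return a unicode string on Python 2, the string unchanged on Python 3."""
    if sys.hexversion >= 0x03000000:
        return string
    else:
        return unicode(string)  # noqa: builtin only exists on Python 2

if __name__ == "__main__":
    print(u_str("diff"))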
diff --git a/src/lib/Server/Reports/reports/views.py b/src/lib/Server/Reports/reports/views.py
index 00d35c092..ccd71a60e 100644
--- a/src/lib/Server/Reports/reports/views.py
+++ b/src/lib/Server/Reports/reports/views.py
@@ -3,22 +3,26 @@ Report views
Functions to handle all of the reporting views.
"""
-from django.template import Context, RequestContext, loader
-from django.http import HttpResponse, HttpResponseRedirect, HttpResponseServerError, Http404
+from datetime import datetime, timedelta
+import sys
+from time import strptime
+
+from django.template import Context, RequestContext
+from django.http import \
+ HttpResponse, HttpResponseRedirect, HttpResponseServerError, Http404
from django.shortcuts import render_to_response, get_object_or_404
-from django.core.urlresolvers import resolve, reverse, Resolver404, NoReverseMatch
+from django.core.urlresolvers import \
+ resolve, reverse, Resolver404, NoReverseMatch
from django.db import connection
-from django.db.backends import util
from Bcfg2.Server.Reports.reports.models import *
-from datetime import datetime, timedelta
-from time import strptime
-import sys
+
class PaginationError(Exception):
"""This error is raised when pagination cannot be completed."""
pass
+
def server_error(request):
"""
500 error handler.
@@ -29,6 +33,7 @@ def server_error(request):
from django.views import debug
return debug.technical_500_response(request, *sys.exc_info())
+
def timeview(fn):
"""
Setup a timeview view
@@ -53,28 +58,32 @@ def timeview(fn):
if cal_date.find(' ') > -1:
kw['hour'] = timestamp.hour
kw['minute'] = timestamp.minute
- return HttpResponseRedirect(reverse(view, args=args, kwargs=kw))
+ return HttpResponseRedirect(reverse(view,
+ args=args,
+ kwargs=kw))
except KeyError:
pass
except:
pass
# FIXME - Handle this
-
+
"""Extract timestamp from args."""
timestamp = None
try:
- timestamp = datetime(int(kwargs.pop('year')), int(kwargs.pop('month')),
+ timestamp = datetime(int(kwargs.pop('year')),
+ int(kwargs.pop('month')),
int(kwargs.pop('day')), int(kwargs.pop('hour', 0)),
int(kwargs.pop('minute', 0)), 0)
kwargs['timestamp'] = timestamp
except KeyError:
pass
- except:
+ except:
raise
return fn(request, **kwargs)
return _handle_timeview
-
+
+
def config_item(request, pk, type="bad"):
"""
Display a single entry.
@@ -83,30 +92,33 @@ def config_item(request, pk, type="bad"):
"""
item = get_object_or_404(Entries_interactions, id=pk)
- timestamp=item.interaction.timestamp
- time_start=item.interaction.timestamp.replace(\
- hour=0, minute=0, second=0, microsecond=0)
- time_end=time_start + timedelta(days=1)
-
- todays_data = Interaction.objects.filter(\
- timestamp__gte=time_start,\
- timestamp__lt=time_end)
- shared_entries = Entries_interactions.objects.filter(entry=item.entry,\
- reason=item.reason, type=item.type,
- interaction__in=[x['id']\
- for x in todays_data.values('id')])
+ timestamp = item.interaction.timestamp
+ time_start = item.interaction.timestamp.replace(hour=0,
+ minute=0,
+ second=0,
+ microsecond=0)
+ time_end = time_start + timedelta(days=1)
+
+ todays_data = Interaction.objects.filter(timestamp__gte=time_start,
+ timestamp__lt=time_end)
+ shared_entries = Entries_interactions.objects.filter(entry=item.entry,
+ reason=item.reason,
+ type=item.type,
+ interaction__in=[x['id']\
+ for x in todays_data.values('id')])
associated_list = Interaction.objects.filter(id__in=[x['interaction']\
for x in shared_entries.values('interaction')])\
- .order_by('client__name','timestamp').select_related().all()
+ .order_by('client__name', 'timestamp').select_related().all()
return render_to_response('config_items/item.html',
- {'item':item,
- 'isextra': item.type == TYPE_EXTRA,
- 'mod_or_bad': type,
- 'associated_list':associated_list,
- 'timestamp' : timestamp},
- context_instance=RequestContext(request))
+ {'item': item,
+ 'isextra': item.type == TYPE_EXTRA,
+ 'mod_or_bad': type,
+ 'associated_list': associated_list,
+ 'timestamp': timestamp},
+ context_instance=RequestContext(request))
+
@timeview
def config_item_list(request, type, timestamp=None):
@@ -115,11 +127,12 @@ def config_item_list(request, type, timestamp=None):
type = convert_entry_type_to_id(type)
if type < 0:
raise Http404
-
+
current_clients = Interaction.objects.get_interaction_per_client_ids(timestamp)
item_list_dict = {}
seen = dict()
- for x in Entries_interactions.objects.filter(interaction__in=current_clients, type=type).select_related():
+ for x in Entries_interactions.objects.filter(interaction__in=current_clients,
+ type=type).select_related():
if (x.entry, x.reason) in seen:
continue
seen[(x.entry, x.reason)] = 1
@@ -129,13 +142,15 @@ def config_item_list(request, type, timestamp=None):
item_list_dict[x.entry.kind] = [x]
for kind in item_list_dict:
- item_list_dict[kind].sort(lambda a,b: cmp(a.entry.name, b.entry.name))
+ item_list_dict[kind].sort(lambda a, b: cmp(a.entry.name, b.entry.name))
- return render_to_response('config_items/listing.html', {'item_list_dict':item_list_dict,
- 'mod_or_bad':mod_or_bad,
- 'timestamp' : timestamp},
+ return render_to_response('config_items/listing.html',
+ {'item_list_dict': item_list_dict,
+ 'mod_or_bad': mod_or_bad,
+ 'timestamp': timestamp},
context_instance=RequestContext(request))
+
@timeview
def client_index(request, timestamp=None):
"""
@@ -149,8 +164,10 @@ def client_index(request, timestamp=None):
.order_by("client__name").all()
return render_to_response('clients/index.html',
- { 'inter_list': list, 'timestamp' : timestamp},
- context_instance=RequestContext(request))
+ {'inter_list': list,
+ 'timestamp': timestamp},
+ context_instance=RequestContext(request))
+
@timeview
def client_detailed_list(request, timestamp=None, **kwargs):
@@ -165,7 +182,8 @@ def client_detailed_list(request, timestamp=None, **kwargs):
kwargs['page_limit'] = 0
return render_history_view(request, 'clients/detailed-list.html', **kwargs)
-def client_detail(request, hostname = None, pk = None):
+
+def client_detail(request, hostname=None, pk=None):
context = dict()
client = get_object_or_404(Client, name=hostname)
if(pk == None):
@@ -177,6 +195,7 @@ def client_detail(request, hostname = None, pk = None):
return render_history_view(request, 'clients/detail.html', page_limit=5,
client=client, maxdate=context['interaction'].timestamp, context=context)
+
def client_manage(request):
"""Manage client expiration"""
message = ''
@@ -186,12 +205,12 @@ def client_manage(request):
client_action = request.POST.get('client_action', None)
client = Client.objects.get(name=client_name)
if client_action == 'expire':
- client.expiration = datetime.now();
+ client.expiration = datetime.now()
client.save()
message = "Expiration for %s set to %s." % \
(client_name, client.expiration.strftime("%Y-%m-%d %H:%M:%S"))
elif client_action == 'unexpire':
- client.expiration = None;
+ client.expiration = None
client.save()
message = "%s is now active." % client_name
else:
@@ -205,6 +224,7 @@ def client_manage(request):
{'clients': Client.objects.order_by('name').all(), 'message': message},
context_instance=RequestContext(request))
+
@timeview
def display_summary(request, timestamp=None):
"""
@@ -216,7 +236,12 @@ def display_summary(request, timestamp=None):
if not timestamp:
timestamp = datetime.now()
- collected_data = dict(clean=[],bad=[],modified=[],extra=[],stale=[],pings=[])
+ collected_data = dict(clean=[],
+ bad=[],
+ modified=[],
+ extra=[],
+ stale=[],
+ pings=[])
for node in recent_data:
if timestamp - node.timestamp > timedelta(hours=24):
collected_data['stale'].append(node)
@@ -238,42 +263,47 @@ def display_summary(request, timestamp=None):
# label, header_text, node_list
summary_data = []
- get_dict = lambda name, label: { 'name': name,
- 'nodes': collected_data[name],
- 'label': label }
+ get_dict = lambda name, label: {'name': name,
+ 'nodes': collected_data[name],
+ 'label': label}
if len(collected_data['clean']) > 0:
- summary_data.append( get_dict('clean', 'nodes are clean.') )
+ summary_data.append(get_dict('clean',
+ 'nodes are clean.'))
if len(collected_data['bad']) > 0:
- summary_data.append( get_dict('bad', 'nodes are bad.') )
+ summary_data.append(get_dict('bad',
+ 'nodes are bad.'))
if len(collected_data['modified']) > 0:
- summary_data.append( get_dict('modified', 'nodes were modified.') )
+ summary_data.append(get_dict('modified',
+ 'nodes were modified.'))
if len(collected_data['extra']) > 0:
- summary_data.append( get_dict('extra',
- 'nodes have extra configurations.') )
+ summary_data.append(get_dict('extra',
+ 'nodes have extra configurations.'))
if len(collected_data['stale']) > 0:
- summary_data.append( get_dict('stale',
- 'nodes did not run within the last 24 hours.') )
+ summary_data.append(get_dict('stale',
+ 'nodes did not run within the last 24 hours.'))
if len(collected_data['pings']) > 0:
- summary_data.append( get_dict('pings',
- 'are down.') )
+ summary_data.append(get_dict('pings',
+ 'are down.'))
return render_to_response('displays/summary.html',
{'summary_data': summary_data, 'node_count': node_count,
'timestamp': timestamp},
context_instance=RequestContext(request))
+
@timeview
def display_timing(request, timestamp=None):
mdict = dict()
inters = Interaction.objects.interaction_per_client(timestamp).select_related().all()
[mdict.__setitem__(inter, {'name': inter.client.name}) \
for inter in inters]
- for metric in Performance.objects.filter(interaction__in=mdict.keys()).all():
+ for metric in Performance.objects.filter(interaction__in=list(mdict.keys())).all():
for i in metric.interaction.all():
mdict[i][metric.metric] = metric.value
return render_to_response('displays/timing.html',
- {'metrics': mdict.values(), 'timestamp': timestamp},
- context_instance=RequestContext(request))
+ {'metrics': list(mdict.values()),
+ 'timestamp': timestamp},
+ context_instance=RequestContext(request))
def render_history_view(request, template='clients/history.html', **kwargs):
@@ -303,7 +333,7 @@ def render_history_view(request, template='clients/history.html', **kwargs):
max_results = int(kwargs.get('page_limit', 25))
page = int(kwargs.get('page_number', 1))
- client=kwargs.get('client', None)
+ client = kwargs.get('client', None)
if not client and 'hostname' in kwargs:
client = get_object_or_404(Client, name=kwargs['hostname'])
if client:
@@ -333,8 +363,13 @@ def render_history_view(request, template='clients/history.html', **kwargs):
entry_list = []
if max_results > 0:
try:
- rec_start, rec_end = prepare_paginated_list(request, context, iquery, page, max_results)
- except PaginationError, page_error:
+ rec_start, rec_end = prepare_paginated_list(request,
+ context,
+ iquery,
+ page,
+ max_results)
+ except PaginationError:
+ page_error = sys.exc_info()[1]
if isinstance(page_error[0], HttpResponse):
return page_error[0]
return HttpResponseServerError(page_error)
@@ -345,20 +380,21 @@ def render_history_view(request, template='clients/history.html', **kwargs):
return render_to_response(template, context,
context_instance=RequestContext(request))
+
def prepare_paginated_list(request, context, paged_list, page=1, max_results=25):
"""
Prepare context and slice an object for pagination.
"""
if max_results < 1:
- raise PaginationError, "Max results less then 1"
+ raise PaginationError("Max results less then 1")
if paged_list == None:
- raise PaginationError, "Invalid object"
+ raise PaginationError("Invalid object")
try:
nitems = paged_list.count()
except TypeError:
nitems = len(paged_list)
-
+
rec_start = (page - 1) * int(max_results)
try:
total_pages = (nitems / int(max_results)) + 1
@@ -369,11 +405,11 @@ def prepare_paginated_list(request, context, paged_list, page=1, max_results=25)
try:
view, args, kwargs = resolve(request.META['PATH_INFO'])
kwargs['page_number'] = total_pages
- raise PaginationError, HttpResponseRedirect( reverse(view, kwargs=kwargs) )
+ raise PaginationError(HttpResponseRedirect(reverse(view,
+ kwards=kwargs)))
+ kwargs=kwargs)))
except (Resolver404, NoReverseMatch, ValueError):
raise "Accessing beyond last page. Unable to resolve redirect."
context['total_pages'] = total_pages
context['records_per_page'] = max_results
return (rec_start, rec_start + int(max_results))
-
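Most of the views.py changes are argument reflowing plus the same exception-capture idiom, but the timeview decorator near the top of the file is the interesting mechanism: it pops year/month/day (and optional hour/minute) keyword arguments coming from the URL and passes a single datetime in as timestamp. A simplified sketch of that decorator, with the Django request handling and redirect logic omitted; names follow the diff, but this is not the full implementation:

from datetime import datetime
from functools import wraps

def timeview(fn):
    """Collapse year/month/day (and optional hour/minute) keyword
    arguments into a single 'timestamp' datetime before calling fn."""
    @wraps(fn)
    def _handle(**kwargs):
        try:
            timestamp = datetime(int(kwargs.pop('year')),
                                 int(kwargs.pop('month')),
                                 int(kwargs.pop('day')),
                                 int(kwargs.pop('hour', 0)),
                                 int(kwargs.pop('minute', 0)), 0)
            kwargs['timestamp'] = timestamp
        except KeyError:
            # no date components in the URL: fall back to "now" semantics
            kwargs.setdefault('timestamp', None)
        return fn(**kwargs)
    return _handle

@timeview
def report(timestamp=None):
    return "report for %s" % (timestamp or "now")

if __name__ == "__main__":
    print(report(year="2011", month="05", day="10"))
    print(report())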
diff --git a/src/lib/Server/Reports/settings.py b/src/lib/Server/Reports/settings.py
index 66da7a8b1..fff30d30a 100644
--- a/src/lib/Server/Reports/settings.py
+++ b/src/lib/Server/Reports/settings.py
@@ -1,8 +1,9 @@
import django
+# Compatibility import
+from Bcfg2.Bcfg2Py3k import ConfigParser
# Django settings for bcfg2 reports project.
-from ConfigParser import ConfigParser, NoSectionError, NoOptionError
-c = ConfigParser()
+c = ConfigParser.ConfigParser()
c.read(['/etc/bcfg2.conf', '/etc/bcfg2-web.conf'])
try:
@@ -18,31 +19,40 @@ else:
TEMPLATE_DEBUG = DEBUG
ADMINS = (
- ('Bcfg2', 'admin@email.address'),
+ ('Root', 'root'),
)
MANAGERS = ADMINS
-DATABASE_ENGINE = c.get('statistics', 'database_engine')
-# 'postgresql', 'mysql', 'sqlite3' or 'ado_mssql'.
+db_engine = c.get('statistics', 'database_engine')
+db_name = ''
if c.has_option('statistics', 'database_name'):
- DATABASE_NAME = c.get('statistics', 'database_name')
-else:
- DATABASE_NAME = ''
-# Or path to database file if using sqlite3.
-#<repository>/etc/brpt.sqlite is default path
-
-if DATABASE_ENGINE != 'sqlite3':
- DATABASE_USER = c.get('statistics', 'database_user')
- # Not used with sqlite3.
- DATABASE_PASSWORD = c.get('statistics', 'database_password')
- # Not used with sqlite3.
- DATABASE_HOST = c.get('statistics', 'database_host')
- # Set to empty string for localhost. Not used with sqlite3.
- DATABASE_PORT = c.get('statistics', 'database_port')
- # Set to empty string for default. Not used with sqlite3.
-if DATABASE_ENGINE == 'sqlite3' and DATABASE_NAME == '':
- DATABASE_NAME = "%s/etc/brpt.sqlite" % c.get('server', 'repository')
+ db_name = c.get('statistics', 'database_name')
+if db_engine == 'sqlite3' and db_name == '':
+ db_name = "%s/etc/brpt.sqlite" % c.get('server', 'repository')
+
+DATABASES = {
+ 'default': {
+ 'ENGINE': "django.db.backends.%s" % db_engine,
+ 'NAME': db_name
+ }
+}
+
+if db_engine != 'sqlite3':
+ DATABASES['default']['USER'] = c.get('statistics', 'database_user')
+ DATABASES['default']['PASSWORD'] = c.get('statistics', 'database_password')
+ DATABASES['default']['HOST'] = c.get('statistics', 'database_host')
+ DATABASES['default']['PORT'] = c.get('statistics', 'database_port')
+
+if django.VERSION[0] == 1 and django.VERSION[1] < 2:
+ DATABASE_ENGINE = db_engine
+ DATABASE_NAME = DATABASES['default']['NAME']
+ if DATABASE_ENGINE != 'sqlite3':
+ DATABASE_USER = DATABASES['default']['USER']
+ DATABASE_PASSWORD = DATABASES['default']['PASSWORD']
+ DATABASE_HOST = DATABASES['default']['HOST']
+ DATABASE_PORT = DATABASES['default']['PORT']
+
# Local time zone for this installation. All choices can be found here:
# http://docs.djangoproject.com/en/dev/ref/settings/#time-zone
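The settings rewrite above builds the Django 1.2+ DATABASES dictionary from bcfg2.conf and then, when an older Django is detected, mirrors the values back into the legacy DATABASE_* names. A sketch of the same derivation using an in-memory config and Python 3's configparser in place of the Bcfg2Py3k shim; the pretend version tuple is purely illustrative:

import configparser

c = configparser.ConfigParser()
c.read_string("""
[statistics]
database_engine = sqlite3
[server]
repository = /var/lib/bcfg2
""")

db_engine = c.get('statistics', 'database_engine')
db_name = c.get('statistics', 'database_name', fallback='')
if db_engine == 'sqlite3' and db_name == '':
    db_name = "%s/etc/brpt.sqlite" % c.get('server', 'repository')

DATABASES = {
    'default': {
        'ENGINE': "django.db.backends.%s" % db_engine,
        'NAME': db_name,
    }
}

django_version = (1, 1)          # pretend pre-1.2 Django, for illustration only
if django_version[0] == 1 and django_version[1] < 2:
    DATABASE_ENGINE = db_engine                     # legacy single-db settings
    DATABASE_NAME = DATABASES['default']['NAME']

print(DATABASES)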
diff --git a/src/lib/Server/Reports/updatefix.py b/src/lib/Server/Reports/updatefix.py
index f8fca1f90..4d3c964f5 100644
--- a/src/lib/Server/Reports/updatefix.py
+++ b/src/lib/Server/Reports/updatefix.py
@@ -2,12 +2,13 @@ import Bcfg2.Server.Reports.settings
from django.db import connection
import django.core.management
+import logging
+import traceback
from Bcfg2.Server.Reports.reports.models import InternalDatabaseVersion, \
TYPE_BAD, TYPE_MODIFIED, TYPE_EXTRA
-
-import logging, traceback
logger = logging.getLogger('Bcfg2.Server.Reports.UpdateFix')
+
# all update function should go here
def _merge_database_table_entries():
cursor = connection.cursor()
@@ -21,7 +22,7 @@ def _merge_database_table_entries():
select name, kind from reports_extra
""")
# this fetch could be better done
- entries_map={}
+ entries_map = {}
for row in cursor.fetchall():
insert_cursor.execute("insert into reports_entries (name, kind) \
values (%s, %s)", (row[0], row[1]))
@@ -48,6 +49,7 @@ def _merge_database_table_entries():
insert_cursor.execute("insert into reports_entries_interactions \
(entry_id, interaction_id, reason_id, type) values (%s, %s, %s, %s)", (entry_id, row[3], row[2], row[4]))
+
def _interactions_constraint_or_idx():
'''sqlite doesn't support alter tables.. or constraints'''
cursor = connection.cursor()
@@ -55,27 +57,28 @@ def _interactions_constraint_or_idx():
cursor.execute('alter table reports_interaction add constraint reports_interaction_20100601 unique (client_id,timestamp)')
except:
cursor.execute('create unique index reports_interaction_20100601 on reports_interaction (client_id,timestamp)')
-
+
def _populate_interaction_entry_counts():
'''Populate up the type totals for the interaction table'''
cursor = connection.cursor()
- count_field = { TYPE_BAD: 'bad_entries',
- TYPE_MODIFIED: 'modified_entries',
- TYPE_EXTRA: 'extra_entries' }
+ count_field = {TYPE_BAD: 'bad_entries',
+ TYPE_MODIFIED: 'modified_entries',
+ TYPE_EXTRA: 'extra_entries'}
- for type in count_field.keys():
- cursor.execute("select count(type), interaction_id "+
+ for type in list(count_field.keys()):
+ cursor.execute("select count(type), interaction_id " +
"from reports_entries_interactions where type = %s group by interaction_id" % type)
updates = []
for row in cursor.fetchall():
updates.append(row)
try:
cursor.executemany("update reports_interaction set " + count_field[type] + "=%s where id = %s", updates)
- except Exception, e:
- print e
+ except Exception:
+ e = sys.exc_info()[1]
+ print(e)
cursor.close()
-
+
# be sure to test your upgrade query before reflecting the change in the models
# the list of function and sql command to do should go here
@@ -104,6 +107,7 @@ _fixes = [_merge_database_table_entries,
# this will calculate the last possible version of the database
lastversion = len(_fixes)
+
def rollupdate(current_version):
""" function responsible to coordinates all the updates
need current_version as integer
@@ -119,11 +123,12 @@ def rollupdate(current_version):
except:
logger.error("Failed to perform db update %s" % (_fixes[i]), exc_info=1)
# since array start at 0 but version start at 1 we add 1 to the normal count
- ret = InternalDatabaseVersion.objects.create(version=i+1)
+ ret = InternalDatabaseVersion.objects.create(version=i + 1)
return ret
else:
return None
+
def dosync():
"""Function to do the syncronisation for the models"""
# try to detect if it's a fresh new database
@@ -164,7 +169,7 @@ def dosync():
def update_database():
''' methode to search where we are in the revision of the database models and update them '''
- try :
+ try:
logger.debug("Running upgrade of models to the new one")
dosync()
know_version = InternalDatabaseVersion.objects.order_by('-version')
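updatefix.py keeps its schema migrations in the _fixes list and records how many have been applied via InternalDatabaseVersion; rollupdate() simply runs everything past the stored version. (Note that the new "except Exception" handler above reads sys.exc_info(), so the module needs sys imported somewhere; that import is not visible in the hunks shown.) A minimal sketch of the list-of-fixes scheme, with the database replaced by a plain integer and purely illustrative fix entries:

applied_version = 0              # stands in for the stored schema version

def _merge_tables():
    print("merging legacy tables")

_fixes = [
    _merge_tables,
    "alter table reports_interaction add column server varchar(256);",
]

def rollupdate(current_version):
    """Apply every fix past current_version; return the new version number."""
    version = current_version
    for i in range(current_version, len(_fixes)):
        fix = _fixes[i]
        if callable(fix):
            fix()
        else:
            print("would execute SQL: %s" % fix)
        version = i + 1          # versions are 1-based, the list is 0-based
    return version

if __name__ == "__main__":
    print("db now at version %d" % rollupdate(applied_version))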
diff --git a/src/lib/Server/Reports/utils.py b/src/lib/Server/Reports/utils.py
index b74f09e74..e0b6ead59 100755
--- a/src/lib/Server/Reports/utils.py
+++ b/src/lib/Server/Reports/utils.py
@@ -1,11 +1,11 @@
"""Helper functions for reports"""
-from Bcfg2.Server.Reports.reports.models import TYPE_CHOICES
from django.conf.urls.defaults import *
import re
"""List of filters provided by filteredUrls"""
filter_list = ('server', 'state')
+
class BatchFetch(object):
"""Fetch Django objects in smaller batches to save memory"""
@@ -21,6 +21,10 @@ class BatchFetch(object):
return self
def next(self):
+ """Provide compatibility with python < 3.0"""
+ return self.__next__()
+
+ def __next__(self):
"""Return the next object from our array and fetch from the
database when needed"""
if self.block_count + self.count - self.step == self.max:
@@ -34,11 +38,12 @@ class BatchFetch(object):
self.count += 1
return self.data[self.count - 1]
+
def generateUrls(fn):
"""
Parse url tuples and send to functions.
- Decorator for url generators. Handles url tuple parsing
+ Decorator for url generators. Handles url tuple parsing
before the actual function is called.
"""
def url_gen(*urls):
@@ -51,13 +56,14 @@ def generateUrls(fn):
return results
return url_gen
+
@generateUrls
def paginatedUrls(pattern, view, kwargs=None, name=None):
"""
Takes a group of url tuples and adds paginated urls.
- Extends a url tuple to include paginated urls. Currently doesn't handle url() compiled
- patterns.
+ Extends a url tuple to include paginated urls.
+ Currently doesn't handle url() compiled patterns.
"""
results = [(pattern, view, kwargs, name)]
@@ -67,13 +73,15 @@ def paginatedUrls(pattern, view, kwargs=None, name=None):
tail = mtail.group(1)
pattern = pattern[:len(pattern) - len(tail)]
results += [(pattern + "/(?P<page_number>\d+)" + tail, view, kwargs)]
- results += [(pattern + "/(?P<page_number>\d+)\|(?P<page_limit>\d+)" + tail, view, kwargs)]
+ results += [(pattern + "/(?P<page_number>\d+)\|(?P<page_limit>\d+)" +
+ tail, view, kwargs)]
if not kwargs:
kwargs = dict()
kwargs['page_limit'] = 0
results += [(pattern + "/?\|(?P<page_limit>all)" + tail, view, kwargs)]
return results
+
@generateUrls
def filteredUrls(pattern, view, kwargs=None, name=None):
"""
@@ -93,7 +101,8 @@ def filteredUrls(pattern, view, kwargs=None, name=None):
'/server/(?P<server>[\w\-\.]+)/(?P<state>[A-Za-z]+)'):
results += [(pattern + filter + tail, view, kwargs)]
return results
-
+
+
@generateUrls
def timeviewUrls(pattern, view, kwargs=None, name=None):
"""
@@ -113,4 +122,3 @@ def timeviewUrls(pattern, view, kwargs=None, name=None):
'/(?P<year>\d{4})-(?P<month>\d{2})-(?P<day>\d{2})'):
results += [(pattern + filter + tail, view, kwargs)]
return results
-
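BatchFetch gains a __next__ method because Python 3's iterator protocol calls __next__() while Python 2's calls next(); keeping both, with one delegating to the other, satisfies either interpreter. A self-contained sketch of the pattern, with the queryset batching replaced by a plain list:

class BatchFetch(object):
    """Iterator that works under both Python 2 and Python 3."""
    def __init__(self, data, step=2):
        self.data = data
        self.step = step          # kept only to mirror the real signature
        self.pos = 0

    def __iter__(self):
        return self

    def next(self):
        """Python 2 entry point; defers to __next__."""
        return self.__next__()

    def __next__(self):
        if self.pos >= len(self.data):
            raise StopIteration
        item = self.data[self.pos]
        self.pos += 1
        return item

if __name__ == "__main__":
    print(list(BatchFetch(["a", "b", "c"])))   # -> ['a', 'b', 'c']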
diff --git a/src/lib/Server/Snapshots/__init__.py b/src/lib/Server/Snapshots/__init__.py
index 6018377cb..7c901adb2 100644
--- a/src/lib/Server/Snapshots/__init__.py
+++ b/src/lib/Server/Snapshots/__init__.py
@@ -2,7 +2,8 @@ __all__ = ['models', 'db_from_config', 'setup_session']
import sqlalchemy
import sqlalchemy.orm
-import ConfigParser
+# Compatibility import
+from Bcfg2.Bcfg2Py3k import ConfigParser
def db_from_config(cfile):
@@ -19,7 +20,7 @@ def db_from_config(cfile):
db = cp.get('snapshots', 'database')
return '%s://%s:%s@%s/%s' % (driver, user, password, host, db)
else:
- raise Exception, "unsupported db driver %s" % driver
+ raise Exception("unsupported db driver %s" % driver)
def setup_session(cfile, debug=False):
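The "Compatibility import" lines throughout this commit pull ConfigParser (and similarly renamed modules) from Bcfg2.Bcfg2Py3k instead of importing them directly. That module is not part of this diff, but the shim it presumably provides looks roughly like the following try/except import dance:

# Rough equivalent of the Bcfg2.Bcfg2Py3k ConfigParser shim (assumed, since
# the real module is not shown in this diff).
try:
    import configparser as ConfigParser      # Python 3 name
except ImportError:
    import ConfigParser                      # Python 2 name

cp = ConfigParser.ConfigParser()
cp.read(['/etc/bcfg2.conf'])                 # missing files are silently skipped
print(cp.sections())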
diff --git a/src/lib/Server/Snapshots/model.py b/src/lib/Server/Snapshots/model.py
index cbb14be79..2aa35f1ec 100644
--- a/src/lib/Server/Snapshots/model.py
+++ b/src/lib/Server/Snapshots/model.py
@@ -1,3 +1,4 @@
+import sys
from sqlalchemy import Table, Column, Integer, Unicode, ForeignKey, Boolean, \
DateTime, UnicodeText, desc
import datetime
@@ -6,6 +7,13 @@ from sqlalchemy.orm import relation, backref
from sqlalchemy.ext.declarative import declarative_base
+def u_str(string):
+ if sys.hexversion >= 0x03000000:
+ return string
+ else:
+ return unicode(string)
+
+
class Uniquer(object):
force_rt = True
@@ -33,12 +41,20 @@ class Administrator(Uniquer, Base):
email = Column(Unicode(64))
admin_client = Table('admin_client', Base.metadata,
- Column('admin_id', Integer, ForeignKey('administrator.id')),
- Column('client_id', Integer, ForeignKey('client.id')))
+ Column('admin_id',
+ Integer,
+ ForeignKey('administrator.id')),
+ Column('client_id',
+ Integer,
+ ForeignKey('client.id')))
admin_group = Table('admin_group', Base.metadata,
- Column('admin_id', Integer, ForeignKey('administrator.id')),
- Column('group_id', Integer, ForeignKey('group.id')))
+ Column('admin_id',
+ Integer,
+ ForeignKey('administrator.id')),
+ Column('group_id',
+ Integer,
+ ForeignKey('group.id')))
class Client(Uniquer, Base):
@@ -68,12 +84,20 @@ class ConnectorKeyVal(Uniquer, Base):
value = Column(UnicodeText)
meta_group = Table('meta_group', Base.metadata,
- Column('metadata_id', Integer, ForeignKey('metadata.id')),
- Column('group_id', Integer, ForeignKey('group.id')))
+ Column('metadata_id',
+ Integer,
+ ForeignKey('metadata.id')),
+ Column('group_id',
+ Integer,
+ ForeignKey('group.id')))
meta_conn = Table('meta_conn', Base.metadata,
- Column('metadata_id', Integer, ForeignKey('metadata.id')),
- Column('connkeyval_id', Integer, ForeignKey('connkeyval.id')))
+ Column('metadata_id',
+ Integer,
+ ForeignKey('metadata.id')),
+ Column('connkeyval_id',
+ Integer,
+ ForeignKey('connkeyval.id')))
class Metadata(Base):
@@ -87,21 +111,21 @@ class Metadata(Base):
@classmethod
def from_metadata(cls, mysession, mymetadata):
- client = Client.by_value(mysession, name=unicode(mymetadata.hostname))
+ client = Client.by_value(mysession, name=u_str(mymetadata.hostname))
m = cls(client=client)
for group in mymetadata.groups:
- m.groups.append(Group.by_value(mysession, name=unicode(group)))
+ m.groups.append(Group.by_value(mysession, name=u_str(group)))
for connector in mymetadata.connectors:
data = getattr(mymetadata, connector)
if not isinstance(data, dict):
continue
- for key, value in data.iteritems():
+ for key, value in list(data.items()):
if not isinstance(value, str):
continue
m.keyvals.append(ConnectorKeyVal.by_value(mysession,
- connector=unicode(connector),
- key=unicode(key),
- value=unicode(value)))
+ connector=u_str(connector),
+ key=u_str(key),
+ value=u_str(value)))
return m
@@ -143,8 +167,12 @@ class PackageCorrespondence(Base, CorrespondenceType):
correct = Column(Boolean)
package_snap = Table('package_snap', Base.metadata,
- Column('ppair_id', Integer, ForeignKey('package_pair.id')),
- Column('snapshot_id', Integer, ForeignKey('snapshot.id')))
+ Column('ppair_id',
+ Integer,
+ ForeignKey('package_pair.id')),
+ Column('snapshot_id',
+ Integer,
+ ForeignKey('snapshot.id')))
class Service(Base, Uniquer):
@@ -167,8 +195,12 @@ class ServiceCorrespondence(Base, CorrespondenceType):
correct = Column(Boolean)
service_snap = Table('service_snap', Base.metadata,
- Column('spair_id', Integer, ForeignKey('service_pair.id')),
- Column('snapshot_id', Integer, ForeignKey('snapshot.id')))
+ Column('spair_id',
+ Integer,
+ ForeignKey('service_pair.id')),
+ Column('snapshot_id',
+ Integer,
+ ForeignKey('snapshot.id')))
class File(Base, Uniquer):
@@ -194,20 +226,36 @@ class FileCorrespondence(Base, CorrespondenceType):
correct = Column(Boolean)
file_snap = Table('file_snap', Base.metadata,
- Column('fpair_id', Integer, ForeignKey('file_pair.id')),
- Column('snapshot_id', Integer, ForeignKey('snapshot.id')))
+ Column('fpair_id',
+ Integer,
+ ForeignKey('file_pair.id')),
+ Column('snapshot_id',
+ Integer,
+ ForeignKey('snapshot.id')))
extra_pkg_snap = Table('extra_pkg_snap', Base.metadata,
- Column('package_id', Integer, ForeignKey('package.id')),
- Column('snapshot_id', Integer, ForeignKey('snapshot.id')))
+ Column('package_id',
+ Integer,
+ ForeignKey('package.id')),
+ Column('snapshot_id',
+ Integer,
+ ForeignKey('snapshot.id')))
extra_file_snap = Table('extra_file_snap', Base.metadata,
- Column('file_id', Integer, ForeignKey('file.id')),
- Column('snapshot_id', Integer, ForeignKey('snapshot.id')))
+ Column('file_id',
+ Integer,
+ ForeignKey('file.id')),
+ Column('snapshot_id',
+ Integer,
+ ForeignKey('snapshot.id')))
extra_service_snap = Table('extra_service_snap', Base.metadata,
- Column('service_id', Integer, ForeignKey('service.id')),
- Column('snapshot_id', Integer, ForeignKey('snapshot.id')))
+ Column('service_id',
+ Integer,
+ ForeignKey('service.id')),
+ Column('snapshot_id',
+ Integer,
+ ForeignKey('snapshot.id')))
class Action(Base):
@@ -228,7 +276,7 @@ class Snapshot(Base):
correct = Column(Boolean)
revision = Column(Unicode(36))
metadata_id = Column(Integer, ForeignKey('metadata.id'))
- client_metadata = relation(Metadata, primaryjoin=metadata_id==Metadata.id)
+ client_metadata = relation(Metadata, primaryjoin=metadata_id == Metadata.id)
timestamp = Column(DateTime, default=datetime.datetime.now)
client_id = Column(Integer, ForeignKey('client.id'))
client = relation(Client, backref=backref('snapshots'))
@@ -256,23 +304,25 @@ class Snapshot(Base):
(cls.e_dispatch, extra)]:
for key in dispatch:
dest, ecls = dispatch[key]
- for edata in data[key].values():
+ for edata in list(data[key].values()):
getattr(snap, dest).append(ecls.from_record(session, edata))
return snap
@classmethod
def by_client(cls, session, clientname):
- return session.query(cls).join(cls.client_metadata, Metadata.client).filter(Client.name==clientname)
+ return session.query(cls).join(cls.client_metadata,
+ Metadata.client).filter(Client.name == clientname)
@classmethod
def get_current(cls, session, clientname):
- return session.query(Snapshot).join(Snapshot.client_metadata, Metadata.client).filter(Client.name==clientname).order_by(desc(Snapshot.timestamp)).first()
+ return session.query(Snapshot).join(Snapshot.client_metadata,
+ Metadata.client).filter(Client.name == clientname).order_by(desc(Snapshot.timestamp)).first()
@classmethod
def get_by_date(cls, session, clientname, timestamp):
return session.query(Snapshot)\
.join(Snapshot.client_metadata, Metadata.client)\
.filter(Snapshot.timestamp < timestamp)\
- .filter(Client.name==clientname)\
+ .filter(Client.name == clientname)\
.order_by(desc(Snapshot.timestamp))\
.first()
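The model.py hunks are pure reformatting of the SQLAlchemy association tables: each many-to-many link (admin_client, meta_group, package_snap, and so on) is a bare Table with two foreign-key columns. A small working sketch of that style, assuming SQLAlchemy 1.4+ is installed; table and class names echo the diff, but the mapping here is simplified:

from sqlalchemy import Table, Column, Integer, Unicode, ForeignKey, create_engine
from sqlalchemy.orm import relationship, declarative_base, Session

Base = declarative_base()

admin_client = Table('admin_client', Base.metadata,
                     Column('admin_id',
                            Integer,
                            ForeignKey('administrator.id')),
                     Column('client_id',
                            Integer,
                            ForeignKey('client.id')))

class Administrator(Base):
    __tablename__ = 'administrator'
    id = Column(Integer, primary_key=True)
    name = Column(Unicode(32))
    clients = relationship("Client", secondary=admin_client)

class Client(Base):
    __tablename__ = 'client'
    id = Column(Integer, primary_key=True)
    name = Column(Unicode(64))

if __name__ == "__main__":
    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    with Session(engine) as session:
        admin = Administrator(name=u"desai", clients=[Client(name=u"node1")])
        session.add(admin)
        session.commit()
        print([c.name for c in admin.clients])   # -> ['node1']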
diff --git a/src/lib/Statistics.py b/src/lib/Statistics.py
index b2240db98..a0cb8f39b 100644
--- a/src/lib/Statistics.py
+++ b/src/lib/Statistics.py
@@ -29,4 +29,4 @@ class Statistics(object):
self.data[name].add_value(value)
def display(self):
- return dict([value.get_value() for value in self.data.values()])
+ return dict([value.get_value() for value in list(self.data.values())])
diff --git a/src/sbin/bcfg2 b/src/sbin/bcfg2
index 9bc50fe65..7f7d8f5c6 100755
--- a/src/sbin/bcfg2
+++ b/src/sbin/bcfg2
@@ -3,18 +3,20 @@
"""Bcfg2 Client"""
__revision__ = '$Revision$'
+import fcntl
import logging
import os
import signal
+import stat
import sys
import tempfile
import time
-import xmlrpclib
-import fcntl
import Bcfg2.Options
import Bcfg2.Client.XML
import Bcfg2.Client.Frame
import Bcfg2.Client.Tools
+# Compatibility imports
+from Bcfg2.Bcfg2Py3k import xmlrpclib
import Bcfg2.Proxy
import Bcfg2.Logger
@@ -106,7 +108,12 @@ class Client:
raise SystemExit(0)
if self.setup['remove'] and 'services' in self.setup['remove']:
self.logger.error("Service removal is nonsensical, disable services to get former behavior")
- if self.setup['remove'] not in [False, 'all', 'services', 'packages']:
+ if self.setup['remove'] not in [False,
+ 'all',
+ 'Services',
+ 'Packages',
+ 'services',
+ 'packages']:
self.logger.error("Got unknown argument %s for -r" % (self.setup['remove']))
if (self.setup["file"] != False) and (self.setup["cache"] != False):
print("cannot use -f and -c together")
@@ -130,7 +137,9 @@ class Client:
script.write(probe.text)
script.close()
os.close(scripthandle)
- os.chmod(script.name, 0755)
+ os.chmod(script.name, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH |
+ stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH |
+ stat.S_IWUSR) # 0755
ret.text = os.popen(script.name).read().strip()
self.logger.info("Probe %s has result:\n%s" % (name, ret.text))
finally:
@@ -183,7 +192,8 @@ class Client:
try:
probe_data = proxy.GetProbes()
- except xmlrpclib.Fault, flt:
+ except xmlrpclib.Fault:
+ flt = sys.exc_info()[1]
self.logger.error("Failed to download probes from bcfg2")
self.logger.error(flt.faultString)
raise SystemExit(1)
@@ -192,7 +202,8 @@ class Client:
try:
probes = Bcfg2.Client.XML.XML(probe_data)
- except Bcfg2.Client.XML.ParseError, syntax_error:
+ except Bcfg2.Client.XML.ParseError:
+ syntax_error = sys.exc_info()[1]
self.fatal_error(
"Server returned invalid probe requests: %s" %
(syntax_error))
@@ -223,7 +234,8 @@ class Client:
self.setup['decision'])
self.logger.info("Got decision list from server:")
self.logger.info(self.setup['decision_list'])
- except xmlrpclib.Fault, f:
+ except xmlrpclib.Fault:
+ f = sys.exc_info()[1]
if f.faultCode == 1:
print("GetDecisionList method not supported by server")
else:
@@ -249,7 +261,8 @@ class Client:
try:
self.config = Bcfg2.Client.XML.XML(rawconfig)
- except Bcfg2.Client.XML.ParseError, syntax_error:
+ except Bcfg2.Client.XML.ParseError:
+ syntax_error = sys.exc_info()[1]
self.fatal_error("The configuration could not be parsed: %s" %
(syntax_error))
return(1)
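Besides the exception-capture and import changes, the client script replaces the Python 2 octal literal 0755, which is a syntax error on Python 3, with an ORed set of stat constants. A short sketch of the same chmod on a throwaway temp file (POSIX assumed):

import os
import stat
import tempfile

fd, path = tempfile.mkstemp(suffix=".sh")
with os.fdopen(fd, "w") as script:
    script.write("#!/bin/sh\necho probe-output\n")

# equivalent to mode 0755 (rwxr-xr-x), spelled portably for Python 2 and 3
os.chmod(path, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH |
               stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH |
               stat.S_IWUSR)

print(oct(stat.S_IMODE(os.stat(path).st_mode)))
os.unlink(path)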
diff --git a/src/sbin/bcfg2-admin b/src/sbin/bcfg2-admin
index 2c9a43859..36be6ab14 100755
--- a/src/sbin/bcfg2-admin
+++ b/src/sbin/bcfg2-admin
@@ -2,11 +2,12 @@
"""bcfg2-admin is a script that helps to administrate a Bcfg2 deployment."""
from optparse import OptionParser
-from StringIO import StringIO
import logging
import Bcfg2.Server.Core
import Bcfg2.Logger
import Bcfg2.Options
+# Compatibility import
+from Bcfg2.Bcfg2Py3k import StringIO
log = logging.getLogger('bcfg2-admin')
@@ -56,14 +57,15 @@ def main():
else:
# Print short help for all modes
parser.print_help()
- print create_description()
+ print(create_description())
raise SystemExit(0)
if args[0] in get_modes():
modname = args[0].capitalize()
try:
mode_cls = mode_import(modname)
- except ImportError, e:
+ except ImportError:
+ e = sys.exc_info()[1]
log.error("Failed to load admin mode %s: %s" % (modname, e))
raise SystemExit(1)
mode = mode_cls(options.configfile)
@@ -73,7 +75,7 @@ def main():
else:
log.error("Unknown mode %s" % args[0])
parser.print_help()
- print create_description()
+ print(create_description())
raise SystemExit(1)
if __name__ == '__main__':
diff --git a/src/sbin/bcfg2-build-reports b/src/sbin/bcfg2-build-reports
index 231f52105..7122fb300 100755
--- a/src/sbin/bcfg2-build-reports
+++ b/src/sbin/bcfg2-build-reports
@@ -13,8 +13,9 @@ import os
import socket
import sys
from time import asctime, strptime
-from ConfigParser import ConfigParser, NoSectionError, NoOptionError
from lxml.etree import XML, XSLT, parse, Element, ElementTree, SubElement, tostring, XMLSyntaxError
+# Compatibility imports
+from Bcfg2.Bcfg2Py3k import ConfigParser
def generatereport(rspec, nrpt):
"""
@@ -42,9 +43,9 @@ def generatereport(rspec, nrpt):
# This line actually sorts from most recent to oldest.
statisticslist.sort(lambda y, x: cmp(strptime(x.get("time")), strptime(y.get("time"))))
stats = statisticslist[0]
-
+
[node.remove(item) for item in node.findall('Statistics')]
-
+
# Add a good tag if node is good and we wnat to report such.
if reportgood == 'Y' and stats.get('state') == 'clean':
SubElement(stats,"Good")
@@ -52,7 +53,7 @@ def generatereport(rspec, nrpt):
[stats.remove(item) for item in stats.findall("Bad") + stats.findall("Modified") if \
item.getchildren() == []]
[stats.remove(item) for item in stats.findall("Modified") if reportmodified == 'N']
-
+
# Test for staleness -if stale add Stale tag.
if stats.get("time").find(current_date) == -1:
SubElement(stats,"Stale")
@@ -64,7 +65,7 @@ def mail(mailbody, confi):
try:
mailer = confi.get('statistics', 'sendmailpath')
- except (NoSectionError, NoOptionError):
+ except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
mailer = "/usr/sbin/sendmail"
# Open a pipe to the mail program and
# write the data to the pipe.
@@ -72,7 +73,7 @@ def mail(mailbody, confi):
pipe.write(mailbody)
exitcode = pipe.close()
if exitcode:
- print "Exit code: %s" % exitcode
+ print("Exit code: %s" % exitcode)
def rss(reportxml, delivery, report):
"""rss appends a new report to the specified rss file
@@ -98,7 +99,7 @@ def rss(reportxml, delivery, report):
chantitle = SubElement(channel, "title")
chantitle.text = report.attrib['name']
chanlink = SubElement(channel, "link")
-
+
# This can later link to WWW report if one gets published
# simultaneously?
chanlink.text = "http://www.mcs.anl.gov/cobalt/bcfg2"
@@ -119,7 +120,7 @@ def www(reportxml, delivery):
"""www outputs report to."""
# This can later link to WWW report if one gets published
- # simultaneously?
+ # simultaneously?
for destination in delivery.findall('Destination'):
fil = open(destination.attrib['address'], 'w')
@@ -138,15 +139,15 @@ def pretty_print(element, level=0):
"""Produce a pretty-printed text representation of element."""
if element.text:
fmt = "%s<%%s %%s>%%s</%%s>" % (level*" ")
- data = (element.tag, (" ".join(["%s='%s'" % keyval for keyval in element.attrib.iteritems()])),
+ data = (element.tag, (" ".join(["%s='%s'" % keyval for keyval in list(element.attrib.items())])),
element.text, element.tag)
if element._children:
fmt = "%s<%%s %%s>\n" % (level*" ",) + (len(element._children) * "%s") + "%s</%%s>\n" % (level*" ")
- data = (element.tag, ) + (" ".join(["%s='%s'" % keyval for keyval in element.attrib.iteritems()]),)
+ data = (element.tag, ) + (" ".join(["%s='%s'" % keyval for keyval in list(element.attrib.items())]),)
data += tuple([pretty_print(entry, level+2) for entry in element._children]) + (element.tag, )
else:
fmt = "%s<%%s %%s/>\n" % (level * " ")
- data = (element.tag, " ".join(["%s='%s'" % keyval for keyval in element.attrib.iteritems()]))
+ data = (element.tag, " ".join(["%s='%s'" % keyval for keyval in list(element.attrib.items())]))
return fmt % data
@@ -157,14 +158,14 @@ if __name__ == '__main__':
cfpath = sys.argv[sys.argv.index('-C') + 1]
else:
cfpath = '/etc/bcfg2.conf'
- c = ConfigParser()
+ c = ConfigParser.ConfigParser()
c.read([cfpath])
configpath = "%s/etc/report-configuration.xml" % c.get('server', 'repository')
statpath = "%s/etc/statistics.xml" % c.get('server', 'repository')
clientsdatapath = "%s/Metadata/clients.xml" % c.get('server', 'repository')
try:
prefix = c.get('server', 'prefix')
- except (NoSectionError, NoOptionError):
+ except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
prefix = '/usr'
transformpath = "/%s/share/bcfg2/xsl-transforms/" % (prefix)
@@ -172,13 +173,14 @@ if __name__ == '__main__':
try:
opts, args = getopt.getopt(sys.argv[1:], "C:hAc:Ns:", ["help", "all", "config=","no-ping", "stats="])
- except getopt.GetoptError, mesg:
+ except getopt.GetoptError:
+ mesg = sys.exc_info()[1]
# Print help information and exit:
- print "%s\nUsage:\nbcfg2-build-reports [-h][-A (include ALL clients)] [-c <configuration-file>] [-s <statistics-file>][-N (do not ping clients)]" % (mesg)
- raise SystemExit, 2
+ print("%s\nUsage:\nbcfg2-build-reports [-h][-A (include ALL clients)] [-c <configuration-file>] [-s <statistics-file>][-N (do not ping clients)]" % (mesg))
+ raise SystemExit(2)
for o, a in opts:
if o in ("-h", "--help"):
- print "Usage:\nbcfg2-build-reports [-h] [-c <configuration-file>] [-s <statistics-file>]"
+ print("Usage:\nbcfg2-build-reports [-h] [-c <configuration-file>] [-s <statistics-file>]")
raise SystemExit
if o in ("-A", "--all"):
all=True
@@ -205,17 +207,17 @@ if __name__ == '__main__':
statsdata = XML(open(statpath).read())
except (IOError, XMLSyntaxError):
print("bcfg2-build-reports: Failed to parse %s"%(statpath))
- raise SystemExit, 1
+ raise SystemExit(1)
try:
configdata = XML(open(configpath).read())
except (IOError, XMLSyntaxError):
print("bcfg2-build-reports: Failed to parse %s"%(configpath))
- raise SystemExit, 1
+ raise SystemExit(1)
try:
clientsdata = XML(open(clientsdatapath).read())
except (IOError, XMLSyntaxError):
print("bcfg2-build-reports: Failed to parse %s"%(clientsdatapath))
- raise SystemExit, 1
+ raise SystemExit(1)
# Merge data from three sources.
nodereport = Element("Report", attrib={"time" : asctime()})
@@ -229,7 +231,7 @@ if __name__ == '__main__':
for statel in nod.findall("Statistics"):
nodel.append(statel)
nodereport.append(nodel)
-
+
if all:
for nod in statsdata.findall("Node"):
for client in clientsdata.findall("Client"):
@@ -242,8 +244,8 @@ if __name__ == '__main__':
for statel in nod.findall("Statistics"):
nodel.append(statel)
nodereport.append(nodel)
-
-
+
+
for reprt in configdata.findall('Report'):
nodereport.set("name", reprt.get("name", default="BCFG Report"))
@@ -254,7 +256,7 @@ if __name__ == '__main__':
for deliv in reprt.findall('Delivery'):
# Is a deepcopy of procnodereport necessary?
-
+
delivtype = deliv.get('type', default='nodes-digest')
deliverymechanism = deliv.get('mechanism', default='www')
@@ -269,14 +271,14 @@ if __name__ == '__main__':
except:
print("bcfg2-build-reports: Invalid report type or delivery mechanism.\n Can't find: "\
+ transformpath + transform)
- raise SystemExit, 1
+ raise SystemExit(1)
try: # Try to parse stylesheet.
stylesheet = XSLT(parse(transformpath + transform))
except:
print("bcfg2-build-reports: invalid XSLT transform file.")
- raise SystemExit, 1
-
+ raise SystemExit(1)
+
if deliverymechanism == 'mail':
if delivtype == 'nodes-individual':
reportdata = copy.deepcopy(procnodereport)
@@ -285,7 +287,7 @@ if __name__ == '__main__':
reportdata.append(noden)
result = stylesheet.apply(ElementTree(reportdata))
outputstring = stylesheet.tostring(result)
-
+
if not outputstring == None:
toastring = ''
for desti in deliv.findall("Destination"):
@@ -295,13 +297,13 @@ if __name__ == '__main__':
outputstring = "To: %s\nFrom: root@%s\n%s"% \
(toastring, socket.getfqdn(), outputstring)
mail(outputstring, c) #call function to send
-
+
else:
reportdata = copy.deepcopy(procnodereport)
result = stylesheet.apply(ElementTree(reportdata))
outputstring = stylesheet.tostring(result)
-
+
if not outputstring == None:
toastring = ''
for desti in deliv.findall("Destination"):
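bcfg2-build-reports also shows the dictionary-iteration cleanup used across the commit: iteritems()/iterkeys()/itervalues() do not exist on Python 3, and items()/keys()/values() return views there, so call sites that need a concrete list wrap the result in list(). A tiny sketch:

attrib = {"name": "node1", "state": "clean"}

# Works on both Python 2 and Python 3:
pairs = ["%s='%s'" % keyval for keyval in list(attrib.items())]
print(" ".join(pairs))

# Also common in this commit: materializing keys() before mutating the dict.
for key in list(attrib.keys()):
    if key == "state":
        del attrib[key]          # safe because we iterate over a copy
print(attrib)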
diff --git a/src/sbin/bcfg2-info b/src/sbin/bcfg2-info
index a6d236bc8..161fee441 100755
--- a/src/sbin/bcfg2-info
+++ b/src/sbin/bcfg2-info
@@ -27,6 +27,40 @@ import Bcfg2.Server.Plugins.Metadata
import Bcfg2.Server.Plugin
logger = logging.getLogger('bcfg2-info')
+USAGE = """Commands:
+build <hostname> <filename> - Build config for hostname, writing to filename
+builddir <hostname> <dirname> - Build config for hostname, writing separate files to dirname
+buildall <directory> - Build configs for all clients in directory
+buildfile <filename> <hostname> - Build config file for hostname (not written to disk)
+bundles - Print out group/bundle information
+clients - Print out client/profile information
+config - Print out the configuration of the Bcfg2 server
+debug - Shell out to native python interpreter
+event_debug - Display filesystem events as they are processed
+generators - List current versions of generators
+groups - List groups
+help - Print this list of available commands
+mappings <type*> <name*> - Print generator mappings for optional type and name
+profile <command> <args> - Profile a single bcfg2-info command
+quit - Exit the bcfg2-info command line
+showentries <hostname> <type> - Show abstract configuration entries for a given host
+showclient <client1> <client2> - Show metadata for given hosts
+update - Process pending file events
+version - Print version of this tool"""
+
+BUILDDIR_USAGE = """Usage: builddir [-f] <hostname> <output dir>
+
+Generates a config for client <hostname> and writes the
+individual configuration files out separately in a tree
+under <output dir>. The <output dir> directory must be
+rooted under /tmp unless the -f argument is provided, in
+which case it can be located anywhere.
+
+NOTE: Currently only handles file entries and writes
+all content with the default owner and permissions. These
+could be much more permissive than would be created by the
+Bcfg2 client itself."""
+
class mockLog(object):
def error(self, *args, **kwargs):
@@ -75,7 +109,8 @@ class infoCore(cmd.Cmd, Bcfg2.Server.Core.Core):
encoding)
if event_debug:
self.fam.debug = True
- except Bcfg2.Server.Core.CoreInitError, msg:
+ except Bcfg2.Server.Core.CoreInitError:
+ msg = sys.exc_info()[1]
print("Core load failed because %s" % msg)
raise SystemExit(1)
self.prompt = '> '
@@ -89,7 +124,7 @@ class infoCore(cmd.Cmd, Bcfg2.Server.Core.Core):
try:
self.cmdloop('Welcome to bcfg2-info\n'
'Type "help" for more information')
- except SystemExit, val:
+ except SystemExit:
raise
except Bcfg2.Server.Plugin.PluginExecutionError:
continue
@@ -106,7 +141,7 @@ class infoCore(cmd.Cmd, Bcfg2.Server.Core.Core):
try:
opts, _ = getopt.getopt(args.split(), 'nf:')
except:
- print "Usage: debug [-n] [-f <command list>]"
+ print("Usage: debug [-n] [-f <command list>]")
return
self.cont = False
scriptmode = False
@@ -136,7 +171,7 @@ class infoCore(cmd.Cmd, Bcfg2.Server.Core.Core):
Exit program.
Usage: [quit|exit]
"""
- for plugin in self.plugins.values():
+ for plugin in list(self.plugins.values()):
plugin.shutdown()
os._exit(0)
@@ -145,27 +180,7 @@ class infoCore(cmd.Cmd, Bcfg2.Server.Core.Core):
def do_help(self, _):
"""Print out usage info."""
- print 'Commands:'
- print 'build <hostname> <filename> - Build config for hostname, writing to filename'
- print 'builddir <hostname> <dirname> - Build config for hostname, writing separate files to dirname'
- print 'buildall <directory> - Build configs for all clients in directory'
- print 'buildfile <filename> <hostname> - Build config file for hostname (not written to disk)'
- print 'bundles - Print out group/bundle information'
- print 'clients - Print out client/profile information'
- print 'config - Print out the configuration of the Bcfg2 server'
- print 'debug - Shell out to native python interpreter'
- print 'event_debug - Display filesystem events as they are processed'
- print 'generators - List current versions of generators'
- print 'groups - List groups'
- print 'help - Print this list of available commands'
- print 'mappings <type*> <name*> - Print generator mappings for optional type and name'
- print 'profile <command> <args> - Profile a single bcfg2-info command'
- print 'quit - Exit the bcfg2-info command line'
- print 'showentries <hostname> <type> - Show abstract configuration entries for a given host'
- print 'showclient <client1> <client2> - Show metadata for given hosts'
- print 'update - Process pending file events'
- print 'version - Print version of this tool'
-
+ print(USAGE)
def do_update(self, _):
"""Process pending filesystem events."""
@@ -198,18 +213,7 @@ class infoCore(cmd.Cmd, Bcfg2.Server.Core.Core):
def help_builddir(self):
"""Display help for builddir command."""
- print('Usage: builddir [-f] <hostname> <output dir>')
- print('')
- print('Generates a config for client <hostname> and writes the')
- print('individual configuration files out separately in a tree')
- print('under <output dir>. The <output dir> directory must be')
- print('rooted under /tmp unless the -f argument is provided, in')
- print('which case it can be located anywhere.')
- print('')
- print('NOTE: Currently only handles file entries and writes')
- print('all content with the default owner and permissions. These')
- print('could be much more permissive than would be created by the')
- print('Bcfg2 client itself.')
+ print(BUILDDIR_USAGE)
def do_builddir(self, args):
"""Build client configuration as separate files within a dir."""
@@ -238,7 +242,7 @@ class infoCore(cmd.Cmd, Bcfg2.Server.Core.Core):
p = Bcfg2.Client.Tools.POSIX.POSIX(log, setup, client_config)
states = dict()
p.Inventory(states)
- p.Install(states.keys(), states)
+ p.Install(list(states.keys()), states)
else:
print('Error: Incorrect number of parameters.')
self.help_builddir()
@@ -262,7 +266,7 @@ class infoCore(cmd.Cmd, Bcfg2.Server.Core.Core):
try:
metadata = self.build_metadata(client)
self.Bind(entry, metadata)
- print(lxml.etree.tostring(entry, encoding="UTF-8",
+ print(lxml.etree.tostring(entry, encoding="UTF-8",
xml_declaration=True))
except:
print("Failed to build entry %s for host %s" % (fname, client))
@@ -371,22 +375,22 @@ class infoCore(cmd.Cmd, Bcfg2.Server.Core.Core):
except:
print("Client %s not defined" % client)
continue
- print "Hostname:\t", client_meta.hostname
- print "Profile:\t", client_meta.profile
- print "Groups:\t\t", list(client_meta.groups)[0]
+ print("Hostname:\t", client_meta.hostname)
+ print("Profile:\t", client_meta.profile)
+ print("Groups:\t\t", list(client_meta.groups)[0])
for grp in list(client_meta.groups)[1:]:
- print '\t\t%s' % grp
+ print('\t\t%s' % grp)
if client_meta.bundles:
- print "Bundles:\t", list(client_meta.bundles)[0]
+ print("Bundles:\t", list(client_meta.bundles)[0])
for bnd in list(client_meta.bundles)[1:]:
- print '\t\t%s' % bnd
+ print('\t\t%s' % bnd)
if client_meta.connectors:
- print "Connector data"
- print "=" * 80
+ print("Connector data")
+ print("=" * 80)
for conn in client_meta.connectors:
if getattr(client_meta, conn):
- print "%s:\t" % (conn), getattr(client_meta, conn)
- print "=" * 80
+ print("%s:\t" % (conn), getattr(client_meta, conn))
+ print("=" * 80)
def do_mappings(self, args):
"""Print out mapping info."""
@@ -402,11 +406,11 @@ class infoCore(cmd.Cmd, Bcfg2.Server.Core.Core):
interested = [(etype, [args.split()[1]])
for etype in etypes]
else:
- interested = [(etype, generator.Entries[etype])
- for etype in etypes
+ interested = [(etype, generator.Entries[etype])
+ for etype in etypes
if etype in generator.Entries]
for etype, names in interested:
- for name in [name for name in names if name in
+ for name in [name for name in names if name in
generator.Entries.get(etype, {})]:
data.append((generator.name, etype, name))
printTabular(data)
diff --git a/src/sbin/bcfg2-lint b/src/sbin/bcfg2-lint
new file mode 100755
index 000000000..6bc34433e
--- /dev/null
+++ b/src/sbin/bcfg2-lint
@@ -0,0 +1,194 @@
+#!/usr/bin/env python
+
+"""This tool examines your Bcfg2 specifications for errors."""
+__revision__ = '$Revision$'
+
+import sys
+import inspect
+import logging
+import Bcfg2.Logger
+import Bcfg2.Options
+import Bcfg2.Server.Core
+import Bcfg2.Server.Lint
+# Compatibility imports
+from Bcfg2.Bcfg2Py3k import ConfigParser
+
+logger = logging.getLogger('bcfg2-lint')
+
+class Parser(ConfigParser.ConfigParser):
+ def get(self, section, option, default):
+ """ Override ConfigParser.get: If the request option is not in
+ the config file then return the value of default rather than
+ raise an exception. We still raise exceptions on missing
+ sections.
+ """
+ try:
+ return ConfigParser.ConfigParser.get(self, section, option)
+ except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
+ return default
+
+def run_serverless_plugins(plugins, config=None, setup=None, errorhandler=None):
+ logger.debug("Running serverless plugins")
+ for plugin_name, plugin in list(plugins.items()):
+ run_plugin(plugin, plugin_name, errorhandler=errorhandler,
+ setup=setup, config=config, files=files)
+
+def run_server_plugins(plugins, config=None, setup=None, errorhandler=None):
+ core = load_server(setup)
+ logger.debug("Running server plugins")
+ for plugin_name, plugin in list(plugins.items()):
+ run_plugin(plugin, plugin_name, args=[core], errorhandler=errorhandler,
+ setup=setup, config=config, files=files)
+
+def run_plugin(plugin, plugin_name, setup=None, errorhandler=None,
+ args=None, config=None, files=None):
+ logger.debug(" Running %s" % plugin_name)
+ if args is None:
+ args = []
+
+ if errorhandler is None:
+ errorhandler = get_errorhandler(config)
+
+ if config is not None and config.has_section(plugin_name):
+ args.append(dict(config.items(plugin_name), **setup))
+ else:
+ args.append(setup)
+
+ # older versions of python do not support mixing *-magic and
+ # non-*-magic (e.g., "plugin(*args, files=files)", so we do this
+ # all with *-magic
+ kwargs = dict(files=files, errorhandler=errorhandler)
+
+ return plugin(*args, **kwargs).Run()
+
+def get_errorhandler(config):
+ """ get a Bcfg2.Server.Lint.ErrorHandler object """
+ if config.has_section("errors"):
+ conf = dict(config.items("errors"))
+ else:
+ conf = None
+ return Bcfg2.Server.Lint.ErrorHandler(config=conf)
+
+def load_server(setup):
+ """ load server """
+ core = Bcfg2.Server.Core.Core(setup['repo'], setup['plugins'],
+ setup['password'], setup['encoding'])
+ if setup['event debug']:
+ core.fam.debug = True
+ core.fam.handle_events_in_interval(4)
+ return core
+
+if __name__ == '__main__':
+ optinfo = {
+ 'configfile': Bcfg2.Options.CFILE,
+ 'help': Bcfg2.Options.HELP,
+ 'verbose': Bcfg2.Options.VERBOSE,
+ }
+ optinfo.update({
+ 'event debug': Bcfg2.Options.DEBUG,
+ 'encoding': Bcfg2.Options.ENCODING,
+ # Server options
+ 'repo': Bcfg2.Options.SERVER_REPOSITORY,
+ 'plugins': Bcfg2.Options.SERVER_PLUGINS,
+ 'mconnect': Bcfg2.Options.SERVER_MCONNECT,
+ 'filemonitor': Bcfg2.Options.SERVER_FILEMONITOR,
+ 'location': Bcfg2.Options.SERVER_LOCATION,
+ 'static': Bcfg2.Options.SERVER_STATIC,
+ 'key': Bcfg2.Options.SERVER_KEY,
+ 'cert': Bcfg2.Options.SERVER_CERT,
+ 'ca': Bcfg2.Options.SERVER_CA,
+ 'password': Bcfg2.Options.SERVER_PASSWORD,
+ 'protocol': Bcfg2.Options.SERVER_PROTOCOL,
+ # More options
+ 'logging': Bcfg2.Options.LOGGING_FILE_PATH,
+ 'stdin': Bcfg2.Options.FILES_ON_STDIN,
+ 'schema': Bcfg2.Options.SCHEMA_PATH,
+ 'config': Bcfg2.Options.Option('Specify bcfg2-lint configuration file',
+ '/etc/bcfg2-lint.conf',
+ cmd='--lint-config',
+ odesc='<conffile>',
+ long_arg=True),
+ 'showerrors': Bcfg2.Options.Option('Show error handling', False,
+ cmd='--list-errors',
+ long_arg=True),
+ })
+ setup = Bcfg2.Options.OptionParser(optinfo)
+ setup.parse(sys.argv[1:])
+
+ log_args = dict(to_syslog=False, to_console=logging.WARNING)
+ if setup['verbose']:
+ log_args['to_console'] = logging.DEBUG
+ Bcfg2.Logger.setup_logging('bcfg2-info', **log_args)
+
+ config = Parser()
+ config.read(setup['config'])
+
+ if setup['showerrors']:
+ if config.has_section("errors"):
+ econf = dict(config.items("errors"))
+ else:
+ econf = dict()
+
+ print("%-35s %-35s" % ("Error name", "Handler (Default)"))
+ for err, default in Bcfg2.Server.Lint.ErrorHandler._errors.items():
+ if err in econf and econf[err] != default:
+ handler = "%s (%s)" % (econf[err], default)
+ else:
+ handler = default
+ print("%-35s %-35s" % (err, handler))
+ raise SystemExit(0)
+
+ # get list of plugins to run
+ if setup['args']:
+ allplugins = setup['args']
+ elif "bcfg2-repo-validate" in sys.argv[0]:
+ allplugins = 'Duplicates,RequiredAttrs,Validate'.split(',')
+ else:
+ allplugins = config.get('lint', 'plugins',
+ ",".join(Bcfg2.Server.Lint.__all__)).split(',')
+
+ if setup['stdin']:
+ files = [s.strip() for s in sys.stdin.readlines()]
+ else:
+ files = None
+
+ # load plugins
+ serverplugins = {}
+ serverlessplugins = {}
+ for plugin_name in allplugins:
+ try:
+ mod = getattr(__import__("Bcfg2.Server.Lint.%s" %
+ (plugin_name)).Server.Lint, plugin_name)
+ except ImportError:
+ try:
+ mod = __import__(plugin_name)
+ except Exception:
+ err = sys.exc_info()[1]
+ logger.error("Failed to load plugin %s: %s" % (plugin_name,
+ err))
+ raise SystemExit(1)
+ plugin = getattr(mod, plugin_name)
+ if [c for c in inspect.getmro(plugin)
+ if c == Bcfg2.Server.Lint.ServerPlugin]:
+ serverplugins[plugin_name] = plugin
+ else:
+ serverlessplugins[plugin_name] = plugin
+
+ errorhandler = get_errorhandler(config)
+
+ run_serverless_plugins(serverlessplugins,
+ errorhandler=errorhandler,
+ config=config, setup=setup)
+
+ if serverplugins:
+ run_server_plugins(serverplugins, errorhandler=errorhandler,
+ config=config, setup=setup)
+
+ if errorhandler.errors or errorhandler.warnings or setup['verbose']:
+ print("%d errors" % errorhandler.errors)
+ print("%d warnings" % errorhandler.warnings)
+
+ if errorhandler.errors:
+ raise SystemExit(2)
+ elif errorhandler.warnings:
+ raise SystemExit(3)
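The new bcfg2-lint script's Parser class gives ConfigParser.get() a default argument, so the script and its lint plugins can read optional settings without a try/except at every call site (see the config.get('lint', 'plugins', ...) call above). A standalone sketch of the same idiom, written here against Python 3's configparser module for illustration; the script itself imports ConfigParser through Bcfg2.Bcfg2Py3k:

# Sketch of the default-returning get() override; the option and section names
# are made up. Missing options (or sections) yield the supplied default.
import configparser

class Parser(configparser.ConfigParser):
    def get(self, section, option, default=None):
        try:
            return configparser.ConfigParser.get(self, section, option)
        except (configparser.NoOptionError, configparser.NoSectionError):
            return default

config = Parser()
config.read('/etc/bcfg2-lint.conf')          # fine even if the file is absent
print(config.get('lint', 'plugins', 'Validate,RequiredAttrs'))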
diff --git a/src/sbin/bcfg2-ping-sweep b/src/sbin/bcfg2-ping-sweep
index 4082cad8b..70f718690 100755
--- a/src/sbin/bcfg2-ping-sweep
+++ b/src/sbin/bcfg2-ping-sweep
@@ -8,7 +8,7 @@ __revision__ = '$Revision$'
from os import dup2, execl, fork, uname, wait
import sys
import time
-import lxml.etree
+import lxml.etree
import Bcfg2.Options
@@ -20,9 +20,10 @@ if __name__ == '__main__':
cfpath = setup['configfile']
clientdatapath = "%s/Metadata/clients.xml" % setup['repo']
-
+
clientElement = lxml.etree.parse(clientdatapath)
- hostlist = [client.get('name') for client in clientElement.findall("Client")]
+ hostlist = [client.get('name')
+ for client in clientElement.findall("Client")]
pids = {}
null = open('/dev/null', 'w+')
@@ -31,9 +32,8 @@ if __name__ == '__main__':
#/bin/ping on linux /sbin/ping on os x
osname = uname()[0]
-
while hostlist or pids:
- if hostlist and len(pids.keys()) < 15:
+ if hostlist and len(list(pids.keys())) < 15:
host = hostlist.pop()
pid = fork()
if pid == 0:
@@ -47,7 +47,7 @@ if __name__ == '__main__':
execl('/sbin/ping', 'ping', '-t', '5', '-c', '1', host)
elif osname == 'SunOS':
execl('/usr/sbin/ping', 'ping', host, '56', '1')
- else: #default
+ else: # default
execl('/bin/ping', 'ping', '-w', '5', '-c', '1', host)
else:
pids[pid] = host
@@ -58,14 +58,15 @@ if __name__ == '__main__':
continue
chost = pids[cpid]
del pids[cpid]
- elm = clientElement.xpath("//Client[@name='%s']"%chost)[0]
+ elm = clientElement.xpath("//Client[@name='%s']" % chost)[0]
if status == 0:
- elm.set("pingable",'Y')
+ elm.set("pingable", 'Y')
elm.set("pingtime", str(time.time()))
else:
- elm.set("pingable",'N')
+ elm.set("pingable", 'N')
fout = open(clientdatapath, 'w')
- fout.write(lxml.etree.tostring(clientElement.getroot(), encoding='UTF-8', xml_declaration=True))
+ fout.write(lxml.etree.tostring(clientElement.getroot(),
+ encoding='UTF-8',
+ xml_declaration=True))
fout.close()
-
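Beyond the whitespace and line-length cleanups, the bcfg2-ping-sweep hunks keep the existing pattern: look up the matching Client element, set its pingable/pingtime attributes, and serialize the tree back out with an XML declaration. An illustrative sketch only; the XML snippet and host name below are made up:

# Hypothetical data; demonstrates the xpath lookup, attribute update, and
# tostring(..., xml_declaration=True) round trip used above.
import time
import lxml.etree

tree = lxml.etree.fromstring(
    '<Clients><Client name="node1"/><Client name="node2"/></Clients>'
).getroottree()
elm = tree.xpath("//Client[@name='%s']" % 'node1')[0]
elm.set('pingable', 'Y')
elm.set('pingtime', str(time.time()))
print(lxml.etree.tostring(tree.getroot(), encoding='UTF-8',
                          xml_declaration=True).decode('UTF-8'))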
diff --git a/src/sbin/bcfg2-repo-validate b/src/sbin/bcfg2-repo-validate
index 554e4f72b..cea09cda3 100755..120000
--- a/src/sbin/bcfg2-repo-validate
+++ b/src/sbin/bcfg2-repo-validate
@@ -1,227 +1 @@
-#!/usr/bin/env python
-
-"""
-bcfg2-repo-validate checks all xml files in Bcfg2
-repos against their respective XML schemas.
-"""
-__revision__ = '$Revision$'
-
-import glob
-import lxml.etree
-import os
-import sys
-import Bcfg2.Options
-
-if __name__ == '__main__':
- opts = {'repo': Bcfg2.Options.SERVER_REPOSITORY,
- 'prefix': Bcfg2.Options.INSTALL_PREFIX,
- 'verbose': Bcfg2.Options.VERBOSE,
- 'configfile': Bcfg2.Options.CFILE}
- setup = Bcfg2.Options.OptionParser(opts)
- setup.parse(sys.argv[1:])
- verbose = setup['verbose']
- cpath = setup['configfile']
- prefix = setup['prefix']
- schemadir = "%s/share/bcfg2/schemas" % (prefix)
- os.chdir(schemadir)
- repo = setup['repo']
-
- # Get a list of all info.xml files in the bcfg2 repository
- info_list = []
- for infodir in ['Cfg', 'TGenshi', 'TCheetah']:
- for root, dirs, files in os.walk('%s/%s' % (repo, infodir)):
- for filename in files:
- if filename == 'info.xml':
- info_list.append(os.path.join(root, filename))
-
- # get metadata list (with all included files)
- metadata_list = glob.glob("%s/Metadata/groups.xml" % repo)
- ref_bundles = set()
- xdata = lxml.etree.parse("%s/Metadata/groups.xml" % repo)
- included = set([ent.get('href') for ent in \
- xdata.findall('./{http://www.w3.org/2001/XInclude}include')])
- while included:
- try:
- filename = included.pop()
- except KeyError:
- continue
- metadata_list.append("%s/Metadata/%s" % (repo, filename))
- groupdata = lxml.etree.parse("%s/Metadata/%s" % (repo, filename))
- group_ents = [ent.get('href') for ent in \
- groupdata.
- findall('./{http://www.w3.org/2001/XInclude}include')]
- for ent in group_ents:
- included.add(ent)
- included.discard(filename)
-
- # check for multiple default group definitions
- default_groups = []
- for grp in lxml.etree.parse("%s/Metadata/groups.xml" \
- % repo).findall('.//Group'):
- if grp.get('default') == 'true':
- default_groups.append(grp)
- if len(default_groups) > 1:
- print("*** Warning: Multiple default groups defined")
- for grp in default_groups:
- print(" %s" % grp.get('name'))
-
- # get all XIncluded bundles
- xdata.xinclude()
- for bundle in xdata.findall("//Bundle"):
- ref_bundles.add("%s/Bundler/%s" % (repo, bundle.get('name')))
-
- # get lists of all other xml files to validate
- clients_list = glob.glob("%s/Metadata/clients.xml" % repo)
- bundle_list = glob.glob("%s/Bundler/*.xml" % repo)
- genshibundle_list = glob.glob("%s/Bundler/*.genshi" % repo)
- pkg_list = glob.glob("%s/Pkgmgr/*.xml" % repo)
- base_list = glob.glob("%s/Base/*.xml" % repo)
- rules_list = glob.glob("%s/Rules/*.xml" % repo)
- imageinfo_list = glob.glob("%s/etc/report-configuration.xml" % repo)
- services_list = glob.glob("%s/Svcmgr/*.xml" % repo)
- deps_list = glob.glob("%s/Deps/*.xml" % repo)
- dec_list = glob.glob("%s/Decisions/*" % repo)
- pkgcfg_list = glob.glob("%s/Packages/config.xml" % repo)
- gp_list = glob.glob('%s/GroupPatterns/config.xml' % repo)
-
- # verify attributes for configuration entries
- # (as defined in doc/server/configurationentries)
- # TODO: See if it is possible to do this in the schema instead
- required_configuration_attrs = {
- 'device': ['name', 'owner', 'group', 'dev_type'],
- 'directory': ['name', 'owner', 'group', 'perms'],
- 'file': ['name', 'owner', 'group', 'perms'],
- 'hardlink': ['name', 'to'],
- 'symlink': ['name', 'to'],
- 'ignore': ['name'],
- 'nonexistent': ['name'],
- 'permissions': ['name', 'owner', 'group', 'perms']}
- for rfile in rules_list:
- try:
- xdata = lxml.etree.parse(rfile)
- except lxml.etree.XMLSyntaxError, e:
- print("Failed to parse %s: %s" % (rfile, e))
- for posixpath in xdata.findall("//Path"):
- pathname = posixpath.get('name')
- pathtype = posixpath.get('type')
- pathset = set(posixpath.attrib.keys())
- try:
- required_attrs = set(required_configuration_attrs[pathtype] \
- + ['type'])
- except KeyError:
- continue
- if 'dev_type' in required_attrs:
- dev_type = posixpath.get('dev_type')
- if dev_type in ['block', 'char']:
- # check if major/minor are specified
- required_attrs |= set(['major', 'minor'])
- if pathset.issuperset(required_attrs):
- continue
- else:
- print("The following required attributes are missing for"
- " Path %s in %s: %s" % (pathname, rfile,
- [attr for attr in required_attrs.difference(pathset)]))
-
- # warn on duplicate Pkgmgr entries with the same priority
- pset = set()
- for plist in pkg_list:
- try:
- xdata = lxml.etree.parse(plist)
- except lxml.etree.XMLSyntaxError, e:
- print("Failed to parse %s: %s" % (plist, e))
- # get priority, type, group
- priority = xdata.getroot().get('priority')
- ptype = xdata.getroot().get('type')
- for pkg in xdata.findall("//Package"):
- if pkg.getparent().tag == 'Group':
- grp = pkg.getparent().get('name')
- if type(grp) is not str and grp.getparent().tag == 'Group':
- pgrp = grp.getparent().get('name')
- else:
- pgrp = 'none'
- else:
- grp = 'none'
- pgrp = 'none'
- ptuple = (pkg.get('name'), priority, ptype, grp, pgrp)
- # check if package is already listed with same priority,
- # type, grp
- if ptuple in pset:
- print("Duplicate Package %s, priority:%s, type:%s"\
- % (pkg.get('name'), priority, ptype))
- else:
- pset.add(ptuple)
-
- filesets = {'metadata': (metadata_list, "%s/metadata.xsd"),
- 'clients': (clients_list, "%s/clients.xsd"),
- 'info': (info_list, "%s/info.xsd"),
- 'bundle': (bundle_list, "%s/bundle.xsd"),
- 'pkglist': (pkg_list, "%s/pkglist.xsd"),
- 'base': (base_list, "%s/base.xsd"),
- 'rules': (rules_list, "%s/rules.xsd"),
- 'imageinfo': (imageinfo_list, "%s/report-configuration.xsd"),
- 'services': (services_list, "%s/services.xsd"),
- 'deps': (deps_list, "%s/deps.xsd"),
- 'decisions': (dec_list, "%s/decisions.xsd"),
- 'packages': (pkgcfg_list, "%s/packages.xsd"),
- 'grouppatterns': (gp_list, "%s/grouppatterns.xsd"),
- }
-
- failures = 0
- for k, (filelist, schemaname) in list(filesets.items()):
- try:
- schema = lxml.etree.XMLSchema(lxml.etree.parse(open(schemaname%(schemadir))))
- except:
- print("Failed to process schema %s" % (schemaname%(schemadir)))
- failures = 1
- continue
- for filename in filelist:
- try:
- datafile = lxml.etree.parse(open(filename))
- except SyntaxError:
- print("%s ***FAILS*** to parse \t\t<----" % (filename))
- os.system("xmllint %s" % filename)
- failures = 1
- continue
- except IOError:
- print("Failed to open file %s \t\t<---" % (filename))
- failures = 1
- continue
- if schema.validate(datafile):
- if verbose:
- print("%s checks out" % (filename))
- else:
- rc = os.system("xmllint --noout --xinclude --schema \
- %s %s > /dev/null 2>/dev/null" % \
- (schemaname % schemadir, filename))
- if rc:
- failures = 1
- print("%s ***FAILS*** to verify \t\t<----" % (filename))
- os.system("xmllint --noout --xinclude --schema %s %s" % \
- (schemaname % schemadir, filename))
- elif verbose:
- print("%s checks out" % (filename))
-
- # print out missing bundle information
- if verbose:
- print("")
- for bundle in ref_bundles:
- # check for both regular and genshi bundles
- xmlbundle = "%s.xml" % bundle
- genshibundle = "%s.genshi" % bundle
- allbundles = bundle_list + genshibundle_list
- if xmlbundle not in allbundles and \
- genshibundle not in allbundles:
- print("*** Warning: Bundle %s referenced, but does not "
- "exist." % bundle)
- # verify bundle name attribute matches filename
- for bundle in (bundle_list + genshibundle_list):
- fname = bundle.split('Bundler/')[1].split('.')[0]
- xdata = lxml.etree.parse(bundle)
- bname = xdata.getroot().get('name')
- if fname != bname:
- print("The following names are inconsistent:")
- print(" Filename is %s" % fname)
- print(" Bundle name found in %s is %s" % (fname, bname))
-
-
- raise SystemExit, failures
+bcfg2-lint
\ No newline at end of file
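bcfg2-repo-validate is reduced to a symlink: its schema checks now run through bcfg2-lint (note the bcfg2-repo-validate branch in the plugin-selection logic of the new script above). The core of the removed loop is the usual lxml schema check; a standalone sketch with hypothetical paths:

# Hypothetical paths; shows the XMLSchema/validate pattern the removed script
# applied to each (schema, file) pair.
import lxml.etree

schema = lxml.etree.XMLSchema(
    lxml.etree.parse('/usr/share/bcfg2/schemas/bundle.xsd'))
datafile = lxml.etree.parse('/var/lib/bcfg2/Bundler/motd.xml')
if schema.validate(datafile):
    print('motd.xml checks out')
else:
    print(schema.error_log)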
diff --git a/src/sbin/bcfg2-reports b/src/sbin/bcfg2-reports
index d83e45e7c..20288fc5e 100755
--- a/src/sbin/bcfg2-reports
+++ b/src/sbin/bcfg2-reports
@@ -5,7 +5,13 @@ __revision__ = '$Revision$'
import os
import sys
-import Bcfg2.Server.Reports.settings
+try:
+ import Bcfg2.Server.Reports.settings
+except ConfigParser.NoSectionError:
+ print("Your bcfg2.conf is currently missing the statistics section which "
+ "is necessary for the reporting interface. Please see bcfg2.conf(5) "
+ "for more details.")
+ sys.exit(1)
project_directory = os.path.dirname(Bcfg2.Server.Reports.settings.__file__)
project_name = os.path.basename(project_directory)
@@ -87,13 +93,13 @@ def print_fields(fields, cli, max_name, entrydict):
if len(entrydict) > 0:
display += " "
display += str(entrydict[cli])
- print display
+ print(display)
def print_entry(item, max_name):
fmt = ("%%-%ds " % (max_name))
fdata = item.entry.kind + ":" + item.entry.name
display = fmt % (fdata)
- print display
+ print(display)
fields = ""
sort = ""
@@ -131,14 +137,14 @@ if expire != "":
if expire == c_inst.name:
if c_inst.expiration == None:
c_inst.expiration = datetime.datetime.now()
- print "Host expired."
+ print("Host expired.")
else:
c_inst.expiration = None
- print "Host un-expired."
+ print("Host un-expired.")
c_inst.save()
elif '-h' in args:
- print """Usage: bcfg2-reports [option] ...
+ print("""Usage: bcfg2-reports [option] ...
Options and arguments (and corresponding environment variables):
-a : shows all hosts, including expired hosts
@@ -164,13 +170,13 @@ Options and arguments (and corresponding environment variables):
(name,time,state)
--sort=ARG1,ARG2,... : sorts output on ARG1,ARG2,... (name,time,state)
--stale : shows hosts which haven't run in the last 24 hours
-"""
+""")
elif singlehost != "":
for c_inst in c_list:
if singlehost == c_inst.name:
baditems = c_inst.current_interaction.bad()
if len(baditems) > 0 and ('-b' in args or '-s' in args):
- print "Bad Entries:"
+ print("Bad Entries:")
max_name = -1
for item in baditems:
if len(item.entry.name) > max_name:
@@ -179,7 +185,7 @@ elif singlehost != "":
print_entry(item, max_name)
extraitems = c_inst.current_interaction.extra()
if len(extraitems) > 0 and ('-e' in args or '-s' in args):
- print "Extra Entries:"
+ print("Extra Entries:")
max_name = -1
for item in extraitems:
if len(item.entry.name) > max_name:
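The new guard around the bcfg2-reports settings import reports a missing statistics section instead of dumping a traceback. For the except clause to resolve, ConfigParser has to be bound in the script's namespace; a self-contained variant of the same guard, with the compatibility import spelled out the way bcfg2-lint does above, would be:

# Variant of the guarded import above with its compatibility import included;
# behaviour is otherwise the same as the hunk shows.
import sys
from Bcfg2.Bcfg2Py3k import ConfigParser

try:
    import Bcfg2.Server.Reports.settings
except ConfigParser.NoSectionError:
    print("Your bcfg2.conf is currently missing the statistics section which "
          "is necessary for the reporting interface. Please see bcfg2.conf(5) "
          "for more details.")
    sys.exit(1)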
diff --git a/src/sbin/bcfg2-server b/src/sbin/bcfg2-server
index cf44f1699..f4bd5e5b7 100755
--- a/src/sbin/bcfg2-server
+++ b/src/sbin/bcfg2-server
@@ -69,7 +69,8 @@ if __name__ == '__main__':
certfile=setup['cert'],
ca=setup['ca'],
)
- except CoreInitError, msg:
+ except CoreInitError:
+ msg = sys.exc_info()[1]
logger.error(msg)
logger.error("exiting")
sys.exit(1)
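The CoreInitError handler in bcfg2-server is updated to capture the exception through sys.exc_info(), which parses under both Python 2 and Python 3, unlike the old "except CoreInitError, msg" form, which is Python 2-only syntax. A minimal standalone illustration using a stand-in exception class:

# Stand-in exception class for illustration; the idiom itself is what the
# hunk above switches to.
import sys

class CoreInitError(Exception):
    pass

try:
    raise CoreInitError("server core failed to initialize")
except CoreInitError:
    msg = sys.exc_info()[1]
    print("%s" % msg)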