Diffstat (limited to 'src/lib')
-rw-r--r--  src/lib/Client/Tools/Pacman.py      |  22
-rw-r--r--  src/lib/Client/Tools/YUMng.py       |   7
-rw-r--r--  src/lib/Server/Admin/Backup.py      |  32
-rw-r--r--  src/lib/Server/Admin/__init__.py    |   2
-rw-r--r--  src/lib/Server/Hostbase/settings.py |   7
-rw-r--r--  src/lib/Server/Plugins/Packages.py  |  25
-rw-r--r--  src/lib/Server/Plugins/SSLCA.py     | 239
-rw-r--r--  src/lib/Server/Reports/settings.py  |   3
-rw-r--r--  src/lib/Server/Reports/updatefix.py |   8
9 files changed, 315 insertions, 30 deletions
diff --git a/src/lib/Client/Tools/Pacman.py b/src/lib/Client/Tools/Pacman.py
index a9edc4d65..be3fb0c94 100644
--- a/src/lib/Client/Tools/Pacman.py
+++ b/src/lib/Client/Tools/Pacman.py
@@ -1,8 +1,7 @@
"""This is the bcfg2 support for pacman"""
import Bcfg2.Client.Tools
-import Bcfg2.Options
-import Bcfg2.Client.Tools
+
class Pacman(Bcfg2.Client.Tools.PkgTool):
'''Archlinux package support'''
@@ -31,7 +30,8 @@ class Pacman(Bcfg2.Client.Tools.PkgTool):
def VerifyPackage(self, entry, modlist):
'''Verify Package status for entry'''
- print "VerifyPackage : " + entry.get('name')+ " : " + entry.get('version')
+ self.logger.info("VerifyPackage : %s : %s" % entry.get('name'),
+ entry.get('version'))
if not 'version' in entry.attrib:
self.logger.info("Cannot verify unversioned package %s" %
@@ -44,8 +44,8 @@ class Pacman(Bcfg2.Client.Tools.PkgTool):
elif self.installed[entry.attrib['name']] == entry.attrib['version']:
#if not self.setup['quick'] and \
# entry.get('verify', 'true') == 'true':
- #FIXME: We should be able to check this once
- # http://trac.macports.org/ticket/15709 is implemented
+ #FIXME: need to figure out if pacman
+ # allows you to verify packages
return True
else:
entry.set('current_version', self.installed[entry.get('name')])
@@ -76,11 +76,7 @@ class Pacman(Bcfg2.Client.Tools.PkgTool):
print "packages : " + pkgline
try:
- self.logger.debug('Running Pacman.Install()')
- print "running : %s -S %s" % (self.pkgtool, pkgline)
- s = self.cmd.run("%s -S %s" % (self.pkgtool, pkgline))
- print "pacman : " + str(s)
- except Exception as ex:
- print "error in cmd.run ", ex
-
- self.logger.debug('Running Pacman.Install()')
+ self.logger.debug("Running : %s -S %s" % (self.pkgtool, pkgline))
+ self.cmd.run("%s -S %s" % (self.pkgtool, pkgline))
+ except Exception, e:
+ self.logger.error("Error occurred during installation: %s" % e)
diff --git a/src/lib/Client/Tools/YUMng.py b/src/lib/Client/Tools/YUMng.py
index 9cdcdca40..f0d906717 100644
--- a/src/lib/Client/Tools/YUMng.py
+++ b/src/lib/Client/Tools/YUMng.py
@@ -210,6 +210,8 @@ class YUMng(Bcfg2.Client.Tools.PkgTool):
"version_fail_action", "upgrade").lower() == "upgrade"
self.doReinst = CP.get(self.name, "verify_fail_action",
"reinstall").lower() == "reinstall"
+ self.verifyFlags = CP.get(self.name, "verify_flags",
+ "").lower().replace(' ', ',')
self.installOnlyPkgs = self.yb.conf.installonlypkgs
if 'gpg-pubkey' not in self.installOnlyPkgs:
@@ -225,6 +227,7 @@ class YUMng(Bcfg2.Client.Tools.PkgTool):
% self.doReinst)
self.logger.debug("YUMng: installOnlyPkgs: %s" \
% str(self.installOnlyPkgs))
+ self.logger.debug("YUMng: verify_flags: %s" % self.verifyFlags)
def _fixAutoVersion(self, entry):
# old style entry; synthesize Instances from current installed
@@ -436,6 +439,8 @@ class YUMng(Bcfg2.Client.Tools.PkgTool):
stat['verify_fail'] = False
stat['pkg'] = entry
stat['modlist'] = modlist
+ verify_flags = inst.get('verify_flags', self.verifyFlags)
+ verify_flags = verify_flags.lower().replace(' ', ',').split(',')
if len(POs) == 0:
# Package not installed
@@ -505,6 +510,8 @@ class YUMng(Bcfg2.Client.Tools.PkgTool):
for p in probs:
if p.type == 'missing' and os.path.islink(fn):
continue
+ elif 'no' + p.type in verify_flags:
+ continue
if p.type not in ['missingok', 'ghost']:
tmp.append((p.type, p.message))
if tmp != []:
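
The verify_flags option added above is read from the tool's section of bcfg2.conf, lower-cased, and normalized into a comma-separated list; during verification a reported problem is then skipped when "no" plus its type appears in that list. A minimal sketch of the normalization and filtering, with a hypothetical configured value and assumed problem types:

    configured = "nomd5 nosize"        # hypothetical verify_flags value from bcfg2.conf
    verify_flags = configured.lower().replace(' ', ',').split(',')
    for ptype in ['md5', 'size', 'mtime']:    # assumed rpm verify problem types
        if 'no' + ptype in verify_flags:
            print ptype + " : skipped"
        else:
            print ptype + " : reported"
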
diff --git a/src/lib/Server/Admin/Backup.py b/src/lib/Server/Admin/Backup.py
new file mode 100644
index 000000000..27a7fd8c8
--- /dev/null
+++ b/src/lib/Server/Admin/Backup.py
@@ -0,0 +1,32 @@
+import os
+import sys
+import time
+import tarfile
+import Bcfg2.Server.Admin
+import Bcfg2.Options
+
+class Backup(Bcfg2.Server.Admin.MetadataCore):
+ __shorthelp__ = "Make a backup of the Bcfg2 repository."
+ __longhelp__ = (__shorthelp__ + "\n\nbcfg2-admin backup")
+ #"\n\nbcfg2-admin backup restore")
+ __usage__ = ("bcfg2-admin backup")
+
+ def __init__(self, configfile):
+ Bcfg2.Server.Admin.MetadataCore.__init__(self, configfile,
+ self.__usage__)
+
+ def __call__(self, args):
+ Bcfg2.Server.Admin.MetadataCore.__call__(self, args)
+ # Get Bcfg2 repo directory
+ opts = {'repo': Bcfg2.Options.SERVER_REPOSITORY}
+ setup = Bcfg2.Options.OptionParser(opts)
+ setup.parse(sys.argv[1:])
+ self.datastore = setup['repo']
+ timestamp = time.strftime('%Y%m%d%H%M%S')
+ format = 'gz'
+ mode = 'w:' + format
+ filename = timestamp + '.tar' + '.' + format
+ out = tarfile.open(self.datastore + '/' + filename, mode=mode)
+ out.add(self.datastore, os.path.basename(self.datastore))
+ out.close()
+ print "Archive %s was stored under %s" % (filename, self.datastore)
diff --git a/src/lib/Server/Admin/__init__.py b/src/lib/Server/Admin/__init__.py
index 3a088b2fb..bb5c41895 100644
--- a/src/lib/Server/Admin/__init__.py
+++ b/src/lib/Server/Admin/__init__.py
@@ -2,7 +2,7 @@ __revision__ = '$Revision$'
__all__ = ['Mode', 'Client', 'Compare', 'Init', 'Minestruct', 'Perf',
'Pull', 'Query', 'Reports', 'Snapshots', 'Tidy', 'Viz',
- 'Xcmd', 'Group']
+ 'Xcmd', 'Group', 'Backup']
import ConfigParser
import logging
diff --git a/src/lib/Server/Hostbase/settings.py b/src/lib/Server/Hostbase/settings.py
index dadf98d24..a42fd5b2e 100644
--- a/src/lib/Server/Hostbase/settings.py
+++ b/src/lib/Server/Hostbase/settings.py
@@ -44,8 +44,11 @@ DATABASE_HOST = options['database_host']
# Set to empty string for default. Not used with sqlite3.
DATABASE_PORT = int(options['database_port'])
# Local time zone for this installation. All choices can be found here:
-# http://www.postgresql.org/docs/current/static/datetime-keywords.html#DATETIME-TIMEZONE-SET-TABLE
-TIME_ZONE = 'America/Chicago'
+# http://docs.djangoproject.com/en/dev/ref/settings/#time-zone
+try:
+ TIME_ZONE = c.get('statistics', 'time_zone')
+except:
+ TIME_ZONE = None
# enter the default MX record machines will get in Hostbase
# this setting may move elsewhere eventually
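
With this change the Hostbase TIME_ZONE comes from the [statistics] section of the Bcfg2 configuration instead of a hard-coded value, falling back to None when the option is absent. A sketch of the lookup with an assumed config location:

    from ConfigParser import ConfigParser

    # assumed bcfg2.conf contents:
    #   [statistics]
    #   time_zone = America/Chicago
    c = ConfigParser()
    c.read('/etc/bcfg2.conf')               # assumed path
    try:
        TIME_ZONE = c.get('statistics', 'time_zone')
    except:
        TIME_ZONE = None
    print TIME_ZONE
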
diff --git a/src/lib/Server/Plugins/Packages.py b/src/lib/Server/Plugins/Packages.py
index 194330723..ee21fb622 100644
--- a/src/lib/Server/Plugins/Packages.py
+++ b/src/lib/Server/Plugins/Packages.py
@@ -76,8 +76,8 @@ def _fetch_url(url):
class Source(object):
basegroups = []
- def __init__(self, basepath, url, version, arches, components, groups, rawurl,
- blacklist, whitelist, recommended):
+ def __init__(self, basepath, url, version, arches, components, groups,
+ rawurl, blacklist, whitelist, recommended):
self.basepath = basepath
self.version = version
self.components = components
@@ -112,7 +112,8 @@ class Source(object):
try:
self.read_files()
except:
- logger.error("Packages: File read failed; falling back to file download")
+ logger.error("Packages: File read failed; "
+ "falling back to file download")
should_download = True
if should_download or force_update:
@@ -389,7 +390,7 @@ class APTSource(Source):
if self.recommended:
depfnames = ['Depends', 'Pre-Depends', 'Recommends']
else:
- depfnames = ['Depends', 'Pre-Depends']
+ depfnames = ['Depends', 'Pre-Depends']
for fname in self.files:
if not self.rawurl:
barch = [x for x in fname.split('@') if x.startswith('binary-')][0][7:]
@@ -504,7 +505,6 @@ class PACSource(Source):
raise Exception("PACSource : RAWUrl not supported (yet)")
urls = property(get_urls)
-
def read_files(self):
bdeps = dict()
bprov = dict()
@@ -512,7 +512,7 @@ class PACSource(Source):
if self.recommended:
depfnames = ['Depends', 'Pre-Depends', 'Recommends']
else:
- depfnames = ['Depends', 'Pre-Depends']
+ depfnames = ['Depends', 'Pre-Depends']
for fname in self.files:
if not self.rawurl:
@@ -535,8 +535,8 @@ class PACSource(Source):
for tarinfo in tar:
if tarinfo.isdir():
- self.pkgnames.add(tarinfo.name.rsplit("-",2)[0])
- print "added : " + tarinfo.name.rsplit("-",2)[0]
+ self.pkgnames.add(tarinfo.name.rsplit("-", 2)[0])
+ print "added : " + tarinfo.name.rsplit("-", 2)[0]
tar.close()
self.deps['global'] = dict()
@@ -676,7 +676,7 @@ class Packages(Bcfg2.Server.Plugin.Plugin,
# do while unclassified or vpkgs or both or pkgs
while unclassified or pkgs or both or final_pass:
#print len(unclassified), len(pkgs), len(both), len(vpkgs), final_pass
- if really_done:
+ if really_done:
break
if len(unclassified) + len(pkgs) + len(both) == 0:
# one more pass then exit
@@ -760,7 +760,9 @@ class Packages(Bcfg2.Server.Plugin.Plugin,
meta - client metadata instance
structures - a list of structure-stage entry combinations
'''
- if self.disableResolver: return # Config requests no resolver
+ if self.disableResolver:
+ # Config requests no resolver
+ return
initial = set([pkg.get('name') for struct in structures \
for pkg in struct.findall('Package') +
@@ -857,7 +859,8 @@ class Packages(Bcfg2.Server.Plugin.Plugin,
cachefiles = []
for source in self.sources:
cachefiles.append(source.cachefile)
- if not self.disableMetaData: source.setup_data(force_update)
+ if not self.disableMetaData:
+ source.setup_data(force_update)
self.sentinels.update(source.basegroups)
for cfile in glob.glob("%s/cache-*" % self.cachepath):
if cfile not in cachefiles:
diff --git a/src/lib/Server/Plugins/SSLCA.py b/src/lib/Server/Plugins/SSLCA.py
new file mode 100644
index 000000000..4125cd498
--- /dev/null
+++ b/src/lib/Server/Plugins/SSLCA.py
@@ -0,0 +1,239 @@
+import Bcfg2.Server.Plugin
+import Bcfg2.Options
+import lxml.etree
+import posixpath
+import tempfile
+import os
+from subprocess import Popen, PIPE, STDOUT
+from ConfigParser import ConfigParser
+
+
+class SSLCA(Bcfg2.Server.Plugin.GroupSpool):
+ """
+ The SSLCA generator handles the creation and
+ management of ssl certificates and their keys.
+ """
+ name = 'SSLCA'
+ __version__ = '$Id:$'
+ __author__ = 'g.hagger@gmail.com'
+ __child__ = Bcfg2.Server.Plugin.FileBacked
+ key_specs = {}
+ cert_specs = {}
+ CAs = {}
+
+ def HandleEvent(self, event=None):
+ """
+ Updates which files this plugin handles based upon filesystem events.
+ Allows configuration items to be added/removed without server restarts.
+ """
+ action = event.code2str()
+ if event.filename[0] == '/':
+ return
+ epath = "".join([self.data, self.handles[event.requestID],
+ event.filename])
+ if posixpath.isdir(epath):
+ ident = self.handles[event.requestID] + event.filename
+ else:
+ ident = self.handles[event.requestID][:-1]
+
+ fname = "".join([ident, '/', event.filename])
+
+ if event.filename.endswith('.xml'):
+ if action in ['exists', 'created', 'changed']:
+ if event.filename.endswith('key.xml'):
+ key_spec = dict(lxml.etree.parse(epath).find('Key').items())
+ self.key_specs[ident] = {
+ 'bits': key_spec.get('bits', 2048),
+ 'type': key_spec.get('type', 'rsa')
+ }
+ self.Entries['Path'][ident] = self.get_key
+ elif event.filename.endswith('cert.xml'):
+ cert_spec = dict(lxml.etree.parse(epath).find('Cert').items())
+ ca = cert_spec.get('ca', 'default')
+ self.cert_specs[ident] = {
+ 'ca': ca,
+ 'format': cert_spec.get('format', 'pem'),
+ 'key': cert_spec.get('key'),
+ 'days': cert_spec.get('days', 365),
+ 'C': cert_spec.get('c'),
+ 'L': cert_spec.get('l'),
+ 'ST': cert_spec.get('st'),
+ 'OU': cert_spec.get('ou'),
+ 'O': cert_spec.get('o'),
+ 'emailAddress': cert_spec.get('emailaddress')
+ }
+ cp = ConfigParser()
+ cp.read(self.core.cfile)
+ self.CAs[ca] = dict(cp.items('sslca_'+ca))
+ self.Entries['Path'][ident] = self.get_cert
+ if action == 'deleted':
+ if ident in self.Entries['Path']:
+ del self.Entries['Path'][ident]
+ else:
+ if action in ['exists', 'created']:
+ if posixpath.isdir(epath):
+ self.AddDirectoryMonitor(epath[len(self.data):])
+ if ident not in self.entries and posixpath.isfile(epath):
+ self.entries[fname] = self.__child__(epath)
+ self.entries[fname].HandleEvent(event)
+ if action == 'changed':
+ self.entries[fname].HandleEvent(event)
+ elif action == 'deleted':
+ if fname in self.entries:
+ del self.entries[fname]
+ else:
+ self.entries[fname].HandleEvent(event)
+
+ def get_key(self, entry, metadata):
+ """
+ either grabs a preexisting key hostfile, or triggers the generation
+ of a new key if one doesn't exist.
+ """
+ # set path type and permissions, otherwise bcfg2 won't bind the file
+ permdata = {'owner': 'root',
+ 'group': 'root',
+ 'type': 'file',
+ 'perms': '644'}
+ [entry.attrib.__setitem__(key, permdata[key]) for key in permdata]
+
+ # check if we already have a hostfile, or need to generate a new key
+ # TODO: verify key fits the specs
+ path = entry.get('name')
+ filename = "".join([path, '/', path.rsplit('/', 1)[1], '.H_', metadata.hostname])
+ if filename not in self.entries.keys():
+ key = self.build_key(filename, entry, metadata)
+ open(self.data + filename, 'w').write(key)
+ entry.text = key
+ else:
+ entry.text = self.entries[filename].data
+
+ def build_key(self, filename, entry, metadata):
+ """
+ generates a new key according to the specification
+ """
+ type = self.key_specs[entry.get('name')]['type']
+ bits = self.key_specs[entry.get('name')]['bits']
+ if type == 'rsa':
+ cmd = "openssl genrsa %s " % bits
+ elif type == 'dsa':
+ cmd = "openssl dsaparam -noout -genkey %s" % bits
+ key = Popen(cmd, shell=True, stdout=PIPE).stdout.read()
+ return key
+
+ def get_cert(self, entry, metadata):
+ """
+ either grabs a preexisting cert hostfile, or triggers the generation
+ of a new cert if one doesn't exist.
+ """
+ # set path type and permissions, otherwise bcfg2 won't bind the file
+ permdata = {'owner': 'root',
+ 'group': 'root',
+ 'type': 'file',
+ 'perms': '644'}
+ [entry.attrib.__setitem__(key, permdata[key]) for key in permdata]
+
+ path = entry.get('name')
+ filename = "".join([path, '/', path.rsplit('/', 1)[1], '.H_', metadata.hostname])
+
+ # first - ensure we have a key to work with
+ key = self.cert_specs[entry.get('name')].get('key')
+ key_filename = "".join([key, '/', key.rsplit('/', 1)[1], '.H_', metadata.hostname])
+ if key_filename not in self.entries:
+ e = lxml.etree.Element('Path')
+ e.attrib['name'] = key
+ self.core.Bind(e, metadata)
+
+ # check if we have a valid hostfile
+ if filename in self.entries.keys() and self.verify_cert(filename, entry):
+ entry.text = self.entries[filename].data
+ else:
+ cert = self.build_cert(key_filename, entry, metadata)
+ open(self.data + filename, 'w').write(cert)
+ entry.text = cert
+
+ def verify_cert(self, filename, entry):
+ """
+ check that a certificate validates against the ca cert,
+ and that it has not expired.
+ """
+ chaincert = self.CAs[self.cert_specs[entry.get('name')]['ca']].get('chaincert')
+ cert = self.data + filename
+ cmd = "openssl verify -CAfile %s %s" % (chaincert, cert)
+ res = Popen(cmd, shell=True, stdout=PIPE, stderr=STDOUT).stdout.read()
+ if res == cert + ": OK\n":
+ return True
+ return False
+
+ def build_cert(self, key_filename, entry, metadata):
+ """
+ creates a new certificate according to the specification
+ """
+ req_config = self.build_req_config(entry, metadata)
+ req = self.build_request(key_filename, req_config, entry)
+ ca = self.cert_specs[entry.get('name')]['ca']
+ ca_config = self.CAs[ca]['config']
+ days = self.cert_specs[entry.get('name')]['days']
+ passphrase = self.CAs[ca].get('passphrase')
+ if passphrase:
+ cmd = "openssl ca -config %s -in %s -days %s -batch -passin pass:%s" % (ca_config, req, days, passphrase)
+ else:
+ cmd = "openssl ca -config %s -in %s -days %s -batch" % (ca_config, req, days)
+ cert = Popen(cmd, shell=True, stdout=PIPE).stdout.read()
+ try:
+ os.unlink(req_config)
+ os.unlink(req)
+ except OSError:
+ self.logger.error("Failed to unlink temporary files")
+ return cert
+
+ def build_req_config(self, entry, metadata):
+ """
+ generates a temporary openssl configuration file that is
+ used to generate the required certificate request
+ """
+ # create temp request config file
+ conffile = open(tempfile.mkstemp()[1], 'w')
+ cp = ConfigParser({})
+ cp.optionxform = str
+ defaults = {
+ 'req': {
+ 'default_md': 'sha1',
+ 'distinguished_name': 'req_distinguished_name',
+ 'req_extensions': 'v3_req',
+ 'x509_extensions': 'v3_req',
+ 'prompt': 'no'
+ },
+ 'req_distinguished_name': {},
+ 'v3_req': {
+ 'subjectAltName': '@alt_names'
+ },
+ 'alt_names': {}
+ }
+ for section in defaults.keys():
+ cp.add_section(section)
+ for key in defaults[section]:
+ cp.set(section, key, defaults[section][key])
+ x = 1
+ altnames = list(metadata.aliases)
+ altnames.append(metadata.hostname)
+ for altname in altnames:
+ cp.set('alt_names', 'DNS.'+str(x), altname)
+ x += 1
+ for item in ['C', 'L', 'ST', 'O', 'OU', 'emailAddress']:
+ if self.cert_specs[entry.get('name')][item]:
+ cp.set('req_distinguished_name', item, self.cert_specs[entry.get('name')][item])
+ cp.set('req_distinguished_name', 'CN', metadata.hostname)
+ cp.write(conffile)
+ conffile.close()
+ return conffile.name
+
+ def build_request(self, key_filename, req_config, entry):
+ """
+ creates the certificate request
+ """
+ req = tempfile.mkstemp()[1]
+ days = self.cert_specs[entry.get('name')]['days']
+ key = self.data + key_filename
+ cmd = "openssl req -new -config %s -days %s -key %s -text -out %s" % (req_config, days, key, req)
+ res = Popen(cmd, shell=True, stdout=PIPE).stdout.read()
+ return req
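
SSLCA is driven by per-path key.xml and cert.xml spec files plus one configuration section per CA. The attribute and option names below are the ones HandleEvent, get_cert, and build_cert actually read; the file paths and values themselves are hypothetical examples:

    key.xml for a generated key (attributes read into key_specs):
        <Key type="rsa" bits="2048"/>

    cert.xml for a generated certificate (attributes read into cert_specs):
        <Cert ca="default" format="pem" key="/etc/pki/tls/private/server.key"
              days="365" c="US" st="IL" l="Chicago" o="Example Org" ou="Ops"
              emailaddress="admin@example.com"/>

    bcfg2.conf section for the CA named above (keys used via self.CAs):
        [sslca_default]
        config = /etc/pki/CA/openssl.cnf
        chaincert = /etc/pki/CA/cacert.pem
        passphrase = example-passphrase
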
diff --git a/src/lib/Server/Reports/settings.py b/src/lib/Server/Reports/settings.py
index 81220c0e3..9efe38552 100644
--- a/src/lib/Server/Reports/settings.py
+++ b/src/lib/Server/Reports/settings.py
@@ -49,7 +49,8 @@ if DATABASE_ENGINE == 'sqlite3' and DATABASE_NAME == '':
try:
TIME_ZONE = c.get('statistics', 'time_zone')
except:
- TIME_ZONE = 'America/Chicago'
+ if django.VERSION[0] == 1 and django.VERSION[1] > 2:
+ TIME_ZONE = None
# Language code for this installation. All choices can be found here:
# http://www.w3.org/TR/REC-html40/struct/dirlang.html#langcodes
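
django.VERSION is a tuple such as (1, 3, 0, 'final', 0), so the guard above only sets TIME_ZONE to None on Django 1.3 and later within the 1.x series. A quick sketch of the shape being tested:

    import django
    print django.VERSION                    # e.g. (1, 3, 0, 'final', 0)
    if django.VERSION[0] == 1 and django.VERSION[1] > 2:
        TIME_ZONE = None                    # only on Django >= 1.3
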
diff --git a/src/lib/Server/Reports/updatefix.py b/src/lib/Server/Reports/updatefix.py
index 6d9b5e952..f8fca1f90 100644
--- a/src/lib/Server/Reports/updatefix.py
+++ b/src/lib/Server/Reports/updatefix.py
@@ -139,8 +139,12 @@ def dosync():
fresh = True
# ensure database connections are closed, so that the management can do its job right
- cursor.close()
- connection.close()
+ try:
+ cursor.close()
+ connection.close()
+ except:
+ # ignore any errors from missing/invalid dbs
+ pass
# Do the syncdb according to the django version
if "call_command" in dir(django.core.management):
# this is available since django 1.0 alpha.