author     Chris St. Pierre <chris.a.st.pierre@gmail.com>    2011-04-20 09:41:07 -0400
committer  Chris St. Pierre <chris.a.st.pierre@gmail.com>    2011-04-20 09:41:07 -0400
commit     b5810882e8c6b1e6b76a8239f70a129d415ecee6 (patch)
tree       8c2df3610bebd92f52b70b7f37a7197c9ec2a3e9
parent     20974e1311168b75e621cad14894fe7b217b61a2 (diff)
Rewrote bcfg2-repo-validate as bcfg2-lint, which uses a plugin interface to be
far more flexible and extensible. Added several more tests. If bcfg2-lint is
run as bcfg2-repo-validate, it roughly emulates the functionality of that
program.

TODO: Figure out the correct way to symlink bcfg2-repo-validate to bcfg2-lint
on install.
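For orientation, here is a minimal sketch of a lint plugin under the new interface, using only the hooks added in src/lib/Server/Lint/__init__.py below (ServerPlugin, the returnErrors decorator, HandlesFile, LintWarning). The plugin name and the check itself are hypothetical:

    import Bcfg2.Server.Lint

    class Example(Bcfg2.Server.Lint.ServerPlugin):
        """ hypothetical check: flag bundle filenames containing spaces """

        @Bcfg2.Server.Lint.returnErrors
        def Run(self):
            # iterate over bundles known to the running server, skipping
            # files not selected on the command line / stdin
            for bundle in self.core.plugins['Bundler'].entries.values():
                if self.HandlesFile(bundle.name) and ' ' in bundle.name:
                    self.LintWarning("Bundle filename contains spaces: %s" %
                                     bundle.name)

bcfg2-lint would pick such a plugin up if the module were importable as Bcfg2.Server.Lint.Example (or directly on sys.path) and 'Example' were listed in the plugins option of bcfg2-lint.conf.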
-rw-r--r--  examples/bcfg2-lint.conf                20
-rw-r--r--  src/lib/Logger.py                       12
-rw-r--r--  src/lib/Server/Lint/Bundles.py          56
-rw-r--r--  src/lib/Server/Lint/Comments.py        183
-rw-r--r--  src/lib/Server/Lint/Duplicates.py       79
-rw-r--r--  src/lib/Server/Lint/InfoXML.py          38
-rw-r--r--  src/lib/Server/Lint/Pkgmgr.py           33
-rw-r--r--  src/lib/Server/Lint/RequiredAttrs.py    69
-rw-r--r--  src/lib/Server/Lint/Validate.py        185
-rw-r--r--  src/lib/Server/Lint/__init__.py         90
-rwxr-xr-x  src/sbin/bcfg2-lint                    167
-rwxr-xr-x  src/sbin/bcfg2-repo-validate           328
12 files changed, 930 insertions(+), 330 deletions(-)
diff --git a/examples/bcfg2-lint.conf b/examples/bcfg2-lint.conf
new file mode 100644
index 000000000..5c7641d4a
--- /dev/null
+++ b/examples/bcfg2-lint.conf
@@ -0,0 +1,20 @@
+[main]
+plugins=Duplicates,InfoXML,Bundles,Comments,RequiredAttrs,Validate
+
+[InfoXML]
+require = owner,group,perms,paranoid
+require_paranoid = True
+
+[Comments]
+global_keywords = Id
+sgenshi_comments = Properties,Probes,Description
+properties_comments = Template,Format
+tgenshi_comments = Maintainer,Properties,Probes,Description
+cfg_comments =
+cfg_keywords =
+probe_comments = Maintainer,Purpose,Groups,Other Output
+
+[Validate]
+schema=/home/stpierre/devel/bcfg2/schema
+repo=/home/stpierre/bcfg2/trunk
+properties_schema=warn
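For context on how these sections reach the plugins: bcfg2-lint (below) merges the config section named after each plugin with the parsed command-line options into a single dict that the plugin sees as self.config, so option values arrive as plain strings. A rough sketch, assuming a hypothetical max_length option in a hypothetical [Example] section:

    import Bcfg2.Server.Lint

    class Example(Bcfg2.Server.Lint.ServerPlugin):
        @Bcfg2.Server.Lint.returnErrors
        def Run(self):
            # "max_length" would come from the [Example] section; options
            # such as self.config['repo'] come from the option parser and
            # live in the same dict
            if "max_length" in self.config:
                max_length = int(self.config["max_length"])
            else:
                max_length = 1024
            # ... use max_length in some check ...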
diff --git a/src/lib/Logger.py b/src/lib/Logger.py
index a9c4372b7..ae73a6d41 100644
--- a/src/lib/Logger.py
+++ b/src/lib/Logger.py
@@ -158,17 +158,23 @@ class FragmentingSysLogHandler(logging.handlers.SysLogHandler):
pass
-def setup_logging(procname, to_console=True, to_syslog=True, syslog_facility='daemon', level=0, to_file=None):
+def setup_logging(procname, to_console=True, to_syslog=True,
+ syslog_facility='daemon', level=0, to_file=None):
"""Setup logging for Bcfg2 software."""
if hasattr(logging, 'already_setup'):
return
+
# add the handler to the root logger
if to_console:
console = logging.StreamHandler(sys.stdout)
- console.setLevel(logging.DEBUG)
+ if to_console is True:
+ console.setLevel(logging.DEBUG)
+ else:
+ console.setLevel(to_console)
# tell the handler to use this format
console.setFormatter(TermiosFormatter())
logging.root.addHandler(console)
+
if to_syslog:
try:
try:
@@ -186,11 +192,13 @@ def setup_logging(procname, to_console=True, to_syslog=True, syslog_facility='da
logging.root.error("failed to activate syslogging")
except:
print("Failed to activate syslogging")
+
if not to_file == None:
filelog = logging.FileHandler(to_file)
filelog.setLevel(logging.DEBUG)
filelog.setFormatter(logging.Formatter('%(asctime)s %(name)s[%(process)d]: %(message)s'))
logging.root.addHandler(filelog)
+
logging.root.setLevel(level)
logging.already_setup = True
diff --git a/src/lib/Server/Lint/Bundles.py b/src/lib/Server/Lint/Bundles.py
new file mode 100644
index 000000000..a1ce631c9
--- /dev/null
+++ b/src/lib/Server/Lint/Bundles.py
@@ -0,0 +1,56 @@
+import lxml.etree
+import Bcfg2.Server.Lint
+
+class Bundles(Bcfg2.Server.Lint.ServerPlugin):
+ """ Perform various bundle checks """
+
+ @Bcfg2.Server.Lint.returnErrors
+ def Run(self):
+ """ run plugin """
+ self.missing_bundles()
+ self.bundle_names()
+ self.sgenshi_groups()
+
+ def missing_bundles(self):
+ """ find bundles listed in Metadata but not implemented in Bundler """
+ groupdata = self.metadata.groups_xml.xdata
+ ref_bundles = set([b.get("name")
+ for b in groupdata.findall("//Bundle")])
+
+ allbundles = self.core.plugins['Bundler'].entries.keys()
+ for bundle in ref_bundles:
+ xmlbundle = "%s.xml" % bundle
+ genshibundle = "%s.genshi" % bundle
+ if xmlbundle not in allbundles and genshibundle not in allbundles:
+ self.LintError("Bundle %s referenced, but does not exist" %
+ bundle)
+
+ def bundle_names(self):
+ """ verify bundle name attribute matches filename """
+ for bundle in self.core.plugins['Bundler'].entries.values():
+ if self.HandlesFile(bundle.name):
+ try:
+ xdata = lxml.etree.XML(bundle.data)
+ except AttributeError:
+ # genshi template
+ xdata = lxml.etree.parse(bundle.template.filepath).getroot()
+
+ fname = bundle.name.split('Bundler/')[1].split('.')[0]
+ bname = xdata.get('name')
+ if fname != bname:
+ self.LintWarning("Inconsistent bundle name: filename is %s, bundle name is %s" %
+ (fname, bname))
+
+ def sgenshi_groups(self):
+ """ ensure that Genshi Bundles do not include <Group> tags,
+ which are not supported """
+ for bundle in self.core.plugins['Bundler'].entries.values():
+ if self.HandlesFile(bundle.name):
+ if (type(bundle) is
+ Bcfg2.Server.Plugins.SGenshi.SGenshiTemplateFile):
+ xdata = lxml.etree.parse(bundle.name)
+ groups = [self.RenderXML(g)
+ for g in xdata.getroottree().findall("//Group")]
+ if groups:
+ self.LintError("<Group> tag is not allowed in SGenshi Bundle:\n%s" %
+ "\n".join(groups))
diff --git a/src/lib/Server/Lint/Comments.py b/src/lib/Server/Lint/Comments.py
new file mode 100644
index 000000000..0b50df373
--- /dev/null
+++ b/src/lib/Server/Lint/Comments.py
@@ -0,0 +1,183 @@
+import os.path
+import lxml.etree
+import Bcfg2.Server.Lint
+
+class Comments(Bcfg2.Server.Lint.ServerPlugin):
+ """ check files for various required headers """
+ def __init__(self, *args, **kwargs):
+ Bcfg2.Server.Lint.ServerPlugin.__init__(self, *args, **kwargs)
+ self.config_cache = {}
+
+ @Bcfg2.Server.Lint.returnErrors
+ def Run(self):
+ self.check_bundles()
+ self.check_properties()
+ self.check_metadata()
+ self.check_cfg()
+ self.check_infoxml()
+ self.check_probes()
+
+ def required_keywords(self, rtype):
+ """ given a file type, fetch the list of required VCS keywords
+ from the bcfg2-lint config """
+ return self.required_items(rtype, "keyword", default=["Id"])
+
+ def required_comments(self, rtype):
+ """ given a file type, fetch the list of required comments
+ from the bcfg2-lint config """
+ return self.required_items(rtype, "comment")
+
+ def required_items(self, rtype, itype, default=None):
+ """ given a file type and item type (comment or keyword),
+ fetch the list of required items from the bcfg2-lint config """
+ if itype not in self.config_cache:
+ self.config_cache[itype] = {}
+
+ if rtype not in self.config_cache[itype]:
+ rv = []
+ global_item = "global_%ss" % itype
+ if global_item in self.config:
+ rv.extend(self.config[global_item].split(","))
+ elif default is not None:
+ rv.extend(default)
+
+ item = "%s_%ss" % (rtype.lower(), itype)
+ if item in self.config:
+ if self.config[item]:
+ rv.extend(self.config[item].split(","))
+ else:
+ # config explicitly specifies nothing
+ rv = []
+ self.config_cache[itype][rtype] = rv
+ return self.config_cache[itype][rtype]
+
+ def check_bundles(self):
+ """ check bundle files for required headers """
+ for bundle in self.core.plugins['Bundler'].entries.values():
+ xdata = None
+ rtype = ""
+ try:
+ xdata = lxml.etree.XML(bundle.data)
+ rtype = "bundler"
+ except AttributeError:
+ xdata = lxml.etree.parse(bundle.template.filepath).getroot()
+ rtype = "sgenshi"
+
+ self.check_xml(bundle.name, xdata, rtype)
+
+ def check_properties(self):
+ """ check properties files for required headers """
+ if 'Properties' in self.core.plugins:
+ props = self.core.plugins['Properties']
+ for propfile, pdata in props.store.entries.items():
+ if os.path.splitext(propfile)[1] == ".xml":
+ self.check_xml(pdata.name, pdata.data, 'properties')
+
+ def check_metadata(self):
+ """ check metadata files for required headers """
+ metadata = self.core.plugins['Metadata']
+ if self.has_all_xincludes("groups.xml"):
+ self.check_xml(os.path.join(metadata.data, "groups.xml"),
+ metadata.groups_xml.data,
+ "metadata")
+ if self.has_all_xincludes("clients.xml"):
+ self.check_xml(os.path.join(metadata.data, "clients.xml"),
+ metadata.clients_xml.data,
+ "metadata")
+
+ def check_cfg(self):
+ """ check Cfg files for required headers """
+ for entryset in self.core.plugins['Cfg'].entries.values():
+ for entry in entryset.entries.values():
+ if entry.name.endswith(".genshi"):
+ rtype = "tgenshi"
+ else:
+ rtype = "cfg"
+ self.check_plaintext(entry.name, entry.data, rtype)
+
+ def check_infoxml(self):
+ """ check info.xml files for required headers """
+        for entryset in self.core.plugins['Cfg'].entries.values():
+ if hasattr(entryset, "infoxml") and entryset.infoxml is not None:
+ self.check_xml(entryset.infoxml.name,
+ entryset.infoxml.pnode.data,
+ "infoxml")
+
+ def check_probes(self):
+ """ check probes for required headers """
+ if 'Probes' in self.core.plugins:
+ for probe in self.core.plugins['Probes'].probes.entries.values():
+ self.check_plaintext(probe.name, probe.data, "probes")
+
+ def check_xml(self, filename, xdata, rtype):
+ """ check generic XML files for required headers """
+ self.check_lines(filename,
+ [str(el)
+ for el in xdata.getiterator(lxml.etree.Comment)],
+ rtype)
+
+ def check_plaintext(self, filename, data, rtype):
+        """ check generic plaintext files for required headers """
+ self.check_lines(filename, data.splitlines(), rtype)
+
+ def check_lines(self, filename, lines, rtype):
+ """ generic header check for a set of lines """
+ if self.HandlesFile(filename):
+ # found is trivalent:
+ # False == not found
+ # None == found but not expanded
+ # True == found and expanded
+ found = dict((k, False) for k in self.required_keywords(rtype))
+
+ for line in lines:
+ # we check for both '$<keyword>:' and '$<keyword>$' to see
+ # if the keyword just hasn't been expanded
+ for (keyword, status) in found.items():
+ if not status:
+ if '$%s:' % keyword in line:
+ found[keyword] = True
+ elif '$%s$' % keyword in line:
+ found[keyword] = None
+
+ unexpanded = [keyword for (keyword, status) in found.items()
+ if status is None]
+ if unexpanded:
+                self.LintError("%s: Required keyword(s) found but not expanded: %s" %
+ (filename, ", ".join(unexpanded)))
+ missing = [keyword for (keyword, status) in found.items()
+ if status is False]
+ if missing:
+                self.LintError("%s: Required keyword(s) not found: %s" %
+ (filename, ", ".join(missing)))
+
+ # next, check for required comments. found is just
+ # boolean
+ found = dict((k, False) for k in self.required_comments(rtype))
+
+ for line in lines:
+ for (comment, status) in found.items():
+ if not status:
+ found[comment] = comment in line
+
+ missing = [comment for (comment, status) in found.items()
+ if status is False]
+ if missing:
+                self.LintError("%s: Required comment(s) not found: %s" %
+ (filename, ", ".join(missing)))
+
+ def has_all_xincludes(self, mfile):
+ """ return true if self.files includes all XIncludes listed in
+ the specified metadata type, false otherwise"""
+ if self.files is None:
+ return True
+ else:
+ path = os.path.join(self.metadata.data, mfile)
+ if path in self.files:
+ xdata = lxml.etree.parse(path)
+ for el in xdata.findall('./{http://www.w3.org/2001/XInclude}include'):
+ if not self.has_all_xincludes(el.get('href')):
+ self.LintWarning("Broken XInclude chain: could not include %s" % path)
+ return False
+
+ return True
+
diff --git a/src/lib/Server/Lint/Duplicates.py b/src/lib/Server/Lint/Duplicates.py
new file mode 100644
index 000000000..c8b542025
--- /dev/null
+++ b/src/lib/Server/Lint/Duplicates.py
@@ -0,0 +1,79 @@
+import os.path
+import lxml.etree
+import Bcfg2.Server.Lint
+
+class Duplicates(Bcfg2.Server.Lint.ServerPlugin):
+ """ Find duplicate clients, groups, etc. """
+ def __init__(self, *args, **kwargs):
+ Bcfg2.Server.Lint.ServerPlugin.__init__(self, *args, **kwargs)
+ self.groups_xdata = None
+ self.clients_xdata = None
+ self.load_xdata()
+
+ @Bcfg2.Server.Lint.returnErrors
+ def Run(self):
+ """ run plugin """
+ # only run this plugin if we were not given a list of files.
+ # not only is it marginally silly to run this plugin with a
+ # partial list of files, it turns out to be really freaking
+ # hard to get only a fragment of group or client metadata
+ if self.groups_xdata is not None:
+ self.duplicate_groups()
+ self.duplicate_defaults()
+ if self.clients_xdata is not None:
+ self.duplicate_clients()
+
+ def load_xdata(self):
+ """ attempt to load XML data for groups and clients. only
+        actually load data if all documents referenced in XIncludes can
+ be found in self.files"""
+ if self.has_all_xincludes("groups.xml"):
+            self.groups_xdata = self.metadata.groups_xml.xdata
+ if self.has_all_xincludes("clients.xml"):
+ self.clients_xdata = self.metadata.clients_xml.xdata
+
+ def duplicate_groups(self):
+ """ find duplicate groups """
+        self.duplicate_entries(self.groups_xdata.xpath('//Groups/Group'),
+                               'group')
+
+ def duplicate_clients(self):
+ """ find duplicate clients """
+ self.duplicate_entries(self.clients_xdata.xpath('//Clients/Client'),
+ 'client')
+
+ def duplicate_entries(self, data, etype):
+ """ generic duplicate entry finder """
+ seen = {}
+ for el in data:
+ if el.get('name') not in seen:
+ seen[el.get('name')] = el
+ else:
+ self.LintError("Duplicate %s '%s':\n%s\n%s" %
+ (etype, el.get('name'),
+ self.RenderXML(seen[el.get('name')]),
+ self.RenderXML(el)))
+
+ def duplicate_defaults(self):
+ """ check for multiple default group definitions """
+ default_groups = [g for g in self.groups_xdata.findall('.//Group')
+ if g.get('default') == 'true']
+ if len(default_groups) > 1:
+ self.LintError("Multiple default groups defined: %s" %
+                           ",".join([g.get('name') for g in default_groups]))
+
+ def has_all_xincludes(self, mfile):
+ """ return true if self.files includes all XIncludes listed in
+ the specified metadata type, false otherwise"""
+ if self.files is None:
+ return True
+ else:
+ path = os.path.join(self.metadata.data, mfile)
+ if path in self.files:
+ xdata = lxml.etree.parse(path)
+ for el in xdata.findall('./{http://www.w3.org/2001/XInclude}include'):
+ if not self.has_all_xincludes(el.get('href')):
+ self.LintWarning("Broken XInclude chain: could not include %s" % path)
+ return False
+
+ return True
diff --git a/src/lib/Server/Lint/InfoXML.py b/src/lib/Server/Lint/InfoXML.py
new file mode 100644
index 000000000..097c2d6f9
--- /dev/null
+++ b/src/lib/Server/Lint/InfoXML.py
@@ -0,0 +1,38 @@
+import os.path
+import Bcfg2.Options
+import Bcfg2.Server.Lint
+
+class InfoXML(Bcfg2.Server.Lint.ServerPlugin):
+ """ ensure that all config files have an info.xml file"""
+
+ @Bcfg2.Server.Lint.returnErrors
+ def Run(self):
+ for filename, entryset in self.core.plugins['Cfg'].entries.items():
+ infoxml_fname = os.path.join(entryset.path, "info.xml")
+ if self.HandlesFile(infoxml_fname):
+ if (hasattr(entryset, "infoxml") and
+ entryset.infoxml is not None):
+ xdata = entryset.infoxml.pnode.data
+ for info in xdata.getroottree().findall("//Info"):
+ required = ["owner", "group", "perms"]
+ if "required" in self.config:
+ required = self.config["required"].split(",")
+
+ missing = [attr for attr in required
+ if info.get(attr) is None]
+ if missing:
+ self.LintError("Required attribute(s) %s not found in %s:%s" %
+ (",".join(missing), infoxml_fname,
+ self.RenderXML(info)))
+
+ if ("require_paranoid" in self.config and
+ self.config["require_paranoid"].lower() == "true" and
+ not Bcfg2.Options.MDATA_PARANOID.value and
+                        info.get("paranoid", "false").lower() != "true"):
+ self.LintError("Paranoid must be true in %s:%s" %
+ (infoxml_fname,
+ self.RenderXML(info)))
+ elif ("require" in self.config and
+                    self.config["require"].lower() != "false"):
+ self.LintError("No info.xml found for %s" % filename)
+
diff --git a/src/lib/Server/Lint/Pkgmgr.py b/src/lib/Server/Lint/Pkgmgr.py
new file mode 100644
index 000000000..28ca698bd
--- /dev/null
+++ b/src/lib/Server/Lint/Pkgmgr.py
@@ -0,0 +1,33 @@
+import Bcfg2.Server.Lint
+
+class Pkgmgr(Bcfg2.Server.Lint.ServerPlugin):
+ """ find duplicate Pkgmgr entries with the same priority """
+
+ @Bcfg2.Server.Lint.returnErrors
+ def Run(self):
+ pset = set()
+ for plist in self.core.plugins['Pkgmgr'].entries.values():
+ if self.HandlesFile(plist.name):
+ xdata = plist.data
+ # get priority, type, group
+ priority = xdata.getroot().get('priority')
+ ptype = xdata.getroot().get('type')
+ for pkg in xdata.findall("//Package"):
+ if pkg.getparent().tag == 'Group':
+ grp = pkg.getparent().get('name')
+ if (type(grp) is not str and
+ grp.getparent().tag == 'Group'):
+ pgrp = grp.getparent().get('name')
+ else:
+ pgrp = 'none'
+ else:
+ grp = 'none'
+ pgrp = 'none'
+ ptuple = (pkg.get('name'), priority, ptype, grp, pgrp)
+ # check if package is already listed with same
+ # priority, type, grp
+ if ptuple in pset:
+ self.LintWarning("Duplicate Package %s, priority:%s, type:%s" %
+ (pkg.get('name'), priority, ptype))
+ else:
+ pset.add(ptuple)
diff --git a/src/lib/Server/Lint/RequiredAttrs.py b/src/lib/Server/Lint/RequiredAttrs.py
new file mode 100644
index 000000000..7215fe163
--- /dev/null
+++ b/src/lib/Server/Lint/RequiredAttrs.py
@@ -0,0 +1,69 @@
+import os.path
+import lxml.etree
+import Bcfg2.Server.Lint
+
+class RequiredAttrs(Bcfg2.Server.Lint.ServerPlugin):
+ """ verify attributes for configuration entries (as defined in
+ doc/server/configurationentries) """
+
+ def __init__(self, *args, **kwargs):
+ Bcfg2.Server.Lint.ServerPlugin.__init__(self, *args, **kwargs)
+ self.required_attrs = {
+ 'device': ['name', 'owner', 'group', 'dev_type'],
+ 'directory': ['name', 'owner', 'group', 'perms'],
+ 'file': ['name', 'owner', 'group', 'perms'],
+ 'hardlink': ['name', 'to'],
+ 'symlink': ['name', 'to'],
+ 'ignore': ['name'],
+ 'nonexistent': ['name'],
+ 'permissions': ['name', 'owner', 'group', 'perms']}
+
+ @Bcfg2.Server.Lint.returnErrors
+ def Run(self):
+ self.check_rules()
+ self.check_bundles()
+
+ def check_rules(self):
+ """ check Rules for Path entries with missing attrs """
+ if 'Rules' in self.core.plugins:
+ for rules in self.core.plugins['Rules'].entries.values():
+ xdata = rules.pnode.data
+ for path in xdata.xpath("//Path"):
+ self.check_entry(path, os.path.join(self.config['repo'],
+ rules.name))
+
+ def check_bundles(self):
+ """ check bundles for BoundPath entries with missing attrs """
+ for bundle in self.core.plugins['Bundler'].entries.values():
+ try:
+ xdata = lxml.etree.XML(bundle.data)
+ except AttributeError:
+ xdata = lxml.etree.parse(bundle.template.filepath).getroot()
+
+ for path in xdata.xpath("//BoundPath"):
+ self.check_entry(path, bundle.name)
+
+ def check_entry(self, entry, filename):
+ """ generic entry check """
+ if self.HandlesFile(filename):
+ pathname = entry.get('name')
+ pathtype = entry.get('type')
+ pathset = set(entry.attrib.keys())
+            if pathtype not in self.required_attrs:
+                self.LintError("Unknown path type %s: %s" %
+                               (pathtype, self.RenderXML(entry)))
+                return
+            required_attrs = set(self.required_attrs[pathtype] + ['type'])
+
+ if 'dev_type' in required_attrs:
+ dev_type = entry.get('dev_type')
+ if dev_type in ['block', 'char']:
+ # check if major/minor are specified
+ required_attrs |= set(['major', 'minor'])
+ if not pathset.issuperset(required_attrs):
+                self.LintError("The required attributes %s are missing for %s %s in %s:\n%s" %
+ (",".join([attr
+ for attr in
+ required_attrs.difference(pathset)]),
+ entry.tag, pathname, filename,
+ self.RenderXML(entry)))
diff --git a/src/lib/Server/Lint/Validate.py b/src/lib/Server/Lint/Validate.py
new file mode 100644
index 000000000..bb5af93f4
--- /dev/null
+++ b/src/lib/Server/Lint/Validate.py
@@ -0,0 +1,185 @@
+import glob
+import lxml.etree
+import os
+import fnmatch
+import Bcfg2.Options
+import Bcfg2.Server.Lint
+from subprocess import Popen, PIPE, STDOUT
+
+class Validate(Bcfg2.Server.Lint.ServerlessPlugin):
+ """ Ensure that the repo validates """
+
+ def __init__(self, *args, **kwargs):
+ Bcfg2.Server.Lint.ServerlessPlugin.__init__(self, *args, **kwargs)
+ self.filesets = {"metadata:groups":"%s/metadata.xsd",
+ "metadata:clients":"%s/clients.xsd",
+ "info":"%s/info.xsd",
+ "%s/Bundler/*.{xml,genshi}":"%s/bundle.xsd",
+ "%s/Pkgmgr/*.xml":"%s/pkglist.xsd",
+ "%s/Base/*.xml":"%s/base.xsd",
+ "%s/Rules/*.xml":"%s/rules.xsd",
+ "%s/etc/report-configuration.xml":"%s/report-configuration.xsd",
+ "%s/Svcmgr/*.xml":"%s/services.xsd",
+ "%s/Deps/*.xml":"%s/deps.xsd",
+ "%s/Decisions/*.xml":"%s/decisions.xsd",
+ "%s/Packages/config.xml":"%s/packages.xsd",
+ "%s/GroupPatterns/config.xml":"%s/grouppatterns.xsd"}
+
+ self.filelists = {}
+ self.get_filelists()
+
+ @Bcfg2.Server.Lint.returnErrors
+ def Run(self):
+ self.schemadir = self.config['schema']
+
+        for path, schemaname in self.filesets.items():
+ try:
+ filelist = self.filelists[path]
+ except KeyError:
+ filelist = []
+
+ if filelist:
+ # avoid loading schemas for empty file lists
+ try:
+                    schema = lxml.etree.XMLSchema(lxml.etree.parse(schemaname %
+                                                                   self.schemadir))
+                except:
+                    self.LintWarning("Failed to process schema %s" %
+                                     (schemaname % self.schemadir))
+                    continue
+                for filename in filelist:
+                    self.validate(filename, schemaname % self.schemadir,
+                                  schema=schema)
+
+ self.check_properties()
+
+ def check_properties(self):
+ """ check Properties files against their schemas """
+ alert = self.logger.debug
+ if "properties_schema" in self.config:
+ if self.config['properties_schema'].lower().startswith('warn'):
+ alert = self.LintWarning
+ elif self.config['properties_schema'].lower().startswith('require'):
+ alert = self.LintError
+
+ for filename in self.filelists['props']:
+ schemafile = "%s.xsd" % os.path.splitext(filename)[0]
+ if os.path.exists(schemafile):
+ self.validate(filename, schemafile)
+ else:
+ alert("No schema found for %s" % filename)
+
+ def validate(self, filename, schemafile, schema=None):
+ """validate a file against the given lxml.etree.Schema.
+ return True on success, False on failure """
+ if schema is None:
+ # if no schema object was provided, instantiate one
+ try:
+ schema = lxml.etree.XMLSchema(lxml.etree.parse(schemafile))
+ except:
+ self.LintWarning("Failed to process schema %s" % schemafile)
+ return False
+
+ try:
+ datafile = lxml.etree.parse(filename)
+ except SyntaxError:
+ lint = Popen(["xmllint", filename], stdout=PIPE, stderr=STDOUT)
+ self.LintError("%s fails to parse:\n%s" % (filename,
+ lint.communicate()[0]))
+ lint.wait()
+ return False
+ except IOError:
+ self.LintError("Failed to open file %s" % filename)
+ return False
+
+ if not schema.validate(datafile):
+ cmd = ["xmllint"]
+ if self.files is None:
+ cmd.append("--xinclude")
+ cmd.extend(["--noout", "--schema", schemafile, filename])
+ lint = Popen(cmd, stdout=PIPE, stderr=STDOUT)
+ output = lint.communicate()[0]
+ if lint.wait():
+ self.LintError("%s fails to verify:\n%s" % (filename, output))
+ return False
+ return True
+
+ def get_filelists(self):
+ """ get lists of different kinds of files to validate """
+ if self.files is not None:
+ listfiles = lambda p: fnmatch.filter(self.files, p % "*")
+ else:
+ listfiles = lambda p: glob.glob(p % self.config['repo'])
+
+ for path in self.filesets.keys():
+ if path.startswith("metadata:"):
+ mtype = path.split(":")[1]
+ self.filelists[path] = self.get_metadata_list(mtype)
+ elif path == "info":
+ if self.files is not None:
+ self.filelists[path] = \
+ [f for f in self.files
+ if os.path.basename(f) == 'info.xml']
+ else: # self.files is None
+ self.filelists[path] = []
+ for infodir in ['Cfg', 'TGenshi', 'TCheetah']:
+ for root, dirs, files in os.walk('%s/%s' %
+ (self.config['repo'],
+ infodir)):
+ self.filelists[path].extend([os.path.join(root, f)
+ for f in files
+ if f == 'info.xml'])
+ else:
+ self.filelists[path] = listfiles(path)
+
+ self.filelists['props'] = listfiles("%s/Properties/*.xml")
+ all_metadata = listfiles("%s/Metadata/*.xml")
+
+ # if there are other files in Metadata that aren't xincluded
+ # from clients.xml or groups.xml, we can't verify them. warn
+ # about those.
+ for fname in all_metadata:
+ if (fname not in self.filelists['metadata:groups'] and
+ fname not in self.filelists['metadata:clients']):
+ self.LintWarning("Broken XInclude chain: Could not determine file type of %s" % fname)
+
+ def get_metadata_list(self, mtype):
+ """ get all metadata files for the specified type (clients or
+ group) """
+ if self.files is not None:
+ rv = fnmatch.filter(self.files, "*/Metadata/%s.xml" % mtype)
+ else:
+ rv = glob.glob("%s/Metadata/%s.xml" % (self.config['repo'], mtype))
+
+ # attempt to follow XIncludes. if the top-level files aren't
+ # listed in self.files, though, there's really nothing we can
+ # do to guess what a file in Metadata is
+ if rv:
+ rv.extend(self.follow_xinclude(rv[0]))
+
+ return rv
+
+ def follow_xinclude(self, xfile):
+ """ follow xincludes in the given file """
+ xdata = lxml.etree.parse(xfile)
+ included = set([ent.get('href') for ent in
+ xdata.findall('./{http://www.w3.org/2001/XInclude}include')])
+ rv = []
+
+ while included:
+ try:
+ filename = included.pop()
+ except KeyError:
+ continue
+
+ path = os.path.join(os.path.dirname(xfile), filename)
+ if self.HandlesFile(path):
+ rv.append(path)
+ groupdata = lxml.etree.parse(path)
+ [included.add(el.get('href'))
+ for el in
+ groupdata.findall('./{http://www.w3.org/2001/XInclude}include')]
+ included.discard(filename)
+
+ return rv
+
diff --git a/src/lib/Server/Lint/__init__.py b/src/lib/Server/Lint/__init__.py
new file mode 100644
index 000000000..4e6d03fb5
--- /dev/null
+++ b/src/lib/Server/Lint/__init__.py
@@ -0,0 +1,90 @@
+__revision__ = '$Revision$'
+
+__all__ = ['Bundles',
+ 'Comments',
+ 'Duplicates',
+ 'InfoXML',
+ 'Pkgmgr',
+ 'RequiredAttrs',
+ 'Validate']
+
+import logging
+import os.path
+from copy import copy
+import lxml.etree
+import Bcfg2.Logger
+
+def returnErrors(fn):
+ """ Decorator for Run method that returns error counts """
+ def run(self, *args, **kwargs):
+ fn(self, *args, **kwargs)
+ return (self.error_count, self.warning_count)
+
+ return run
+
+class Plugin (object):
+ """ base class for ServerlessPlugin and ServerPlugin """
+ def __init__(self, config, files=None):
+ self.files = files
+ self.error_count = 0
+ self.warning_count = 0
+ self.config = config
+        Bcfg2.Logger.setup_logging('bcfg2-lint', to_syslog=False)
+ self.logger = logging.getLogger('bcfg2-lint')
+
+ def Run(self):
+ """ run the plugin. must be overloaded by child classes """
+ pass
+
+ def HandlesFile(self, fname):
+ """ returns true if the given file should be handled by the
+ plugin according to the files list, false otherwise """
+ return (self.files is None or
+ fname in self.files or
+ os.path.join(self.config['repo'], fname) in self.files or
+ os.path.abspath(fname) in self.files or
+ os.path.abspath(os.path.join(self.config['repo'],
+ fname)) in self.files)
+
+ def LintError(self, msg):
+ """ log an error condition """
+ self.error_count += 1
+ lines = msg.splitlines()
+        self.logger.error("ERROR: %s" % lines.pop(0))
+ [self.logger.error(" %s" % l) for l in lines]
+
+ def LintWarning(self, msg):
+ """ log a warning condition """
+ self.warning_count += 1
+ lines = msg.splitlines()
+        self.logger.warning("WARNING: %s" % lines.pop(0))
+ [self.logger.warning(" %s" % l) for l in lines]
+
+ def RenderXML(self, element):
+ """render an XML element for error output -- line number
+ prefixed, no children"""
+ xml = None
+ if len(element) or element.text:
+ el = copy(element)
+ if el.text:
+ el.text = '...'
+ [el.remove(c) for c in el.iterchildren()]
+ xml = lxml.etree.tostring(el).strip()
+ else:
+ xml = lxml.etree.tostring(element).strip()
+ return " line %s: %s" % (element.sourceline, xml)
+
+class ServerlessPlugin (Plugin):
+ """ base class for plugins that are run before the server starts
+ up (i.e., plugins that check things that may prevent the server
+ from starting up) """
+ pass
+
+class ServerPlugin (Plugin):
+ """ base class for plugins that check things that require the
+ running Bcfg2 server """
+ def __init__(self, lintCore, config, files=None):
+ Plugin.__init__(self, config, files=files)
+ self.core = lintCore
+ self.logger = self.core.logger
+ self.metadata = self.core.metadata
diff --git a/src/sbin/bcfg2-lint b/src/sbin/bcfg2-lint
new file mode 100755
index 000000000..42c077d63
--- /dev/null
+++ b/src/sbin/bcfg2-lint
@@ -0,0 +1,167 @@
+#!/usr/bin/env python
+
+"""This tool examines your Bcfg2 specifications for errors."""
+__revision__ = '$Revision$'
+
+import sys
+import inspect
+import logging
+import ConfigParser
+import Bcfg2.Logger
+import Bcfg2.Options
+import Bcfg2.Server.Core
+import Bcfg2.Server.Lint
+
+logger = logging.getLogger('bcfg2-lint')
+
+class Parser(ConfigParser.ConfigParser):
+ def get(self, section, option, default):
+ """ Override ConfigParser.get: If the request option is not in
+ the config file then return the value of default rather than
+ raise an exception. We still raise exceptions on missing
+ sections.
+ """
+ try:
+ return ConfigParser.ConfigParser.get(self, section, option)
+ except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
+ return default
+
+def run_serverless_plugins(plugins, config=None, setup=None, files=None):
+ logger.debug("Running serverless plugins")
+ errors = (0, 0)
+ for plugin_name, plugin in plugins.items():
+ plugin_errors = run_plugin(plugin, plugin_name,
+ setup=setup, config=config, files=files)
+ errors = [errors[n] + plugin_errors[n]
+ for n in range(0, len(errors))]
+ return errors
+
+def run_server_plugins(plugins, config=None, setup=None, files=None):
+ core = load_server(setup)
+ logger.debug("Running server plugins")
+ errors = (0, 0)
+ for plugin_name, plugin in plugins.items():
+ plugin_errors = run_plugin(plugin, plugin_name, args=[core],
+ setup=setup, config=config, files=files)
+ errors = [errors[n] + plugin_errors[n]
+ for n in range(0, len(errors))]
+ return errors
+
+def run_plugin(plugin, plugin_name, setup=None, args=None, config=None,
+ files=None):
+ logger.debug(" Running %s" % plugin_name)
+ if args is None:
+ args = []
+
+ if config is not None and config.has_section(plugin_name):
+ args.append(dict(config.items(plugin_name), **setup))
+ else:
+ args.append(setup)
+
+ return plugin(*args, files=files).Run()
+ # try:
+ # return plugin(*args, files=files).Run()
+ # except Exception, err:
+ # logger.error("Failed to run plugin %s: %s" % (plugin, err))
+ # raise SystemExit(1)
+
+def load_server(setup):
+ """ load server """
+ core = Bcfg2.Server.Core.Core(setup['repo'], setup['plugins'],
+ setup['password'], setup['encoding'])
+ if setup['event debug']:
+ core.fam.debug = True
+ core.fam.handle_events_in_interval(4)
+ return core
+
+if __name__ == '__main__':
+ optinfo = {
+ 'configfile': Bcfg2.Options.CFILE,
+ 'help': Bcfg2.Options.HELP,
+ 'verbose': Bcfg2.Options.VERBOSE,
+ }
+ optinfo.update({
+ 'event debug': Bcfg2.Options.DEBUG,
+ 'encoding': Bcfg2.Options.ENCODING,
+ # Server options
+ 'repo': Bcfg2.Options.SERVER_REPOSITORY,
+ 'plugins': Bcfg2.Options.SERVER_PLUGINS,
+ 'mconnect': Bcfg2.Options.SERVER_MCONNECT,
+ 'filemonitor': Bcfg2.Options.SERVER_FILEMONITOR,
+ 'location': Bcfg2.Options.SERVER_LOCATION,
+ 'static': Bcfg2.Options.SERVER_STATIC,
+ 'key': Bcfg2.Options.SERVER_KEY,
+ 'cert': Bcfg2.Options.SERVER_CERT,
+ 'ca': Bcfg2.Options.SERVER_CA,
+ 'password': Bcfg2.Options.SERVER_PASSWORD,
+ 'protocol': Bcfg2.Options.SERVER_PROTOCOL,
+ # More options
+ 'logging': Bcfg2.Options.LOGGING_FILE_PATH,
+ 'stdin': Bcfg2.Options.FILES_ON_STDIN,
+ 'schema': Bcfg2.Options.SCHEMA_PATH,
+ 'config': Bcfg2.Options.Option('Specify bcfg2-lint configuration file',
+ '/etc/bcfg2-lint.conf',
+ cmd='--lint-config',
+ odesc='<conffile>',
+ long_arg = True),
+ })
+ setup = Bcfg2.Options.OptionParser(optinfo)
+ setup.parse(sys.argv[1:])
+
+ log_args = dict(to_syslog=False, to_console=logging.WARNING)
+ if setup['verbose']:
+ log_args['to_console'] = logging.DEBUG
+    Bcfg2.Logger.setup_logging('bcfg2-lint', **log_args)
+
+ config = Parser()
+ config.read(setup['config'])
+
+ # get list of plugins to run
+ if setup['args']:
+ allplugins = setup['args']
+ elif "bcfg2-repo-validate" in sys.argv[0]:
+ allplugins = 'Duplicates,RequiredAttrs,Validate'.split(',')
+ else:
+ allplugins = config.get('main', 'plugins',
+ ",".join(Bcfg2.Server.Lint.__all__)).split(',')
+
+ if setup['stdin']:
+ files = [s.strip() for s in sys.stdin.readlines()]
+ else:
+ files = None
+
+ # load plugins
+ serverplugins = {}
+ serverlessplugins = {}
+ for plugin_name in allplugins:
+ try:
+ mod = getattr(__import__("Bcfg2.Server.Lint.%s" %
+ (plugin_name)).Server.Lint, plugin_name)
+ except ImportError:
+ try:
+ mod = __import__(plugin_name)
+ except Exception, err:
+ logger.error("Failed to load plugin %s: %s" % (plugin_name,
+ err))
+ raise SystemExit(1)
+ plugin = getattr(mod, plugin_name)
+ if [c for c in inspect.getmro(plugin)
+ if c == Bcfg2.Server.Lint.ServerPlugin]:
+ serverplugins[plugin_name] = plugin
+ else:
+ serverlessplugins[plugin_name] = plugin
+
+ # errors is a tuple of (errors, warnings)
+    errors = run_serverless_plugins(serverlessplugins, config=config,
+                                    setup=setup, files=files)
+
+ if serverplugins:
+        perrors = run_server_plugins(serverplugins, config=config, setup=setup, files=files)
+ errors = [errors[n] + perrors[n] for n in range(0, len(errors))]
+
+ print "%d errors" % errors[0]
+ print "%d warnings" % errors[1]
+ if errors[0]:
+ raise SystemExit(2)
+ elif errors[1]:
+ raise SystemExit(3)
diff --git a/src/sbin/bcfg2-repo-validate b/src/sbin/bcfg2-repo-validate
deleted file mode 100755
index e82b57659..000000000
--- a/src/sbin/bcfg2-repo-validate
+++ /dev/null
@@ -1,328 +0,0 @@
-#!/usr/bin/env python
-
-"""
-bcfg2-repo-validate checks all xml files in Bcfg2
-repos against their respective XML schemas.
-"""
-__revision__ = '$Revision$'
-
-import fnmatch
-import glob
-import lxml.etree
-import os
-import sys
-import fnmatch
-import logging
-import Bcfg2.Options
-from subprocess import Popen, PIPE, STDOUT
-
-def follow_xinclude(xfile, file_list=None):
- """ follow xincludes in the given file """
- xdata = lxml.etree.parse(xfile)
- included = set([ent.get('href') for ent in
- xdata.findall('./{http://www.w3.org/2001/XInclude}include')])
- rv = []
-
- while included:
- try:
- filename = included.pop()
- except KeyError:
- continue
-
- path = os.path.join(os.path.dirname(xfile), filename)
- if file_list is not None and path in file_list:
- rv.append(path)
- groupdata = lxml.etree.parse(path)
- [included.add(el.get('href'))
- for el in
- groupdata.findall('./{http://www.w3.org/2001/XInclude}include')]
- included.discard(filename)
-
- return rv
-
-def validate(filename, schemafile, schema=None, xinclude=True):
- """validate a fail against the given lxml.etree.Schema. return
- True on success, False on failure"""
- if schema is None:
- # if no schema object was provided, instantiate one
- try:
- schema = lxml.etree.XMLSchema(lxml.etree.parse(schemafile))
- except:
- logging.warn("Failed to process schema %s", schemafile)
- return False
-
- try:
- datafile = lxml.etree.parse(filename)
- except SyntaxError:
- logging.warn("%s ***FAILS*** to parse \t\t<----", filename)
- lint = Popen(["xmllint", filename], stdout=PIPE, stderr=STDOUT)
- logging.warn(lint.communicate()[0])
- lint.wait()
- return False
- except IOError:
- logging.warn("Failed to open file %s \t\t<---", filename)
- return False
-
- if schema.validate(datafile):
- logging.info("%s checks out", filename)
- else:
- cmd = ["xmllint"]
- if xinclude:
- cmd.append("--xinclude")
- cmd.extend(["--noout", "--schema", schemafile, filename])
- lint = Popen(cmd, stdout=PIPE, stderr=STDOUT)
- output = lint.communicate()[0]
- if lint.wait():
- logging.warn("%s ***FAILS*** to verify \t\t<----", filename)
- logging.warn(output)
- return False
- else:
- logging.info("%s checks out", filename)
- return True
-
-if __name__ == '__main__':
- opts = {'repo': Bcfg2.Options.SERVER_REPOSITORY,
- 'verbose': Bcfg2.Options.VERBOSE,
- 'configfile': Bcfg2.Options.CFILE,
- 'require-schema': Bcfg2.Options.REQUIRE_SCHEMA,
- 'schema': Bcfg2.Options.SCHEMA_PATH,
- 'stdin': Bcfg2.Options.FILES_ON_STDIN}
- setup = Bcfg2.Options.OptionParser(opts)
- setup.parse(sys.argv[1:])
- verbose = setup['verbose']
- cpath = setup['configfile']
- schemadir = setup['schema']
- os.chdir(schemadir)
- repo = setup['repo']
-
- # set up logging
- level = logging.WARNING
- if verbose:
- level = logging.INFO
- logging.basicConfig(level=level, format="%(message)s")
-
- if setup['stdin']:
- file_list = [s.strip() for s in sys.stdin.readlines()]
- info_list = [f for f in file_list if os.path.basename(f) == 'info.xml']
- metadata_list = fnmatch.filter(file_list, "*/Metadata/groups.xml")
- clients_list = fnmatch.filter(file_list, "*/Metadata/clients.xml")
- bundle_list = fnmatch.filter(file_list, "*/Bundler/*.xml")
- genshibundle_list = fnmatch.filter(file_list, "*/Bundler/*.genshi")
- pkg_list = fnmatch.filter(file_list, "*/Pkgmgr/*.xml")
- base_list = fnmatch.filter(file_list, "*/Base/*.xml")
- rules_list = fnmatch.filter(file_list, "*/Rules/*.xml")
- imageinfo_list = fnmatch.filter(file_list,
- "*/etc/report-configuration.xml")
- services_list = fnmatch.filter(file_list, "*/Svcmgr/*.xml")
- deps_list = fnmatch.filter(file_list, "*/Deps/*.xml")
- dec_list = fnmatch.filter(file_list, "*/Decisions/*")
- pkgcfg_list = fnmatch.filter(file_list, "*/Packages/config.xml")
- gp_list = fnmatch.filter(file_list, "*/GroupPatterns/config.xml")
- props_list = [f
- for f in fnmatch.filter(file_list, "*/Properties/*.xml")
- if "%s.xsd" % os.path.splitext(f)[0] in file_list]
-
- # attempt to follow XIncludes in groups.xml and clients.xml.
- # if those top-level files aren't listed in file_list, though,
- # there's really nothing we can do to guess what a file in
- # Metadata is
- if metadata_list:
- metadata_list.extend(follow_xinclude(metadata_list[0],
- file_list=file_list))
- if clients_list:
- clients_list.extend(follow_xinclude(clients_list[0],
- file_list=file_list))
-
- # if there are other files in Metadata in file_list that
- # aren't listed in metadata_list or clients_list, we can't
- # verify them. warn about those.
- for fname in fnmatch.filter(file_list, "*/Metadata/*.xml"):
- if fname not in metadata_list and fname not in clients_list:
- logging.warn("Broken XInclude chain: Could not determine file type of %s", fname)
- else:
- # not reading files from stdin
-
- # Get a list of all info.xml files in the bcfg2 repository
- info_list = []
- for infodir in ['Cfg', 'TGenshi', 'TCheetah']:
- for root, dirs, files in os.walk('%s/%s' % (repo, infodir)):
- info_list.extend([os.path.join(root, f) for f in files
- if f == 'info.xml'])
-
- # get metadata list
- metadata_list = glob.glob("%s/Metadata/groups.xml" % repo)
-
- # get other file lists
- clients_list = glob.glob("%s/Metadata/clients.xml" % repo)
- bundle_list = glob.glob("%s/Bundler/*.xml" % repo)
- genshibundle_list = glob.glob("%s/Bundler/*.genshi" % repo)
- pkg_list = glob.glob("%s/Pkgmgr/*.xml" % repo)
- base_list = glob.glob("%s/Base/*.xml" % repo)
- rules_list = glob.glob("%s/Rules/*.xml" % repo)
- imageinfo_list = glob.glob("%s/etc/report-configuration.xml" % repo)
- services_list = glob.glob("%s/Svcmgr/*.xml" % repo)
- deps_list = glob.glob("%s/Deps/*.xml" % repo)
- dec_list = glob.glob("%s/Decisions/*" % repo)
- pkgcfg_list = glob.glob("%s/Packages/config.xml" % repo)
- gp_list = glob.glob('%s/GroupPatterns/config.xml' % repo)
- props_list = glob.glob("%s/Properties/*.xml" % repo)
-
- metadata_list.extend(follow_xinclude("%s/Metadata/groups.xml" % repo))
- clients_list.extend(follow_xinclude("%s/Metadata/clients.xml" % repo))
-
- # get all bundles
- ref_bundles = set()
- xdata = lxml.etree.parse("%s/Metadata/groups.xml" % repo)
- xdata.xinclude()
- for bundle in xdata.findall("//Bundle"):
- ref_bundles.add("%s/Bundler/%s" % (repo, bundle.get('name')))
- included = set([ent.get('href') for ent in
- xdata.findall('./{http://www.w3.org/2001/XInclude}include')])
-
- # check for multiple default group definitions
- if "%s/Metadata/groups.xml" % repo in metadata_list:
- default_groups = [g for g in lxml.etree.parse("%s/Metadata/groups.xml" %
- repo).findall('.//Group')
- if g.get('default') == 'true']
- if len(default_groups) > 1:
- logging.warn("*** Warning: Multiple default groups defined")
- for grp in default_groups:
- logging.warn(" %s", grp.get('name'))
-
- # verify attributes for configuration entries
- # (as defined in doc/server/configurationentries)
- # TODO: See if it is possible to do this in the schema instead
- required_configuration_attrs = {
- 'device': ['name', 'owner', 'group', 'dev_type'],
- 'directory': ['name', 'owner', 'group', 'perms'],
- 'file': ['name', 'owner', 'group', 'perms'],
- 'hardlink': ['name', 'to'],
- 'symlink': ['name', 'to'],
- 'ignore': ['name'],
- 'nonexistent': ['name'],
- 'permissions': ['name', 'owner', 'group', 'perms']}
- for rfile in rules_list:
- try:
- xdata = lxml.etree.parse(rfile)
- except lxml.etree.XMLSyntaxError, e:
- logging.warn("Failed to parse %s: %s", rfile, e)
- for posixpath in xdata.findall("//Path"):
- pathname = posixpath.get('name')
- pathtype = posixpath.get('type')
- pathset = set(posixpath.attrib.keys())
- try:
- required_attrs = set(required_configuration_attrs[pathtype] \
- + ['type'])
- except KeyError:
- continue
- if 'dev_type' in required_attrs:
- dev_type = posixpath.get('dev_type')
- if dev_type in ['block', 'char']:
- # check if major/minor are specified
- required_attrs |= set(['major', 'minor'])
- if pathset.issuperset(required_attrs):
- continue
- else:
- logging.warn("The following required attributes are missing for"
- " Path %s in %s: %s",
- pathname, rfile,
- [attr
- for attr in required_attrs.difference(pathset)])
-
- # warn on duplicate Pkgmgr entries with the same priority
- pset = set()
- for plist in pkg_list:
- try:
- xdata = lxml.etree.parse(plist)
- except lxml.etree.XMLSyntaxError, e:
- logging.warn("Failed to parse %s: %s", plist, e)
- # get priority, type, group
- priority = xdata.getroot().get('priority')
- ptype = xdata.getroot().get('type')
- for pkg in xdata.findall("//Package"):
- if pkg.getparent().tag == 'Group':
- grp = pkg.getparent().get('name')
- if type(grp) is not str and grp.getparent().tag == 'Group':
- pgrp = grp.getparent().get('name')
- else:
- pgrp = 'none'
- else:
- grp = 'none'
- pgrp = 'none'
- ptuple = (pkg.get('name'), priority, ptype, grp, pgrp)
- # check if package is already listed with same priority,
- # type, grp
- if ptuple in pset:
- logging.warn("Duplicate Package %s, priority:%s, type:%s",
- pkg.get('name'), priority, ptype)
- else:
- pset.add(ptuple)
-
- filesets = {"%s/metadata.xsd": metadata_list,
- "%s/clients.xsd": clients_list,
- "%s/info.xsd": info_list,
- "%s/bundle.xsd": bundle_list + genshibundle_list,
- "%s/pkglist.xsd": pkg_list,
- "%s/base.xsd": base_list,
- "%s/rules.xsd": rules_list,
- "%s/report-configuration.xsd": imageinfo_list,
- "%s/services.xsd": services_list,
- "%s/deps.xsd": deps_list,
- "%s/decisions.xsd": dec_list,
- "%s/packages.xsd": pkgcfg_list,
- "%s/grouppatterns.xsd": gp_list}
-
- failures = 0
- for schemaname, filelist in list(filesets.items()):
- if filelist:
- # avoid loading schemas for empty file lists
- try:
- schema = lxml.etree.XMLSchema(lxml.etree.parse(schemaname %
- schemadir))
- except:
- logging.warn("Failed to process schema %s",
- schemaname % schemadir)
- failures = 1
- continue
- for filename in filelist:
- if not validate(filename, schemaname % schemadir,
- schema=schema, xinclude=not setup['stdin']):
- failures = 1
-
- # check Properties files against their schemas
- for filename in props_list:
- logging.info("checking %s" % filename)
- schemafile = "%s.xsd" % os.path.splitext(filename)[0]
- if os.path.exists(schemafile):
- if not validate(filename, schemafile, xinclude=not setup['stdin']):
- failures = 1
- elif setup['require-schema']:
- logging.warn("No schema found for %s", filename)
- failures = 1
-
- # print out missing bundle information
- logging.info("")
- if not setup['stdin']:
- # if we've taken a list of files on stdin, there's an
- # excellent chance that referenced bundles do not exist, so
- # skip this check
- for bundle in ref_bundles:
- # check for both regular and genshi bundles
- xmlbundle = "%s.xml" % bundle
- genshibundle = "%s.genshi" % bundle
- allbundles = bundle_list + genshibundle_list
- if xmlbundle not in allbundles and genshibundle not in allbundles:
- logging.info("*** Warning: Bundle %s referenced, but does not "
- "exist.", bundle)
-
- # verify bundle name attribute matches filename
- for bundle in (bundle_list + genshibundle_list):
- fname = bundle.split('Bundler/')[1].split('.')[0]
- xdata = lxml.etree.parse(bundle)
- bname = xdata.getroot().get('name')
- if fname != bname:
- logging.warn("The following names are inconsistent:")
- logging.warn(" Filename is %s", fname)
- logging.warn(" Bundle name found in %s is %s", fname, bname)
-
- raise SystemExit(failures)