1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
|
import os
import sys
from subprocess import Popen, PIPE

import Bcfg2.Server
import Bcfg2.Server.Plugin

try:
    from syck import load as yaml_load, error as yaml_error
except ImportError:
    try:
        from yaml import load as yaml_load, YAMLError as yaml_error
    except ImportError:
        raise ImportError("No yaml library could be found")
class PuppetENCFile(Bcfg2.Server.Plugin.FileBacked):
    """ A no-op FileBacked subclass for ENC scripts.

    ENC scripts are executed on demand (see PuppetENC._run_encs), not
    parsed when they change on disk, so file monitor events carry no
    useful information and are deliberately ignored. """

    def HandleEvent(self, event=None):
        """ Discard the FAM event; there is nothing to reload. """
        return None
class PuppetENC(Bcfg2.Server.Plugin.Plugin,
                Bcfg2.Server.Plugin.Connector,
                Bcfg2.Server.Plugin.ClientRunHooks,
                Bcfg2.Server.Plugin.DirectoryBacked):
    """ A plugin to run Puppet external node classifiers
    (http://docs.puppetlabs.com/guides/external_nodes.html) """
    name = 'PuppetENC'
    experimental = True
    __child__ = PuppetENCFile

    def __init__(self, core, datastore):
        Bcfg2.Server.Plugin.Plugin.__init__(self, core, datastore)
        Bcfg2.Server.Plugin.Connector.__init__(self)
        Bcfg2.Server.Plugin.ClientRunHooks.__init__(self)
        Bcfg2.Server.Plugin.DirectoryBacked.__init__(self, self.data,
                                                     self.core.fam)
        # Per-hostname cache of ENC output:
        #   hostname -> dict(groups=<list>, params=<dict>)
        # populated lazily by _run_encs(), cleared by end_client_run().
        self.cache = dict()

    def _run_encs(self, metadata):
        """ Run every ENC script in this plugin's data directory for
        the given client and cache the merged groups and parameters in
        self.cache[metadata.hostname].

        :param metadata: the client metadata whose hostname is passed
                         as the single argument to each ENC script
        :raises: Bcfg2.Server.Plugin.PluginExecutionError if an ENC
                 exits non-zero or emits undecodable YAML """
        cache = dict(groups=[], params=dict())
        for enc in self.entries.keys():
            epath = os.path.join(self.data, enc)
            self.debug_log("PuppetENC: Running ENC %s for %s" %
                           (enc, metadata.hostname))
            proc = Popen([epath, metadata.hostname], stdin=PIPE, stdout=PIPE,
                         stderr=PIPE)
            (out, err) = proc.communicate()
            rv = proc.wait()
            if rv != 0:
                # BUG FIX: the format string previously had four %s
                # placeholders but only three arguments, so a failing
                # ENC raised TypeError instead of reporting the error.
                msg = "PuppetENC: Error running ENC %s for %s (%s)" % \
                    (enc, metadata.hostname, rv)
                self.logger.error("%s: %s" % (msg, err))
                raise Bcfg2.Server.Plugin.PluginExecutionError(msg)
            if err:
                # non-fatal: the ENC succeeded but wrote to stderr
                self.debug_log("ENC Error: %s" % err)
            try:
                yaml = yaml_load(out)
                self.debug_log("Loaded data from %s for %s: %s" %
                               (enc, metadata.hostname, yaml))
            except yaml_error:
                # sys.exc_info() keeps this compatible with both
                # Python 2 and 3 exception syntax
                err = sys.exc_info()[1]
                msg = "Error decoding YAML from %s for %s: %s" % \
                    (enc, metadata.hostname, err)
                self.logger.error(msg)
                raise Bcfg2.Server.Plugin.PluginExecutionError(msg)

            groups = []
            if "classes" in yaml:
                # stock Puppet ENC output format
                groups = yaml['classes']
            elif "groups" in yaml:
                # more Bcfg2-ish output format
                groups = yaml['groups']
            if groups:
                if isinstance(groups, list):
                    self.debug_log("ENC %s adding groups to %s: %s" %
                                   (enc, metadata.hostname, groups))
                    cache['groups'].extend(groups)
                else:
                    # a mapping of group -> parameters: add each group
                    # and merge its (possibly empty) parameter dict
                    self.debug_log("ENC %s adding groups to %s: %s" %
                                   (enc, metadata.hostname, groups.keys()))
                    for group, params in groups.items():
                        cache['groups'].append(group)
                        if params:
                            cache['params'].update(params)
            if "parameters" in yaml and yaml['parameters']:
                cache['params'].update(yaml['parameters'])
            if "environment" in yaml:
                self.logger.info("Ignoring unsupported environment section of "
                                 "ENC %s for %s" % (enc, metadata.hostname))
        self.cache[metadata.hostname] = cache

    def get_additional_groups(self, metadata):
        """ Connector interface: return the list of extra groups the
        ENCs assign to this client, running the ENCs if needed. """
        if metadata.hostname not in self.cache:
            self._run_encs(metadata)
        return self.cache[metadata.hostname]['groups']

    def get_additional_data(self, metadata):
        """ Connector interface: return the dict of extra connector
        data the ENCs assign to this client, running them if needed. """
        if metadata.hostname not in self.cache:
            self._run_encs(metadata)
        return self.cache[metadata.hostname]['params']

    def end_client_run(self, metadata):
        """ clear the entire cache at the end of each client run. this
        guarantees that each client will run all ENCs at or near the
        start of each run; we have to clear the entire cache instead
        of just the cache for this client because a client that builds
        templates that use metadata for other clients will populate
        the cache for those clients, which we don't want. This makes
        the caching less than stellar, but it does prevent multiple
        runs of ENCs for a single host a) for groups and data
        separately; and b) when a single client's metadata is
        generated multiple times by separate templates """
        self.cache = dict()

    def end_statistics(self, metadata):
        """ Clear the ENC cache when the statistics phase ends, too. """
        # BUG FIX: was self.end_client_run(self, metadata), which passed
        # self twice and raised TypeError on every statistics phase.
        self.end_client_run(metadata)
|