def get_data(self, entry, metadata):
    """ Render the Genshi template for the given entry and client.

    :param entry: The abstract entry to generate data for
    :param metadata: The client metadata
    :returns: string - the rendered template output
    :raises: PluginExecutionError if the template failed to load or
             rendering fails
    """
    if self.template is None:
        raise PluginExecutionError("Failed to load template %s" %
                                   self.name)
    stream = self.template.generate(
        **get_template_data(
            entry, metadata, self.name,
            default=DefaultGenshiDataProvider())).filter(removecomment)
    try:
        try:
            return stream.render('text',
                                 encoding=Bcfg2.Options.setup.encoding,
                                 strip_whitespace=False)
        except TypeError:
            # older Genshi releases do not accept strip_whitespace
            return stream.render('text',
                                 encoding=Bcfg2.Options.setup.encoding)
    except UndefinedError:
        # a failure in a genshi expression _other_ than a
        # %{ python ... %} block
        err = sys.exc_info()[1]
        stack = traceback.extract_tb(sys.exc_info()[2])
        for quad in stack:
            if quad[0] == self.name:
                raise PluginExecutionError(
                    "%s: %s at '%s'" % (err.__class__.__name__, err,
                                        quad[2]))
        raise
    except Exception:
        # a failure inside a %{ python ... %} block needs special
        # unwinding; narrowed from a bare ``except:`` so
        # KeyboardInterrupt/SystemExit still propagate
        self._handle_genshi_exception(sys.exc_info())
def _handle_genshi_exception(self, exc):
    """ Map a failure inside a Genshi ``{% python %}`` block back to
    the offending source line and raise a descriptive error.

    Genshi is opaque about what is being executed, so the only way to
    determine which ``{% python %}`` block failed -- when there are
    several -- is to match the snippet named in the traceback against
    the first non-empty line of each block.

    :param exc: the ``sys.exc_info()`` triple for the failure
    :raises: PluginExecutionError
    """
    error = exc[1]
    frames = traceback.extract_tb(exc[2])
    # locate the innermost stack frame that belongs to this template
    lineno = func = None
    for frame in reversed(frames):
        if frame[0] == self.name:
            lineno, func = frame[1:3]
            break
    if lineno is None:
        # couldn't even find the stack frame, wtf.
        raise PluginExecutionError("%s: %s" % (error.__class__.__name__,
                                               error))
    execs = [contents
             for etype, contents, _ in self.template.stream
             if etype == self.template.EXEC]
    contents = None
    if len(execs) == 1:
        contents = execs[0]
    elif len(execs) > 1:
        match = self.pyerror_re.match(func)
        if match:
            firstline = match.group(0)
            for pyblock in execs:
                if pyblock.startswith(firstline):
                    contents = pyblock
                    break
    # else, no EXEC blocks -- WTF?
    if contents:
        # (line number in the exception) - (firstlineno of the
        # compiled snippet) gives the offending line inside the
        # block, with blank lines removed as appropriate for
        # :attr:`GENSHI_REMOVES_BLANK_LINES`
        code = contents.source.strip().splitlines()
        if GENSHI_REMOVES_BLANK_LINES:
            code = [line for line in code if line.strip()]
        try:
            badline = code[lineno - contents.code.co_firstlineno]
            raise PluginExecutionError(
                "%s: %s at '%s'" % (error.__class__.__name__, error,
                                    badline))
        except IndexError:
            raise PluginExecutionError(
                "%s: %s" % (error.__class__.__name__, error))
    raise
def write(self):
    """ Write the data in this data structure back to the property
    file.

    :returns: True on success
    :raises: PluginExecutionError if write-back is disabled, the data
             fails validation, or the file cannot be written
    """
    if not SETUP.cfp.getboolean(
            "properties", "writes_enabled", default=True):
        msg = "Properties files write-back is disabled in the " + \
            "configuration"
        LOGGER.error(msg)
        raise PluginExecutionError(msg)
    try:
        self.validate_data()
    except PluginExecutionError:
        msg = "Cannot write %s: %s" % (self.name, sys.exc_info()[1])
        LOGGER.error(msg)
        raise PluginExecutionError(msg)
    try:
        # lxml.etree.tostring() returns bytes; write them directly to
        # a binary-mode file.  The original decoded to str and wrote
        # to a "wb" handle, which raises TypeError on Python 3, and
        # leaked the file handle -- ``with`` closes it reliably.
        with open(self.name, "wb") as outfile:
            outfile.write(
                lxml.etree.tostring(self.xdata, xml_declaration=False,
                                    pretty_print=True))
        return True
    except IOError:
        err = sys.exc_info()[1]
        msg = "Failed to write %s: %s" % (self.name, err)
        LOGGER.error(msg)
        raise PluginExecutionError(msg)
def get_ipcache_entry(self, client):
    """ Build and consult a cache of DNS lookup results.

    :param client: hostname to resolve
    :returns: tuple of (addresses, hostname)
    :raises: PluginExecutionError if no address can be found
    """
    if client in self.ipcache:
        if self.ipcache[client]:
            return self.ipcache[client]
        # a cached False marks a previously failed lookup
        raise PluginExecutionError("No cached IP address for %s" %
                                   client)
    # need to add entry; try DNS first
    try:
        ipaddr = set(
            [info[4][0] for info in socket.getaddrinfo(client, None)])
        self.ipcache[client] = (ipaddr, client)
        return (ipaddr, client)
    except socket.gaierror:
        # fall back to NSS (e.g. /etc/hosts) via getent
        result = self.cmd.run(["getent", "hosts", client])
        if result.success:
            ipaddr = result.stdout.strip().split()
            if ipaddr:
                self.ipcache[client] = (ipaddr, client)
                return (ipaddr, client)
        self.ipcache[client] = False
        msg = "Failed to find IP address for %s: %s" % (client,
                                                        result.error)
        # fixed: ``self.logger`` is a logger object, not a callable;
        # the original ``self.logger(msg)`` raised TypeError
        self.logger.error(msg)
        raise PluginExecutionError(msg)
def HandleEvent(self, _=None):
    """Read and re-parse the file upon update.

    :raises: PluginExecutionError if the file cannot be read or
             parsed, or if a required priority is missing or bogus
    """
    try:
        # ``with`` closes the handle promptly; the original leaked it
        # until garbage collection
        with open(self.name) as datafile:
            data = datafile.read()
    except IOError:
        msg = "Failed to read file %s: %s" % (self.name,
                                              sys.exc_info()[1])
        logger.error(msg)
        raise PluginExecutionError(msg)
    self.items = {}
    try:
        xdata = lxml.etree.XML(data, parser=Bcfg2.Server.XMLParser)
    except lxml.etree.XMLSyntaxError:
        msg = "Failed to parse file %s: %s" % (self.name,
                                               sys.exc_info()[1])
        logger.error(msg)
        raise PluginExecutionError(msg)
    self.pnode = self.__node__(xdata, self.items)
    self.cache = None
    try:
        self.priority = int(xdata.get('priority'))
    except (ValueError, TypeError):
        # priority may legitimately be absent unless the subclass
        # requires it
        if self.__priority_required__:
            msg = "Got bogus priority %s for file %s" % \
                (xdata.get('priority'), self.name)
            logger.error(msg)
            raise PluginExecutionError(msg)
    del xdata, data
def bind_entry(self, entry, metadata):
    """ Bind data and info for ``entry`` for the given client.

    :param entry: the abstract entry to bind
    :param metadata: the client metadata
    :returns: the bound entry
    :raises: PluginExecutionError on verification or encoding failure
    """
    self.bind_info_to_entry(entry, metadata)
    data, generator = self._generate_data(entry, metadata)

    if generator is not None:
        # data produced by a CfgCreator gets no filters; otherwise
        # apply every filter at least as specific as the generator.
        # The specificity comparison looks backwards because it is
        # designed to sort from most specific to least specific.
        for data_filter in self.get_handlers(metadata, CfgFilter):
            if data_filter.specific <= generator.specific:
                data = data_filter.modify_data(entry, metadata, data)

    if SETUP['validate']:
        try:
            self._validate_data(entry, metadata, data)
        except CfgVerificationError:
            raise PluginExecutionError(
                "Failed to verify %s for %s: %s" %
                (entry.get('name'), metadata.hostname,
                 sys.exc_info()[1]))

    if entry.get('encoding') == 'base64':
        data = b64encode(data)
    else:
        try:
            if not isinstance(data, unicode):
                if not isinstance(data, str):
                    data = data.decode('utf-8')
                data = u_str(data, self.encoding)
        except UnicodeDecodeError:
            msg = "Failed to decode %s: %s" % (entry.get('name'),
                                               sys.exc_info()[1])
            self.logger.error(msg)
            self.logger.error("Please verify you are using the proper "
                              "encoding")
            raise PluginExecutionError(msg)
        except ValueError:
            msg = "Error in specification for %s: %s" % \
                (entry.get('name'), sys.exc_info()[1])
            self.logger.error(msg)
            self.logger.error(
                "You need to specify base64 encoding for %s" %
                entry.get('name'))
            raise PluginExecutionError(msg)
        except TypeError:
            # data is already unicode; newer versions of Cheetah
            # seem to return unicode
            pass

    if data:
        entry.text = data
    else:
        entry.set('empty', 'true')
    return entry
def Update(self, ref=None):
    """ Git.Update() => True|False
    Update the working copy against the upstream repository
    """
    self.logger.info("Git: Git.Update(ref='%s')" % ref)

    # housekeeping steps are best-effort: failures only warn
    self.debug_log("Git: Performing garbage collection on repo at %s" %
                   Bcfg2.Options.setup.vcs_root)
    try:
        self._log_git_cmd(self.repo.git.gc('--auto'))
    except git.GitCommandError:
        self.logger.warning(
            "Git: Failed to perform garbage collection: %s" %
            sys.exc_info()[1])

    self.debug_log("Git: Fetching all refs for repo at %s" %
                   Bcfg2.Options.setup.vcs_root)
    try:
        self._log_git_cmd(self.repo.git.fetch('--all'))
    except git.GitCommandError:
        self.logger.warning("Git: Failed to fetch refs: %s" %
                            sys.exc_info()[1])

    if ref:
        self.debug_log("Git: Checking out %s" % ref)
        try:
            self._log_git_cmd(self.repo.git.checkout('-f', ref))
        except git.GitCommandError:
            raise PluginExecutionError(
                "Git: Failed to checkout %s: %s" %
                (ref, sys.exc_info()[1]))

    # pull only when HEAD is a branch that tracks an upstream
    upstream = None
    if not self.repo.head.is_detached:
        self.debug_log("Git: Determining if %s is a tracking branch" %
                       self.repo.head.ref.name)
        upstream = self.repo.head.ref.tracking_branch()
    if upstream is not None:
        self.debug_log("Git: %s is a tracking branch, pulling from %s" %
                       (self.repo.head.ref.name, upstream))
        try:
            self._log_git_cmd(self.repo.git.pull("--rebase"))
        except git.GitCommandError:
            raise PluginExecutionError("Git: Failed to pull from "
                                       "upstream: %s" %
                                       sys.exc_info()[1])

    self.logger.info("Git: Repo at %s updated to %s" %
                     (Bcfg2.Options.setup.vcs_root,
                      self.get_revision()))
    return True
def write_update(self, specific, new_entry, log):
    """ Write pulled data to the filesystem.

    :param specific: specificity of the file to write
    :param new_entry: dict of pulled data ('text' plus optional
                      'owner'/'group'/'mode' attributes)
    :param log: flag passed through to :func:`debug_log`
    :raises: PluginExecutionError for templated entries or on
             encoding failure
    """
    if 'text' in new_entry:
        name = self.build_filename(specific)
        if os.path.exists("%s.genshi" % name):
            msg = "Cfg: Unable to pull data for genshi types"
            self.logger.error(msg)
            raise PluginExecutionError(msg)
        elif os.path.exists("%s.cheetah" % name):
            msg = "Cfg: Unable to pull data for cheetah types"
            self.logger.error(msg)
            raise PluginExecutionError(msg)
        try:
            etext = new_entry['text'].encode(self.encoding)
        except (UnicodeError, LookupError):
            # narrowed from a bare ``except:``: encode() raises
            # UnicodeError for unencodable text and LookupError for
            # an unknown codec
            msg = "Cfg: Cannot encode content of %s as %s" % \
                (name, self.encoding)
            self.logger.error(msg)
            raise PluginExecutionError(msg)
        # etext is bytes: write in binary mode, and close the handle
        # deterministically (the original leaked it)
        with open(name, 'wb') as datafile:
            datafile.write(etext)
        self.debug_log("Wrote file %s" % name, flag=log)
    badattr = [attr for attr in ['owner', 'group', 'mode']
               if attr in new_entry]
    if badattr:
        # check for legacy info files and inform user of their removal
        for ifile in ['info', ':info']:
            info = os.path.join(self.path, ifile)
            if os.path.exists(info):
                self.logger.info(
                    "Removing %s and replacing with info.xml" % info)
                os.remove(info)
        metadata_updates = {}
        metadata_updates.update(self.metadata)
        for attr in badattr:
            metadata_updates[attr] = new_entry.get(attr)
        infoxml = lxml.etree.Element('FileInfo')
        infotag = lxml.etree.SubElement(infoxml, 'Info')
        for attr, value in metadata_updates.items():
            # Element.set() is the idiomatic spelling of
            # attrib.__setitem__()
            infotag.set(attr, value)
        with open(os.path.join(self.path, "info.xml"), "w") as ofile:
            ofile.write(
                lxml.etree.tostring(infoxml, xml_declaration=False,
                                    pretty_print=True).decode('UTF-8'))
        self.debug_log("Wrote file %s" % os.path.join(self.path,
                                                      "info.xml"),
                       flag=log)
def Index(self):
    """ Parse the file and decrypt any encrypted elements.

    :raises: PluginExecutionError if crypto support is missing or
             decryption fails
    """
    Bcfg2.Server.Plugin.StructFile.Index(self)
    # nothing more to do unless the file opts in to encryption
    if self.xdata.get("encryption", "false").lower() == "false":
        return
    if not HAS_CRYPTO:
        msg = "Properties: M2Crypto is not available: %s" % self.name
        LOGGER.error(msg)
        raise PluginExecutionError(msg)
    for element in self.xdata.xpath("//*[@encrypted]"):
        try:
            element.text = self._decrypt(element)
        except EVPError:
            msg = "Failed to decrypt %s element in %s" % (element.tag,
                                                          self.name)
            LOGGER.error(msg)
            raise PluginExecutionError(msg)
def bind_entry(self, entry, metadata):
    """ Bind data and info for ``entry`` for the given client.

    :param entry: the abstract entry to bind
    :param metadata: the client metadata
    :returns: the bound entry
    :raises: PluginExecutionError on verification or encoding failure
    """
    self.bind_info_to_entry(entry, metadata)
    data = self._generate_data(entry, metadata)

    # run every applicable filter over the generated data
    for data_filter in self.get_handlers(metadata, CfgFilter):
        data = data_filter.modify_data(entry, metadata, data)

    if self.setup['validate']:
        try:
            self._validate_data(entry, metadata, data)
        except CfgVerificationError:
            raise PluginExecutionError(
                "Failed to verify %s for %s: %s" %
                (entry.get('name'), metadata.hostname,
                 sys.exc_info()[1]))

    if entry.get('encoding') == 'base64':
        data = b64encode(data)
    else:
        try:
            if not isinstance(data, unicode):
                if not isinstance(data, str):
                    data = data.decode('utf-8')
                data = u_str(data, self.encoding)
        except UnicodeDecodeError:
            msg = "Failed to decode %s: %s" % (entry.get('name'),
                                               sys.exc_info()[1])
            self.logger.error(msg)
            self.logger.error("Please verify you are using the proper "
                              "encoding")
            raise PluginExecutionError(msg)
        except ValueError:
            msg = "Error in specification for %s: %s" % \
                (entry.get('name'), sys.exc_info()[1])
            self.logger.error(msg)
            self.logger.error(
                "You need to specify base64 encoding for %s" %
                entry.get('name'))
            raise PluginExecutionError(msg)
        except TypeError:
            # data is already unicode; newer versions of Cheetah
            # seem to return unicode
            pass

    if data:
        entry.text = data
    else:
        entry.set('empty', 'true')
    return entry
def _generate_data(self, entry, metadata):
    """ Generate data for the given entry on the given client

    :param entry: The abstract entry to generate data for.  This
                  will not be modified
    :type entry: lxml.etree._Element
    :param metadata: The client metadata to generate data for
    :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata
    :returns: string - the data for the entry
    """
    try:
        generator = self.best_matching(
            metadata, self.get_handlers(metadata, CfgGenerator))
    except PluginExecutionError:
        # if no creators or generators exist, _create_data()
        # raises an appropriate exception
        return self._create_data(entry, metadata)
    try:
        return generator.get_data(entry, metadata)
    except Exception:
        # get_data() implementations may raise anything; convert to a
        # PluginExecutionError with context.  Narrowed from a bare
        # ``except:`` so KeyboardInterrupt/SystemExit propagate.
        msg = "Cfg: Error rendering %s: %s" % (entry.get("name"),
                                               sys.exc_info()[1])
        self.logger.error(msg)
        raise PluginExecutionError(msg)
def _generate_data(self, entry, metadata):
    """ Generate data for the given entry on the given client

    :param entry: The abstract entry to generate data for.  This
                  will not be modified
    :type entry: lxml.etree._Element
    :param metadata: The client metadata to generate data for
    :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata
    :returns: tuple of (string, generator) - the data for the entry
              and the generator used to generate it (or None, if
              data was created)
    """
    try:
        generator = self.best_matching(
            metadata, self.get_handlers(metadata, CfgGenerator))
    except PluginExecutionError:
        # if no creators or generators exist, _create_data()
        # raises an appropriate exception
        return (self._create_data(entry, metadata), None)
    try:
        return (generator.get_data(entry, metadata), generator)
    except Exception:
        # the exceptions raised by ``get_data`` are not constrained
        # in any way, so this stays broad -- but narrowed from a bare
        # ``except:`` so KeyboardInterrupt/SystemExit propagate
        msg = "Cfg: Error rendering %s: %s" % (entry.get("name"),
                                               sys.exc_info()[1])
        self.logger.error(msg)
        raise PluginExecutionError(msg)
def _generate_data(self, entry, metadata):
    """ Generate data for the given entry on the given client

    :param entry: The abstract entry to generate data for.  This
                  will not be modified
    :type entry: lxml.etree._Element
    :param metadata: The client metadata to generate data for
    :type metadata: Bcfg2.Server.Plugins.Metadata.ClientMetadata
    :returns: tuple of (string, generator) - the data for the entry
              and the generator used to generate it (or None, if
              data was created)
    """
    try:
        generator = self.best_matching(
            metadata, self.get_handlers(metadata, CfgGenerator))
    except PluginExecutionError:
        # if no creators or generators exist, _create_data()
        # raises an appropriate exception
        return (self._create_data(entry, metadata), None)
    # guard against a missing mode attribute: entry.get() returns
    # None, and the original crashed with AttributeError on .lower()
    if (entry.get('mode') or '').lower() == 'inherit':
        # use on-disk permissions
        self.logger.warning(
            "Cfg: %s: Use of mode='inherit' is deprecated" %
            entry.get("name"))
        fname = os.path.join(self.path, generator.name)
        entry.set('mode',
                  oct_mode(stat.S_IMODE(os.stat(fname).st_mode)))
    try:
        return (generator.get_data(entry, metadata), generator)
    except Exception:
        # get_data() may raise anything; narrowed from a bare
        # ``except:`` so KeyboardInterrupt/SystemExit propagate
        msg = "Cfg: Error rendering %s: %s" % (entry.get("name"),
                                               sys.exc_info()[1])
        self.logger.error(msg)
        raise PluginExecutionError(msg)
def process_statistics(self, client, xdata):
    """ Queue statistics data for the client via the transport.

    Retries the store up to three times before giving up.

    :param client: the client metadata whose statistics are stored
    :param xdata: the statistics XML document from the client
    :raises: PluginExecutionError when all retries fail
    """
    stats = xdata.find("Statistics")
    stats.set('time', time.asctime(time.localtime()))
    cdata = {'server': self.whoami}
    for field in self.CLIENT_METADATA_FIELDS:
        try:
            value = getattr(client, field)
        except AttributeError:
            continue
        if value:
            if isinstance(value, set):
                # sets are not serializable by the transport
                value = list(value)
            cdata[field] = value
    # try 3 times to store the data
    for attempt in range(1, 4):
        try:
            self.transport.store(
                client.hostname, cdata,
                lxml.etree.tostring(stats, xml_declaration=False))
            self.debug_log("%s: Queued statistics data for %s" %
                           (self.__class__.__name__, client.hostname))
            return
        except TransportError:
            continue
        except Exception:
            # narrowed from a bare ``except:``; log and retry
            self.logger.error(
                "%s: Attempt %s: Failed to add statistic: %s" %
                (self.__class__.__name__, attempt, sys.exc_info()[1]))
    raise PluginExecutionError("%s: Retry limit reached for %s" %
                               (self.__class__.__name__,
                                client.hostname))
def build_key(self, entry, metadata):
    """ Generate a new SSL private key for the given entry/client.

    :param entry: the abstract entry whose name determines the key
                  filename
    :param metadata: the client metadata
    :returns: string - the generated key material
    :raises: PluginExecutionError if the key type is unknown or
             openssl fails
    """
    # TODO: verify key fits the specs
    filename = "%s.H_%s" % (os.path.basename(entry.get('name')),
                            metadata.hostname)
    self.logger.info("SSLCA: Generating new key %s" % filename)
    key_spec = self.key.get_spec(metadata)
    ktype = key_spec['type']
    bits = key_spec['bits']
    if ktype == 'rsa':
        cmd = ["openssl", "genrsa", bits]
    elif ktype == 'dsa':
        cmd = ["openssl", "dsaparam", "-noout", "-genkey", bits]
    else:
        # previously an unknown type crashed with UnboundLocalError
        # on ``cmd``; fail with a clear error instead
        raise PluginExecutionError("SSLCA: Unknown key type %s" % ktype)
    self.debug_log("SSLCA: Generating new key: %s" % " ".join(cmd))
    result = self.cmd.run(cmd)
    if not result.success:
        raise PluginExecutionError(
            "SSLCA: Failed to generate key %s for "
            "%s: %s" % (entry.get("name"), metadata.hostname,
                        result.error))
    # close the file handle deterministically (the original leaked it)
    with open(os.path.join(self.path, filename), 'w') as keyfile:
        keyfile.write(result.stdout)
    return result.stdout
def validate_data(self):
    """ Ensure the loaded data can be serialized back out as YAML.

    :raises: PluginExecutionError if serialization fails
    """
    try:
        yaml.dump(self.yaml)
    except yaml.YAMLError:
        raise PluginExecutionError(
            "Data for %s cannot be dumped to YAML: %s" %
            (self.name, sys.exc_info()[1]))
def validate_data(self):
    """ Ensure the loaded data can be serialized back out as JSON.

    :raises: PluginExecutionError if serialization fails
    """
    try:
        json.dumps(self.json)
    except (TypeError, ValueError):
        # narrowed from a bare ``except:``: json.dumps raises
        # TypeError for unserializable objects and ValueError for
        # things like circular references
        err = sys.exc_info()[1]
        raise PluginExecutionError("Data for %s cannot be dumped to JSON: "
                                   "%s" % (self.name, err))
def Index(self):
    """ Parse the raw file data as YAML and store it on
    ``self.yaml``.

    :raises: PluginExecutionError if the data is not valid YAML
    """
    try:
        # NOTE(review): yaml.load() without an explicit Loader can
        # instantiate arbitrary Python objects via tags; if this
        # file's content is not fully trusted, yaml.safe_load()
        # should be considered
        self.yaml = yaml.load(self.data)
    except yaml.YAMLError:
        raise PluginExecutionError(
            "Could not load YAML data from %s: %s" %
            (self.name, sys.exc_info()[1]))
def _real_rpc_call(self, *args, **kwargs):
    """Forward an RPC call to the reporting collector transport.

    NOTE(review): ``method`` is not defined in this function --
    presumably it is a closure variable from an enclosing factory
    function; confirm before moving this code.
    """
    try:
        return self.transport.rpc(method, *args, **kwargs)
    except TransportError:
        # this is needed for Admin.Pull
        raise PluginExecutionError(sys.exc_info()[1])
def Bind(self, entry, metadata):
    """Bind an entry using the appropriate generator.

    If the entry has an ``altsrc`` attribute, binding is first
    attempted under the alternate name; on failure we fall back to
    binding under the entry's real name.

    :raises: PluginExecutionError if no (unique) generator can bind
             the entry
    """
    if 'altsrc' in entry.attrib:
        oldname = entry.get('name')
        altsrc = entry.get('altsrc')
        entry.set('name', altsrc)
        entry.set('realname', oldname)
        del entry.attrib['altsrc']
        try:
            ret = self.Bind(entry, metadata)
            entry.set('name', oldname)
            del entry.attrib['realname']
            return ret
        except Exception:
            # narrowed from a bare ``except:``.  Restore the real
            # name and fall through to normal binding below.
            entry.set('name', oldname)
            # fixed: log the saved altsrc value -- the attribute was
            # already deleted, so entry.get('altsrc') logged None
            logger.error("Failed binding entry %s:%s with altsrc %s"
                         % (entry.tag, entry.get('name'), altsrc))
            logger.error("Falling back to %s:%s" %
                         (entry.tag, entry.get('name')))
    glist = [gen for gen in self.generators
             if entry.get('name') in gen.Entries.get(entry.tag, {})]
    if len(glist) == 1:
        return glist[0].Entries[entry.tag][entry.get('name')](entry,
                                                              metadata)
    elif len(glist) > 1:
        generators = ", ".join([gen.name for gen in glist])
        logger.error("%s %s served by multiple generators: %s" %
                     (entry.tag, entry.get('name'), generators))
    g2list = [gen for gen in self.generators
              if gen.HandlesEntry(entry, metadata)]
    if len(g2list) == 1:
        return g2list[0].HandleEntry(entry, metadata)
    entry.set('failure', 'no matching generator')
    raise PluginExecutionError(entry.tag, entry.get('name'))
def calculate_prereqs(self, metadata, entries):
    """Calculate the prerequisites defined in Deps for the passed
    set of entries.

    :param metadata: the client metadata
    :param entries: list of entry elements to find prereqs for
    :returns: list of prerequisite elements
    :raises: PluginExecutionError when two structfiles declare
             conflicting dependencies at the same priority
    """
    prereqs = []
    toexamine = list(entries[:])
    while toexamine:
        entry = toexamine.pop()
        # tuples of (PriorityStructFile, element) for each
        # matching element and the structfile that contains it
        matching = []
        for deps in self.entries.values():
            # fixed: match on the ``name`` attribute (@name), not on
            # a <name> child element as the original path did
            el = deps.find("/%s[@name='%s']" % (entry.tag,
                                                entry.get("name")))
            # explicit spelling of lxml element truthiness: only
            # elements that have children (i.e. actually declare
            # prerequisites) count as matches
            if el is not None and len(el):
                matching.append((deps, el))
        if len(matching) > 1:
            prio = [int(m[0].priority) for m in matching]
            maxprio = max(prio)
            if prio.count(maxprio) > 1:
                raise PluginExecutionError(
                    "Deps: Found conflicting dependencies with same "
                    "priority for %s:%s for %s: %s" %
                    (entry.tag, entry.get("name"), metadata.hostname,
                     [m[0].name for m in matching]))
            matching = [matching[prio.index(maxprio)]]
        if not matching:
            continue
        for prq in matching[0][1].getchildren():
            if prq not in prereqs and prq not in entries:
                toexamine.append(prq)
                prereqs.append(prq)
    return prereqs
def Index(self):
    """ Parse the raw file data as JSON and store it on
    ``self.json``.

    :raises: PluginExecutionError if the data is not valid JSON
    """
    try:
        self.json = json.loads(self.data)
    except ValueError:
        raise PluginExecutionError(
            "Could not load JSON data from %s: %s" %
            (self.name, sys.exc_info()[1]))
def handle_event(self, event):
    """ Reload the Jinja2 template when the underlying file changes.

    :param event: the FAM event for this file
    :raises: PluginExecutionError if the template cannot be loaded
    """
    CfgGenerator.handle_event(self, event)
    try:
        self.template = \
            self.environment.get_template(self.name)
    except Exception:
        # template loading can fail in many ways (syntax errors,
        # I/O, encoding); narrowed from a bare ``except:`` so
        # KeyboardInterrupt/SystemExit propagate
        raise PluginExecutionError("Failed to load template: %s" %
                                   sys.exc_info()[1])
def GenerateHostKeyPair(self, client, filename):
    """Generate new host key pair for client.

    :param client: hostname to generate keys for
    :param filename: requested key filename; determines the key type
    :raises: PluginExecutionError on unknown filename or any
             generation/copy failure
    """
    match = re.search(r'(ssh_host_(?:((?:ecd|d|r)sa)_)?key)', filename)
    if match is None:
        raise PluginExecutionError("Unknown key filename: %s" % filename)
    hostkey = "%s.H_%s" % (match.group(1), client)
    # no explicit type in the filename means a protocol-1 RSA key
    keytype = match.group(2) or 'rsa1'
    fileloc = os.path.join(self.data, hostkey)
    publoc = os.path.join(
        self.data,
        ".".join([hostkey.split('.')[0], 'pub', "H_%s" % client]))
    tempdir = tempfile.mkdtemp()
    temploc = os.path.join(tempdir, hostkey)
    cmd = ["ssh-keygen", "-q", "-f", temploc, "-N", "", "-t", keytype,
           "-C", "root@%s" % client]
    self.debug_log("SSHbase: Running: %s" % " ".join(cmd))
    result = self.cmd.run(cmd)
    if not result.success:
        # clean up the temporary directory instead of leaking it
        shutil.rmtree(tempdir, ignore_errors=True)
        raise PluginExecutionError(
            "SSHbase: Error running ssh-keygen: %s" % result.error)
    try:
        shutil.copy(temploc, fileloc)
        shutil.copy("%s.pub" % temploc, publoc)
    except IOError:
        err = sys.exc_info()[1]
        # don't leak the tempdir on copy failure either
        shutil.rmtree(tempdir, ignore_errors=True)
        raise PluginExecutionError("Temporary SSH keys not found: %s" %
                                   err)
    try:
        os.unlink(temploc)
        os.unlink("%s.pub" % temploc)
        os.rmdir(tempdir)
    except OSError:
        err = sys.exc_info()[1]
        raise PluginExecutionError("Failed to unlink temporary ssh keys: "
                                   "%s" % err)
def handle_event(self, event):
    """ Decrypt the file data after the base generator loads it.

    :param event: the FAM event for this file
    :raises: PluginExecutionError if decryption fails
    """
    CfgGenerator.handle_event(self, event)
    if self.data is None:
        return
    # todo: let the user specify a passphrase by name
    try:
        self.data = bruteforce_decrypt(self.data)
    except EVPError:
        raise PluginExecutionError("Failed to decrypt %s" % self.name)
def get_data(self, entry, metadata):
    """ Render the Jinja2 template for the given entry and client.

    :param entry: The abstract entry to generate data for
    :param metadata: The client metadata
    :returns: string - the rendered template output
    :raises: PluginExecutionError if the template failed to load
    """
    if self.template is None:
        raise PluginExecutionError("Failed to load template %s" %
                                   self.name)
    context = get_template_data(entry, metadata, self.name,
                                default=DefaultJinja2DataProvider())
    return self.template.render(context)
def handle_event(self, event):
    """ Reload the Genshi text template when the underlying file
    changes.

    :param event: the FAM event for this file
    :raises: PluginExecutionError if the template cannot be loaded
    """
    CfgGenerator.handle_event(self, event)
    try:
        self.template = \
            self.loader.load(self.name, cls=NewTextTemplate,
                             encoding=Bcfg2.Options.setup.encoding)
    except Exception:
        # template loading can fail in many ways (syntax errors,
        # I/O, encoding); narrowed from a bare ``except:`` so
        # KeyboardInterrupt/SystemExit propagate
        raise PluginExecutionError("Failed to load template: %s" %
                                   sys.exc_info()[1])
def GenerateHostKeyPair(self, client, filename):
    """Generate new host key pair for client.

    :param client: hostname to generate keys for
    :param filename: requested key filename; determines the key type
    :raises: PluginExecutionError on any failure
    """
    match = re.search(r'(ssh_host_(?:((?:ecd|d|r)sa|ed25519)_)?key)',
                      filename)
    if match is None:
        raise PluginExecutionError("Unknown key filename: %s" % filename)
    hostkey = "%s.H_%s" % (match.group(1), client)
    # no explicit type in the filename means a protocol-1 RSA key
    keytype = match.group(2) or 'rsa1'
    fileloc = os.path.join(self.data, hostkey)
    publoc = os.path.join(self.data,
                          ".".join([hostkey.split('.')[0], 'pub',
                                    "H_%s" % client]))
    tempdir = tempfile.mkdtemp()
    temploc = os.path.join(tempdir, hostkey)
    cmd = ["ssh-keygen", "-q", "-f", temploc, "-N", "", "-t", keytype,
           "-C", "root@%s" % client]
    self.debug_log("SSHbase: Running: %s" % " ".join(cmd))
    result = self.cmd.run(cmd)
    if not result.success:
        raise PluginExecutionError("SSHbase: Error running ssh-keygen: %s"
                                   % result.error)
    if self.passphrase:
        # store the private key encrypted as <name>.crypt
        self.debug_log("SSHbase: Encrypting private key for %s" %
                       fileloc)
        try:
            data = ssl_encrypt(open(temploc).read(), self.passphrase)
        except IOError:
            raise PluginExecutionError("Unable to read temporary SSH key: "
                                       "%s" % sys.exc_info()[1])
        except EVPError:
            raise PluginExecutionError("Unable to encrypt SSH key: %s" %
                                       sys.exc_info()[1])
        try:
            open("%s.crypt" % fileloc, "wb").write(data)
        except IOError:
            raise PluginExecutionError("Unable to write encrypted SSH "
                                       "key: %s" % sys.exc_info()[1])
    try:
        # the plaintext private key is only kept when no passphrase
        # is configured; the public key is always copied
        if not self.passphrase:
            shutil.copy(temploc, fileloc)
        shutil.copy("%s.pub" % temploc, publoc)
    except IOError:
        raise PluginExecutionError("Unable to copy temporary SSH key: %s"
                                   % sys.exc_info()[1])
    try:
        os.unlink(temploc)
        os.unlink("%s.pub" % temploc)
        os.rmdir(tempdir)
    except OSError:
        raise PluginExecutionError("Failed to unlink temporary ssh keys: "
                                   "%s" % sys.exc_info()[1])
def handle_event(self, event):
    """ Decrypt ``.crypt`` files after the base class loads them.

    :param event: the FAM event for this file
    :raises: PluginExecutionError if decryption fails
    """
    Bcfg2.Server.Plugin.SpecificData.handle_event(self, event)
    # only files with the .crypt suffix are encrypted
    if not event.filename.endswith(".crypt"):
        return
    if self.data is None:
        return
    # todo: let the user specify a passphrase by name
    try:
        self.data = bruteforce_decrypt(self.data)
    except EVPError:
        raise PluginExecutionError("Failed to decrypt %s" % self.name)
def write(self):
    """ Write the data in this data structure back to the property
    file.  This public method checks that writing is enabled and
    that the data validates, then delegates to :func:`_write`.

    :returns: whatever :func:`_write` returns
    :raises: PluginExecutionError if writes are disabled, validation
             fails, or the file cannot be written
    """
    if not Bcfg2.Options.setup.writes_enabled:
        raise PluginExecutionError("Properties files write-back is "
                                   "disabled in the configuration")
    try:
        self.validate_data()
    except PluginExecutionError:
        msg = "Cannot write %s: %s" % (self.name, sys.exc_info()[1])
        LOGGER.error(msg)
        raise PluginExecutionError(msg)
    try:
        return self._write()
    except IOError:
        msg = "Failed to write %s: %s" % (self.name, sys.exc_info()[1])
        LOGGER.error(msg)
        raise PluginExecutionError(msg)