def doWrite(self): """Write data out to the pipe.""" while self._data or self._data_queue: if not self._data: self._data = self._data_queue.popleft() log.trace("Writing Nagios command to fifo: %s", self._data) try: data_written = os.write(self._fd, self._data) except OSError, (errno, errmsg): if errno == 11: # EAGAIN, pause writing until next doWrite() return else: log.warn("Failed to write to nagios pipe: %s" % errmsg) self._reopen_file() return if len(self._data) != data_written: self._data = self._data[data_written:] return else: self._data = None
def _threaded_command(self, cmd_time, cmd_list, force=False):
    """Write out the temporary command file from a thread to avoid
    any momentary delays that may be caused by creating the file.

    The spool file is filled with the formatted commands and then
    handed to Nagios via a PROCESS_FILE command on the fifo writer.
    """
    spool_fd, spool_path = tempfile.mkstemp(dir=self.spool_dir)
    try:
        try:
            # Nagios must be able to read (and it will delete) the file.
            os.fchmod(spool_fd, 0644)
            for cmd in cmd_list:
                text = self._format_command(cmd_time, *cmd)
                log.trace("Writing Nagios command to spool: %s", text)
                os.write(spool_fd, text)
            # '1' tells Nagios to delete the spool file after processing.
            submit = self._format_command(cmd_time,
                    'PROCESS_FILE', spool_path, '1')
            if force:
                # Write synchronously from this thread (bypasses the
                # reactor, e.g. during shutdown).
                self.writer.write(submit)
                self.writer.doWrite()
            else:
                # Hand the submit command to the reactor thread; the
                # writer is not safe to call from other threads.
                reactor.callFromThread(self.writer.write, submit)
        except:
            # Don't leave an orphaned spool file behind on any failure;
            # re-raise so the caller sees the original error.
            os.unlink(spool_path)
            raise
    finally:
        os.close(spool_fd)
def register(self, task):
    """Register a top level Runnable to be run directly by the scheduler"""
    assert self._startup
    assert task not in self._group_index
    assert isinstance(task, Runnable)
    log.trace("Registering task %s", task)

    dependencies = task.getAllDependencies()

    # Existing groups that already own one of this task's dependencies
    # and repeat no less frequently than the task itself.
    candidates = set()
    for dep in dependencies:
        found = self._group_index.get(dep, None)
        if found and found.repeat <= task.repeat:
            candidates.add(found)

    # Every runnable whose group assignment must be (re)recorded.
    to_reindex = set(dependencies)
    to_reindex.add(task)

    if candidates:
        # Attach the task to one matching group and fold any other
        # matching groups into it.
        chosen = candidates.pop()
        chosen.addDependency(task)
        log.trace("Updated group %s", chosen)
        for merged in candidates:
            self._update_stats(merged, -1)
            self._registered.remove(merged)
            chosen.addDependencies(merged)
            to_reindex.update(merged.getAllDependencies())
            log.trace("Merged group %s", merged)
    else:
        # No compatible group exists yet; start a fresh one.
        chosen = RunnableGroup([task])
        self._update_stats(chosen)
        self._registered.add(chosen)
        log.trace("Created group %s", chosen)

    for runnable in to_reindex:
        if runnable not in self._group_index:
            self._update_stats(runnable)
        self._group_index[runnable] = chosen
def __init__(self, nagcat, conf): super(SSLMixin, self).__init__(nagcat, conf) if SSL is None: raise errors.InitError("pyOpenSSL is required for SSL support.") for opt in ('key', 'cert', 'cacert'): self.conf['ssl_'+opt] = conf.get('ssl_'+opt, None) key_type = str(conf.get('ssl_'+opt+'_type', '')) if not key_type or key_type.upper() == "PEM": key_type = crypto.FILETYPE_PEM elif key_type.upper() == "ASN1": key_type = crypto.FILETYPE_ASN1 else: raise errors.InitError("Invalid ssl_%s_type %r, " "must be 'PEM' or 'ASN1'" % (opt, key_type)) self.conf['ssl_%s_type'%opt] = key_type def maybe_read(key, private=False): # Only support PEM for now filetype = crypto.FILETYPE_PEM path = self.conf[key] filetype = self.conf[key+'_type'] if not path: return None log.debug("Loading %s from %s", key, path) try: fd = open(path) try: data = fd.read() finally: fd.close() except IOError, ex: raise errors.InitError("Failed to read %s file %s: %s" % (path, key, ex.strerror)) log.trace("Loaded %s:\n%s", key, data) if private: return crypto.load_privatekey(filetype, data) else: return crypto.load_certificate(filetype, data)
def register(self, task):
    """Register a top level Runnable to be run directly by the scheduler"""
    assert self._startup
    assert task not in self._group_index
    assert isinstance(task, Runnable)
    log.trace("Registering task %s", task)

    deps = task.getAllDependencies()

    # Collect every existing group that shares a dependency with this
    # task and runs on exactly the same repeat interval.
    matching = set()
    for dep in deps:
        for existing in self._group_index[dep]:
            if existing.repeat == task.repeat:
                matching.add(existing)

    # Runnables whose group membership must be recorded below.
    reindex = set(deps)
    reindex.add(task)

    if matching:
        # Add the task to one matching group, then collapse the other
        # matching groups into it.
        target = matching.pop()
        target.addDependency(task)
        log.trace("Updated group %s", target)
        for other in matching:
            self._update_stats(other, -1)
            self._registered.remove(other)
            target.addDependencies(other)
            reindex.update(other.getAllDependencies())
            log.trace("Merged group %s", other)
    else:
        # No group on this interval yet; create one.
        target = RunnableGroup([task], task.repeat)
        self._update_stats(target)
        self._registered.add(target)
        log.trace("Created group %s", target)

    for runnable in reindex:
        # If the group set is empty this runnable is new, count it.
        if not self._group_index[runnable]:
            self._update_stats(runnable)
        # Record the chosen group and drop the merged-away extras.
        self._group_index[runnable].add(target)
        self._group_index[runnable].difference_update(matching)
def __init__(self, nagcat, conf): BaseTest.__init__(self, conf) self._nagcat = nagcat self._test = conf.get('test', "") self._description = conf.get('description', self._test) self._documentation = conf.get('documentation', "") self._investigation = conf.get('investigation', "") self._priority = conf.get('priority', "") self._url = conf.get('url', "") self._subtests = {} # Special little value! # Mark this test as CRITICAL if it has been in WARNING # for too long. A value of 0 disables this check. self._warning_time_limit = util.Interval( conf.get('warning_time_limit', 0)) # If self._documentation is a list convert it to a string if isinstance(self._documentation, list): self._documentation = "\n".join(self._documentation) if isinstance(self._investigation, list): self._investigation = "\n".join(self._documentation) if self._priority: self._priority = "Priority: %s\n\n" % self._priority if conf['query.type'] == "compound": self._compound = True conf['query'].expand(recursive=False) self._return = conf.get('query.return', None) for name, qconf in conf['query'].iteritems(): if not isinstance(qconf, struct.Struct): continue self._addDefaults(qconf) self._subtests[name] = nagcat.new_query(qconf, qcls=query.FilteredQuery) self.addDependency(self._subtests[name]) if not self._subtests: raise errors.ConfigError(conf['query'], "compound query must have a sub-query") if self._return or len(self._subtests) > 1: if not self._return: raise errors.ConfigError(conf['query'], "return statement is required") # Convert $(subquery) to data['subquery'] self._return = re.sub("\\$\\(([^\\)]+)\\)", lambda m: "data['%s']" % m.group(1), self._return) test_values = {'NOW': util.MathString('9999')} for name in self._subtests: #XXX this test string isn't fool-proof but will mostly work test_values[name] = util.MathString('9999') try: log.trace("Testing expr %r with data=%r" % (self._return, test_values)) eval(self._return, {'data': test_values}) except SyntaxError, ex: raise 
errors.ConfigError(conf['query'], "Syntax error in return: %s" % ex) except KeyError, ex: raise errors.ConfigError(conf['query'], "Unknown sub-query in return: %s" % ex)