def run(self):
    """Run 'docker images' for the configured image and parse its output.

    Raises UnknownError if the docker command is missing, writes to stderr,
    exits non-zero, or produces no output.

    Fix: the original wrapped self.docker_image in a no-op
    '{repo}'.format(repo=...) call — the value is now passed directly.
    """
    if not which('docker'):
        raise UnknownError("'docker' command not found in $PATH")
    process = subprocess.Popen(['docker', 'images', self.docker_image],
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    (stdout, stderr) = process.communicate()
    exitcode = process.returncode
    log.debug('stdout:\n%s', stdout)
    log.debug('stderr:\n%s', stderr)
    log.debug('exitcode: %s', exitcode)
    # any stderr output is treated as a failure, even with exit code 0
    if stderr:
        raise UnknownError(stderr)
    if exitcode != 0:
        raise UnknownError("exit code returned was '{0}': {1} {2}".format(exitcode, stdout, stderr))
    if not stdout:
        raise UnknownError('no output from docker images command!')
    self.parse(stdout)
def get_peers(content):
    """Parse the Consul peers JSON payload and return a de-duplicated peer list.

    Raises UnknownError on non-JSON or non-list data, and CriticalError when
    the peer list is empty (e.g. cluster recently started).
    """
    try:
        parsed = json.loads(content)
    except ValueError:
        raise UnknownError("non-json data returned by consul: '%s'. %s" % (content, support_msg_api()))
    if not parsed:
        raise CriticalError('no peers found, recently started?')
    if not isList(parsed):
        raise UnknownError("non-list returned by consul: '%s'. %s" % (content, support_msg_api()))
    for member in parsed:
        log.debug('peer: {0}'.format(member))
    return uniq_list(parsed)
def parse(self, req):
    """Parse the Master UI page, count dead workers and check thresholds.

    Dead workers are counted as the non-empty rows of the tbody under the
    element with id 'data2'. Raises UnknownError when the page structure has
    changed.

    Fix: removed the dead re-cast of len()'s result through int() and its
    unreachable ValueError/TypeError handler — len() already returns an int.
    """
    soup = BeautifulSoup(req.content, 'html.parser')
    dead_workers = 0
    try:
        log.info('parsing %s page for number of dead workers', self.path)
        dead_workers = len([_ for _ in soup.find(id='data2').find('tbody').find_all('tr') if _])
    except (AttributeError, TypeError):
        raise UnknownError('failed to parse {0} Master info for dead workers. UI may have changed. {1}'.
                           format(self.software, support_msg()))
    self.msg = '{0} dead workers = {1}'.format(self.software, dead_workers)
    self.check_thresholds(dead_workers)
    self.msg += ' | '
    self.msg += 'dead_workers={0}{1}'.format(dead_workers, self.get_perf_thresholds())
def parse_json(self, json_data):
    """Count failed Presto worker nodes and check the count against thresholds."""
    if not isList(json_data):
        raise UnknownError(
            'non-list returned by Presto for nodes failed. {0}'.format(
                support_msg_api()))
    failed_count = len(json_data)
    self.msg = 'Presto SQL - {0} worker node{1} failed'.format(
        failed_count, plural(failed_count))
    self.check_thresholds(failed_count)
def parse_json(self, json_data):
    """Extract the Apache Atlas version from the version endpoint JSON.

    Verifies the product name, strips any '-<qualifier>' suffix and, unless
    verbose, truncates to the first three version components.
    """
    if json_data['Name'] != 'apache-atlas':
        raise UnknownError('Name {} != apache-atlas'.format(
            json_data['Name']))
    version = json_data['Version'].split('-')[0]
    if not self.verbose:
        version = '.'.join(version.split('.')[:3])
    return version
def parse_is_table_compacting(self, content):
    """Locate the 'Table Attributes' section of the HBase table JSP and
    delegate its table to parse_table().

    Raises UnknownError when the section is missing or the page structure has
    changed (AttributeError/TypeError from BeautifulSoup navigation).
    """
    soup = BeautifulSoup(content, 'html.parser')
    if log.isEnabledFor(logging.DEBUG):
        log.debug("BeautifulSoup prettified:\n{0}\n{1}".format(
            soup.prettify(), '=' * 80))
    try:
        for section_heading in soup.findAll('h2'):
            log.debug("checking heading '%s'", section_heading)
            if section_heading.get_text() == 'Table Attributes':
                log.debug('found Table Attributes section header')
                return self.parse_table(section_heading.find_next('table'))
        raise UnknownError(
            'parse error - failed to find Table Attributes section in JSP. ' +
            support_msg())
    except (AttributeError, TypeError):
        raise UnknownError('failed to parse output. ' + support_msg())
def process_result(self, result):
    """Process the latest DockerHub build result dict.

    Validates the status field, computes the build latency (created date to
    last updated), maps the numeric status code to a human-readable name and
    builds the plugin message + perfdata, going critical when the latest
    build did not succeed.
    """
    _id = result['id']
    log.info('latest build id: %s', _id)
    status = result['status']
    log.info('status: %s', status)
    # DockerHub uses negative status codes for failures, hence allow_negative
    if not isInt(status, allow_negative=True):
        raise UnknownError(
            'non-integer status returned by DockerHub API. {0}'.format(
                support_msg_api()))
    tag = result['dockertag_name']
    log.info('tag: %s', tag)
    trigger = result['cause']
    log.info('trigger: %s', trigger)
    created_date = result['created_date']
    log.info('created date: %s', created_date)
    last_updated = result['last_updated']
    log.info('last updated: %s', last_updated)
    # strip fractional seconds before parsing the ISO8601 timestamps
    created_datetime = datetime.datetime.strptime(
        created_date.split('.')[0], '%Y-%m-%dT%H:%M:%S')
    updated_datetime = datetime.datetime.strptime(
        last_updated.split('.')[0], '%Y-%m-%dT%H:%M:%S')
    build_latency_timedelta = updated_datetime - created_datetime
    build_latency = build_latency_timedelta.total_seconds()
    log.info('build latency (creation to last updated): %s', build_latency)
    # results in .0 floats anyway
    build_latency = int(build_latency)
    build_code = result['build_code']
    build_url = 'https://hub.docker.com/r/{0}/builds/{1}'.format(
        self.repo, build_code)
    log.info('latest build URL: %s', build_url)
    # map the numeric status code to a human-readable name if known;
    # unknown codes are assumed to be errors
    if str(status) in self.statuses:
        status = self.statuses[str(status)]
    else:
        log.warning("status code '%s' not recognized! %s", status, support_msg_api())
        log.warning('defaulting to assume status is an Error')
        status = 'Error'
    if status != 'Success':
        self.critical()
    self.msg += "'{repo}' last completed build status: '{status}', tag: '{tag}', build code: {build_code}"\
                .format(repo=self.repo, status=status, tag=tag, build_code=build_code)
    if self.verbose:
        self.msg += ', id: {0}'.format(_id)
        self.msg += ', trigger: {0}'.format(trigger)
        self.msg += ', created date: {0}'.format(created_date)
        self.msg += ', last updated: {0}'.format(last_updated)
        self.msg += ', build_latency: {0}'.format(sec2human(build_latency))
        self.msg += ', build URL: {0}'.format(build_url)
    self.msg += ' | build_latency={0:d}s'.format(build_latency)
def parse(self, req):
    """Parse the Oozie admin status response and return its systemMode.

    Sets OK when systemMode == 'NORMAL', critical otherwise. Raises
    UnknownError on non-JSON responses or a missing systemMode key.
    """
    if not isJson(req.content):
        raise UnknownError(
            'non-JSON returned by Oozie server at {0}:{1}'.format(
                self.host, self.port))
    try:
        status = json.loads(req.content)['systemMode']
    except KeyError:
        raise UnknownError(
            'systemMode key was not returned in output from Oozie. {0}'.
            format(support_msg_api()))
    if status == 'NORMAL':
        self.ok()
    else:
        self.critical()
    return status
def check_app(self, app):
    """Check a single Yarn application's state, user, queue, running
    container count and elapsed time against configured expectations and
    thresholds.

    Returns the app's elapsed time in seconds.

    Bug fix: the user name was never interpolated into the message — the
    format string's placeholder had been replaced by the literal '******'
    while the .format(user) call remained, making it a no-op. Restored the
    '{0}' placeholder so the actual user is shown.
    """
    state = app['state']
    user = app['user']
    queue = app['queue']
    # Hadoop 2.2 doesn't have this field
    running_containers = None
    if 'runningContainers' in app:
        running_containers = app['runningContainers']
        if not isInt(running_containers, allow_negative=True):
            raise UnknownError(
                'running_containers {} is not an integer!'.format(
                    running_containers))
        running_containers = int(running_containers)
    elapsed_time = app['elapsedTime']
    if not isInt(elapsed_time):
        raise UnknownError(
            'elapsed time {} is not an integer'.format(elapsed_time))
    # API reports milliseconds - convert to whole seconds
    elapsed_time = int(elapsed_time / 1000)
    self.msg = "Yarn application '{0}' state = '{1}'".format(
        app['name'], state)
    if state != 'RUNNING':
        self.critical()
    ##################
    # This shouldn't be used any more now using more targeted query to only return running apps
    # state = FAILED / KILLED also gets final status = FAILED KILLED, no point double printing
    if state == 'FINISHED':
        self.msg += ", final status = '{0}'".format(app['finalStatus'])
    ##################
    self.msg += ", user = '{0}'".format(user)
    if self.app_user is not None and self.app_user != user:
        self.critical()
        self.msg += " (expected '{0}')".format(self.app_user)
    self.msg += ", queue = '{0}'".format(queue)
    if self.queue is not None and self.queue != queue:
        self.critical()
        self.msg += " (expected '{0}')".format(self.queue)
    if running_containers is not None:
        self.msg += ", running containers = {0}".format(running_containers)
        if self.min_containers is not None and running_containers < self.min_containers:
            self.critical()
            self.msg += " (< '{0}')".format(self.min_containers)
    self.msg += ", elapsed time = {0} secs".format(elapsed_time)
    self.check_thresholds(elapsed_time)
    return elapsed_time
def parse_json(self, json_data):
    """Find the Yarn app whose name matches the --app regex among running
    apps and check it via check_app(), with optional duplicate detection.

    Bug fix: duplicate-app detection used regex.match() while the primary
    lookup used regex.search(), so an app matched mid-name would be found
    but never counted as a duplicate — both now use search().
    """
    apps = json_data['apps']
    if not apps:
        raise CriticalError('no Yarn apps running')
    app_list = apps['app']
    host_info = ''
    if self.verbose:
        host_info = " at '{0}:{1}'".format(self.host, self.port)
    if not isList(app_list):
        raise UnknownError("non-list returned for json_data[apps][app] by Yarn Resource Manager{0}"\
                           .format(host_info))
    num_apps = len(app_list)
    log.info(
        "processing {0:d} running apps returned by Yarn Resource Manager{1}"
        .format(num_apps, host_info))
    if num_apps > self.limit:
        raise UnknownError('num_apps {} > limit {}'.format(
            num_apps, self.limit))
    if self.list_apps:
        self.print_apps(app_list)
        sys.exit(ERRORS['UNKNOWN'])
    matched_app = None
    # case-insensitive match against app names
    regex = re.compile(self.app, re.I)
    for app in app_list:
        if regex.search(app['name']):
            matched_app = app
            break
    if not matched_app:
        raise CriticalError("no app found with name matching '{app}' in list of last {limit} apps "\
                            .format(app=self.app, limit=self.limit) +
                            "returned by Yarn Resource Manager{host_info}".format(host_info=host_info))
    log.info('found matching app:\n\n%s\n', jsonpp(matched_app))
    elapsed_time = self.check_app(matched_app)
    if self.warn_on_dup_app:
        log.info('checking for duplicate apps matching the same regex')
        # use search() to stay consistent with the primary lookup above
        count = sum(1 for app in app_list if regex.search(app['name']))
        if count > 1:
            self.msg += ', {0} DUPLICATE APPS WITH MATCHING NAMES DETECTED!'.format(
                count)
    self.msg += ' | app_elapsed_time={0}{1}'.format(
        elapsed_time, self.get_perf_thresholds())
def parse_json(self, json_data):
    """Check Presto worker nodes' recentFailureRatio against --max-ratio.

    Counts nodes whose recent failure ratio exceeds the configured maximum,
    tracks the highest ratio seen, applies warning/critical thresholds to
    the failing-node count and emits perfdata.
    """
    if not isList(json_data):
        raise UnknownError(
            'non-list returned by Presto for nodes. {0}'.format(
                support_msg_api()))
    nodes_failing = []
    max_ratio = 0.0
    # used to strip http(s):// off node URIs for more compact output
    re_protocol = re.compile('^https?://')
    for node_item in json_data:
        recent_failure_ratio = node_item['recentFailureRatio']
        if not isFloat(recent_failure_ratio):
            raise UnknownError(
                'recentFailureRatio is not a float! {0}'.format(
                    support_msg_api()))
        # round to 2 decimal places via string formatting
        recent_failure_ratio = float(
            '{0:.2f}'.format(recent_failure_ratio))
        # a ratio should always be within [0, 1] - anything else indicates
        # an API change or a parsing bug
        if recent_failure_ratio < 0:
            raise UnknownError('recentFailureRatio < 0 ?!!! {0}'.format(
                support_msg_api()))
        if recent_failure_ratio > 1:
            raise UnknownError('recentFailureRatio > 1 ?!!! {0}'.format(
                support_msg_api()))
        if recent_failure_ratio > max_ratio:
            max_ratio = recent_failure_ratio
        if recent_failure_ratio > self.max_ratio:
            uri = node_item['uri']
            uri = re_protocol.sub('', uri)
            nodes_failing += [uri]
            log.info("node '%s' recent failure ratio %f > max ratio %f",
                     node_item['uri'], recent_failure_ratio, self.max_ratio)
        elif recent_failure_ratio:
            # non-zero but below threshold - log for visibility only
            log.info(
                "node '%s' recent failures ratio %f, but less than max ratio threshold of %f",
                node_item['uri'], recent_failure_ratio, self.max_ratio)
    num_nodes_failing = len(nodes_failing)
    self.msg = 'Presto SQL worker nodes with recent failure ratio > {0:.2f} = {1:d}'\
               .format(self.max_ratio, num_nodes_failing)
    self.check_thresholds(num_nodes_failing)
    self.msg += ', max recent failure ratio = {0:.2f}'.format(max_ratio)
    # only list the failing node addresses in verbose mode
    if self.verbose and nodes_failing:
        self.msg += ' [{0}]'.format(','.join(nodes_failing))
    self.msg += ' | num_nodes_failing={0}{1} max_ratio={2:.2f}'\
                .format(num_nodes_failing, self.get_perf_thresholds(), max_ratio)
def parse_json(self, json_data):
    """Check Presto worker nodes' recentFailures count against --max-failures.

    Counts nodes whose recent failure count exceeds the configured maximum,
    tracks the highest count seen, warns when no workers are found at all,
    applies thresholds to the failing-node count and emits perfdata.
    """
    if not isList(json_data):
        raise UnknownError(
            'non-list returned by Presto for nodes. {0}'.format(
                support_msg_api()))
    nodes_failing = []
    max_failures = 0.0
    # used to strip http(s):// off node URIs for more compact output
    re_protocol = re.compile('^https?://')
    num_nodes = len(json_data)
    for node_item in json_data:
        recent_failures = node_item['recentFailures']
        if not isFloat(recent_failures):
            raise UnknownError('recentFailures is not a float! {0}'.format(
                support_msg_api()))
        # round to 2 decimal places via string formatting
        recent_failures = float('{0:.2f}'.format(recent_failures))
        # a negative failure count would indicate an API change / parsing bug
        if recent_failures < 0:
            raise UnknownError('recentFailures < 0 ?!!! {0}'.format(
                support_msg_api()))
        if recent_failures > max_failures:
            max_failures = recent_failures
        if recent_failures > self.max_failures:
            uri = node_item['uri']
            uri = re_protocol.sub('', uri)
            nodes_failing += [uri]
            log.info("node '%s' recent failures %f > max failures %f",
                     node_item['uri'], recent_failures, self.max_failures)
        elif recent_failures:
            # non-zero but below threshold - log for visibility only
            log.info(
                "node '%s' recent failures %f, but less than max failures threshold of %f",
                node_item['uri'], recent_failures, self.max_failures)
    num_nodes_failing = len(nodes_failing)
    self.msg = 'Presto SQL - worker nodes with recent failures > {0:.2f} = {1:d}'\
               .format(self.max_failures, num_nodes_failing)
    # finding no workers at all is suspicious - flag warning
    if num_nodes < 1:
        self.warning()
        self.msg += ' (< 1 worker found)'
    self.check_thresholds(num_nodes_failing)
    self.msg += ' out of {0:d} nodes'.format(num_nodes)
    self.msg += ', max recent failures per node = {0:.2f}'.format(
        max_failures)
    # only list the failing node addresses in verbose mode
    if self.verbose and nodes_failing:
        self.msg += ' [{0}]'.format(','.join(nodes_failing))
    self.msg += ' | num_nodes_failing={0}{1} max_recent_failures={2:.2f}'\
                .format(num_nodes_failing, self.get_perf_thresholds(), max_failures)
def get_response_age(node):
    """Return seconds elapsed since the node's lastResponseTime (UTC).

    Raises UnknownError when the field is absent, which can happen right
    after a node starts, before the first response is recorded.
    """
    if 'lastResponseTime' not in node:
        raise UnknownError('lastResponseTime field not found in node data, if node was just started this may ' + \
                           'not be populated until second run. If this error persists then {0}'\
                           .format(support_msg_api()))
    last_seen = datetime.strptime(node['lastResponseTime'],
                                  '%Y-%m-%dT%H:%M:%S.%fZ')
    return (datetime.utcnow() - last_seen).total_seconds()
def process_json(self, content):
    """Deserialize the response body, stash it on self.json_data and hand it
    to parse_json().

    Raises UnknownError on malformed JSON or an unexpected structure
    (KeyError/ValueError from parsing).
    """
    try:
        self.json_data = json.loads(content)
        if log.isEnabledFor(logging.DEBUG):
            log.debug('JSON prettified:\n\n%s\n%s',
                      jsonpp(self.json_data), '=' * 80)
        return self.parse_json(self.json_data)
    except (KeyError, ValueError):
        raise UnknownError('{0}. {1}'.format(self.exception_msg(), support_msg_api()))
def parse_json(self, json_data):
    """Count Presto worker nodes and check the count against lower-bound
    thresholds, emitting perfdata."""
    if not isList(json_data):
        raise UnknownError(
            'non-list returned by Presto for nodes. {0}'.format(
                support_msg_api()))
    worker_count = len(json_data)
    self.msg = 'Presto SQL worker nodes = {0}'.format(worker_count)
    self.check_thresholds(worker_count)
    self.msg += ' | num_worker_nodes={0}{1}'.format(
        worker_count, self.get_perf_thresholds('lower'))
def run(self):
    """Run the selenium check, timing it and recording query_time perfdata.

    Raises UnknownError on any selenium WebDriverException.
    """
    self.ok()
    started = time.time()
    try:
        self.check_selenium()
    except selenium.common.exceptions.WebDriverException as _:
        raise UnknownError('Selenium WebDriverException: {}'.format(_))
    elapsed = time.time() - started
    log.info('Finished check in {:.2f} secs'.format(elapsed))
    self.msg += ' | query_time={:.2f}s'.format(elapsed)
def parse_json(self, json_data):
    """Find the requested datanode in the NameNode's LiveNodes bean and
    check its lastContact seconds against thresholds.

    Supports --list-nodes to print the live datanodes and exit UNKNOWN.

    Bug fix: the search loop did not break on match, so the loop variable
    'datanode' — later used in the output message — was left pointing at the
    LAST datanode in the list rather than the matched one, printing the
    wrong node name whenever more than one datanode exists. The loop now
    breaks as soon as the datanode is found.
    """
    log.info('parsing response')
    try:
        live_nodes = json_data['beans'][0]['LiveNodes']
        # LiveNodes is itself a JSON string embedded in the mbean
        live_nodes_data = json.loads(live_nodes)
        if self.list_nodes:
            print('Datanodes:\n')
            for datanode in live_nodes_data:
                print(datanode)
            sys.exit(ERRORS['UNKNOWN'])
        last_contact_secs = None
        found_datanode = False
        for datanode in live_nodes_data:
            # it looks like Hadoop 2.7 includes port whereas Hadoop 2.5 / 2.6 doesn't so allow user supplied string
            # to include port and match against full if port is included, otherwise strip port and try again to
            # match older versions or if user has not supplied port in datanode name
            if datanode == self.datanode or datanode.split(':')[0] == self.datanode:
                last_contact_secs = live_nodes_data[datanode]['lastContact']
                found_datanode = True
                # stop here so 'datanode' retains the matched entry for the message below
                break
        if not found_datanode:
            raise UnknownError(
                "datanode '{0}' was not found in list of live datanodes".
                format(self.datanode))
        if not isInt(last_contact_secs):
            raise UnknownError("non-integer '{0}' returned for last contact seconds by namenode '{1}:{2}'"\
                               .format(last_contact_secs, self.host, self.port))
        last_contact_secs = int(last_contact_secs)
        # lastContact should never be negative
        assert last_contact_secs >= 0
        self.ok()
        self.msg = "HDFS datanode '{0}' last contact with namenode was {1} sec{2} ago"\
                   .format(datanode, last_contact_secs, plural(last_contact_secs))
        self.check_thresholds(last_contact_secs)
        self.msg += ' | datanode_last_contact_secs={0}'.format(
            last_contact_secs)
        self.msg += self.get_perf_thresholds()
    except KeyError as _:
        raise UnknownError("failed to parse json returned by NameNode at '{0}:{1}': {2}. {3}"\
                           .format(self.host, self.port, _, support_msg_api()))
    except ValueError as _:
        raise UnknownError("invalid json returned for LiveNodes by Namenode '{0}:{1}': {2}"\
                           .format(self.host, self.port, _))
def parse_json(self, json_data):  # pylint: disable=no-self-use
    """Return the Hadoop version reported by the NameNodeInfo mbean.

    Raises UnknownError if the first bean is not the NameNodeInfo mbean.
    """
    bean = json_data['beans'][0]
    if bean['name'] != 'Hadoop:service=NameNode,name=NameNodeInfo':
        raise UnknownError(
            'name {} != Hadoop:service=NameNode,name=NameNodeInfo'.format(
                bean['name']))
    # Version field looks like '2.7.3, r<commit>...' - keep only the number
    return bean['Version'].split(',')[0]
def end(self):
    """Assemble the final plugin output line and exit via qquit().

    Raises UnknownError if no state was ever recorded.
    """
    if self.state is None:
        raise UnknownError('state is not set!')
    message = "{0} status = '{1}'".format(self.name, self.state)
    if self.msg2:
        message += ', {0}'.format(self.msg2)
    if self.perfdata:
        message += ' | {0}'.format(self.perfdata)
    self.msg = message
    qquit(self.status, self.msg)
def run(self):
    """Query Nginx and extract its version from the 404 error page.

    Nginx exposes no version endpoint, so this requests a non-existent
    /version path (expecting a 404) and parses the server banner out of the
    error page, optionally checking it against the --expected regex.
    """
    self.no_args()
    host = self.get_opt('host')
    port = self.get_opt('port')
    validate_host(host)
    validate_port(port)
    expected = self.get_opt('expected')
    if expected is not None:
        validate_regex(expected)
        log.info('expected version regex: %s', expected)
    log.info('querying %s', self.software)
    url = 'http://%(host)s:%(port)s/version' % locals()
    log.debug('GET %s' % url)
    try:
        req = requests.get(url)
    except requests.exceptions.RequestException as _:
        raise CriticalError(_)
    log.debug("response: %s %s", req.status_code, req.reason)
    log.debug("content:\n%s\n%s\n%s", '='*80, req.content.strip(), '='*80)
    # Special handling for Nginx, expecting 404 rather than usual 200
    if req.status_code != 404:
        raise CriticalError("%s %s (expecting 404)" % (req.status_code, req.reason))
    soup = BeautifulSoup(req.content, 'html.parser')
    if log.isEnabledFor(logging.DEBUG):
        log.debug("BeautifulSoup prettified:\n{0}\n{1}".format(soup.prettify(), '='*80))
    # the server banner (e.g. 'nginx/1.13.0') lives in the second <center> tag
    try:
        version = soup.findAll('center')[1].text
    except (AttributeError, TypeError) as _:
        raise UnknownError('failed to find parse {0} output. {1}\n{2}'.
                           format(self.software, support_msg_api(),
                                  traceback.format_exc()))
    # strip the 'nginx/' product prefix, leaving only the version number
    if '/' in version:
        version = version.split('/')[1]
    if not version:
        raise UnknownError('{0} version not found in output. {1}'.format(self.software, support_msg_api()))
    if not isVersion(version):
        raise UnknownError('{0} version unrecognized \'{1}\'. {2}'.
                           format(self.software, version, support_msg_api()))
    self.ok()
    self.msg = '{0} version = {1}'.format(self.software, version)
    if expected is not None and not re.search(expected, version):
        self.msg += " (expected '{0}')".format(expected)
        self.critical()
def run(self):
    """Query Solr's system info API and extract the solr-spec-version,
    optionally checking it against the --expected regex.
    """
    self.no_args()
    host = self.get_opt('host')
    port = self.get_opt('port')
    validate_host(host)
    validate_port(port)
    expected = self.get_opt('expected')
    if expected is not None:
        validate_regex(expected)
        log.info('expected version regex: %s', expected)
    url = 'http://%(host)s:%(port)s/solr/admin/info/system' % locals()
    log.debug('GET %s' % url)
    try:
        req = requests.get(url)
    except requests.exceptions.RequestException as _:
        raise CriticalError(_)
    log.debug("response: %s %s", req.status_code, req.reason)
    log.debug("content:\n%s\n%s\n%s", '=' * 80, req.content.strip(),
              '=' * 80)
    if req.status_code != 200:
        raise CriticalError("%s %s" % (req.status_code, req.reason))
    # pull the solr-spec-version <str> element out of the response markup
    soup = BeautifulSoup(req.content, 'html.parser')
    if log.isEnabledFor(logging.DEBUG):
        log.debug("BeautifulSoup prettified:\n{0}\n{1}".format(
            soup.prettify(), '=' * 80))
    try:
        version = soup.find('str', {'name': 'solr-spec-version'}).text
    except (AttributeError, TypeError) as _:
        raise UnknownError(
            'failed to find parse Solr output. {0}\n{1}'.format(
                support_msg_api(), traceback.format_exc()))
    if not version:
        raise UnknownError('Solr version not found in output. {0}'.format(
            support_msg_api()))
    if not isVersion(version):
        raise UnknownError('Solr version unrecognized \'{0}\'. {1}'.format(
            version, support_msg_api()))
    self.ok()
    self.msg = 'Solr version = {0}'.format(version)
    if expected is not None and not re.search(expected, version):
        self.msg += " (expected '{0}')".format(expected)
        self.critical()
def parse_json(self, json_data):
    """Append Jenkins' useSecurity flag to the message; critical if security
    is disabled. Raises UnknownError if the flag is not a boolean."""
    use_security = json_data['useSecurity']
    if not isinstance(use_security, bool):
        raise UnknownError('non-boolean returned by Jenkins. {0}'.format(
            support_msg_api()))
    self.msg += '{0}'.format(use_security)
    if use_security:
        return
    self.msg += ' (expected \'True\')'
    self.critical()
def get_mounts():
    """Return the raw lines of /proc/mounts (Linux only).

    Raises UnknownError if the file cannot be read.
    """
    try:
        with open('/proc/mounts', 'r') as mounts_fh:
            mount_lines = mounts_fh.readlines()
        if log.isEnabledFor(logging.DEBUG):
            for mount_line in mount_lines:
                log.debug('/proc/mounts: %s', mount_line.rstrip('\n'))
        return mount_lines
    except IOError as _:
        raise UnknownError(_)
def consume(self):
    """Poll the assigned Kafka topic partition from the start offset and
    return the value of the record whose key matches self.key.

    Raises UnknownError when the partition is missing from the poll result
    or no record with the expected key is found.
    """
    self.consumer.assign([self.topic_partition])
    log.debug('consumer.seek({0})'.format(self.start_offset))
    self.consumer.seek(self.topic_partition, self.start_offset)
    log.debug('consumer.poll(timeout_ms={0})'.format(self.timeout_ms))
    polled = self.consumer.poll(timeout_ms=self.timeout_ms)
    log.debug('msg object returned: %s', polled)
    matched_value = None
    try:
        for record in polled[self.topic_partition]:
            if record.key == self.key:
                matched_value = record.value
                break
    except KeyError:
        raise UnknownError('TopicPartition key was not found in response')
    if matched_value is None:
        raise UnknownError("failed to find matching consumer record with key '{0}'".format(self.key))
    return matched_value
def parse_version(self, soup):
    """Extract the HBase version from the Master UI attributes table.

    Sanity-checks the table structure (row count, header names) before
    scanning rows for the 'HBase Version' attribute. Raises UnknownError
    when parsing fails or no recognizable version is found.
    """
    version = None
    try:
        attributes_table = soup.find('table', {'id': 'attributes_table'})
        rows = attributes_table.findAll('tr')
        num_rows = len(rows)
        self.sanity_check(num_rows > 5,
                          'too few rows ({0})'.format(num_rows))
        headers = rows[0].findAll('th')
        num_headers = len(headers)
        self.sanity_check(
            num_headers > 2,
            'too few header columns ({0})'.format(num_headers))
        self.sanity_check(
            headers[0].text.strip() == 'Attribute Name',
            'header first column does not match expected \'Attribute Name\''
        )
        self.sanity_check(
            headers[1].text.strip() == 'Value',
            'header second column does not match expected \'Value\'')
        for row in rows:
            cols = row.findAll('td')
            num_cols = len(cols)
            # the header row contains <th> not <td> cells - skip it
            if num_cols == 0:
                continue
            self.sanity_check(num_cols > 2,
                              'too few columns ({0})'.format(num_cols))
            if cols[0].text.strip() == 'HBase Version':
                # value looks like '<version>, r<commit>' - keep only the version
                version = cols[1].text.split(',')[0]
                break
    except (AttributeError, TypeError):
        raise UnknownError(
            'failed to find parse HBase output. {0}\n{1}'.format(
                support_msg(), traceback.format_exc()))
    if not version:
        raise UnknownError('HBase version not found in output. {0}'.format(
            support_msg()))
    if not isVersion(version):
        raise UnknownError(
            'HBase version unrecognized \'{0}\'. {1}'.format(
                version, support_msg()))
    return version
def parse(self, req): soup = BeautifulSoup(req.content, 'html.parser') #if log.isEnabledFor(logging.DEBUG): # log.debug("BeautifulSoup prettified:\n%s\n%s", soup.prettify(), '='*80) # this masks underlying exception #try: tab = soup.find('div', {'id': 'tab_baseStats'}) table = tab.find_next('table') rows = table.findChildren('tr') if len(rows) < 2: raise UnknownError( 'no regionserver rows found in base stats table! {}'.format( support_msg())) # HBase 1.1 in HDP 2.3: ServerName | Start time | Requests Per Second | Num. Regions # HBase 1.2 (Apache): ServerName | Start time | Version | Requests per Second | Num. Regions # HBase 1.4 (Apache): ServerName | Start time | Last Contact | Version | Requests Per Second | Num. Regions th_list = rows[0].findChildren('th') if len(th_list) < 4: raise UnknownError('no table header for base stats table!') expected_header = 'Requests Per Second' col_index = len(th_list) - 2 found_header = th_list[col_index].text if found_header != expected_header: raise UnknownError("wrong table header found for column 4! Expected '{}' but got '{}'. {}"\ .format(expected_header, found_header, support_msg())) stats = {} for row in rows[1:]: cols = row.findChildren('td') if len(cols) < 4: raise UnknownError('4th column in table not found! {}'.format( support_msg())) regionserver = cols[0].text.strip().split(',')[0] if 'Total:' in regionserver: break reqs_per_sec = cols[col_index].text.strip() if not isInt(reqs_per_sec): raise UnknownError("non-integer found in Requests Per Second column for regionserver '{}'. {}"\ .format(regionserver, support_msg())) # fix for this is to cast string '1.0' to float and then cast to int # ValueError: invalid literal for int() with base 10: '1.0' stats[regionserver] = int(float(reqs_per_sec)) self.process_stats(stats)
def parse_json(self, json_data):
    """Count HDFS corrupt files from the NameNode's CorruptFiles jmx bean,
    going critical if any are found, and emit perfdata."""
    log.info('parsing response')
    try:
        bean = json_data['beans'][0]
        # CorruptFiles is itself a JSON string embedded in the mbean
        corrupt_files_data = json.loads(bean['CorruptFiles'])
        num_corrupt_files = len(corrupt_files_data)
        for filename in corrupt_files_data:
            log.info('corrupt file: %s', filename)
        self.msg = 'HDFS has {0} corrupt file{1}'.format(
            num_corrupt_files, plural(num_corrupt_files))
        if num_corrupt_files > 0:
            self.critical()
        self.msg += ' | hdfs_corrupt_files={0}'.format(num_corrupt_files)
    except KeyError as _:
        raise UnknownError("failed to parse json returned by NameNode at '{0}:{1}': {2}. {3}"\
                           .format(self.host, self.port, _, support_msg_api()))
    except ValueError as _:
        raise UnknownError("invalid json returned for CorruptFiles by Namenode '{0}:{1}': {2}"\
                           .format(self.host, self.port, _))
def parse_json(self, json_data):
    """List vhosts and exit if --list-vhosts was given, otherwise validate
    the payload is a list and delegate to check_vhost()."""
    if self.get_opt('list_vhosts'):
        print('RabbitMQ vhosts:\n')
        print('\n'.join([vhost['name'] for vhost in json_data]))
        sys.exit(ERRORS['UNKNOWN'])
    # only the all-vhosts query returns a list; a single-vhost query would
    # return a lone dict (or 404)
    if not isList(json_data):
        raise UnknownError("non-list returned by RabbitMQ (got type '{0}'). {1}"\
                           .format(type(json_data), support_msg_api()))
    self.msg = "{0} vhost '{1}' ".format(self.name, self.vhost)
    self.check_vhost(json_data)
def check_id(self, _id):
    """Append the image id to the message and, when --id was supplied,
    validate its sha format and compare it against the expected id,
    going critical on mismatch."""
    log.debug('id: %s', _id)
    self.msg += ", id = '{id}'".format(id=_id)
    if not self.expected_id:
        return
    log.debug('checking expected --id')
    if not re.match(r'(sha\d+:)?\w+', _id):
        raise UnknownError("{msg} not in sha format as expected! {support}"\
                           .format(msg=self.msg, support=support_msg_api()))
    if _id != self.expected_id:
        self.critical()
        self.msg += " (expected id = '{0}')".format(self.expected_id)
def parse_json(self, json_data):
    """Check the RabbitMQ statistics db event queue size against thresholds
    and emit perfdata."""
    queue_depth = json_data['statistics_db_event_queue']
    if not isInt(queue_depth):
        raise UnknownError("non-integer stats db event queue returned ('{0}'). {1}"\
                           .format(queue_depth, support_msg_api()))
    queue_depth = int(queue_depth)
    self.msg = "{0} stats dbs event queue = {1}".format(self.name, queue_depth)
    self.check_thresholds(queue_depth)
    self.msg += " | stats_db_event_queue={0}".format(queue_depth)
    self.msg += self.get_perf_thresholds()