def run(self):
    """Validate Dockerfiles across git tags for every file/directory argument.

    Exits WARNING if an argument path is missing, CRITICAL if any
    Dockerfile fails validation.
    """
    if not self.args:
        self.usage('no Dockerfile / directory args given')
    args = uniq_list_ordered(self.args)
    self.tag_prefix = self.get_opt('tag_prefix')
    if self.tag_prefix is not None:
        validate_regex(self.tag_prefix, 'tag prefix')
        self.tag_prefix = re.compile(self.tag_prefix)
    # first pass: sanity-check all paths before doing any work
    for arg in args:
        if not os.path.exists(arg):
            print("'%s' not found" % arg)
            sys.exit(ERRORS['WARNING'])
        if os.path.isfile(arg):
            log_option('file', arg)
        elif os.path.isdir(arg):
            log_option('directory', arg)
        else:
            die("path '%s' could not be determined as either a file or directory" % arg)
    # second pass: run the actual per-tag Dockerfile checks
    for arg in args:
        self.check_git_tags_dockerfiles(arg)
    if self.failed:
        log.error('Dockerfile validation FAILED')
        sys.exit(ERRORS['CRITICAL'])
    log.info('Dockerfile validation SUCCEEDED')
def run(self):
    """Run 'consul version', extract the version string and compare it
    against the optional --expected regex, setting OK/CRITICAL status."""
    expected = self.get_opt('expected')
    if expected is not None:
        validate_regex(expected)
        log.info('expected version regex: %s', expected)
    cmd = 'consul version'
    log.debug('cmd: ' + cmd)
    proc = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    (stdout, _) = proc.communicate()
    log.debug('stdout: ' + str(stdout))
    returncode = proc.wait()
    log.debug('returncode: ' + str(returncode))
    # NOTE(review): on Python 3 Popen returns bytes, so "'Error' in stdout"
    # would raise TypeError — presumably this runs under Python 2; confirm
    if returncode != 0 or (stdout is not None and 'Error' in stdout):
        raise CriticalError('consul returncode: {0}, output: {1}'.format(
            returncode, stdout))
    version = None
    for line in str(stdout).split('\n'):
        match = self.version_regex.match(line)
        if match:
            version = match.group(1)
    if not version:
        raise UnknownError(
            'Consul version not found in output. Consul output may have changed. {0}'
            .format(support_msg()))
    if not isVersion(version):
        raise UnknownError(
            'Consul version unrecognized \'{0}\'. {1}'.format(version, support_msg()))
    self.ok()
    self.msg = 'Consul version = {0}'.format(version)
    if expected is not None and not re.search(expected, version):
        self.msg += " (expected '{0}')".format(expected)
        self.critical()
def run(self):
    """Run 'nodetool version', extract the Cassandra version and compare it
    against the optional --expected regex, setting OK/CRITICAL status."""
    expected = self.get_opt('expected')
    if expected is not None:
        validate_regex(expected)
        log.info('expected version regex: %s', expected)
    cmd = 'nodetool version'
    log.debug('cmd: ' + cmd)
    proc = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    (stdout, _) = proc.communicate()
    log.debug('stdout: ' + str(stdout))
    returncode = proc.wait()
    log.debug('returncode: ' + str(returncode))
    # NOTE(review): on Python 3 Popen returns bytes, so "'Error' in stdout"
    # would raise TypeError — presumably this runs under Python 2; confirm
    if returncode != 0 or (stdout is not None and 'Error' in stdout):
        raise CriticalError('nodetool returncode: {0}, output: {1}'.format(returncode, stdout))
    version = None
    for line in str(stdout).split('\n'):
        match = self.version_regex.match(line)
        if match:
            version = match.group(1)
    if not version:
        raise UnknownError('Cassandra version not found in output. Nodetool output may have changed. {0}'.
                           format(support_msg()))
    if not isVersion(version):
        raise UnknownError('Cassandra version unrecognized \'{0}\'. {1}'.format(version, support_msg()))
    self.ok()
    self.msg = 'Cassandra version = {0}'.format(version)
    if expected is not None and not re.search(expected, version):
        self.msg += " (expected '{0}')".format(expected)
        self.critical()
def run(self):
    """Fetch the HBase UI page and extract the version, optionally checking
    it against the --expected regex (CRITICAL on mismatch)."""
    self.no_args()
    host = self.get_opt('host')
    port = self.get_opt('port')
    validate_host(host)
    validate_port(port)
    expected = self.get_opt('expected')
    if expected is not None:
        validate_regex(expected)
        log.info('expected version regex: %s', expected)
    url = 'http://%(host)s:%(port)s/' % locals() + self.url_path
    log.debug('GET %s' % url)
    try:
        req = requests.get(url)
    except requests.exceptions.RequestException as _:
        raise CriticalError(_)
    log.debug("response: %s %s", req.status_code, req.reason)
    log.debug("content:\n%s\n%s\n%s", '=' * 80, req.content.strip(), '=' * 80)
    if req.status_code != 200:
        raise CriticalError("%s %s" % (req.status_code, req.reason))
    soup = BeautifulSoup(req.content, 'html.parser')
    if log.isEnabledFor(logging.DEBUG):
        log.debug("BeautifulSoup prettified:\n{0}\n{1}".format(soup.prettify(), '=' * 80))
    self.ok()
    version = self.parse_version(soup)
    self.msg = 'HBase {0} version = {1}'.format(self.role, version)
    if expected is not None and not re.search(expected, version):
        self.msg += " (expected '{0}')".format(expected)
        self.critical()
def run(self):
    """Check each given git directory's branches are tracking the expected
    upstream origin branches; exits CRITICAL on any failure."""
    if not self.args:
        self.usage('no git directory args given')
    self.origin = self.get_opt('origin')
    args = uniq_list_ordered(self.args)
    self.branch_prefix = self.get_opt('branch_prefix')
    if self.branch_prefix is not None:
        validate_regex(self.branch_prefix, 'branch prefix')
        self.branch_prefix = re.compile(self.branch_prefix)
    # validate all path arguments up front
    for arg in args:
        if not os.path.exists(arg):
            print("'%s' not found" % arg)
            sys.exit(ERRORS['WARNING'])
        if os.path.isfile(arg):
            log_option('file', arg)
        elif os.path.isdir(arg):
            log_option('directory', arg)
        else:
            die("path '%s' could not be determined as either a file or directory" % arg)
    for arg in args:
        self.check_git_branches_upstream(arg)
    if self.status == "OK":
        log.info('SUCCESS - All Git branches are tracking the expected upstream origin branches')
    else:
        log.critical('FAILED')
        sys.exit(ERRORS['CRITICAL'])
def process_options(self):
    """Resolve hub URL (direct or from host/port/ssl), target URL and
    expected content/regex options; defaults args to chrome + firefox."""
    super(SeleniumHubBrowserTest, self).process_options()
    self.hub_url = self.get_opt('hub_url')
    if self.hub_url:
        validate_url(self.hub_url, 'hub')
    else:
        self.host = self.get_opt('host')
        self.port = self.get_opt('port')
        validate_host(self.host)
        validate_port(self.port)
        if self.get_opt('ssl') or int(self.port) == 443:
            self.protocol = 'https'
        self.hub_url = '{protocol}://{host}:{port}/{path}'.format(
            protocol=self.protocol, host=self.host, port=self.port, path=self.path)
    self.url = self.get_opt('url')
    if ':' not in self.url:
        self.url = 'http://' + self.url
    validate_url(self.url)
    self.expected_content = self.get_opt('content')
    self.expected_regex = self.get_opt('regex')
    if self.expected_regex:
        validate_regex(self.expected_regex)
        self.expected_regex = re.compile(self.expected_regex)
    elif self.url == self.url_default:
        self.expected_content = self.expected_content_default
    if not self.args:
        # test basic Chrome and Firefox are available
        self.args.append('chrome')
        self.args.append('firefox')
def run(self): expected = self.get_opt("expected") if expected is not None: validate_regex(expected) log.info("expected version regex: %s", expected) cmd = "consul version" log.debug("cmd: " + cmd) proc = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.STDOUT) (stdout, _) = proc.communicate() log.debug("stdout: " + str(stdout)) returncode = proc.wait() log.debug("returncode: " + str(returncode)) if returncode != 0 or (stdout is not None and "Error" in stdout): raise CriticalError("consul returncode: {0}, output: {1}".format(returncode, stdout)) version = None for line in str(stdout).split("\n"): match = self.version_regex.match(line) if match: version = match.group(1) if not version: raise UnknownError( "Consul version not found in output. Consul output may have changed. {0}".format(support_msg()) ) if not isVersion(version): raise UnknownError("Consul version unrecognized '{0}'. {1}".format(version, support_msg())) self.ok() self.msg = "Consul version = {0}".format(version) if expected is not None and not re.search(expected, version): self.msg += " (expected '{0}')".format(expected) self.critical()
def process_options(self):
    """Parse and validate options for the YARN running-app check.

    Validates app regex / user / queue / min-containers only when not
    just listing apps; always validates the result limit and appends the
    query string to the request path.
    """
    super(CheckHadoopYarnAppRunning, self).process_options()
    self.app = self.get_opt('app')
    self.app_user = self.get_opt('user')
    self.queue = self.get_opt('queue')
    self.min_containers = self.get_opt('min_containers')
    self.limit = self.get_opt('limit')
    self.warn_on_dup_app = self.get_opt('warn_on_duplicate_app')
    self.list_apps = self.get_opt('list_apps')
    if not self.list_apps:
        if not self.app:
            self.usage('--app regex not defined')
        validate_regex(self.app, 'app')
        if self.app_user is not None:
            validate_chars(self.app_user, 'app user', r'\w')
        if self.queue is not None:
            validate_chars(self.queue, 'queue', r'\w-')
        if self.min_containers is not None:
            validate_int(self.min_containers, 'min containers', 0, None)
            self.min_containers = int(self.min_containers)
    # fix: 'limit' was fetched from the options a second time here;
    # reuse the value already read above
    validate_int(self.limit, 'num results', 1, None)
    self.path += '?states=running&limit={0}'.format(self.limit)
    self.validate_thresholds(optional=True)
def process_options(self):
    """Resolve hub URL (direct or from host/port/ssl), target URL, browser
    name, and expected content/regex options."""
    super(CheckSeleniumHubBrowser, self).process_options()
    self.hub_url = self.get_opt('hub_url')
    if self.hub_url:
        validate_url(self.hub_url, 'hub')
    else:
        self.host = self.get_opt('host')
        self.port = self.get_opt('port')
        validate_host(self.host)
        validate_port(self.port)
        if self.get_opt('ssl') or int(self.port) == 443:
            self.protocol = 'https'
        self.hub_url = '{protocol}://{host}:{port}/{path}'.format(
            protocol=self.protocol, host=self.host, port=self.port, path=self.path)
    self.url = self.get_opt('url')
    if ':' not in self.url:
        self.url = 'http://' + self.url
    validate_url(self.url)
    self.browser = self.get_opt('browser')
    if self.browser:
        self.browser = self.browser.upper()
        validate_alnum(self.browser, 'browser')
    self.expected_content = self.get_opt('content')
    self.expected_regex = self.get_opt('regex')
    if self.expected_regex:
        validate_regex(self.expected_regex)
        self.expected_regex = re.compile(self.expected_regex)
    elif self.url == self.url_default:
        self.expected_content = self.expected_content_default
def process_options(self):
    """Parse and validate options for the YARN last-finished-app-state check.

    Validates app regex / user / queue only when not just listing apps;
    always validates the result limit and appends it to the request path.
    """
    super(CheckHadoopYarnAppLastFinishedState, self).process_options()
    self.app = self.get_opt('app')
    self.app_user = self.get_opt('user')
    self.queue = self.get_opt('queue')
    self.limit = self.get_opt('limit')
    self.warn_on_dup_app = self.get_opt('warn_on_duplicate_app')
    self.list_apps = self.get_opt('list_apps')
    if not self.list_apps:
        if not self.app:
            self.usage('--app name is not defined')
        validate_regex(self.app, 'app')
        if self.app_user is not None:
            validate_chars(self.app_user, 'app user', r'\w')
        if self.queue is not None:
            validate_chars(self.queue, 'queue', r'\w-')
    # fix: 'limit' was fetched from the options a second time here;
    # reuse the value already read above
    validate_int(self.limit, 'num results', 1, None)
    # Not limited to states here in case we miss one, instead will return all and
    # then explicitly skip only RUNNING/ACCEPTED states
    self.path += '?limit={0}'.format(self.limit)
    self.validate_thresholds(optional=True)
def process_options(self):
    """Parse the optional --build regex and install a custom HTTP
    response-code checker on the request handler."""
    super(CheckInfluxDBVersion, self).process_options()
    self.expected_build = self.get_opt('build')
    if self.expected_build:
        validate_regex(self.expected_build, 'build')
    # Override default RequestHandler() error checking
    self.request.check_response_code = self.check_response_code
def run(self):
    """Check each given git directory's branches are tracking the expected
    upstream origin branches; exits CRITICAL on any failure."""
    if not self.args:
        self.usage('no git directory args given')
    self.origin = self.get_opt('origin')
    args = uniq_list_ordered(self.args)
    self.branch_prefix = self.get_opt('branch_prefix')
    if self.branch_prefix is not None:
        validate_regex(self.branch_prefix, 'branch prefix')
        self.branch_prefix = re.compile(self.branch_prefix)
    # validate all path arguments up front
    for arg in args:
        if not os.path.exists(arg):
            print("'%s' not found" % arg)
            sys.exit(ERRORS['WARNING'])
        if os.path.isfile(arg):
            log_option('file', arg)
        elif os.path.isdir(arg):
            log_option('directory', arg)
        else:
            die("path '%s' could not be determined as either a file or directory" % arg)
    for arg in args:
        self.check_git_branches_upstream(arg)
    if self.status == "OK":
        log.info('SUCCESS - All Git branches are tracking the expected upstream origin branches')
    else:
        log.critical('FAILED')
        sys.exit(ERRORS['CRITICAL'])
def process_args(self):
    """Validate directory arguments and comparison-mode options.

    Defaults to name + checksum comparison when no mode is chosen.
    Returns the de-duplicated list of directory arguments.
    """
    args = uniq_list_ordered(self.args)
    if not args:
        self.usage('no directories specified as arguments')
    log_option('directories', args)
    self.compare_by_name = self.get_opt('name')
    self.compare_by_size = self.get_opt('size')
    self.compare_by_checksum = self.get_opt('checksum')
    self.regex = self.get_opt('regex')
    self.quiet = self.get_opt('quiet')
    self.no_short_circuit = self.get_opt('no_short_circuit')
    self.include_dot_dirs = self.get_opt('include_dot_dirs')
    if self.regex:
        if '(' not in self.regex:
            log.info('regex no capture brackets specified, will capture entire given regex')
            self.regex = '(' + self.regex + ')'
        validate_regex(self.regex)
        self.re_compiled = re.compile(self.regex, re.I)
    if not (self.compare_by_name or self.compare_by_size or
            self.compare_by_checksum or self.regex):
        self.compare_by_name = True
        #self.compare_by_size = True
        self.compare_by_checksum = True
    log_option('compare by name', self.compare_by_name)
    log_option('compare by size', self.compare_by_size)
    log_option('compare by checksum', self.compare_by_checksum)
    log_option('compare by regex', True if self.regex else False)
    return args
def process_args(self):
    """Validate directory arguments and comparison-mode options.

    Defaults to name + checksum comparison when no mode is chosen.
    Returns the de-duplicated list of directory arguments.
    """
    args = uniq_list_ordered(self.args)
    if not args:
        self.usage('no directories specified as arguments')
    log_option('directories', args)
    self.compare_by_name = self.get_opt('name')
    self.compare_by_size = self.get_opt('size')
    self.compare_by_checksum = self.get_opt('checksum')
    self.regex = self.get_opt('regex')
    self.quiet = self.get_opt('quiet')
    self.no_short_circuit = self.get_opt('no_short_circuit')
    self.include_dot_dirs = self.get_opt('include_dot_dirs')
    if self.regex:
        if '(' not in self.regex:
            log.info('regex no capture brackets specified, will capture entire given regex')
            self.regex = '(' + self.regex + ')'
        validate_regex(self.regex)
        self.re_compiled = re.compile(self.regex, re.I)
    if not (self.compare_by_name or self.compare_by_size or
            self.compare_by_checksum or self.regex):
        self.compare_by_name = True
        #self.compare_by_size = True
        self.compare_by_checksum = True
    log_option('compare by name', self.compare_by_name)
    log_option('compare by size', self.compare_by_size)
    log_option('compare by checksum', self.compare_by_checksum)
    log_option('compare by regex', True if self.regex else False)
    return args
def process_args(self):
    """Parse the optional table-filter regex used to restrict which tables
    get flushed."""
    log.setLevel(logging.INFO)
    self.no_args()
    regex = self.get_opt('regex')
    if regex:
        validate_regex(regex)
        self.table_regex = re.compile(regex, re.I)
        log.info('filtering to flush only tables matching regex \'{0}\''.format(regex))
def process_options(self):
    """Parse the config key and expected-value regex options for the Drill
    config check (skipped when only listing config)."""
    super(CheckApacheDrillConfig, self).process_options()
    self.config_key = self.get_opt('key')
    self.expected_value = self.get_opt('expected')
    self.list_config = self.get_opt('list')
    if not self.list_config:
        validate_chars(self.config_key, 'config key', r'A-Za-z0-9_\.-')
        # NOTE(review): expected value is validated unconditionally here, so
        # --expected appears mandatory unless --list is given — confirm intended
        validate_regex(self.expected_value, 'expected value regex')
def run(self):
    """Validate Dockerfiles across git branches for all given args,
    logging summary statistics; exits CRITICAL on validation failure."""
    if not self.args:
        self.usage('no Dockerfile / directory args given')
    args = uniq_list_ordered(self.args)
    self.branch_prefix = self.get_opt('branch_prefix')
    if self.branch_prefix is not None:
        validate_regex(self.branch_prefix, 'branch prefix')
        self.branch_prefix = re.compile(self.branch_prefix)
    # validate all path arguments before doing any work
    for arg in args:
        if not os.path.exists(arg):
            print("'%s' not found" % arg)
            sys.exit(ERRORS['WARNING'])
        if os.path.isfile(arg):
            log_option('file', arg)
        elif os.path.isdir(arg):
            log_option('directory', arg)
        else:
            die("path '%s' could not be determined as either a file or directory" % arg)
    for arg in args:
        self.check_git_branches_dockerfiles(arg)
    log.info('Total Branches: %s', len(self.branches))
    log.info('Selected Branches: %s', len(self.selected_branches))
    log.info('Branches checked: %s', self.branches_checked)
    log.info('Branches with Dockerfile checked: %s', len(self.branches_dockerfile_checked))
    branches_skipped = len(self.branches_skipped)
    if branches_skipped > 0:
        log.warn('{0} branches skipped for not matching expected naming format'
                 .format(branches_skipped))
    branches_not_checked = len(self.selected_branches) - len(self.branches_dockerfile_checked)
    # NOTE(review): threshold '> 1' means a single unchecked branch is not
    # warned about — confirm this is intentional rather than '> 0'
    if branches_not_checked > 1:
        log.warn('{0} branches not checked (no matching Dockerfile found?)'
                 .format(branches_not_checked))
    if log.isEnabledFor(logging.DEBUG):
        log.debug('Branches with no corresponding Dockerfile found:\n%s',
                  '\n'.join(set(self.selected_branches) -
                            set(self.branches_dockerfile_checked)))
    log.info('{0} Dockerfiles checked'.format(len(self.dockerfiles_checked)))
    branches_failed = len(self.branches_failed)
    summary = '{0} Dockerfiles failed validation across {1} branches'.format(
        self.dockerfiles_failed, branches_failed)
    if branches_failed > 0:
        log.error(summary)
    else:
        log.info(summary)
    if self.failed:
        log.error('Dockerfile validation FAILED')
        sys.exit(ERRORS['CRITICAL'])
    log.info('Dockerfile validation SUCCEEDED')
def process_options(self):
    """Validate host/port and the optional --expected version regex."""
    self.no_args()
    self.host = self.get_opt('host')
    self.port = self.get_opt('port')
    validate_host(self.host)
    validate_port(self.port)
    self.expected = self.get_opt('expected')
    if self.expected is not None:
        validate_regex(self.expected)
        log.info('expected version regex: %s', self.expected)
def process_options(self):
    """Compile optional case-insensitive include/exclude mount filters."""
    super(CheckLinuxDiskMountsReadOnly, self).process_options()
    self.no_args()
    self.include = self.get_opt('include')
    self.exclude = self.get_opt('exclude')
    if self.include:
        validate_regex(self.include, 'include')
        self.include = re.compile(self.include, re.I)
    if self.exclude:
        validate_regex(self.exclude, 'exclude')
        self.exclude = re.compile(self.exclude, re.I)
def process_options(self):
    """Compile optional case-insensitive include/exclude filters and log
    all parsed options."""
    self.include = self.get_opt('include')
    self.exclude = self.get_opt('exclude')
    if self.include:
        validate_regex(self.include, 'include')
        self.include = re.compile(self.include, re.I)
    if self.exclude:
        validate_regex(self.exclude, 'exclude')
        self.exclude = re.compile(self.exclude, re.I)
    for key in self.opts:
        log_option(key, self.opts[key])
def process_options(self):
    """Parse and validate the consul key and optional leader regex,
    then append the key to the request path."""
    super(CheckConsulServiceLeaderElected, self).process_options()
    self.key = self.get_opt('key')
    self.regex = self.get_opt('regex')
    if not self.key:
        self.usage('--key not defined')
    self.key = self.key.lstrip('/')
    validate_chars(self.key, 'key', r'\w\/-')
    if self.regex:
        # fix: validation label said 'key' (copy-paste from the line above);
        # it is the --regex option being validated
        validate_regex(self.regex, 'regex')
    self.path += '{}'.format(self.key)
def process_options(self):
    """Parse and compile the required --field regex."""
    # Python 2.x
    super(HiveTablesMetadata, self).process_options()
    # Python 3.x
    # super().__init__()
    if self.field is None:
        self.field = self.get_opt('field')
    if not self.field:
        self.usage('--field not specified')
    validate_regex(self.field, 'field')
    self.field = re.compile(self.field)
def process_options(self):
    """Compile optional case-insensitive include/exclude mount filters."""
    super(CheckDiskMountsReadOnly, self).process_options()
    self.no_args()
    self.include = self.get_opt('include')
    self.exclude = self.get_opt('exclude')
    if self.include:
        validate_regex(self.include, 'include')
        self.include = re.compile(self.include, re.I)
    if self.exclude:
        validate_regex(self.exclude, 'exclude')
        self.exclude = re.compile(self.exclude, re.I)
def process_options(self):
    """Parse the query plus database/table filter regexes."""
    super(HiveForEachTable, self).process_options()
    if self.query is None:
        self.query = self.get_opt('query')
    if not self.query:
        self.usage('query not defined')
    self.database = self.get_opt('database')
    self.table = self.get_opt('table')
    #self.partition = self.get_opt('partition')
    self.ignore_errors = self.get_opt('ignore_errors')
    validate_regex(self.database, 'database')
    validate_regex(self.table, 'table')
def process_options(self):
    """Collect parsing flags into self.opts and compile the optional
    case-insensitive exclude regex."""
    self.opts = {
        'no_hashes': self.get_opt('no_hash_comments'),
        'allow_colons': self.get_opt('allow_colon_delimiters'),
        #'disallow_inline_comments': self.get_opt('disallow_inline_comments'),
        'disallow_blanks': self.get_opt('no_blanks'),
        'print': self.get_opt('print'),
    }
    self.exclude = self.get_opt('exclude')
    if self.exclude:
        validate_regex(self.exclude, 'exclude')
        self.exclude = re.compile(self.exclude, re.I)
def process_args(self):
    """Validate host/port and the optional table-filter regex used to
    restrict which tables get compacted."""
    log.setLevel(logging.INFO)
    self.no_args()
    self.host = self.get_opt('host')
    self.port = self.get_opt('port')
    validate_host(self.host)
    validate_port(self.port)
    regex = self.get_opt('regex')
    if regex:
        validate_regex(regex)
        self.table_regex = re.compile(regex, re.I)
        log.info('filtering to compact only tables matching regex \'{0}\''.format(regex))
def validate_options(self):
    """Validate host list, port, protocol, optional content regex and
    thread count."""
    if not self.host_list:
        self.usage('no hosts specified')
    validate_hostport_list(self.host_list, port_optional=True)
    validate_port(self.port)
    if self.protocol and self.protocol not in ('http', 'https', 'ping'):
        code_error('invalid protocol, must be one of http or https')
    if self.regex:
        if not self.protocol:
            self.usage('--regex cannot be used without --http / --https')
        validate_regex(self.regex)
        self.regex = re.compile(self.regex)
    validate_int(self.num_threads, 'num threads', 1, 100)
def process_args(self):
    """Validate host/port and the optional table-filter regex used to
    restrict which tables get compacted."""
    log.setLevel(logging.INFO)
    self.no_args()
    self.host = self.get_opt('host')
    self.port = self.get_opt('port')
    validate_host(self.host)
    validate_port(self.port)
    regex = self.get_opt('regex')
    if regex:
        validate_regex(regex)
        self.table_regex = re.compile(regex, re.I)
        log.info("filtering to compact only tables matching regex \'%s\'", regex)
def process_options_common(self):
    """Shared option parsing for YARN app checks: include/exclude and
    queue filters, result limit, and thresholds."""
    self.limit = self.get_opt('limit')
    self.list_apps = self.get_opt('list_apps')
    if self.include is not None:
        validate_regex(self.include, 'include')
        self.include = re.compile(self.include, re.I)
    if self.exclude is not None:
        validate_regex(self.exclude, 'exclude')
        self.exclude = re.compile(self.exclude, re.I)
    queue = self.get_opt('queue')
    if queue:
        validate_regex(queue, 'queue')
        self.queue = re.compile(queue, re.I)
    exclude_queue = self.get_opt('exclude_queue')
    if exclude_queue:
        validate_regex(exclude_queue, 'exclude queue')
        self.exclude_queue = re.compile(exclude_queue, re.I)
    # fix: 'limit' was fetched from the options a second time here;
    # reuse the value already read at the top of the method
    validate_int(self.limit, 'num results', 1, None)
    self.path += '?states=running&limit={0}'.format(self.limit)
    self.validate_thresholds(optional=True)
def process_options(self):
    """Parse CSV dialect options and build the ignored-users pattern;
    requires a CSV file argument."""
    super(ClouderaNavigatorTablesUsed, self).process_options()
    self.delimiter = self.get_opt('delimiter')
    self.quotechar = self.get_opt('quotechar')
    self.escapechar = self.get_opt('escapechar')
    ignore_users = self.get_opt('ignore_users')
    if ignore_users:
        ignored_users = ignore_users.split(',')
        for username in ignored_users:
            validate_regex(username, 'ignored user')
        # account for kerberized names - user, [email protected] or user/[email protected]
        self.re_ignored_users = re.compile('^(?:' + '|'.join(ignored_users) + ')(?:[@/]|$)', re.I)
    if not self.args:
        self.usage('no CSV file argument given')
def process_args(self):
    """Validate host/port, the required --key and optional --regex,
    plus optional thresholds.

    Raises CodingError if the subclass forgot to set self.name.
    """
    if not self.name:
        raise CodingError("didn't name check, please set self.name in __init__()")
    self.no_args()
    self.host = self.get_opt('host')
    self.port = self.get_opt('port')
    validate_host(self.host)
    validate_port(self.port)
    self.key = self.get_opt('key')
    self.regex = self.get_opt('regex')
    if not self.key:
        self.usage('--key not defined')
    self.key = self.key.lstrip('/')
    validate_chars(self.key, 'key', r'\w\/-')
    if self.regex:
        # fix: validation label said 'key' (copy-paste from the line above);
        # it is the --regex option being validated
        validate_regex(self.regex, 'regex')
    self.validate_thresholds(optional=True)
def process_options(self):
    """Select exactly one query-state filter (--running / --failed /
    --blocked / --queued), then parse include/exclude filters, counts
    and thresholds."""
    super(CheckPrestoQueries, self).process_options()
    # Possible Query States - https://prestodb.io/docs/current/admin/web-interface.html
    self.list = self.get_opt('list')
    if not self.list:
        if self.get_opt('running'):
            self.state_selector = ['RUNNING', 'PLANNING', 'STARTING', 'FINISHING']
        if self.get_opt('failed'):
            if self.state_selector is not None:
                self.usage('cannot specify more than one of --running / --failed / --blocked / --queued at a time')
            self.state_selector = ['FAILED']
        if self.get_opt('blocked'):
            if self.state_selector is not None:
                self.usage('cannot specify more than one of --running / --failed / --blocked / --queued at a time')
            self.state_selector = ['BLOCKED']
        if self.get_opt('queued'):
            if self.state_selector is not None:
                self.usage('cannot specify more than one of --running / --failed / --blocked / --queued at a time')
            self.state_selector = ['QUEUED']
        if self.state_selector is None:
            self.usage('must specify one type of --running / --failed / --blocked / --queued queries')
    self.include = self.get_opt('include')
    self.exclude = self.get_opt('exclude')
    if self.include:
        validate_regex(self.include, 'include')
        self.include = re.compile(self.include, re.I)
    if self.exclude:
        validate_regex(self.exclude, 'exclude')
        self.exclude = re.compile(self.exclude, re.I)
    self.num = self.get_opt('num')
    validate_int(self.num, 'num', 0)
    self.num = int(self.num)
    self.min_queries = self.get_opt('min_queries')
    validate_int(self.min_queries, 'minimum queries', 0)
    self.min_queries = int(self.min_queries)
    self.validate_thresholds()
def run(self):
    """Validate Dockerfiles across git branches for all given args,
    logging summary statistics; exits CRITICAL on validation failure."""
    if not self.args:
        self.usage('no Dockerfile / directory args given')
    args = uniq_list_ordered(self.args)
    self.branch_prefix = self.get_opt('branch_prefix')
    if self.branch_prefix is not None:
        validate_regex(self.branch_prefix, 'branch prefix')
        self.branch_prefix = re.compile(self.branch_prefix)
    # validate all path arguments before doing any work
    for arg in args:
        if not os.path.exists(arg):
            print("'%s' not found" % arg)
            sys.exit(ERRORS['WARNING'])
        if os.path.isfile(arg):
            log_option('file', arg)
        elif os.path.isdir(arg):
            log_option('directory', arg)
        else:
            die("path '%s' could not be determined as either a file or directory" % arg)
    for arg in args:
        self.check_git_branches_dockerfiles(arg)
    log.info('Total Branches: %s', len(self.branches))
    log.info('Selected Branches: %s', len(self.selected_branches))
    log.info('Branches checked: %s', self.branches_checked)
    log.info('Branches with Dockerfile checked: %s', len(self.branches_dockerfile_checked))
    branches_skipped = len(self.branches_skipped)
    if branches_skipped > 0:
        log.warn('{0} branches skipped for not matching expected naming format'
                 .format(branches_skipped))
    branches_not_checked = len(self.selected_branches) - len(self.branches_dockerfile_checked)
    # NOTE(review): threshold '> 1' means a single unchecked branch is not
    # warned about — confirm this is intentional rather than '> 0'
    if branches_not_checked > 1:
        log.warn('{0} branches not checked (no matching Dockerfile found?)'
                 .format(branches_not_checked))
    if log.isEnabledFor(logging.DEBUG):
        log.debug('Branches with no corresponding Dockerfile found:\n%s',
                  '\n'.join(set(self.selected_branches) -
                            set(self.branches_dockerfile_checked)))
    log.info('{0} Dockerfiles checked'.format(len(self.dockerfiles_checked)))
    branches_failed = len(self.branches_failed)
    summary = '{0} Dockerfiles failed validation across {1} branches'.format(
        self.dockerfiles_failed, branches_failed)
    if branches_failed > 0:
        log.error(summary)
    else:
        log.info(summary)
    if self.failed:
        log.error('Dockerfile validation FAILED')
        sys.exit(ERRORS['CRITICAL'])
    log.info('Dockerfile validation SUCCEEDED')
def run(self):
    """Fetch a consul KV key over HTTP, check its value against the
    optional regex and thresholds, and emit perfdata when numeric."""
    self.no_args()
    host = self.options.host
    port = self.options.port
    validate_host(host)
    validate_port(port)
    key = self.options.key
    regex = self.options.regex
    if not key:
        self.usage('--key not defined')
    key = key.lstrip('/')
    validate_chars(key, 'key', r'\w\/-')
    if regex:
        # NOTE(review): label 'key' looks like a copy-paste; presumably
        # should read 'regex' — confirm before changing the message
        validate_regex(regex, 'key')
    self.validate_thresholds(optional=True)
    req = None
    url = 'http://%(host)s:%(port)s/v1/kv/%(key)s' % locals()
    log.debug('GET %s' % url)
    try:
        req = requests.get(url)
    except requests.exceptions.RequestException as _:
        qquit('CRITICAL', _)
    log.debug("response: %s %s" % (req.status_code, req.reason))
    log.debug("content: '%s'" % req.content)
    if req.status_code != 200:
        err = ''
        if req.content and isStr(req.content) and len(req.content.split('\n')) < 2:
            err += ': ' + req.content
        qquit('CRITICAL', "failed to retrieve consul key '%s': '%s' %s%s"
                          % (key, req.status_code, req.reason, err))
    value = self.extract_value(req.content)
    log.info("value = '%(value)s'" % locals())
    self.ok()
    self.msg = "consul key '%s' value = '%s'" % (key, value)
    if regex:
        if not re.search(regex, value):
            self.critical()
            self.msg += " (did not match expected regex '%s')" % regex
        #elif self.get_verbose():
        #    self.msg += " (matched regex '%s')" % regex
    self.check_thresholds(value)
    if isFloat(value):
        self.msg += " | '%s'=%s" % (key, value)
def process_options(self):
    """Parse include/exclude filters, result counts, minimum-query count
    and thresholds for the Presto queries check."""
    super(CheckPrestoQueries, self).process_options()
    self.include = self.get_opt('include')
    self.exclude = self.get_opt('exclude')
    if self.include:
        validate_regex(self.include, 'include')
        self.include = re.compile(self.include, re.I)
    if self.exclude:
        validate_regex(self.exclude, 'exclude')
        self.exclude = re.compile(self.exclude, re.I)
    self.num = self.get_opt('num')
    validate_int(self.num, 'num', 0)
    self.num = int(self.num)
    self.min_queries = self.get_opt('min_queries')
    validate_int(self.min_queries, 'minimum queries', 0)
    self.min_queries = int(self.min_queries)
    self.list = self.get_opt('list')
    self.validate_thresholds()
def run(self):
    """Query the Solr admin system info endpoint, parse the solr-spec
    version and compare it against the optional --expected regex."""
    self.no_args()
    host = self.get_opt('host')
    port = self.get_opt('port')
    validate_host(host)
    validate_port(port)
    expected = self.get_opt('expected')
    if expected is not None:
        validate_regex(expected)
        log.info('expected version regex: %s', expected)
    url = 'http://%(host)s:%(port)s/solr/admin/info/system' % locals()
    log.debug('GET %s' % url)
    try:
        req = requests.get(url)
    except requests.exceptions.RequestException as _:
        raise CriticalError(_)
    log.debug("response: %s %s", req.status_code, req.reason)
    log.debug("content:\n%s\n%s\n%s", '=' * 80, req.content.strip(), '=' * 80)
    if req.status_code != 200:
        raise CriticalError("%s %s" % (req.status_code, req.reason))
    soup = BeautifulSoup(req.content, 'html.parser')
    if log.isEnabledFor(logging.DEBUG):
        log.debug("BeautifulSoup prettified:\n{0}\n{1}".format(soup.prettify(), '=' * 80))
    try:
        version = soup.find('str', {'name': 'solr-spec-version'}).text
    except (AttributeError, TypeError) as _:
        raise UnknownError('failed to find parse Solr output. {0}\n{1}'.format(
            support_msg_api(), traceback.format_exc()))
    if not version:
        raise UnknownError('Solr version not found in output. {0}'.format(support_msg_api()))
    if not isVersion(version):
        raise UnknownError('Solr version unrecognized \'{0}\'. {1}'.format(version, support_msg_api()))
    self.ok()
    self.msg = 'Solr version = {0}'.format(version)
    if expected is not None and not re.search(expected, version):
        self.msg += " (expected '{0}')".format(expected)
        self.critical()
def process_args(self):
    """Parse media-validation options, compile the optional filename
    regex, and verify the ffmpeg/ffprobe binary is available.

    Returns the de-duplicated list of file/dir arguments.
    """
    self.skip_errors = self.get_opt('continue')
    self.quick = self.get_opt('quick')
    self.regex = self.get_opt('regex')
    args = uniq_list_ordered(self.args)
    if not args:
        self.usage('no files/dirs specified')
    log_option('files/dirs', args)
    log_option('regex', self.regex)
    log_option('quick', self.quick)
    log_option('continue-on-error', self.skip_errors)
    if self.regex:
        validate_regex(self.regex)
        self.regex = re.compile(self.regex, re.I)
    if self.quick:
        # quick mode only probes the container instead of fully decoding
        self.validate_cmd = 'ffprobe'
    if not which(self.validate_cmd.split()[0]):
        die('ffmpeg / ffprobe not found in $PATH')
    return args
def run(self):
    """Query the Nginx /version endpoint (expects a 404 status page),
    parse the version from the error page and compare it against the
    optional --expected regex."""
    self.no_args()
    host = self.get_opt('host')
    port = self.get_opt('port')
    validate_host(host)
    validate_port(port)
    expected = self.get_opt('expected')
    if expected is not None:
        validate_regex(expected)
        log.info('expected version regex: %s', expected)
    log.info('querying %s', self.software)
    url = 'http://%(host)s:%(port)s/version' % locals()
    log.debug('GET %s' % url)
    try:
        req = requests.get(url)
    except requests.exceptions.RequestException as _:
        raise CriticalError(_)
    log.debug("response: %s %s", req.status_code, req.reason)
    log.debug("content:\n%s\n%s\n%s", '=' * 80, req.content.strip(), '=' * 80)
    # Special handling for Nginx, expecting 404 rather than usual 200
    if req.status_code != 404:
        raise CriticalError("%s %s (expecting 404)" % (req.status_code, req.reason))
    soup = BeautifulSoup(req.content, 'html.parser')
    if log.isEnabledFor(logging.DEBUG):
        log.debug("BeautifulSoup prettified:\n{0}\n{1}".format(soup.prettify(), '=' * 80))
    try:
        version = soup.findAll('center')[1].text
    except (AttributeError, TypeError) as _:
        raise UnknownError('failed to find parse {0} output. {1}\n{2}'.
                           format(self.software, support_msg_api(), traceback.format_exc()))
    if '/' in version:
        version = version.split('/')[1]
    if not version:
        raise UnknownError('{0} version not found in output. {1}'.format(self.software, support_msg_api()))
    if not isVersion(version):
        raise UnknownError('{0} version unrecognized \'{1}\'. {2}'.
                           format(self.software, version, support_msg_api()))
    self.ok()
    self.msg = '{0} version = {1}'.format(self.software, version)
    if expected is not None and not re.search(expected, version):
        self.msg += " (expected '{0}')".format(expected)
        self.critical()
def run(self):
    """Fetch a consul KV key over HTTP, check its value against the
    optional regex and thresholds, and emit perfdata when numeric."""
    self.no_args()
    host = self.options.host
    port = self.options.port
    validate_host(host)
    validate_port(port)
    key = self.options.key
    regex = self.options.regex
    if not key:
        self.usage("--key not defined")
    key = key.lstrip("/")
    validate_chars(key, "key", r"\w\/-")
    if regex:
        # NOTE(review): label 'key' looks like a copy-paste; presumably
        # should read 'regex' — confirm before changing the message
        validate_regex(regex, "key")
    self.validate_thresholds(optional=True)
    req = None
    url = "http://%(host)s:%(port)s/v1/kv/%(key)s" % locals()
    log.debug("GET %s" % url)
    try:
        req = requests.get(url)
    except requests.exceptions.RequestException as _:
        qquit("CRITICAL", _)
    log.debug("response: %s %s" % (req.status_code, req.reason))
    log.debug("content: '%s'" % req.content)
    if req.status_code != 200:
        err = ""
        if req.content and isStr(req.content) and len(req.content.split("\n")) < 2:
            err += ": " + req.content
        qquit("CRITICAL", "failed to retrieve consul key '%s': '%s' %s%s"
                          % (key, req.status_code, req.reason, err))
    value = self.extract_value(req.content)
    log.info("value = '%(value)s'" % locals())
    self.ok()
    self.msg = "consul key '%s' value = '%s'" % (key, value)
    if regex:
        if not re.search(regex, value):
            self.critical()
            self.msg += " (did not match expected regex '%s')" % regex
        # elif self.get_verbose():
        #     self.msg += " (matched regex '%s')" % regex
    self.check_thresholds(value)
    if isFloat(value):
        self.msg += " | '%s'=%s" % (key, value)
def run(self):
    """Read a single key from Consul's KV HTTP API and check its value.

    Connection and non-200 responses terminate via qquit(CRITICAL, ...).
    The optional --regex and numeric thresholds are then applied to the
    value; a perfdata metric is emitted when the value parses as a float.
    """
    self.no_args()
    host = self.get_opt('host')
    port = self.get_opt('port')
    validate_host(host)
    validate_port(port)
    key = self.get_opt('key')
    regex = self.get_opt('regex')
    if not key:
        self.usage('--key not defined')
    key = key.lstrip('/')
    validate_chars(key, 'key', r'\w\/-')
    if regex:
        validate_regex(regex, 'key')
    self.validate_thresholds(optional=True)
    req = None
    url = 'http://%(host)s:%(port)s/v1/kv/%(key)s' % locals()
    log.debug('GET %s' % url)
    try:
        req = requests.get(url)
    except requests.exceptions.RequestException as _:
        qquit('CRITICAL', _)
    log.debug("response: %s %s" % (req.status_code, req.reason))
    log.debug("content: '%s'" % req.content)
    if req.status_code != 200:
        err = ''
        # short single-line bodies get appended to the error message
        if req.content and isStr(req.content) and len(req.content.split('\n')) < 2:
            err += ': ' + req.content
        qquit('CRITICAL',
              "failed to retrieve consul key '%s': '%s' %s%s" % (key, req.status_code, req.reason, err))
    value = self.extract_value(req.content)
    log.info("value = '%(value)s'" % locals())
    self.ok()
    self.msg = "consul key '%s' value = '%s'" % (key, value)
    if regex:
        if not re.search(regex, value):
            self.critical()
            self.msg += " (did not match expected regex '%s')" % regex
    self.check_thresholds(value)
    if isFloat(value):
        self.msg += " | '%s'=%s" % (key, value)
def run(self):
    """Query Solr's admin system info API and extract 'solr-spec-version'.

    Sets OK with the version in self.msg, appends the expectation and goes
    CRITICAL when --expected doesn't match. Raises CriticalError on
    connection/HTTP errors and UnknownError when the version cannot be
    found or recognized.

    Fix: repaired the garbled error message ('failed to find parse Solr
    output' -> 'failed to parse Solr output').
    """
    self.no_args()
    host = self.get_opt('host')
    port = self.get_opt('port')
    validate_host(host)
    validate_port(port)
    expected = self.get_opt('expected')
    if expected is not None:
        validate_regex(expected)
        log.info('expected version regex: %s', expected)
    url = 'http://%(host)s:%(port)s/solr/admin/info/system' % locals()
    log.debug('GET %s' % url)
    try:
        req = requests.get(url)
    except requests.exceptions.RequestException as _:
        raise CriticalError(_)
    log.debug("response: %s %s", req.status_code, req.reason)
    log.debug("content:\n%s\n%s\n%s", '='*80, req.content.strip(), '='*80)
    if req.status_code != 200:
        raise CriticalError("%s %s" % (req.status_code, req.reason))
    soup = BeautifulSoup(req.content, 'html.parser')
    if log.isEnabledFor(logging.DEBUG):
        log.debug("BeautifulSoup prettified:\n{0}\n{1}".format(soup.prettify(), '='*80))
    try:
        # soup.find() returns None when the tag is absent -> AttributeError below
        version = soup.find('str', {'name': 'solr-spec-version'}).text
    except (AttributeError, TypeError) as _:
        raise UnknownError('failed to parse Solr output. {0}\n{1}'.
                           format(support_msg_api(), traceback.format_exc()))
    if not version:
        raise UnknownError('Solr version not found in output. {0}'.format(support_msg_api()))
    if not isVersion(version):
        raise UnknownError('Solr version unrecognized \'{0}\'. {1}'.format(version, support_msg_api()))
    self.ok()
    self.msg = 'Solr version = {0}'.format(version)
    if expected is not None and not re.search(expected, version):
        self.msg += " (expected '{0}')".format(expected)
        self.critical()
def run(self):
    """Validate Dockerfiles against git tags for every path argument.

    All paths are checked for existence up front (exit WARNING on the
    first missing one), then each is processed; exits CRITICAL if any
    Dockerfile failed validation.
    """
    if not self.args:
        self.usage("no Dockerfile / directory args given")
    args = uniq_list_ordered(self.args)
    self.tag_prefix = self.get_opt("tag_prefix")
    if self.tag_prefix is not None:
        validate_regex(self.tag_prefix, "tag prefix")
        self.tag_prefix = re.compile(self.tag_prefix)
    # first pass: make sure every argument exists before touching any repo
    for path in args:
        if not os.path.exists(path):
            print("'%s' not found" % path)
            sys.exit(ERRORS["WARNING"])
        if os.path.isfile(path):
            log_option("file", path)
        elif os.path.isdir(path):
            log_option("directory", path)
        else:
            die("path '%s' could not be determined as either a file or directory" % path)
    # second pass: run the actual checks
    for path in args:
        self.check_git_tags_dockerfiles(path)
    if self.failed:
        log.error("Dockerfile validation FAILED")
        sys.exit(ERRORS["CRITICAL"])
    log.info("Dockerfile validation SUCCEEDED")
def process_options(self):
    """Parse and validate command line options for the HBase cell check.

    Bug fix: validate_regex() was called with its arguments reversed
    ('expected value' was passed as the regex and self.regex as the name),
    so the literal string 'expected value' was validated instead of the
    user's --expected regex, which was never checked at all. Every other
    call in this codebase uses the (value, name) order.
    """
    self.no_args()
    self.host = self.get_opt('host')
    self.port = self.get_opt('port')
    self.row = self.get_opt('row')
    self.column = self.get_opt('column')
    self.regex = self.get_opt('expected')
    self.precision = self.get_opt('precision')
    self.graph = self.get_opt('graph')
    self.units = self.get_opt('units')
    validate_host(self.host)
    validate_port(self.port)
    self.list_tables = self.get_opt('list')
    # table/row/column are only required when not just listing tables
    if not self.list_tables:
        self.table = self.get_opt('table')
        validate_hbase_table(self.table, 'hbase')
        validate_hbase_rowkey(self.row)
        validate_hbase_column_qualifier(self.column)
    if self.regex is not None:
        # fixed: arguments were reversed (value first, then its name)
        validate_regex(self.regex, 'expected value')
    if self.units is not None:
        validate_units(self.units)
    self.validate_thresholds(optional=True, positive=False)
    validate_int(self.precision, 'precision', 0, 10)
def run(self):
    """Validate Dockerfiles across git branches for each path argument.

    Checks all arguments exist first (exit WARNING on a missing one),
    then validates Dockerfiles on every matching branch, logging summary
    counts, and exits CRITICAL if any validation failed.

    Fix: log.warn() is a deprecated alias in the logging module; replaced
    with log.warning().
    """
    if not self.args:
        self.usage('no Dockerfile / directory args given')
    args = uniq_list_ordered(self.args)
    self.branch_prefix = self.get_opt('branch_prefix')
    if self.branch_prefix is not None:
        validate_regex(self.branch_prefix, 'branch prefix')
        self.branch_prefix = re.compile(self.branch_prefix)
    # validate all paths up front before checking any of them
    for arg in args:
        if not os.path.exists(arg):
            print("'%s' not found" % arg)
            sys.exit(ERRORS['WARNING'])
        if os.path.isfile(arg):
            log_option('file', arg)
        elif os.path.isdir(arg):
            log_option('directory', arg)
        else:
            die("path '%s' could not be determined as either a file or directory" % arg)
    for arg in args:
        self.check_git_branches_dockerfiles(arg)
    branches_skipped = len(self.branches_skipped)
    if branches_skipped > 0:
        # warning() instead of deprecated warn()
        log.warning('{0} branches skipped for not matching expected naming format'
                    .format(branches_skipped))
    log.info('{0} Dockerfiles checked across {1} branches'
             .format(len(self.dockerfiles_checked), self.branches_checked))
    branches_failed = len(self.branches_failed)
    _ = '{0} Dockerfiles failed validation across {1} branches'.format(self.dockerfiles_failed,
                                                                       branches_failed)
    if branches_failed > 0:
        log.error(_)
    else:
        log.info(_)
    if self.failed:
        log.error('Dockerfile validation FAILED')
        sys.exit(ERRORS['CRITICAL'])
    log.info('Dockerfile validation SUCCEEDED')
def process_options(self):
    """Parse options for the Presto queries check.

    The four state selector switches (--running / --failed / --blocked /
    --queued) are mutually exclusive and exactly one is required unless
    --list is given. Refactored the four copy-pasted if-blocks (with a
    triplicated usage string) into a single table-driven loop; behavior
    is unchanged.
    """
    super(CheckPrestoQueries, self).process_options()
    # Possible Query States - https://prestodb.io/docs/current/admin/web-interface.html
    self.list = self.get_opt('list')
    if not self.list:
        # option name -> list of Presto query states it selects
        _state_options = (
            ('running', ['RUNNING', 'PLANNING', 'STARTING', 'FINISHING']),
            ('failed', ['FAILED']),
            ('blocked', ['BLOCKED']),
            ('queued', ['QUEUED']),
        )
        for option, states in _state_options:
            if self.get_opt(option):
                if self.state_selector is not None:
                    self.usage('cannot specify more than one of --running / --failed / --blocked / --queued at a time')
                self.state_selector = states
        if self.state_selector is None:
            self.usage('must specify one type of --running / --failed / --blocked / --queued queries')
    self.include = self.get_opt('include')
    self.exclude = self.get_opt('exclude')
    if self.include:
        validate_regex(self.include, 'include')
        self.include = re.compile(self.include, re.I)
    if self.exclude:
        validate_regex(self.exclude, 'exclude')
        self.exclude = re.compile(self.exclude, re.I)
    self.num = self.get_opt('num')
    validate_int(self.num, 'num', 0)
    self.num = int(self.num)
    self.min_queries = self.get_opt('min_queries')
    validate_int(self.min_queries, 'minimum queries', 0)
    self.min_queries = int(self.min_queries)
    self.validate_thresholds()
def run(self):
    """Query ZooKeeper's 'envi' four-letter-word command for its version.

    Fixes:
    - send bytes and decode the reply: str payloads raise TypeError on
      Python 3 sockets, and bytes.split('\\n') with a str separator also
      fails on Python 3 (both still work on Python 2)
    - close the socket via try/finally so it is not leaked when
      send/recv raises
    - use re.search for --expected, consistent with the other version
      checks in this codebase (re.match silently anchored at the start)
    """
    self.no_args()
    host = self.get_opt('host')
    port = self.get_opt('port')
    validate_host(host)
    validate_port(port)
    expected = self.get_opt('expected')
    if expected is not None:
        validate_regex(expected)
        log.info('expected version regex: %s', expected)
    data = None
    try:
        conn = socket.create_connection((host, port), timeout=self.timeout/2)
        try:
            # must be bytes for Python 3 sockets (b'...' is str on Python 2)
            conn.sendall(b'envi')
            data = conn.recv(1024)
        finally:
            conn.close()
    except socket.error as _:
        raise CriticalError('Failed to connect to ZooKeeper: ' + str(_))
    if isinstance(data, bytes):
        # decode once at the I/O boundary, then work entirely in str
        data = data.decode('utf-8', 'replace')
    version = None
    log.debug(data.strip())
    for line in data.split('\n'):
        match = self.version_line_regex.match(line)
        if match:
            version = match.group(1)
            break
    if not version:
        raise UnknownError('ZooKeeper version not found in output. {0}'.format(support_msg_api()))
    if not isVersion(version):
        raise UnknownError('ZooKeeper version unrecognized \'{0}\'. {1}'.format(version, support_msg_api()))
    self.ok()
    self.msg = 'ZooKeeper version = {0}'.format(version)
    if expected is not None and not re.search(expected, version):
        self.msg += " (expected '{0}')".format(expected)
        self.critical()
def process_options(self):
    """Parse options for the Yarn queue apps check.

    Compiles the optional --allow / --disallow filters and the required
    --queue regex (all case-insensitive), validates --limit and appends
    the query string to the request path.

    Fix: removed a duplicate self.get_opt('limit') call — limit was
    fetched twice; it is now read once, right before validation.
    """
    super(CheckHadoopYarnQueueApps, self).process_options()
    self.allow = self.get_opt('allow')
    self.disallow = self.get_opt('disallow')
    self.list_apps = self.get_opt('list_apps')
    if self.allow is not None:
        validate_regex(self.allow, 'allow')
        self.allow = re.compile(self.allow, re.I)
    if self.disallow is not None:
        validate_regex(self.disallow, 'disallow')
        self.disallow = re.compile(self.disallow, re.I)
    queue = self.get_opt('queue')
    validate_regex(queue, 'queue')
    self.queue = re.compile(queue, re.I)
    self.limit = self.get_opt('limit')
    validate_int(self.limit, 'num results', 1, None)
    self.path += '?states=running&limit={0}'.format(self.limit)
def process_options(self):
    """Parse options; validate the optional --expected environment regex."""
    super(CheckPrestoEnvironment, self).process_options()
    expected_env = self.get_opt('expected')
    if expected_env:
        validate_regex(expected_env, 'expected environment')
    self.expected = expected_env