def process_args(self):
    self.no_args()
    self.user = self.get_opt('user')
    if self.user == 'root':
        self.user = '******'
    self.days = self.get_opt('days')
    validate_int(self.days, 'days')
def process_options_common(self):
    self.limit = self.get_opt('limit')
    self.list_apps = self.get_opt('list_apps')
    if self.include is not None:
        validate_regex(self.include, 'include')
        self.include = re.compile(self.include, re.I)
    if self.exclude is not None:
        validate_regex(self.exclude, 'exclude')
        self.exclude = re.compile(self.exclude, re.I)
    queue = self.get_opt('queue')
    if queue:
        validate_regex(queue, 'queue')
        self.queue = re.compile(queue, re.I)
    exclude_queue = self.get_opt('exclude_queue')
    if exclude_queue:
        validate_regex(exclude_queue, 'exclude queue')
        self.exclude_queue = re.compile(exclude_queue, re.I)
    self.limit = self.get_opt('limit')
    validate_int(self.limit, 'num results', 1, None)
    self.path += '?states=running&limit={0}'.format(self.limit)
    self.validate_thresholds(optional=True)
def process_args(self):
    self.host_list = self.args
    if not self.host_list:
        self.usage('no host arguments given, must give at least one regionserver host as an argument')
    for host in self.host_list:
        validate_host(host)
    self.port = self.get_opt('port')
    validate_port(self.port)
    self.table = self.get_opt('table')
    table_chars = 'A-Za-z0-9:._-'
    if self.table:
        validate_chars(self.table, 'table', table_chars)
    else:
        self.table = '[{}]+'.format(table_chars)
    self.namespace = self.get_opt('namespace')
    validate_chars(self.namespace, 'hbase namespace', 'A-Za-z0-9:._-')
    self.interval = self.get_opt('interval')
    self.count = self.get_opt('count')
    self.since_uptime = self.get_opt('average')
    validate_int(self.interval, 'interval')
    validate_int(self.count, 'count')
    self.interval = int(self.interval)
    self.count = int(self.count)
    if self.count == 0:
        self.disable_timeout()
    if self.get_opt('reads'):
        self.show.add('read')
    if self.get_opt('writes'):
        self.show.add('write')
    if self.get_opt('total'):
        self.show.add('total')
def process_options(self):
    self.repo = self.get_opt('repo')
    validate_chars(self.repo, 'repo', 'A-Za-z0-9/_-')
    # official repos don't have slashes in them but then you can't check their build statuses either
    if '/' not in self.repo:
        self.usage('--repo must contain a slash (/) in it - ' + \
                   'official repos are not supported as DockerHub doesn\'t expose their build info')
    (namespace, repo) = self.repo.split('/', 1)
    validate_chars(namespace, 'namespace', 'A-Za-z0-9_-')
    validate_chars(repo, 'repo', 'A-Za-z0-9_-')
    self.repo = '{0}/{1}'.format(namespace, repo)
    # not needed as dashes and underscores are all that validation above permits through and they
    # are returned as is and processed successfully by DockerHub API
    #(user, repo) = self.repo.split('/', 1)
    #repo = urllib.quote_plus(repo)
    #self.repo = '{0}/{1}'.format(user, repo)
    self.tag = self.get_opt('tag')
    if self.tag is not None:
        # if you have a tag which contains characters other than these then please raise a ticket for extension at:
        #
        # https://github.com/harisekhon/nagios-plugins/issues
        #
        self.tag = self.tag.lstrip(':')
        validate_chars(self.tag, 'tag', r'A-Za-z0-9/\._-')
        #if not self.tag:
        #    self.usage('--tag cannot be blank if given')
    self.max_pages = self.get_opt('pages')
    # if you have to iterate more than 20 pages you have problems, and this check will take ages
    validate_int(self.max_pages, 'max pages', 1, 20)
    self.max_pages = int(self.max_pages)
def process_options(self):
    super(CheckHadoopYarnAppRunning, self).process_options()
    self.app = self.get_opt('app')
    self.app_user = self.get_opt('user')
    self.queue = self.get_opt('queue')
    self.min_containers = self.get_opt('min_containers')
    self.limit = self.get_opt('limit')
    self.warn_on_dup_app = self.get_opt('warn_on_duplicate_app')
    self.list_apps = self.get_opt('list_apps')
    if not self.list_apps:
        if not self.app:
            self.usage('--app regex not defined')
        validate_regex(self.app, 'app')
    if self.app_user is not None:
        validate_chars(self.app_user, 'app user', r'\w')
    if self.queue is not None:
        validate_chars(self.queue, 'queue', r'\w-')
    if self.min_containers is not None:
        validate_int(self.min_containers, 'min containers', 0, None)
        self.min_containers = int(self.min_containers)
    self.limit = self.get_opt('limit')
    validate_int(self.limit, 'num results', 1, None)
    self.path += '?states=running&limit={0}'.format(self.limit)
    self.validate_thresholds(optional=True)
def process_options(self):
    super(CheckHadoopYarnAppLastFinishedState, self).process_options()
    self.app = self.get_opt('app')
    self.app_user = self.get_opt('user')
    self.queue = self.get_opt('queue')
    self.limit = self.get_opt('limit')
    self.warn_on_dup_app = self.get_opt('warn_on_duplicate_app')
    self.list_apps = self.get_opt('list_apps')
    if not self.list_apps:
        if not self.app:
            self.usage('--app name is not defined')
        validate_regex(self.app, 'app')
    if self.app_user is not None:
        validate_chars(self.app_user, 'app user', r'\w')
    if self.queue is not None:
        validate_chars(self.queue, 'queue', r'\w-')
    self.limit = self.get_opt('limit')
    validate_int(self.limit, 'num results', 1, None)
    # Not limited to states here in case we miss one, instead will return all and
    # then explicitly skip only RUNNING/ACCEPTED states
    self.path += '?limit={0}'.format(self.limit)
    self.validate_thresholds(optional=True)
def process_options(self):
    self.travis_token = self.get_opt('travis_token')
    self.repo = self.get_opt('repo')
    self.job_id = self.get_opt('job_id')
    if self.args:
        if '/' in self.args[0] and '://' not in self.args[0]:
            if not self.repo:
                log.info('using argument as --repo')
                self.repo = self.args[0]
        elif not self.job_id:
            log.info('using argument as --job-id')
            self.job_id = self.args[0]
    if self.job_id:
        # convenience to be able to lazily paste a URL like the following and still have it extract the job_id
        # https://travis-ci.org/HariSekhon/nagios-plugins/jobs/283840596#L1079
        self.job_id = self.job_id.split('/')[-1].split('#')[0]
        validate_chars(self.job_id, 'job id', '0-9')
    elif self.repo:
        travis_user = os.getenv('TRAVIS_USER')
        if '/' not in self.repo:
            self.repo = '/' + self.repo
        if self.repo[0] == '/' and travis_user:
            self.repo = travis_user + self.repo
        validate_chars(self.repo, 'repo', r'\/\w\.-')
    else:
        self.usage('--job-id / --repo not specified')
    validate_alnum(self.travis_token, 'travis token')
    self.headers['Authorization'] = 'token {0}'.format(self.travis_token)
    self.num = self.get_opt('num')
    validate_int(self.num, 'num', 1)
    self.num = int(self.num)
    self.completed = self.get_opt('completed')
    self.failed = self.get_opt('failed')
def process_options(self):
    self.repo = self.get_opt('repo')
    #validate_chars(self.repo, 'repo', 'A-Za-z0-9/_-')
    # official repos don't have slashes in them but then you can't check their build statuses either
    #if '/' not in self.repo:
    #    self.usage('--repo must contain a slash (/) in it - ' + \
    #               'official repos are not supported as DockerHub doesn\'t expose their build info')
    (namespace, repo) = self.repo.split('/', 1)
    validate_chars(namespace, 'namespace', 'A-Za-z0-9_-')
    validate_chars(repo, 'repo', 'A-Za-z0-9_-')
    self.repo = '{0}/{1}'.format(namespace, repo)
    # not needed as dashes and underscores are all that validation above permits through and they
    # are returned as is and processed successfully by DockerHub API
    #(user, repo) = self.repo.split('/', 1)
    #repo = urllib.quote_plus(repo)
    #self.repo = '{0}/{1}'.format(user, repo)
    self.tag = self.get_opt('tag')
    if self.tag is not None:
        # if you have a tag which contains characters other than these then please raise a ticket for extension at:
        #
        # https://github.com/harisekhon/nagios-plugins/issues
        #
        self.tag = self.tag.lstrip(':')
        validate_chars(self.tag, 'tag', r'A-Za-z0-9/\._-')
        #if not self.tag:
        #    self.usage('--tag cannot be blank if given')
    self.max_pages = self.get_opt('pages')
    # if you have to iterate more than 20 pages you have problems, and this check will take ages
    validate_int(self.max_pages, 'max pages', 1, 20)
    self.max_pages = int(self.max_pages)
def process_args(self):
    self.host_list = self.args
    if not self.host_list:
        self.usage('no host arguments given, must give at least one regionserver host as an argument')
    for host in self.host_list:
        validate_host(host)
    self.port = self.get_opt('port')
    validate_port(self.port)
    self.table = self.get_opt('table')
    table_chars = 'A-Za-z0-9:._-'
    if self.table:
        validate_chars(self.table, 'table', table_chars)
    else:
        self.table = '[{}]+'.format(table_chars)
    self.namespace = self.get_opt('namespace')
    validate_chars(self.namespace, 'hbase namespace', 'A-Za-z0-9:._-')
    self.top_n = self.get_opt('top')
    if self.top_n:
        validate_int(self.top_n, 'top N', 1)
        self.top_n = int(self.top_n)
    self.request_threshold = self.get_opt('requests')
    validate_int(self.request_threshold, 'request count threshold')
    self.request_threshold = int(self.request_threshold)
def process_args(self):
    self.host_list = self.args
    if not self.host_list:
        self.usage('no host arguments given, must give at least one regionserver host as an argument')
    for host in self.host_list:
        validate_host(host)
    self.port = self.get_opt('port')
    validate_port(self.port)
    self.table = self.get_opt('table')
    table_chars = 'A-Za-z0-9:._-'
    if self.table:
        validate_chars(self.table, 'table', table_chars)
    else:
        self.table = '[{}]+'.format(table_chars)
    self.namespace = self.get_opt('namespace')
    validate_chars(self.namespace, 'hbase namespace', 'A-Za-z0-9:._-')
    self.region_regex = re.compile('^Namespace_{namespace}_table_({table})_region_(.+)_metric_{metric}'\
                                   .format(namespace=self.namespace, table=self.table, metric=self.metric))
    self.top_n = self.get_opt('top')
    if self.top_n:
        validate_int(self.top_n, 'top N', 1)
        self.top_n = int(self.top_n)
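# Illustrative sketch only (not part of the plugin above): shows how the region metric
# regex built in process_args() would match a per-region JMX metric name. The metric
# name, table and region below are hypothetical examples of the
# 'Namespace_<ns>_table_<table>_region_<region>_metric_<metric>' naming convention the
# regex assumes.
import re

region_regex = re.compile('^Namespace_default_table_(mytable)_region_(.+)_metric_readRequestCount')
example_metric = 'Namespace_default_table_mytable_region_1588230740_metric_readRequestCount'
match = region_regex.match(example_metric)
if match:
    # group(1) is the table name, group(2) is the region's encoded name
    print('table={0} region={1}'.format(match.group(1), match.group(2)))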
def process_args(self):
    self.host_list = self.args
    if not self.host_list:
        self.usage('no host arguments given, must give at least one regionserver host as an argument')
    for host in self.host_list:
        validate_host(host)
    self.port = self.get_opt('port')
    validate_port(self.port)
    self.interval = self.get_opt('interval')
    self.count = self.get_opt('count')
    self.since_uptime = self.get_opt('average')
    validate_int(self.interval, 'interval')
    validate_int(self.count, 'count')
    self.interval = int(self.interval)
    self.count = int(self.count)
    if self.count == 0:
        self.disable_timeout()
    self.request_type = self.get_opt('type')
    if self.request_type:
        self.request_type = [_.strip() for _ in self.request_type.split(',')]
        for _ in self.request_type:
            if _ not in self.request_types:
                self.usage('--type may only include: {}'.format(', '.join(self.request_types)))
def process_args(self):
    self.host = self.get_opt('host')
    self.port = self.get_opt('port')
    self.user = self.get_opt('user')
    self.password = self.get_opt('password')
    validate_host(self.host)
    validate_port(self.port)
    # pika library requires int type
    self.port = int(self.port)
    validate_user(self.user)
    validate_password(self.password)
    self.vhost = self.get_opt('vhost')
    self.vhost = self.vhost if self.vhost else '/'
    validate_chars(self.vhost, 'vhost', r'/\w\._-')
    self.exchange = self.get_opt('exchange')
    if self.exchange:
        validate_chars(self.exchange, 'exchange', r'\w\._-')
    else:
        log_option('exchange', self.exchange)
    self.exchange_type = self.get_opt('exchange_type')
    if self.exchange_type:
        if self.exchange_type not in self.valid_exchange_types:
            self.usage('invalid --exchange-type given, expected one of: {valid_exchange_types}'\
                       .format(valid_exchange_types=', '.join(self.valid_exchange_types)))
    log_option('exchange type', self.exchange_type)
    self.queue = self.get_opt('queue')
    if self.queue:
        validate_chars(self.queue, 'queue', r'\w\._-')
    else:
        log_option('queue', self.queue)
    self.routing_key = self.get_opt('routing_key')
    if not self.routing_key:
        self.routing_key = self.queue
    log_option('routing key', self.routing_key)
    #self.no_ack = self.get_opt('no_ack')
    log_option('no ack', self.no_ack)
    self.connection_attempts = self.get_opt('connection_attempts')
    validate_int(self.connection_attempts, 'connection attempts', min_value=1, max_value=10)
    self.connection_attempts = int(self.connection_attempts)
    self.retry_delay = self.get_opt('retry_delay')
    validate_int(self.retry_delay, 'retry delay', min_value=0, max_value=10)
    self.retry_delay = int(self.retry_delay)
    self.use_transactions = self.get_opt('use_transactions')
    #self.durable = not self.get_opt('non_durable')
    if self.get_opt('non_durable'):
        self.durable = False
    log_option('non-durable', not self.durable)
    sleep_secs = self.get_opt('sleep')
    if sleep_secs:
        # validation done through property wrapper
        self.sleep_secs = sleep_secs
    log_option('sleep secs', self.sleep_secs)
    self.validate_thresholds()
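# Illustrative sketch only, not the plugin's actual connection code: shows how the
# options validated above (host, port, vhost, credentials, connection attempts, retry
# delay) typically map onto pika's connection parameters. The host, port and
# credentials below are assumed values and a locally running RabbitMQ broker is assumed.
import pika

credentials = pika.PlainCredentials('guest', 'guest')        # hypothetical user/password
params = pika.ConnectionParameters(host='localhost',         # assumed broker host
                                   port=5672,                # pika requires an int port
                                   virtual_host='/',
                                   credentials=credentials,
                                   connection_attempts=3,
                                   retry_delay=2)
connection = pika.BlockingConnection(params)
channel = connection.channel()
channel.close()
connection.close()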
def run(self):
    self.no_args()
    host = self.get_opt('host')
    port = self.get_opt('port')
    user = self.get_opt('user')
    password = self.get_opt('password')
    if self.get_opt('ssl'):
        self.protocol = 'https'
    history_mins = self.get_opt('history_mins')
    num = self.get_opt('num')
    #inventory_id = self.get_opt('id')
    source = self.get_opt('source')
    dest = self.get_opt('dest')
    max_age = self.get_opt('max_age')
    max_runtime = self.get_opt('max_runtime')
    validate_host(host)
    validate_port(port)
    validate_user(user)
    validate_password(password)
    validate_float(history_mins, 'history mins')
    self.history_mins = float(history_mins)
    filter_opts = {}
    if self.history_mins:
        now = datetime.now()
        filter_opts['dateRangeStart'] = datetime.strftime(now - timedelta(minutes=self.history_mins), '%F %H:%M:%S')
        filter_opts['dateRangeEnd'] = datetime.strftime(now, '%F %H:%M:%S')
    if num is not None:
        validate_int(num, 'num ingestions', 1)
    #if inventory_id is not None:
    #    validate_chars(inventory_id, 'ingestion id', r'\w-')
    #    filter_opts['inventoryId'] = inventory_id
    if source is not None:
        log_option('source', source)
        filter_opts['fileName'] = source
    if dest is not None:
        log_option('dest', dest)
        filter_opts['destinationPath'] = dest
    if max_age is not None:
        validate_float(max_age, 'max age', 1)
        max_age = float(max_age)
    if max_runtime is not None:
        validate_float(max_runtime, 'max incomplete runtime', 1)
        max_runtime = float(max_runtime)
    self.url_base = '{protocol}://{host}:{port}/bedrock-app/services/rest'.format(host=host,
                                                                                  port=port,
                                                                                  protocol=self.protocol)
    # auth first, get JSESSIONID cookie
    # cookie jar doesn't work in Python or curl, must extract JSESSIONID to header manually
    #self.jar = cookielib.CookieJar()
    log.info('authenticating to Zaloni Bedrock')
    (_, self.auth_time) = self.req(url='{url_base}/admin/getUserRole'.format(url_base=self.url_base),
                                   # using json instead of constructing string manually,
                                   # this correctly escapes backslashes in password
                                   body=json.dumps({"username": user, "password": password}))
    if self.get_opt('list'):
        self.list_ingestions(num=num)
    self.check_ingestion(num=num, filter_opts=filter_opts, max_age=max_age, max_runtime=max_runtime)
def run(self):
    self.no_args()
    filename = self.get_opt('file')
    self.max_file_age = self.get_opt('max_file_age')
    validate_file(filename, 'hbck')
    validate_int(self.max_file_age, 'max file age', 0, 86400 * 31)
    self.max_file_age = int(self.max_file_age)
    self.parse(filename)
def process_options(self):
    super(CheckDockerSwarmServiceStatus, self).process_options()
    self.service = self.get_opt('service')
    self.updated = self.get_opt('warn_if_last_updated_within')
    validate_chars(self.service, 'docker service', r'A-Za-z0-9/:\._-')
    if self.updated is not None:
        validate_int(self.updated, 'last updated threshold')
        self.updated = int(self.updated)
    self.validate_thresholds(simple='lower', positive=True, optional=True)
def main(self):
    # log.debug('running main()')
    log.setLevel(logging.WARN)
    self.setup()
    try:
        self.add_options()
        self.add_default_opts()
    except InvalidOptionException as _:
        self.usage(_)
    try:
        self.__parse_args__()
        # broken
        # autoflush()
        # too late
        # os.environ['PYTHONUNBUFFERED'] = "anything"
        self.verbose = self.get_opt('verbose')
        if self.is_option_defined('quiet') and self.get_opt('quiet'):
            self.verbose = 0
        elif self.verbose > 2:
            log.setLevel(logging.DEBUG)
        elif self.verbose > 1:
            log.setLevel(logging.INFO)
        elif self.verbose > 0 and self._prog[0:6] != 'check_':
            log.setLevel(logging.WARN)
        if self.options.debug:
            log.setLevel(logging.DEBUG)  # pragma: no cover
            log.debug('enabling debug logging')
            if self.verbose < 3:
                self.verbose = 3
        log.info('Hari Sekhon %s', self.version)
        log.info(self._github_repo)
        log.info('verbose level: %s (%s)', self.verbose, logging.getLevelName(log.getEffectiveLevel()))
        if self.timeout is not None:
            validate_int(self.timeout, 'timeout', 0, self.timeout_max)
            log.debug('setting timeout alarm (%s)', self.timeout)
            signal.signal(signal.SIGALRM, self.timeout_handler)
            signal.alarm(int(self.timeout))
        # if self.options.version:
        #     print(self.version)
        #     sys.exit(ERRORS['UNKNOWN'])
        self.process_options()
        self.process_args()
        try:
            self.run()
        except CriticalError as _:
            qquit('CRITICAL', _)
        except WarningError as _:
            qquit('WARNING', _)
        except UnknownError as _:
            qquit('UNKNOWN', _)
        self.__end__()
    except InvalidOptionException as _:
        self.usage(_)  # pragma: no cover
    except KeyboardInterrupt:
        # log.debug('Caught control-c...')
        print('Caught control-c...')  # pragma: no cover
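# Minimal standalone sketch of the SIGALRM self-timeout pattern used in main() above.
# The handler below is a hypothetical stand-in - the real timeout_handler lives in the
# CLI base class - and the 2 second limit / 5 second sleep are arbitrary demo values.
import signal
import time

def timeout_handler(signum, frame):
    # called when signal.alarm() fires; exit with a message rather than hanging forever
    raise SystemExit('UNKNOWN: self timed out')

signal.signal(signal.SIGALRM, timeout_handler)
signal.alarm(2)        # hypothetical 2 second limit
try:
    time.sleep(5)      # simulated long-running check, interrupted by the alarm
finally:
    signal.alarm(0)    # cancel the alarm once work finishes (or on the way out)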
def run(self):
    if not self.args:
        self.usage('no search term given as args')
    if len(self.args) > 1:
        self.usage('only single search term argument may be given')
    term = self.args[0]
    log.info('term: %s', term)
    num = self.get_opt('num')
    validate_int(num, 'limit', 1, 1000)
    self.print_results(self.args[0], num)
def process_options(self):
    super(CheckJenkinsJob, self).process_options()
    self.job = self.get_opt('job')
    self.list_jobs = self.get_opt('list')
    if not self.list_jobs:
        validate_chars(self.job, 'job', r'A-Za-z0-9\s\._-')
        self.msg += "'{job}' ".format(job=self.job)
    self.age = self.get_opt('age')
    if self.age:
        validate_int(self.age, 'age')
        self.age = int(self.age)
    self.validate_thresholds(integer=False, optional=True)
def run(self):
    self.no_args()
    host = self.get_opt('host')
    port = self.get_opt('port')
    user = self.get_opt('user')
    password = self.get_opt('password')
    workflow_id = self.get_opt('id')
    workflow_name = self.get_opt('name')
    max_age = self.get_opt('max_age')
    max_runtime = self.get_opt('max_runtime')
    if self.get_opt('ssl'):
        self.protocol = 'https'
    validate_host(host)
    validate_port(port)
    validate_user(user)
    validate_password(password)
    if workflow_id is not None:
        if workflow_name is not None:
            self.usage('cannot specify both --id and --name simultaneously')
        validate_int(workflow_id, 'workflow id', 1)
    elif workflow_name is not None:
        validate_chars(workflow_name, 'workflow name', r'\w-')
    elif self.get_opt('list'):
        pass
    else:
        self.usage('must specify either --name or --id or use --list to find them')
    if max_age is not None:
        validate_float(max_age, 'max age', 1)
        max_age = float(max_age)
    if max_runtime is not None:
        validate_float(max_runtime, 'max runtime', 1)
        max_runtime = float(max_runtime)
    self.url_base = '{protocol}://{host}:{port}/bedrock-app/services/rest'.format(host=host,
                                                                                  port=port,
                                                                                  protocol=self.protocol)
    # auth first, get JSESSIONID cookie
    # cookie jar doesn't work in Python or curl, must extract JSESSIONID to header manually
    #self.jar = cookielib.CookieJar()
    log.info('authenticating to Zaloni Bedrock')
    (_, self.auth_time) = self.req(url='{url_base}/admin/getUserRole'.format(url_base=self.url_base),
                                   # using json instead of constructing string manually,
                                   # this correctly escapes backslashes in password
                                   body=json.dumps({"username": user, "password": password}))
    # alternative method
    #session = requests.Session()
    #req = self.req(session,
    #               url='http://%(host)s:%(port)s/bedrock-app/services/rest/%(user)s/getUserRole' % locals(),
    #               method='POST')
    if self.get_opt('list'):
        self.list_workflows()
    self.check_workflow(workflow_name, workflow_id, max_age, max_runtime)
def validate_options(self):
    if not self.host_list:
        self.usage('no hosts specified')
    validate_hostport_list(self.host_list, port_optional=True)
    validate_port(self.port)
    if self.protocol and self.protocol not in ('http', 'https', 'ping'):
        code_error('invalid protocol, must be one of: http, https or ping')
    if self.regex:
        if not self.protocol:
            self.usage('--regex cannot be used without --http / --https')
        validate_regex(self.regex)
        self.regex = re.compile(self.regex)
    validate_int(self.num_threads, 'num threads', 1, 100)
def process_args(self):
    # this resets DEBUG env var
    #log.setLevel(logging.INFO)
    self.no_args()
    self.host = self.get_opt('host')
    self.port = self.get_opt('port')
    validate_host(self.host)
    validate_port(self.port)
    # happybase socket requires an integer
    self.port = int(self.port)
    self.table = self.get_opt('table')
    self.num_rows = self.get_opt('num')
    self.key_length = self.get_opt('key_length')
    self.value_length = self.get_opt('value_length')
    validate_database_tablename(self.table)
    validate_int(self.num_rows, 'num rows', 1, 1000000000)
    validate_int(self.key_length, 'key length', 10, 1000)
    validate_int(self.value_length, 'value length', 1, 1000000)
    self.num_rows = int(self.num_rows)
    self.skew = self.get_opt('skew')
    log_option('skew data', self.skew)
    self.skew_pc = self.get_opt('skew_percentage')
    validate_int(self.skew_pc, 'skew percentage', 0, 100)
    self.skew_pc = int(self.skew_pc)
    self.drop_table = self.get_opt('drop_table')
    self.use_existing_table = self.get_opt('use_existing_table')
    if self.drop_table and self.table != self.default_table_name:
        die("not allowed to use --drop-table if using a table name other than the default table '{0}'"\
            .format(self.default_table_name))
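# Illustrative sketch only (assumed usage, not the generator's actual write loop):
# shows how the validated host / integer port / table settings above are typically used
# with the happybase library this data generator is built on. The host, Thrift port,
# table name and column family below are assumptions; the table is assumed to already
# exist with a column family 'cf', and an HBase Thrift server is assumed to be running.
import happybase

connection = happybase.Connection(host='localhost', port=9090)  # Thrift port must be an int
table = connection.table('HS_test_data')                        # hypothetical table name
table.put(b'example_row_key', {b'cf:c': b'example_value'})
print(table.row(b'example_row_key'))
connection.close()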
def process_options(self):
    self.no_args()
    self.host = self.get_opt('host')
    self.port = self.get_opt('port')
    self.user = self.get_opt('user')
    self.password = self.get_opt('password')
    self._all = self.get_opt('all')
    self.workflow_id = self.get_opt('id')
    self.workflow_name = self.get_opt('name')
    self.max_age = self.get_opt('max_age')
    self.max_runtime = self.get_opt('max_runtime')
    self.min_runtime = self.get_opt('min_runtime')
    if self.get_opt('ssl'):
        self.protocol = 'https'
    validate_host(self.host)
    validate_port(self.port)
    validate_user(self.user)
    validate_password(self.password)
    if self._all and (self.workflow_name is not None or self.workflow_id is not None):
        self.usage('cannot specify both --all and --name/--id simultaneously')
    if self.workflow_id is not None:
        if self.workflow_name is not None:
            self.usage('cannot specify both --id and --name simultaneously')
        validate_int(self.workflow_id, 'workflow id', 1)
        self.workflow_id = int(self.workflow_id)
    elif self.workflow_name is not None:
        validate_chars(self.workflow_name, 'workflow name', r'\w\s-')
    elif self._all:
        pass
    elif self.get_opt('list'):
        pass
    else:
        self.usage('must specify one of --name / --id / --all or use --list to find workflow names/IDs to specify')
    if self.max_age is not None:
        validate_float(self.max_age, 'max age', 1)
        self.max_age = float(self.max_age)
    if self.max_runtime is not None:
        validate_float(self.max_runtime, 'max runtime', 1)
        self.max_runtime = float(self.max_runtime)
    if self.min_runtime is not None:
        validate_float(self.min_runtime, 'min runtime', 0)
        self.min_runtime = float(self.min_runtime)
        if self.max_runtime is not None and self.min_runtime > self.max_runtime:
            self.usage('--min-runtime cannot be greater than --max-runtime!')
def process_args(self):
    # this resets DEBUG env var
    #log.setLevel(logging.INFO)
    self.no_args()
    self.host = self.get_opt('host')
    self.port = self.get_opt('port')
    validate_host(self.host)
    validate_port(self.port)
    self.table = self.get_opt('table')
    self.num_rows = self.get_opt('num')
    self.key_length = self.get_opt('key_length')
    self.value_length = self.get_opt('value_length')
    validate_database_tablename(self.table)
    validate_int(self.num_rows, 'num rows', 1, 1000000000)
    validate_int(self.key_length, 'key length', 10, 1000)
    validate_int(self.value_length, 'value length', 1, 1000000)
    self.num_rows = int(self.num_rows)
    self.skew = self.get_opt('skew')
    log_option('skew data', self.skew)
    self.skew_pc = self.get_opt('skew_percentage')
    validate_int(self.skew_pc, 'skew percentage', 0, 100)
    self.skew_pc = int(self.skew_pc)
    self.drop_table = self.get_opt('drop_table')
    self.use_existing_table = self.get_opt('use_existing_table')
    if self.drop_table and self.table != self.default_table_name:
        die("not allowed to use --drop-table if using a table name other than the default table '{0}'"\
            .format(self.default_table_name))
def main(self):
    # DEBUG env var is picked up immediately in pylib utils, do not override it here if so
    if os.getenv('DEBUG'):
        log.setLevel(logging.DEBUG)
    if not log.isEnabledFor(logging.DEBUG) and \
       not log.isEnabledFor(logging.ERROR):  # do not downgrade logging either
        log.setLevel(logging.WARN)
    self.setup()
    try:
        self.add_options()
        self.add_default_opts()
    except InvalidOptionException as _:
        self.usage(_)
    try:
        self.__parse_args__()
        # broken
        # autoflush()
        # too late
        # os.environ['PYTHONUNBUFFERED'] = "anything"
        log.info('Hari Sekhon %s', self.version)
        log.info(self._github_repo)
        log.info('verbose level: %s (%s)', self.verbose, logging.getLevelName(log.getEffectiveLevel()))
        if self.timeout is not None:
            validate_int(self.timeout, 'timeout', 0, self.timeout_max)
            log.debug('setting timeout alarm (%s)', self.timeout)
            signal.signal(signal.SIGALRM, self.timeout_handler)
            signal.alarm(int(self.timeout))
        # if self.options.version:
        #     print(self.version)
        #     sys.exit(ERRORS['UNKNOWN'])
        self.process_options()
        self.process_args()
        try:
            self.run()
        except CriticalError as _:
            qquit('CRITICAL', _)
        except WarningError as _:
            qquit('WARNING', _)
        except UnknownError as _:
            qquit('UNKNOWN', _)
        self.__end__()
    except InvalidOptionException as _:
        if log.isEnabledFor(logging.DEBUG):
            log.debug(traceback.format_exc())
        self.usage(_)  # pragma: no cover
    except KeyboardInterrupt:
        # log.debug('Caught control-c...')
        print('Caught control-c...')  # pragma: no cover
def process_options(self):
    super(HdfsFindReplicationFactor1, self).process_options()
    self.path_list = self.args
    if not self.path_list:
        self.path_list = ['/']
    self.replication_factor = self.get_opt('set_replication')
    if self.replication_factor is not None:
        validate_int(self.replication_factor, 'set replication', 2, 5)
    hadoop_home = self.get_opt('hadoop_home')
    if hadoop_home is not None:
        os.environ['HADOOP_HOME'] = hadoop_home
    hadoop_home_env = os.getenv('HADOOP_HOME')
    log_option('HADOOP_HOME', hadoop_home_env)
    if hadoop_home_env:
        log.info('will search for Hadoop config in %s/conf', hadoop_home_env)
def process_args(self):
    #log.setLevel(logging.INFO)
    self.no_args()
    self.host = self.get_opt('host')
    self.port = self.get_opt('port')
    self.table = self.get_opt('table')
    self.prefix_length = self.get_opt('key_prefix_length')
    self.sort = self.get_opt('sort')
    self.sort_desc = self.get_opt('desc')
    validate_host(self.host)
    validate_port(self.port)
    if not self.get_opt('list_tables'):
        validate_chars(self.table, 'hbase table', 'A-Za-z0-9:._-')
    validate_int(self.prefix_length, 'row key prefix length', 1, 10)
    self.prefix_length = int(self.prefix_length)
def process_partitions(self, list_partitions=False):
    if list_partitions:
        if self.topic:
            self.print_topic_partitions(self.topic)
        else:
            for topic in self.get_topics():
                self.print_topic_partitions(topic)
        sys.exit(ERRORS['UNKNOWN'])
    self.partition = self.get_opt('partition')
    # technically optional, will hash to a random partition, but need to know which partition to get offset
    if self.partition is None:
        log.info('partition not specified, getting random partition')
        self.partition = random.choice(list(self.get_topic_partitions(self.topic)))
        log.info('selected partition %s', self.partition)
    validate_int(self.partition, "partition", 0, 10000)
def process_options(self):
    self.no_args()
    self.host = self.get_opt('host')
    self.port = self.get_opt('port')
    validate_host(self.host)
    validate_port(self.port)
    self.precision = self.get_opt('precision')
    self.list_tables = self.get_opt('list')
    if not self.list_tables:
        self.table = self.get_opt('table')
        validate_hbase_table(self.table, 'hbase')
    self.validate_thresholds(min=1)
    log_option('unique row', self.row)
    log_option('unique column qualifier', self.column)
    log_option('unique generated value', self.value)
    validate_int(self.precision, 'precision', 0, 10)
def process_args(self):
    self.brokers = self.get_opt('brokers')
    # TODO: add broker list validation back in
    # validate_hostport(self.brokers)
    log_option('brokers', self.brokers)
    self.timeout_ms = max((self.timeout * 1000 - 1000) / 2, 1000)
    try:
        list_topics = self.get_opt('list_topics')
        list_partitions = self.get_opt('list_partitions')
        if list_topics:
            self.print_topics()
            sys.exit(ERRORS['UNKNOWN'])
        self.topic = self.get_opt('topic')
    except KafkaError:
        raise CriticalError(self.exception_msg())
    if self.topic:
        validate_chars(self.topic, 'topic', 'A-Za-z-')
    elif list_topics or list_partitions:
        pass
    else:
        self.usage('--topic not specified')
    try:
        if list_partitions:
            if self.topic:
                self.print_topic_partitions(self.topic)
            else:
                for topic in self.get_topics():
                    self.print_topic_partitions(topic)
            sys.exit(ERRORS['UNKNOWN'])
    except KafkaError:
        raise CriticalError(self.exception_msg())
    self.partition = self.get_opt('partition')
    # technically optional, will hash to a random partition, but need to know which partition to get offset
    # if self.partition is not None:
    validate_int(self.partition, "partition", 0, 10000)
    self.topic_partition = TopicPartition(self.topic, self.partition)
    self.acks = self.get_opt('acks')
    try:
        self.acks = int(self.acks)
    except ValueError:
        pass
    log_option('acks', self.acks)
    self.validate_thresholds()
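# Illustrative sketch only (assumed kafka-python usage, not this plugin's internals):
# shows how a validated broker list, topic and partition like the ones above are
# typically used to list a topic's partitions and pin a consumer to one TopicPartition.
# The broker address and topic name below are assumptions; a reachable Kafka broker and
# an existing topic 'test' are assumed.
from kafka import KafkaConsumer, TopicPartition

consumer = KafkaConsumer(bootstrap_servers='localhost:9092')  # assumed broker
partitions = consumer.partitions_for_topic('test')            # hypothetical topic
print('partitions: %s' % partitions)
tp = TopicPartition('test', 0)                                # pin to partition 0
consumer.assign([tp])
consumer.seek_to_end(tp)
print('end offset: %s' % consumer.position(tp))
consumer.close()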
def process_options(self):
    super(CheckPrestoQueries, self).process_options()
    # Possible Query States - https://prestodb.io/docs/current/admin/web-interface.html
    self.list = self.get_opt('list')
    if not self.list:
        if self.get_opt('running'):
            self.state_selector = ['RUNNING', 'PLANNING', 'STARTING', 'FINISHING']
        if self.get_opt('failed'):
            if self.state_selector is not None:
                self.usage('cannot specify more than one of --running / --failed / --blocked / --queued at a time')
            self.state_selector = ['FAILED']
        if self.get_opt('blocked'):
            if self.state_selector is not None:
                self.usage('cannot specify more than one of --running / --failed / --blocked / --queued at a time')
            self.state_selector = ['BLOCKED']
        if self.get_opt('queued'):
            if self.state_selector is not None:
                self.usage('cannot specify more than one of --running / --failed / --blocked / --queued at a time')
            self.state_selector = ['QUEUED']
        if self.state_selector is None:
            self.usage('must specify one type of --running / --failed / --blocked / --queued queries')
    self.include = self.get_opt('include')
    self.exclude = self.get_opt('exclude')
    if self.include:
        validate_regex(self.include, 'include')
        self.include = re.compile(self.include, re.I)
    if self.exclude:
        validate_regex(self.exclude, 'exclude')
        self.exclude = re.compile(self.exclude, re.I)
    self.num = self.get_opt('num')
    validate_int(self.num, 'num', 0)
    self.num = int(self.num)
    self.min_queries = self.get_opt('min_queries')
    validate_int(self.min_queries, 'minimum queries', 0)
    self.min_queries = int(self.min_queries)
    self.validate_thresholds()
def process_args(self):
    self.files = self.args
    self.prefix_length = self.get_opt('key_prefix_length')
    self.skip_errors = self.get_opt('skip_errors')
    self.sort_desc = self.get_opt('desc')
    self.include_timestamps = self.get_opt('include_timestamps')
    if self.prefix_length is not None:
        validate_int(self.prefix_length, 'key prefix length', 1, 100)
        self.prefix_length = int(self.prefix_length)
    if not self.files:
        self.usage('no file(s) specified as arguments')
    self.files = uniq_list_ordered(self.files)
    for filename in self.files:
        if filename == '-':
            log_option('file', '<stdin>')
            continue
        validate_file(filename)
def process_options(self):
    hosts = self.get_opt('host')
    self.port = self.get_opt('port')
    if hosts:
        self.host_list = [host.strip() for host in hosts.split(',') if host]
    self.host_list += self.args
    self.host_list = uniq_list_ordered(self.host_list)
    if self.get_opt('ssl'):
        self.protocol = 'https'
        log_option('SSL', 'true')
    else:
        log_option('SSL', 'false')
    self.request_timeout = self.get_opt('request_timeout')
    validate_int(self.request_timeout, 'request timeout', 1, 60)
    self.validate_options()
def process_options(self):
    hosts = self.get_opt('host')
    self.port = self.get_opt('port')
    self.url_path = self.get_opt('url')
    self.regex = self.get_opt('regex')
    self.num_threads = self.get_opt('num_threads')
    self.request_timeout = self.get_opt('request_timeout')
    if hosts:
        self.host_list = [host.strip() for host in hosts.split(',') if host]
    self.host_list += self.args
    self.host_list = uniq_list_ordered(self.host_list)
    if self.get_opt('random'):
        log_option('random', True)
        shuffle(self.host_list)
    if self.get_opt('https'):
        self.protocol = 'https'
        # optparse returns string, even though default we gave from __init__ was int
        # comparison would fail without this cast
        if str(self.port) == '80':
            log.info('overriding port 80 => 443 for https')
            self.port = 443
    elif self.get_opt('http'):
        self.protocol = 'http'
        if not self.port:
            self.port = 80
    if self.get_opt('ping'):
        if self.protocol:
            self.usage('cannot specify --ping with --http / --https, mutually exclusive tests!')
        elif self.port != self.default_port:
            self.usage('cannot specify --port with --ping, mutually exclusive options!')
        self.protocol = 'ping'
    if self.url_path:
        if self.protocol is None:
            self.protocol = 'http'
        elif self.protocol == 'ping':
            self.usage('cannot specify --url-path with --ping, mutually exclusive options!')
    validate_int(self.request_timeout, 'request timeout', 1, 60)
    self.validate_options()
def process_options(self):
    super(CheckPrestoQueries, self).process_options()
    self.include = self.get_opt('include')
    self.exclude = self.get_opt('exclude')
    if self.include:
        validate_regex(self.include, 'include')
        self.include = re.compile(self.include, re.I)
    if self.exclude:
        validate_regex(self.exclude, 'exclude')
        self.exclude = re.compile(self.exclude, re.I)
    self.num = self.get_opt('num')
    validate_int(self.num, 'num', 0)
    self.num = int(self.num)
    self.min_queries = self.get_opt('min_queries')
    validate_int(self.min_queries, 'minimum queries', 0)
    self.min_queries = int(self.min_queries)
    self.list = self.get_opt('list')
    self.validate_thresholds()
def process_options(self):
    self.no_args()
    self.host = self.get_opt('host')
    self.port = self.get_opt('port')
    self.user = self.get_opt('user')
    self.password = self.get_opt('password')
    self._all = self.get_opt('all')
    self.workflow_id = self.get_opt('id')
    self.workflow_name = self.get_opt('name')
    self.max_age = self.get_opt('max_age')
    self.max_runtime = self.get_opt('max_runtime')
    self.min_runtime = self.get_opt('min_runtime')
    if self.get_opt('ssl'):
        self.protocol = 'https'
    validate_host(self.host)
    validate_port(self.port)
    validate_user(self.user)
    validate_password(self.password)
    if self._all and (self.workflow_name is not None or self.workflow_id is not None):
        self.usage('cannot specify both --all and --name/--id simultaneously')
    if self.workflow_id is not None:
        if self.workflow_name is not None:
            self.usage('cannot specify both --id and --name simultaneously')
        validate_int(self.workflow_id, 'workflow id', 1)
        self.workflow_id = int(self.workflow_id)
    elif self.workflow_name is not None:
        validate_chars(self.workflow_name, 'workflow name', r'\w\s-')
    elif self._all:
        pass
    elif self.get_opt('list'):
        pass
    else:
        self.usage('must specify one of --name / --id / --all or use --list to find workflow names/IDs to specify')
    if self.max_age is not None:
        validate_float(self.max_age, 'max age', 1)
        self.max_age = float(self.max_age)
    if self.max_runtime is not None:
        validate_float(self.max_runtime, 'max runtime', 1)
        self.max_runtime = float(self.max_runtime)
    if self.min_runtime is not None:
        validate_float(self.min_runtime, 'min runtime', 0)
        self.min_runtime = float(self.min_runtime)
        if self.max_runtime is not None and self.min_runtime > self.max_runtime:
            self.usage('--min-runtime cannot be greater than --max-runtime!')
def process_options(self):
    super(CheckHadoopYarnQueueApps, self).process_options()
    self.allow = self.get_opt('allow')
    self.disallow = self.get_opt('disallow')
    self.limit = self.get_opt('limit')
    self.list_apps = self.get_opt('list_apps')
    if self.allow is not None:
        validate_regex(self.allow, 'allow')
        self.allow = re.compile(self.allow, re.I)
    if self.disallow is not None:
        validate_regex(self.disallow, 'disallow')
        self.disallow = re.compile(self.disallow, re.I)
    queue = self.get_opt('queue')
    validate_regex(queue, 'queue')
    self.queue = re.compile(queue, re.I)
    self.limit = self.get_opt('limit')
    validate_int(self.limit, 'num results', 1, None)
    self.path += '?states=running&limit={0}'.format(self.limit)
def process_options(self):
    self.no_args()
    self.host = self.get_opt('host')
    self.port = self.get_opt('port')
    self.row = self.get_opt('row')
    self.column = self.get_opt('column')
    self.regex = self.get_opt('expected')
    self.precision = self.get_opt('precision')
    self.graph = self.get_opt('graph')
    self.units = self.get_opt('units')
    validate_host(self.host)
    validate_port(self.port)
    self.list_tables = self.get_opt('list')
    if not self.list_tables:
        self.table = self.get_opt('table')
        validate_hbase_table(self.table, 'hbase')
        validate_hbase_rowkey(self.row)
        validate_hbase_column_qualifier(self.column)
    if self.regex is not None:
        validate_regex(self.regex, 'expected value')
    if self.units is not None:
        validate_units(self.units)
    self.validate_thresholds(optional=True, positive=False)
    validate_int(self.precision, 'precision', 0, 10)
def process_options(self):
    super(CheckPrestoQueries, self).process_options()
    # Possible Query States - https://prestodb.io/docs/current/admin/web-interface.html
    self.list = self.get_opt('list')
    if not self.list:
        if self.get_opt('running'):
            self.state_selector = ['RUNNING', 'PLANNING', 'STARTING', 'FINISHING']
        if self.get_opt('failed'):
            if self.state_selector is not None:
                self.usage('cannot specify more than one of --running / --failed / --blocked / --queued at a time')
            self.state_selector = ['FAILED']
        if self.get_opt('blocked'):
            if self.state_selector is not None:
                self.usage('cannot specify more than one of --running / --failed / --blocked / --queued at a time')
            self.state_selector = ['BLOCKED']
        if self.get_opt('queued'):
            if self.state_selector is not None:
                self.usage('cannot specify more than one of --running / --failed / --blocked / --queued at a time')
            self.state_selector = ['QUEUED']
        if self.state_selector is None:
            self.usage('must specify one type of --running / --failed / --blocked / --queued queries')
    self.include = self.get_opt('include')
    self.exclude = self.get_opt('exclude')
    if self.include:
        validate_regex(self.include, 'include')
        self.include = re.compile(self.include, re.I)
    if self.exclude:
        validate_regex(self.exclude, 'exclude')
        self.exclude = re.compile(self.exclude, re.I)
    self.num = self.get_opt('num')
    validate_int(self.num, 'num', 0)
    self.num = int(self.num)
    self.min_queries = self.get_opt('min_queries')
    validate_int(self.min_queries, 'minimum queries', 0)
    self.min_queries = int(self.min_queries)
    self.validate_thresholds()
def process_options(self):
    super(CheckPrestoWorkersResponseLag, self).process_options()
    self.max_age = self.get_opt('max_age')
    validate_int(self.max_age, 'max age', 0, 3600)
    self.max_age = int(self.max_age)
    self.validate_thresholds()
def timeout(self, secs):
    validate_int(secs, 'timeout', 0, self.timeout_max)
    #if not isInt(secs):
    #    raise CodingError('invalid timeout passed to set_timeout(), must be an integer representing seconds')  # pylint: disable=line-too-long
    log.debug('setting timeout to %s secs', secs)
    self.__timeout = int(secs)
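# Minimal standalone sketch of the validated property-setter pattern the timeout setter
# above follows. Everything here is an assumption for illustration: the class name,
# hard-coded maximum and inline range check stand in for the real class, which delegates
# validation to validate_int.
class TimeoutExample(object):

    timeout_max = 86400  # hypothetical upper bound in seconds

    def __init__(self):
        self.__timeout = None

    @property
    def timeout(self):
        return self.__timeout

    @timeout.setter
    def timeout(self, secs):
        # reject non-integers and out-of-range values before storing
        if not str(secs).isdigit() or not 0 <= int(secs) <= self.timeout_max:
            raise ValueError('invalid timeout, must be an integer between 0 and %s' % self.timeout_max)
        self.__timeout = int(secs)

obj = TimeoutExample()
obj.timeout = 10
print(obj.timeout)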