def check_multirecord_json(self):
    """Validate self.iostream as multi-record JSON (one JSON document per line).

    Returns True if every line is valid JSON (single-quoted lines are accepted
    only when self.permit_single_quotes is set), False otherwise. On failure
    sets self.failed and, unless in passthru mode, die()s with an error message.
    In passthru mode valid lines are echoed to stdout as they are checked.
    """
    log.debug('check_multirecord_json()')
    for line in self.iostream:
        if isJson(line):
            # can't use self.print() here, don't want to print valid for every line of a file / stdin
            if self.passthru:
                print(line, end='')
        elif isJson(line.replace("'", '"')):
            # line only parses after naive single->double quote conversion
            if self.permit_single_quotes:
                log.debug('valid multirecord json (single quoted)')
                # self.single_quotes_detected = True
                if self.passthru:
                    print(line, end='')
            else:
                log.debug('invalid multirecord json (single quoted)')
                self.failed = True
                if not self.passthru:
                    die('%s (multi-record format)' % self.invalid_json_msg_single_quotes)
                return False
        else:
            # line is not JSON in any accepted form - whole stream fails
            log.debug('invalid multirecord json')
            self.failed = True
            return False
    # self.multi_record_detected = True
    log.debug('multirecord json (all lines passed)')
    if not self.passthru:
        print('%s (multi-record format)' % self.valid_json_msg)
    return True
def process_file(self, filename):
    """Validate a single file, or stdin when filename is '-'.

    Peeks at up to the first 9 lines looking for a valid JSON document header
    (indicating Big Data one-record-per-line format) and if found validates
    line-by-line; otherwise slurps and validates the whole file, falling back
    to line-by-line validation on MemoryError for very large files.
    """
    if self.is_excluded(filename):
        return
    if filename == '-':
        self.iostream = sys.stdin
        self.process_json(sys.stdin.read(), '<STDIN>')
    else:
        # reset this flag which we use to only print single quote detection once per file
        self.single_quotes_detected = False
        try:
            with open(filename) as self.iostream:
                # check if it's a Big Data format file with json doc on first line
                # this is more efficient than slurping a large file only to fail with out of memory
                for _ in range(1, 10):
                    line = self.iostream.readline()
                    if line:
                        if isJson(line) or \
                           isJson(self.convert_single_quoted(line)) or \
                           isJson(self.convert_single_quoted_escaped(line)):
                            log.debug("header line of '{0}' detected as a valid JSON document".format(filename) +
                                      ", assuming Big Data format multi-line json")
                            self.process_multirecord_json(filename)
                            break
                    else:
                        # EOF within the first 9 lines with no JSON header - validate whole contents
                        try:
                            self.iostream.seek(0)
                            content = self.iostream.read()
                            self.process_json(content, filename)
                        except MemoryError:
                            # may be a big data format after all and perhaps the first record was broken
                            log.warning("memory error validating contents from file '{0}', ".format(filename) +
                                        "assuming Big Data multi-record json and re-trying validation line-by-line")
                            self.process_multirecord_json(filename)
                        # fix: without this break, hitting EOF left the loop running and the same
                        # content was re-seeked and re-validated on every remaining iteration
                        break
        except IOError as _:
            die("ERROR: %s" % _)
def check_json_line_single_quoted(self, line):
    """Return True if 'line' parses as JSON after single-quote conversion,
    trying both the plain conversion and the double-quote pre-escaping one."""
    for convert in (self.convert_single_quoted, self.convert_single_quoted_escaped):
        if isJson(convert(line)):
            return True
    return False
def check_multirecord_json(self):
    """Validate the stream as one JSON document per line.

    Returns True when every line is valid JSON, False otherwise; die()s with a
    single-quotes hint when a line only parses after quote conversion (unless
    in print/passthru mode).
    """
    passthru = self.options.print
    for line in self.iostream:
        if not isJson(line):
            self.failed = True
            # in passthru mode, or when the line isn't even single-quoted JSON, just fail
            if passthru or not isJson(line.replace("'", '"')):
                return False
            die('%s (multi-record format)' % self.invalid_json_msg_single_quotes)
        elif passthru:
            print(line, end='')
    if not passthru:
        print('%s (multi-record format)' % self.valid_json_msg)
    return True
def check_json(self, content):
    """Validate 'content' as JSON, falling back to single-quoted-JSON checks
    (when permitted) and then to multirecord (JSON-per-line) validation.

    Sets self.msg / self.failed and die()s on failure unless in passthru mode.
    Returns True on any successful validation, False otherwise.
    """
    log.debug('check_json()')
    if isJson(content):
        log.debug('valid json')
        self.msg = self.valid_json_msg
        self.print(content)
        # fix: previously fell through returning None (falsy) for valid JSON,
        # inconsistent with the True returned by every other success path
        return True
    # check if it's regular single quoted JSON a la MongoDB
    elif self.permit_single_quotes:
        log.debug('checking for single quoted JSON')
        json_single_quoted = self.convert_single_quoted(content)
        if isJson(json_single_quoted):
            # self.single_quotes_detected = True
            log.debug('valid json (single quotes)')
            self.msg = self.valid_json_msg_single_quotes
            self.print(content)
            return True
        log.debug(
            'single quoted JSON check failed, trying with pre-escaping double quotes'
        )
        # check if it's single quoted JSON with double quotes that aren't escaped,
        # by pre-escaping them before converting single quotes to doubles for processing
        json_single_quoted_escaped = self.convert_single_quoted_escaped(content)
        if isJson(json_single_quoted_escaped):
            #log.debug("found single quoted json with non-escaped double quotes in '%s'", filename)
            self.msg = self.valid_json_msg_single_quotes2
            self.print(content)
            return True
        log.debug(
            'single quoted JSON check failed even with pre-escaping any double quotes'
        )
        if self.rewind_check_multirecord_json():
            return True
        self.failed = True
        if not self.passthru:
            # fix: was 'self.self.invalid_json_msg_single_quotes' which raised AttributeError
            die(self.invalid_json_msg_single_quotes)
        return False
    else:
        log.debug('not valid json')
        if self.rewind_check_multirecord_json():
            return True
        # pointless since it would simply return 'ValueError: No JSON object could be decoded'
        # if self.verbose > 2:
        #     try:
        #         json.loads(content)
        #     except Exception as _:
        #         print(_)
        self.failed = True
        if not self.passthru:
            die(self.invalid_json_msg)
        return False
def search(term, limit=25):
    """Search DockerHub for 'term' and return the parsed JSON response (up to 'limit' results)."""
    url = 'https://index.docker.io/v1/search?q={0}&n={1}'.format(urllib.quote_plus(term), limit)
    log.debug('GET %s' % url)
    # workaround for Travis CI and older pythons - we're not exchanging secret data so this is ok
    #if os.getenv('TRAVIS'):
    #    verify = False
    verify = True
    try:
        req = requests.get(url, verify=verify)
    except requests.exceptions.RequestException as _:
        die(_)
    log.debug("response: %s %s", req.status_code, req.reason)
    log.debug("content:\n%s\n%s\n%s", '='*80, req.content.strip(), '='*80)
    if req.status_code != 200:
        die("%s %s" % (req.status_code, req.reason))
    if not isJson(req.content):
        die('invalid non-JSON response from DockerHub!')
    if log.isEnabledFor(logging.DEBUG):
        print(jsonpp(req.content))
        print('='*80)
    try:
        return json.loads(req.content)
    except KeyError as _:
        die('failed to parse output from DockerHub (format may have changed?): {0}'.format(_))
def create_cluster(self, cluster, filename, blueprint=''):
    """Create an Ambari cluster from a JSON cluster/host mapping file,
    optionally overriding the blueprint name embedded in the mapping.

    Returns the API response from the cluster creation submission.
    """
    # log.debug('create_cluster(%s, %s)' % (filename, name))
    validate_file(filename, 'cluster hosts mapping', nolog=True)
    try:
        # fix: use a context manager so the file handle is always closed
        # (was previously opened into '_' and leaked, with the exception
        # binding shadowing the handle)
        with open(str(filename)) as mapping_file:
            file_data = mapping_file.read()
    except IOError as _:
        err = "failed to read Ambari cluster host mapping from file '%s': %s" % (filename, _)
        # log.critical(err)
        qquit('CRITICAL', err)
    log.info("creating cluster '%s' using file '%s'" % (cluster, filename))
    if not isJson(file_data):
        qquit('CRITICAL', "invalid json found in file '%s'" % filename)
    # don't have access to a blueprint name to enforce reset here
    # json_data = json.loads(file_data)
    # try:
    #     json_data['Blueprints']['blueprint_name'] = blueprint
    # except KeyError, e:
    #     qquit('CRITICAL', 'failed to (re)set blueprint name in cluster/hostmapping data before creating cluster')
    if blueprint:
        try:
            log.info("setting blueprint in cluster creation to '%s'" % blueprint)
            json_data = json.loads(file_data)
            json_data['blueprint'] = blueprint
            file_data = json.dumps(json_data)
        except KeyError as _:
            log.warn("failed to inject blueprint name '%s' in to cluster creation" % blueprint)
    response = self.send('clusters/%s' % cluster, file_data)
    log.info("Cluster creation submitted, see Ambari web UI to track progress")
    return response
def query(url):
    """GET a DockerHub tags API URL, returning a tuple of (tag name list, next page URL)."""
    log.debug('GET %s' % url)
    # workaround for Travis CI and older pythons - we're not exchanging secret data so this is ok
    #if os.getenv('TRAVIS'):
    #    verify = False
    verify = True
    try:
        req = requests.get(url, verify=verify)
    except requests.exceptions.RequestException as _:
        die(_)
    log.debug("response: %s %s", req.status_code, req.reason)
    log.debug("content:\n%s\n%s\n%s", '='*80, req.content.strip(), '='*80)
    if req.status_code != 200:
        die("%s %s" % (req.status_code, req.reason))
    if not isJson(req.content):
        die('invalid non-JSON response from DockerHub!')
    if log.isEnabledFor(logging.DEBUG):
        print(jsonpp(req.content))
        print('='*80)
    try:
        parsed = json.loads(req.content)
        names = [result['name'] for result in parsed['results']]
        # could perhaps stack overflow in some scenario
        # not as functional programming 'cool' but will do own tail recursion and just while loop instead
        #if 'next' in j and j['next']:
        #    tag_list += self.query(j['next'])
        next_page = parsed['next']
    except KeyError as _:
        die('failed to parse output from DockerHub (format may have changed?): {0}'.format(_))
    return (names, next_page)
def search(term, limit=25):
    """Search DockerHub for 'term' and return the parsed JSON response.

    Debug output goes to stderr so stdout stays clean for results.
    """
    url = 'https://index.docker.io/v1/search?q={0}&n={1}'.format(
        urllib.quote_plus(term), limit)
    log.debug('GET %s' % url)
    # workaround for Travis CI and older pythons - we're not exchanging secret data so this is ok
    #if os.getenv('TRAVIS'):
    #    verify = False
    verify = True
    try:
        req = requests.get(url, verify=verify)
    except requests.exceptions.RequestException as _:
        die(_)
    log.debug("response: %s %s", req.status_code, req.reason)
    log.debug("content:\n%s\n%s\n%s", '=' * 80, req.content.strip(), '=' * 80)
    if req.status_code != 200:
        die("%s %s" % (req.status_code, req.reason))
    if not isJson(req.content):
        die('invalid non-JSON response from DockerHub!')
    if log.isEnabledFor(logging.DEBUG):
        print(jsonpp(req.content), file=sys.stderr)
        print('=' * 80, file=sys.stderr)
    try:
        return json.loads(req.content)
    except KeyError as _:
        die('failed to parse output from DockerHub (format may have changed?): {0}'
            .format(_))
def get_version(self):
    """Fetch the Solr version from the admin system info endpoint.

    Solr 7.0+ returns JSON; older versions return XML/HTML which is scraped
    with BeautifulSoup. Returns the solr-spec-version string.
    """
    url = 'http://{host}:{port}/solr/admin/info/system'.format(
        host=self.host, port=self.port)
    log.debug('GET %s', url)
    try:
        req = requests.get(url)
    except requests.exceptions.RequestException as _:
        qquit('CRITICAL', _)
    log.debug('response: %s %s', req.status_code, req.reason)
    log.debug('content:\n%s\n%s\n%s', '=' * 80, req.content.strip(), '=' * 80)
    if req.status_code != 200:
        qquit('CRITICAL', '%s %s' % (req.status_code, req.reason))
    # versions 7.0+
    if isJson(req.content):
        json_data = json.loads(req.content)
        version = json_data['lucene']['solr-spec-version']
    else:
        soup = BeautifulSoup(req.content, 'html.parser')
        if log.isEnabledFor(logging.DEBUG):
            log.debug("BeautifulSoup prettified:\n{0}\n{1}".format(
                soup.prettify(), '=' * 80))
        try:
            version = soup.find('str', {'name': 'solr-spec-version'}).text
        except (AttributeError, TypeError):
            # fix: error message typo ('failed to find parse') and unused exception binding
            qquit('UNKNOWN', 'failed to find/parse Solr output. {0}\n{1}'\
                  .format(support_msg_api(), traceback.format_exc()))
    return version
def create_cluster(self, cluster, filename, blueprint=''):
    """Create an Ambari cluster from a JSON cluster/host mapping file,
    optionally overriding the blueprint name in the mapping data.

    Returns the API response from the cluster creation submission.
    """
    # log.debug('create_cluster(%s, %s)' % (filename, name))
    validate_file(filename, 'cluster hosts mapping', nolog=True)
    try:
        # fix: context manager guarantees the file handle is closed
        # (previously opened into '_' and never closed, and the exception
        # binding shadowed the handle)
        with open(str(filename)) as mapping_file:
            file_data = mapping_file.read()
    except IOError as _:
        err = "failed to read Ambari cluster host mapping from file '%s': %s" % (
            filename, _)
        # log.critical(err)
        qquit('CRITICAL', err)
    log.info("creating cluster '%s' using file '%s'" % (cluster, filename))
    if not isJson(file_data):
        qquit('CRITICAL', "invalid json found in file '%s'" % filename)
    # don't have access to a blueprint name to enforce reset here
    # json_data = json.loads(file_data)
    # try:
    #     json_data['Blueprints']['blueprint_name'] = blueprint
    # except KeyError, e:
    #     qquit('CRITICAL', 'failed to (re)set blueprint name in cluster/hostmapping data before creating cluster')
    if blueprint:
        try:
            log.info("setting blueprint in cluster creation to '%s'" % blueprint)
            json_data = json.loads(file_data)
            json_data['blueprint'] = blueprint
            file_data = json.dumps(json_data)
        except KeyError as _:
            log.warn(
                "failed to inject blueprint name '%s' in to cluster creation" % blueprint)
    response = self.send('clusters/%s' % cluster, file_data)
    log.info(
        "Cluster creation submitted, see Ambari web UI to track progress")
    return response
def run(self):
    """Check the Oozie admin status endpoint; OK when systemMode == NORMAL, CRITICAL otherwise."""
    self.no_args()
    host = self.options.host
    port = self.options.port
    validate_host(host)
    validate_port(port)
    url = 'http://%(host)s:%(port)s/oozie/v1/admin/status' % locals()
    log.debug('GET %s' % url)
    try:
        req = requests.get(url)
    except requests.exceptions.RequestException as _:
        qquit('CRITICAL', _)
    log.debug("response: %s %s" % (req.status_code, req.reason))
    log.debug("content: '%s'" % req.content)
    if req.status_code != 200:
        qquit('CRITICAL', "Non-200 response! %s %s" % (req.status_code, req.reason))
    # should == NORMAL
    if not isJson(req.content):
        qquit('UNKNOWN', 'non-JSON returned by Oozie server at {0}:{1}'.format(host, port))
    try:
        status = json.loads(req.content)['systemMode']
    except KeyError:
        qquit('UNKNOWN', 'systemMode key was not returned in output from Oozie. {0}'.format(support_msg_api()))
    self.msg = 'Oozie status = {0}'.format(status)
    if status == 'NORMAL':
        self.ok()
    else:
        self.critical()
def run(self):
    """Check Oozie status via RequestHandler; OK when systemMode == NORMAL, CRITICAL otherwise."""
    self.no_args()
    host = self.get_opt('host')
    port = self.get_opt('port')
    validate_host(host)
    validate_port(port)
    url = 'http://%(host)s:%(port)s/oozie/v1/admin/status' % locals()
    req = RequestHandler().get(url)
    # should == NORMAL
    if not isJson(req.content):
        raise UnknownError(
            'non-JSON returned by Oozie server at {0}:{1}'.format(
                host, port))
    try:
        status = json.loads(req.content)['systemMode']
    except KeyError:
        raise UnknownError(
            'systemMode key was not returned in output from Oozie. {0}'.
            format(support_msg_api()))
    self.msg = 'Oozie status = {0}'.format(status)
    if status != 'NORMAL':
        self.critical()
    else:
        self.ok()
def get_version(self):
    """Query Solr's admin system info endpoint and return the solr-spec-version string.

    Solr 7.0+ responds with JSON; older versions respond with XML/HTML which is scraped.
    """
    url = 'http://{host}:{port}/solr/admin/info/system'.format(host=self.host, port=self.port)
    log.debug('GET %s', url)
    try:
        req = requests.get(url)
    except requests.exceptions.RequestException as _:
        qquit('CRITICAL', _)
    log.debug('response: %s %s', req.status_code, req.reason)
    log.debug('content:\n%s\n%s\n%s', '='*80, req.content.strip(), '='*80)
    if req.status_code != 200:
        qquit('CRITICAL', '%s %s' % (req.status_code, req.reason))
    # versions 7.0+
    if isJson(req.content):
        return json.loads(req.content)['lucene']['solr-spec-version']
    soup = BeautifulSoup(req.content, 'html.parser')
    if log.isEnabledFor(logging.DEBUG):
        log.debug("BeautifulSoup prettified:\n{0}\n{1}".format(soup.prettify(), '='*80))
    try:
        return soup.find('str', {'name':'solr-spec-version'}).text
    except (AttributeError, TypeError) as _:
        qquit('UNKNOWN', 'failed to find parse Solr output. {0}\n{1}'\
              .format(support_msg_api(), traceback.format_exc()))
def run(self):
    """Query the Oozie admin status endpoint directly with requests; OK when systemMode == NORMAL."""
    self.no_args()
    host = self.get_opt('host')
    port = self.get_opt('port')
    validate_host(host)
    validate_port(port)
    url = 'http://%(host)s:%(port)s/oozie/v1/admin/status' % locals()
    log.debug('GET %s' % url)
    try:
        req = requests.get(url)
    except requests.exceptions.RequestException as _:
        qquit('CRITICAL', _)
    log.debug("response: %s %s" % (req.status_code, req.reason))
    log.debug("content: '%s'" % req.content)
    if req.status_code != 200:
        qquit('CRITICAL', "%s %s" % (req.status_code, req.reason))
    # should == NORMAL
    if not isJson(req.content):
        qquit('UNKNOWN', 'non-JSON returned by Oozie server at {0}:{1}'.format(host, port))
    try:
        status = json.loads(req.content)['systemMode']
    except KeyError:
        qquit('UNKNOWN', 'systemMode key was not returned in output from Oozie. {0}'.format(support_msg_api()))
    self.msg = 'Oozie status = {0}'.format(status)
    if status != 'NORMAL':
        self.critical()
    else:
        self.ok()
def process_json(self, content, filename):
    """Validate 'content' as JSON and print it compacted; fall back to
    single-quoted JSON checks when permitted.

    On failure sets self.failed, prints the content to stderr and exits
    CRITICAL unless continue_on_error is set. Returns True/False.
    """
    log.debug('process_json()')
    if not content:
        log.warning("blank content passed to process_json for contents of file '%s'", filename)
    if isJson(content):
        print(json.dumps(json.loads(content)))
        return True
    if self.permit_single_quotes:
        log.debug('checking for single quoted JSON')
        # check if it's regular single quoted JSON a la MongoDB
        if self.process_json_single_quoted(self.convert_single_quoted(content), filename):
            return True
        log.debug('single quoted JSON check failed, trying with pre-escaping double quotes')
        # check if it's single quoted JSON with double quotes that aren't escaped,
        # by pre-escaping them before converting single quotes to doubles for processing
        if self.process_json_single_quoted(self.convert_single_quoted_escaped(content), filename):
            log.debug("processed single quoted json with non-escaped double quotes in '%s'", filename)
            return True
        log.debug('single quoted JSON check failed even with pre-escaping any double quotes')
    self.failed = True
    log.error("invalid json detected in '%s':", filename)
    printerr(content)
    if not self.continue_on_error:
        sys.exit(ERRORS['CRITICAL'])
    return False
def query(url):
    """GET a DockerHub tags API URL, returning (tag name list, next page URL)."""
    log.debug('GET %s' % url)
    # workaround for Travis CI and older pythons - we're not exchanging secret data so this is ok
    #if os.getenv('TRAVIS'):
    #    verify = False
    verify = True
    try:
        req = requests.get(url, verify=verify)
    except requests.exceptions.RequestException as _:
        die(_)
    log.debug("response: %s %s", req.status_code, req.reason)
    log.debug("content:\n%s\n%s\n%s", '=' * 80, req.content.strip(), '=' * 80)
    if req.status_code != 200:
        die("%s %s" % (req.status_code, req.reason))
    if not isJson(req.content):
        die('invalid non-JSON response from DockerHub!')
    if log.isEnabledFor(logging.DEBUG):
        print(jsonpp(req.content))
        print('=' * 80)
    try:
        payload = json.loads(req.content)
        names = [result['name'] for result in payload['results']]
        # could perhaps stack overflow in some scenario
        # not as functional programming 'cool' but will do own tail recursion and just while loop instead
        #if 'next' in j and j['next']:
        #    tag_list += self.query(j['next'])
        next_page = payload['next']
    except KeyError as _:
        die('failed to parse output from DockerHub (format may have changed?): {0}'
            .format(_))
    return (names, next_page)
def process_json_single_quoted(self, content, filename):
    """If 'content' is valid JSON, print it compacted and return True.

    Logs single-quote detection only once per file via the
    single_quotes_detected flag. Returns False when content is not JSON.
    """
    if not isJson(content):
        return False
    if not self.single_quotes_detected:
        log.debug("detected single quoted json in '%s'", filename)
        self.single_quotes_detected = True
    print(json.dumps(json.loads(content)))
    return True
def run(self):
    """Check a Mesos slave is registered with the Mesos Master, or list registered slaves."""
    self.no_args()
    host = self.get_opt('host')
    port = self.get_opt('port')
    slave = self.get_opt('slave')
    list_slaves = self.get_opt('list_slaves')
    validate_host(host)
    validate_port(port)
    if not list_slaves:
        validate_host(slave, 'slave')
    url = 'http://%(host)s:%(port)s/master/slaves' % locals()
    log.debug('GET %s', url)
    try:
        req = requests.get(url)
    except requests.exceptions.RequestException as _:
        qquit('CRITICAL', _)
    log.debug("response: %s %s", req.status_code, req.reason)
    log.debug("content:\n{0}\n{1}\n{2}".format('=' * 80, req.content.strip(), '=' * 80))
    if req.status_code != 200:
        qquit('CRITICAL', "Non-200 response! %s %s" % (req.status_code, req.reason))
    content = req.content
    if not isJson(content):
        qquit('UNKNOWN', 'invalid JSON returned by Mesos Master')
    data = json.loads(content)
    if log.isEnabledFor(logging.DEBUG):
        log.debug('\n%s', jsonpp(data))
    slaves = {}
    # extract the address portion of pids of the form 'slave(N)@host:port'
    regex = re.compile(r'^slave\(\d+\)\@(.+):\d+')
    try:
        for item in data['slaves']:
            match = regex.match(item['pid'])
            if match:
                slaves[item['hostname']] = match.group(1)
            else:
                slaves[item['hostname']] = item['pid']
    except KeyError:
        # fix: support_msg_api was referenced without calling it, which printed
        # the function repr instead of the support message
        qquit(
            'UNKNOWN',
            'failed to parse slaves from Mesos API output. {0}'.format(
                support_msg_api()))
    if list_slaves:
        qquit('UNKNOWN', 'Slaves list:\n\n{0}'.format(dict_lines(slaves)))
    log.info('found slaves:\n\n{0}\n'.format(dict_lines(slaves)))
    slave = slave.lower()
    for _ in slaves:
        if slave == _.lower() or slave == slaves[_].lower():
            qquit('OK', "Mesos slave '{0}' registered with master".format(slave))
            break
    else:
        qquit('CRITICAL', "Mesos slave '{0}' not registered with master".format(slave))
def get_latest_builds(self):
    """Fetch recent builds for self.repo from the Travis CI API and return the raw JSON payload."""
    log.info('getting latest builds')
    # gets 404 unless replacing the slash
    url = 'https://api.travis-ci.org/repo/{repo}/builds'.format(repo=self.repo.replace('/', '%2F'))
    # request returns blank without authorization header
    req = self.request_handler.get(url, headers=self.headers)
    if log.isEnabledFor(logging.DEBUG):
        log.debug("\n%s", jsonpp(req.content))
    if isJson(req.content):
        return req.content
    raise UnknownError('non-json returned by Travis CI. {0}'.format(support_msg_api()))
def query(self, url):
    """GET a Docker Registry / DockerHub tags URL with optional basic auth.

    Returns a tuple of (tag_list, next_page_url) where next_page_url is None
    when there are no further pages. Honours SSL_NOVERIFY=1 to disable SSL
    verification.
    """
    log.debug('GET %s' % url)
    try:
        verify = True
        # workaround for Travis CI and older pythons - we're not exchanging secret data so this is ok
        #if os.getenv('TRAVIS'):
        #    verify = False
        if os.getenv('SSL_NOVERIFY') == '1':
            log.warn('disabling SSL verification')
            verify = False
        auth = None
        if self.user and self.password:
            auth = (self.user, self.password)
            # fix: typo 'authenication' in log message
            log.debug(
                'setting basic HTTP authentication using username: %s, password: <omitted>',
                self.user)
        req = requests.get(url, auth=auth, verify=verify)
    except requests.exceptions.RequestException as _:
        die(_)
    log.debug("response: %s %s", req.status_code, req.reason)
    log.debug("content:\n%s\n%s\n%s", '=' * 80, req.content.strip(), '=' * 80)
    if req.status_code != 200:
        die("%s %s" % (req.status_code, req.reason))
    if not isJson(req.content):
        die('invalid non-JSON response from Docker Registry!')
    if log.isEnabledFor(logging.DEBUG):
        print(jsonpp(req.content))
        print('=' * 80)
    tag_list = []
    try:
        json_data = json.loads(req.content)
        # DockerHub returns like this
        if 'results' in json_data:
            tag_list = [result['name'] for result in json_data['results']]
        # Docker Registry returns like this
        elif 'tags' in json_data:
            tag_list = json_data['tags']
        else:
            raise UnknownError('failed to parse response, found neither results nor tags fields. {0}'\
                               .format(support_msg_api()))
        # could perhaps stack overflow in some scenario
        # not as functional programming 'cool' but will do own tail recursion and just while loop instead
        next_page_url = None
        if 'next' in json_data and json_data['next']:
            # tag_list += self.query(json_data['next'])
            next_page_url = json_data['next']
        return (tag_list, next_page_url)
    except KeyError as _:
        die('failed to parse output from Docker Registry (format may have changed?): {0}'
            .format(_))
def run(self):
    """Check a Mesos slave is registered with the Mesos Master, or list registered slaves.

    Gives a specific hint on 404 responses (wrong endpoint / not a Mesos Master).
    """
    self.no_args()
    host = self.get_opt('host')
    port = self.get_opt('port')
    slave = self.get_opt('slave')
    list_slaves = self.get_opt('list_slaves')
    validate_host(host)
    validate_port(port)
    if not list_slaves:
        validate_host(slave, 'slave')
    url = 'http://%(host)s:%(port)s/master/slaves' % locals()
    log.debug('GET %s', url)
    try:
        req = requests.get(url)
    except requests.exceptions.RequestException as _:
        qquit('CRITICAL', _)
    log.debug("response: %s %s", req.status_code, req.reason)
    log.debug("content:\n{0}\n{1}\n{2}".format('='*80, req.content.strip(), '='*80))
    if req.status_code != 200:
        if req.status_code == 404:
            qquit('CRITICAL', '%s %s (did you point this at the correct Mesos Master?)'
                  % (req.status_code, req.reason))
        qquit('CRITICAL', "Non-200 response! %s %s" % (req.status_code, req.reason))
    content = req.content
    if not isJson(content):
        qquit('UNKNOWN', 'invalid JSON returned by Mesos Master')
    data = json.loads(content)
    if log.isEnabledFor(logging.DEBUG):
        log.debug('\n%s', jsonpp(data))
    slaves = {}
    # extract the address portion of pids of the form 'slave(N)@host:port'
    regex = re.compile(r'^slave\(\d+\)\@(.+):\d+')
    try:
        for item in data['slaves']:
            match = regex.match(item['pid'])
            if match:
                slaves[item['hostname']] = match.group(1)
            else:
                slaves[item['hostname']] = item['pid']
    except KeyError:
        # fix: support_msg_api was referenced without calling it, which printed
        # the function repr instead of the support message
        qquit('UNKNOWN', 'failed to parse slaves from Mesos API output. {0}'.format(support_msg_api()))
    if list_slaves:
        qquit('UNKNOWN', 'Slaves list:\n\n{0}'.format(dict_lines(slaves)))
    log.info('found slaves:\n\n{0}\n'.format(dict_lines(slaves)))
    slave = slave.lower()
    for _ in slaves:
        if slave == _.lower() or slave == slaves[_].lower():
            qquit('OK', "Mesos slave '{0}' registered with master".format(slave))
            break
    else:
        qquit('CRITICAL', "Mesos slave '{0}' not registered with master".format(slave))
def parse(self, req):
    """Extract the Oozie systemMode from a status response, set OK/CRITICAL and return the status."""
    if not isJson(req.content):
        raise UnknownError('non-JSON returned by Oozie server at {0}:{1}'.format(self.host, self.port))
    try:
        status = json.loads(req.content)['systemMode']
    except KeyError:
        raise UnknownError('systemMode key was not returned in output from Oozie. {0}'.format(support_msg_api()))
    if status != 'NORMAL':
        self.critical()
    else:
        self.ok()
    return status
def check_json(self, content):
    """Validate 'content' as JSON, accepting single-quoted JSON when permitted
    and falling back to multirecord (JSON-per-line) validation for file input.

    Sets self.msg / self.failed; die()s with the message on failure unless in
    passthru mode. Returns True only when multirecord validation succeeds
    (other success paths fall through returning None - presumably callers rely
    on self.failed / die rather than the return value; verify before changing).
    """
    log.debug('check_json()')
    if isJson(content):
        log.debug('valid json')
        self.msg = self.valid_json_msg
        self.print(content)
    # XXX: Limitation this may not work with JSON with double quotes embedded within single quotes as
    # that may lead to unbalanced quoting, although that is still a data problem so might be ok for it to be flagged
    # as failing
    elif isJson(content.replace("'", '"')):
        log.debug('valid json (single quotes)')
        # self.single_quotes_detected = True
        if self.permit_single_quotes:
            self.msg = self.valid_json_msg_single_quotes
            self.print(content)
        else:
            self.failed = True
            self.msg = self.invalid_json_msg_single_quotes
            if not self.passthru:
                die(self.msg)
    else:
        log.debug('not valid json')
        # rewind so multirecord validation re-reads the stream from the start
        if self.iostream is not sys.stdin:
            self.iostream.seek(0)
        if self.check_multirecord_json():
            return True
        # pointless since it would simply return 'ValueError: No JSON object could be decoded'
        # if self.verbose > 2:
        #     try:
        #         json.loads(content)
        #     except Exception, e:
        #         print(e)
        else:
            self.failed = True
            self.msg = self.invalid_json_msg
            if not self.passthru:
                die(self.msg)
def parse(self, req):
    """Parse a HiveServer2 Interactive status response: set OK/CRITICAL on
    status, record uptime (and version when verbose) in msg2, return the status."""
    if not isJson(req.content):
        raise UnknownError('non-JSON returned by HiveServer2 Interactive instance at {0}:{1}'\
                           .format(self.host, self.port))
    json_data = json.loads(req.content)
    status = self.get_key(json_data, 'status')
    uptime = self.get_key(json_data, 'uptime')
    self.msg2 = 'uptime = {0}'.format(sec2human(int(uptime/1000)))
    if self.verbose:
        self.msg2 += ', version ' + self.get_key(json_data, 'build').split('from')[0]
    if status != 'STARTED':
        self.critical()
    else:
        self.ok()
    return status
def check_json(self, content):
    """Validate 'content' as JSON, with single-quote detection and a
    multirecord (JSON-per-line) fallback for file input."""
    passthru = self.options.print
    if isJson(content):
        if passthru:
            print(content, end='')
        else:
            print(self.valid_json_msg)
        return
    if isJson(content.replace("'", '"')):
        # only valid after naive single->double quote conversion
        self.failed = True
        if not passthru:
            die(self.invalid_json_msg_single_quotes)
        return
    # rewind so multirecord validation re-reads the stream from the start
    if self.iostream is not sys.stdin:
        self.iostream.seek(0)
    if self.check_multirecord_json():
        return True
    # pointless since it would simply return 'ValueError: No JSON object could be decoded'
    # if self.get_verbose() > 2:
    #     try:
    #         json.loads(content)
    #     except Exception, e:
    #         print(e)
    self.failed = True
    if not passthru:
        die(self.invalid_json_msg)
def parse(self, req):
    """Parse an Atlas metadata server status response and set the check state.

    ACTIVE is OK; PASSIVE is OK only in high-availability mode; the BECOMING_*
    transitional states are WARNING; anything else is CRITICAL. Returns the status.
    """
    if not isJson(req.content):
        raise UnknownError('non-JSON returned by Atlas metadata server instance at {0}:{1}'\
                           .format(self.host, self.port))
    status = self.get_key(json.loads(req.content), 'Status')
    if status == 'ACTIVE' or (self.high_availability and status == 'PASSIVE'):
        pass
    elif status in ('BECOMING_ACTIVE', 'BECOMING_PASSIVE'):
        self.warning()
    else:
        self.critical()
    return status
def parse(self, req):
    """Parse a HiveServer2 Interactive status response; OK when status is
    STARTED, CRITICAL otherwise. Records human-readable uptime (and version
    when verbose) in msg2 and returns the status."""
    if not isJson(req.content):
        raise UnknownError('non-JSON returned by HiveServer2 Interactive instance at {0}:{1}'\
                           .format(self.host, self.port))
    payload = json.loads(req.content)
    status = self.get_key(payload, 'status')
    uptime_millis = self.get_key(payload, 'uptime')
    self.msg2 = 'uptime = {0}'.format(sec2human(int(uptime_millis / 1000)))
    if self.verbose:
        self.msg2 += ', version ' + self.get_key(payload, 'build').split('from')[0]
    if status == 'STARTED':
        self.ok()
    else:
        self.critical()
    return status
def parse(self, req):
    """Extract the Solr version from a system info response.

    Solr 7.0+ responds with JSON; older versions respond with XML/HTML which
    is scraped with BeautifulSoup. Returns the solr-spec-version string.
    """
    # versions 7.0+
    if isJson(req.content):
        return json.loads(req.content)['lucene']['solr-spec-version']
    soup = BeautifulSoup(req.content, 'html.parser')
    if log.isEnabledFor(logging.DEBUG):
        log.debug("BeautifulSoup prettified:\n{0}\n{1}".format(soup.prettify(), '=' * 80))
    try:
        version = soup.find('str', {'name': 'solr-spec-version'}).text
    except (AttributeError, TypeError):
        qquit('UNKNOWN', 'failed to find parse Solr output. {0}\n{1}'\
              .format(support_msg_api(), traceback.format_exc()))
    return version
def parse(self, req):
    """Extract the Oozie systemMode from a status response; OK when NORMAL,
    CRITICAL otherwise. Returns the status string."""
    if not isJson(req.content):
        raise UnknownError(
            'non-JSON returned by Oozie server at {0}:{1}'.format(
                self.host, self.port))
    try:
        status = json.loads(req.content)['systemMode']
    except KeyError:
        raise UnknownError('\'systemMode\' key was not returned in output from Oozie at {0}:{1}. {2}'\
                           .format(self.host, self.port, support_msg_api()))
    if status != 'NORMAL':
        self.critical()
    else:
        self.ok()
    return status
def check_multirecord_json(self):
    """Validate self.iostream as multi-record JSON (one document per line).

    Counts records, tracks whether normal and/or single-quoted JSON lines were
    seen (warning on a mixture), and treats zero lines as a failure. Returns
    True when all lines validate, False otherwise; die()s on invalid lines
    unless in passthru mode.
    """
    log.debug('check_multirecord_json()')
    normal_json = False
    single_quoted = False
    count = 0
    for line in self.iostream:
        if isJson(line):
            normal_json = True
            # can't use self.print() here, don't want to print valid for every line of a file / stdin
            if self.passthru:
                print(line, end='')
            count += 1
            continue
        elif self.permit_single_quotes and self.check_json_line_single_quoted(
                line):
            single_quoted = True
            if self.passthru:
                print(line, end='')
            count += 1
            continue
        else:
            log.debug('invalid multirecord json')
            self.failed = True
            if not self.passthru:
                die(self.invalid_json_msg)
            return False
    if count == 0:
        log.debug(
            'blank input, detected zero lines while multirecord checking')
        self.failed = True
        return False
    # self.multi_record_detected = True
    log.debug('multirecord json (all %s lines passed)', count)
    extra_info = ''
    if single_quoted:
        extra_info = ' single quoted'
        if normal_json:
            extra_info += ' mixed with normal json!'
            log.warning('mixture of normal and single quoted json detected, ' + \
                        'may cause issues for data processing engines')
    if not self.passthru:
        print('{0} (multi-record format{1}, {2} records)'.format(
            self.valid_json_msg, extra_info, count))
    return True
def get_tags(repo):
    """Return sorted tag names for a DockerHub repo, with 'latest' moved to the front if present."""
    namespace = 'library'
    if '/' in repo:
        (namespace, repo) = repo.split('/', 2)
    url = 'https://registry.hub.docker.com/v2/repositories/{0}/{1}/tags/'\
          .format(urllib.quote_plus(namespace), urllib.quote_plus(repo))
    log.debug('GET %s' % url)
    # workaround for Travis CI and older pythons - we're not exchanging secret data so this is ok
    #if os.getenv('TRAVIS'):
    #    verify = False
    verify = True
    try:
        req = requests.get(url, verify=verify)
    except requests.exceptions.RequestException as _:
        die(_)
    log.debug("response: %s %s", req.status_code, req.reason)
    log.debug("content:\n%s\n%s\n%s", '=' * 80, req.content.strip(), '=' * 80)
    if req.status_code != 200:
        die("%s %s" % (req.status_code, req.reason))
    if not isJson(req.content):
        die('invalid non-JSON response from DockerHub!')
    if log.isEnabledFor(logging.DEBUG):
        print(jsonpp(req.content))
        print('=' * 80)
    try:
        parsed = json.loads(req.content)
        tag_list = [result['name'] for result in parsed['results']]
    except KeyError as _:
        die('failed to parse output from DockerHub (format may have changed?): {0}'
            .format(_))
    tag_list.sort()
    # put latest to the top of the list
    if 'latest' in tag_list:
        tag_list.remove('latest')
        tag_list.insert(0, 'latest')
    return tag_list
def run(self):
    """Check the Oozie admin status endpoint via RequestHandler; OK when systemMode == NORMAL."""
    self.no_args()
    host = self.get_opt('host')
    port = self.get_opt('port')
    validate_host(host)
    validate_port(port)
    url = 'http://%(host)s:%(port)s/oozie/v1/admin/status' % locals()
    req = RequestHandler().get(url)
    # should == NORMAL
    if not isJson(req.content):
        raise UnknownError('non-JSON returned by Oozie server at {0}:{1}'.format(host, port))
    try:
        status = json.loads(req.content)['systemMode']
    except KeyError:
        raise UnknownError('systemMode key was not returned in output from Oozie. {0}'.format(support_msg_api()))
    self.msg = 'Oozie status = {0}'.format(status)
    if status == 'NORMAL':
        self.ok()
    else:
        self.critical()
def get_tags(repo):
    """Return sorted tag names for a DockerHub repo, 'latest' first if present.

    Disables SSL verification under Travis CI (no secret data exchanged).
    """
    namespace = 'library'
    if '/' in repo:
        (namespace, repo) = repo.split('/', 2)
    url = 'https://registry.hub.docker.com/v2/repositories/{0}/{1}/tags/'\
          .format(urllib.quote_plus(namespace), urllib.quote_plus(repo))
    log.debug('GET %s' % url)
    try:
        # workaround for Travis CI and older pythons - we're not exchanging secret data so this is ok
        verify = not os.getenv('TRAVIS')
        req = requests.get(url, verify=verify)
    except requests.exceptions.RequestException as _:
        die(_)
    log.debug("response: %s %s", req.status_code, req.reason)
    log.debug("content:\n%s\n%s\n%s", '='*80, req.content.strip(), '='*80)
    if req.status_code != 200:
        die("%s %s" % (req.status_code, req.reason))
    if not isJson(req.content):
        die('invalid non-JSON response from DockerHub!')
    if log.isEnabledFor(logging.DEBUG):
        print(jsonpp(req.content))
        print('='*80)
    try:
        parsed = json.loads(req.content)
        tag_list = [result['name'] for result in parsed['results']]
    except KeyError as _:
        die('failed to parse output from DockerHub (format may have changed?): {0}'.format(_))
    tag_list.sort()
    # put latest to the top of the list
    if 'latest' in tag_list:
        tag_list.remove('latest')
        tag_list.insert(0, 'latest')
    return tag_list