def _process_ping(self, ping_url, ping_reply, auth, tags, pool_name, http_host):
    """Ping an FPM pool URL and emit an OK/CRITICAL service check.

    The reply body must contain `ping_reply` (default 'pong'); the service
    check is tagged with the ping URL, caller tags and optional http_host.
    """
    expected = ping_reply if ping_reply is not None else 'pong'

    sc_tags = ["ping_url:{0}".format(ping_url)] + tags
    if http_host is not None:
        sc_tags += ["http_host:{0}".format(http_host)]

    try:
        # TODO: adding the 'full' parameter gets you per-process detailed
        # informations, which could be nice to parse and output as metrics
        resp = requests.get(ping_url, auth=auth,
                            headers=headers(self.agentConfig, http_host=http_host))
        resp.raise_for_status()
        if expected not in resp.text:
            raise Exception("Received unexpected reply to ping {0}".format(resp.text))
    except Exception as e:
        self.log.error("Failed to ping FPM pool {0} on URL {1}."
                       "\nError {2}".format(pool_name, ping_url, e))
        self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL,
                           tags=sc_tags, message=str(e))
    else:
        self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.OK, tags=sc_tags)
def check(self, instance):
    """Collect Fluentd monitor_agent gauges for the configured plugins.

    Emits a CRITICAL service check (and re-raises) when the stats endpoint
    cannot be reached or parsed.

    :param instance: check config; requires 'monitor_agent_url', optional
        'plugin_ids' list to select which plugins are reported.
    :raises Exception: when 'monitor_agent_url' is missing or the fetch fails.
    """
    if 'monitor_agent_url' not in instance:
        raise Exception(
            'Fluentd instance missing "monitor_agent_url" value.')

    try:
        url = instance.get('monitor_agent_url')
        plugin_ids = instance.get('plugin_ids', [])

        parsed_url = urlparse.urlparse(url)
        monitor_agent_host = parsed_url.hostname
        monitor_agent_port = parsed_url.port or 24220
        service_check_tags = [
            'fluentd_host:%s' % monitor_agent_host,
            'fluentd_port:%s' % monitor_agent_port
        ]

        r = requests.get(url, headers=headers(self.agentConfig))
        r.raise_for_status()
        status = r.json()

        for p in status['plugins']:
            for m in self.GAUGES:
                if p.get(m) is None:
                    continue
                if p.get('plugin_id') in plugin_ids:
                    self.gauge('fluentd.%s' % (m), p.get(m),
                               ["plugin_id:%s" % p.get('plugin_id')])
    except Exception as e:
        msg = "No stats could be retrieved from %s : %s" % (url, str(e))
        self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL,
                           tags=service_check_tags, message=msg)
        # Bare 'raise' preserves the original traceback; 'raise e' reset it.
        raise
def _get_data(self, instance):
    """Fetch the raw nginx status page for `instance`.

    Submits the 'nginx.can_connect' service check and returns a
    (body, content_type) tuple.
    """
    url = instance.get('nginx_status_url')
    ssl_validation = instance.get('ssl_validation', True)

    auth = None
    if 'user' in instance and 'password' in instance:
        auth = (instance['user'], instance['password'])

    # Submit a service check for status page availability.
    parsed_url = urlparse.urlparse(url)
    service_check_name = 'nginx.can_connect'
    service_check_tags = [
        'host:%s' % parsed_url.hostname,
        'port:%s' % (parsed_url.port or 80),
    ]

    try:
        self.log.debug(u"Querying URL: {0}".format(url))
        r = requests.get(url, auth=auth, headers=headers(self.agentConfig),
                         verify=ssl_validation,
                         timeout=self.default_integration_http_timeout)
        r.raise_for_status()
    except Exception:
        self.service_check(service_check_name, AgentCheck.CRITICAL,
                           tags=service_check_tags)
        raise
    else:
        self.service_check(service_check_name, AgentCheck.OK,
                           tags=service_check_tags)

    return r.content, r.headers.get('content-type', 'text/plain')
def check(self, instance):
    """Scrape the Apache status URL and report each numeric field as a gauge.

    Field names are mapped through METRIC_TRANSLATION; 'apache.net.bytes'
    is converted from kBytes to bytes. Logs (does not raise) on failure.

    :param instance: check config; requires 'apache_status_url', optional 'tags'.
    """
    if 'apache_status_url' not in instance:
        self.log.warn("Missing 'apache_status_url' in Apache config")
        return

    tags = instance.get('tags', [])
    try:
        req = urllib2.Request(instance['apache_status_url'], None,
                              headers(self.agentConfig))
        request = urllib2.urlopen(req)
        response = request.read()

        # Loop through and extract the numerical values
        for line in response.split('\n'):
            values = line.split(': ')
            if len(values) == 2:  # match
                metric, value = values
                metric_name = self.METRIC_TRANSLATION.get(metric, metric)
                try:
                    if metric_name == 'apache.net.bytes':
                        self.gauge(metric_name, float(value) * 1024, tags=tags)
                    else:
                        self.gauge(metric_name, float(value), tags=tags)
                except ValueError:
                    continue
    except Exception:
        # Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
        # still propagate; the failure is still logged, not raised.
        self.log.exception('Unable to get Apache status')
def _process_status(self, status_url, auth, tags):
    """Query the FPM status endpoint and submit gauges and monotonic counts.

    Returns the pool name reported by the status page ('default' if absent).
    Logs and re-raises when the endpoint cannot be queried.
    """
    data = {}
    try:
        resp = requests.get(status_url, auth=auth,
                            headers=headers(self.agentConfig),
                            params={'json': True})
        resp.raise_for_status()
        data = resp.json()
    except Exception as e:
        self.log.error("Failed to get metrics from {0}.\nError {1}".format(
            status_url, e))
        raise

    pool_name = data.get('pool', 'default')
    metric_tags = tags + ["pool:{0}".format(pool_name)]

    # Gauges and monotonic counts share the same submission shape; only the
    # reporter method and the warning label differ.
    for mapping, reporter, label in (
            (self.GAUGES, self.gauge, "Gauge"),
            (self.MONOTONIC_COUNTS, self.monotonic_count, "Counter")):
        for key, mname in mapping.iteritems():
            if key not in data:
                self.log.warn(
                    "{0} metric {1} is missing from FPM status".format(label, key))
                continue
            reporter(mname, int(data[key]), tags=metric_tags)

    return pool_name
def _get_metrics(self, url, tags):
    """Scrape an nginx stub_status page at `url` and submit its metrics."""
    req = urllib2.Request(url, None, headers(self.agentConfig))
    response = urllib2.urlopen(req).read()

    # Thanks to http://hostingfu.com/files/nginx/nginxstats.py for this code
    # Connections
    match = re.search(r'Active connections:\s+(\d+)', response)
    if match:
        self.gauge("nginx.net.connections", int(match.group(1)), tags=tags)

    # Requests per second
    match = re.search(r'\s*(\d+)\s+(\d+)\s+(\d+)', response)
    if match:
        self.rate("nginx.net.conn_opened_per_s", int(match.group(1)), tags=tags)
        self.rate("nginx.net.request_per_s", int(match.group(3)), tags=tags)

    # Connection states, reading, writing or waiting for clients
    match = re.search(r'Reading: (\d+)\s+Writing: (\d+)\s+Waiting: (\d+)', response)
    if match:
        reading, writing, waiting = (int(g) for g in match.groups())
        self.gauge("nginx.net.reading", reading, tags=tags)
        self.gauge("nginx.net.writing", writing, tags=tags)
        self.gauge("nginx.net.waiting", waiting, tags=tags)
def check(self, instance):
    """Collect Fluentd monitor_agent gauges (urllib2 transport).

    Emits a CRITICAL service check and re-raises on any fetch/parse failure.

    :param instance: check config; requires 'monitor_agent_url', optional
        'plugin_ids' to select which plugins are reported.
    """
    if "monitor_agent_url" not in instance:
        raise Exception('Fluentd instance missing "monitor_agent_url" value.')

    try:
        url = instance.get("monitor_agent_url")
        plugin_ids = instance.get("plugin_ids", [])

        parsed_url = urlparse.urlparse(url)
        monitor_agent_host = parsed_url.hostname
        monitor_agent_port = parsed_url.port or 24220
        service_check_tags = ["fluentd_host:%s" % monitor_agent_host,
                              "fluentd_port:%s" % monitor_agent_port]

        req = urllib2.Request(url, None, headers(self.agentConfig))
        res = urllib2.urlopen(req).read()
        status = json.loads(res)

        for p in status["plugins"]:
            for m in self.GAUGES:
                if p.get(m) is None:
                    continue
                if p.get("plugin_id") in plugin_ids:
                    self.gauge("fluentd.%s" % (m), p.get(m),
                               ["plugin_id:%s" % p.get("plugin_id")])
    except Exception as e:
        msg = "No stats could be retrieved from %s : %s" % (url, str(e))
        self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL,
                           tags=service_check_tags, message=msg)
        # Bare 'raise' preserves the original traceback; 'raise e' reset it.
        raise
def check(self, instance):
    """Parse the Lighttpd status page and submit gauges and rates.

    :param instance: check config; requires 'lighttpd_status_url',
        optional 'tags'.
    :raises Exception: when 'lighttpd_status_url' is missing.
    """
    if 'lighttpd_status_url' not in instance:
        raise Exception("Missing 'lighttpd_status_url' in Lighttpd config")

    tags = instance.get('tags', [])
    req = urllib2.Request(instance['lighttpd_status_url'], None,
                          headers(self.agentConfig))
    response = urllib2.urlopen(req).read()

    # Loop through and extract the numerical values
    for line in response.split('\n'):
        parts = line.split(': ')
        if len(parts) != 2:
            continue
        metric, raw = parts
        try:
            value = float(raw)
        except ValueError:
            continue

        # Special case: kBytes => bytes
        if metric == 'Total kBytes':
            value = value * 1024

        # Send metric as a gauge, if applicable
        if metric in self.GAUGES:
            self.gauge(self.GAUGES[metric], value, tags=tags)

        # Send metric as a rate, if applicable
        if metric in self.RATES:
            self.rate(self.RATES[metric], value, tags=tags)
def check(self, instance):
    """Collect Fluentd monitor_agent gauges, tagging each by `tag_by`.

    Emits a CRITICAL service check and re-raises on any fetch/parse failure.
    """
    if 'monitor_agent_url' not in instance:
        raise Exception('Fluentd instance missing "monitor_agent_url" value.')

    try:
        url = instance.get('monitor_agent_url')
        plugin_ids = instance.get('plugin_ids', [])

        # Fallback with `tag_by: plugin_id`
        tag_by = instance.get('tag_by')
        if tag_by not in self._AVAILABLE_TAGS:
            tag_by = 'plugin_id'

        parsed_url = urlparse.urlparse(url)
        service_check_tags = [
            'fluentd_host:%s' % parsed_url.hostname,
            'fluentd_port:%s' % (parsed_url.port or 24220),
        ]

        r = requests.get(url, headers=headers(self.agentConfig))
        r.raise_for_status()
        status = r.json()

        for p in status['plugins']:
            tag = "%s:%s" % (tag_by, p.get(tag_by))
            for m in self.GAUGES:
                if p.get(m) is None:
                    continue
                # Filter unspecified plugins to keep backward compatibility.
                if not plugin_ids or p.get('plugin_id') in plugin_ids:
                    self.gauge('fluentd.%s' % (m), p.get(m), [tag])
    except Exception as e:
        msg = "No stats could be retrieved from %s : %s" % (url, str(e))
        self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL,
                           tags=service_check_tags, message=msg)
        raise
def _get_data(self, instance):
    """Fetch the nginx status page; returns (body, content_type).

    Submits the 'nginx.can_connect' service check along the way.
    """
    url = instance.get('nginx_status_url')

    auth = None
    if 'user' in instance and 'password' in instance:
        auth = (instance['user'], instance['password'])

    # Submit a service check for status page availability.
    parsed_url = urlparse.urlparse(url)
    service_check_name = 'nginx.can_connect'
    service_check_tags = [
        'host:%s' % parsed_url.hostname,
        'port:%s' % (parsed_url.port or 80),
    ]

    try:
        r = requests.get(url, auth=auth, headers=headers(self.agentConfig))
        r.raise_for_status()
    except Exception:
        self.service_check(service_check_name, AgentCheck.CRITICAL,
                           tags=service_check_tags)
        raise
    else:
        self.service_check(service_check_name, AgentCheck.OK,
                           tags=service_check_tags)

    return r.content, r.headers.get('content-type', 'text/plain')
def _get_data(self, instance):
    """Fetch the nginx status page with optional SSL validation.

    Returns (body, content_type); submits 'nginx.can_connect' service check.
    """
    url = instance.get("nginx_status_url")
    ssl_validation = instance.get("ssl_validation", True)

    auth = None
    if "user" in instance and "password" in instance:
        auth = (instance["user"], instance["password"])

    # Submit a service check for status page availability.
    parsed_url = urlparse.urlparse(url)
    service_check_name = "nginx.can_connect"
    service_check_tags = [
        "host:%s" % parsed_url.hostname,
        "port:%s" % (parsed_url.port or 80),
    ]

    try:
        self.log.debug(u"Querying URL: {0}".format(url))
        r = requests.get(url, auth=auth, headers=headers(self.agentConfig),
                         verify=ssl_validation)
        r.raise_for_status()
    except Exception:
        self.service_check(service_check_name, AgentCheck.CRITICAL,
                           tags=service_check_tags)
        raise
    else:
        self.service_check(service_check_name, AgentCheck.OK,
                           tags=service_check_tags)

    return r.content, r.headers.get("content-type", "text/plain")
def _fetch_data(self, instance):
    """Fetch the Kong status endpoint and hand the body to the JSON parser.

    Emits the 'kong.can_connect' service check tagged with host/port.

    :param instance: check config; requires 'kong_status_url', optional 'tags'.
    :raises Exception: when 'kong_status_url' is missing or the request fails.
    """
    if 'kong_status_url' not in instance:
        raise Exception('missing "kong_status_url" value')
    tags = instance.get('tags', [])
    url = instance.get('kong_status_url')

    parsed_url = urlparse.urlparse(url)
    host = parsed_url.hostname
    # Default to 80 so the tag is never 'kong_port:None' when the URL has
    # no explicit port (matches the sibling implementation of this method).
    port = parsed_url.port or 80

    service_check_name = 'kong.can_connect'
    service_check_tags = ['kong_host:%s' % host, 'kong_port:%s' % port]

    try:
        self.log.debug(u"Querying URL: {0}".format(url))
        response = requests.get(url, headers=headers(self.agentConfig))
        self.log.debug(u"Kong status `response`: {0}".format(response))
        response.raise_for_status()
    except Exception:
        self.service_check(service_check_name, AgentCheck.CRITICAL,
                           tags=service_check_tags)
        raise
    else:
        if response.status_code == 200:
            self.service_check(service_check_name, AgentCheck.OK,
                               tags=service_check_tags)
        else:
            # raise_for_status() only raises for 4xx/5xx; any other non-200
            # still counts as a failed connectivity check.
            self.service_check(service_check_name, AgentCheck.CRITICAL,
                               tags=service_check_tags)

    return self._parse_json(response.content, tags)
def _get_data(self, instance):
    """Fetch the nginx status page via urllib2; returns (body, content_type).

    Submits the 'nginx.can_connect' service check along the way.
    """
    url = instance.get('nginx_status_url')
    req = urllib2.Request(url, None, headers(self.agentConfig))
    if 'user' in instance and 'password' in instance:
        add_basic_auth(req, instance['user'], instance['password'])

    # Submit a service check for status page availability.
    parsed_url = urlparse.urlparse(url)
    service_check_name = 'nginx.can_connect'
    service_check_tags = [
        'host:%s' % parsed_url.hostname,
        'port:%s' % (parsed_url.port or 80),
    ]

    try:
        response = urllib2.urlopen(req)
    except Exception:
        self.service_check(service_check_name, AgentCheck.CRITICAL,
                           tags=service_check_tags)
        raise
    else:
        self.service_check(service_check_name, AgentCheck.OK,
                           tags=service_check_tags)

    return response.read(), response.info().get('Content-Type', 'text/plain')
def check(self, instance):
    """Collect GoStats gauges and histograms from the configured URL.

    Logs and re-raises on any failure.
    """
    if 'gostats_url' not in instance:
        raise Exception("Missing 'gostats_url' in GoStats config")

    try:
        url = instance.get('gostats_url')
        parsed_url = urlparse.urlparse(url)
        gostats_host = parsed_url.hostname
        gostats_port = parsed_url.port or 8080

        r = requests.get(url, headers=headers(self.agentConfig))
        r.raise_for_status()
        status = r.json()

        for metric in self.GAUGES:
            if status.get(metric) is not None:
                self.gauge('gostats.%s' % (metric), status.get(metric))

        # NOTE: attribute name 'HISTGRAMS' (sic) matches the class definition.
        for metric in self.HISTGRAMS:
            if status.get(metric) is None:
                continue
            for value in status.get(metric):
                self.histogram('gostats.%s' % (metric), value)
    except Exception as e:
        self.log.error('error: %s' % str(e))
        raise
def _get_data(self, instance):
    """Return the raw body of the nginx status page for `instance`."""
    url = instance.get('nginx_status_url')
    req = urllib2.Request(url, None, headers(self.agentConfig))
    if 'user' in instance and 'password' in instance:
        add_basic_auth(req, instance['user'], instance['password'])
    return urllib2.urlopen(req).read()
def _fetch_data(self, instance):
    """Fetch the Kong status endpoint and hand the body to the JSON parser.

    Emits the 'kong.can_connect' service check tagged with host/port.
    """
    if 'kong_status_url' not in instance:
        raise Exception('missing "kong_status_url" value')

    tags = instance.get('tags', [])
    url = instance.get('kong_status_url')

    parsed = urlparse.urlparse(url)
    service_check_name = 'kong.can_connect'
    service_check_tags = [
        'kong_host:%s' % parsed.hostname,
        'kong_port:%s' % (parsed.port or 80),
    ]

    try:
        self.log.debug(u"Querying URL: {0}".format(url))
        response = requests.get(url, headers=headers(self.agentConfig))
        self.log.debug(u"Kong status `response`: {0}".format(response))
        response.raise_for_status()
    except Exception:
        self.service_check(service_check_name, AgentCheck.CRITICAL,
                           tags=service_check_tags)
        raise
    else:
        status = AgentCheck.OK if response.status_code == 200 else AgentCheck.CRITICAL
        self.service_check(service_check_name, status, tags=service_check_tags)

    return self._parse_json(response.content, tags)
def _get_json(self, url, ssl_params, timeout):
    """GET `url` and return the decoded JSON body.

    :param url: endpoint to query.
    :param ssl_params: dict with optional 'ssl_certfile'/'ssl_keyfile',
        'ssl_ca_certs' and 'ssl_cert_validation' entries.
    :param timeout: request timeout in seconds.
    :raises requests.exceptions.Timeout: on timeout (after a CRITICAL check).
    :raises Exception: on any non-200 response (after a CRITICAL check).
    """
    try:
        certificate = None
        if 'ssl_certfile' in ssl_params and 'ssl_keyfile' in ssl_params:
            certificate = (ssl_params['ssl_certfile'], ssl_params['ssl_keyfile'])

        # Default to validating certificates when 'ssl_cert_validation' is
        # absent instead of raising KeyError before any service check runs.
        if ssl_params.get('ssl_cert_validation', True):
            verify = ssl_params.get('ssl_ca_certs', True)
        else:
            verify = False

        r = requests.get(url, verify=verify, cert=certificate,
                         timeout=timeout, headers=headers(self.agentConfig))
    except requests.exceptions.Timeout:
        # If there's a timeout
        self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL,
                           message="Timeout when hitting %s" % url,
                           tags=["url:{0}".format(url)])
        raise

    if r.status_code != 200:
        self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL,
                           message="Got %s when hitting %s" % (r.status_code, url),
                           tags=["url:{0}".format(url)])
        raise Exception("Http status code {0} on url {1}".format(
            r.status_code, url))

    return r.json()
def _get_data(self, url, auth=None, send_service_check=False, service_check_tags=None):
    """
    Hit a given URL and return the parsed json
    `auth` is a tuple of (username, password) or None
    """
    req = urllib2.Request(url, None, headers(self.agentConfig))
    if auth:
        add_basic_auth(req, *auth)

    def _report_failure(message):
        # Emit a CRITICAL connect check only when the caller asked for one.
        if send_service_check:
            self.service_check(self.SERVICE_CHECK_CONNECT_NAME,
                               AgentCheck.CRITICAL,
                               tags=service_check_tags,
                               message=message)

    try:
        resp = urllib2.urlopen(req)
    except urllib2.URLError as e:
        _report_failure(e.reason)
        raise
    except Exception as e:
        _report_failure(str(e))
        raise

    return json.loads(resp.read())
def save_image(self, url):
    """Download an image and save it under ./images/<argv[1]>/, skipping
    files that already exist (de-duplicated by content-derived name).

    :param url: Image url
    :return: None
    """
    local_path = f"./images/{sys.argv[1]}/"
    os.makedirs(local_path, exist_ok=True)

    content = ""
    try:
        # NOTE(review): verify=False disables TLS certificate validation —
        # confirm this is intentional for this scraper.
        response = requests.get(url, headers=headers(), verify=False)
        if response.status_code == 200:
            content = response.content
    except Exception:
        # Best-effort download: a failed request just skips this image.
        return

    if len(content) == 0:
        return

    # Name derives from the content, so identical images collapse to one file.
    name = image_name(content)
    image_url = re.findall(r"thumburl=(.*)", url)[0]
    name_suffix = self.re_name_suffix.findall(image_url)
    if len(name_suffix) == 1:
        suffix = name_suffix[0]
    else:
        # Previously 'suffix' was left unbound here (UnboundLocalError when
        # no extension was found); fall back to .jpg instead.
        suffix = ".jpg"

    if os.path.exists(local_path + f"{name}{suffix}"):
        print(f"图片 {name}{suffix} 已经存在")  # image already exists
        # time.sleep(SECONDS)
        return

    with open(local_path + f"{name}{suffix}", "ab") as f:
        f.write(content)
        f.flush()
    # Report the actual saved filename (was hard-coded to '.jpg').
    print(f"下载 {name}{suffix} 成功")
def check(self, logger, agentConfig):
    """Fetch RabbitMQ status JSON using HTTP basic auth.

    :param logger: logger for debug/error output.
    :param agentConfig: agent config dict; needs 'rabbitMQStatusUrl',
        'rabbitMQUser' and 'rabbitMQPass' (and the URL must not be the
        placeholder example value).
    :return: parsed status dict, or False when unconfigured or on failure.
    """
    if 'rabbitMQStatusUrl' not in agentConfig or \
       'rabbitMQUser' not in agentConfig or \
       'rabbitMQPass' not in agentConfig or \
       agentConfig['rabbitMQStatusUrl'] == 'http://www.example.com:55672/json':
        return False

    try:
        logger.debug('getRabbitMQStatus: attempting authentication setup')
        manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
        manager.add_password(None, agentConfig['rabbitMQStatusUrl'],
                             agentConfig['rabbitMQUser'],
                             agentConfig['rabbitMQPass'])
        handler = urllib2.HTTPBasicAuthHandler(manager)
        opener = urllib2.build_opener(handler)
        # NOTE: installs a process-global opener as a side effect.
        urllib2.install_opener(opener)

        logger.debug('getRabbitMQStatus: attempting urlopen')
        req = urllib2.Request(agentConfig['rabbitMQStatusUrl'], None,
                              headers(agentConfig))

        # Do the request, log any errors
        request = urllib2.urlopen(req)
        response = request.read()
        return json.loads(response)
    except Exception:
        # Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
        # still propagate.
        logger.exception('Unable to get RabbitMQ status')
        return False
def _process_status(self, status_url, auth, tags):
    """Query the FPM status JSON and submit gauges and monotonic counts.

    Returns the pool name so the caller can tag its service check with it.
    """
    try:
        # TODO: adding the 'full' parameter gets you per-process detailed
        # informations, which could be nice to parse and output as metrics
        resp = requests.get(status_url, auth=auth,
                            headers=headers(self.agentConfig),
                            params={'json': True})
        resp.raise_for_status()
        data = resp.json()
    except Exception as e:
        self.log.error("Failed to get metrics from {0}.\nError {1}".format(status_url, e))
        raise

    pool_name = data.get('pool', 'default')
    metric_tags = tags + ["pool:{0}".format(pool_name)]

    for key, mname in self.GAUGES.iteritems():
        if key in data:
            self.gauge(mname, int(data[key]), tags=metric_tags)
        else:
            self.log.warn("Gauge metric {0} is missing from FPM status".format(key))

    for key, mname in self.MONOTONIC_COUNTS.iteritems():
        if key in data:
            self.monotonic_count(mname, int(data[key]), tags=metric_tags)
        else:
            self.log.warn("Counter metric {0} is missing from FPM status".format(key))

    # return pool, to tag the service check with it if we have one
    return pool_name
def _get_data(self, url, config, send_sc=True):
    """
    Hit a given URL and return the parsed json
    """
    # Load basic authentication configuration, if available.
    if config.username and config.password:
        auth = (config.username, config.password)
    else:
        auth = None

    try:
        resp = requests.get(url,
                            timeout=config.timeout,
                            headers=headers(self.agentConfig),
                            auth=auth)
        resp.raise_for_status()
    except Exception as e:
        # Optionally surface the failure as a CRITICAL connect service check.
        if send_sc:
            self.service_check(
                self.SERVICE_CHECK_CONNECT_NAME,
                AgentCheck.CRITICAL,
                message="Error {0} when hitting {1}".format(e, url),
                tags=config.service_check_tags
            )
        raise

    return resp.json()
def check(self, instance):
    """Read the Apache status page and submit gauges and rates.

    :param instance: check config; requires 'apache_status_url',
        optional 'tags'.
    """
    if 'apache_status_url' not in instance:
        self.log.warn("Missing 'apache_status_url' in Apache config")
        return

    tags = instance.get('tags', [])
    req = urllib2.Request(instance['apache_status_url'], None,
                          headers(self.agentConfig))
    response = urllib2.urlopen(req).read()

    # Loop through and extract the numerical values
    for line in response.split('\n'):
        fields = line.split(': ')
        if len(fields) != 2:
            continue
        metric, raw = fields
        try:
            value = float(raw)
        except ValueError:
            continue

        # Special case: kBytes => bytes
        if metric == 'Total kBytes':
            value = value * 1024

        # Send metric as a gauge, if applicable
        if metric in self.GAUGES:
            self.gauge(self.GAUGES[metric], value, tags=tags)

        # Send metric as a rate, if applicable
        if metric in self.RATES:
            self.rate(self.RATES[metric], value, tags=tags)
def _get_metrics(self, url, tags):
    """Scrape an nginx stub_status page at `url` and submit its metrics."""
    req = urllib2.Request(url, None, headers(self.agentConfig))
    request = urllib2.urlopen(req)
    response = request.read()

    # Thanks to http://hostingfu.com/files/nginx/nginxstats.py for this code
    # Connections
    parsed = re.search(r'Active connections:\s+(\d+)', response)
    if parsed:
        connections = int(parsed.group(1))
        self.gauge("nginx.net.connections", connections, tags=tags)

    # Requests per second
    parsed = re.search(r'\s*(\d+)\s+(\d+)\s+(\d+)', response)
    if parsed:
        requests = int(parsed.group(3))
        self.rate("nginx.net.request_per_s", requests, tags=tags)

    # Connection states, reading, writing or waiting for clients
    parsed = re.search(r'Reading: (\d+)\s+Writing: (\d+)\s+Waiting: (\d+)', response)
    if parsed:
        reading, writing, waiting = map(int, parsed.groups())
        # Removed 'assert connections == reading + writing + waiting':
        # the page is scraped non-atomically so the totals can legitimately
        # disagree, it raised NameError when the first regex did not match,
        # and asserts disappear under 'python -O'.
        self.gauge("nginx.net.reading", reading, tags=tags)
        self.gauge("nginx.net.writing", writing, tags=tags)
        self.gauge("nginx.net.waiting", waiting, tags=tags)
def _process_ping(self, ping_url, ping_reply, auth, tags, pool_name, http_host):
    """Ping an FPM pool URL and emit an OK/CRITICAL service check.

    The reply body must contain `ping_reply` (default 'pong').
    """
    expected = ping_reply if ping_reply is not None else 'pong'
    sc_tags = ["ping_url:{0}".format(ping_url)]

    try:
        # TODO: adding the 'full' parameter gets you per-process detailed
        # informations, which could be nice to parse and output as metrics
        resp = requests.get(ping_url, auth=auth,
                            headers=headers(self.agentConfig, http_host=http_host))
        resp.raise_for_status()
        if expected not in resp.text:
            raise Exception("Received unexpected reply to ping {0}".format(
                resp.text))
    except Exception as e:
        self.log.error("Failed to ping FPM pool {0} on URL {1}."
                       "\nError {2}".format(pool_name, ping_url, e))
        self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL,
                           tags=sc_tags, message=str(e))
    else:
        self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.OK, tags=sc_tags)
def check(self, logger, agentConfig):
    """Fetch RabbitMQ status JSON using HTTP basic auth.

    :param logger: logger for debug/error output.
    :param agentConfig: agent config dict; needs 'rabbitmq_status_url',
        'rabbitmq_user' and 'rabbitmq_pass' (and the URL must not be the
        placeholder example value).
    :return: parsed status dict, or False when unconfigured or on failure.
    """
    if 'rabbitmq_status_url' not in agentConfig or \
       'rabbitmq_user' not in agentConfig or \
       'rabbitmq_pass' not in agentConfig or \
       agentConfig['rabbitmq_status_url'] == 'http://www.example.com:55672/json':
        return False

    try:
        logger.debug('getRabbitMQStatus: attempting authentication setup')
        manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
        manager.add_password(None, agentConfig['rabbitmq_status_url'],
                             agentConfig['rabbitmq_user'],
                             agentConfig['rabbitmq_pass'])
        handler = urllib2.HTTPBasicAuthHandler(manager)
        opener = urllib2.build_opener(handler)
        # NOTE: installs a process-global opener as a side effect.
        urllib2.install_opener(opener)

        logger.debug('getRabbitMQStatus: attempting urlopen')
        req = urllib2.Request(agentConfig['rabbitmq_status_url'], None,
                              headers(agentConfig))

        # Do the request, log any errors
        request = urllib2.urlopen(req)
        response = request.read()
        return json.loads(response)
    except Exception:
        # Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
        # still propagate.
        logger.exception('Unable to get RabbitMQ status')
        return False
def check(self, instance):
    """Collect Fluentd monitor_agent gauges for the configured plugins.

    Emits a CRITICAL service check (and re-raises) when the stats endpoint
    cannot be reached or parsed.

    :param instance: check config; requires 'monitor_agent_url', optional
        'plugin_ids' list to select which plugins are reported.
    """
    if 'monitor_agent_url' not in instance:
        raise Exception('Fluentd instance missing "monitor_agent_url" value.')

    try:
        url = instance.get('monitor_agent_url')
        plugin_ids = instance.get('plugin_ids', [])

        parsed_url = urlparse.urlparse(url)
        monitor_agent_host = parsed_url.hostname
        monitor_agent_port = parsed_url.port or 24220
        service_check_tags = ['fluentd_host:%s' % monitor_agent_host,
                              'fluentd_port:%s' % monitor_agent_port]

        r = requests.get(url, headers=headers(self.agentConfig))
        r.raise_for_status()
        status = r.json()

        for p in status['plugins']:
            for m in self.GAUGES:
                if p.get(m) is None:
                    continue
                if p.get('plugin_id') in plugin_ids:
                    self.gauge('fluentd.%s' % (m), p.get(m),
                               ["plugin_id:%s" % p.get('plugin_id')])
    except Exception as e:
        msg = "No stats could be retrieved from %s : %s" % (url, str(e))
        self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL,
                           tags=service_check_tags, message=msg)
        # Bare 'raise' preserves the original traceback; 'raise e' reset it.
        raise
def _process_ping(self, ping_url, ping_reply, auth, tags, pool_name):
    """Ping an FPM pool URL and emit an OK/CRITICAL service check.

    The reply body must contain `ping_reply` (default 'pong').
    """
    expected = ping_reply if ping_reply is not None else 'pong'
    sc_tags = ["ping_url:{0}".format(ping_url)]

    try:
        resp = requests.get(ping_url, auth=auth,
                            headers=headers(self.agentConfig))
        resp.raise_for_status()
        if expected not in resp.text:
            raise Exception("Received unexpected reply to ping {0}".format(
                resp.text))
    except Exception as e:
        self.log.error("Failed to ping FPM pool {0} on URL {1}."
                       "\nError {2}".format(pool_name, ping_url, e))
        self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL,
                           tags=sc_tags, message=str(e))
    else:
        self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.OK, tags=sc_tags)
def _perform_request(self, url, ssl_validation, auth):
    """GET `url` with the agent's headers; raise for HTTP errors.

    Returns the requests Response object.
    """
    response = requests.get(
        url,
        auth=auth,
        headers=headers(self.agentConfig),
        verify=ssl_validation,
        timeout=self.default_integration_http_timeout,
    )
    response.raise_for_status()
    return response
def _check_connectivity_to_master(self, instance):
    """Service check: verify the configured Gitlab master URL is reachable.

    Emits MASTER_SERVICE_CHECK_NAME with OK/CRITICAL and re-raises on
    failure; silently returns when 'gitlab_url' is not configured.
    """
    url = instance.get('gitlab_url')

    if url is None:
        # Simply ignore this service check if not configured
        return

    parsed_url = urlparse.urlparse(url)
    gitlab_host = parsed_url.hostname
    gitlab_port = parsed_url.port or 80
    service_check_tags = ['gitlab_host:%s' % gitlab_host,
                          'gitlab_port:%s' % gitlab_port]

    ## Load the ssl configuration
    ssl_params = {
        'ssl_cert_validation': _is_affirmative(instance.get('ssl_cert_validation', True)),
        'ssl_ca_certs': instance.get('ssl_ca_certs'),
    }

    # Iterate over a snapshot: deleting from a dict while iterating its
    # live view raises RuntimeError on Python 3.
    for key, param in list(ssl_params.items()):
        if param is None:
            del ssl_params[key]

    verify_ssl = ssl_params.get('ssl_ca_certs', True) if ssl_params['ssl_cert_validation'] else False

    ## Timeout settings
    timeouts = (int(instance.get('connect_timeout', GitlabRunnerCheck.DEFAULT_CONNECT_TIMEOUT)),
                int(instance.get('receive_timeout', GitlabRunnerCheck.DEFAULT_RECEIVE_TIMEOUT)))

    ## Auth settings
    auth = None
    if 'gitlab_user' in instance and 'gitlab_password' in instance:
        auth = (instance['gitlab_user'], instance['gitlab_password'])

    try:
        self.log.debug('checking connectivity against %s' % url)
        r = requests.get(url, auth=auth, verify=verify_ssl, timeout=timeouts,
                         headers=headers(self.agentConfig))
        if r.status_code != 200:
            self.service_check(self.MASTER_SERVICE_CHECK_NAME, PrometheusCheck.CRITICAL,
                               message="Got %s when hitting %s" % (r.status_code, url),
                               tags=service_check_tags)
            raise Exception("Http status code {0} on url {1}".format(r.status_code, url))
        else:
            r.raise_for_status()
    except requests.exceptions.Timeout:
        # If there's a timeout
        self.service_check(self.MASTER_SERVICE_CHECK_NAME, PrometheusCheck.CRITICAL,
                           message="Timeout when hitting %s" % url,
                           tags=service_check_tags)
        raise
    except Exception as e:
        # Rejoined the message literal, which contained a raw line break in
        # the original source (a syntax error as written).
        self.service_check(self.MASTER_SERVICE_CHECK_NAME, PrometheusCheck.CRITICAL,
                           message="Error hitting %s. Error: %s" % (url, e.message),
                           tags=service_check_tags)
        raise
    else:
        self.service_check(self.MASTER_SERVICE_CHECK_NAME, PrometheusCheck.OK,
                           tags=service_check_tags)
        self.log.debug("gitlab check succeeded")
def _get_data(self, instance):
    """Fetch the nginx status page; returns (body, content_type)."""
    url = instance.get('nginx_status_url')
    req = urllib2.Request(url, None, headers(self.agentConfig))
    if 'user' in instance and 'password' in instance:
        add_basic_auth(req, instance['user'], instance['password'])

    response = urllib2.urlopen(req)
    return response.read(), response.info().get('Content-Type', 'text/plain')
def _get_data(self, instance):
    """Fetch the nginx status page; returns (body, content_type)."""
    url = instance.get("nginx_status_url")
    req = urllib2.Request(url, None, headers(self.agentConfig))
    if "user" in instance and "password" in instance:
        add_basic_auth(req, instance["user"], instance["password"])

    resp = urllib2.urlopen(req)
    return resp.read(), resp.info().get("Content-Type", "text/plain")
def _get_response_from_url(self, url, instance, params=None):
    """GET the uwsgi stats endpoint; raises for HTTP errors.

    Timeout comes from the instance config, falling back to self.TIMEOUT.
    """
    self.log.debug('Fetching uwsgi stats at url: %s' % url)
    timeout = int(instance.get('timeout', self.TIMEOUT))
    response = requests.get(url,
                            params=params,
                            headers=headers(self.agentConfig),
                            timeout=timeout)
    response.raise_for_status()
    return response
def _get_stats(self, url):
    "Hit a given URL and return the parsed json"
    self.log.debug('Fetching Couchbase stats at url: %s' % url)
    req = urllib2.Request(url, None, headers(self.agentConfig))
    # Do the request, log any errors
    resp = urllib2.urlopen(req)
    return json.loads(resp.read())
def check(self, instance):
    """Collect Lighttpd status metrics for one instance.

    Scrapes the configured status URL, parses 'name: value' lines and
    submits them as gauges, rates and counters.  If nothing was parsed,
    it retries once with a server-version-specific URL suffix appended
    (remembered in self.assumed_url) and raises if that still yields
    no metrics.

    :param instance: check config; requires 'lighttpd_status_url',
        optional 'tags', 'user' and 'password'.
    :raises Exception: when the URL is missing or no metrics were fetched.
    """
    if 'lighttpd_status_url' not in instance:
        raise Exception("Missing 'lighttpd_status_url' variable in Lighttpd config")

    # Prefer a previously "assumed" (suffix-corrected) URL when one exists.
    url = self.assumed_url.get(instance['lighttpd_status_url'], instance['lighttpd_status_url'])

    tags = instance.get('tags', [])

    self.log.debug("Connecting to %s" % url)

    req = urllib2.Request(url, None, headers(self.agentConfig))
    if 'user' in instance and 'password' in instance:
        add_basic_auth(req, instance['user'], instance['password'])
    request = urllib2.urlopen(req)
    # The server version determines which URL suffix is expected below.
    headers_resp = request.info().headers
    server_version = self._get_server_version(headers_resp)
    response = request.read()

    metric_count = 0
    # Loop through and extract the numerical values
    for line in response.split('\n'):
        values = line.split(': ')
        if len(values) == 2:  # match
            metric, value = values
            try:
                value = float(value)
            except ValueError:
                continue

            # Special case: kBytes => bytes
            if metric == 'Total kBytes':
                value = value * 1024

            # Send metric as a gauge, if applicable
            if metric in self.GAUGES:
                metric_count += 1
                metric_name = self.GAUGES[metric]
                self.gauge(metric_name, value, tags=tags)

            # Send metric as a rate, if applicable
            if metric in self.RATES:
                metric_count += 1
                metric_name = self.RATES[metric]
                self.rate(metric_name, value, tags=tags)

            # Send metric as a counter, if applicable
            if metric in self.COUNTERS:
                metric_count += 1
                metric_name = self.COUNTERS[metric]
                self.increment(metric_name, value, tags=tags)

    if metric_count == 0:
        # Nothing parsed: the URL may be missing the version-specific
        # status suffix. Retry once with it appended, otherwise give up.
        url_suffix = self.URL_SUFFIX_PER_VERSION[server_version]
        if self.assumed_url.get(instance['lighttpd_status_url'], None) is None and url[-len(url_suffix):] != url_suffix:
            self.assumed_url[instance['lighttpd_status_url']] = '%s%s' % (url, url_suffix)
            self.warning("Assuming url was not correct. Trying to add %s suffix to the url" % url_suffix)
            # Recurses exactly once: the assumed_url entry stored above
            # forces the 'else' branch if the retry also finds no metrics.
            self.check(instance)
        else:
            raise Exception("No metrics were fetched for this instance. Make sure that %s is the proper url." % instance['lighttpd_status_url'])
def _get_data(self, url, auth=None):
    """
    Hit a given URL and return the parsed json
    `auth` is a tuple of (username, password) or None
    """
    req = urllib2.Request(url, None, headers(self.agentConfig))
    if auth:
        add_basic_auth(req, *auth)
    return json.loads(urllib2.urlopen(req).read())
def _check_health_endpoint(self, instance, check_type, tags):
    """Hit one of Gitlab's `/-/<check_type>` health endpoints and report a service check.

    :param instance: the instance config dict (gitlab_url, auth, timeouts, ssl)
    :param check_type: endpoint name; must be in self.ALLOWED_SERVICE_CHECKS
    :param tags: extra tags appended to the service check tags
    :raises CheckException: if check_type is not an allowed endpoint
    :raises Exception: on timeout, connection error, or non-200 response
    """
    if check_type not in self.ALLOWED_SERVICE_CHECKS:
        raise CheckException("Health endpoint %s is not a valid endpoint" % check_type)

    url = instance.get('gitlab_url')
    if url is None:
        # Simply ignore this service check if not configured
        self.log.debug("gitlab_url not configured, service check %s skipped" % check_type)
        return

    service_check_tags = self._service_check_tags(url)
    service_check_tags.extend(tags)
    verify_ssl = self._verify_ssl(instance)

    # Timeout settings: (connect, read) tuple as accepted by requests
    timeouts = (int(instance.get('connect_timeout', GitlabCheck.DEFAULT_CONNECT_TIMEOUT)),
                int(instance.get('receive_timeout', GitlabCheck.DEFAULT_RECEIVE_TIMEOUT)))

    # Auth settings
    auth = None
    if 'gitlab_user' in instance and 'gitlab_password' in instance:
        auth = (instance['gitlab_user'], instance['gitlab_password'])

    # These define which endpoint is hit and which type of check is actually performed
    # TODO: parse errors and report for single sub-service failure?
    service_check_name = "gitlab.%s" % check_type
    check_url = "%s/-/%s" % (url, check_type)

    try:
        self.log.debug('checking %s against %s' % (check_type, check_url))
        r = requests.get(check_url, auth=auth, verify=verify_ssl, timeout=timeouts,
                         headers=headers(self.agentConfig))
        if r.status_code != 200:
            self.service_check(service_check_name, PrometheusCheck.CRITICAL,
                               message="Got %s when hitting %s" % (r.status_code, check_url),
                               tags=service_check_tags)
            raise Exception("Http status code {0} on check_url {1}".format(r.status_code, check_url))
        else:
            r.raise_for_status()
    except requests.exceptions.Timeout:
        # If there's a timeout
        self.service_check(service_check_name, PrometheusCheck.CRITICAL,
                           message="Timeout when hitting %s" % check_url,
                           tags=service_check_tags)
        raise
    except Exception as e:
        # FIX: the message literal was broken across a line (unterminated string);
        # restored as an explicit "\n". Also format `e` directly instead of the
        # deprecated `e.message`, which is missing on many exception types.
        self.service_check(service_check_name, PrometheusCheck.CRITICAL,
                           message="Error hitting %s. \nError: %s" % (check_url, e),
                           tags=service_check_tags)
        raise
    else:
        self.service_check(service_check_name, PrometheusCheck.OK, tags=service_check_tags)
    self.log.debug("gitlab check %s succeeded" % check_type)
def _get_stats(self, url, instance):
    """
    Hit a given URL and return the parsed json.
    """
    self.log.debug('Fetching Couchbase stats at url: %s' % url)
    stats_request = urllib2.Request(url, None, headers(self.agentConfig))
    if 'user' in instance and 'password' in instance:
        add_basic_auth(stats_request, instance['user'], instance['password'])
    # Honor the per-instance timeout, falling back to the module default.
    request_timeout = float(instance.get('timeout', DEFAULT_TIMEOUT))
    body = urllib2.urlopen(stats_request, timeout=request_timeout).read()
    return json.loads(body)
def _get_stats(self, url, instance):
    "Hit a given URL and return the parsed json"
    self.log.debug('Fetching Couchbase stats at url: %s' % url)
    stats_req = urllib2.Request(url, None, headers(self.agentConfig))
    if 'user' in instance and 'password' in instance:
        add_basic_auth(stats_req, instance['user'], instance['password'])
    # Do the request and decode the JSON body in one pass.
    return json.loads(urllib2.urlopen(stats_req).read())
def _fetch_data(self, url, username, password):
    """Fetch the HAProxy stats endpoint and return the response split into lines."""
    stats_url = "%s%s" % (url, STATS_URL)
    credentials = (username, password)
    self.log.debug("HAProxy Fetching haproxy search data from: %s" % stats_url)
    response = requests.get(stats_url, auth=credentials, headers=headers(self.agentConfig))
    response.raise_for_status()
    return response.content.splitlines()
def _get_response_from_url(self, url, instance, params=None):
    """GET `url` (with optional basic auth and query params) and return the response.

    :param url: endpoint to hit
    :param instance: instance config; may carry 'username', 'password', 'timeout'
    :param params: optional dict of query-string parameters
    :raises requests.exceptions.HTTPError: on a non-2xx status
    """
    self.log.debug('Fetching Celery stats at url: %s' % url)
    auth = None
    # BUG FIX: the original condition was `if 'username' and 'password' in instance:`,
    # which only tested `'password' in instance` ('username' is a truthy literal)
    # and then raised KeyError when 'username' was absent.
    if 'username' in instance and 'password' in instance:
        auth = (instance['username'], instance['password'])
    request_headers = headers(self.agentConfig)
    response = requests.get(url, params=params, auth=auth, headers=request_headers,
                            timeout=int(instance.get('timeout', self.TIMEOUT)))
    response.raise_for_status()
    return response
def _get_stats(self, agentConfig, url):
    """Hit a given URL and return the parsed json, or None on any request/parse error.

    Best-effort by design: failures are logged (with traceback) and reported
    as None rather than raised.
    """
    try:
        req = urllib2.Request(url, None, headers(agentConfig))
        # Do the request, log any errors
        request = urllib2.urlopen(req)
        response = request.read()
        return json.loads(response)
    except Exception:
        # FIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to Exception while keeping the
        # deliberate best-effort log-and-return-None behavior.
        self.logger.exception('Unable to get CouchDB statistics')
        return None
def _get_stats(self, url, instance):
    "Hit a given URL and return the parsed json"
    self.log.debug('Fetching Couchdb stats at url: %s' % url)
    couch_request = urllib2.Request(url, None, headers(self.agentConfig))
    if 'user' in instance and 'password' in instance:
        add_basic_auth(couch_request, instance['user'], instance['password'])
    # Do the request, log any errors
    payload = urllib2.urlopen(couch_request).read()
    return json.loads(payload)
def _get_stats(self, url, instance):
    "Hit a given URL and return the parsed json"
    self.log.debug('Fetching Couchdb stats at url: %s' % url)
    credentials = None
    if 'user' in instance and 'password' in instance:
        credentials = (instance['user'], instance['password'])
    response = requests.get(url,
                            auth=credentials,
                            headers=headers(self.agentConfig),
                            timeout=int(instance.get('timeout', self.TIMEOUT)))
    response.raise_for_status()
    return response.json()
def _get_stats(self, url, instance):
    """
    Hit a given URL and return the parsed json.
    """
    self.log.debug("Fetching Couchbase stats at url: %s" % url)
    request_timeout = float(instance.get("timeout", DEFAULT_TIMEOUT))
    credentials = None
    if "user" in instance and "password" in instance:
        credentials = (instance["user"], instance["password"])
    response = requests.get(url,
                            auth=credentials,
                            headers=headers(self.agentConfig),
                            timeout=request_timeout)
    response.raise_for_status()
    return response.json()
def _fetch_url_data(self, url, username, password, verify):
    '''
    Hit a given http url and return the stats lines
    '''
    # Try to fetch data from the stats URL
    stats_url = "%s%s" % (url, STATS_URL)
    self.log.debug("Fetching haproxy stats from url: %s" % stats_url)
    resp = requests.get(stats_url,
                        auth=(username, password),
                        headers=headers(self.agentConfig),
                        verify=verify,
                        timeout=self.default_integration_http_timeout)
    resp.raise_for_status()
    return resp.content.splitlines()
def _fetch_data(self, url, username, password):
    '''
    Hit a given URL and return the parsed json
    '''
    # Try to fetch data from the stats URL
    full_url = "%s%s" % (url, STATS_URL)
    self.log.debug("HAProxy Fetching haproxy search data from: %s" % full_url)
    result = requests.get(full_url, auth=(username, password), headers=headers(self.agentConfig))
    result.raise_for_status()
    return result.content.splitlines()
def _perform_request(self, url, path, ssl_params, timeout):
    """GET `url + path` with optional client certificate and CA verification.

    :param ssl_params: dict that may carry 'ssl_certfile'/'ssl_keyfile'
        (client cert pair), 'ssl_cert_validation' (bool), and 'ssl_ca_certs'
        (CA bundle path passed to requests' `verify`)
    :param timeout: request timeout forwarded to requests.get
    :return: the requests Response object (status is NOT checked here)
    """
    certificate = None
    if 'ssl_certfile' in ssl_params and 'ssl_keyfile' in ssl_params:
        certificate = (ssl_params['ssl_certfile'], ssl_params['ssl_keyfile'])
    # FIX: the original indexed ssl_params['ssl_cert_validation'] directly and
    # raised KeyError when the key was absent; default to validating certs
    # (the safe choice) when it is not configured.
    if ssl_params.get('ssl_cert_validation', True):
        verify = ssl_params.get('ssl_ca_certs', True)
    else:
        verify = False
    return requests.get(url + path, verify=verify, cert=certificate, timeout=timeout,
                        headers=headers(self.agentConfig))