def main():
    """Entry point: register the Condor collector and serve metrics over WSGI."""
    parser = argparse.ArgumentParser(
        description="Run condor exporter to expose metrics for prometheus consumption")
    parser.add_argument('-p', '--port', type=int, default=9118, required=False,
                        help='Specify a port to be used. Defaults to 9118')
    parser.add_argument('-a', '--host', type=str, default='localhost', required=False,
                        help='Host address to listen on. Defaults to localhost')
    parser.add_argument('-c', '--collector', type=str, default='', required=False,
                        help='Condor collector address. Defaults to localhost')
    options = parser.parse_args()
    listen_port = options.port
    listen_host = options.host
    pool_address = options.collector
    try:
        # Swap the default WSGI process collector for the Condor one.
        from exporter.wsgi import wsgi_collector
        REGISTRY.unregister(wsgi_collector)
        REGISTRY.register(CondorCollector(pool_address))
        start_wsgi_server(addr=listen_host, port=listen_port)
        print("Exporter listening on %s:%d" % (listen_host, listen_port))
        # Keep the main thread alive; the WSGI server runs in the background.
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        print("Interrupted, Shutting down")
        exit(0)
def main(unused_argv):
    """Connect to the FRITZ!Box, register its actions/variables, serve metrics."""
    if FLAGS.verbose:
        logging.set_verbosity(logging.DEBUG)
    if FLAGS.ipv6_hack:
        # TODO(pkern): Eliminate this. Right now socketserver is IPv4-only,
        # so what we should do is actually bring up a proper HTTP server and
        # then map the path. However to enable IPv6 listening this is an
        # awful hack of monkey-patching to make it work for now.
        import socket
        import socketserver
        socketserver.TCPServer.address_family = socket.AF_INET6
    client = fritzconnection.FritzConnection(
        address=FLAGS.address,
        user=FLAGS.username,
        password=FLAGS.password,
    )
    logging.info(
        f'Connection succeeded to {client.modelname} on {FLAGS.address}')
    actions = collect_actions(client)
    logging.info(f'Collected {len(actions)} actions')
    variables = collect_variables(client, actions)
    logging.info(f'Collected {len(variables)} variables')
    # --listen is "host:port"; rsplit keeps IPv6 host parts intact.
    host, port_str = FLAGS.listen.rsplit(':', 1)
    start_wsgi_server(addr=host, port=int(port_str))
    logging.info(f'Listening on {FLAGS.listen}')
    # Route the common termination signals through the shared quit handler.
    for signum in (signal.SIGTERM, signal.SIGINT, signal.SIGHUP):
        signal.signal(signum, quit)
    exit.wait()
def main(): """Register the RQ collector and start a WSGI server.""" args = parse_args() logging.basicConfig( format = args.log_format, datefmt = args.log_datefmt, level = args.log_level.upper() ) # Register the RQ collector try: connection = get_redis_connection( url = args.redis_url, host = args.redis_host, port = args.redis_port, db = args.redis_db, password = args.redis_pass, password_file = args.redis_pass_file ) # Register the RQ collector # The `collect` method is called on registration REGISTRY.register(RQCollector(connection)) except (IOError, RedisError) as exc: logger.exception('There was an error starting the RQ exporter') sys.exit(1) # Start the WSGI server start_wsgi_server(args.port, args.host) logger.info(f'Serving the application on {args.host}:{args.port}') while True: time.sleep(1)
def rq_exporter_server(rq_exporter_addr):
    """RQ's Prometheus exporter."""
    from prometheus_client import start_wsgi_server
    from prometheus_client.core import REGISTRY

    # Register the exporter before the server comes up so the first
    # scrape already sees RQ metrics.
    exporter = RQPrometheusExporter(get_test_cache())
    REGISTRY.register(exporter)
    start_wsgi_server(*rq_exporter_addr)
    yield
def start_admin(settings): """Start the admin interface. Args: settings: dict(str -> value). """ port = settings.get("admin_port", DEFAULT_ADMIN_PORT) if port and not start_admin.started: prometheus_client.start_wsgi_server(port) start_admin.started = True
def prepare_prometheus(port, multiproc_dir):
    """Configure multiprocess metric collection and start the WSGI server.

    Args:
        port: TCP port for the Prometheus metrics endpoint.
        multiproc_dir: directory the prometheus client library should use
            for its per-process metric files, exported via the
            `prometheus_multiproc_dir` environment variable. If falsy, the
            variable is expected to be set in the environment already.
    """
    LOG.info('Init Prometheus')
    # Set environment variable "prometheus_multiproc_dir"
    if multiproc_dir:
        LOG.info('Set prometheus_multiproc_dir for: %s', multiproc_dir)
        os.environ['prometheus_multiproc_dir'] = multiproc_dir
    else:
        # LOG.error, not LOG.exception: there is no active exception here,
        # and .exception() would append a bogus "NoneType: None" traceback
        # to the log record.
        LOG.error('prometheus_multiproc_dir environment variable must be '
                  'set to a directory that the prometheus client library '
                  'can use for metrics')
    # Create registry for prometheus client multiprocess
    registry = CollectorRegistry()
    multiprocess.MultiProcessCollector(registry)
    start_wsgi_server(port=port, registry=registry)
def bootstrap_metrics_server(): """Configure and bootstrap metrics server.""" try: logger.info("Creating cache connection") cache = Redis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB, password=REDIS_PASS) logger.info("Registering RQ Prometheus exporter") REGISTRY.register(RQPrometheusExporter(cache)) except (RedisError, IOError) as exp: logger.exception(exp) sys.exit(1) logger.info("Starting a metrics server") start_wsgi_server(int(PORT), HOST) signal.signal(signal.SIGINT, sig_handle) signal.signal(signal.SIGTERM, sig_handle)
def start_metrics_server(): """ Starts a http server on a port specified in the configuration file and exposes Prometheus metrics on it. Also removes GC_COLLECTOR metrics because they are not really needed. """ # Remove garbage collection metrics REGISTRY.unregister(GC_COLLECTOR) # Gather configurations config = Configuration().values ip = config.service.ip metrics_port = config.service.metrics_port # Start server start_wsgi_server(metrics_port) # Log Logger() \ .event(category="runnable", action="run metrics") \ .server(ip=ip, port=metrics_port) \ .out(severity=Severity.INFO)
def __init__(self, listen_address: str = "0.0.0.0", http_port: int = 8080, **kwargs) -> None:
    """
    Initialize Metrics JustInTime Collector

    :param listen_address: address that the server is reachable on
    :param http_port: port that the http server runs on
    """
    self.readers: tp.List[BaseReader] = []

    # Surface any configuration keys we do not understand.
    if kwargs:
        plural = "s" if len(kwargs) > 1 else ""
        unknown = ", ".join(kwargs.keys())
        logger.warning(
            f'Unknown Metrics configuration parameter{plural}:{unknown}')

    # Set up the underlying registry, then make this instance discoverable.
    super().__init__(auto_describe=True, target_info=None)
    REGISTRY.register(self)

    logger.info(
        f"Starting Metrics HTTP Server on {listen_address}:{http_port}")
    # The WSGI server runs in a background thread, so __init__ returns.
    start_wsgi_server(addr=listen_address, port=http_port)
import logging from prometheus_client import start_wsgi_server, Counter, Gauge from time import sleep # Read environment variables REFRESH_INTERVAL = os.environ.get("REFRESH_INTERVAL", 20) PRIVATE_GITHUB_TOKEN = os.environ["PRIVATE_GITHUB_TOKEN"] OWNER = os.environ["OWNER"] LOGLEVEL = os.environ.get('LOGLEVEL', 'INFO').upper() logging.basicConfig(level=LOGLEVEL) logging.StreamHandler(stream=sys.stdout) # Start prometheus metrics start_wsgi_server(8000) logging.warning("Exporter Server started on Port 8000") metric_runner_api_ratelimit = Gauge("github_runner_api_remain_rate_limit", "Github Api remaining rate limit", ["org"]) class runnerExports: def __init__(self): # Define metrics to expose self.metric_runner_org_status = Gauge( "github_runner_org_status", "Runner status", ["name", "id", "os", "labels", "status"], ) self.metric_runner_org_label_status = Gauge(
metrics['custom_emojis'] = json.loads(request.urlopen('{}/api/v1/custom_emojis'.format(instance_url)).read().decode('UTF-8')) metrics['trends'] = json.loads(request.urlopen('{}/api/v1/trends'.format(instance_url)).read().decode('UTF-8')) last_query.info({'timestamp': str(round(time.time()))}) version.info({'version': metrics['instance']['version']}) uri.info({'title': metrics['instance']['title']}) title.info({'uri': metrics['instance']['title']}) short_description.info({'short_description': metrics['instance']['short_description']}) description.info({'short_description': metrics['instance']['description']}) email.info({'email': metrics['instance']['email']}) user_count.set(metrics['instance']['stats']['user_count']) status_count.set(metrics['instance']['stats']['status_count']) domain_count.set(metrics['instance']['stats']['domain_count']) custom_emoji_count.set(len(metrics['custom_emojis'])) _trends = {} for trend in metrics['trends']: _trends[trend['name']] = trend['history'][0]['uses'] trends.info(_trends) print('[{}] start exporting Stats about {}'.format(datetime.now().strftime('%FT%H:%M%Z%z'), instance_url)) print('[{}] listening on {}:{}. Point your Prometheus installation at this address and port'.format(datetime.now().strftime('%FT%H:%M%Z%z'), host, port)) pc.start_wsgi_server(port, addr=host) while True: collectMetrics() time.sleep(interval)
import time
from prometheus_client import start_wsgi_server, Counter
from prometheus_client.metrics_core import CounterMetricFamily

# Module-level demo counter (registered with the default registry on creation).
counter = Counter('demo', 'demo')

if __name__ == '__main__':
    # Serve metrics on all interfaces, then tick once per second forever.
    start_wsgi_server(8087, '0.0.0.0')
    i = 0
    while True:
        i += 1
        time.sleep(1)
        print(f'i = {i}')
        # Built fresh each tick; note it is never registered anywhere,
        # so it has no effect on the exposed metrics.
        CounterMetricFamily('demo', 'demo', i)
def main():
    """Set up debug logging, expose metrics on :8888, and start scraping."""
    logging.basicConfig(level=logging.DEBUG)
    options = parse_args()
    # The metrics endpoint comes up before the scrape loop starts.
    start_wsgi_server(8888)
    run_scraping(options.hnbgw)
def run_server(self, port_): print("READY to START Exporter") start_wsgi_server(port_) self.logger.info('Exporter started in port: ' + str(port_))