def run(self):
    """Start the exporter HTTP server and serve requests until interrupted.

    Binds a ForkingHTTPServer to the configured address/port and handles
    requests in a loop; Ctrl-C (KeyboardInterrupt) stops the loop. The
    listening socket is always closed on exit.
    """
    self.print_info()
    server = ForkingHTTPServer((self._address, self._port), RequestHandler)
    # The request handler reads the metrics endpoint path off the server
    # instance, so attach it here.
    server.endpoint = self.endpoint
    try:
        while True:
            server.handle_request()
    except KeyboardInterrupt:
        get_module_logger("exporter").info("Killing exporter")
    finally:
        # BUG FIX: the original closed the socket only in the
        # KeyboardInterrupt branch, leaking it on any other exception.
        server.server_close()
def main():
    """Entry point: parse CLI arguments and run the iLO exporter server."""
    get_module_logger("main").info(
        "HP ILO Metrics Exporter (Python %s, OpenSSL %s)"
        % (platform.python_version(), ssl.OPENSSL_VERSION))
    parser = argparse.ArgumentParser(
        description='Exports ilo health_at_a_glance state to Prometheus')
    parser.add_argument('--address', type=str, dest='address',
                        default='0.0.0.0', help='address to serve on')
    # BUG FIX: default was the string '9416' while type=int; use an int so
    # the default does not rely on argparse coercing string defaults.
    parser.add_argument('--port', type=int, dest='port', default=9416,
                        help='port to bind')
    parser.add_argument('--endpoint', type=str, dest='endpoint',
                        default='/metrics',
                        help='endpoint where metrics will be published')
    args = parser.parse_args()
    # Argument dest names deliberately match ILOExporterServer's
    # constructor parameters so vars(args) can be splatted directly.
    exporter = ILOExporterServer(**vars(args))
    exporter.run()
from datetime import datetime from sqlalchemy import Column, Integer, String, Text, DATETIME, DATE, ForeignKey, Float from sqlalchemy.ext.declarative import declarative_base from sqlalchemy import create_engine from sqlalchemy.orm import sessionmaker, relationship from logger import get_module_logger from config import config logger = get_module_logger(__name__) def get_db_url(): return f"{dbi['DBMS']}://{dbi['USER']}:{dbi['PASS']}@{dbi['HOST']}:{dbi['PORT']}/{dbi['DB']}?charset=utf8mb4" Base = declarative_base() # MySQLに接続。 dbi = config['db'] url = get_db_url() engine = create_engine(url, echo=True) # セッションの作成 Session = sessionmaker(bind=engine) session = Session() class CrawlProgress(Base): """クロールのプログレス""" __tablename__ = 'crawl_progresses' id = Column(Integer, primary_key=True) host_name = Column(String(100)) search_query_id = Column(Integer,
def do_GET(self):
    """
    Process GET request.

    Serves Prometheus metrics for the iLO identified by the request's
    query parameters (``ilo_host``, ``ilo_port``, ``ilo_user``,
    ``ilo_password``, falling back to the ILO_* environment variables),
    a small HTML index page on '/', and 404 for anything else.

    :return: Response with Prometheus metrics
    """
    # this will be used to return the total amount of time the request took
    start_time = time.time()
    # get parameters from the URL
    url = urlparse(self.path)
    # following boolean will be passed to True if an error is detected
    # during the argument parsing
    error_detected = False
    query_components = parse_qs(urlparse(self.path).query)

    # 127.0.0.1 - - [03/Aug/2020 10:15:09] "GET /metrics?ilo_host=192.168.220.188&ilo_port=443&ilo_user=prometheus&ilo_password=xzcwjikomEvkqidm4t HTTP/1.1" 200 -
    get_module_logger("exporter").info("{} GET {}".format(
        self.client_address, self.path))

    ilo_host = None
    ilo_port = None
    ilo_user = None
    ilo_password = None
    try:
        # Query-string values win; empty/missing values fall back to the
        # environment (KeyError here means neither source provided one).
        ilo_host = query_components.get('ilo_host', [''])[0] or os.environ['ILO_HOST']
        ilo_port = int(
            query_components.get('ilo_port', [''])[0] or os.environ['ILO_PORT'])
        ilo_user = query_components.get('ilo_user', [''])[0] or os.environ['ILO_USER']
        ilo_password = query_components.get(
            'ilo_password', [''])[0] or os.environ['ILO_PASSWORD']
    except KeyError as e:
        get_module_logger("exporter").error("missing parameter %s" % e)
        self.return_error()
        error_detected = True

    if url.path == self.server.endpoint and ilo_host and ilo_user and ilo_password and ilo_port:
        ilo = None
        ssl_context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
        # Sadly, ancient iLO's aren't dead yet, so let's enable sslv3 by default
        ssl_context.options &= ~ssl.OP_NO_SSLv3
        ssl_context.check_hostname = False
        ssl_context.set_ciphers((
            'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:'
            'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:'
            '!eNULL:!MD5'))
        try:
            ilo = hpilo.Ilo(hostname=ilo_host,
                            login=ilo_user,
                            password=ilo_password,
                            port=ilo_port,
                            timeout=10,
                            ssl_context=ssl_context)
        except hpilo.IloLoginFailed:
            get_module_logger("exporter").error("ILO login failed")
            self.return_error()
            # BUG FIX: without this return the code below dereferenced a
            # None `ilo` and crashed the handler after the error response.
            return
        except gaierror:
            get_module_logger("exporter").error(
                "ILO invalid address or port")
            self.return_error()
            return
        except hpilo.IloCommunicationError as e:
            get_module_logger("exporter").error(e)
            self.return_error()
            return

        # get product and server name (best-effort: fall back to generic
        # values rather than failing the whole scrape)
        try:
            product_name = ilo.get_product_name()
        except Exception:  # was a bare except; narrowed to Exception
            product_name = "Unknown HP Server"

        try:
            server_name = ilo.get_server_name()
            if server_name == "":
                server_name = ilo_host
        except Exception:  # was a bare except; narrowed to Exception
            server_name = ilo_host

        # get health
        embedded_health = ilo.get_embedded_health()
        health_at_glance = embedded_health['health_at_a_glance']

        if health_at_glance is not None:
            for key, value in health_at_glance.items():
                for status in value.items():
                    if status[0] == 'status':
                        gauge = 'hpilo_{}_gauge'.format(key)
                        # Map iLO status strings onto numeric gauge values:
                        # OK -> 0, DEGRADED -> 1, anything else -> 2
                        if status[1].upper() == 'OK':
                            prometheus_metrics.gauges[gauge].labels(
                                product_name=product_name,
                                server_name=server_name).set(0)
                        elif status[1].upper() == 'DEGRADED':
                            prometheus_metrics.gauges[gauge].labels(
                                product_name=product_name,
                                server_name=server_name).set(1)
                        else:
                            prometheus_metrics.gauges[gauge].labels(
                                product_name=product_name,
                                server_name=server_name).set(2)

        # get firmware version
        fw_version = ilo.get_fw_version()["firmware_version"]
        # prometheus_metrics.hpilo_firmware_version.set(fw_version)
        prometheus_metrics.hpilo_firmware_version.labels(
            product_name=product_name, server_name=server_name).set(fw_version)

        # get temperature informations
        for temp in embedded_health['temperature']:
            value = embedded_health['temperature'][temp]['currentreading'][0]
            # skip sensors whose reading starts with "N" (i.e. "N/A")
            if value != "N":
                prometheus_metrics.hpilo_temperature_status_gauge.labels(
                    product_name=product_name,
                    server_name=server_name,
                    sensor=temp).set(value)

        # get the amount of time the request took
        REQUEST_TIME.observe(time.time() - start_time)
        # NOTE(review): reads the client's private _sum._value attribute;
        # works with current prometheus_client but is not public API.
        get_module_logger("exporter").info("REQUEST_TIME: {}s".format(
            str(REQUEST_TIME._sum._value)))

        # generate and publish metrics
        metrics = generate_latest(prometheus_metrics.registry)
        self.send_response(200)
        self.send_header('Content-Type', 'text/plain')
        self.end_headers()
        self.wfile.write(metrics)

    elif url.path == '/':
        self.send_response(200)
        self.send_header('Content-Type', 'text/html')
        self.end_headers()
        # BUG FIX: wfile expects bytes in Python 3; writing a str raised
        # TypeError, so the index page must be encoded before sending.
        self.wfile.write("""<html>
<head><title>HP iLO Exporter</title></head>
<body>
<h1>HP iLO Exporter</h1>
<p>Visit <a href="/metrics">Metrics</a> to use.</p>
</body>
</html>""".encode('utf-8'))

    else:
        # Unknown path; suppress the 404 if an error response was already
        # sent during parameter parsing.
        if not error_detected:
            self.send_response(404)
            self.end_headers()
def print_err(*args, **kwargs):
    """Log an error message through the shared "exporter" logger.

    Thin wrapper kept for backward compatibility with the old
    ``print``-based implementation.
    """
    # BUG FIX: logging.Logger.error() does not accept a ``file`` keyword;
    # passing file=sys.stderr (a print() leftover) raised TypeError on
    # every call, so it is dropped here.
    get_module_logger("exporter").error(*args, **kwargs)
def print_info(self):
    """Log the URL this exporter is about to listen on."""
    log = get_module_logger("exporter")
    log.info(
        "Starting exporter on: http://{}:{}{}".format(
            self._address, self._port, self.endpoint))
"option 2": 2 } } }, "regex:pattern": { # regex pattern sections can also be used, their key must be "regex:{pattern}" "type": "section" # when using regex sections that may match other keys they should be last } # and CONF_OPTIONS should be an OrderedDict } # Module name, this should be included in all modules and used when the module name is needed TEXT_NAME = __name__.split(".")[-1] # Get a logger with this file's name log = logger.get_module_logger() # Filled with configuration by `init` config = {} # All modules must have a `queue`, this will be set by the module loader queue = None def init(config_data={}): """ Initializes the module. Generally this will just parse the configuration data and do some validation of it. No MQTT subscriptions should be made here as it is not guaranteed that the MQTT module will be initialized yet, they should be done in ``pre_loop()``. Any long running init tasks should also be done in ``pre_loop()``. Do not start any timers in this function, they must be done
############################################################################# # system # project from logger import get_module_logger # third party import click ############################################################################# # Script # ############################################################################# # constant LOGGER = get_module_logger(__name__) @click.group() @click.argument("path") def cli(path): pass @cli.command() def init(): pass @cli.command() @click.option("--name", help="the name of the pipeline to run.")