def connect_db(conf):
    """Open a PostgreSQL connection from the settings in *conf*.

    Reads conf.db_schema / db_user / db_passwd / db_host / db_port.
    Exits the process with status 1 when the connection cannot be made.

    Returns the live psycopg2 connection object.
    """
    try:
        db_connection = psycopg2.connect(
            database=conf.db_schema,
            user=conf.db_user,
            password=conf.db_passwd,
            host=conf.db_host,
            port=conf.db_port,
        )
    # psycopg2.Error is the root of all psycopg2 exceptions; a bare
    # "except:" here would also swallow KeyboardInterrupt/SystemExit.
    except psycopg2.Error:
        m.error_print("can't connect to DB - please check the configuration")
        sys.exit(1)
    m.if_debug_print("Database connected {}".format(conf.db_host), conf)
    return db_connection
def fill_targets(target_list, conf):
    """Load the YAML target list and append one m.Targets object per entry.

    The YAML file at conf.path_target_list maps a target name to a dict
    with at least 'host' and 'regex' keys. On a read or parse error the
    process exits with status 1.

    Returns *target_list* with the new Targets objects appended.
    """
    try:
        with open(conf.path_target_list, 'r') as fh_target_list:
            yaml_entries = yaml.safe_load(fh_target_list)
    # OSError covers a missing/unreadable file, which the original code
    # let traceback; yaml.YAMLError covers parse failures.
    except (OSError, yaml.YAMLError) as exc:
        m.error_print(
            "error {} opening list of targets: >{}<".format(
                str(exc), conf.path_target_list), conf)
        sys.exit(1)
    # One Targets object per YAML entry. Coerce to str BEFORE stripping so
    # non-string scalars (e.g. a bare IP parsed as int/float) don't crash,
    # matching how the regex field was already handled.
    for name, rest in yaml_entries.items():
        target_list.append(
            m.Targets(name,
                      str(rest['host']).strip(),
                      str(rest['regex']).strip()))
    return target_list
def main(argv=None):
    """Entry point: parse options, load targets, check each, publish results.

    Recognized options: -c <config_file> (used), -h and -i are accepted by
    getopt but currently ignored.
    """
    config_file = ''
    try:
        opts, args = getopt.getopt(sys.argv[1:], "hi:c:")
    except getopt.GetoptError:
        # Usage text previously referred to a different script and option
        # ("consumntodb.py -i <clinet_num>"); this program reads -c.
        print('checker.py -c <config_file>')
        sys.exit(2)
    # NOTE: loop variable renamed from "args" to "arg" so it no longer
    # shadows the getopt leftovers unpacked above.
    for opt, arg in opts:
        if opt == '-c':
            config_file = arg
    # read config
    conf = m.Configuration(config_file, "checker")
    # prepare the array for all the Target objects
    targets = []
    # parse targets.yaml
    targets = fill_targets(targets, conf)
    target_num = len(targets)
    if target_num == 0:
        m.error_print("no targets found, exiting", conf)
        sys.exit(1)
    m.if_debug_print(str(target_num) + " targets", conf)
    # get connected to Kafka
    kafka_handle = connect_kafka(conf)
    # iterate through the target array; check each and write out the result
    for entry in targets:
        check_host(entry, conf)
        write_results(entry, kafka_handle, conf)
def connect_kafka(conf, client_num):
    """Create a Kafka consumer for conf.kafka_topic over SSL.

    *client_num* is folded into the client_id so several consumer
    processes can be told apart on the broker side.

    Exits with status 1 when the consumer cannot be created.
    Returns the KafkaConsumer handle.

    NOTE(review): a second, zero-argument connect_kafka() defined later in
    this file shadows this one at module level — confirm which is intended.
    """
    try:
        handle = KafkaConsumer(
            conf.kafka_topic,
            auto_offset_reset="earliest",
            enable_auto_commit=True,
            bootstrap_servers=conf.kafka_broker,
            security_protocol="SSL",
            ssl_cafile=conf.kafka_SSL_CA,
            ssl_certfile=conf.kafka_SSL_cert,
            ssl_keyfile=conf.kafka_SSL_key,
            client_id="postgres-client-" + str(client_num),
            group_id="postgres-consumer",
            # BUGFIX: a value_deserializer receives raw bytes and must
            # decode then parse. The original json.loads(x).encode("utf-8")
            # parsed first and re-encoded the result, which fails for any
            # non-string JSON payload. This now mirrors the producer's
            # value_serializer (dumps then encode) in reverse.
            value_deserializer=lambda x: json.loads(x.decode("utf-8")),
        )
    # narrowed from a bare "except:" so Ctrl-C still works
    except Exception:
        m.error_print("can't connect to Kafka - please check the configuration")
        sys.exit(1)
    m.if_debug_print("connected to kafka: {}".format(conf.kafka_broker), conf)
    return handle
def connect_kafka(conf):
    """Verify the configured topic exists, then return a JSON KafkaProducer.

    Connects twice: first a throwaway KafkaConsumer to ask the cluster for
    its topic list (exits with status 1 if conf.kafka_topic is missing),
    then the actual producer with a JSON value_serializer.

    NOTE(review): this definition shadows the earlier two-argument
    connect_kafka(conf, client_num) in this file — confirm that is intended.
    """
    # first we need to check if the cluster has the topic setup
    try:
        KafkaClient = KafkaConsumer(
            bootstrap_servers=conf.kafka_broker,
            security_protocol="SSL",
            ssl_cafile=conf.kafka_SSL_CA,
            ssl_certfile=conf.kafka_SSL_cert,
            ssl_keyfile=conf.kafka_SSL_key,
        )
    # narrowed from a bare "except:" so Ctrl-C still works
    except Exception:
        m.error_print(
            "can't connect to Kafka - please check the configuration")
        sys.exit(1)
    # we check if the topic is already setup
    if conf.kafka_topic not in KafkaClient.topics():
        m.error_print("The Kafka cluster does not have the topic \"" +
                      conf.kafka_topic +
                      "\". Please add it or change the config.")
        sys.exit(1)
    KafkaClient.close()
    # found on https://help.aiven.io/en/articles/489572-getting-started-with-aiven-kafka ;-)
    # https://github.com/msgpack/msgpack-python for serialization
    try:
        KafkaProducerHandle = KafkaProducer(
            bootstrap_servers=conf.kafka_broker,
            security_protocol="SSL",
            ssl_cafile=conf.kafka_SSL_CA,
            ssl_certfile=conf.kafka_SSL_cert,
            ssl_keyfile=conf.kafka_SSL_key,
            value_serializer=lambda x: json.dumps(x).encode("utf-8"),
        )
    # narrowed from a bare "except:" so Ctrl-C still works
    except Exception:
        m.error_print(
            "can't connect to Kafka - please check the configuration")
        sys.exit(1)
    m.if_debug_print("connected to kafka: {}".format(conf.kafka_broker), conf)
    return KafkaProducerHandle