import logging

from pyzabbix.api import ZabbixAPI  # assuming the py-zabbix package, which takes url/user/password


def zbx_connect(config, env):
    """Connect to the Zabbix API for the given environment and return the client."""
    mode_verbose = config["default"]["mode_verbose"]
    zbx_url = config[env]["zbx_url"]
    if mode_verbose:
        logging.info("zabbix URL = %s", zbx_url)
    zbx_usr = config[env]["zbx_usr"]
    zbx_pwd = config[env]["zbx_pwd"]
    zapi = ZabbixAPI(url=zbx_url, user=zbx_usr, password=zbx_pwd)
    if mode_verbose:
        logging.info("connected, zabbix API version = %s", zapi.api_version())
    return zapi
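# --- Hypothetical usage sketch, not part of the original script ---
# The config layout below is inferred from the lookups in zbx_connect()
# (a "default" section plus one section per environment); all section
# names and values are illustrative examples.
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    example_config = {
        "default": {"mode_verbose": True},
        "prod": {
            "zbx_url": "https://zabbix.example.com",
            "zbx_usr": "api_user",
            "zbx_pwd": "secret",
        },
    }
    zapi = zbx_connect(example_config, "prod")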
import json
from datetime import datetime

import pytz
from kafka import KafkaConsumer, KafkaProducer, TopicPartition
from kafka.errors import KafkaError
from pyzabbix.api import ZabbixAPI  # assuming the py-zabbix package, as above

# NOTE: bootstrap_servers, client_id, security_protocol, the ssl_* options,
# kafka_topic, zabbix_host, zabbix_name, zabbix_login, zabbix_passwd, log
# and the chunks() helper are expected to be defined at module level.


def transfer_data():

    def get_latest_timestamp(kafka_topic):
        """Return the timestamp (in seconds) of the last message in the topic, or 0."""
        latest_timestamp = 0
        topic_partition = TopicPartition(
            topic=kafka_topic,
            partition=0,
        )
        end_offset = consumer.end_offsets([topic_partition])[topic_partition]
        if end_offset > 0:
            # The partition is assigned only after a poll(); then we can seek.
            consumer.poll(5, 1)
            consumer.seek(topic_partition, end_offset - 1)
            message = consumer.poll(10000, 500)
            # Guard against a poll timeout returning no records for the partition.
            msgs = message.get(topic_partition, [])
            if len(msgs) > 0:
                record = msgs[-1]
                latest_timestamp = record.timestamp / 1000.0
        return latest_timestamp

    try:
        producer = KafkaProducer(
            bootstrap_servers=bootstrap_servers,
            client_id=client_id,
            retries=-1,
            acks=1,
            request_timeout_ms=600000,
            api_version=(1, 0, 0),
            compression_type='lz4',
            security_protocol=security_protocol,
            ssl_context=ssl_context,
            ssl_check_hostname=ssl_check_hostname,
            ssl_cafile=ssl_cafile,
            ssl_certfile=ssl_certfile,
            ssl_keyfile=ssl_keyfile,
            ssl_password=ssl_password,
            value_serializer=lambda m: json.dumps(m).encode('utf-8'))
        consumer = KafkaConsumer(
            kafka_topic,
            api_version=(1, 0, 0),
            value_deserializer=lambda m: json.loads(m.decode('utf-8')),
            session_timeout_ms=600000,
            security_protocol=security_protocol,
            ssl_context=ssl_context,
            ssl_check_hostname=ssl_check_hostname,
            ssl_cafile=ssl_cafile,
            ssl_certfile=ssl_certfile,
            ssl_keyfile=ssl_keyfile,
            ssl_password=ssl_password,
            bootstrap_servers=bootstrap_servers)
        zapi = ZabbixAPI(
            url="%s/api_jsonrpc.php" % zabbix_host,
            user=zabbix_login,
            password=zabbix_passwd,
        )
        log.info("Zabbix API version: %s", zapi.api_version())

        latest_timestamp = get_latest_timestamp(kafka_topic)
        tz = pytz.timezone('Europe/Moscow')
        log.info("Latest timestamp from kafka: %s",
                 datetime.fromtimestamp(latest_timestamp, tz=tz))

        # Get all monitored hosts
        hosts = zapi.host.get(monitored_hosts=1,
                              selectGroups="extend",
                              output=["hostid", "groups", "name"])
        hosts_chunked = chunks(hosts, 10)
        sent = 0
        for hosts_chunk in hosts_chunked:
            h_ids = [h['hostid'] for h in hosts_chunk]
            items = zapi.item.get(
                hostids=h_ids,
                monitored=True,
                output=["itemid", "hostid", "name"],
            )
            items_chunked = chunks(items, 10)
            for items_chunk in items_chunked:
                i_ids = [i['itemid'] for i in items_chunk]
                # Fetch numeric (float) history newer than the last Kafka message.
                current_history = zapi.history.get(
                    history=0,
                    time_from=int(latest_timestamp),
                    # time_till=int(latest_timestamp + 60),
                    hostids=h_ids,
                    itemids=i_ids,
                    sortfield="clock",
                    sortorder="DESC",
                    output=["itemid", "value", "clock"])
                log.debug(current_history)
                for h_item in current_history:
                    host_id = [
                        i['hostid'] for i in items_chunk
                        if i['itemid'] == h_item['itemid']
                    ][-1]
                    groups_list = [
                        h['groups'] for h in hosts_chunk
                        if h['hostid'] == host_id
                    ]
                    groups = [g['name'] for g in groups_list[0]]
                    host_name = [
                        h['name'] for h in hosts_chunk
                        if h['hostid'] == host_id
                    ][-1]
                    item_name = [
                        i['name'] for i in items_chunk
                        if i['itemid'] == h_item['itemid']
                    ][-1]
                    result = {
                        "host_id": host_id,
                        "item_id": h_item['itemid'],
                        "value": h_item['value'],
                        "clock": h_item['clock'],
                        "metric_name": item_name,
                        "group": groups,
                        "host_name": host_name,
                        "zabbix_host": zabbix_host,
                        "zabbix_name": zabbix_name
                    }
                    log.debug(result)
                    producer.send(kafka_topic, result)
                    sent += 1
        log.info("Pushed %s metrics", sent)
        log.info("Last item time: %s",
                 datetime.fromtimestamp(int(result['clock']), tz=tz))
    except KafkaError as ex:
        # Decide what to do if the produce request failed...
        log.exception(ex)
    except Exception as ex:
        # Do not break the loop
        log.exception(ex)
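# --- Hypothetical helper sketch, not shown in this excerpt ---
# transfer_data() batches hosts and items through a chunks() helper before
# querying the Zabbix API. A minimal generator matching that usage (a
# sketch, assuming chunks(seq, size) yields size-sized slices) could be:
def chunks(seq, size):
    """Yield successive size-sized slices of seq."""
    for start in range(0, len(seq), size):
        yield seq[start:start + size]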