def kafka_get_time(host, topic):
    """
    Consume messages from a Kafka topic and record per-message latency in PostgreSQL.

    Each message is expected to be a UTF-8 encoded JSON object containing at
    least the keys ``"it"`` (processing-end timestamp, cloud side) and ``"t"``
    (event-occurrence timestamp, cloud side).  For every message the end time,
    start time and their difference are inserted into the ``time`` table.

    NOTE(review): this iterates the consumer forever; it only returns when the
    consumer is closed externally or an exception propagates.

    :param host: Kafka bootstrap server address (``host[:port]``).
    :param topic: Kafka topic to subscribe to.
    :return: None
    :raises json.JSONDecodeError: if a message payload is not valid JSON.
    :raises KeyError: if a payload lacks the ``"it"`` or ``"t"`` key.
    """
    consumer = kafka.KafkaConsumer(topic, bootstrap_servers=host)

    # PostgreSQL connection settings.  Use a dedicated name instead of
    # reassigning the `host` parameter (the original shadowed the Kafka host,
    # which made the two unrelated addresses easy to confuse).
    db_host = '127.0.0.1'
    db_password = '******'
    connet = postgres.connect(db_host, db_password)

    try:
        for msg in consumer:
            # msg.value is raw bytes; decode as UTF-8 before JSON parsing.
            msg_str = json.loads(msg.value.decode('utf-8'))

            # "it": time the message finished processing (cloud side).
            time_end = float(msg_str["it"])
            # "t": time the original event occurred (cloud side).
            time_start = float(msg_str["t"])
            time_use = time_end - time_start

            # Values are guaranteed numeric by float() above, so interpolation
            # is injection-safe here; the project helper `postgres.exec_table`
            # does not accept bound parameters.
            insert_sql = (
                "INSERT INTO time(time_end, time_start, time_use) VALUES "
                "({}, {}, {})".format(time_end, time_start, time_use)
            )
            postgres.exec_table(connet, insert_sql)
            connet.commit()
    finally:
        # Release both resources even if decoding/insertion fails.
        consumer.close()
        connet.close()
# --- Beispiel #2 (second scraped example; stray separator turned into a comment so the file parses) ---
# author:丑牛
# datetime:2020/5/25 9:03
from PostgresqlTest import postgres

if __name__ == "__main__":
    # Connection settings for the test database.
    host = '192.168.175.198'
    db_password = '******'
    connet = postgres.connect(host, db_password)

    # Sample statements kept for manual experimentation; only the SELECT
    # below is actually executed.
    create_sql = 'CREATE TABLE use_kafa ( cpu float8,mem float8)'
    drop_sql = 'drop table if exists  time_tsts;'
    # Fixed placeholders: PostgreSQL DB-API drivers (e.g. psycopg2) use %s,
    # not the "?" qmark style — the original string could never execute.
    insert_sql = 'insert into ts_event(topic, label, value, timestamp, message, state, rule, starttime, endtime) ' \
                 'values (%s,%s,%s,%s,%s,%s,%s,%s,%s) '
    select_sql = 'select * from ts_event'

    try:
        postgres.exec_table(connet, select_sql)
        connet.commit()
    finally:
        # Always release the connection, even if the query fails
        # (the original leaked it on any exception before close()).
        connet.close()