def kafka_get_time(host, topic):
    """
    Consume messages from a Kafka topic and record per-message latency.

    Each message body is expected to be a JSON object carrying numeric
    fields "it" (processing-end timestamp) and "t" (event-start
    timestamp); the elapsed time is inserted into the local ``time``
    table, one row per message.

    :param host: Kafka bootstrap server address.
    :param topic: Kafka topic to consume from.
    :return: None (blocks on the consumer iterator).
    """
    consumer = kafka.KafkaConsumer(topic, bootstrap_servers=host)
    # Connect to the local PostgreSQL database. Use a separate name so the
    # Kafka ``host`` parameter is not shadowed (the original rebound it).
    db_host = '127.0.0.1'
    db_password = '******'
    connet = postgres.connect(db_host, db_password)
    for msg in consumer:
        # msg.value is raw bytes; decode to str before json.loads to avoid
        # the 'utf-32-be' / JSONDecodeError issues seen with raw bytes.
        msg_str = json.loads(msg.value.decode())
        # assumes each message is a dict with numeric "it"/"t" keys
        # (cloud-side payload shape) — TODO confirm against the producer.
        time_end = float(msg_str["it"])    # cloud-side processing-end time
        time_start = float(msg_str["t"])   # event occurrence time
        time_use = time_end - time_start
        # Values are floats produced by float(), so interpolation cannot
        # inject SQL here; NOTE(review): prefer parameterized queries if
        # postgres.exec_table supports them.
        insert_sql = (
            "INSERT INTO time(time_end, time_start, time_use) VALUES "
            f"({time_end},{time_start},{time_use})"
        )
        postgres.exec_table(connet, insert_sql)
        connet.commit()
# --- Example #2 ---
# author:丑牛
# datetime:2020/5/25 9:03
from PostgresqlTest import postgres

if __name__ == "__main__":
    # Connection settings for the remote PostgreSQL instance.
    host = '192.168.175.198'
    db_password = '******'
    connet = postgres.connect(host, db_password)
    # Statements prepared for reference; only the SELECT below is executed.
    create_sql = 'CREATE TABLE use_kafa ( cpu float8,mem float8)'
    drop_sql = 'drop table if exists  time_tsts;'
    # PostgreSQL drivers (psycopg-style) use %s placeholders, not sqlite's
    # '?', so the template is corrected to be usable if ever executed.
    insert_sql = 'insert into ts_event(topic, label, value, timestamp, message, state, rule, starttime, endtime) ' \
                 'values (%s,%s,%s,%s,%s,%s,%s,%s,%s) '
    select_sql = 'select * from ts_event'
    postgres.exec_table(connet, select_sql)
    connet.commit()
    connet.close()
# --- Example #3 ---
from PostgresqlTest import postgres


def inner_get_avg(url):
    """
    Fetch executor metrics from a Spark REST API executors endpoint.

    :param url: URL of the application's ``/executors`` endpoint.
    :return: tuple ``(cpu, mem)`` — ``totalCores`` and on-heap storage
             memory of the executor at index 1 of the JSON response.
    """
    response = requests.get(url)
    try:
        # Parse the JSON body once instead of twice (the original called
        # response.json() per field, re-parsing the whole payload).
        executor = response.json()[1]
        cpu = executor["totalCores"]
        mem = executor["memoryMetrics"]["totalOnHeapStorageMemory"]
    finally:
        # Release the connection even if indexing/parsing raises.
        response.close()
    return cpu, mem


if __name__ == '__main__':
    # Spark REST endpoint for the target application's executors.
    spark_url = 'http://192.168.175.230:18088/api/v1/applications/application_1594110503511_0098/executors'
    host = '127.0.0.1'
    db_password = '******'
    connect = postgres.connect(host, db_password)
    try:
        # Warm-up call; return value deliberately ignored (as in original).
        inner_get_avg(spark_url)
        # Sample once per second for 5 minutes (300 iterations), replacing
        # the manual ``n`` counter with a bounded loop.
        for _ in range(60 * 5):
            cpu_use, mem_use = inner_get_avg(spark_url)
            # Values come back from the Spark API as numbers, so the
            # interpolation is safe; NOTE(review): parameterized queries
            # would still be preferable if exec_table supports them.
            insert_sql = "INSERT INTO use(cpu, mem) VALUES (" + str(
                cpu_use) + "," + str(mem_use) + ")"
            postgres.exec_table(connect, insert_sql)
            connect.commit()
            time.sleep(1)
    finally:
        # Always close the DB connection, even if a request or insert fails
        # (the original leaked it on any mid-loop exception).
        connect.close()