Example 1
def __init__(self, jsondata_list, table_name: str):
    self.connect = get_db_connector()      # database connection from the shared helper
    self.jsondata_list = jsondata_list     # rows to write for this level
    self.table_name = table_name           # e.g. "dms_team_avg"
    self.level = table_name.split("_")[1]  # level name is the middle part of the table name
    self.id_name = self.level + "_id"      # id column for that level
    self.redis = RedisClient().redis       # redis client taken from the wrapper
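These snippets rely on two helpers that are never shown: get_db_connector() and RedisClient. A minimal sketch of what they might look like, assuming MySQL via mysql-connector-python (suggested by the cursor(dictionary=True) calls in later examples) and a thin wrapper around redis-py; module layout, credentials and parameter names below are placeholders, not values from the original project:

import mysql.connector
import redis


def get_db_connector():
    # Assumed helper: returns a MySQL connection; host, user, password and
    # database are placeholders.
    return mysql.connector.connect(host="localhost", user="root",
                                   password="secret", database="sms")


class RedisClient:
    # Assumed wrapper exposing a redis-py client on .redis, matching
    # self.redis = RedisClient().redis above.
    def __init__(self, host="localhost", port=6379, db=0):
        self.redis = redis.Redis(host=host, port=port, db=db)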
Example 2
def get_daily(level: str):     # stat
    table_name = "dms_"+level+"_avg"
    con = get_db_connector()
    cursor = con.cursor(dictionary=True)
    today = datetime.datetime.today()
    datetime_now = datetime.datetime(today.year, today.month, today.day,
                                     DAILY_UPDATE_TIME["hour"], DAILY_UPDATE_TIME["minute"], DAILY_UPDATE_TIME["second"])
    # Fetch rows from the avg table within five minutes after the current day's 00:00
    sql = "SELECT a_efficiency, a_accuracy, a_workhour, time, {} FROM {} WHERE time BETWEEN '{}' and '{}' "\
        .format(level+"_id", table_name, datetime_now, datetime_now+datetime.timedelta(seconds=DAILY_UPDATE_ITERVAL))
    cursor.execute(sql)
    avg_data_list = cursor.fetchall()
    if not avg_data_list:
        raise Exception
    return avg_data_list
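get_daily relies on two module-level constants that are not shown here. Their shape follows from how they are used above; the concrete values below are illustrative only:

import datetime

# Assumed configuration: the key names and the seconds unit follow from the
# code above, the values themselves are placeholders.
DAILY_UPDATE_TIME = {"hour": 0, "minute": 0, "second": 0}
DAILY_UPDATE_ITERVAL = 300  # five minutes, matching the comment in get_daily

# Hypothetical call: fetch today's per-team averages.
for row in get_daily("team"):
    print(row["time"], row["a_efficiency"], row["a_accuracy"], row["a_workhour"])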
Example 3
def get_structure(level: str):
    """
    :param level: 要查询的结构的级别
    :return: 该级别对应下级的结构 structure:dict, 级别名称level: str
    """
    structure = dict()
    connection = get_db_connector()
    cursor = connection.cursor()
    if level == "team":
        cursor.execute(
            "SELECT team_id FROM sms_team_stat_member GROUP BY team_id")
        team_id_list = [i[0] for i in cursor.fetchall()]
        for team_id in team_id_list:
            cursor.execute(
                "SELECT stat_id FROM sms_team_stat_member WHERE team_id = {}".
                format(team_id))
            structure[team_id] = [i[0] for i in cursor.fetchall()]
    elif level in ["group", "workshop"]:
        cursor.execute(
            "SELECT {} FROM sms_team_group_workshop GROUP BY {}".format(
                level + "_id", level + "_id"))
        group_id_list = [i[0] for i in cursor.fetchall()]
        for group_id in group_id_list:
            cursor.execute(
                "SELECT {} FROM sms_team_group_workshop WHERE {} = {} GROUP BY {}"
                .format(levels[level] + "_id", level + "_id", group_id,
                        levels[level] + "_id"))
            structure[group_id] = [i[0] for i in cursor.fetchall()]
    elif level == "dpt":
        cursor.execute(
            "SELECT workshop_id FROM sms_team_group_workshop GROUP BY workshop_id"
        )
        workshop_list = [i[0] for i in cursor.fetchall()]
        structure[1] = workshop_list
    else:
        # raise Exception
        return None
    cursor.close()
    connection.close()
    return structure, level
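The group/workshop branch reads a module-level levels mapping that is not shown. Judging from the docstring (each level maps to the level directly below it), it is probably something like the following; treat the exact contents as an assumption:

# Assumed hierarchy mapping: each level points to its child level.
levels = {"workshop": "group", "group": "team"}

# Hypothetical call: build the team -> stat structure.
structure, level_name = get_structure("team")
# structure looks like {team_id: [stat_id, ...], ...}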
Example 4
# coding: utf-8
import random
from decimal import Decimal
from db_tools.tools.tools import date_range
from db_tools.tools.tools import get_db_connector

# Open the database connection
db = get_db_connector()
cursor = db.cursor()
random.seed(12345)


def insert_to_mysql(table_name, begin_date, end_date):
    employee_sql = "SELECT employee_id FROM sms_member WHERE type=1;"
    cursor.execute(employee_sql)
    employee_data = cursor.fetchall()
    # Standard target values for each workstation (stat)
    stat_standard_sql = "SELECT m.stat_id, t.s_efficiency, t.s_accuracy, t.s_workhour FROM standard_team AS t " \
                        "JOIN sms_team_stat_member AS m ON t.team_id=m.team_id;"
    cursor.execute(stat_standard_sql)
    stat_standard_data = cursor.fetchall()

    print(len(employee_data))
    print(len(stat_standard_data))
    for _date in date_range(begin_date, end_date):
        # print(_date)
        flag = 0
        for i in range(0, len(employee_data)):
            if i % 3 == 0:
                # print(flag)
                data = stat_standard_data[flag]
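date_range is imported from db_tools.tools.tools but its body is not shown. A minimal sketch consistent with the loop above, assuming it yields one date per day from begin_date up to end_date:

import datetime

def date_range(begin_date, end_date):
    # Assumed helper: yields date/datetime objects day by day.
    # Whether end_date itself is included cannot be seen from the snippet.
    current = begin_date
    while current < end_date:
        yield current
        current += datetime.timedelta(days=1)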
Example 5
def insert_avg_data(jsondata_list, level):
    connect = get_db_connector()
    cursor = connect.cursor(dictionary=True)
    # First, read everything currently in the avg table
    sql = "SELECT * FROM dms_{}_avg".format(level)
    cursor.execute(sql)
    old_avg_data = cursor.fetchall()

    # If the avg table is empty, or there is a gap in its time range, requery the online table, recompute the averages and insert them into the avg table
    if len(old_avg_data) == 0:
        cursor.close()
        return "insert", jsondata_list

    # First check whether the time of the last avg row is more than 1 min behind the current time
    # step2: select avg(*) from dms_{}_online WHERE time >= '{}' and time <= '{}'   # within the last 24 hours
    # step3: write the recomputed averages back into jsondata_list
    # Count all online rows within one day of now
    cur_time = datetime.datetime.strptime(jsondata_list[0]['time'],
                                          "%Y-%m-%dT%H:%M:%S.%f")
    # cur_time = jsondata_list[0]["time"]
    yesterday = cur_time - datetime.timedelta(days=1) - datetime.timedelta(
        minutes=1)  # earliest real-time timestamp from yesterday
    last_research_time = cur_time - datetime.timedelta(
        seconds=RESEARCH_INTERVAL)  # cut-off: if no avg row is newer than this, requery
    sql = "SELECT count(*) FROM dms_{}_avg WHERE time > '{}'".format(
        level, last_research_time)
    cursor.execute(sql)
    if not cursor.fetchone()['count(*)']:
        sql = "SELECT AVG(efficiency) as efficiency, AVG(accuracy) as accuracy, AVG(workhour) as workhour, {} as id " \
              "FROM dms_{}_online WHERE time >= '{}' AND time <= '{}' GROUP BY {} ORDER BY id"\
            .format(level+"_id", level, yesterday, cur_time, level+"_id")
        cursor.execute(sql)
        new_avg_data = cursor.fetchall()
        for index, json_data in enumerate(jsondata_list):
            for key in ['efficiency', 'accuracy', 'workhour']:
                json_data[key] = new_avg_data[index][key]
        return "update", jsondata_list

    sql = "SELECT count(*) FROM dms_{}_online \
            WHERE time >= '{}' and time <= '{}'".format(
        level, yesterday, cur_time)
    cursor.execute(sql)
    # Under normal conditions num is 7,200,000
    num = cursor.fetchone()
    old_online_data_num = num['count(*)'] / len(jsondata_list) - 1

    # Fetch the earliest online row within one day of now
    sql = "SELECT * FROM dms_{}_online \
            WHERE time >= '{}' and time <= '{}' LIMIT 1".format(
        level, yesterday, cur_time)
    cursor.execute(sql)
    earliest_online_data = cursor.fetchone()
    for i, one_old_avg_data in enumerate(old_avg_data):  # previous average values
        if earliest_online_data['time'] > yesterday:
            jsondata_list[i] = get_avg_no_fine(jsondata_list[i],
                                               one_old_avg_data,
                                               old_online_data_num)
        else:
            jsondata_list[i] = get_avg_fine(jsondata_list[i],
                                            earliest_online_data,
                                            one_old_avg_data,
                                            old_online_data_num)
    cursor.close()
    return "update", jsondata_list