def operate_data_send(operate_data):
    """Serialize per-key operation summaries and insert them into Postgres.

    Parameters
    ----------
    operate_data : dict
        Maps a key (presumably a user id -- TODO confirm against caller)
        to a describe dict containing at least 'OperNum'.  Entries whose
        value is None are skipped.

    Side effects: executes ``sql_insert`` once per retained entry on a
    fresh ``Postgres`` connection, commits, and closes the connection.
    """
    # Hoisted out of the loop: one timestamp for the whole batch.  The
    # original recomputed it per row (and named the local `datetime`,
    # shadowing a common module name).
    now = dt.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    send_data = []
    # items() gives key and value in one lookup (original did keys()+get()).
    for key, describe in operate_data.items():
        if describe is None:
            continue
        oper_num = describe.get('OperNum')
        json_describe = json.dumps(describe, ensure_ascii=False)
        send_data.append([now, json_describe, key, oper_num])

    pg = Postgres()
    conn, cr = pg.db_conn()
    try:
        for data in send_data:
            cr.execute(sql_insert, data)
        # Single commit for the batch instead of per-row commits.
        conn.commit()
    finally:
        # Ensure the connection is released even if an execute fails.
        pg.db_close()
def call_pg_data(sql, pg_inf=None):
    """Execute *sql* against Postgres and return the rows as a DataFrame.

    Parameters
    ----------
    sql : str
        Query to execute.
    pg_inf : dict, optional
        Connection parameters forwarded to ``Postgres``; when omitted the
        default ``Postgres()`` configuration is used.

    Returns
    -------
    pandas.DataFrame
        The query result converted via ``tuple2frame``, or an empty
        DataFrame when the connection fails or the query returns no rows.
    """
    db = Postgres(pg_inf) if pg_inf else Postgres()
    pg_conn, cr = db.db_conn()
    # Bug fix: the original fell off the end and returned None when the
    # connection could not be established; always return a DataFrame so
    # callers get a consistent type.
    if not pg_conn:
        return pd.DataFrame({})
    cr.execute(sql)
    index = cr.description  # column metadata for tuple2frame
    result = cr.fetchall()
    db.db_close()
    if result:
        return tuple2frame(result, index)
    return pd.DataFrame({})
class OperateStatistics():
    """Aggregate statistics over an operation-log DataFrame.

    The DataFrame is expected to carry a 'userid' column, an 'opertime'
    column, and an iterable of operation-type codes at positional column
    index 6 -- presumably produced by the upstream query; TODO confirm.
    """

    def __init__(self, operate_data=None):
        """Store the log frame and zero one counter per KEYWORD entry.

        NOTE(review): the original signature used a mutable default
        argument (``pd.DataFrame({})``) shared across every call; a fresh
        empty frame is now created per instance instead.  Passing nothing
        still yields an empty DataFrame, so callers are unaffected.
        """
        self.operate_data = pd.DataFrame({}) if operate_data is None else operate_data
        # One zeroed counter per known operation-type keyword.
        self.count_dict = dict.fromkeys(KEYWORD, 0)

    def operate_type_percent(self):
        """Tally every known operation type across all rows.

        Returns the instance's count_dict (also mutated in place); type
        codes not present in KEYWORD are ignored.
        """
        for _, row in self.operate_data.iterrows():
            # row.iloc[6] is the explicit positional form of the
            # original row[6] (integer-label fallback is deprecated).
            for oper_type in row.iloc[6]:
                if oper_type in self.count_dict:
                    self.count_dict[oper_type] += 1
        return self.count_dict

    def user_operate_sum(self):
        """Per-user summary: distinct operation timestamps and type counts.

        Returns
        -------
        dict
            Maps each userid to ``{'OperNum': <distinct opertime count>,
            'TypeCount': <per-type counter dict from a fresh Operater>}``.
        """
        result = {}
        for userid, group in self.operate_data.groupby('userid'):
            operater = Operater()
            # Distinct timestamps = number of operations for this user.
            # len(drop_duplicates()) kept (nunique() would drop NaN).
            result[userid] = {'OperNum': len(group['opertime'].drop_duplicates())}
            for _, row in group.iterrows():
                for oper_type in row.iloc[6]:
                    if oper_type in operater.count_dict:
                        operater.count_dict[oper_type] += 1
            result[userid]['TypeCount'] = operater.count_dict
        return result

    def alarm_operate_match(self, s_datetime, e_datetime):
        """Run the alarm/operation matching SQL on the '46' server.

        Parameters
        ----------
        s_datetime, e_datetime
            Start/end of the matching window, substituted into
            ``sql_alarm_operate_match`` together with the host and
            ``MATCH_LIMITTIME``.

        Side effects: keeps the Postgres handle on ``self.pg``; the
        connection is committed but NOT closed here -- presumably the
        caller closes it; TODO confirm.
        """
        pg_inf = pg_inf_46
        self.pg = Postgres(pg_inf)
        host = pg_inf.get('host')
        conn, cr = self.pg.db_conn()
        if cr:
            cr.execute(sql_alarm_operate_match.format(
                host, s_datetime, e_datetime, MATCH_LIMITTIME))
            conn.commit()