async def get_estimated_value_by_id(
    conn: Database,
    asset_id: int,
    data_id: int,
    session: Session = None,
):
    """Return one AssetHI row's id and its deserialized estimation payload.

    :param conn: async database connection used to execute the query.
    :param asset_id: asset whose per-asset HI table/model is selected.
    :param data_id: primary key of the HI row to fetch.
    :param session: optional SQLAlchemy Session used only to build the query;
        a fresh one is created per call when omitted.
    :return: ``{"id": ..., "est": <decoded JSON>}``.
    """
    # BUGFIX: the previous default `session_make(engine=None)` ran once at
    # import time, so every call silently shared a single Session object.
    if session is None:
        session = session_make(engine=None)
    hi_model = AssetHI.model(point_id=asset_id)
    query = session.query(hi_model.id, hi_model.est).filter(hi_model.id == data_id)
    # The Session only builds SQL; execution goes through the async `conn`.
    res = await conn.fetch_one(query2sql(query))
    # `est` is stored as a JSON string; decode it for the caller.
    return {"id": res["id"], "est": json.loads(res["est"])}
async def get_similarity_threshold_recently(
    conn: Database,
    asset_id: int,
    limit: int,
    session: Session = None,
):
    """Fetch the most recent ``limit`` similarity/threshold rows for an asset.

    Rows are selected newest-first, then reversed so the caller receives them
    in ascending time order (convenient for plotting).

    :param conn: async database connection used to execute the query.
    :param asset_id: asset whose per-asset HI table/model is selected.
    :param limit: maximum number of rows to return.
    :param session: optional SQLAlchemy Session used only to build the query;
        a fresh one is created per call when omitted.
    :return: column-oriented structure from ``multi_result_to_array``.
    """
    # BUGFIX: the previous default `session_make(engine=None)` ran once at
    # import time, so every call silently shared a single Session object.
    if session is None:
        session = session_make(engine=None)
    hi_model = AssetHI.model(point_id=asset_id)
    query = (
        session.query(hi_model.id, hi_model.time, hi_model.similarity,
                      hi_model.threshold)
        .order_by(hi_model.time.desc())
        .limit(limit)
    )
    res = await conn.fetch_all(query2sql(query))
    res.reverse()  # newest-first from SQL -> chronological for the caller
    return multi_result_to_array(res)
async def get_similarity_threshold_during_time(
    conn: Database,
    asset_id: int,
    time_before: str,
    time_after: str,
    session: Session = None,
):
    """Fetch similarity/threshold rows inside a time window for an asset.

    :param conn: async database connection used to execute the query.
    :param asset_id: asset whose per-asset HI table/model is selected.
    :param time_before: upper bound of the window (inclusive via BETWEEN).
    :param time_after: lower bound of the window (inclusive via BETWEEN).
    :param session: optional SQLAlchemy Session used only to build the query;
        a fresh one is created per call when omitted.
    :return: column-oriented structure from ``multi_result_to_array``.
    """
    # BUGFIX: the previous default `session_make(engine=None)` ran once at
    # import time, so every call silently shared a single Session object.
    if session is None:
        session = session_make(engine=None)
    hi_model = AssetHI.model(point_id=asset_id)
    # NOTE(review): BETWEEN takes (low, high) -> (time_after, time_before);
    # the parameter names read reversed but the order matches the other
    # *_during_time helpers in this module.
    query = session.query(
        hi_model.id, hi_model.time, hi_model.similarity, hi_model.threshold
    ).filter(hi_model.time.between(str(time_after), str(time_before)))
    res = await conn.fetch_all(query2sql(query))
    return multi_result_to_array(res)
async def create(conn: Database, data):
    """Insert a new Asset row and, for pump units, create its HI data table.

    :param conn: async database connection; a transaction is opened on it.
    :param data: payload whose ``data["base"]`` dict maps to Asset columns.
    :return: ``True`` on success, ``False`` on any failure (best-effort API;
        callers branch on the boolean rather than catching exceptions).
    """
    data = jsonable_encoder(data)
    transaction = await conn.transaction()
    new_id = None  # renamed from `id`, which shadowed the builtin
    try:
        new_id = await conn.execute(query=Asset.__table__.insert(),
                                    values=data["base"])
        # Register the per-asset HI model on the shared metadata.
        model = AssetHI.model(point_id=new_id)
        # asset_type 0 == pump unit: materialize its dedicated HI table.
        if data["base"]["asset_type"] == 0:
            await conn.execute(str(CreateTable(model.__table__).compile(meta_engine)))
        await transaction.commit()
        return True
    except Exception:
        # Deliberate compensation instead of rollback: MySQL DDL (CREATE
        # TABLE) auto-commits, so rolling back could not undo the insert once
        # the DDL has run — delete the orphaned Asset row explicitly.
        if new_id:
            query = Asset.__table__.delete().where(Asset.__table__.c.id == new_id)
            await conn.execute(
                query=str(query.compile(compile_kwargs={"literal_binds": True}))
            )
        await transaction.commit()
        return False
async def get_estimated_value_multi(
    conn: Database,
    asset_id: int,
    time_before: str,
    time_after: str,
    session: Session = None,
):
    """Fetch raw vs. estimated values for an asset inside a time window.

    Builds a column-oriented dict: ``id`` and ``time`` lists plus, for each
    measurement-point label in the stored ``est`` payload, a
    ``"<label>—原始值"`` (raw) and ``"<label>-估计值"`` (estimated) series.

    :param conn: async database connection used to execute the query.
    :param asset_id: asset whose per-asset HI table/model is selected.
    :param time_before: upper bound of the window (inclusive via BETWEEN).
    :param time_after: lower bound of the window (inclusive via BETWEEN).
    :param session: optional SQLAlchemy Session used only to build the query;
        a fresh one is created per call when omitted.
    :return: dict of parallel lists keyed as described above.
    """
    # BUGFIX: the previous default `session_make(engine=None)` ran once at
    # import time, so every call silently shared a single Session object.
    if session is None:
        session = session_make(engine=None)
    hi_model = AssetHI.model(point_id=asset_id)
    query = session.query(hi_model.id, hi_model.time, hi_model.est).filter(
        hi_model.time.between(str(time_after), str(time_before)))
    res = await conn.fetch_all(query2sql(query))
    dic = {}
    for row in res:
        dic.setdefault("id", []).append(row["id"])
        dic.setdefault("time", []).append(str(row["time"]))
        serialized = json.loads(row["est"])
        for index, label in enumerate(serialized["label"]):
            # NOTE(review): the raw key uses an em dash ("—") while the
            # estimated key uses a hyphen ("-"); looks unintentional, but
            # these keys are consumer-facing labels, so they are preserved
            # byte-for-byte — confirm with frontend before normalizing.
            dic.setdefault(label + "—原始值", []).append(serialized["raw"][index])
            dic.setdefault(label + "-估计值", []).append(serialized["est"][index])
    return dic
def mset_evaluate(cycle_number):
    """Run one MSET evaluation pass over all pumps and persist the results.

    For each pump with measurement points and a full cycle of base data,
    evaluates similarity/threshold/estimates via its MSET model, inserts one
    AssetHI row per sample (plus MsetWarningLog rows for warning indices),
    then updates the pump's Asset status/health snapshot.

    :param cycle_number: number of base-data samples required per pump; pumps
        with fewer samples available are skipped this pass.
    :return: total count of AssetHI rows successfully inserted.
    """
    estimate_count = 0
    session = session_make(engine=meta_engine)
    pumps = fetch_pumps(session)
    for pump in pumps:
        # Per-asset dynamic table/model for this pump's health indicators.
        asset_hi_model = AssetHI.model(point_id=pump.asset_id)
        mps = fetch_mps(session=session, asset_id=pump.asset_id)
        if len(mps) > 0:
            base_data_list = fetch_base_data(
                session=session,
                cycle_number=cycle_number,
                base_mp=mps[0],
                asset_id=pump.asset_id,
            )
            # Only evaluate when a complete cycle of samples is available.
            if len(base_data_list) == cycle_number:
                feature_matrix = fetch_feature_matrix(
                    session=session, base_data_list=base_data_list, mps=mps)
                sim, thres, Kest, warning_index = evaluate(
                    path=pump.mset_model_path, feature_matrix=feature_matrix)
                evaluate_res_insert_value = []
                # Build one HI row per sample; `est` bundles the labels with
                # the raw and estimated feature vectors as a JSON-able dict.
                for i in range(len(base_data_list)):
                    evaluate_res_insert_value.append(
                        asset_hi_model(
                            health_indicator=float(sim[i][0] * 100),
                            similarity=float(sim[i][0]),
                            threshold=float(thres[i][0]),
                            time=base_data_list[i]["time"],
                            data_id=base_data_list[i]["id"],
                            est={
                                "label": [mp.name for mp in mps],
                                "raw": feature_matrix[i].tolist(),
                                "est": Kest[i].tolist(),
                            },
                        ))
                try:
                    for index, row in enumerate(evaluate_res_insert_value):
                        session.add(row)
                        # Commit per row so `row.id` is populated before it is
                        # referenced as reporter_id in the warning log below.
                        session.commit()
                        if len(warning_index) != 0:
                            if index in warning_index:
                                # The measurement point with the largest
                                # raw-vs-estimate deviation is reported.
                                session.add(
                                    MsetWarningLog(
                                        cr_time=base_data_list[index]["time"],
                                        description=mps[np.argmax(
                                            feature_matrix[index] - Kest[index]
                                        )].name + "异常。",
                                        asset_id=pump.asset_id,
                                        reporter_id=row.id,
                                    ))
                                session.commit()
                    # Refresh the pump's status snapshot from this pass.
                    session.query(Asset).filter(
                        Asset.id == pump.asset_id).update({
                            "statu": determine_statu(feature_matrix=feature_matrix),
                            "health_indicator":
                            evaluate_res_insert_value[-1].health_indicator,
                            "md_time": datetime.datetime.now(),
                        })
                    session.commit()
                    estimate_count += len(evaluate_res_insert_value)
                except Exception as e:
                    # Best-effort per pump: roll back this pump's partial work
                    # and continue with the next pump.
                    session.rollback()
                    print(e)
    session.close()
    return estimate_count
# One-off seed/migration script: registers a per-asset AssetHI table for every
# pump-unit Asset (asset_type == 0), creates the tables on the target MySQL
# database, then builds ~900 days of synthetic daily health-indicator rows
# per asset starting 2016-01-01.
import datetime
import random

from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine

from db import session_make, meta_engine
from db_model import Asset, AssetHI

session = session_make(meta_engine)
# All pump-unit asset ids (asset_type == 0).
x = session.query(Asset.id).filter(Asset.asset_type == 0).all()
Base = declarative_base()
for row in x:
    # Register each per-asset HI model on this script's metadata so
    # create_all below emits its table.
    model = AssetHI.model(point_id=row.id, base=Base)
# NOTE(review): hardcoded DSN with (masked) credentials — should come from
# configuration, not source.
META_URL = "mysql://*****:*****@123.56.7.137/op_meta_merged?charset=utf8"
engine = create_engine(META_URL, encoding="utf-8", pool_pre_ping=True)
Base.metadata.create_all(engine)
for row in x:
    initial_datetime = datetime.datetime(2016, 1, 1, 0, 0, 0, 0)
    tmp = []  # NOTE(review): never used — likely leftover from an edit
    # Register the model again (default metadata) for row construction.
    model = AssetHI.model(point_id=row.id)
    for i in range(1, 900):
        # NOTE(review): `r` is constructed but never added to a session or
        # committed, so these synthetic rows are not persisted as written —
        # confirm whether a session.add/commit step was dropped.
        r = model(id=i,
                  time=str(initial_datetime),
                  health_indicator=80 + random.random() * 10)
        initial_datetime += datetime.timedelta(days=1)