def processor(time_=None):
    """Execute pending tilt transactions from the `transactions` table on the NBI.

    A transaction is pending when it was never sent, or when its target tilt
    differs from the last applied one. Failed transactions are not retried
    (for now).

    :param time_: timestamp tag for this run; falsy values make this a no-op.
    """
    logger.debug(f"time_ {time_}")
    if not time_:
        return
    engine = get_engine()
    session = get_session(engine=engine)
    # Pending = never sent OR the tilt target changed since the last send.
    pending = session.query(Transaction).filter(
        or_(Transaction.sent.is_(null()),
            Transaction.oldtilt != Transaction.newtilt))
    if ENV == 'sim':
        # The simulator consumes transactions one at a time.
        for pending_trx in pending:
            nbi_simulator(time_=time_, session_=session, trx_=pending_trx)
    elif ENV == 'prod':
        # The production NBI processor takes the whole query result.
        nbi_processor(time_=time_, session_=session, trxs_=pending)
    session.commit()
    session.close()
def scheduler(time_=None):
    """Pick tilt-adjustment candidates and hand them to the mid-term evaluator.

    Candidates are the most recent set of 'High'-intensity overshooters that
    sit on plain terrain (join of the latest `overshooters` and `terrains`
    snapshots).

    :param time_: timestamp tag for this run; falsy values make this a no-op.
    """
    logger.debug(f"time_ {time_}")
    if not time_:
        return
    engine = get_engine()
    # Most recent set of 'High' overshooters on plain terrain.
    query_ = '''
    select o.cellname
    from overshooters o, terrains t
    where o.cellname = t.cellname
    and o.overshooter
    and t.is_plain
    and o.intensity = 'High'
    and o.datetimeid = (select max(datetimeid) from overshooters)
    and t.datetimeid = (select max(datetimeid) from terrains);
    '''
    # Context manager guarantees the connection is closed even if read_sql
    # raises (the original leaked the connection on error).
    with engine.connect() as db_connection:
        candidates_df = pd.read_sql(query_, db_connection)
    # Hand the candidates over to mid_term_evaluator().
    mid_term_evaluator(time_=time_, candidates_df=candidates_df)
def load_rets(time_=None):
    """Append RET (remote electrical tilt) device rows to the `rets` table.

    In 'sim' mode a fixed three-device fixture is used; in 'prod' mode the
    data comes from rets_data(). Any other ENV is logged and skipped
    (the original code raised NameError on an unknown ENV because `df`
    was never bound).

    :param time_: datetimeid stamped on every row; falsy values are a no-op.
    """
    logger.info(f'ENV {ENV}')
    if not time_:
        return
    # Simulation fixture: three RET devices across two nodes.
    list_ = [
        {
            'datetimeid': time_,
            'node': 'MBTS-AIS_3G_003',
            'cellname': 'AIS_4G_003_3',
            'eci': 2816002,
            'devicename': 'RET82',
            'deviceno': 2,
            'tilt': 30,
            'subname': 'RET82',
            'subunitno': 1,
            'localcellid': 2,
        },
        {
            'datetimeid': time_,
            'node': 'MBTS-ARA_3G_013',
            'cellname': 'ARA_4G_013_3',
            'eci': 2304258,
            'devicename': 'RET82R_S3',
            'deviceno': 2,
            'tilt': 40,
            'subname': 'RET82R_S3',
            'subunitno': 1,
            'localcellid': 2,
        },
        {
            'datetimeid': time_,
            'node': 'MBTS-ARA_3G_013',
            'cellname': 'ARA_4G_013_3',
            'eci': 2304258,
            'devicename': 'RET82L_S3',
            'deviceno': 12,
            'tilt': 40,
            'subname': 'RET82L_S3',
            'subunitno': 1,
            'localcellid': 2,
        },
    ]
    if ENV == 'sim':
        df = pd.DataFrame.from_dict(list_)
    elif ENV == 'prod':
        df = rets_data(time_=time_)
    else:
        # Unknown environment: nothing to load.
        logger.info(f'ENV {ENV} not recognized; skipping load')
        return
    engine = get_engine()
    session = get_session(engine=engine)
    df.to_sql('rets', con=engine, if_exists='append', index=False)
    session.commit()
    session.close()
def load_terrains(time_=None, neighborhood_df=pd.DataFrame(),
                  cells_df=pd.DataFrame()):
    """Insert one Terrain row per cell into the `terrains` table.

    In 'sim' mode a fixed two-cell fixture is used (keys already match the
    Terrain columns); in 'prod' mode check_terrain() returns a frame with
    HILL / HEIGHT_DIFF / CELLNAME columns that are mapped here. The original
    loop always read the prod keys, so it raised KeyError in 'sim' mode;
    both schemas are now handled.

    :param time_: datetimeid tag for this run; falsy values are a no-op.
    :param neighborhood_df: pass-through frame for check_terrain().
    :param cells_df: pass-through frame for check_terrain().
    :return: (neighborhood_df, cells_df) possibly refreshed by check_terrain().
    """
    logger.info(f'ENV {ENV}')
    if not time_:
        return
    list_ = [
        {
            'datetimeid': time_,
            'cellname': 'AIS_4G_003_3',
            'is_plain': True,
            'slope': 0,
        },
        {
            'datetimeid': time_,
            'cellname': 'ARA_4G_013_3',
            'is_plain': True,
            'slope': 0,
        },
    ]
    if ENV == 'sim':
        terrain_df = pd.DataFrame.from_dict(list_)
    if ENV == 'prod':
        terrain_df, neighborhood_df, cells_df = check_terrain(
            time_=time_, neighborhood_df=neighborhood_df, cells_df=cells_df)
    logger.info(f'terrain_df.shape {terrain_df.shape}')
    logger.info(f'terrain_df.columns {terrain_df.columns}')
    terrains_dict = terrain_df.to_dict('index')
    engine = get_engine()
    session = get_session(engine=engine)
    # NOTE(review): rows are stamped with "now" rather than time_ — confirm
    # this is intended.
    now_ = datetime.datetime.now()
    for index, dict_ in terrains_dict.items():
        if 'HILL' in dict_:
            # prod schema from check_terrain(): derive plain/slope flags.
            is_plain_ = not dict_['HILL']
            if is_plain_:
                slope_ = 0
            else:
                # Sign of the height difference gives the slope direction.
                slope_ = 1 if int(dict_['HEIGHT_DIFF']) > 0 else -1
            cellname_ = dict_['CELLNAME']
        else:
            # sim schema: keys already match the Terrain columns.
            is_plain_ = dict_['is_plain']
            slope_ = dict_['slope']
            cellname_ = dict_['cellname']
        obj_ = Terrain(datetimeid=now_,
                       cellname=cellname_,
                       slope=slope_,
                       is_plain=is_plain_)
        session.add(obj_)
    session.commit()
    session.close()
    return neighborhood_df, cells_df
def enabler(cellnames=None):
    """Mark every RET antenna belonging to the given cells as enabled.

    :param cellnames: iterable of cell names; falsy/None is a no-op
        (the original raised TypeError when iterating the default None).
    """
    logger.debug(f'ENV {ENV}')
    if not cellnames:
        # Nothing to enable; matches the guard-clause style used elsewhere.
        return
    engine = get_engine()
    session = get_session(engine=engine)
    for cellname in cellnames:
        antennas = session.query(Ret).filter(Ret.cellname == cellname, )
        for antenna in antennas:
            antenna.enabled = True
    # Single commit covers all flag updates.
    session.commit()
    session.close()
def trx_updater(commands=None, sent_=None):
    '''Apply NBI command results to the `transactions` (and `rets`) tables.

    Receives a list of dicts with the responses to the tilt-change commands
    executed on the NBI. On success the transaction is marked successful and
    the rets table is updated via ret_updater(); on failure only the failure
    timestamp is recorded.

    :param commands: list of response dicts, each with 'object_id' and a
        'data' dict holding 'result' and 'executed_time_stamp'
        ('%Y-%m-%d %H:%M:%S'); falsy values make this a no-op.
    :param sent_: value written to Transaction.sent for every processed trx.
    '''
    logger.debug(f"ENV {ENV}")
    if not commands:
        return
    engine = get_engine()
    session = get_session(engine=engine)
    for command in commands:
        result = command['data']['result']
        logger.debug(f"result {result}")
        executed_time_stamp_str = command['data']['executed_time_stamp']
        executed_time_stamp = datetime.datetime.strptime(
            executed_time_stamp_str, '%Y-%m-%d %H:%M:%S')
        object_id = command['object_id']
        trx = session.query(Transaction).filter(
            Transaction.id == object_id).first()
        if not trx:
            # NOTE(review): returning here aborts processing of the REMAINING
            # commands as well — confirm `continue` was not intended.
            session.commit()
            session.close()
            return
        trx.sent = sent_
        if result:
            # Success: the new tilt becomes the current one.
            trx.oldtilt = trx.newtilt
            trx.success = executed_time_stamp
            ret_updater(node=trx.node, deviceno=trx.deviceno,
                        tilt=trx.newtilt, session=session)
        else:
            logger.info(f"result {result}")
            trx.failure = executed_time_stamp
    session.commit()
    session.close()
def load_overshooters():
    '''Insert one Overshooter row per cell returned by overshooting().

    NOTE(review): this definition is shadowed by the later
    `load_overshooters(time_=...)` in this module, so it is dead code unless
    it is re-exported before the redefinition — confirm and remove if unused.
    NOTE(review): uses `datetime.now()` while other functions here use
    `datetime.datetime.now()`; only one form can match the module's import —
    verify which one is correct.
    '''
    logger.info(f'load_overshooters:')
    neighborhood_df, overshooters_df = overshooting()
    overshooters_dict = overshooters_df.to_dict('index')
    engine = get_engine()
    session = get_session(engine=engine)
    now_ = datetime.now()
    for index, dict_ in overshooters_dict.items():
        # NOTE(review): these attribute names (date_time, cell_name, ...) do
        # not match the datetimeid/cellname columns used elsewhere — this
        # looks like an older Overshooter schema; confirm.
        obj_ = Overshooter(
            date_time = now_,
            cell_name = dict_['CELLNAME'],
            time_advanced = int(dict_['ta_']),
            average_distance = int(dict_['distance_']),
            is_overshooter = dict_['overshooter']
        )
        session.add(obj_)
    session.commit()
    session.close()
def load_overshooters(time_=None, neighborhood_df=pd.DataFrame(),
                      cells_df=pd.DataFrame()):
    """Append overshooter detections to the `overshooters` table.

    In 'sim' mode a fixed two-cell fixture is used; in 'prod' mode the data
    comes from overshooters(). Any other ENV is logged and skipped (the
    original code raised NameError on an unknown ENV because `df` was never
    bound).

    :param time_: datetimeid stamped on every row; falsy values are a no-op.
    :param neighborhood_df: pass-through frame for overshooters().
    :param cells_df: pass-through frame for overshooters().
    """
    logger.info(f'load_overshooters:')
    if not time_:
        return
    # Simulation fixture: two 'High'-intensity overshooters.
    list_ = [
        {
            'datetimeid': time_,
            'cellname': 'AIS_4G_003_3',
            'ta_calculated': 14.4,
            'average_distance': 2.43982546537283,
            'overshooter': True,
            'intensity': 'High',
        },
        {
            'datetimeid': time_,
            'cellname': 'ARA_4G_013_3',
            'ta_calculated': 14.4,
            'average_distance': 6.14587256200947,
            'overshooter': True,
            'intensity': 'High',
        },
    ]
    if ENV == 'sim':
        df = pd.DataFrame.from_dict(list_)
    elif ENV == 'prod':
        df = overshooters(time_=time_,
                          neighborhood_df=neighborhood_df,
                          cells_df=cells_df)
    else:
        # Unknown environment: nothing to load.
        logger.info(f'ENV {ENV} not recognized; skipping load')
        return
    engine = get_engine()
    session = get_session(engine=engine)
    df.to_sql('overshooters', con=engine, if_exists='append', index=False)
    session.commit()
    session.close()
def transactions(time_=None):
    '''Detect pending transactions in the `transactions` table and execute
    them on the NBI. Failed transactions are not retried (for now).

    NOTE(review): the module-level processor() defined earlier in this file
    accepts only `time_`, so the call below with `session_`/`trx_` would raise
    TypeError — this looks like an older revision superseded by processor();
    confirm which entry point is live.

    :param time_: timestamp tag for this run; falsy values make this a no-op.
    '''
    logger.debug(f"time_ {time_}")
    if not time_:
        return
    engine = get_engine()
    session = get_session(engine=engine)
    # Pending = transactions that were never sent.
    trxs = session.query(Transaction).filter(Transaction.sent.is_(null()))
    for trx in trxs:
        processor(time_=time_,session_=session,trx_=trx)
    session.commit()
    session.close()
def evaluator(time_=None, candidates_kpis_df=pd.DataFrame()):
    '''Decide tilt changes for candidate cells based on their average KPIs.

    Receives all candidate cells with their averaged KPIs for the current
    instant. If a cell already has a row in `transactions`, the current KPIs
    are compared against the initial ones; rule outcomes either roll the tilt
    back, step it further, or create a brand-new transaction.

    NOTE(review): uses `datetime.now()` while load_terrains/trx_updater use
    `datetime.datetime.now()` — only one form can match the module's import;
    verify which is correct.

    :param time_: datetimeid tag for this run; falsy values are a no-op.
    :param candidates_kpis_df: frame with eNodeB_Name, user_avg,
        user_thrp_dl and traffic_dl columns; empty frame is a no-op.
    '''
    logger.debug(f"time_ {time_}")
    if not time_:
        return
    if candidates_kpis_df.empty:
        return
    logger.debug(f"candidates_kpis_df \n{candidates_kpis_df}")
    engine = get_engine()
    session = get_session(engine=engine)
    for idx in candidates_kpis_df.index:
        # Candidates are overshooters on plain terrain.
        node = candidates_kpis_df['eNodeB_Name'][idx]
        user_avg = candidates_kpis_df['user_avg'][idx]
        user_thrp_dl = candidates_kpis_df['user_thrp_dl'][idx]
        traffic_dl = candidates_kpis_df['traffic_dl'][idx]
        antennas = session.query(Ret).filter(Ret.node == node, )
        for antenna in antennas:
            # Only antennas explicitly enabled by enabler() participate.
            if not antenna.enabled:
                continue
            logger.debug(f"node {antenna.node} deviceno {antenna.deviceno}")
            trx = session.query(Transaction).filter(
                and_(Transaction.node == antenna.node,
                     Transaction.deviceno == antenna.deviceno)).first()
            if trx:
                # Skip devices whose previous transaction never succeeded.
                if not trx.success:
                    logger.debug(f"continue: success {trx.success}")
                    continue
                # KPI degradation beyond the thresholds triggers a rollback.
                # ("percentaje" spelling comes from the external helpers.)
                cond_ = delta_percentaje(
                    trx.user_thrp_dl_initial,
                    user_thrp_dl) > MAX_DELTA_USER_THRP_DL_PERCENTAJE
                cond_ = cond_ or delta_percentaje(
                    trx.traffic_dl_initial,
                    traffic_dl) > MAX_DELTA_TRAFFIC_DL_PERCENTAJE
                if cond_:
                    # Rollback: return to the previous tilt.
                    logger.debug(f"rollback")
                    newtilt_ = trx.oldtilt
                else:
                    # KPIs healthy: step the tilt once more.
                    newtilt_ = newtilt(trx.newtilt)
                if trx.newtilt == newtilt_:
                    logger.debug(f"continue: newtilt_ {newtilt_}")
                    continue
                # New tilt differs from the last one: update the transaction.
                trx.newtilt = newtilt_
                trx.generated = datetime.now()
            else:
                # No prior transaction: only act inside the user-load window.
                if not (user_avg >= MIN_USER_AVG and user_avg <= MAX_USER_AVG):
                    logger.debug(f"continue: user_avg {user_avg}")
                    continue
                # A tilt already at its target needs no transaction.
                if antenna.tilt == newtilt(antenna.tilt):
                    logger.debug(
                        f"continue: antenna.tilt == newtilt(antenna.tilt)")
                    continue
                # Create the row in the transactions table.
                trx = Transaction(
                    node=antenna.node,
                    cellname=antenna.cellname,
                    deviceno=antenna.deviceno,
                    subunitno=antenna.subunitno,
                    tilt_initial=antenna.tilt,
                    # oldtilt starts equal to tilt_initial.
                    oldtilt=antenna.tilt,
                    # Cast to float: the pandas cells may be numpy scalars.
                    user_thrp_dl_initial=float(user_thrp_dl),
                    traffic_dl_initial=float(traffic_dl),
                    newtilt=newtilt(antenna.tilt),
                    datetimeid=time_,
                    generated=datetime.now(),
                )
                logger.debug(f"trx \n{trx}")
                session.add(trx)
                session.commit()
    session.commit()
    session.close()