def _worker_process_to_calculate_tod_reliabilities(idx, queue, lck, data_path, db_info):
    """Worker-process entry point computing time-of-day (TOD) reliabilities.

    Pulls ``(ttr_id, target_date, num, total)`` tuples off ``queue`` forever;
    a tuple whose ``target_date`` is None is the shutdown sentinel.

    :type idx: int
    :type data_path: str
    :type db_info: dict
    """
    import gc
    from pyticas.infra import Infra
    from pyticas.tool import tb
    from pyticas_tetres.db.tetres import conn

    logger = getLogger(__name__)

    # one-time, per-process initialization (infra cache + DB connection)
    logger.debug('[TOD Reliability Worker %d] starting...' % (idx))
    ticas.initialize(data_path)
    Infra.get_infra()
    conn.connect(db_info)
    logger.debug('[TOD Reliability Worker %d] is ready' % (idx))

    while True:
        task = queue.get()
        ttr_id, target_date, num, total = task
        if target_date is None:
            # sentinel received: terminate this worker process
            exit(1)
        try:
            logger.debug('[TOD Reliability Worker %d] (%d/%d) calculating for route=%s at %s' % (
                idx, num, total, ttr_id, target_date.strftime('%Y-%m-%d')))
            traveltime_info.calculate_TOD_reliabilities(ttr_id, target_date, lock=lck)
            # free bulky intermediates before the next task
            gc.collect()
        except Exception as err:
            tb.traceback(err)
            continue
def _estimation_process(id, queue, counters, lock, data_path, DB_INFO, CAD_DB_INFO, IRIS_DB_INFO):
    """Worker-process entry point for travel-time reliability estimation.

    Consumes ``(a_route_id, eparam, uid)`` tuples from ``queue`` forever,
    runs one estimation per task, and — when this worker decrements the
    shared per-``uid`` counter to zero — packs the request's results into
    a compressed file.

    :type id: int
    :type queue: Queue
    :type counters: dict
    :type lock: Lock
    :type data_path: str
    :type DB_INFO: dict
    :type CAD_DB_INFO: dict
    :type IRIS_DB_INFO: dict
    """
    from pyticas_tetres.db.tetres import conn
    from pyticas_tetres.db.iris import conn as iris_conn
    from pyticas_tetres.db.cad import conn as cad_conn

    logger = getLogger(__name__)

    # initialize: per-process infra cache plus the three DB connections
    logger.debug('[EST WORKER %d] starting...' % (id))
    ticas.initialize(data_path)
    infra = Infra.get_infra()
    conn.connect(DB_INFO)
    cad_conn.connect(CAD_DB_INFO)
    iris_conn.connect(IRIS_DB_INFO)

    # db session is created here
    ttr_da = TTRouteDataAccess()
    logger.debug('[EST WORKER %d] is ready' % (id))

    while True:
        (a_route_id, eparam, uid) = queue.get()
        try:
            logger.debug('[EST WORKER %d] >>>>> start estimation (uid=%s, route=%d)' % (id, uid, a_route_id))
            # work on a clone so the queued request object stays untouched
            _eparam = eparam.clone()
            try:
                # NOTE(review): best-effort 5-minute start-time offset;
                # failure is logged and the original start time is kept
                _eparam.add_start_time_offset(offset=5)
            except Exception as e:
                logger.debug('Could not add five minutes offset to the starting time. Error: {}'.format(e))
            _eparam.travel_time_route = ttr_da.get_by_id(a_route_id)
            estimation.estimate(_eparam, uid)
            logger.debug('[EST WORKER %d] <<<<< end of estimation (uid=%s, route=%d)' % (id, uid, a_route_id))
        except Exception as ex:
            tb.traceback(ex)
            logger.debug('[EST WORKER %d] <<<<< end of task (exception occured) (uid=%s)' % (id, uid))

        # The counter is decremented even when estimation failed, so a
        # failed task cannot wedge the whole request.  The last worker to
        # finish (counter reaches zero) packs the results; the dict entry
        # is deleted inside the lock so only one worker can win.
        should_pack = False
        with lock:
            counters[uid] = counters[uid] - 1
            if counters[uid] <= 0:
                del counters[uid]
                should_pack = True

        if should_pack:
            logger.debug('[EST WORKER %d] >>> make compressed file (uid=%s)' % (id, uid))
            _pack_result(uid)
            logger.debug('[EST WORKER %d] <<< end of making compressed file (uid=%s)' % (id, uid))
def _worker_process_to_specific_categorization(idx, queue, lck, data_path, db_info, **kwargs):
    """Worker-process entry point that categorizes travel-time data with a
    caller-selected subset of categorizers.

    Consumes ``(ttr_id, prd, num, total)`` tuples from ``queue`` forever;
    a tuple whose ``prd`` is None is the shutdown sentinel.

    :type idx: int
    :type data_path: str
    :type db_info: dict
    :param kwargs: expects ``categorizer_names``, an iterable of names among
        'incident', 'snowmgmt', 'specialevent', 'weather', 'workzone'
    """
    from pyticas_tetres.db.tetres import conn
    from pyticas.infra import Infra
    from pyticas.tool import tb
    from pyticas_tetres.rengine.cats import incident, snowmgmt, specialevent, weather, workzone

    logger = getLogger(__name__)

    # initialize
    logger.debug('[TT-Categorization Worker %d] starting...' % (idx))
    ticas.initialize(data_path)
    Infra.get_infra()
    conn.connect(db_info)

    categorizer_map = {
        "incident": incident,
        "snowmgmt": snowmgmt,
        "specialevent": specialevent,
        "weather": weather,
        "workzone": workzone,
    }
    # BUG FIX: unknown names previously appended None to the list, which
    # crashed later with AttributeError on None.categorize; a missing
    # ``categorizer_names`` kwarg raised TypeError.  Unknown names are now
    # skipped with a warning, and a missing kwarg yields an empty list.
    categorizers = []
    for categorizer_name in (kwargs.get("categorizer_names") or []):
        categorizer = categorizer_map.get(categorizer_name)
        if categorizer is None:
            logger.warning('[TT-Categorization Worker %d] unknown categorizer: %s' % (idx, categorizer_name))
            continue
        categorizers.append(categorizer)

    da_route = TTRouteDataAccess()
    logger.debug('[TT-Categorization Worker %d] is ready' % (idx))

    while True:
        ttr_id, prd, num, total = queue.get()
        if prd is None:
            # sentinel: release the DB session and terminate this worker
            da_route.close_session()
            exit(1)
        try:
            ttri = da_route.get_by_id(ttr_id)
            if not ttri:
                logger.debug(
                    '[TT-Categorization Worker %d] route is not found (%s)' % (idx, ttr_id))
                continue
            logger.debug(
                '[TT-Categorization Worker %d] (%d/%d) %s (id=%s) at %s' % (
                    idx, num, total, ttri.name, ttri.id, prd.get_date_string()))
            # load the travel-time rows for this route/period, then hand
            # them to each selected categorizer
            tt_da = TravelTimeDataAccess(prd.start_date.year)
            tt_data_list = tt_da.list_by_period(ttri.id, prd)
            tt_da.close_session()
            for categorizer in categorizers:
                categorizer.categorize(ttri, prd, tt_data_list, lock=lck)
            gc.collect()
        except Exception as ex:
            tb.traceback(ex)
            continue
def _worker_process_to_calculate_tt_and_categorize(idx, queue, lck, data_path, db_info):
    """Worker-process entry point that computes travel times for a route and
    then runs every categorizer (weather, incident, workzone, special event,
    snow management) over the freshly computed data.

    Consumes ``(ttr_id, prd, num, total)`` tuples from ``queue`` forever;
    a tuple whose ``prd`` is None is the shutdown sentinel.

    :type idx: int
    :type data_path: str
    :type db_info: dict
    """
    from pyticas_tetres.db.tetres import conn
    from pyticas.infra import Infra
    from pyticas.tool import tb
    from pyticas_tetres.rengine.cats import weather, incident, snowmgmt, specialevent, workzone

    logger = getLogger(__name__)

    # initialize (cleanup: dropped unused `infra` local; Infra.get_infra()
    # is still called for its caching side effect)
    logger.debug('[TT-Categorization Worker %d] starting...' % (idx))
    ticas.initialize(data_path)
    Infra.get_infra()
    conn.connect(db_info)

    categorizers = [weather, incident, workzone, specialevent, snowmgmt]
    da_route = TTRouteDataAccess()
    logger.debug('[TT-Categorization Worker %d] is ready' % (idx))

    while True:
        ttr_id, prd, num, total = queue.get()
        if prd is None:
            # sentinel: release the DB session and terminate this worker
            da_route.close_session()
            exit(1)
        try:
            ttri = da_route.get_by_id(ttr_id)
            if not ttri:
                logger.debug(
                    '[TT-Categorization Worker %d] route is not found (%s)' % (idx, ttr_id))
                continue
            logger.debug(
                '[TT-Categorization Worker %d] (%d/%d) %s (id=%s) at %s' % (
                    idx, num, total, ttri.name, ttri.id, prd.get_date_string()))
            is_inserted = traveltime.calculate_a_route(
                prd, ttri, dbsession=da_route.get_session(), lock=lck)
            if not is_inserted:
                # categorization still proceeds: earlier runs may have
                # inserted data for this route/period
                logger.warning(
                    '[TT-Categorization Worker %d] - fail to add travel time data' % idx)
            tt_da = TravelTimeDataAccess(prd.start_date.year)
            tt_data_list = tt_da.list_by_period(ttri.id, prd)
            tt_da.close_session()
            # cleanup: plain iteration replaces enumerate() whose index and
            # the ignored categorize() return value were never used
            for categorizer in categorizers:
                categorizer.categorize(ttri, prd, tt_data_list, lock=lck)
            gc.collect()
        except Exception as ex:
            tb.traceback(ex)
            continue
def start(self, port=None, debug=True, ssl_path=None, **kwargs):
    """Start the TICAS web server.

    :type port: int
    :param port: TCP port to listen on; when falsy, a free port is picked
        by binding an ephemeral socket
    :type debug: bool
    :type ssl_path: str
    :param ssl_path: two-element sequence ``(crt_file_path, key_file_path)``;
        when given, the server runs over HTTPS
    :rtype:
    """
    logger = getLogger(__name__)

    if not ticas.is_initialized():
        logger.info('initializing TICAS')
        ticas.initialize(self.data_path)
        Infra.get_infra('', download=True)  # load_data recent roadway network

    logger.info('starting PyTICAS Apps')

    # create key and crt for HTTPS
    if ssl_path and len(ssl_path) == 2:
        # ssl_path[0] : `crt` file path
        # ssl_path[1] : `key` file path
        logger.info('creating SSL context...')
        # make_ssl_devcert(os.path.join(ssl_path, 'ssl'), host='localhost')  # make dummy ssl
        context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
        # FIX: the former single-argument os.path.join() calls were no-ops;
        # the paths are passed straight through (behavior unchanged)
        context.load_cert_chain(ssl_path[0], ssl_path[1])
    else:
        context = None

    # call init modules
    logger.info('loading init modules...')
    for app in self.apps:
        app.init(self.server)

    logger.info('registering service modules...')
    for app in self.apps:
        app.register_service(self.server)

    # run api web service
    if not port:
        # FIX: the probe socket is now closed even if bind/getsockname
        # raises, via the socket's context manager
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
            # faverolles 12/19/2019: socket change
            # sock.bind(('localhost', 0))
            sock.bind(('0.0.0.0', 0))
            port = sock.getsockname()[1]

    self.server.run(debug=debug, port=port, ssl_context=context, **kwargs)
    logger.info('program terminated')
def _worker_process_to_create_or_update_tt_and_moe(idx, queue, lck, data_path, db_info, **kwargs):
    """Worker-process entry point that creates/updates travel-time and MOE
    records for queued route/period tasks.

    Consumes ``(ttr_id, prd, num, total)`` tuples from ``queue`` forever;
    a tuple whose ``prd`` is None is the shutdown sentinel.

    :type idx: int
    :type data_path: str
    :type db_info: dict
    :param kwargs: may carry ``rw_moe_param_json``, forwarded to the
        travel-time/MOE calculation
    """
    from pyticas.infra import Infra
    from pyticas.tool import tb
    from pyticas_tetres.db.tetres import conn

    logger = getLogger(__name__)

    # per-process initialization
    logger.debug('[TT-Categorization Worker %d] starting...' % (idx))
    ticas.initialize(data_path)
    Infra.get_infra()
    conn.connect(db_info)

    rw_moe_param_json = kwargs.get("rw_moe_param_json")
    da_route = TTRouteDataAccess()
    logger.debug('[TT-Categorization Worker %d] is ready' % (idx))

    while True:
        task = queue.get()
        ttr_id, prd, num, total = task
        if prd is None:
            # sentinel: close the session, then terminate
            da_route.close_session()
            exit(1)
        try:
            ttri = da_route.get_by_id(ttr_id)
            if not ttri:
                logger.debug(
                    '[TT-Categorization Worker %d] route is not found (%s)' % (idx, ttr_id))
                continue
            logger.debug(
                '[TT-Categorization Worker %d] (%d/%d) %s (id=%s) at %s' % (
                    idx, num, total, ttri.name, ttri.id, prd.get_date_string()))
            traveltime.calculate_tt_moe_a_route(
                prd, ttri,
                dbsession=da_route.get_session(),
                lock=lck,
                create_or_update=True,
                rw_moe_param_json=rw_moe_param_json)
            gc.collect()
        except Exception as err:
            logger.warning(
                '[TT-Categorization Worker %d] - fail to add travel time data' % idx)
            tb.traceback(err)
            continue
def _worker_process(id, queue, counters, lock, data_path, DB_INFO, CAD_DB_INFO, IRIS_DB_INFO):
    """Generic admin task worker: runs callables pulled from a task queue.

    :type id: int
    :type queue: Queue
    :type counters: dict
    :type lock: Lock
    :type data_path: str
    :type DB_INFO: dict
    :type CAD_DB_INFO: dict
    :type IRIS_DB_INFO: dict
    """
    from pyticas_tetres.db.cad import conn as cad_conn
    from pyticas_tetres.db.iris import conn as iris_conn
    from pyticas_tetres.db.tetres import conn

    logger = getLogger(__name__)

    # per-process initialization: infra cache plus the three DB connections
    logger.debug('[ADMIN WORKER %d] starting...' % (id))
    ticas.initialize(data_path)
    Infra.get_infra()
    conn.connect(DB_INFO)
    cad_conn.connect(CAD_DB_INFO)
    iris_conn.connect(IRIS_DB_INFO)
    logger.debug('[ADMIN WORKER %d] is ready' % (id))

    while True:
        # each task carries its own uid, enqueue timestamp, callable and args
        _uid, _task_added_time, _task, _args, _kwargs = queue.get()
        try:
            logger.debug('[ADMIN WORKER %d] >>>>> start to run task (uid=%s)' % (id, _uid))
            _task(*_args, **_kwargs)
            logger.debug('[ADMIN WORKER %d] <<<<< end of task (uid=%s)' % (id, _uid))
        except Exception as err:
            # log and keep the worker alive for the next task
            tb.traceback(err)
            logger.debug(
                '[ADMIN WORKER %d] <<<<< end of task (exception occured) (uid=%s)' % (id, _uid))
import datetime
import time
import sys

# make the server packages importable when running from the repo root
sys.path.append("Server/src")

import global_settings
import dbinfo

if __name__ == '__main__':
    from pyticas import ticas
    from pyticas.infra import Infra
    from pyticas_tetres.db.cad import conn as conn_cad
    from pyticas_tetres.db.iris import conn as conn_iris
    from pyticas_tetres.db.tetres import conn

    # initialize the TICAS runtime and connect to all three databases
    ticas.initialize(global_settings.DATA_PATH)
    infra = Infra.get_infra()
    conn.connect(dbinfo.tetres_db_info())
    conn_cad.connect(dbinfo.cad_db_info())
    conn_iris.connect(dbinfo.iris_incident_db_info())

    time.sleep(1)

    # operator banner: this tool must run exclusively (bulk insert/delete)
    print('')
    print(
        '!! Do not run multiple instances of this program. (DB sync problem can be caused in bulk-insertion and deletion)')
    print('!! Stop TeTRES Server if it is running.')
    print('')
    print('# loads weather data for the given time period')
    print('')
initialize_colorama(autoreset=True) # import required modules from pyticas_server.server import TICASServer from pyticas_ncrtes.app import NCRTESApp from pyticas_tetres.app import TeTRESApp from pyticas import ticas from pyticas.rn import infra_loader data_path = global_settings.DATA_PATH print(data_path) # initialize with `DATA_PATH` ticas.initialize(data_path) # download the new 'metro.config.xml' from TMC infra_loader.load_metro('', download=True) # create server instance ticasServer = TICASServer(data_path, local_mode=False) ticasServer.add_app( NCRTESApp("NCRTES: Normal Condition Recovery Time Estimation System", dbinfo.ncrtes_db_info(data_path))) ticasServer.add_app( TeTRESApp("TeTRES: Travel Time Reliability Management System", TeTRES_DB_INFO, CAD_DB_INFO, IRIS_DB_INFO)) # ticasServer.add_app(RWISApiApp("RWIS: RWIS Proxy Server", dbinfo.rwis_db_info())) # start server try: ticasServer.start(host="0.0.0.0", port=5000, debug=True, use_reloader=False)
__author__ = 'Chongmyung Park ([email protected])' import datetime import time import common import dbinfo if __name__ == '__main__': from pyticas import ticas from pyticas.infra import Infra from pyticas_tetres.db.cad import conn as conn_cad from pyticas_tetres.db.iris import conn as conn_iris from pyticas_tetres.db.tetres import conn ticas.initialize(common.DATA_PATH) infra = Infra.get_infra() conn.connect(dbinfo.tetres_db_info()) conn_cad.connect(dbinfo.cad_db_info()) conn_iris.connect(dbinfo.iris_incident_db_info()) time.sleep(1) print('') print('!! Do not run multiple instances of this program. ' '(DB sync problem can be caused in bulk-insertion and deletion)') print('!! Stop TeTRES Server if it is running.') print('') print( '# Have you defined the travel time reliability route in administrator client?'
def initialize(cls, data_path, cfg_profile=None):
    """Initialize the TICAS runtime for this class.

    Thin delegate to ``ticas.initialize``.

    :type data_path: str
    :param cfg_profile: optional configuration profile forwarded as-is
    """
    ticas.initialize(data_path, cfg_profile)