def _worker_process_to_calculate_tod_reliabilities(idx, queue, lck, data_path, db_info):
    """Worker-process entry point that computes TOD reliabilities for queued tasks.

    Runs forever, pulling ``(ttr_id, target_date, num, total)`` tuples from
    *queue* and delegating to ``traveltime_info.calculate_TOD_reliabilities``.
    A task whose ``target_date`` is ``None`` is the shutdown sentinel.

    :param idx: worker index (used only for log messages)
    :param queue: multiprocessing task queue
    :param lck: shared lock passed through to the calculation routine
    :param data_path: TICAS data directory used for initialization
    :param db_info: TeTRES database connection info
    """
    import gc
    from pyticas.tool import tb
    from pyticas_tetres.db.tetres import conn
    from pyticas.infra import Infra

    logger = getLogger(__name__)

    # one-time per-process setup: data path, roadway network, DB connection
    logger.debug('[TOD Reliability Worker %d] starting...' % (idx))
    ticas.initialize(data_path)
    Infra.get_infra()
    conn.connect(db_info)
    logger.debug('[TOD Reliability Worker %d] is ready' % (idx))

    while True:
        task = queue.get()
        ttr_id, target_date, num, total = task
        if target_date is None:
            # shutdown sentinel -> terminate this worker process
            exit(1)
        try:
            logger.debug('[TOD Reliability Worker %d] (%d/%d) calculating for route=%s at %s' % (
                idx, num, total, ttr_id, target_date.strftime('%Y-%m-%d')))
            traveltime_info.calculate_TOD_reliabilities(ttr_id, target_date, lock=lck)
            # free memory between long-running tasks
            gc.collect()
        except Exception as err:
            tb.traceback(err)
            continue
def _estimation_process(id, queue, counters, lock, data_path, DB_INFO, CAD_DB_INFO, IRIS_DB_INFO):
    """ Worker-process loop that runs reliability estimations pulled from *queue*.

    Each task is ``(a_route_id, eparam, uid)``; several tasks can share one
    *uid* (one request spanning multiple routes), and ``counters[uid]`` tracks
    how many tasks of that request are still outstanding.  The worker that
    finishes the last task packs the combined result into a compressed file.

    :type id: int
    :type queue: Queue
    :type counters: dict
    :type lock: Lock
    :type data_path: str
    :type DB_INFO: dict
    :type CAD_DB_INFO: dict
    :type IRIS_DB_INFO: dict
    """
    from pyticas_tetres.db.tetres import conn
    from pyticas_tetres.db.iris import conn as iris_conn
    from pyticas_tetres.db.cad import conn as cad_conn
    logger = getLogger(__name__)
    # initialize
    logger.debug('[EST WORKER %d] starting...' % (id))
    ticas.initialize(data_path)
    infra = Infra.get_infra()
    conn.connect(DB_INFO)
    cad_conn.connect(CAD_DB_INFO)
    iris_conn.connect(IRIS_DB_INFO)
    # db session is created here
    ttr_da = TTRouteDataAccess()
    logger.debug('[EST WORKER %d] is ready' % (id))
    while True:
        (a_route_id, eparam, uid) = queue.get()
        try:
            logger.debug('[EST WORKER %d] >>>>> start estimation (uid=%s, route=%d)' % (id, uid, a_route_id))
            # work on a copy so the shared request parameter object is untouched
            _eparam = eparam.clone()
            try:
                # NOTE(review): presumably shifts the start time by 5 minutes;
                # best-effort — a failure here is logged and estimation proceeds
                _eparam.add_start_time_offset(offset=5)
            except Exception as e:
                logger.debug('Could not add five minutes offset to the starting time. Error: {}'.format(e))
            _eparam.travel_time_route = ttr_da.get_by_id(a_route_id)
            estimation.estimate(_eparam, uid)
            logger.debug('[EST WORKER %d] <<<<< end of estimation (uid=%s, route=%d)' % (id, uid, a_route_id))
        except Exception as ex:
            tb.traceback(ex)
            logger.debug('[EST WORKER %d] <<<<< end of task (exception occured) (uid=%s)' % (id, uid))
        # decrement the per-request task counter; the worker that drops it to
        # zero is responsible for packing the result (done outside the lock)
        should_pack = False
        with lock:
            counters[uid] = counters[uid] - 1
            if counters[uid] <= 0:
                del counters[uid]
                should_pack = True
        if should_pack:
            logger.debug('[EST WORKER %d] >>> make compressed file (uid=%s)' % (id, uid))
            _pack_result(uid)
            logger.debug('[EST WORKER %d] <<< end of making compressed file (uid=%s)' % (id, uid))
def _worker_process_to_specific_categorization(idx, queue, lck, data_path, db_info, **kwargs):
    """Worker-process loop that runs only the requested categorizers over travel-time data.

    Pulls ``(ttr_id, prd, num, total)`` tasks from *queue*; a task whose ``prd``
    is ``None`` is the shutdown sentinel.  The categorizers to apply are named
    via ``kwargs['categorizer_names']``.

    :param idx: worker index (used only for log messages)
    :param queue: multiprocessing task queue
    :param lck: shared lock passed through to each categorizer
    :param data_path: TICAS data directory used for initialization
    :param db_info: TeTRES database connection info
    :keyword categorizer_names: iterable of names among
        'incident', 'snowmgmt', 'specialevent', 'weather', 'workzone'
    """
    from pyticas_tetres.db.tetres import conn
    from pyticas.infra import Infra
    from pyticas.tool import tb
    from pyticas_tetres.rengine.cats import incident, snowmgmt, specialevent, weather, workzone
    logger = getLogger(__name__)
    # initialize
    logger.debug('[TT-Categorization Worker %d] starting...' % (idx))
    ticas.initialize(data_path)
    infra = Infra.get_infra()
    conn.connect(db_info)
    categorizer_map = {
        "incident": incident,
        "snowmgmt": snowmgmt,
        "specialevent": specialevent,
        "weather": weather,
        "workzone": workzone
    }
    categorizers = []
    # fix: the original appended categorizer_map.get(name) unconditionally, so an
    # unknown name put None in the list and later raised AttributeError inside
    # the task loop; also guard against a missing 'categorizer_names' kwarg.
    for categorizer_name in (kwargs.get("categorizer_names") or []):
        categorizer = categorizer_map.get(categorizer_name)
        if categorizer is None:
            logger.warning('[TT-Categorization Worker %d] unknown categorizer : %s' % (idx, categorizer_name))
            continue
        categorizers.append(categorizer)
    da_route = TTRouteDataAccess()
    logger.debug('[TT-Categorization Worker %d] is ready' % (idx))
    while True:
        ttr_id, prd, num, total = queue.get()
        if prd is None:
            # shutdown sentinel -> release DB session and terminate the process
            da_route.close_session()
            exit(1)
        try:
            ttri = da_route.get_by_id(ttr_id)
            if not ttri:
                logger.debug(
                    '[TT-Categorization Worker %d] route is not found (%s)' % (idx, ttr_id))
                continue
            logger.debug(
                '[TT-Categorization Worker %d] (%d/%d) %s (id=%s) at %s' % (
                    idx, num, total, ttri.name, ttri.id, prd.get_date_string()))
            # load the travel-time rows of this route/period, then categorize them
            tt_da = TravelTimeDataAccess(prd.start_date.year)
            tt_data_list = tt_da.list_by_period(ttri.id, prd)
            tt_da.close_session()
            for cidx, categorizer in enumerate(categorizers):
                n_inserted = categorizer.categorize(ttri, prd, tt_data_list, lock=lck)
            gc.collect()
        except Exception as ex:
            tb.traceback(ex)
            continue
def _worker_process_to_calculate_tt_and_categorize(idx, queue, lck, data_path, db_info):
    """Worker-process loop: compute travel times for queued (route, period) tasks, then categorize.

    Pulls ``(ttr_id, prd, num, total)`` tasks from *queue*; a task whose ``prd``
    is ``None`` is the shutdown sentinel.  For each task, travel times are
    calculated and the fixed set of categorizers is applied to the new rows.

    :param idx: worker index (used only for log messages)
    :param queue: multiprocessing task queue
    :param lck: shared lock passed through to the calculators/categorizers
    :param data_path: TICAS data directory used for initialization
    :param db_info: TeTRES database connection info
    """
    from pyticas_tetres.db.tetres import conn
    from pyticas.infra import Infra
    from pyticas.tool import tb
    from pyticas_tetres.rengine.cats import weather, incident, snowmgmt, specialevent, workzone

    logger = getLogger(__name__)

    # one-time per-process setup
    logger.debug('[TT-Categorization Worker %d] starting...' % (idx))
    ticas.initialize(data_path)
    infra = Infra.get_infra()
    conn.connect(db_info)

    categorizers = [weather, incident, workzone, specialevent, snowmgmt]
    da_route = TTRouteDataAccess()
    logger.debug('[TT-Categorization Worker %d] is ready' % (idx))

    while True:
        ttr_id, prd, num, total = queue.get()
        if prd is None:
            # shutdown sentinel -> release DB session and terminate the process
            da_route.close_session()
            exit(1)
        try:
            ttri = da_route.get_by_id(ttr_id)
            if not ttri:
                logger.debug(
                    '[TT-Categorization Worker %d] route is not found (%s)' % (idx, ttr_id))
                continue
            logger.debug(
                '[TT-Categorization Worker %d] (%d/%d) %s (id=%s) at %s' % (
                    idx, num, total, ttri.name, ttri.id, prd.get_date_string()))
            is_inserted = traveltime.calculate_a_route(
                prd, ttri, dbsession=da_route.get_session(), lock=lck)
            if not is_inserted:
                logger.warning(
                    '[TT-Categorization Worker %d] - fail to add travel time data' % idx)
            # reload this route/period's travel-time rows and categorize them
            tt_da = TravelTimeDataAccess(prd.start_date.year)
            tt_data_list = tt_da.list_by_period(ttri.id, prd)
            tt_da.close_session()
            for categorizer in categorizers:
                categorizer.categorize(ttri, prd, tt_data_list, lock=lck)
            gc.collect()
        except Exception as err:
            tb.traceback(err)
            continue
def _worker_process_to_update_moe_values(start_date, end_date, db_info, **kwargs):
    """Recalculate/update MOE values for a reference route over a date range.

    Builds one daily period per day in ``[start_date, end_date]`` (00:00:00 to
    23:55:00, all weekdays, holidays kept) and runs
    ``traveltime.calculate_tt_moe_a_route`` in create-or-update mode for each.

    :param start_date: first date of the range
    :param end_date: last date of the range
    :param db_info: TeTRES database connection info (connected only if truthy)
    :keyword rw_moe_param_json: dict with the MOE parameters; must contain
        'reference_tt_route_id'
    """
    from pyticas_tetres.db.tetres import conn
    from pyticas.tool import tb
    logger = getLogger(__name__)
    stime = datetime.time(0, 0, 0, 0)
    etime = datetime.time(23, 55, 0, 0)
    daily_periods = period.create_periods(start_date, end_date, stime, etime, cfg.TT_DATA_INTERVAL,
                                          target_days=[0, 1, 2, 3, 4, 5, 6], remove_holiday=False)
    logger.debug('>>> Starting Multi Processing (duration= %s to %s)' % (start_date, end_date))
    rw_moe_param_json = kwargs.get("rw_moe_param_json")
    # fix: the original called rw_moe_param_json.get(...) unconditionally and
    # raised AttributeError when the kwarg was missing (None)
    if not rw_moe_param_json:
        logger.warning('rw_moe_param_json is not given')
        return
    reference_tt_route_id = rw_moe_param_json.get('reference_tt_route_id')
    if db_info:
        conn.connect(db_info)
    da_route = TTRouteDataAccess()
    ttri = da_route.get_by_id(reference_tt_route_id)
    if not ttri:
        logger.debug('route is not found (%s)' % (reference_tt_route_id))
        # fix: release the DB session on the early return instead of leaking it
        da_route.close_session()
        return
    for pidx, prd in enumerate(daily_periods):
        if prd is None:
            da_route.close_session()
            exit(1)
        try:
            # NOTE(review): the session is closed and then re-obtained via
            # get_session() each iteration — presumably to start each daily
            # update with a fresh session; kept as-is to preserve behavior
            da_route.close_session()
            traveltime.calculate_tt_moe_a_route(
                prd, ttri, dbsession=da_route.get_session(),
                create_or_update=True, rw_moe_param_json=rw_moe_param_json)
            gc.collect()
        except Exception as ex:
            tb.traceback(ex)
            continue
def _worker_process_to_create_or_update_tt_and_moe(idx, queue, lck, data_path, db_info, **kwargs):
    """Worker-process loop that creates or updates travel-time/MOE rows for queued tasks.

    Pulls ``(ttr_id, prd, num, total)`` tasks from *queue*; a task whose ``prd``
    is ``None`` is the shutdown sentinel.  Each task runs
    ``traveltime.calculate_tt_moe_a_route`` in create-or-update mode.

    :param idx: worker index (used only for log messages)
    :param queue: multiprocessing task queue
    :param lck: shared lock passed through to the calculator
    :param data_path: TICAS data directory used for initialization
    :param db_info: TeTRES database connection info
    :keyword rw_moe_param_json: MOE parameter dict forwarded to the calculator
    """
    from pyticas_tetres.db.tetres import conn
    from pyticas.infra import Infra
    from pyticas.tool import tb

    logger = getLogger(__name__)

    # one-time per-process setup
    logger.debug('[TT-Categorization Worker %d] starting...' % (idx))
    ticas.initialize(data_path)
    infra = Infra.get_infra()
    conn.connect(db_info)

    rw_moe_param_json = kwargs.get("rw_moe_param_json")
    da_route = TTRouteDataAccess()
    logger.debug('[TT-Categorization Worker %d] is ready' % (idx))

    while True:
        ttr_id, prd, num, total = queue.get()
        if prd is None:
            # shutdown sentinel -> release DB session and terminate the process
            da_route.close_session()
            exit(1)
        try:
            ttri = da_route.get_by_id(ttr_id)
            if not ttri:
                logger.debug(
                    '[TT-Categorization Worker %d] route is not found (%s)' % (idx, ttr_id))
                continue
            logger.debug(
                '[TT-Categorization Worker %d] (%d/%d) %s (id=%s) at %s' % (
                    idx, num, total, ttri.name, ttri.id, prd.get_date_string()))
            traveltime.calculate_tt_moe_a_route(
                prd, ttri, dbsession=da_route.get_session(), lock=lck,
                create_or_update=True, rw_moe_param_json=rw_moe_param_json)
            gc.collect()
        except Exception as err:
            logger.warning(
                '[TT-Categorization Worker %d] - fail to add travel time data' % idx)
            tb.traceback(err)
            continue
def init(self, app):
    """Connect all configured databases and start the background services.

    Connects the TeTRES DB (mandatory, with a schema-version check) and the
    optional CAD/IRIS DBs, loads the system configuration, then launches the
    result service, one admin worker, and the user-client worker pool.

    :param app: application object (not used directly here; part of the
        initializer interface)
    """
    # mandatory TeTRES database
    conn.connect(self.DB_INFO)
    conn.check_version()
    # optional source databases; imported lazily so missing configs cost nothing
    if self.CAD_DB_INFO:
        from pyticas_tetres.db.cad import conn as cad_conn
        cad_conn.connect(self.CAD_DB_INFO)
    if self.IRIS_DB_INFO:
        from pyticas_tetres.db.iris import conn as iris_conn
        iris_conn.connect(self.IRIS_DB_INFO)
    from pyticas_tetres.util import systemconfig
    systemconfig.initialize_system_config_info()
    rservice.start()
    # one admin worker; N user-client workers (N from configuration)
    admin_worker.start(1, self.DB_INFO, self.CAD_DB_INFO, self.IRIS_DB_INFO)
    user_workers.start(cfg.N_WORKERS_FOR_USER_CLIENT, self.DB_INFO, self.CAD_DB_INFO, self.IRIS_DB_INFO)
def _worker_process(id, queue, counters, lock, data_path, DB_INFO, CAD_DB_INFO, IRIS_DB_INFO):
    """Admin worker-process loop: run arbitrary queued task callables.

    Each queued item is ``(uid, task_added_time, task, args, kwargs)``; the
    task callable is invoked with the given arguments and any exception is
    logged without stopping the loop.

    :type id: int
    :type queue: Queue
    :type counters: dict
    :type lock: Lock
    :type data_path: str
    :type DB_INFO: dict
    :type CAD_DB_INFO: dict
    :type IRIS_DB_INFO: dict
    """
    from pyticas_tetres.db.tetres import conn
    from pyticas_tetres.db.iris import conn as iris_conn
    from pyticas_tetres.db.cad import conn as cad_conn

    logger = getLogger(__name__)

    # one-time per-process setup: data path, roadway network, DB connections
    logger.debug('[ADMIN WORKER %d] starting...' % (id))
    ticas.initialize(data_path)
    infra = Infra.get_infra()
    conn.connect(DB_INFO)
    cad_conn.connect(CAD_DB_INFO)
    iris_conn.connect(IRIS_DB_INFO)
    logger.debug('[ADMIN WORKER %d] is ready' % (id))

    while True:
        item = queue.get()
        (_uid, _task_added_time, _task, _args, _kwargs) = item
        try:
            logger.debug('[ADMIN WORKER %d] >>>>> start to run task (uid=%s)' % (id, _uid))
            _task(*_args, **_kwargs)
            logger.debug('[ADMIN WORKER %d] <<<<< end of task (uid=%s)' % (id, _uid))
        except Exception as err:
            tb.traceback(err)
            logger.debug(
                '[ADMIN WORKER %d] <<<<< end of task (exception occured) (uid=%s)' % (id, _uid))
# make the server sources importable when run from the repository root
sys.path.append("Server/src")
import global_settings
import dbinfo
if __name__ == '__main__':
    from pyticas import ticas
    from pyticas.infra import Infra
    from pyticas_tetres.db.cad import conn as conn_cad
    from pyticas_tetres.db.iris import conn as conn_iris
    from pyticas_tetres.db.tetres import conn
    # initialize TICAS and connect the three databases (TeTRES, CAD, IRIS)
    ticas.initialize(global_settings.DATA_PATH)
    infra = Infra.get_infra()
    conn.connect(dbinfo.tetres_db_info())
    conn_cad.connect(dbinfo.cad_db_info())
    conn_iris.connect(dbinfo.iris_incident_db_info())
    time.sleep(1)
    print('')
    print(
        '!! Do not run multiple instances of this program. (DB sync problem can be caused in bulk-insertion and deletion)')
    print('!! Stop TeTRES Server if it is running.')
    print('')
    print('# loads weather data for the given time period')
    print('')
    # prompt for the start of the load period (script continues below)
    sdt_str = input('# Enter start date to load data (e.g. 2015-01-01) : ')
    sdate = datetime.datetime.strptime(sdt_str, '%Y-%m-%d').date()