def _estimation_process(id, queue, counters, lock, data_path, DB_INFO, CAD_DB_INFO, IRIS_DB_INFO):
    """Long-running worker-process entry point for travel-time reliability estimation.

    Repeatedly pulls ``(route_id, estimation_param, uid)`` tasks from *queue*,
    runs one estimation per task, and — once the last outstanding task for a
    given *uid* finishes — packs that uid's result files into an archive.

    :type id: int
    :type queue: Queue
    :type counters: dict
    :type lock: Lock
    :type data_path: str
    :type DB_INFO: dict
    :type CAD_DB_INFO: dict
    :type IRIS_DB_INFO: dict
    """
    from pyticas_tetres.db.tetres import conn
    from pyticas_tetres.db.iris import conn as iris_conn
    from pyticas_tetres.db.cad import conn as cad_conn

    logger = getLogger(__name__)

    # one-time per-process initialization: TICAS data path, infra cache,
    # and the three database connections this worker needs
    logger.debug('[EST WORKER %d] starting...' % (id))
    ticas.initialize(data_path)
    infra = Infra.get_infra()
    conn.connect(DB_INFO)
    cad_conn.connect(CAD_DB_INFO)
    iris_conn.connect(IRIS_DB_INFO)

    # db session is created here
    ttr_da = TTRouteDataAccess()
    logger.debug('[EST WORKER %d] is ready' % (id))

    while True:
        a_route_id, eparam, uid = queue.get()
        try:
            logger.debug('[EST WORKER %d] >>>>> start estimation (uid=%s, route=%d)' % (id, uid, a_route_id))
            cloned_param = eparam.clone()
            try:
                cloned_param.add_start_time_offset(offset=5)
            except Exception as e:
                logger.debug('Could not add five minutes offset to the starting time. Error: {}'.format(e))
            cloned_param.travel_time_route = ttr_da.get_by_id(a_route_id)
            estimation.estimate(cloned_param, uid)
            logger.debug('[EST WORKER %d] <<<<< end of estimation (uid=%s, route=%d)' % (id, uid, a_route_id))
        except Exception as err:
            tb.traceback(err)
            logger.debug('[EST WORKER %d] <<<<< end of task (exception occured) (uid=%s)' % (id, uid))

        # Decrement the shared pending-task counter for this uid under the lock;
        # whichever worker completes the final task packs the results.
        should_pack = False
        with lock:
            counters[uid] -= 1
            if counters[uid] <= 0:
                del counters[uid]
                should_pack = True

        if should_pack:
            logger.debug('[EST WORKER %d] >>> make compressed file (uid=%s)' % (id, uid))
            _pack_result(uid)
            logger.debug('[EST WORKER %d] <<< end of making compressed file (uid=%s)' % (id, uid))
def _worker_process_to_specific_categorization(idx, queue, lck, data_path, db_info, **kwargs):
    """Worker-process loop applying a caller-selected subset of TT categorizers.

    Consumes ``(route_id, period, num, total)`` tasks from *queue* until a
    sentinel task with ``period is None`` arrives, then closes its DB session
    and terminates. For each task it loads the period's travel-time rows and
    runs every requested categorizer over them.

    :param idx: worker index (used only for log messages)
    :param queue: task queue shared with the producer process
    :param lck: multiprocessing lock forwarded to each categorizer
    :param data_path: TICAS data directory used for initialization
    :param db_info: connection info for the tetres database
    :param kwargs: must contain ``categorizer_names`` (iterable of str)
    """
    from pyticas_tetres.db.tetres import conn
    from pyticas.infra import Infra
    from pyticas.tool import tb
    from pyticas_tetres.rengine.cats import incident, snowmgmt, specialevent, weather, workzone

    logger = getLogger(__name__)

    # per-process initialization
    logger.debug('[TT-Categorization Worker %d] starting...' % (idx))
    ticas.initialize(data_path)
    infra = Infra.get_infra()
    conn.connect(db_info)

    categorizer_map = {
        "incident": incident,
        "snowmgmt": snowmgmt,
        "specialevent": specialevent,
        "weather": weather,
        "workzone": workzone,
    }
    categorizers = []
    # BUGFIX: previously an unknown name appended None to the list, which made
    # every task fail later with AttributeError inside the loop; a missing
    # "categorizer_names" kwarg crashed the worker outright.
    for categorizer_name in (kwargs.get("categorizer_names") or []):
        categorizer = categorizer_map.get(categorizer_name)
        if categorizer is None:
            logger.warning(
                '[TT-Categorization Worker %d] unknown categorizer name (%s) - skipped' % (idx, categorizer_name))
            continue
        categorizers.append(categorizer)

    da_route = TTRouteDataAccess()
    logger.debug('[TT-Categorization Worker %d] is ready' % (idx))

    while True:
        ttr_id, prd, num, total = queue.get()
        if prd is None:
            # sentinel: release the DB session and terminate this process
            da_route.close_session()
            exit(1)
        try:
            ttri = da_route.get_by_id(ttr_id)
            if not ttri:
                logger.debug(
                    '[TT-Categorization Worker %d] route is not found (%s)' % (idx, ttr_id))
                continue
            logger.debug(
                '[TT-Categorization Worker %d] (%d/%d) %s (id=%s) at %s' % (
                    idx, num, total, ttri.name, ttri.id, prd.get_date_string()))

            # fetch the travel-time rows of the task's period, then categorize
            tt_da = TravelTimeDataAccess(prd.start_date.year)
            tt_data_list = tt_da.list_by_period(ttri.id, prd)
            tt_da.close_session()
            for categorizer in categorizers:
                categorizer.categorize(ttri, prd, tt_data_list, lock=lck)
            gc.collect()
        except Exception as ex:
            tb.traceback(ex)
            continue
def _worker_process_to_calculate_tt_and_categorize(idx, queue, lck, data_path, db_info):
    """Worker-process loop that computes travel times and then categorizes them.

    Consumes ``(route_id, period, num, total)`` tasks from *queue* until a
    sentinel task with ``period is None`` arrives, then closes its DB session
    and terminates. For each task it calculates the route's travel times for
    the period, reloads the stored rows, and runs the full categorizer chain
    (weather, incident, workzone, specialevent, snowmgmt) over them.

    :param idx: worker index (used only for log messages)
    :param queue: task queue shared with the producer process
    :param lck: multiprocessing lock forwarded to the calculators
    :param data_path: TICAS data directory used for initialization
    :param db_info: connection info for the tetres database
    """
    from pyticas_tetres.db.tetres import conn
    from pyticas.infra import Infra
    from pyticas.tool import tb
    from pyticas_tetres.rengine.cats import weather, incident, snowmgmt, specialevent, workzone

    logger = getLogger(__name__)

    # per-process initialization
    logger.debug('[TT-Categorization Worker %d] starting...' % (idx))
    ticas.initialize(data_path)
    infra = Infra.get_infra()
    conn.connect(db_info)

    categorizers = [weather, incident, workzone, specialevent, snowmgmt]
    da_route = TTRouteDataAccess()
    logger.debug('[TT-Categorization Worker %d] is ready' % (idx))

    while True:
        ttr_id, prd, num, total = queue.get()
        if prd is None:
            # sentinel: release the DB session and terminate this process
            da_route.close_session()
            exit(1)
        try:
            ttri = da_route.get_by_id(ttr_id)
            if not ttri:
                logger.debug(
                    '[TT-Categorization Worker %d] route is not found (%s)' % (idx, ttr_id))
                continue
            logger.debug(
                '[TT-Categorization Worker %d] (%d/%d) %s (id=%s) at %s' % (
                    idx, num, total, ttri.name, ttri.id, prd.get_date_string()))

            # step 1: compute and store travel times for this route/period
            is_inserted = traveltime.calculate_a_route(
                prd, ttri, dbsession=da_route.get_session(), lock=lck)
            if not is_inserted:
                logger.warning(
                    '[TT-Categorization Worker %d] - fail to add travel time data' % idx)

            # step 2: reload the stored rows and run every categorizer on them
            tt_da = TravelTimeDataAccess(prd.start_date.year)
            tt_data_list = tt_da.list_by_period(ttri.id, prd)
            tt_da.close_session()
            for categorizer in categorizers:
                categorizer.categorize(ttri, prd, tt_data_list, lock=lck)
            gc.collect()
        except Exception as ex:
            tb.traceback(ex)
            continue
def _tt_route(ttr_id):
    """Load a travel-time route record by its database id.

    :type ttr_id: int
    :rtype: pyticas_tetres.ttypes.TTRouteInfo
    """
    ttrda = TTRouteDataAccess()
    try:
        # get_by_id may return None for an unknown id; pass that through
        return ttrda.get_by_id(ttr_id)
    finally:
        # BUGFIX: release the DB session even when get_by_id raises
        # (previously an exception leaked the session); also removed
        # leftover commented-out code
        ttrda.close_session()
def tetres_route_opposite_route():
    """HTTP handler: build the opposite-direction route of a travel-time route.

    Reads the route id from the ``id`` form field, loads the route, and
    returns the opposite-direction route configuration as a success response,
    or a failure response when the route cannot be loaded/derived.
    """
    route_id = request.form.get('id')
    da = TTRouteDataAccess()
    try:
        ttri = da.get_by_id(route_id)
    finally:
        # BUGFIX: release the session even if the lookup raises
        da.close_session()
    if not ttri:
        # BUGFIX: an unknown id previously raised AttributeError on ttri.route;
        # report a clean failure instead
        return prot.response_fail('route is not found')
    route_setup(ttri.route)
    opposite_route = route.opposite_route(ttri.route)
    if not isinstance(opposite_route, Route):
        return prot.response_fail(
            'fail to load_data route configuration file')
    return prot.response_success(opposite_route)
def _worker_process_to_update_moe_values(start_date, end_date, db_info, **kwargs):
    """Recompute travel-time/MOE values for the reference route over a date range.

    Builds one 00:00-23:55 period per day between *start_date* and *end_date*
    (all weekdays, holidays kept) and calls
    ``traveltime.calculate_tt_moe_a_route`` with ``create_or_update=True`` for
    each period.

    :param start_date: first date of the range
    :param end_date: last date of the range
    :param db_info: tetres DB connection info; connection is (re)opened when truthy
    :param kwargs: must contain ``rw_moe_param_json`` (dict) which itself holds
        ``reference_tt_route_id`` — the travel-time route to update
    """
    from pyticas_tetres.db.tetres import conn
    from pyticas.tool import tb
    logger = getLogger(__name__)
    stime = datetime.time(0, 0, 0, 0)
    etime = datetime.time(23, 55, 0, 0)
    # one full-day period per calendar day, at TT_DATA_INTERVAL resolution
    daily_periods = period.create_periods(start_date, end_date, stime, etime, cfg.TT_DATA_INTERVAL,
                                          target_days=[0, 1, 2, 3, 4, 5, 6],
                                          remove_holiday=False)
    logger.debug('>>> Starting Multi Processing (duration= %s to %s)' % (start_date, end_date))
    rw_moe_param_json = kwargs.get("rw_moe_param_json")
    reference_tt_route_id = rw_moe_param_json.get('reference_tt_route_id')
    if db_info:
        conn.connect(db_info)
    da_route = TTRouteDataAccess()
    ttri = da_route.get_by_id(reference_tt_route_id)
    if not ttri:
        # NOTE(review): early return leaves da_route's session open — confirm
        # whether close_session() should be called here as well
        logger.debug('route is not found (%s)' % (reference_tt_route_id))
        return
    for pidx, prd in enumerate(daily_periods):
        if prd is None:
            # NOTE(review): presumably dead code — create_periods is not shown
            # to yield None; kept as a defensive guard. exit(1) terminates the
            # whole process, matching the queue-sentinel pattern of the other
            # workers in this file.
            da_route.close_session()
            exit(1)
        try:
            # NOTE(review): close_session() immediately followed by
            # get_session() each iteration — this appears to rely on the
            # DataAccess layer re-opening a session on demand; verify, as it
            # mirrors a copy-paste from the queue-based workers where
            # close_session() only runs on the sentinel task.
            da_route.close_session()
            traveltime.calculate_tt_moe_a_route(
                prd, ttri, dbsession=da_route.get_session(),
                create_or_update=True,
                rw_moe_param_json=rw_moe_param_json)
            gc.collect()
        except Exception as ex:
            # best-effort: log the traceback and continue with the next day
            tb.traceback(ex)
            continue
def _calculate_tt_for_routes(start_date, end_date, ttr_ids):
    """Calculate travel times for the given routes over each day of a date range.

    Builds one 00:00-23:55 period per day (all weekdays, holidays kept) and
    runs ``traveltime.calculate_a_route`` for every (period, route id) pair.

    :param start_date: first date of the range
    :param end_date: last date of the range
    :param ttr_ids: iterable of travel-time route ids to process
    """
    stime = datetime.time(0, 0, 0, 0)
    etime = datetime.time(23, 55, 0, 0)
    da_route = TTRouteDataAccess()
    try:
        daily_periods = period.create_periods(start_date, end_date, stime, etime, cfg.TT_DATA_INTERVAL,
                                              target_days=[0, 1, 2, 3, 4, 5, 6],
                                              remove_holiday=False)
        for prd in daily_periods:
            for ttr_id in ttr_ids:
                ttri = da_route.get_by_id(ttr_id)
                if not ttri:
                    # robustness: an unknown id previously passed None to
                    # calculate_a_route; skip it instead
                    continue
                traveltime.calculate_a_route(
                    prd, ttri, dbsession=da_route.get_session())
    finally:
        # BUGFIX: the DB session was never released before
        da_route.close_session()
def _worker_process_to_create_or_update_tt_and_moe(idx, queue, lck, data_path, db_info, **kwargs):
    """Worker-process loop that creates or refreshes travel-time/MOE records.

    Consumes ``(route_id, period, num, total)`` tasks from *queue* until a
    sentinel task with ``period is None`` arrives, then closes its DB session
    and terminates. Each task triggers ``traveltime.calculate_tt_moe_a_route``
    with ``create_or_update=True``.

    :param idx: worker index (used only for log messages)
    :param queue: task queue shared with the producer process
    :param lck: multiprocessing lock forwarded to the calculator
    :param data_path: TICAS data directory used for initialization
    :param db_info: connection info for the tetres database
    :param kwargs: may contain ``rw_moe_param_json`` forwarded to the calculator
    """
    from pyticas_tetres.db.tetres import conn
    from pyticas.infra import Infra
    from pyticas.tool import tb

    logger = getLogger(__name__)

    # per-process initialization
    logger.debug('[TT-Categorization Worker %d] starting...' % (idx))
    ticas.initialize(data_path)
    infra = Infra.get_infra()
    conn.connect(db_info)

    rw_moe_param_json = kwargs.get("rw_moe_param_json")
    da_route = TTRouteDataAccess()
    logger.debug('[TT-Categorization Worker %d] is ready' % (idx))

    while True:
        ttr_id, prd, num, total = queue.get()
        if prd is None:
            # sentinel: release the DB session and terminate this process
            da_route.close_session()
            exit(1)
        try:
            ttri = da_route.get_by_id(ttr_id)
            if not ttri:
                logger.debug(
                    '[TT-Categorization Worker %d] route is not found (%s)' % (idx, ttr_id))
                continue
            logger.debug(
                '[TT-Categorization Worker %d] (%d/%d) %s (id=%s) at %s' % (
                    idx, num, total, ttri.name, ttri.id, prd.get_date_string()))
            traveltime.calculate_tt_moe_a_route(
                prd, ttri,
                dbsession=da_route.get_session(),
                lock=lck,
                create_or_update=True,
                rw_moe_param_json=rw_moe_param_json)
            gc.collect()
        except Exception as ex:
            logger.warning(
                '[TT-Categorization Worker %d] - fail to add travel time data' % idx)
            tb.traceback(ex)
            continue
def traveltime_info(ttr_id, weather_type, depart_time, dbsession=None):
    """Build public travel-time information for a route.

    Returns (a) the time-of-day reliability table for the regime matching
    *weather_type* and *depart_time*, and (b) today's measured travel times up
    to the most recent 5-minute boundary.

    :type ttr_id: int
    :type weather_type: int
    :type depart_time: datetime.datetime
    :rtype: list[dict], list[float]
    """
    logger = getLogger(__name__)
    logger.debug(
        '# public travel time information is requested (route_id=%s, weather_type=%s, depart_time=%s)' % (
            ttr_id, weather_type, depart_time))

    ttrda = TTRouteDataAccess(session=dbsession)
    ttri = ttrda.get_by_id(ttr_id)

    # normalize weather_type: must be an int in the known set, otherwise
    # derive it from the actual weather at the departure time
    if weather_type:
        try:
            weather_type = int(weather_type)
        # BUGFIX: was a bare `except:` which also swallowed SystemExit /
        # KeyboardInterrupt; only conversion failures should fall through
        except (TypeError, ValueError):
            weather_type = None
    if not weather_type or weather_type not in [WC_NORMAL, WC_RAIN, WC_SNOW]:
        weather_type = _weather(depart_time, ttri.route)

    regime_type = _regime_type(weather_type, depart_time)
    logger.debug(' > regime type = %d (%s)' % (regime_type, REGIME_STRING[regime_type]))

    da = TODReliabilityDataAccess(session=ttrda.get_session())
    tods = da.list_by_route(ttr_id, regime_type)

    res = []
    # debug-log only the 60th entry up to 12 entries before the end
    # (mirrors the res[60:-12] slice returned below)
    dbg_from, dbg_to = 60, len(tods) - 12
    for idx, tod in enumerate(tods):
        tod_res = json.loads(tod.result)
        if not tod_res:
            continue
        if idx >= dbg_from and idx < dbg_to:
            logger.debug(' - time=%02d:%02d, avg_tt=%s, 95%%p_tt=%s, count=%s' % (
                tod.hour, tod.minute, tod_res['avg_tt'], tod_res['percentile_tts']['95'], tod_res['count']))
        res.append({
            'hour': tod.hour,
            'minute': tod.minute,
            'avg_tt': _roundup(tod_res['avg_tt']),
            'p95_tt': _roundup(tod_res['percentile_tts']['95']),
            'p90_tt': _roundup(tod_res['percentile_tts']['90']),
            'p85_tt': _roundup(tod_res['percentile_tts']['85']),
            'p80_tt': _roundup(tod_res['percentile_tts']['80']),
            'count': tod_res['count']
        })

    # today's measured travel times: from midnight up to the departure time,
    # clamped to "now" and snapped down to a 5 minute interval boundary
    today_to = depart_time
    now = datetime.datetime.now()
    if today_to >= now:
        today_to = now
    delta = (today_to.minute - math.floor(today_to.minute / 5) * 5) * 60 + today_to.second
    today_to = today_to - datetime.timedelta(seconds=delta)
    try:
        today_from = datetime.datetime.combine(today_to.date(), datetime.time(0, 0, 0))
        prd = period.Period(today_from, today_to, cfg.TT_DATA_INTERVAL)
        tts = moe.travel_time(ttri.route, prd)
        tts = moe.imputation(tts, imp_module=time_avg)
        traveltimes = _moving_average(tts[-1].data, 5)
    except Exception as ex:
        # BUGFIX: Logger.warn is deprecated (removed in Python 3.13);
        # use warning() on the already-created logger and keep the traceback
        logger.warning('error to calculate travel times')
        tb.traceback(ex)
        traveltimes = []
    traveltimes = _roundup(traveltimes)

    ttrda.close_session()
    return res[60:-12], traveltimes[60:]