def traveltime_route_list():
    """
    :rtype: list[dict]
    """
    da = TTRouteDataAccess()
    ttris = da.list()
    da.close_session()
    res = []
    for ttri in ttris:
        stations = ttri.route.get_stations()
        res.append({
            'id': ttri.id,
            'name': ttri.name,
            'start_station': {
                'station_id': stations[0].station_id,
                'label': stations[0].label,
                'lat': stations[0].lat,
                'lon': stations[0].lon
            },
            'end_station': {
                'station_id': stations[-1].station_id,
                'label': stations[-1].label,
                'lat': stations[-1].lat,
                'lon': stations[-1].lon
            }
        })
    return res
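
# Usage sketch (illustrative, not part of the original source): each entry
# returned by traveltime_route_list() summarizes one travel-time route by its
# first and last station, so a caller can list the endpoints like this.
def _example_print_route_endpoints():
    for r in traveltime_route_list():
        print('%s: %s -> %s' % (r['name'],
                                r['start_station']['station_id'],
                                r['end_station']['station_id']))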
def tetres_user_route_list():
    # retrieve user parameter
    corridor_name = request.form.get('corridor')
    da = TTRouteDataAccess()
    ttris = list(da.list_by_corridor(corridor_name, order_by=('name', 'desc'), window_size=10000))
    da.close_session()
    return prot.response_success({'list': ttris})
def _worker_process_to_specific_categorization(idx, queue, lck, data_path, db_info, **kwargs):
    from pyticas_tetres.db.tetres import conn
    from pyticas.infra import Infra
    from pyticas.tool import tb
    from pyticas_tetres.rengine.cats import incident, snowmgmt, specialevent, weather, workzone

    logger = getLogger(__name__)

    # initialize
    logger.debug('[TT-Categorization Worker %d] starting...' % idx)
    ticas.initialize(data_path)
    infra = Infra.get_infra()
    conn.connect(db_info)

    # build the list of categorizer modules requested by the caller
    categorizers = []
    categorizer_names = kwargs.get("categorizer_names")
    categorizer_map = {
        "incident": incident,
        "snowmgmt": snowmgmt,
        "specialevent": specialevent,
        "weather": weather,
        "workzone": workzone
    }
    for categorizer_name in categorizer_names:
        categorizers.append(categorizer_map.get(categorizer_name))

    da_route = TTRouteDataAccess()
    logger.debug('[TT-Categorization Worker %d] is ready' % idx)
    while True:
        ttr_id, prd, num, total = queue.get()
        if prd is None:
            da_route.close_session()
            exit(1)
        try:
            ttri = da_route.get_by_id(ttr_id)
            if not ttri:
                logger.debug('[TT-Categorization Worker %d] route is not found (%s)' % (idx, ttr_id))
                continue
            logger.debug('[TT-Categorization Worker %d] (%d/%d) %s (id=%s) at %s'
                         % (idx, num, total, ttri.name, ttri.id, prd.get_date_string()))
            tt_da = TravelTimeDataAccess(prd.start_date.year)
            tt_data_list = tt_da.list_by_period(ttri.id, prd)
            tt_da.close_session()
            for cidx, categorizer in enumerate(categorizers):
                n_inserted = categorizer.categorize(ttri, prd, tt_data_list, lock=lck)
            gc.collect()
        except Exception as ex:
            tb.traceback(ex)
            continue
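
# Usage sketch (illustrative, not part of the original source): launching the
# worker above. Note that the multiprocess runners in this file spawn workers
# with positional args only, so the categorizer selection has to be passed as
# an explicit `kwargs` argument to Process, as sketched here.
def _example_launch_specific_categorization(db_info, categorizer_names):
    from multiprocessing import Manager, Process, Lock
    m = Manager()
    queue = m.Queue()
    lck = Lock()
    procs = []
    for idx in range(DEFAULT_NUMBER_OF_PROCESSES):
        p = Process(target=_worker_process_to_specific_categorization,
                    args=(idx, queue, lck, ticas._TICAS_.data_path, db_info),
                    kwargs={'categorizer_names': categorizer_names})
        p.start()
        procs.append(p)
    return m, queue, procs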
def _estimation_process(id, queue, counters, lock, data_path, DB_INFO, CAD_DB_INFO, IRIS_DB_INFO):
    """
    :type id: int
    :type queue: Queue
    :type counters: dict
    :type lock: Lock
    :type data_path: str
    :type DB_INFO: dict
    :type CAD_DB_INFO: dict
    :type IRIS_DB_INFO: dict
    """
    from pyticas_tetres.db.tetres import conn
    from pyticas_tetres.db.iris import conn as iris_conn
    from pyticas_tetres.db.cad import conn as cad_conn

    logger = getLogger(__name__)

    # initialize
    logger.debug('[EST WORKER %d] starting...' % id)
    ticas.initialize(data_path)
    infra = Infra.get_infra()
    conn.connect(DB_INFO)
    cad_conn.connect(CAD_DB_INFO)
    iris_conn.connect(IRIS_DB_INFO)

    # db session is created here
    ttr_da = TTRouteDataAccess()

    logger.debug('[EST WORKER %d] is ready' % id)
    while True:
        (a_route_id, eparam, uid) = queue.get()
        try:
            logger.debug('[EST WORKER %d] >>>>> start estimation (uid=%s, route=%d)' % (id, uid, a_route_id))
            _eparam = eparam.clone()
            try:
                _eparam.add_start_time_offset(offset=5)
            except Exception as e:
                logger.debug('Could not add five minutes offset to the starting time. Error: {}'.format(e))
            _eparam.travel_time_route = ttr_da.get_by_id(a_route_id)
            estimation.estimate(_eparam, uid)
            logger.debug('[EST WORKER %d] <<<<< end of estimation (uid=%s, route=%d)' % (id, uid, a_route_id))
        except Exception as ex:
            tb.traceback(ex)
            logger.debug('[EST WORKER %d] <<<<< end of task (exception occurred) (uid=%s)' % (id, uid))

        should_pack = False
        with lock:
            counters[uid] = counters[uid] - 1
            if counters[uid] <= 0:
                del counters[uid]
                should_pack = True

        if should_pack:
            logger.debug('[EST WORKER %d] >>> make compressed file (uid=%s)' % (id, uid))
            _pack_result(uid)
            logger.debug('[EST WORKER %d] <<< end of making compressed file (uid=%s)' % (id, uid))
def _tt_route(ttr_id):
    """
    :type ttr_id: int
    :rtype: pyticas_tetres.ttypes.TTRouteInfo
    """
    ttrda = TTRouteDataAccess()
    ttri = ttrda.get_by_id(ttr_id)
    # ttri = ttrda.list()[0]
    ttrda.close_session()
    return ttri
def tetres_route_opposite_route():
    route_id = request.form.get('id')
    da = TTRouteDataAccess()
    ttri = da.get_by_id(route_id)
    da.close_session()
    route_setup(ttri.route)
    opposite_route = route.opposite_route(ttri.route)
    if not isinstance(opposite_route, Route):
        return prot.response_fail('failed to load route configuration file')
    return prot.response_success(opposite_route)
def run(target_date):
    """
    :type target_date: datetime.datetime
    :return:
    """
    ttr_route_da = TTRouteDataAccess()
    route_list = ttr_route_da.list()
    ttr_route_da.close_session()
    for ttri in route_list:
        try:
            traveltime_info.calculate_TOD_reliabilities(ttri.id, target_date)
        except Exception as ex:
            tb.traceback(ex)
            getLogger(__name__).warning('Failed to calculate TOD reliabilities for route=%d' % ttri.id)
def _calculate_tt_for_routes(start_date, end_date, ttr_ids):
    stime = datetime.time(0, 0, 0, 0)
    etime = datetime.time(23, 55, 0, 0)
    da_route = TTRouteDataAccess()
    daily_periods = period.create_periods(start_date, end_date, stime, etime, cfg.TT_DATA_INTERVAL,
                                          target_days=[0, 1, 2, 3, 4, 5, 6], remove_holiday=False)
    for pidx, prd in enumerate(daily_periods):
        for ridx, ttr_id in enumerate(ttr_ids):
            ttri = da_route.get_by_id(ttr_id)
            is_inserted = traveltime.calculate_a_route(prd, ttri, dbsession=da_route.get_session())
    da_route.close_session()
def _run_multi_process(target_function, target_date, db_info):
    """
    :type target_function: callable
    :type target_date: datetime.date
    :type db_info: dict
    """
    logger = getLogger(__name__)
    logger.debug('>>> Starting Multi Processing (target-date=%s)' % target_date)

    m = Manager()
    queue = m.Queue()
    lck = Lock()

    N = DEFAULT_NUMBER_OF_PROCESSES
    data_path = ticas._TICAS_.data_path

    procs = []
    for idx in range(N):
        p = Process(target=target_function, args=(idx, queue, lck, data_path, db_info))
        p.start()
        procs.append(p)

    ttr_route_da = TTRouteDataAccess()
    ttr_ids = [ttri.id for ttri in ttr_route_da.list()]
    ttr_route_da.close_session()

    real_target_date = datetime.datetime.combine(target_date, datetime.time(12, 0, 0, 0))

    total = len(ttr_ids)
    for ridx, ttr_id in enumerate(ttr_ids):
        queue.put((ttr_id, real_target_date, (ridx + 1), total))

    # sentinel tasks to make the worker processes exit
    for idx in range(N * 3):
        queue.put((None, None, None, None))

    for p in procs:
        try:
            p.join()
        except:
            pass

    # flush queue
    while not queue.empty():
        queue.get()

    logger.debug('<<< End of Multi Processing (target-date=%s)' % target_date)
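
# Usage sketch (illustrative, not part of the original source): the runner
# above enqueues (ttr_id, datetime, num, total) tuples, so its
# `target_function` must be a worker with the signature
# (idx, queue, lck, data_path, db_info) that consumes tuples of that shape.
# `_worker_process_for_target_date` is a hypothetical placeholder for such a
# worker, not a function defined in this file.
def _example_run_for_yesterday(db_info):
    target_date = datetime.date.today() - datetime.timedelta(days=1)
    _run_multi_process(_worker_process_for_target_date, target_date, db_info)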
def run(prd):
    """
    :type prd: pyticas.ttypes.Period
    :return:
    """
    tlogger = task_logger.get_task_logger(TASK_LOGGER_NAME, capacity=365)

    ttr_route_da = TTRouteDataAccess()
    routes = ttr_route_da.list()
    ttr_route_da.close_session()

    logger = getLogger(__name__)
    has_error = 0
    for ttri in routes:
        try:
            result = DataCategorizer.categorize(ttri, prd)
            if result['has_error']:
                logger.debug(' - error occurred when doing categorization for route %s (id=%s) during %s'
                             % (ttri.name, ttri.id, prd.get_date_string()))
                tlogger.add_log({
                    'time': tlogger.now(),
                    'route_id': ttri.id,
                    'target_period': prd,
                    'failed': True
                })
                has_error += 1
        except Exception as ex:
            logger.debug(' - exception occurred when doing categorization for route %s (id=%s) during %s'
                         % (ttri.name, ttri.id, prd.get_date_string()))
            tlogger.add_log({
                'time': tlogger.now(),
                'route_id': ttri.id,
                'target_period': prd,
                'failed': True
            })
            has_error += 1

    logger.debug(' - categorization for %s routes is done (has_error=%s)' % (len(routes), has_error))
    tlogger.set_registry('last_executed', tlogger.now())
    tlogger.save()
def calculate_all_routes(prd, **kwargs):
    """ calculate travel time, average speed and VMT during the given time period
    and put the whole data set into the database (travel time table)

    :type prd: pyticas.ttypes.Period
    :rtype: list[dict]
    """
    logger = getLogger(__name__)
    logger.info('calculating travel time : %s' % prd.get_period_string())

    res = []
    ttr_route_da = TTRouteDataAccess()
    routes = ttr_route_da.list()
    ttr_route_da.close_session()
    total = len(routes)
    for ridx, ttri in enumerate(routes):
        logger.info('(%d/%d) calculating travel time for %s(%s) : %s'
                    % ((ridx + 1), total, ttri.name, ttri.id, prd.get_period_string()))
        is_inserted = calculate_a_route(prd, ttri, lock=kwargs.get('lock', nonop_with()))
        res.append({'route_id': ttri.id, 'done': is_inserted})
    return res
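
# Usage sketch (illustrative, not part of the original source): building the
# daily Period objects for one date and running the calculation for every
# route. It reuses this module's `period`, `cfg`, and `datetime` imports, and
# the `create_periods` arguments mirror the pattern used elsewhere in this file.
def _example_calculate_all_routes_for_a_day(a_date):
    prds = period.create_periods(a_date, a_date,
                                 datetime.time(0, 0, 0, 0), datetime.time(23, 55, 0, 0),
                                 cfg.TT_DATA_INTERVAL,
                                 target_days=[0, 1, 2, 3, 4, 5, 6], remove_holiday=False)
    results = []
    for prd in prds:
        results.extend(calculate_all_routes(prd))
    return results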
def _worker_process_to_update_moe_values(start_date, end_date, db_info, **kwargs):
    from pyticas_tetres.db.tetres import conn
    from pyticas.tool import tb

    logger = getLogger(__name__)
    stime = datetime.time(0, 0, 0, 0)
    etime = datetime.time(23, 55, 0, 0)
    daily_periods = period.create_periods(start_date, end_date, stime, etime, cfg.TT_DATA_INTERVAL,
                                          target_days=[0, 1, 2, 3, 4, 5, 6], remove_holiday=False)
    logger.debug('>>> Starting Multi Processing (duration=%s to %s)' % (start_date, end_date))

    rw_moe_param_json = kwargs.get("rw_moe_param_json")
    reference_tt_route_id = rw_moe_param_json.get('reference_tt_route_id')
    if db_info:
        conn.connect(db_info)

    da_route = TTRouteDataAccess()
    ttri = da_route.get_by_id(reference_tt_route_id)
    if not ttri:
        logger.debug('route is not found (%s)' % reference_tt_route_id)
        da_route.close_session()
        return

    for pidx, prd in enumerate(daily_periods):
        try:
            traveltime.calculate_tt_moe_a_route(prd, ttri,
                                                dbsession=da_route.get_session(),
                                                create_or_update=True,
                                                rw_moe_param_json=rw_moe_param_json)
            gc.collect()
        except Exception as ex:
            tb.traceback(ex)
            continue
    da_route.close_session()
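
# Illustrative sketch (not part of the original source): a minimal
# `rw_moe_param_json` for the worker above. Only 'reference_tt_route_id' is
# read directly here; the remaining keys mirror the parameters of
# `create_rw_moe_param_object` used further below and are assumptions about
# the full schema, as are the example values and units.
def _example_rw_moe_param_json(route_id):
    return {
        'reference_tt_route_id': route_id,       # read by the worker above
        'moe_critical_density': 40.0,            # assumed key (veh/mi/lane)
        'moe_lane_capacity': 2200.0,             # assumed key (veh/hr/lane)
        'moe_congestion_threshold_speed': 45.0,  # assumed key (mph)
    }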
def _worker_process_to_calculate_tt_and_categorize(idx, queue, lck, data_path, db_info):
    from pyticas_tetres.db.tetres import conn
    from pyticas.infra import Infra
    from pyticas.tool import tb
    from pyticas_tetres.rengine.cats import weather, incident, snowmgmt, specialevent, workzone

    logger = getLogger(__name__)

    # initialize
    logger.debug('[TT-Categorization Worker %d] starting...' % idx)
    ticas.initialize(data_path)
    infra = Infra.get_infra()
    conn.connect(db_info)
    categorizers = [weather, incident, workzone, specialevent, snowmgmt]

    da_route = TTRouteDataAccess()
    logger.debug('[TT-Categorization Worker %d] is ready' % idx)
    while True:
        ttr_id, prd, num, total = queue.get()
        if prd is None:
            da_route.close_session()
            exit(1)
        try:
            ttri = da_route.get_by_id(ttr_id)
            if not ttri:
                logger.debug('[TT-Categorization Worker %d] route is not found (%s)' % (idx, ttr_id))
                continue
            logger.debug('[TT-Categorization Worker %d] (%d/%d) %s (id=%s) at %s'
                         % (idx, num, total, ttri.name, ttri.id, prd.get_date_string()))
            is_inserted = traveltime.calculate_a_route(prd, ttri, dbsession=da_route.get_session(), lock=lck)
            if not is_inserted:
                logger.warning('[TT-Categorization Worker %d] - failed to add travel time data' % idx)
            tt_da = TravelTimeDataAccess(prd.start_date.year)
            tt_data_list = tt_da.list_by_period(ttri.id, prd)
            tt_da.close_session()
            for cidx, categorizer in enumerate(categorizers):
                n_inserted = categorizer.categorize(ttri, prd, tt_data_list, lock=lck)
            gc.collect()
        except Exception as ex:
            tb.traceback(ex)
            continue
def _worker_process_to_create_or_update_tt_and_moe(idx, queue, lck, data_path, db_info, **kwargs):
    from pyticas_tetres.db.tetres import conn
    from pyticas.infra import Infra
    from pyticas.tool import tb

    logger = getLogger(__name__)

    # initialize
    logger.debug('[TT-Categorization Worker %d] starting...' % idx)
    ticas.initialize(data_path)
    infra = Infra.get_infra()
    conn.connect(db_info)
    rw_moe_param_json = kwargs.get("rw_moe_param_json")

    da_route = TTRouteDataAccess()
    logger.debug('[TT-Categorization Worker %d] is ready' % idx)
    while True:
        ttr_id, prd, num, total = queue.get()
        if prd is None:
            da_route.close_session()
            exit(1)
        try:
            ttri = da_route.get_by_id(ttr_id)
            if not ttri:
                logger.debug('[TT-Categorization Worker %d] route is not found (%s)' % (idx, ttr_id))
                continue
            logger.debug('[TT-Categorization Worker %d] (%d/%d) %s (id=%s) at %s'
                         % (idx, num, total, ttri.name, ttri.id, prd.get_date_string()))
            traveltime.calculate_tt_moe_a_route(prd, ttri,
                                                dbsession=da_route.get_session(), lock=lck,
                                                create_or_update=True,
                                                rw_moe_param_json=rw_moe_param_json)
            gc.collect()
        except Exception as ex:
            logger.warning('[TT-Categorization Worker %d] - failed to add travel time data' % idx)
            tb.traceback(ex)
            continue
def traveltime_info(ttr_id, weather_type, depart_time, dbsession=None):
    """
    :type ttr_id: int
    :type weather_type: int
    :type depart_time: datetime.datetime
    :rtype: list[dict], list[float]
    """
    logger = getLogger(__name__)
    logger.debug('# public travel time information is requested (route_id=%s, weather_type=%s, depart_time=%s)'
                 % (ttr_id, weather_type, depart_time))

    ttrda = TTRouteDataAccess(session=dbsession)
    ttri = ttrda.get_by_id(ttr_id)

    if weather_type:
        try:
            weather_type = int(weather_type)
        except:
            weather_type = None

    if not weather_type or weather_type not in [WC_NORMAL, WC_RAIN, WC_SNOW]:
        weather_type = _weather(depart_time, ttri.route)

    regime_type = _regime_type(weather_type, depart_time)
    logger.debug(' > regime type = %d (%s)' % (regime_type, REGIME_STRING[regime_type]))

    da = TODReliabilityDataAccess(session=ttrda.get_session())
    tods = da.list_by_route(ttr_id, regime_type)
    res = []
    dbg_from, dbg_to = 60, len(tods) - 12
    for idx, tod in enumerate(tods):
        tod_res = json.loads(tod.result)
        if not tod_res:
            continue
        if dbg_from <= idx < dbg_to:
            logger.debug(' - time=%02d:%02d, avg_tt=%s, 95%%p_tt=%s, count=%s'
                         % (tod.hour, tod.minute, tod_res['avg_tt'],
                            tod_res['percentile_tts']['95'], tod_res['count']))
        res.append({
            'hour': tod.hour,
            'minute': tod.minute,
            'avg_tt': _roundup(tod_res['avg_tt']),
            'p95_tt': _roundup(tod_res['percentile_tts']['95']),
            'p90_tt': _roundup(tod_res['percentile_tts']['90']),
            'p85_tt': _roundup(tod_res['percentile_tts']['85']),
            'p80_tt': _roundup(tod_res['percentile_tts']['80']),
            'count': tod_res['count']
        })

    today_to = depart_time
    now = datetime.datetime.now()
    if today_to >= now:
        today_to = now

    # align to the 5 minute data interval
    delta = (today_to.minute - math.floor(today_to.minute / 5) * 5) * 60 + today_to.second
    today_to = today_to - datetime.timedelta(seconds=delta)

    try:
        today_from = datetime.datetime.combine(today_to.date(), datetime.time(0, 0, 0))
        prd = period.Period(today_from, today_to, cfg.TT_DATA_INTERVAL)
        tts = moe.travel_time(ttri.route, prd)
        tts = moe.imputation(tts, imp_module=time_avg)
        traveltimes = _moving_average(tts[-1].data, 5)
    except Exception as ex:
        logger.warning('failed to calculate travel times')
        traveltimes = []

    traveltimes = _roundup(traveltimes)
    ttrda.close_session()

    return res[60:-12], traveltimes[60:]
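
# Minimal sketches (not part of the original source) of the two private
# helpers the function above relies on but which are not shown here. The real
# implementations may differ; these only document the expected behavior:
# `_roundup` rounds a number (or every number in a list), and
# `_moving_average` smooths a series with a trailing window.
def _roundup_sketch(v, ndigits=1):
    if isinstance(v, list):
        return [_roundup_sketch(x, ndigits) for x in v]
    return round(v, ndigits) if v is not None else None

def _moving_average_sketch(data, window):
    smoothed = []
    for i in range(len(data)):
        chunk = [d for d in data[max(0, i - window + 1):i + 1] if d is not None]
        smoothed.append(sum(chunk) / len(chunk) if chunk else None)
    return smoothed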
    print('\nAborted!')
    exit(1)

filename = '_initial_data_maker.log'
with open(filename, 'w') as f:
    f.write('started at ' + datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') + '\n')

from pyticas_tetres.systasks import initial_data_maker
from pyticas_tetres.da.route import TTRouteDataAccess

try:
    if route_ids:
        ttr_ids = route_ids
    else:
        ttr_route_da = TTRouteDataAccess()
        ttr_ids = [ttri.id for ttri in ttr_route_da.list()]
        ttr_route_da.close_session()
    initial_data_maker.create_or_update_tt_and_moe(sdate, edate,
                                                   db_info=dbinfo.tetres_db_info(),
                                                   rw_moe_param_json=rw_moe_param_json,
                                                   route_ids=route_ids)
    for route_id in ttr_ids:
        rw_moe_param_info = create_rw_moe_param_object(route_id,
                                                       moe_critical_density,
                                                       moe_lane_capacity,
                                                       moe_congestion_threshold_speed,
def _handler_systemconfig(da, item, action_log):
    """
    :type da: pyticas_tetres.da.config.ConfigDataAccess
    :type item: pyticas_tetres.ttypes.SystemConfigInfo
    :type action_log: pyticas_tetres.ttypes.ActionLogInfo
    """
    ttr_da = TTRouteDataAccess()
    routes = ttr_da.list()
    ttr_da.close_session()

    start_date = datetime.datetime.strptime('%s-01-01' % cfg.DATA_ARCHIVE_START_YEAR, '%Y-%m-%d')
    last_date = datetime.datetime.now() - datetime.timedelta(days=cfg.DAILY_JOB_OFFSET_DAYS)
    daily_periods = _get_daily_periods(start_date, last_date)

    # faverolles 1/16/2020 NOTE: If the AdminClient changes the Archive Start Year parameter,
    # an entry is made in the action_log database table.
    # The server will repeatedly rerun initial_data_maker (which is also run by dataloader.py)
    # until this entry is removed from the database.
    # The issue is that the entry won't be removed because "target data is not handled", which
    # I think means "until all traffic data is downloaded" for the archive start year.
    # This never happens because the traffic data is hundreds of GBs.

    if action_log.data_desc.startswith('DATA_ARCHIVE_START_YEAR_EXTENDED'):
        # calculate travel time data and the related non-traffic data during the extended years
        _, year_change = action_log.data_desc.split(':')
        prev_year, changed_year = year_change.split('->')
        prev_year, changed_year = int(prev_year.strip()), int(changed_year.strip())
        prev_end_date = datetime.datetime.strptime('%s-12-31' % (prev_year - 1), '%Y-%m-%d').date()
        try:
            # faverolles 1/16/2020 NOTE: Why is there no parameter db_info passed?
            # I'm guessing it's expected to fail because of the try-catch, maybe?
            from pyticas_tetres.util.traffic_file_checker import has_traffic_files
            start_date_str, end_date_str = start_date.strftime('%Y-%m-%d'), prev_end_date.strftime('%Y-%m-%d')
            if not has_traffic_files(start_date_str, end_date_str):
                return False, "Missing traffic files for the given time range from {} to {}.".format(
                    start_date_str, end_date_str)
            import dbinfo
            initial_data_maker.run(start_date.date(), prev_end_date, db_info=dbinfo.tetres_db_info())
            return True
        except Exception as ex:
            getLogger(__name__).warning(
                'exception occurred when handling SystemConfig - Data Archive Start Year (Extended) : %s'
                % tb.traceback(ex, f_print=False))
            return False

    elif action_log.data_desc.startswith('DATA_ARCHIVE_START_YEAR_SHRINKED'):
        # delete the travel time data and the related non-traffic data during the removed years
        _, year_change = action_log.data_desc.split(':')
        prev_year, changed_year = year_change.split('->')
        prev_year, changed_year = int(prev_year.strip()), int(changed_year.strip())
        years = [y for y in range(prev_year, changed_year)]
        for y in years:
            sdt = datetime.datetime.strptime('%s-01-01 00:00:00' % y, '%Y-%m-%d %H:%M:%S')
            edt = datetime.datetime.strptime('%s-12-31 23:59:59' % y, '%Y-%m-%d %H:%M:%S')
            try:
                tt_da = TravelTimeDataAccess(y)
                for a_route in routes:
                    tt_da.delete_range(a_route.id, sdt, edt)
                tt_da.close_session()

                weather_da = NoaaWeatherDataAccess(y)
                weather_da.delete_range(None, None, start_time=sdt, end_time=edt)
                weather_da.commit()
                weather_da.close_session()

                incident_da = IncidentDataAccess()
                incident_da.delete_range_all(start_time=sdt, end_time=edt)
                incident_da.commit()
                incident_da.close_session()
            except Exception as ex:
                getLogger(__name__).warning(
                    'exception occurred when handling SystemConfig - Data Archive Start Year (Shrinked) : %s'
                    % tb.traceback(ex, f_print=False))
                return False

    elif action_log.target_datatype == ActionLogDataAccess.DT_INCIDENT:
        for a_route in routes:
            for prd in daily_periods:
                try:
                    categorization.categorize(a_route, prd, categorizers=[categorization.incident])
                except Exception as ex:
                    getLogger(__name__).warning(
                        'exception occurred when handling SystemConfig - Incident Parameters Changes : %s'
                        % tb.traceback(ex, f_print=False))
                    return False

    elif action_log.target_datatype == ActionLogDataAccess.DT_WORKZONE:
        for a_route in routes:
            for prd in daily_periods:
                try:
                    categorization.categorize(a_route, prd, categorizers=[categorization.workzone])
                except Exception as ex:
                    getLogger(__name__).warning(
                        'exception occurred when handling SystemConfig - Workzone Parameters Changes : %s'
                        % tb.traceback(ex, f_print=False))
                    return False

    elif action_log.target_datatype == ActionLogDataAccess.DT_SPECIALEVENT:
        for a_route in routes:
            for prd in daily_periods:
                try:
                    categorization.categorize(a_route, prd, categorizers=[categorization.specialevent])
                except Exception as ex:
                    getLogger(__name__).warning(
                        'exception occurred when handling SystemConfig - SpecialEvent Parameters Changes : %s'
                        % tb.traceback(ex, f_print=False))
                    return False

    elif action_log.target_datatype == ActionLogDataAccess.DT_SNOWMGMT:
        for a_route in routes:
            for prd in daily_periods:
                try:
                    categorization.categorize(a_route, prd, categorizers=[categorization.snowmgmt])
                except Exception as ex:
                    getLogger(__name__).warning(
                        'exception occurred when handling SystemConfig - SnowManagement Parameters Changes : %s'
                        % tb.traceback(ex, f_print=False))
                    return False

    return True
def _run_multi_process(target_function, start_date, end_date, db_info):
    """
    :type target_function: callable
    :type start_date: datetime.date
    :type end_date: datetime.date
    :type db_info: dict
    """
    logger = getLogger(__name__)
    stime = datetime.time(0, 0, 0, 0)
    etime = datetime.time(23, 55, 0, 0)
    daily_periods = period.create_periods(start_date, end_date, stime, etime, cfg.TT_DATA_INTERVAL,
                                          target_days=[0, 1, 2, 3, 4, 5, 6], remove_holiday=False)
    logger.debug('>>> Starting Multi Processing (duration=%s to %s)' % (start_date, end_date))

    m = Manager()
    queue = m.Queue()
    lck = Lock()

    N = DEFAULT_NUMBER_OF_PROCESSES
    data_path = ticas._TICAS_.data_path

    procs = []
    for idx in range(N):
        p = Process(target=target_function, args=(idx, queue, lck, data_path, db_info))
        p.start()
        procs.append(p)

    ttr_route_da = TTRouteDataAccess()
    ttr_ids = [ttri.id for ttri in ttr_route_da.list()]
    ttr_route_da.close_session()

    total = len(daily_periods) * len(ttr_ids)
    cnt = 1
    for pidx, prd in enumerate(daily_periods):
        for ridx, ttr_id in enumerate(ttr_ids):
            queue.put((ttr_id, prd, cnt, total))
            cnt += 1

    # sentinel tasks to make the worker processes exit
    for idx in range(N * 3):
        queue.put((None, None, None, None))

    for p in procs:
        try:
            p.join()
        except:
            pass

    # flush queue
    while not queue.empty():
        queue.get()

    logger.debug('<<< End of Multi Processing (duration=%s to %s)' % (start_date, end_date))
def ff_api_create_new_route():
    da = TTRouteDataAccess()
    tablename = da.get_tablename()
    da.close_session()
    return tablename
def ff_api_get_all_routes():
    da = TTRouteDataAccess()
    routes = da.list()
    da.close_session()
    return json.dumps(routes)
def api_extension_all_routes():
    da = TTRouteDataAccess()
    routes = da.list()
    da.close_session()
    return json.dumps(routes)
def test_data():
    import datetime
    from pyticas import route
    from pyticas_tetres.ttypes import WorkZoneInfo, WorkZoneGroupInfo, TTRouteInfo, SpecialEventInfo
    from pyticas_tetres.da.wz import WorkZoneDataAccess
    from pyticas_tetres.da.wz_group import WZGroupDataAccess
    from pyticas_tetres.da.specialevent import SpecialEventDataAccess
    from pyticas_tetres.da.route import TTRouteDataAccess

    # route data
    r1 = route.create_route('S38', 'S40', name='Route I-35W NB')  # I-35W NB
    r2 = route.create_route('S186', 'S188', name='Route I-494 WB')  # I-494 WB
    r3 = route.create_route('S428', 'S430', name='Route US-169 NB')  # US169 NB
    ri1 = TTRouteInfo(r1)
    ri2 = TTRouteInfo(r2)
    ri3 = TTRouteInfo(r3)

    dsRoute = TTRouteDataAccess()
    dsWZ = WorkZoneDataAccess(session=dsRoute.da_base.session)
    dsWZGroup = WZGroupDataAccess(session=dsRoute.da_base.session)
    dsSE = SpecialEventDataAccess(session=dsRoute.da_base.session)

    rm = dsRoute.insert(ri1)
    rm = dsRoute.insert(ri2)
    rm = dsRoute.insert(ri3)

    # workzone data
    def _wzg(idx, r1, r2, y1, y2):
        wgi = WorkZoneGroupInfo()
        wgi.name = 'test wz group %d' % idx
        wgi.desc = 'this is test wz group %d' % idx
        wgi.years = WorkZoneGroupInfo.years_string(y1, y2)
        wgi.corridors = WorkZoneGroupInfo.corridor_string([r1, r2])
        return wgi

    def _wzi(idx, r1, r2, y1, y2, wgid):
        wi = WorkZoneInfo()
        wi.wz_group_id = wgid
        wi.route1 = r1
        wi.route2 = r2
        wi.memo = 'memo of test wz %d' % idx
        sdt = datetime.datetime(y1, 3, 2, 1, 0, 0)
        edt = datetime.datetime(y2, 7, 2, 1, 0, 0)
        wi.start_time = sdt.strftime('%Y-%m-%d %H:%M:%S')
        wi.end_time = edt.strftime('%Y-%m-%d %H:%M:%S')
        wi.years = WorkZoneInfo.years_string(y1, y2)
        return wi

    r1 = route.create_route('S38', 'S40', name='Route I-35W NB')  # I-35W NB
    r2 = route.opposite_route(r1)
    r3 = route.create_route('S186', 'S188', name='Route I-494 WB')  # I-494 WB
    r4 = route.opposite_route(r3)

    wgi1 = _wzg(1, r1, r2, 2012, 2012)
    wgi2 = _wzg(2, r3, r4, 2012, 2013)
    wgi3 = _wzg(3, r1, r2, 2014, 2016)

    wgm1 = dsWZGroup.insert(wgi1)
    wgm2 = dsWZGroup.insert(wgi2)
    wgm3 = dsWZGroup.insert(wgi3)

    wi1 = _wzi(1, r1, r2, 2012, 2012, wgm1.id)
    wi2 = _wzi(2, r3, r4, 2012, 2013, wgm2.id)
    wi3 = _wzi(3, r1, r2, 2014, 2016, wgm3.id)

    wm1 = dsWZ.insert(wi1)
    wm2 = dsWZ.insert(wi2)
    wm3 = dsWZ.insert(wi3)

    # special event data
    def _sei(idx, y1, m1, d1, y2, m2, d2, att):
        sei = SpecialEventInfo()
        sei.name = 'test se %d' % idx
        sei.description = 'this is test se %d' % idx
        sdt = datetime.datetime(y1, m1, d1, 1, 0, 0)
        edt = datetime.datetime(y2, m2, d2, 1, 0, 0)
        sei.start_time = sei.datetime2str(sdt)
        sei.end_time = sei.datetime2str(edt)
        sei.set_years()
        sei.attendance = att
        sei.lon = -93.331893
        sei.lat = 44.970797
        return sei

    se1 = _sei(1, 2014, 3, 1, 2014, 3, 2, 1000)
    se2 = _sei(2, 2012, 4, 1, 2012, 4, 2, 20000)
    se3 = _sei(3, 2016, 5, 1, 2016, 5, 2, 300)

    sem1 = dsSE.insert(se1)
    sem2 = dsSE.insert(se2)
    sem3 = dsSE.insert(se3)

    dsWZ.close_session()
    dsSE.close_session()
    dsRoute.close_session()