import global_settings
import dbinfo  # FIX: used below (tetres_db_info etc.) but was never imported
from pyticas import tetresconf


def read_host_ip():
    """Read the server host IP from the `tetres.conf` file.

    :rtype: str
    """
    return tetresconf.get_property('ticas.python_server_host_ip')


if __name__ == '__main__':
    port = 5000
    TeTRES_DB_INFO = dbinfo.tetres_db_info()
    CAD_DB_INFO = dbinfo.cad_db_info()
    IRIS_DB_INFO = dbinfo.iris_incident_db_info()

    # BUG FIX: the original test was
    #   `.capitalize() is "TRUE"`
    # which is wrong twice over: `str.capitalize()` produces "True" (never
    # "TRUE"), and `is` compares object identity, not equality — so the flag
    # could never be enabled.  Normalize case and use `==` instead.
    download_flag = tetresconf.get_property("ticas.download_traffic_data_files") or ""
    global_settings.DOWNLOAD_TRAFFIC_DATA_FILES = (download_flag.strip().upper() == "TRUE")

    print(f"DOWNLOAD_TRAFFIC_DATA_FILES: {global_settings.DOWNLOAD_TRAFFIC_DATA_FILES}")
    print('DATA PATH : ', global_settings.DATA_PATH)

    from colorama import init as initialize_colorama
    initialize_colorama(autoreset=True)

    # import required modules
import sys  # FIX: sys.path was modified below without importing sys

sys.path.append("Server/src")

import time      # FIX: time.sleep() was called without importing time
import datetime  # FIX: datetime.datetime.strptime() was called without importing datetime

import global_settings
import dbinfo

if __name__ == '__main__':
    # Project modules are imported after sys.path has been extended so that
    # "Server/src" is resolvable.
    from pyticas import ticas
    from pyticas.infra import Infra
    from pyticas_tetres.db.cad import conn as conn_cad
    from pyticas_tetres.db.iris import conn as conn_iris
    from pyticas_tetres.db.tetres import conn

    # Initialize the TICAS data environment, then open the three DB connections
    # (TeTRES, CAD, IRIS) used by the loader.
    ticas.initialize(global_settings.DATA_PATH)
    infra = Infra.get_infra()
    conn.connect(dbinfo.tetres_db_info())
    conn_cad.connect(dbinfo.cad_db_info())
    conn_iris.connect(dbinfo.iris_incident_db_info())
    time.sleep(1)

    print('')
    print(
        '!! Do not run multiple instances of this program. (DB sync problem can be caused in bulk-insertion and deletion)')
    print('!! Stop TeTRES Server if it is running.')
    print('')
    print('# loads weather data for the given time period')
    print('')
    sdt_str = input('# Enter start date to load data (e.g. 2015-01-01) : ')
    sdate = datetime.datetime.strptime(sdt_str, '%Y-%m-%d').date()
import sys

sys.path.append("Server/src")

import time  # FIX: time.sleep() was called without importing time

import global_settings
import dbinfo

if __name__ == '__main__':
    # Project modules are imported after sys.path has been extended so that
    # "Server/src" is resolvable.
    from pyticas import ticas
    from pyticas.infra import Infra
    from pyticas_tetres.db.cad import conn as conn_cad
    from pyticas_tetres.db.iris import conn as conn_iris
    from pyticas_tetres.db.tetres import conn

    # Initialize the TICAS data environment, then open the three DB connections
    # (TeTRES, CAD, IRIS) used by the loader.
    ticas.initialize(global_settings.DATA_PATH)
    infra = Infra.get_infra()
    conn.connect(dbinfo.tetres_db_info())
    conn_cad.connect(dbinfo.cad_db_info())
    conn_iris.connect(dbinfo.iris_incident_db_info())
    time.sleep(1)

    print('')
    print(
        '!! Do not run multiple instances of this program. (DB sync problem can be caused in bulk-insertion and deletion)'
    )
    print('!! Stop TeTRES Server if it is running.')
    print('')
    print(
        '# Have you defined the travel time reliability route in administrator client?'
    )
    print('# This program loads weather and incident data,')
import sys

sys.path.append("Server/src")

import time  # FIX: time.sleep() was called without importing time

import global_settings
import dbinfo

if __name__ == '__main__':
    # Project modules are imported after sys.path has been extended so that
    # "Server/src" is resolvable.
    from pyticas import ticas
    from pyticas.infra import Infra
    from pyticas_tetres.db.cad import conn as conn_cad
    from pyticas_tetres.db.iris import conn as conn_iris
    from pyticas_tetres.db.tetres import conn

    # Initialize the TICAS data environment, then open the three DB connections
    # (TeTRES, CAD, IRIS) used by the calculator.
    ticas.initialize(global_settings.DATA_PATH)
    infra = Infra.get_infra()
    conn.connect(dbinfo.tetres_db_info())
    conn_cad.connect(dbinfo.cad_db_info())
    conn_iris.connect(dbinfo.iris_incident_db_info())
    time.sleep(1)

    print('')
    print(
        '!! Do not run multiple instances of this program. (DB sync problem can be caused in bulk-insertion and deletion)'
    )
    print('!! Stop TeTRES Server if it is running.')
    print('')
    print(
        '# Have you defined the travel time reliability route in administrator client?'
    )
    print('# calculates travel times during the given time period')
import sys

sys.path.append("Server/src")

import time  # FIX: time.sleep() was called without importing time

import global_settings
import dbinfo

if __name__ == '__main__':
    # Project modules are imported after sys.path has been extended so that
    # "Server/src" is resolvable.
    from pyticas import ticas
    from pyticas.infra import Infra
    from pyticas_tetres.db.cad import conn as conn_cad
    from pyticas_tetres.db.iris import conn as conn_iris
    from pyticas_tetres.db.tetres import conn

    # Initialize the TICAS data environment, then open the three DB connections
    # (TeTRES, CAD, IRIS) used by the categorizer.
    ticas.initialize(global_settings.DATA_PATH)
    infra = Infra.get_infra()
    conn.connect(dbinfo.tetres_db_info())
    conn_cad.connect(dbinfo.cad_db_info())
    conn_iris.connect(dbinfo.iris_incident_db_info())
    time.sleep(1)

    print('')
    print(
        '!! Do not run multiple instances of this program. (DB sync problem can be caused in bulk-insertion and deletion)')
    print('!! Stop TeTRES Server if it is running.')
    print('')
    print('# Have you defined the travel time reliability route in administrator client?')
    print('# categorizes travel times during the given time period')
    print('')
    sdt_str = input('# Enter start date to load data (e.g. 2015-01-01) : ')
def _handler_systemconfig(da, item, action_log):
    """Re-synchronize stored data after a system-configuration change.

    Dispatches on the kind of change recorded in `action_log`:
    data-archive start year extended/shrunk, or one of the categorization
    parameter sets (incident / workzone / special-event / snow-management),
    and re-runs or deletes the corresponding data for every
    travel-time-reliability route.

    :type da: pyticas_tetres.da.config.ConfigDataAccess
    :type item: pyticas_tetres.ttypes.SystemConfigInfo
    :type action_log: pyticas_tetres.ttypes.ActionLogInfo
    :return: True when the change was fully handled, False on failure.
        NOTE(review): the "extended" branch can also return a
        (False, message) tuple — confirm callers accept both shapes.
    """
    # Snapshot the configured routes once; each branch below iterates them.
    ttr_da = TTRouteDataAccess()
    routes = ttr_da.list()
    ttr_da.close_session()

    # Full archive window: Jan 1 of the configured start year up to "now"
    # minus the daily-job offset, split into per-day periods.
    start_date = datetime.datetime.strptime('%s-01-01' % cfg.DATA_ARCHIVE_START_YEAR, '%Y-%m-%d')
    last_date = datetime.datetime.now() - datetime.timedelta(days=cfg.DAILY_JOB_OFFSET_DAYS)
    daily_periods = _get_daily_periods(start_date, last_date)

    # faverolles 1/16/2020 NOTE: If the AdminClient changes the Archive Start Year parameter
    #  and an entry is made in the action_log database table.
    #  The server will repeatedly rerun initial_data_maker which is also run by (dataloader.py)
    #  until this entry is removed from the database.
    #  The issue is that the entry won't be removed because "target data is not handled" which
    #  i think means "until all traffic data is downloaded" for the archive start year.
    #  This never happens because the traffic data is hundreds of GB's.
    #
    if action_log.data_desc.startswith('DATA_ARCHIVE_START_YEAR_EXTENDED'):
        # calculate travel time data and the related non-traffic data during the extended years
        # data_desc looks like "DATA_ARCHIVE_START_YEAR_EXTENDED: <prev> -> <changed>"
        _, year_change = action_log.data_desc.split(':')
        prev_year, changed_year = year_change.split('->')
        prev_year, changed_year = int(prev_year.strip()), int(changed_year.strip())
        # The extension covers everything up to Dec 31 of the year before the
        # previous start year.
        prev_end_date = datetime.datetime.strptime('%s-12-31' % (prev_year - 1), '%Y-%m-%d').date()
        try:
            # faverolles 1/16/2020 NOTE: Why is there no parameter db_info passed
            #  I'm guessing its expected to fail because try-catch maybe?
            from pyticas_tetres.util.traffic_file_checker import has_traffic_files
            start_date_str, end_date_str = start_date.strftime('%Y-%m-%d'), prev_end_date.strftime('%Y-%m-%d')
            # Bail out early when the raw traffic files for the new range are
            # not on disk yet; the returned message explains why.
            if not has_traffic_files(start_date_str, end_date_str):
                return False, "Missing traffic files for the given time range from {} to {}.".format(start_date_str, end_date_str)
            import dbinfo
            initial_data_maker.run(start_date.date(), prev_end_date, db_info=dbinfo.tetres_db_info())
            return True
        except Exception as ex:
            getLogger(__name__).warning(
                'exception occured when handling SystemConfig - Data Archive Start Year (Extended) : %s'
                % tb.traceback(ex, f_print=False))
            return False
    elif action_log.data_desc.startswith('DATA_ARCHIVE_START_YEAR_SHRINKED'):
        # delete the travel time data and the related non-traffic data during the shrinked years
        _, year_change = action_log.data_desc.split(':')
        prev_year, changed_year = year_change.split('->')
        prev_year, changed_year = int(prev_year.strip()), int(changed_year.strip())
        # Years dropped from the archive: [prev_year, changed_year)
        years = [y for y in range(prev_year, changed_year)]
        for y in years:
            # Whole-year window for year y.
            sdt = datetime.datetime.strptime('%s-01-01 00:00:00' % y, '%Y-%m-%d %H:%M:%S')
            edt = datetime.datetime.strptime('%s-12-31 23:59:59' % y, '%Y-%m-%d %H:%M:%S')
            try:
                # Travel-time tables are per-year, per-route.
                tt_da = TravelTimeDataAccess(y)
                for a_route in routes:
                    tt_da.delete_range(a_route.id, sdt, edt)
                tt_da.close_session()

                # Weather data is per-year; delete the whole year's range.
                weather_da = NoaaWeatherDataAccess(y)
                weather_da.delete_range(None, None, start_time=sdt, end_time=edt)
                weather_da.commit()
                weather_da.close_session()

                # Incident data is not partitioned by year.
                incident_da = IncidentDataAccess()
                incident_da.delete_range_all(start_time=sdt, end_time=edt)
                incident_da.commit()
                incident_da.close_session()
            except Exception as ex:
                getLogger(__name__).warning(
                    'exception occured when handling SystemConfig - Data Archive Start Year (Shrinked) : %s'
                    % tb.traceback(ex, f_print=False))
                return False
    elif action_log.target_datatype == ActionLogDataAccess.DT_INCIDENT:
        # Incident categorization parameters changed: re-categorize every
        # route for every day in the archive window.
        for a_route in routes:
            for prd in daily_periods:
                try:
                    categorization.categorize(a_route, prd,
                                              categorizers=[categorization.incident])
                except Exception as ex:
                    getLogger(__name__).warning(
                        'exception occured when handling SystemConfig - Incident Parameters Changes : %s'
                        % tb.traceback(ex, f_print=False))
                    return False
    elif action_log.target_datatype == ActionLogDataAccess.DT_WORKZONE:
        # Workzone parameters changed: same re-categorization, workzone only.
        for a_route in routes:
            for prd in daily_periods:
                try:
                    categorization.categorize(a_route, prd,
                                              categorizers=[categorization.workzone])
                except Exception as ex:
                    getLogger(__name__).warning(
                        'exception occured when handling SystemConfig - Workzone Parameters Changes : %s'
                        % tb.traceback(ex, f_print=False))
                    return False
    elif action_log.target_datatype == ActionLogDataAccess.DT_SPECIALEVENT:
        # Special-event parameters changed.
        for a_route in routes:
            for prd in daily_periods:
                try:
                    categorization.categorize(a_route, prd,
                                              categorizers=[categorization.specialevent])
                except Exception as ex:
                    getLogger(__name__).warning(
                        'exception occured when handling SystemConfig - SpecialEvent Parameters Changes : %s'
                        % tb.traceback(ex, f_print=False))
                    return False
    elif action_log.target_datatype == ActionLogDataAccess.DT_SNOWMGMT:
        # Snow-management parameters changed.
        for a_route in routes:
            for prd in daily_periods:
                try:
                    categorization.categorize(a_route, prd,
                                              categorizers=[categorization.snowmgmt])
                except Exception as ex:
                    getLogger(__name__).warning(
                        'exception occured when handling SystemConfig - SnowManagement Parameters Changes : %s'
                        % tb.traceback(ex, f_print=False))
                    return False
    return True
import sys

sys.path.append("Server/src")

import time  # FIX: time.sleep() was called without importing time

import global_settings
import dbinfo

if __name__ == '__main__':
    # Project modules are imported after sys.path has been extended so that
    # "Server/src" is resolvable.
    from pyticas import ticas
    from pyticas.infra import Infra
    from pyticas_tetres.db.cad import conn as conn_cad
    from pyticas_tetres.db.iris import conn as conn_iris
    from pyticas_tetres.db.tetres import conn

    # Initialize the TICAS data environment, then open the three DB connections
    # (TeTRES, CAD, IRIS) used by the TOD-reliability calculation.
    ticas.initialize(global_settings.DATA_PATH)
    infra = Infra.get_infra()
    conn.connect(dbinfo.tetres_db_info())
    conn_cad.connect(dbinfo.cad_db_info())
    conn_iris.connect(dbinfo.iris_incident_db_info())
    time.sleep(1)

    print('')
    print('!! Do not run multiple instances of this program. (DB sync problem can be caused in bulk-insertion and deletion)')
    print('!! Stop TeTRES Server if it is running.')
    print('')
    print('# This program calculates time-of-day (TOD) reliability for all routes.')
    print('# If you entered target-date as 2014-02-10,')
    print('# TOD reliability will be calculated using travel time data during 2013-02-10 ~ 2014-02-09')
    print('')