print( "Please check if you have put the traffic files in the proper directory structure." ) print("Failed to calculate moe for the given time range.") exit(1) print('') print('!! Data during the given time period will be deleted.') res = input('!! Do you want to proceed data loading process ? [N/y] : ') if res.lower() not in ['y', 'ye', 'yes']: print('\nAported!') exit(1) filename = '_initial_data_maker.log' with open(filename, 'w') as f: f.write('started at ' + datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') + '\n') from pyticas_tetres.systasks import initial_data_maker try: initial_data_maker.run(sdate, edate, db_info=dbinfo.tetres_db_info()) with open(filename, 'a+') as f: f.write('ended at ' + datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') + '\n') except Exception as ex: print('exception:', ex) with open(filename, 'a+') as f: f.write('exception occured at ' + datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') + '\n')
def _handler_systemconfig(da, item, action_log):
    """
    :type da: pyticas_tetres.da.config.ConfigDataAccess
    :type item: pyticas_tetres.ttypes.SystemConfigInfo
    :type action_log: pyticas_tetres.ttypes.ActionLogInfo
    """
    ttr_da = TTRouteDataAccess()
    routes = ttr_da.list()
    ttr_da.close_session()

    start_date = datetime.datetime.strptime('%s-01-01' % cfg.DATA_ARCHIVE_START_YEAR, '%Y-%m-%d')
    last_date = datetime.datetime.now() - datetime.timedelta(days=cfg.DAILY_JOB_OFFSET_DAYS)
    daily_periods = _get_daily_periods(start_date, last_date)

    # faverolles 1/16/2020 NOTE: If the AdminClient changes the Archive Start Year parameter,
    #  an entry is made in the action_log database table.
    #  The server will repeatedly rerun initial_data_maker (which is also run by dataloader.py)
    #  until this entry is removed from the database.
    #  The issue is that the entry won't be removed because "target data is not handled", which
    #  I think means "until all traffic data is downloaded" for the archive start year.
    #  This never happens because the traffic data is hundreds of GBs.
    #
    if action_log.data_desc.startswith('DATA_ARCHIVE_START_YEAR_EXTENDED'):
        # calculate travel time data and the related non-traffic data during the extended years
        _, year_change = action_log.data_desc.split(':')
        prev_year, changed_year = year_change.split('->')
        prev_year, changed_year = int(prev_year.strip()), int(changed_year.strip())
        prev_end_date = datetime.datetime.strptime('%s-12-31' % (prev_year - 1), '%Y-%m-%d').date()
        try:
            # faverolles 1/16/2020 NOTE: Why is no db_info parameter passed here?
            #  I'm guessing it's expected to fail because of the try-except, maybe?
            from pyticas_tetres.util.traffic_file_checker import has_traffic_files
            start_date_str, end_date_str = start_date.strftime('%Y-%m-%d'), prev_end_date.strftime('%Y-%m-%d')
            if not has_traffic_files(start_date_str, end_date_str):
                return False, "Missing traffic files for the given time range from {} to {}.".format(
                    start_date_str, end_date_str)
            import dbinfo
            initial_data_maker.run(start_date.date(), prev_end_date, db_info=dbinfo.tetres_db_info())
            return True
        except Exception as ex:
            getLogger(__name__).warning(
                'exception occurred when handling SystemConfig - Data Archive Start Year (Extended) : %s'
                % tb.traceback(ex, f_print=False))
            return False

    elif action_log.data_desc.startswith('DATA_ARCHIVE_START_YEAR_SHRINKED'):
        # delete the travel time data and the related non-traffic data during the removed years
        _, year_change = action_log.data_desc.split(':')
        prev_year, changed_year = year_change.split('->')
        prev_year, changed_year = int(prev_year.strip()), int(changed_year.strip())
        years = [y for y in range(prev_year, changed_year)]
        for y in years:
            sdt = datetime.datetime.strptime('%s-01-01 00:00:00' % y, '%Y-%m-%d %H:%M:%S')
            edt = datetime.datetime.strptime('%s-12-31 23:59:59' % y, '%Y-%m-%d %H:%M:%S')
            try:
                tt_da = TravelTimeDataAccess(y)
                for a_route in routes:
                    tt_da.delete_range(a_route.id, sdt, edt)
                tt_da.close_session()

                weather_da = NoaaWeatherDataAccess(y)
                weather_da.delete_range(None, None, start_time=sdt, end_time=edt)
                weather_da.commit()
                weather_da.close_session()

                incident_da = IncidentDataAccess()
                incident_da.delete_range_all(start_time=sdt, end_time=edt)
                incident_da.commit()
                incident_da.close_session()
            except Exception as ex:
                getLogger(__name__).warning(
                    'exception occurred when handling SystemConfig - Data Archive Start Year (Shrinked) : %s'
                    % tb.traceback(ex, f_print=False))
                return False

    elif action_log.target_datatype == ActionLogDataAccess.DT_INCIDENT:
        for a_route in routes:
            for prd in daily_periods:
                try:
                    categorization.categorize(a_route, prd, categorizers=[categorization.incident])
                except Exception as ex:
                    getLogger(__name__).warning(
                        'exception occurred when handling SystemConfig - Incident Parameters Changes : %s'
                        % tb.traceback(ex, f_print=False))
                    return False

    elif action_log.target_datatype == ActionLogDataAccess.DT_WORKZONE:
        for a_route in routes:
            for prd in daily_periods:
                try:
                    categorization.categorize(a_route, prd, categorizers=[categorization.workzone])
                except Exception as ex:
                    getLogger(__name__).warning(
                        'exception occurred when handling SystemConfig - Workzone Parameters Changes : %s'
                        % tb.traceback(ex, f_print=False))
                    return False

    elif action_log.target_datatype == ActionLogDataAccess.DT_SPECIALEVENT:
        for a_route in routes:
            for prd in daily_periods:
                try:
                    categorization.categorize(a_route, prd, categorizers=[categorization.specialevent])
                except Exception as ex:
                    getLogger(__name__).warning(
                        'exception occurred when handling SystemConfig - SpecialEvent Parameters Changes : %s'
                        % tb.traceback(ex, f_print=False))
                    return False

    elif action_log.target_datatype == ActionLogDataAccess.DT_SNOWMGMT:
        for a_route in routes:
            for prd in daily_periods:
                try:
                    categorization.categorize(a_route, prd, categorizers=[categorization.snowmgmt])
                except Exception as ex:
                    getLogger(__name__).warning(
                        'exception occurred when handling SystemConfig - SnowManagement Parameters Changes : %s'
                        % tb.traceback(ex, f_print=False))
                    return False

    return True
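# NOTE: an illustrative example (the value below is assumed, not taken from the database)
#  of the data_desc format this handler expects for archive-start-year changes, mirroring
#  the split(':') / split('->') parsing above:
#
#     data_desc = 'DATA_ARCHIVE_START_YEAR_EXTENDED:2018 -> 2015'
#     _, year_change = data_desc.split(':')
#     prev_year, changed_year = [int(v.strip()) for v in year_change.split('->')]
#     # prev_year == 2018, changed_year == 2015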