def _handler_ttroute(da, item, action_log):
    """ Handle a travel-time-route change: recompute travel times and
    re-categorize every daily period for the route.

    :type da: pyticas_tetres.da.route.TTRouteDataAccess
    :type item: pyticas_tetres.ttypes.TTRouteInfo
    :type action_log: pyticas_tetres.ttypes.ActionLogInfo
    """
    logger = getLogger(__name__)

    # Before doing any work, make sure raw traffic files exist for the whole
    # archive range (archive start year .. today minus the job offset).
    try:
        from pyticas_tetres.util.traffic_file_checker import has_traffic_files
        first_day = datetime.date(cfg.DATA_ARCHIVE_START_YEAR, 1, 1)
        final_day = datetime.date.today() - datetime.timedelta(days=cfg.DAILY_JOB_OFFSET_DAYS)
        start_date_str = first_day.strftime('%Y-%m-%d')
        end_date_str = final_day.strftime('%Y-%m-%d')
        if not has_traffic_files(start_date_str, end_date_str):
            return False, "Missing traffic files for the given time range from {} to {}.".format(start_date_str, end_date_str)
    except Exception as e:
        logger.warning(
            'Exception occured while checking if traffic files exist during handling travel time routes. Error: {}'.format(
                e))

    # Step 1: calculate travel time; Step 2: categorize each day that produced rows.
    n_inserted = 0
    for prd in _get_all_daily_periods():
        try:
            row_ids = traveltime.calculate_a_route(prd, item)
            if row_ids:
                categorization.categorize(item, prd)
                n_inserted += len(row_ids)
        except Exception as ex:
            logger.warning(
                'Exception occured when handling route changes : %s' % tb.traceback(ex, f_print=False))

    # truthy only when at least one travel-time row was written
    return n_inserted > 0
def _checkup_tt_for_a_route(ttri):
    """ Check travel-time data completeness for a route, drilling down from
    yearly to monthly to daily counts, and re-run the travel-time calculation
    for periods that look incomplete.

    :type ttri: pyticas_tetres.ttypes.TTRouteInfo
    :rtype: None
    """
    # a day with at least 90% of the expected rows is considered complete
    R_TH = 0.9

    logger = getLogger(__name__)
    logger.debug('>>> checkup travel time data for a route : %s' % ttri)

    yearly_periods = _get_yearly_periods()
    for prd in yearly_periods:
        year = prd.start_date.year
        n = _get_count_of_tt_data(ttri, prd)
        expected = _get_expected_tt_data(prd)
        logger.debug('  - n of tt data for %s = %s/%d' % (year, n, expected))
        # NOTE(review): this skips years where MORE than DAILY_N rows are
        # missing and drills into the nearly-complete ones; the comparison
        # looks inverted (one would expect `expected - n < DAILY_N` to mean
        # "complete enough, skip") -- confirm intent before changing
        if expected - n >= DAILY_N:
            continue
        logger.debug('  -> check monthly data')

        monthly_periods = _get_monthly_periods_for_a_year(year)
        for mprd in monthly_periods:
            month = mprd.start_date.month
            n = _get_count_of_tt_data(ttri, mprd)
            expected = _get_expected_tt_data(mprd)
            logger.debug('  - n of tt data for %04d-%02d = %s/%d' % (year, month, n, expected))
            if expected - n >= DAILY_N:
                continue

            # fixed copy-pasted message: this level drills into daily data
            logger.debug('  -> check daily data')

            daily_periods = _get_daily_periods_for_a_month(year, month)
            for dprd in daily_periods:
                day = dprd.start_date.day
                n = _get_count_of_tt_data(ttri, dprd)
                expected = _get_expected_tt_data(dprd)
                # NOTE(review): raises ZeroDivisionError if expected == 0
                rate = n / expected
                logger.debug('  - n of tt data for %04d-%02d-%02d = %s/%d (%.2f)' % (
                    year, month, day, n, expected, rate))
                if rate >= R_TH:
                    continue
                logger.debug('     -> it needs to be re-calculated')
                try:
                    from pyticas_tetres.util.traffic_file_checker import has_traffic_files
                    # NOTE(review): this uses the YEARLY period `prd`, not the
                    # daily `dprd`, both for the file check and the
                    # recalculation below, so one incomplete day triggers a
                    # whole-year recalculation -- confirm this is intended
                    start_date_str, end_date_str = prd.start_date.strftime('%Y-%m-%d'), prd.end_date.strftime('%Y-%m-%d')
                    if not has_traffic_files(start_date_str, end_date_str):
                        logger.warning(
                            'Missing traffic files for performing monthly check up for the time range starting from {} to {}'.format(
                                start_date_str, end_date_str))
                        return
                except Exception as e:
                    logger.warning(
                        'Exception occured while checking if traffic files exist during performing monthly task. Error: {}'.format(e))

                _perform_calculation_of_tt(ttri, prd)

    logger.debug('<<< end of checkup travel time data for a route : %s' % ttri)
# --- Beispiel #3 ---
def handle_route_wise_moe_parameters(config_json_string):
    """ Create a route-wise MOE parameter record and trigger the MOE update.

    :param config_json_string: JSON string; keys read are
        'reference_tt_route_id', 'rw_moe_critical_density',
        'rw_moe_lane_capacity', 'rw_moe_congestion_threshold_speed',
        'rw_moe_start_date' and 'rw_moe_end_date'.
    :return: None.  The outcome is recorded on the saved parameter object
        via ``update_rw_moe_status`` ("Completed" / "Failed" + reason).
    """
    import json
    config_json = json.loads(config_json_string)
    route_id = config_json.get("reference_tt_route_id")
    if not route_id:
        # nothing to do without a reference travel-time route
        return
    rw_moe_critical_density = config_json.get("rw_moe_critical_density")
    rw_moe_lane_capacity = config_json.get("rw_moe_lane_capacity")
    rw_moe_congestion_threshold_speed = config_json.get("rw_moe_congestion_threshold_speed")
    rw_moe_start_date = config_json.get("rw_moe_start_date")
    rw_moe_end_date = config_json.get("rw_moe_end_date")
    rw_moe_param_info = create_rw_moe_param_object(route_id, rw_moe_critical_density, rw_moe_lane_capacity,
                                                   rw_moe_congestion_threshold_speed, rw_moe_start_date,
                                                   rw_moe_end_date)

    rw_moe_object_id = save_rw_param_object(rw_moe_param_info)
    # fix: import locally like every other block in this module instead of
    # relying on a module-level import that is not guaranteed to exist
    from pyticas_tetres.util.traffic_file_checker import has_traffic_files
    if has_traffic_files(rw_moe_start_date, rw_moe_end_date):
        try:
            update_moe_values(config_json)
            update_rw_moe_status(rw_moe_object_id, status="Completed")
        except Exception as e:
            # fix: use the module's logging convention instead of print()
            getLogger(__name__).warning(
                'Exception occured while updating route-wise MOE values. Error: {}'.format(e))
            update_rw_moe_status(rw_moe_object_id, status="Failed", reason=str(e))
    else:
        update_rw_moe_status(rw_moe_object_id, status="Failed", reason="Missing traffic files for the given time range.")
# --- Beispiel #4 ---
        '# Have you defined the travel time reliability route in administrator client?'
    )
    print('# This program loads weather and incident data,')
    print('# and calculate travel times during the given time period')
    print(
        '# Then the operating condition data will be linked to each travel time data'
    )
    print('')

    sdt_str = input('# Enter start date to load data (e.g. 2015-01-01) : ')
    sdate = datetime.datetime.strptime(sdt_str, '%Y-%m-%d').date()

    edt_str = input('# Enter end date to load data (e.g. 2017-12-31) : ')
    edate = datetime.datetime.strptime(edt_str, '%Y-%m-%d').date()
    from pyticas_tetres.util.traffic_file_checker import has_traffic_files
    if not has_traffic_files(sdt_str, edt_str):
        print("Missing traffic files for the given time range.")
        print(
            "Please check if you have put the traffic files in the proper directory structure."
        )
        print("Failed to calculate moe for the given time range.")
        exit(1)
    print('')
    print('!! Data during the given time period will be deleted.')
    res = input('!! Do you want to proceed data loading process ? [N/y] : ')
    if res.lower() not in ['y', 'ye', 'yes']:
        print('\nAported!')
        exit(1)

    filename = '_initial_data_maker.log'
    with open(filename, 'w') as f:
def _handler_systemconfig(da, item, action_log):
    """ React to a system-configuration change recorded in the action log.

    Depending on ``action_log``, this either fills in / deletes archived data
    for an extended / shrunk archive range, or re-runs categorization for the
    changed data type over every route and every daily period.

    :type da: pyticas_tetres.da.config.ConfigDataAccess
    :type item: pyticas_tetres.ttypes.SystemConfigInfo
    :type action_log: pyticas_tetres.ttypes.ActionLogInfo
    :rtype: bool | (bool, str)
    """
    ttr_da = TTRouteDataAccess()
    routes = ttr_da.list()
    ttr_da.close_session()

    start_date = datetime.datetime.strptime('%s-01-01' % cfg.DATA_ARCHIVE_START_YEAR, '%Y-%m-%d')
    last_date = datetime.datetime.now() - datetime.timedelta(days=cfg.DAILY_JOB_OFFSET_DAYS)
    daily_periods = _get_daily_periods(start_date, last_date)

    # faverolles 1/16/2020 NOTE: If the AdminClient changes the Archive Start Year parameter
    #  and an entry is made in the action_log database table.
    #  The server will repeatedly rerun initial_data_maker which is also run by (dataloader.py)
    #  until this entry is removed from the database.
    #  The issue is that the entry won't be removed because "target data is not handled" which
    #  i think means "until all traffic data is downloaded" for the archive start year.
    #  This never happens because the traffic data is hundreds of GB's.

    if action_log.data_desc.startswith('DATA_ARCHIVE_START_YEAR_EXTENDED'):
        # calculate travel time data and the related non-traffic data during the extended years
        # data_desc format: 'DATA_ARCHIVE_START_YEAR_EXTENDED:<prev>-><changed>'
        _, year_change = action_log.data_desc.split(':')
        prev_year, changed_year = year_change.split('->')
        prev_year, changed_year = int(prev_year.strip()), int(changed_year.strip())
        # fill from the (new) archive start up to the day before the
        # previously covered range began
        prev_end_date = datetime.datetime.strptime('%s-12-31' % (prev_year - 1), '%Y-%m-%d').date()
        try:
            # faverolles 1/16/2020 NOTE: Why is there no parameter db_info passed
            #  I'm guessing its expected to fail because try-catch maybe?
            from pyticas_tetres.util.traffic_file_checker import has_traffic_files
            start_date_str, end_date_str = start_date.strftime('%Y-%m-%d'), prev_end_date.strftime('%Y-%m-%d')
            if not has_traffic_files(start_date_str, end_date_str):
                return False, "Missing traffic files for the given time range from {} to {}.".format(start_date_str, end_date_str)
            import dbinfo
            initial_data_maker.run(start_date.date(), prev_end_date, db_info=dbinfo.tetres_db_info())
            return True
        except Exception as ex:
            getLogger(__name__).warning(
                'exception occured when handling  SystemConfig - Data Archive Start Year (Extended) : %s'
                % tb.traceback(ex, f_print=False))
            return False

    elif action_log.data_desc.startswith('DATA_ARCHIVE_START_YEAR_SHRINKED'):
        # delete the travel time data and the related non-traffic data during the shrinked years
        _, year_change = action_log.data_desc.split(':')
        prev_year, changed_year = year_change.split('->')
        prev_year, changed_year = int(prev_year.strip()), int(changed_year.strip())
        years = [y for y in range(prev_year, changed_year)]

        for y in years:
            sdt = datetime.datetime.strptime('%s-01-01 00:00:00' % y, '%Y-%m-%d %H:%M:%S')
            edt = datetime.datetime.strptime('%s-12-31 23:59:59' % y, '%Y-%m-%d %H:%M:%S')

            try:
                tt_da = TravelTimeDataAccess(y)
                for a_route in routes:
                    tt_da.delete_range(a_route.id, sdt, edt)
                tt_da.close_session()

                weather_da = NoaaWeatherDataAccess(y)
                weather_da.delete_range(None, None, start_time=sdt, end_time=edt)
                weather_da.commit()
                weather_da.close_session()

                incident_da = IncidentDataAccess()
                incident_da.delete_range_all(start_time=sdt, end_time=edt)
                incident_da.commit()
                incident_da.close_session()
            except Exception as ex:
                getLogger(__name__).warning(
                    'exception occured when handling  SystemConfig - Data Archive Start Year (Shrinked) : %s'
                    % tb.traceback(ex, f_print=False))
                return False

    elif action_log.target_datatype == ActionLogDataAccess.DT_INCIDENT:
        if not _recategorize_all(routes, daily_periods, categorization.incident,
                                 'Incident Parameters Changes'):
            return False

    elif action_log.target_datatype == ActionLogDataAccess.DT_WORKZONE:
        if not _recategorize_all(routes, daily_periods, categorization.workzone,
                                 'Workzone Parameters Changes'):
            return False

    elif action_log.target_datatype == ActionLogDataAccess.DT_SPECIALEVENT:
        if not _recategorize_all(routes, daily_periods, categorization.specialevent,
                                 'SpecialEvent Parameters Changes'):
            return False

    elif action_log.target_datatype == ActionLogDataAccess.DT_SNOWMGMT:
        if not _recategorize_all(routes, daily_periods, categorization.snowmgmt,
                                 'SnowManagement Parameters Changes'):
            return False

    return True


def _recategorize_all(routes, daily_periods, categorizer, label):
    """ Run a single categorizer over every route and every daily period.

    Logs and returns False on the first failure; returns True otherwise.
    Extracted from four near-identical loops in ``_handler_systemconfig``.
    """
    for a_route in routes:
        for prd in daily_periods:
            try:
                categorization.categorize(a_route, prd, categorizers=[categorizer])
            except Exception as ex:
                getLogger(__name__).warning(
                    'exception occured when handling SystemConfig - %s : %s'
                    % (label, tb.traceback(ex, f_print=False)))
                return False
    return True
# --- Beispiel #6 ---
def run():
    """ Daily travel-time task: build the list of daily periods to process
    (catching up on any gap since the last day with data, plus previously
    failed days from the processing log), verify traffic files exist for
    each period, then run the four sub-tasks (travel time, weather,
    incident, tagging) per period.
    """
    # lazy import, presumably to avoid a circular import at load time -- TODO confirm
    if '_01_tt' not in sys.modules:
        from pyticas_tetres.sched.daily_tasks import _01_tt, _02_load_weather, _03_load_incident, _04_tagging

    periods = []

    # faverolles 1/16/2020 NOTE: always starts at datetime.today
    today = datetime.datetime.today()
    # process data up to DAILY_JOB_OFFSET_DAYS behind today
    target_day = today - datetime.timedelta(days=cfg.DAILY_JOB_OFFSET_DAYS)
    # NOTE(review): only the current year is passed; verify _find_last_date
    # handles the year boundary (e.g. last data in late December)
    last_day_with_tt_data = _find_last_date(today.year)

    if last_day_with_tt_data and last_day_with_tt_data <= target_day:
        # catch-up: one period per day from the last day that already has
        # data through the target day. NOTE(review): the start day itself is
        # included, so the last already-processed day appears to be
        # re-processed -- confirm that is intended
        periods = period.create_periods(last_day_with_tt_data.date(),
                                        target_day.date(), '00:00:00',
                                        '23:59:00', cfg.TT_DATA_INTERVAL)
    else:
        # no usable previous data: process only the single target day
        prd = period.create_period(target_day.strftime("%Y-%m-%d 00:00:00"),
                                   target_day.strftime("%Y-%m-%d 23:59:00"),
                                   cfg.TT_DATA_INTERVAL)
        periods.append(prd)

    try:
        # prepend days whose earlier runs did not complete, per the log
        non_completed_dates_and_routes = _check_logs()
        periods = [_prd for _route_id, _prd in non_completed_dates_and_routes
                   ] + periods
    except Exception as ex:
        getLogger(__name__).warning(
            'error occured when checking daily-processing log : %s' %
            tb.traceback(ex, f_print=False))

    # de-duplicate periods; NOTE(review): set() discards chronological order
    periods = list(set(periods))

    # safety valve: long backlogs belong to the bulk data-loader, not this job
    if len(periods) > N_LIMIT_OF_DAYS_TO_PROCESS:
        getLogger(__name__).warning(
            'too many days to process. please use data loader program to process the long-time periods'
        )
        return
    try:
        # abort the whole run if any period lacks raw traffic files on disk
        from pyticas_tetres.util.traffic_file_checker import has_traffic_files
        for prd in periods:
            start_date_str, end_date_str = prd.start_date.strftime(
                '%Y-%m-%d'), prd.end_date.strftime('%Y-%m-%d')
            if not has_traffic_files(start_date_str, end_date_str):
                getLogger(__name__).warning(
                    'Missing traffic files for performing daily task for the time range starting from {} to {}'
                    .format(start_date_str, end_date_str))
                return
    except Exception as e:
        getLogger(__name__).warning(
            'Exception occured while checking if traffic files exist during performing daily task. Error: {}'
            .format(e))

    # run the four daily sub-tasks for each period; a failure in one period
    # is logged and does not stop the remaining periods
    for prd in periods:
        getLogger(__name__).info('>> running daily task for %s' %
                                 prd.get_date_string())
        try:
            _01_tt.run(prd)
            _02_load_weather.run(prd)
            _03_load_incident.run(prd)
            _04_tagging.run(prd)
        except Exception as ex:
            tb.traceback(ex)
            getLogger(__name__).warning(
                'Exception occured while performing daily task')