Example 1
def traveltime_route_list():
    """
    :rtype: list[dict]
    """
    da = TTRouteDataAccess()
    ttris = da.list()
    da.close_session()

    res = []
    for ttri in ttris:
        stations = ttri.route.get_stations()

        res.append({
            'id': ttri.id,
            'name': ttri.name,
            'start_station': {
                'station_id': stations[0].station_id,
                'label': stations[0].label,
                'lat': stations[0].lat,
                'lon': stations[0].lon
            },
            'end_station': {
                'station_id': stations[-1].station_id,
                'label': stations[-1].label,
                'lat': stations[-1].lat,
                'lon': stations[-1].lon
            }
        })
    return res
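
A quick usage sketch (illustrative only, not part of the original module): because every value in the returned dictionaries is a primitive, the list can be fed straight to json.dumps without a custom encoder.

import json

routes = traveltime_route_list()
# each entry holds primitives only (ids, labels, coordinates), so this serializes as-is
print(json.dumps(routes, indent=2))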
Example 2
def run(target_date):
    """

    :type target_date: datetime.datetime
    :return:
    """
    ttr_route_da = TTRouteDataAccess()
    route_list = ttr_route_da.list()
    ttr_route_da.close_session()

    for ttri in route_list:
        try:
            traveltime_info.calculate_TOD_reliabilities(ttri.id, target_date)
        except Exception as ex:
            tb.traceback(ex)
            getLogger(__name__).warning('Failed to calculate TOD reliabilities for route=%d' % ttri.id)
Example 3
def _run_multi_process(target_function, target_date, db_info):
    """
    :type target_function: callable
    :type target_date: datetime.date
    :type db_info: dict
    """
    logger = getLogger(__name__)

    logger.debug('>>> Starting Multi Processing (target-date= %s)' % (target_date))

    m = Manager()
    queue = m.Queue()

    lck = Lock()
    N = DEFAULT_NUMBER_OF_PROCESSES
    data_path = ticas._TICAS_.data_path
    procs = []
    for idx in range(N):
        p = Process(target=target_function,
                    args=(idx, queue, lck, data_path, db_info))
        p.start()
        procs.append(p)

    ttr_route_da = TTRouteDataAccess()
    ttr_ids = [ttri.id for ttri in ttr_route_da.list()]
    ttr_route_da.close_session()

    real_target_date = datetime.datetime.combine(target_date, datetime.time(12, 0, 0, 0))
    total = len(ttr_ids)
    for ridx, ttr_id in enumerate(ttr_ids):
        queue.put((ttr_id, real_target_date, (ridx + 1), total))

    for idx in range(N * 3):
        queue.put((None, None, None, None))

    for p in procs:
        try:
            p.join()
        except:
            pass

    # flush queue
    while not queue.empty():
        queue.get()

    logger.debug('<<< End of Multi Processing (target-date=%s)' % (target_date))
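
The producer above assumes a specific worker contract: each process is started with (idx, queue, lck, data_path, db_info), pulls (ttr_id, target_date, seq, total) tuples from the queue, and exits when it receives the all-None sentinel (N * 3 sentinels are queued so every worker is guaranteed to see one). The real target_function is defined elsewhere in the project; the stand-in below is only a sketch of that contract.

def _example_worker(idx, queue, lck, data_path, db_info):
    # hypothetical worker: consumes work items until the all-None sentinel arrives
    while True:
        ttr_id, target_date, seq, total = queue.get()
        if ttr_id is None:
            break  # sentinel: no more work for this process
        with lck:
            print('[worker %d] (%d/%d) route=%d, date=%s' % (idx, seq, total, ttr_id, target_date))
        # per-route calculation would happen here in the real target_function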
Example 4
def run(prd):
    """

    :type prd: pyticas.ttypes.Period
    :return:
    """
    tlogger = task_logger.get_task_logger(TASK_LOGGER_NAME, capacity=365)

    ttr_route_da = TTRouteDataAccess()
    routes = ttr_route_da.list()
    ttr_route_da.close_session()
    logger = getLogger(__name__)
    has_error = 0
    for ttri in routes:
        try:
            result = DataCategorizer.categorize(ttri, prd)
            if result['has_error']:
                logger.debug(
                    '  - error occurred when doing categorization for route %s (id=%s) during %s'
                    % (ttri.name, ttri.id, prd.get_date_string()))
                tlogger.add_log({
                    'time': tlogger.now(),
                    'route_id': ttri.id,
                    'target_period': prd,
                    'failed': True
                })
                has_error += 1
        except Exception as ex:
            logger.debug(
                '  - exception occurred when doing categorization for route %s (id=%s) during %s'
                % (ttri.name, ttri.id, prd.get_date_string()))
            tlogger.add_log({
                'time': tlogger.now(),
                'route_id': ttri.id,
                'target_period': prd,
                'failed': True
            })
            has_error += 1

    logger.debug('  - categorization for %s routes is done (has_error=%s)' %
                 (len(routes), has_error))

    tlogger.set_registry('last_executed', tlogger.now())
    tlogger.save()
Example 5
def calculate_all_routes(prd, **kwargs):
    """ calculate travel time, average speed and VMT during the given time period
    and put whole_data to database (travel time table)

    :type prd: pyticas.ttypes.Period
    :rtype: list[dict]
    """
    logger = getLogger(__name__)
    logger.info('calculating travel time : %s' % prd.get_period_string())

    res = []
    ttr_route_da = TTRouteDataAccess()
    routes = ttr_route_da.list()
    ttr_route_da.close_session()
    total = len(routes)
    for ridx, ttri in enumerate(routes):
        logger.info('(%d/%d) calculating travel time for %s(%s) : %s'
                    % ((ridx + 1), total, ttri.name, ttri.id, prd.get_period_string()))
        is_inserted = calculate_a_route(prd, ttri, lock=kwargs.get('lock', nonop_with()))
        res.append({'route_id': ttri.id, 'done': is_inserted})

    return res
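
calculate_a_route receives kwargs.get('lock', nonop_with()) as its lock, so nonop_with is presumably a no-op context manager used when the caller supplies no real multiprocessing lock. A minimal stand-in (not the project's actual implementation) might look like this:

import contextlib

@contextlib.contextmanager
def nonop_with():
    # behaves like a lock that never blocks: `with nonop_with(): ...` just runs the body
    yield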
Example 6
        exit(1)

    filename = '_initial_data_maker.log'
    with open(filename, 'w') as f:
        f.write('started at ' +
                datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') + '\n')

    from pyticas_tetres.systasks import initial_data_maker
    from pyticas_tetres.da.route import TTRouteDataAccess

    try:
        if route_ids:
            ttr_ids = route_ids
        else:
            ttr_route_da = TTRouteDataAccess()
            ttr_ids = [ttri.id for ttri in ttr_route_da.list()]
            ttr_route_da.close_session()

        initial_data_maker.create_or_update_tt_and_moe(
            sdate,
            edate,
            db_info=dbinfo.tetres_db_info(),
            rw_moe_param_json=rw_moe_param_json,
            route_ids=route_ids)
        for route_id in ttr_ids:
            rw_moe_param_info = create_rw_moe_param_object(
                route_id,
                moe_critical_density,
                moe_lane_capacity,
                moe_congestion_threshold_speed,
                datetime.datetime.strptime(
Example 7
def _handler_systemconfig(da, item, action_log):
    """

    :type da: pyticas_tetres.da.config.ConfigDataAccess
    :type item: pyticas_tetres.ttypes.SystemConfigInfo
    :type action_log: pyticas_tetres.ttypes.ActionLogInfo
    """
    ttr_da = TTRouteDataAccess()
    routes = ttr_da.list()
    ttr_da.close_session()

    start_date = datetime.datetime.strptime('%s-01-01' % cfg.DATA_ARCHIVE_START_YEAR, '%Y-%m-%d')
    last_date = datetime.datetime.now() - datetime.timedelta(days=cfg.DAILY_JOB_OFFSET_DAYS)
    daily_periods = _get_daily_periods(start_date, last_date)

    # faverolles 1/16/2020 NOTE: If the AdminClient changes the Archive Start Year parameter,
    #  an entry is made in the action_log database table. The server will repeatedly rerun
    #  initial_data_maker (which is also run by dataloader.py) until this entry is removed
    #  from the database. The issue is that the entry won't be removed because "target data
    #  is not handled", which I think means "until all traffic data is downloaded" for the
    #  archive start year. This never happens because the traffic data is hundreds of GBs.

    if action_log.data_desc.startswith('DATA_ARCHIVE_START_YEAR_EXTENDED'):
        # calculate travel time data and the related non-traffic data during the extended years
        _, year_change = action_log.data_desc.split(':')
        prev_year, changed_year = year_change.split('->')
        prev_year, changed_year = int(prev_year.strip()), int(changed_year.strip())
        prev_end_date = datetime.datetime.strptime('%s-12-31' % (prev_year - 1), '%Y-%m-%d').date()
        try:
            # faverolles 1/16/2020 NOTE: Why is no db_info parameter passed here?
            #  I'm guessing it's expected to fail because of the try-except, maybe?
            from pyticas_tetres.util.traffic_file_checker import has_traffic_files
            start_date_str, end_date_str = start_date.strftime('%Y-%m-%d'), prev_end_date.strftime('%Y-%m-%d')
            if not has_traffic_files(start_date_str, end_date_str):
                return False, "Missing traffic files for the given time range from {} to {}.".format(start_date_str, end_date_str)
            import dbinfo
            initial_data_maker.run(start_date.date(), prev_end_date, db_info=dbinfo.tetres_db_info())
            return True
        except Exception as ex:
            getLogger(__name__).warning(
                'exception occurred when handling SystemConfig - Data Archive Start Year (Extended) : %s'
                % tb.traceback(ex, f_print=False))
            return False

    elif action_log.data_desc.startswith('DATA_ARCHIVE_START_YEAR_SHRINKED'):
        # delete the travel time data and the related non-traffic data during the shrunk years
        _, year_change = action_log.data_desc.split(':')
        prev_year, changed_year = year_change.split('->')
        prev_year, changed_year = int(prev_year.strip()), int(changed_year.strip())
        years = [y for y in range(prev_year, changed_year)]

        for y in years:
            sdt = datetime.datetime.strptime('%s-01-01 00:00:00' % y, '%Y-%m-%d %H:%M:%S')
            edt = datetime.datetime.strptime('%s-12-31 23:59:59' % y, '%Y-%m-%d %H:%M:%S')

            try:
                tt_da = TravelTimeDataAccess(y)
                for a_route in routes:
                    tt_da.delete_range(a_route.id, sdt, edt)
                tt_da.close_session()

                weather_da = NoaaWeatherDataAccess(y)
                weather_da.delete_range(None, None, start_time=sdt, end_time=edt)
                weather_da.commit()
                weather_da.close_session()

                incident_da = IncidentDataAccess()
                incident_da.delete_range_all(start_time=sdt, end_time=edt)
                incident_da.commit()
                incident_da.close_session()
            except Exception as ex:
                getLogger(__name__).warning(
                    'exception occurred when handling SystemConfig - Data Archive Start Year (Shrinked) : %s'
                    % tb.traceback(ex, f_print=False))
                return False

    elif action_log.target_datatype == ActionLogDataAccess.DT_INCIDENT:
        for a_route in routes:
            for prd in daily_periods:
                try:
                    categorization.categorize(a_route, prd, categorizers=[categorization.incident])
                except Exception as ex:
                    getLogger(__name__).warning(
                        'exception occurred when handling SystemConfig - Incident Parameters Changes : %s'
                        % tb.traceback(ex, f_print=False))
                    return False

    elif action_log.target_datatype == ActionLogDataAccess.DT_WORKZONE:
        for a_route in routes:
            for prd in daily_periods:
                try:
                    categorization.categorize(a_route, prd, categorizers=[categorization.workzone])
                except Exception as ex:
                    getLogger(__name__).warning(
                        'exception occurred when handling SystemConfig - Workzone Parameters Changes : %s'
                        % tb.traceback(ex, f_print=False))
                    return False


    elif action_log.target_datatype == ActionLogDataAccess.DT_SPECIALEVENT:
        for a_route in routes:
            for prd in daily_periods:
                try:
                    categorization.categorize(a_route, prd, categorizers=[categorization.specialevent])
                except Exception as ex:
                    getLogger(__name__).warning(
                        'exception occurred when handling SystemConfig - SpecialEvent Parameters Changes : %s'
                        % tb.traceback(ex, f_print=False))
                    return False


    elif action_log.target_datatype == ActionLogDataAccess.DT_SNOWMGMT:
        for a_route in routes:
            for prd in daily_periods:
                try:
                    categorization.categorize(a_route, prd, categorizers=[categorization.snowmgmt])
                except Exception as ex:
                    getLogger(__name__).warning(
                        'exception occurred when handling SystemConfig - SnowManagement Parameters Changes : %s'
                        % tb.traceback(ex, f_print=False))
                    return False

    return True
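
For reference, the data_desc branches above expect a value like 'DATA_ARCHIVE_START_YEAR_SHRINKED:2012->2015' (the concrete years below are made up for illustration); the shrink branch then derives the years whose data should be deleted:

data_desc = 'DATA_ARCHIVE_START_YEAR_SHRINKED:2012->2015'  # hypothetical example value
_, year_change = data_desc.split(':')
prev_year, changed_year = [int(v.strip()) for v in year_change.split('->')]
years = list(range(prev_year, changed_year))  # -> [2012, 2013, 2014]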
Example 8
def _run_multi_process(target_function, start_date, end_date, db_info):
    """
    :type target_function: callable
    :type start_date: datetime.date
    :type end_date: datetime.date
    :type db_info: dict
    """
    logger = getLogger(__name__)

    stime = datetime.time(0, 0, 0, 0)
    etime = datetime.time(23, 55, 0, 0)

    daily_periods = period.create_periods(start_date,
                                          end_date,
                                          stime,
                                          etime,
                                          cfg.TT_DATA_INTERVAL,
                                          target_days=[0, 1, 2, 3, 4, 5, 6],
                                          remove_holiday=False)

    logger.debug('>>> Starting Multi Processing (duration= %s to %s)' %
                 (start_date, end_date))

    m = Manager()
    queue = m.Queue()

    lck = Lock()
    N = DEFAULT_NUMBER_OF_PROCESSES
    data_path = ticas._TICAS_.data_path
    procs = []
    for idx in range(N):
        p = Process(target=target_function,
                    args=(idx, queue, lck, data_path, db_info))
        p.start()
        procs.append(p)

    ttr_route_da = TTRouteDataAccess()
    ttr_ids = [ttri.id for ttri in ttr_route_da.list()]
    ttr_route_da.close_session()

    total = len(daily_periods) * len(ttr_ids)
    cnt = 1
    for pidx, prd in enumerate(daily_periods):
        for ridx, ttr_id in enumerate(ttr_ids):
            queue.put((ttr_id, prd, cnt, total))
            cnt += 1

    for idx in range(N * 3):
        queue.put((None, None, None, None))

    for p in procs:
        try:
            p.join()
        except:
            pass

    # flush queue
    while not queue.empty():
        queue.get()

    logger.debug('<<< End of Multi Processing (duration= %s to %s)' %
                 (start_date, end_date))
Example 9
def ff_api_get_all_routes():
    da = TTRouteDataAccess()
    return json.dumps(da.list())
Example 10
def api_extension_all_routes():
    da = TTRouteDataAccess()
    return json.dumps(da.list())
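
Examples 9 and 10 hand the objects returned by da.list() directly to json.dumps; if those are route info objects rather than plain dicts (as the other examples suggest), the default encoder will raise a TypeError, and the session is never closed. A safer variant, mirroring Example 1 and using only the attributes shown there, might look like this (the function name is hypothetical):

import json

def api_all_routes_as_json():  # hypothetical name, for illustration only
    da = TTRouteDataAccess()
    ttris = da.list()
    da.close_session()
    # reduce each route object to JSON-friendly primitives before dumping
    return json.dumps([{'id': ttri.id, 'name': ttri.name} for ttri in ttris])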