Example No. 1
def _worker_process_to_specific_categorization(idx, queue, lck, data_path,
                                               db_info, **kwargs):
    from pyticas_tetres.db.tetres import conn
    from pyticas.infra import Infra
    from pyticas.tool import tb
    from pyticas_tetres.rengine.cats import incident, snowmgmt, specialevent, weather, workzone

    logger = getLogger(__name__)
    # initialize
    logger.debug('[TT-Categorization Worker %d] starting...' % (idx))
    ticas.initialize(data_path)
    infra = Infra.get_infra()
    conn.connect(db_info)
    categorizers = []
    categorizer_names = kwargs.get("categorizer_names") or []
    categorizer_map = {
        "incident": incident,
        "snowmgmt": snowmgmt,
        "specialevent": specialevent,
        "weather": weather,
        "workzone": workzone
    }
    for categorizer_name in categorizer_names:
        categorizer = categorizer_map.get(categorizer_name)
        if categorizer is not None:  # skip unknown categorizer names
            categorizers.append(categorizer)
    da_route = TTRouteDataAccess()
    logger.debug('[TT-Categorization Worker %d] is ready' % (idx))
    while True:
        ttr_id, prd, num, total = queue.get()
        if prd is None:
            # poison pill received: close the session and stop this worker
            da_route.close_session()
            exit(1)
        try:
            ttri = da_route.get_by_id(ttr_id)
            if not ttri:
                logger.debug(
                    '[TT-Categorization Worker %d] route is not found (%s)' %
                    (idx, ttr_id))
                continue
            logger.debug(
                '[TT-Categorization Worker %d] (%d/%d) %s (id=%s) at %s' %
                (idx, num, total, ttri.name, ttri.id, prd.get_date_string()))

            tt_da = TravelTimeDataAccess(prd.start_date.year)
            tt_data_list = tt_da.list_by_period(ttri.id, prd)
            tt_da.close_session()

            for categorizer in categorizers:
                categorizer.categorize(ttri, prd, tt_data_list, lock=lck)

            gc.collect()

        except Exception as ex:
            tb.traceback(ex)
            continue
def _get_count_of_tt_data(ttri, prd):
    """

    :type ttri: pyticas_tetres.ttypes.TTRouteInfo
    :type prd: pyticas.ttypes.Period
    :rtype: int
    """
    da_tt = TravelTimeDataAccess(prd.start_date.year)
    cnt = da_tt.get_count(ttri.id, prd.start_date, prd.end_date)
    da_tt.close_session()
    return cnt
Example No. 3
def _worker_process_to_calculate_tt_and_categorize(idx, queue, lck, data_path,
                                                   db_info):
    from pyticas_tetres.db.tetres import conn
    from pyticas.infra import Infra
    from pyticas.tool import tb
    from pyticas_tetres.rengine.cats import weather, incident, snowmgmt, specialevent, workzone

    logger = getLogger(__name__)
    # initialize
    logger.debug('[TT-Categorization Worker %d] starting...' % (idx))
    ticas.initialize(data_path)
    infra = Infra.get_infra()
    conn.connect(db_info)

    categorizers = [weather, incident, workzone, specialevent, snowmgmt]
    da_route = TTRouteDataAccess()
    logger.debug('[TT-Categorization Worker %d] is ready' % (idx))
    while True:
        ttr_id, prd, num, total = queue.get()
        if prd is None:
            # poison pill received: close the session and stop this worker
            da_route.close_session()
            exit(1)
        try:
            ttri = da_route.get_by_id(ttr_id)
            if not ttri:
                logger.debug(
                    '[TT-Categorization Worker %d] route is not found (%s)' %
                    (idx, ttr_id))
                continue
            logger.debug(
                '[TT-Categorization Worker %d] (%d/%d) %s (id=%s) at %s' %
                (idx, num, total, ttri.name, ttri.id, prd.get_date_string()))
            is_inserted = traveltime.calculate_a_route(
                prd, ttri, dbsession=da_route.get_session(), lock=lck)
            if not is_inserted:
                logger.warning(
                    '[TT-Categorization Worker %d]  - fail to add travel time data'
                    % idx)

            tt_da = TravelTimeDataAccess(prd.start_date.year)
            tt_data_list = tt_da.list_by_period(ttri.id, prd)
            tt_da.close_session()

            for categorizer in categorizers:
                categorizer.categorize(ttri, prd, tt_data_list, lock=lck)

            gc.collect()

        except Exception as ex:
            tb.traceback(ex)
            continue
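
Both worker loops above consume 4-tuples of (ttr_id, prd, num, total) from a shared queue and treat a None period as the shutdown sentinel. A minimal producer sketch under those assumptions (the driver function, worker count, route ids, and periods are illustrative):

import multiprocessing

def run_workers(route_ids, periods, data_path, db_info, n_workers=4):
    # hypothetical driver; mirrors the (ttr_id, prd, num, total) tuple
    # format and the `prd is None` poison pill expected by the workers
    queue = multiprocessing.Queue()
    lck = multiprocessing.Lock()
    workers = [multiprocessing.Process(
                   target=_worker_process_to_calculate_tt_and_categorize,
                   args=(i, queue, lck, data_path, db_info))
               for i in range(n_workers)]
    for w in workers:
        w.start()
    jobs = [(rid, prd) for rid in route_ids for prd in periods]
    for num, (rid, prd) in enumerate(jobs, start=1):
        queue.put((rid, prd, num, len(jobs)))
    for _ in workers:
        queue.put((None, None, None, None))  # poison pill: prd is None
    for w in workers:
        w.join()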
Example No. 4
def calculate_old(ttri, sdate, edate, stime, etime):
    """

    :type ttri: pyticas_tetres.ttypes.TTRouteInfo
    :type sdate: datetime.date
    :type edate: datetime.date
    :type stime: datetime.time
    :type etime: datetime.time
    :return:
    """
    logger = getLogger(__name__)
    logger.info('calculate_old()')
    sdt = datetime.datetime.combine(sdate, stime)
    edt = datetime.datetime.combine(edate, etime)
    prd = Period(sdt, edt, TT_DATA_INTERVAL)
    years = prd.years()
    ttDAs = {}
    tt_data_all = []
    for y in years:
        tt_da = ttDAs.get(y)
        if tt_da is None:
            tt_da = TravelTimeDataAccess(y)  # use the iterated year, not prd.start_date.year
            ttDAs[y] = tt_da
        tt_data = tt_da.list_by_period(ttri.id, prd)
        tt_data_all.extend(tt_data)

    for ttd in tt_data_all:
        print(ttd.id, ttd.time, ttd.tt, ttd.speed, ttd.vmt)

    tts = [ttd.tt for ttd in tt_data_all if ttd.tt > 0]
    if not tts:
        logger.warning('no valid travel time data for the given period')
        return
    percentiles = [0.8, 0.85, 0.9, 0.95]
    avg_tt = statistics.mean(tts)
    ffs_tt = _tt_by_freeflowspeed(ttri)
    congested_tts = _congested_travel_times(ffs_tt, tts)
    congested_avg_tt = statistics.mean(congested_tts) if congested_tts else None

    traveltime_index = (congested_avg_tt / ffs_tt) if congested_tts else None
    buffer_indices = {}
    planning_indices = {}
    for pct in percentiles:
        pct_tt = num.percentile(tts, pct)
        buffer_indices[pct] = (pct_tt - avg_tt) / avg_tt
        planning_indices[pct] = pct_tt / ffs_tt

    print('Avg TT : ', avg_tt)
    print('TT by FFS : ', ffs_tt)
    print('Travel Time Index : ', traveltime_index)
    print('Buffer Index : ', buffer_indices)
    print('Planning Index : ', planning_indices)
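
The metrics printed above follow the common FHWA reliability definitions: Buffer Index = (percentile TT - average TT) / average TT, Planning Time Index = percentile TT / free-flow TT, and Travel Time Index = congested average TT / free-flow TT. A self-contained sketch with illustrative numbers:

import statistics

tts = [12.0, 12.5, 13.0, 14.0, 18.0, 22.0]  # travel times in minutes (illustrative)
ffs_tt = 11.0                               # free-flow travel time (illustrative)

avg_tt = statistics.mean(tts)                        # 15.25
pct95_tt = sorted(tts)[int(0.95 * (len(tts) - 1))]   # crude percentile lookup
buffer_index = (pct95_tt - avg_tt) / avg_tt          # extra buffer vs. the average trip
planning_index = pct95_tt / ffs_tt                   # time to budget vs. free flow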
Example No. 5
def _find_last_date(current_year):
    for y in range(current_year, cfg.DATA_ARCHIVE_START_YEAR - 1, -1):
        tt_da = TravelTimeDataAccess(y)
        year_prd = period.Period(datetime.datetime(y, 1, 1, 0, 0, 0),
                                 datetime.datetime(y, 12, 31, 23, 59, 59),
                                 cfg.TT_DATA_INTERVAL)

        items = tt_da.list_by_period(None,
                                     year_prd,
                                     limit=1,
                                     order_by=('time', 'desc'))
        if not items:
            continue
        return datetime.datetime.strptime(items[0].time, '%Y-%m-%d %H:%M:%S')

    return None
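
A typical caller would resume processing from the day after the last stored record; a sketch of that usage (variable names assumed):

import datetime

last_date = _find_last_date(datetime.datetime.now().year)
if last_date is None:
    # no travel time data at all: start from the archive start year
    next_start = datetime.datetime(cfg.DATA_ARCHIVE_START_YEAR, 1, 1)
else:
    next_start = (last_date + datetime.timedelta(days=1)).replace(
        hour=0, minute=0, second=0)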
Example No. 6
    def __init__(self, year, dbModel, extModel, dataInfoType, **kwargs):
        """
        :param year: target year of the yearly travel time table
        :param dbModel: DB model for tt_<ext data> table defined in `pyticas_tetres.db.model`
        :param extModel: DB model for external data defined in `pyticas_tetres.db.model`
        :param dataInfoType: corresponding class to DB model defined in `pyticas_tetres.ttrms_types`
        """
        super().__init__(dbModel, dataInfoType, **kwargs)
        self.year = year
        self.extModel = extModel
        self.ttModel = model_yearly.get_tt_table(year)
        self.ttDA = TravelTimeDataAccess(year)
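
The source does not show a concrete subclass, but the constructor suggests wiring like the following sketch; the names here (TTExtDataAccess, get_tt_weather_table, Noaa, TTWeatherInfo) are hypothetical placeholders, not names from the source:

class TTWeatherDataAccess(TTExtDataAccess):
    # hypothetical accessor linking yearly tt data with weather records
    def __init__(self, year, **kwargs):
        super().__init__(year,
                         model_yearly.get_tt_weather_table(year),  # tt_<ext data> model
                         model.Noaa,                               # external-data model
                         TTWeatherInfo,                            # matching info type
                         **kwargs)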
Example No. 7
def categorize(ttri, prd, **kwargs):
    """

    :type ttri: pyticas_tetres.ttypes.TTRouteInfo
    :type prd: pyticas.ttypes.Period
    :rtype: dict
    """
    tt_da = TravelTimeDataAccess(prd.start_date.year)
    tt_data_list = tt_da.list_by_period(ttri.id, prd)
    tt_da.close_session()

    n_data = len(tt_data_list)
    res = {
        'route_id': ttri.id,
        'duration': prd.get_period_string(),
        'tt_counts': n_data,
        'has_error': False,
        'inserted': {}
    }

    if not tt_data_list:
        getLogger(__name__).warning(
            '!categorization.categorize(): no data (%s, %s)' %
            (ttri.name, prd.get_period_string()))
        res['has_error'] = True
        return res

    categorizers = kwargs.get(
        'categorizers', [weather, workzone, specialevent, snowmgmt, incident])
    for categorizer in categorizers:
        n_inserted = categorizer.categorize(ttri, prd, tt_data_list, **kwargs)
        res['inserted'][categorizer.__name__] = n_inserted
        if n_inserted < 0:
            res['has_error'] = True

    return res
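
A minimal invocation sketch, assuming an existing route record and a one-day period (route id and dates are illustrative):

import datetime

da_route = TTRouteDataAccess()
ttri = da_route.get_by_id(1)  # illustrative route id
da_route.close_session()

prd = Period(datetime.datetime(2020, 1, 1, 0, 0, 0),
             datetime.datetime(2020, 1, 1, 23, 59, 59),
             TT_DATA_INTERVAL)
res = categorize(ttri, prd, categorizers=[weather, incident])
if res['has_error']:
    print('categorization failed:', res)
else:
    print('rows inserted per categorizer:', res['inserted'])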
Example No. 8
def calculate_a_route(prd, ttri, **kwargs):
    """

    :type prd: pyticas.ttypes.Period
    :type ttri: pyticas_tetres.ttypes.TTRouteInfo
    """
    logger = getLogger(__name__)
    dbsession = kwargs.get('dbsession', None)

    if dbsession:
        da_tt = TravelTimeDataAccess(prd.start_date.year, session=dbsession)
    else:
        da_tt = TravelTimeDataAccess(prd.start_date.year)
    creatable_list = list()
    lock = kwargs.get('lock', nonop_with())
    # delete data to avoid duplicated data
    with lock:
        is_deleted = da_tt.delete_range(ttri.id, prd.start_date, prd.end_date, print_exception=True)
        if not is_deleted or not da_tt.commit():
            logger.warning('fail to delete the existing travel time data')
            if not dbsession:
                da_tt.close_session()
            return False

    print(f"{Fore.GREEN}CALCULATING TRAVEL-TIME FOR ROUTE[{ttri.name}]")
    res_dict = _calculate_tt(ttri.route, prd)

    if not res_dict or not res_dict['tt']:
        logger.warning('fail to calculate travel time')
        return False

    travel_time_results = res_dict['tt']
    travel_time = travel_time_results[-1].data
    avg_speeds = _route_avgs(res_dict['speed'])
    res_vmt = _route_total(res_dict['vmt'])
    timeline = prd.get_timeline(as_datetime=False, with_date=True)
    print(f"{Fore.CYAN}Start[{timeline[0]}] End[{timeline[-1]}] TimelineLength[{len(timeline)}]")
    for index, dateTimeStamp in enumerate(timeline):
        tt_data = {
            'route_id': ttri.id,
            'time': dateTimeStamp,
            'tt': travel_time[index],
            'speed': avg_speeds[index],
            'vmt': res_vmt[index],
        }
        creatable_list.append(tt_data)
    inserted_ids = list()
    if creatable_list:
        with lock:
            inserted_ids = da_tt.bulk_insert(creatable_list)
            if not inserted_ids or not da_tt.commit():
                logger.warning('fail to insert the calculated travel time into database')
    if not dbsession:
        da_tt.close_session()
    return inserted_ids
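
nonop_with() is not shown in the source; since calculate_a_route enters the returned object in more than one `with lock:` block, it is presumably a re-enterable no-op context manager along these lines (a sketch, not the actual implementation):

class nonop_with(object):
    # no-op stand-in used when the caller supplies no multiprocessing lock,
    # so the `with lock:` blocks above work unchanged
    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        return False  # never suppress exceptions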
Example No. 9
def calculate_tt_moe_a_route(prd, ttri, **kwargs):
    """

    :type prd: pyticas.ttypes.Period
    :type ttri: pyticas_tetres.ttypes.TTRouteInfo
    """
    logger = getLogger(__name__)
    dbsession = kwargs.get('dbsession', None)
    create_or_update = kwargs.get("create_or_update", True)
    rw_moe_param_json = kwargs.get("rw_moe_param_json")
    if rw_moe_param_json:
        moe_param_config = RouteWiseMOEParametersInfo()
        moe_param_config.moe_lane_capacity = rw_moe_param_json.get('rw_moe_lane_capacity')
        moe_param_config.moe_critical_density = rw_moe_param_json.get('rw_moe_critical_density')
        moe_param_config.moe_congestion_threshold_speed = rw_moe_param_json.get('rw_moe_congestion_threshold_speed')
    else:
        moe_param_config = get_system_config_info()

    if dbsession:
        da_tt = TravelTimeDataAccess(prd.start_date.year, session=dbsession)
    else:
        da_tt = TravelTimeDataAccess(prd.start_date.year)
    creatable_list = list()
    updatable_dict = {}
    existing_data_dict = {}
    if create_or_update:
        existing_data_list = da_tt.list_by_period(ttri.id, prd)
        for existing_data in existing_data_list:
            existing_data_dict[(existing_data.route_id, existing_data.time)] = existing_data
    lock = kwargs.get('lock', nonop_with())
    if not create_or_update:
        # delete data to avoid duplicated data
        with lock:
            is_deleted = da_tt.delete_range(ttri.id, prd.start_date, prd.end_date, print_exception=True)
            if not is_deleted or not da_tt.commit():
                logger.warning('fail to delete the existing travel time data')
                if not dbsession:
                    da_tt.close_session()
                return False

    # latest_moe_parameter_object = None
    # try:
    #     rw_moe_da = RouteWiseMOEParametersDataAccess()
    #     latest_moe_parameter_object = rw_moe_da.get_latest_moe_param_for_a_route(ttri.id)
    #     rw_moe_da.close_session()
    # except Exception as e:
    #     logger = getLogger(__name__)
    #     logger.warning('fail to fetch the latest MOE parameter for this route. Error: {}'.format(e))

    print(f"{Fore.GREEN}CALCULATING TRAVEL-TIME FOR ROUTE[{ttri.name}]")
    res_dict = _calculate_tt_moe(ttri.route, prd)

    if not res_dict:
        logger.warning('fail to calculate travel time')
        return False

    flow_data, raw_flow_data = res_dict["flow_data"]
    density_data, raw_density_data = res_dict['density_data']
    speed_data_without_virtual_node, speed_data, raw_speed_data = res_dict["speed_data"]
    travel_time_results = res_dict['tt']
    res_mrf = res_dict["mrf"]
    travel_time = travel_time_results[-1].data
    avg_speeds = _route_avgs(speed_data_without_virtual_node)
    accelerator_data = _raw_route_avgs(_calculate_accel(speed_data_without_virtual_node, prd.interval, **kwargs), prd)
    timeline = prd.get_timeline(as_datetime=False, with_date=True)
    print(f"{Fore.CYAN}Start[{timeline[0]}] End[{timeline[-1]}] TimelineLength[{len(timeline)}]")
    for index, dateTimeStamp in enumerate(timeline):
        meta_data = generate_meta_data(raw_flow_data, raw_speed_data, raw_density_data,
                                       flow_data, speed_data, density_data, speed_data_without_virtual_node, res_mrf,
                                       moe_param_config,
                                       index)
        meta_data_string = json.dumps(meta_data)
        interval = TT_DATA_INTERVAL
        moe_critical_density = moe_param_config.moe_critical_density
        moe_lane_capacity = moe_param_config.moe_lane_capacity
        moe_congestion_threshold_speed = moe_param_config.moe_congestion_threshold_speed
        vmt = calculate_vmt_dynamically(meta_data, interval)
        vht = calculate_vht_dynamically(meta_data, interval)
        dvh = calculate_dvh_dynamically(meta_data, interval)
        lvmt = calculate_lvmt_dynamically(meta_data, interval, moe_critical_density, moe_lane_capacity)
        uvmt = calculate_uvmt_dynamically(meta_data, interval, moe_critical_density, moe_lane_capacity)
        cm = calculate_cm_dynamically(meta_data, moe_congestion_threshold_speed)
        cmh = calculate_cmh_dynamically(meta_data, interval, moe_congestion_threshold_speed)
        tt_data = {
            'route_id': ttri.id,
            'time': dateTimeStamp,
            'tt': travel_time[index],
            'speed': avg_speeds[index],
            'vmt': vmt,
            'vht': vht,
            'dvh': dvh,
            'lvmt': lvmt,
            'uvmt': uvmt,
            'cm': cm,
            'cmh': cmh,
            'acceleration': accelerator_data[index],
            'meta_data': meta_data_string,

        }
        if create_or_update:
            existing_data = existing_data_dict.get((tt_data['route_id'], tt_data['time']))
            if existing_data:
                updatable_dict[existing_data.id] = tt_data
            else:
                creatable_list.append(tt_data)
        else:
            creatable_list.append(tt_data)
    inserted_ids = list()
    if creatable_list:
        with lock:
            inserted_ids = da_tt.bulk_insert(creatable_list)
            if not inserted_ids or not da_tt.commit():
                logger.warning('fail to insert the calculated travel time into database')
    if not inserted_ids:
        inserted_ids = list()
    if updatable_dict:
        with lock:
            for tt_id, tt_data in updatable_dict.items():
                da_tt.update(tt_id, generate_updatable_moe_dict(tt_data))
                inserted_ids.append(tt_id)
            da_tt.commit()
    if not dbsession:
        da_tt.close_session()
    return inserted_ids
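
The expected shape of rw_moe_param_json can be read off the key lookups above; the values below are illustrative only:

rw_moe_param_json = {
    'rw_moe_lane_capacity': 2200,               # veh/hour/lane (illustrative)
    'rw_moe_critical_density': 40.0,            # veh/mile/lane (illustrative)
    'rw_moe_congestion_threshold_speed': 45.0,  # mph (illustrative)
}
ids = calculate_tt_moe_a_route(prd, ttri,
                               rw_moe_param_json=rw_moe_param_json,
                               create_or_update=True)  # update existing (route, time) rows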
Example No. 10
def update_moe_values_a_route(prd, ttri_id, **kwargs):
    """

    :type prd: pyticas.ttypes.Period
    :type ttri_id: int
    """
    import json
    dbsession = kwargs.get('dbsession', None)

    if dbsession:
        da_tt = TravelTimeDataAccess(prd.start_date.year, session=dbsession)
    else:
        da_tt = TravelTimeDataAccess(prd.start_date.year)
    rw_moe_param_json = kwargs.get("rw_moe_param_json")
    updatable_dict = {}
    existing_data_list = da_tt.list_by_period(ttri_id, prd)
    for existing_data in existing_data_list:
        meta_data = json.loads(existing_data.meta_data)
        updatable_data = dict()
        if meta_data.get('moe_congestion_threshold_speed') != rw_moe_param_json.get(
                'rw_moe_congestion_threshold_speed'):
            cm = (calculate_cm_dynamically(meta_data, rw_moe_param_json.get('rw_moe_congestion_threshold_speed')))
            cmh = (calculate_cmh_dynamically(meta_data, TT_DATA_INTERVAL,
                                             rw_moe_param_json.get('rw_moe_congestion_threshold_speed')))
            if existing_data.cm != cm:
                updatable_data['cm'] = cm
            if existing_data.cmh != cmh:
                updatable_data['cmh'] = cmh
            meta_data['moe_congestion_threshold_speed'] = rw_moe_param_json.get('rw_moe_congestion_threshold_speed')
            updatable_data['meta_data'] = json.dumps(meta_data)
        if meta_data.get('moe_lane_capacity') != rw_moe_param_json.get('rw_moe_lane_capacity') or meta_data.get(
                'moe_critical_density') != rw_moe_param_json.get('rw_moe_critical_density'):
            lvmt = calculate_lvmt_dynamically(meta_data, TT_DATA_INTERVAL,
                                              rw_moe_param_json.get('rw_moe_critical_density'),
                                              rw_moe_param_json.get('rw_moe_lane_capacity'))
            uvmt = calculate_uvmt_dynamically(meta_data, TT_DATA_INTERVAL,
                                              rw_moe_param_json.get('rw_moe_critical_density'),
                                              rw_moe_param_json.get('rw_moe_lane_capacity'))
            if existing_data.lvmt != lvmt:
                updatable_data['lvmt'] = lvmt
            if existing_data.uvmt != uvmt:
                updatable_data['uvmt'] = uvmt
            meta_data['moe_lane_capacity'] = rw_moe_param_json.get('rw_moe_lane_capacity')
            meta_data['moe_critical_density'] = rw_moe_param_json.get('rw_moe_critical_density')
            updatable_data['meta_data'] = json.dumps(meta_data)
        if updatable_data:
            updatable_dict[existing_data.id] = updatable_data
    lock = kwargs.get('lock', nonop_with())
    if updatable_dict:
        with lock:
            for tt_id, updatable_data in updatable_dict.items():
                da_tt.update(tt_id, updatable_data)
            da_tt.commit()
    da_tt.close_session()
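
A sketch of re-running the MOE values for one route over January 2020 after a parameter change (route id, dates, and the parameter dict are illustrative):

import datetime

prd = Period(datetime.datetime(2020, 1, 1, 0, 0, 0),
             datetime.datetime(2020, 1, 31, 23, 59, 59),
             TT_DATA_INTERVAL)
update_moe_values_a_route(prd, 1,  # illustrative route id
                          rw_moe_param_json=rw_moe_param_json)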
Example No. 11
def _handler_systemconfig(da, item, action_log):
    """

    :type da: pyticas_tetres.da.config.ConfigDataAccess
    :type item: pyticas_tetres.ttypes.SystemConfigInfo
    :type action_log: pyticas_tetres.ttypes.ActionLogInfo
    """
    ttr_da = TTRouteDataAccess()
    routes = ttr_da.list()
    ttr_da.close_session()

    start_date = datetime.datetime.strptime('%s-01-01' % cfg.DATA_ARCHIVE_START_YEAR, '%Y-%m-%d')
    last_date = datetime.datetime.now() - datetime.timedelta(days=cfg.DAILY_JOB_OFFSET_DAYS)
    daily_periods = _get_daily_periods(start_date, last_date)

    # faverolles 1/16/2020 NOTE: If the AdminClient changes the Archive Start Year parameter,
    #  an entry is made in the action_log database table. The server will repeatedly rerun
    #  initial_data_maker (which is also run by dataloader.py) until this entry is removed
    #  from the database. The issue is that the entry won't be removed because "target data
    #  is not handled", which I think means "until all traffic data is downloaded" for the
    #  archive start year. This never happens because the traffic data is hundreds of GBs.

    if action_log.data_desc.startswith('DATA_ARCHIVE_START_YEAR_EXTENDED'):
        # calculate travel time data and the related non-traffic data during the extended years
        _, year_change = action_log.data_desc.split(':')
        prev_year, changed_year = year_change.split('->')
        prev_year, changed_year = int(prev_year.strip()), int(changed_year.strip())
        prev_end_date = datetime.datetime.strptime('%s-12-31' % (prev_year - 1), '%Y-%m-%d').date()
        try:
            # faverolles 1/16/2020 NOTE: Why is there no db_info parameter passed?
            #  I'm guessing it's expected to fail because of the try-catch, maybe?
            from pyticas_tetres.util.traffic_file_checker import has_traffic_files
            start_date_str, end_date_str = start_date.strftime('%Y-%m-%d'), prev_end_date.strftime('%Y-%m-%d')
            if not has_traffic_files(start_date_str, end_date_str):
                return False, "Missing traffic files for the given time range from {} to {}.".format(start_date_str, end_date_str)
            import dbinfo
            initial_data_maker.run(start_date.date(), prev_end_date, db_info=dbinfo.tetres_db_info())
            return True
        except Exception as ex:
            getLogger(__name__).warning(
                'exception occurred when handling SystemConfig - Data Archive Start Year (Extended): %s'
                % tb.traceback(ex, f_print=False))
            return False

    elif action_log.data_desc.startswith('DATA_ARCHIVE_START_YEAR_SHRINKED'):
        # delete the travel time data and the related non-traffic data for the removed years
        _, year_change = action_log.data_desc.split(':')
        prev_year, changed_year = year_change.split('->')
        prev_year, changed_year = int(prev_year.strip()), int(changed_year.strip())
        years = [y for y in range(prev_year, changed_year)]

        for y in years:
            sdt = datetime.datetime.strptime('%s-01-01 00:00:00' % y, '%Y-%m-%d %H:%M:%S')
            edt = datetime.datetime.strptime('%s-12-31 23:59:59' % y, '%Y-%m-%d %H:%M:%S')

            try:
                tt_da = TravelTimeDataAccess(y)
                for a_route in routes:
                    tt_da.delete_range(a_route.id, sdt, edt)
                tt_da.close_session()

                weather_da = NoaaWeatherDataAccess(y)
                weather_da.delete_range(None, None, start_time=sdt, end_time=edt)
                weather_da.commit()
                weather_da.close_session()

                incident_da = IncidentDataAccess()
                incident_da.delete_range_all(start_time=sdt, end_time=edt)
                incident_da.commit()
                incident_da.close_session()
            except Exception as ex:
                getLogger(__name__).warning(
                    'exception occurred when handling SystemConfig - Data Archive Start Year (Shrunk): %s'
                    % tb.traceback(ex, f_print=False))
                return False

    elif action_log.target_datatype == ActionLogDataAccess.DT_INCIDENT:
        for a_route in routes:
            for prd in daily_periods:
                try:
                    categorization.categorize(a_route, prd, categorizers=[categorization.incident])
                except Exception as ex:
                    getLogger(__name__).warning(
                        'exception occurred when handling SystemConfig - Incident Parameters Changes: %s'
                        % tb.traceback(ex, f_print=False))
                    return False

    elif action_log.target_datatype == ActionLogDataAccess.DT_WORKZONE:
        for a_route in routes:
            for prd in daily_periods:
                try:
                    categorization.categorize(a_route, prd, categorizers=[categorization.workzone])
                except Exception as ex:
                    getLogger(__name__).warning(
                        'exception occurred when handling SystemConfig - Workzone Parameters Changes: %s'
                        % tb.traceback(ex, f_print=False))
                    return False

    elif action_log.target_datatype == ActionLogDataAccess.DT_SPECIALEVENT:
        for a_route in routes:
            for prd in daily_periods:
                try:
                    categorization.categorize(a_route, prd, categorizers=[categorization.specialevent])
                except Exception as ex:
                    getLogger(__name__).warning(
                        'exception occurred when handling SystemConfig - SpecialEvent Parameters Changes: %s'
                        % tb.traceback(ex, f_print=False))
                    return False

    elif action_log.target_datatype == ActionLogDataAccess.DT_SNOWMGMT:
        for a_route in routes:
            for prd in daily_periods:
                try:
                    categorization.categorize(a_route, prd, categorizers=[categorization.snowmgmt])
                except Exception as ex:
                    getLogger(__name__).warning(
                        'exception occurred when handling SystemConfig - SnowManagement Parameters Changes: %s'
                        % tb.traceback(ex, f_print=False))
                    return False

    return True
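
_get_daily_periods is referenced above but not shown; given how it is used, it presumably expands the date range into one Period per day, roughly like this sketch (relies on the module's period, cfg, and datetime imports):

def _get_daily_periods(start_date, last_date):
    # sketch: one 00:00:00-23:59:59 Period per day in [start_date, last_date]
    periods = []
    cursor = start_date
    while cursor <= last_date:
        periods.append(period.Period(
            cursor.replace(hour=0, minute=0, second=0, microsecond=0),
            cursor.replace(hour=23, minute=59, second=59, microsecond=0),
            cfg.TT_DATA_INTERVAL))
        cursor += datetime.timedelta(days=1)
    return periods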
Example No. 12
def calculate_a_route(prd, ttri, **kwargs):
    """

    :type prd: pyticas.ttypes.Period
    :type ttri: pyticas_tetres.ttypes.TTRouteInfo
    """
    logger = getLogger(__name__)
    dbsession = kwargs.get('dbsession', None)

    if dbsession:
        da_tt = TravelTimeDataAccess(prd.start_date.year, session=dbsession)
    else:
        da_tt = TravelTimeDataAccess(prd.start_date.year)

    lock = kwargs.get('lock', nonop_with())

    # delete data to avoid duplicated data
    with lock:
        is_deleted = da_tt.delete_range(ttri.id, prd.start_date, prd.end_date, print_exception=True)
        if not is_deleted or not da_tt.commit():
            logger.warning('fail to delete the existing travel time data')
            if not dbsession:
                da_tt.close_session()
            return False

    # calculate tt and vmt
    (res_tt, res_speed, res_vmt) = _calculate_tt(ttri.route, prd)

    if res_tt is None:
        logger.warning('fail to calculate travel time')
        return False

    avg_speeds = _route_avgs(res_speed)
    total_vmts = _route_total(res_vmt)
    seg_tt = res_tt[-1].data
    timeline = prd.get_timeline(as_datetime=False, with_date=True)

    data = []
    for idx, dts in enumerate(timeline):
        data.append({
            'route_id': ttri.id,
            'time': dts,
            'tt': seg_tt[idx],
            'vmt': total_vmts[idx],
            'speed': avg_speeds[idx]
        })

    with lock:
        inserted_ids = da_tt.bulk_insert(data)
        if not inserted_ids or not da_tt.commit():
            logger.warning('fail to insert the calculated travel time into database')
            if not dbsession:
                da_tt.close_session()
            return False

    if not dbsession:
        da_tt.close_session()

    return inserted_ids
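
Because every database write above happens inside a `with lock:` block, callers running several worker processes can serialize writes by passing one shared lock (a sketch; prd and ttri are assumed to exist):

import multiprocessing

lock = multiprocessing.Lock()
inserted_ids = calculate_a_route(prd, ttri, lock=lock)
if inserted_ids is False:
    print('travel time calculation failed for', ttri.name)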