def categorize(ttri, prd, ttdata, **kwargs):
    """ categorize travel time data by the special events occurring
    near the given route during the given period

    :type ttri: pyticas_tetres.ttypes.TTRouteInfo
    :type prd: pyticas.ttypes.Period
    :type ttdata: list[pyticas_tetres.ttypes.TravelTimeInfo]
    :rtype: int
    """
    lock = kwargs.get('lock', nonop_with())

    # use caller-provided special events when given, otherwise query them
    given_seis = kwargs.get('specialevents', None)
    seis = given_seis or se_helper.find_specialevents(
        prd, SE_ARRIVAL_WINDOW, SE_DEPARTURE_WINDOW1, SE_DEPARTURE_WINDOW2)

    # pair each special event with its minimum distance to the route
    specialevents = []
    for sei in seis:
        distance = loc.minimum_distance(ttri.route, float(sei.lat),
                                        float(sei.lon))
        specialevents.append((sei, distance))

    year = prd.start_date.year
    ttseDA = TTSpecialeventDataAccess(year)

    # avoid to save duplicated data
    with lock:
        is_deleted = ttseDA.delete_range(ttri.id,
                                         prd.start_date,
                                         prd.end_date,
                                         item_ids=[v.id for v in seis])
        if not is_deleted or not ttseDA.commit():
            ttseDA.rollback()
            ttseDA.close_session()
            # log at warning level, consistent with the other categorizers
            getLogger(__name__).warning(
                '! specialevent.categorize(): fail to delete existing data')
            return -1

    dict_data = []
    for idx, tti in enumerate(ttdata):
        # special events relevant to the time of this travel time record
        # (renamed from `seis` to avoid shadowing the outer list)
        found_ses = _find_ses(specialevents, tti.str2datetime(tti.time))
        for (sei, distance, event_type) in found_ses:
            dict_data.append({
                'tt_id': tti.id,
                'specialevent_id': sei.id,
                'distance': distance,
                'event_type': event_type
            })

    if dict_data:
        with lock:
            inserted_ids = ttseDA.bulk_insert(dict_data, print_exception=True)
            if not inserted_ids or not ttseDA.commit():
                ttseDA.rollback()
                ttseDA.close_session()
                getLogger(__name__).warning(
                    '! specialevent.categorize(): fail to insert categorized data'
                )
                return -1

    ttseDA.close_session()
    return len(dict_data)
# Beispiel #2
# 0
def calculate_a_route(prd, ttri, **kwargs):
    """ calculate travel time, average speed and VMT of the given route
    for the given time period and store the results into database

    :type prd: pyticas.ttypes.Period
    :type ttri: pyticas_tetres.ttypes.TTRouteInfo
    :return: list of inserted record ids, or False on failure
    """
    logger = getLogger(__name__)
    dbsession = kwargs.get('dbsession', None)

    if dbsession:
        da_tt = TravelTimeDataAccess(prd.start_date.year, session=dbsession)
    else:
        da_tt = TravelTimeDataAccess(prd.start_date.year)

    lock = kwargs.get('lock', nonop_with())

    # delete data to avoid duplicated data
    with lock:
        is_deleted = da_tt.delete_range(ttri.id, prd.start_date, prd.end_date, print_exception=True)
        if not is_deleted or not da_tt.commit():
            logger.warning('fail to delete the existing travel time data')
            if not dbsession:
                da_tt.close_session()
            return False

    # calculate tt and vmt
    (res_tt, res_speed, res_vmt) = _calculate_tt(ttri.route, prd)

    if res_tt is None:
        logger.warning('fail to calculate travel time')
        # close the locally-created session (previously leaked on this path)
        if not dbsession:
            da_tt.close_session()
        return False

    avg_speeds = _route_avgs(res_speed)
    total_vmts = _route_total(res_vmt)
    # travel times of the whole route are in the last result entry
    seg_tt = res_tt[-1].data
    timeline = prd.get_timeline(as_datetime=False, with_date=True)

    data = []
    for idx, dts in enumerate(timeline):
        data.append({
            'route_id': ttri.id,
            'time': dts,
            'tt': seg_tt[idx],
            'vmt': total_vmts[idx],
            'speed': avg_speeds[idx]
        })

    with lock:
        inserted_ids = da_tt.bulk_insert(data)
        if not inserted_ids or not da_tt.commit():
            logger.warning('fail to insert the calculated travel time into database')
            if not dbsession:
                da_tt.close_session()
            return False

    if not dbsession:
        da_tt.close_session()

    return inserted_ids
def categorize(ttri, prd, ttdata, **kwargs):
    """ categorize travel time data by the incidents occurring
    around the given route during the given period

    :type ttri: pyticas_tetres.ttypes.TTRouteInfo
    :type prd: pyticas.ttypes.Period
    :type ttdata: list[pyticas_tetres.ttypes.TravelTimeInfo]
    :rtype: int
    """
    lock = kwargs.get('lock', nonop_with())

    given_incidents = kwargs.get('incidents', None)
    all_incidents = given_incidents or ihelper.find_incidents(ttri.corridors()[0], prd)

    route_length = ttri.route.length()
    incd_locations = []
    for incd in all_incidents:
        distance = loc.location_by_coordinate(ttri.route, incd.lat, incd.lon)
        # `location_by_coordinate()` returns False when the incident cannot be
        # located on the route; use an identity check so that a valid distance
        # of 0.0 (which compares equal to False) is not silently dropped
        if distance is not False:
            if distance < -INCIDENT_UPSTREAM_DISTANCE_LIMIT or distance > route_length + INCIDENT_DOWNSTREAM_DISTANCE_LIMIT:
                continue
            # off_distance : 0 if the incident is within the route,
            #                negative if upstream, positive if downstream
            off_distance = distance if distance < 0 else max(0, distance - route_length)
            incd_locations.append((distance, off_distance, incd))

    year = prd.start_date.year
    ttincident_da = TTIncidentDataAccess(year)

    # avoid to save duplicated data
    with lock:
        is_deleted = ttincident_da.delete_range(ttri.id, prd.start_date, prd.end_date,
                                                item_ids=[v.id for v in all_incidents])
        if not is_deleted or not ttincident_da.commit():
            ttincident_da.rollback()
            ttincident_da.close_session()
            getLogger(__name__).warning('! incident.categorize(): fail to delete existing data')
            return -1

    dict_data = []
    for idx, tti in enumerate(ttdata):
        # incidents relevant to the time of this travel time record
        incds = _find_incident(incd_locations, tti.str2datetime(tti.time))
        for (dist, off_dist, incd) in incds:
            dict_data.append({
                'tt_id': tti.id,
                'incident_id': incd.id,
                'distance': dist,
                'off_distance': off_dist
            })

    if dict_data:
        with lock:
            inserted_ids = ttincident_da.bulk_insert(dict_data)
            if not inserted_ids or not ttincident_da.commit():
                ttincident_da.rollback()
                ttincident_da.close_session()
                getLogger(__name__).warning('! incident.categorize(): fail to insert categorization data')
                return -1

    ttincident_da.close_session()
    return len(dict_data)
# Beispiel #4
# 0
def categorize(ttri, prd, ttdata, **kwargs):
    """ categorize travel time data by the snow management activities
    located around the given route

    :type ttri: pyticas_tetres.ttypes.TTRouteInfo
    :type prd: pyticas.ttypes.Period
    :type ttdata: list[pyticas_tetres.ttypes.TravelTimeInfo]
    :return:
    """
    lock = kwargs.get('lock', nonop_with())

    snmDA = SnowMgmtDataAccess()
    ttsnmDA = TTSnowManagementDataAccess(prd.start_date.year,
                                         session=snmDA.get_session())

    # snow management events: taken from the caller when provided,
    # otherwise loaded from database for the target period
    snowmgmts = kwargs.get('snowmgmts', None)
    if not snowmgmts:
        snowmgmts = snmDA.list_by_period(prd.start_date, prd.end_date,
                                         set_related_model_info=True)
    snmis = _decide_location(ttri, snowmgmts)

    # remove previously-categorized rows first to avoid duplicates
    with lock:
        deleted = ttsnmDA.delete_range(ttri.id,
                                       prd.start_date,
                                       prd.end_date,
                                       item_ids=[snm.id for snm in snowmgmts])
        if not deleted or not ttsnmDA.commit():
            ttsnmDA.rollback()
            ttsnmDA.close_session()
            getLogger(__name__).warning(
                '! snowmgmt.categorize(): fail to delete existing data')
            return -1

    # one row per (travel time record, matched snow management event)
    dict_data = [
        {
            'tt_id': tti.id,
            'snowmgmt_id': snmi.id,
            'loc_type': loc_type.value,
            'distance': distance,
            'off_distance': off_distance,
            'road_status': -1,
        }
        for tti in ttdata
        for (loc_type, distance, off_distance, snmi, r)
        in _find_snowmgmts(snmis, tti.str2datetime(tti.time))
    ]

    if dict_data:
        with lock:
            inserted = ttsnmDA.bulk_insert(dict_data, print_exception=True)
            if not inserted or not ttsnmDA.commit():
                ttsnmDA.rollback()
                ttsnmDA.close_session()
                getLogger(__name__).warning(
                    '! snowmgmt.categorize(): fail to insert categorized data')
                return -1

    ttsnmDA.close_session()

    return len(dict_data)
# Beispiel #5
# 0
def calculate_a_route(prd, ttri, **kwargs):
    """ calculate travel time, average speed and VMT of the given route
    for the given time period and store the results into database

    :type prd: pyticas.ttypes.Period
    :type ttri: pyticas_tetres.ttypes.TTRouteInfo
    :return: list of inserted record ids (empty on insert failure),
             or False when deletion/calculation fails
    """
    logger = getLogger(__name__)
    dbsession = kwargs.get('dbsession', None)

    if dbsession:
        da_tt = TravelTimeDataAccess(prd.start_date.year, session=dbsession)
    else:
        da_tt = TravelTimeDataAccess(prd.start_date.year)
    creatable_list = list()
    lock = kwargs.get('lock', nonop_with())
    # delete data to avoid duplicated data
    with lock:
        is_deleted = da_tt.delete_range(ttri.id, prd.start_date, prd.end_date, print_exception=True)
        if not is_deleted or not da_tt.commit():
            logger.warning('fail to delete the existing travel time data')
            if not dbsession:
                da_tt.close_session()
            return False

    print(f"{Fore.GREEN}CALCULATING TRAVEL-TIME FOR ROUTE[{ttri.name}]")
    res_dict = _calculate_tt(ttri.route, prd)

    if not res_dict or not res_dict['tt']:
        logger.warning('fail to calculate travel time')
        # close the locally-created session (previously leaked on this path)
        if not dbsession:
            da_tt.close_session()
        return False

    travel_time_results = res_dict['tt']
    # travel times of the whole route are in the last result entry
    travel_time = travel_time_results[-1].data
    avg_speeds = _route_avgs(res_dict['speed'])
    res_vmt = _route_total(res_dict['vmt'])
    timeline = prd.get_timeline(as_datetime=False, with_date=True)
    print(f"{Fore.CYAN}Start[{timeline[0]}] End[{timeline[-1]}] TimelineLength[{len(timeline)}]")
    for index, dateTimeStamp in enumerate(timeline):
        tt_data = {
            'route_id': ttri.id,
            'time': dateTimeStamp,
            'tt': travel_time[index],
            'speed': avg_speeds[index],
            'vmt': res_vmt[index],
        }
        creatable_list.append(tt_data)
    inserted_ids = list()
    if creatable_list:
        with lock:
            inserted_ids = da_tt.bulk_insert(creatable_list)
            if not inserted_ids or not da_tt.commit():
                logger.warning('fail to insert the calculated travel time into database')
    if not dbsession:
        da_tt.close_session()
    return inserted_ids
# Beispiel #6
# 0
def update_moe_values_a_route(prd, ttri_id, **kwargs):
    """ re-calculate route-wise MOE values (cm, cmh, lvmt, uvmt) of existing
    travel time records whenever the route-wise MOE parameters differ from
    the parameters stored in each record's meta data, and update the records

    :type prd: pyticas.ttypes.Period
    :type ttri_id: int
    """
    import json
    dbsession = kwargs.get('dbsession', None)

    if dbsession:
        da_tt = TravelTimeDataAccess(prd.start_date.year, session=dbsession)
    else:
        da_tt = TravelTimeDataAccess(prd.start_date.year)
    rw_moe_param_json = kwargs.get("rw_moe_param_json")
    updatable_dict = {}
    existing_data_list = da_tt.list_by_period(ttri_id, prd)
    for existing_data in existing_data_list:
        meta_data = json.loads(existing_data.meta_data)
        updatable_data = dict()
        # values depending on the congestion threshold speed (cm, cmh)
        if meta_data.get('moe_congestion_threshold_speed') != rw_moe_param_json.get(
                'rw_moe_congestion_threshold_speed'):
            cm = (calculate_cm_dynamically(meta_data, rw_moe_param_json.get('rw_moe_congestion_threshold_speed')))
            cmh = (calculate_cmh_dynamically(meta_data, TT_DATA_INTERVAL,
                                             rw_moe_param_json.get('rw_moe_congestion_threshold_speed')))
            if existing_data.cm != cm:
                updatable_data['cm'] = cm
            if existing_data.cmh != cmh:
                updatable_data['cmh'] = cmh
            meta_data['moe_congestion_threshold_speed'] = rw_moe_param_json.get('rw_moe_congestion_threshold_speed')
            updatable_data['meta_data'] = json.dumps(meta_data)
        # values depending on lane capacity / critical density (lvmt, uvmt)
        if meta_data.get('moe_lane_capacity') != rw_moe_param_json.get('rw_moe_lane_capacity') or meta_data.get(
                'moe_critical_density') != rw_moe_param_json.get('rw_moe_critical_density'):
            lvmt = calculate_lvmt_dynamically(meta_data, TT_DATA_INTERVAL,
                                              rw_moe_param_json.get('rw_moe_critical_density'),
                                              rw_moe_param_json.get('rw_moe_lane_capacity'))
            uvmt = calculate_uvmt_dynamically(meta_data, TT_DATA_INTERVAL,
                                              rw_moe_param_json.get('rw_moe_critical_density'),
                                              rw_moe_param_json.get('rw_moe_lane_capacity'))
            if existing_data.lvmt != lvmt:
                updatable_data['lvmt'] = lvmt
            if existing_data.uvmt != uvmt:
                updatable_data['uvmt'] = uvmt
            meta_data['moe_lane_capacity'] = rw_moe_param_json.get('rw_moe_lane_capacity')
            meta_data['moe_critical_density'] = rw_moe_param_json.get('rw_moe_critical_density')
            updatable_data['meta_data'] = json.dumps(meta_data)
        if updatable_data:
            updatable_dict[existing_data.id] = updatable_data
    lock = kwargs.get('lock', nonop_with())
    if updatable_dict:
        with lock:
            for tt_id, updatable_data in updatable_dict.items():
                da_tt.update(tt_id, updatable_data)
            da_tt.commit()
    # only close the session if it was created here; a caller-provided
    # `dbsession` is owned by the caller (previously closed unconditionally)
    if not dbsession:
        da_tt.close_session()
# Beispiel #7
# 0
def calculate_all_routes(prd, **kwargs):
    """ calculate travel time, average speed and VMT during the given time period
    and put whole_data to database (travel time table)

    :type prd: pyticas.ttypes.Period
    :rtype: list[dict]
    """
    logger = getLogger(__name__)
    logger.info('calculating travel time : %s' % prd.get_period_string())

    # load every registered travel-time route
    da_route = TTRouteDataAccess()
    route_list = da_route.list()
    da_route.close_session()

    lock = kwargs.get('lock', nonop_with())
    results = []
    n_routes = len(route_list)
    for pos, ttri in enumerate(route_list, start=1):
        logger.info('(%d/%d) calculating travel time for %s(%s) : %s'
                    % (pos, n_routes, ttri.name, ttri.id, prd.get_period_string()))
        done = calculate_a_route(prd, ttri, lock=lock)
        results.append({'route_id': ttri.id, 'done': done})

    return results
# Beispiel #8
# 0
def calculate_tt_moe_a_route(prd, ttri, **kwargs):
    """ calculate travel time and MOE values (VMT, VHT, DVH, LVMT, UVMT,
    CM, CMH, acceleration) of the given route for the given time period,
    then insert new records or update the existing ones in database

    :type prd: pyticas.ttypes.Period
    :type ttri: pyticas_tetres.ttypes.TTRouteInfo
    :return: list of inserted/updated record ids, or False on failure
    """
    logger = getLogger(__name__)
    dbsession = kwargs.get('dbsession', None)
    create_or_update = kwargs.get("create_or_update", True)
    rw_moe_param_json = kwargs.get("rw_moe_param_json")
    # route-wise MOE parameters take precedence over the system configuration
    if rw_moe_param_json:
        moe_param_config = RouteWiseMOEParametersInfo()
        moe_param_config.moe_lane_capacity = rw_moe_param_json.get('rw_moe_lane_capacity')
        moe_param_config.moe_critical_density = rw_moe_param_json.get('rw_moe_critical_density')
        moe_param_config.moe_congestion_threshold_speed = rw_moe_param_json.get('rw_moe_congestion_threshold_speed')
    else:
        moe_param_config = get_system_config_info()

    if dbsession:
        da_tt = TravelTimeDataAccess(prd.start_date.year, session=dbsession)
    else:
        da_tt = TravelTimeDataAccess(prd.start_date.year)
    creatable_list = list()
    updatable_dict = {}
    existing_data_dict = {}
    if create_or_update:
        # index the existing records by (route_id, time) so that newly
        # calculated data can update them in place instead of duplicating
        existing_data_list = da_tt.list_by_period(ttri.id, prd)
        for existing_data in existing_data_list:
            existing_data_dict[(existing_data.route_id, existing_data.time)] = existing_data
    lock = kwargs.get('lock', nonop_with())
    if not create_or_update:
        # delete data to avoid duplicated data
        with lock:
            is_deleted = da_tt.delete_range(ttri.id, prd.start_date, prd.end_date, print_exception=True)
            if not is_deleted or not da_tt.commit():
                logger.warning('fail to delete the existing travel time data')
                if not dbsession:
                    da_tt.close_session()
                return False

    print(f"{Fore.GREEN}CALCULATING TRAVEL-TIME FOR ROUTE[{ttri.name}]")
    res_dict = _calculate_tt_moe(ttri.route, prd)

    if not res_dict:
        logger.warning('fail to calculate travel time')
        # close the locally-created session (previously leaked on this path)
        if not dbsession:
            da_tt.close_session()
        return False

    flow_data, raw_flow_data = res_dict["flow_data"]
    density_data, raw_density_data = res_dict['density_data']
    speed_data_without_virtual_node, speed_data, raw_speed_data = res_dict["speed_data"]
    travel_time_results = res_dict['tt']
    res_mrf = res_dict["mrf"]
    # travel times of the whole route are in the last result entry
    travel_time = travel_time_results[-1].data
    avg_speeds = _route_avgs(speed_data_without_virtual_node)
    accelerator_data = _raw_route_avgs(_calculate_accel(speed_data_without_virtual_node, prd.interval, **kwargs), prd)
    timeline = prd.get_timeline(as_datetime=False, with_date=True)
    print(f"{Fore.CYAN}Start[{timeline[0]}] End[{timeline[-1]}] TimelineLength[{len(timeline)}]")
    for index, dateTimeStamp in enumerate(timeline):
        # per-time-step meta data snapshot used by the dynamic MOE calculators
        meta_data = generate_meta_data(raw_flow_data, raw_speed_data, raw_density_data,
                                       flow_data, speed_data, density_data, speed_data_without_virtual_node, res_mrf,
                                       moe_param_config,
                                       index)
        meta_data_string = json.dumps(meta_data)
        interval = TT_DATA_INTERVAL
        moe_critical_density = moe_param_config.moe_critical_density
        moe_lane_capacity = moe_param_config.moe_lane_capacity
        moe_congestion_threshold_speed = moe_param_config.moe_congestion_threshold_speed
        vmt = calculate_vmt_dynamically(meta_data, interval)
        vht = calculate_vht_dynamically(meta_data, interval)
        dvh = calculate_dvh_dynamically(meta_data, interval)
        lvmt = calculate_lvmt_dynamically(meta_data, interval, moe_critical_density, moe_lane_capacity)
        uvmt = calculate_uvmt_dynamically(meta_data, interval, moe_critical_density, moe_lane_capacity)
        cm = calculate_cm_dynamically(meta_data, moe_congestion_threshold_speed)
        cmh = calculate_cmh_dynamically(meta_data, interval, moe_congestion_threshold_speed)
        tt_data = {
            'route_id': ttri.id,
            'time': dateTimeStamp,
            'tt': travel_time[index],
            'speed': avg_speeds[index],
            'vmt': vmt,
            'vht': vht,
            'dvh': dvh,
            'lvmt': lvmt,
            'uvmt': uvmt,
            'cm': cm,
            'cmh': cmh,
            'acceleration': accelerator_data[index],
            'meta_data': meta_data_string,

        }
        if create_or_update:
            # update the matching existing record if any, otherwise create
            existing_data = existing_data_dict.get((tt_data['route_id'], tt_data['time']))
            if existing_data:
                updatable_dict[existing_data.id] = tt_data
            else:
                creatable_list.append(tt_data)
        else:
            creatable_list.append(tt_data)
    inserted_ids = list()
    if creatable_list:
        with lock:
            inserted_ids = da_tt.bulk_insert(creatable_list)
            if not inserted_ids or not da_tt.commit():
                logger.warning('fail to insert the calculated travel time into database')
    if not inserted_ids:
        inserted_ids = list()
    if updatable_dict:
        with lock:
            for record_id, tt_data in updatable_dict.items():
                da_tt.update(record_id, generate_updatable_moe_dict(tt_data))
                inserted_ids.append(record_id)
            da_tt.commit()
    if not dbsession:
        da_tt.close_session()
    return inserted_ids
# Beispiel #9
# 0
def categorize(ttri, prd, ttdata, **kwargs):
    """ categorize travel time data by the work zones located
    around the given route

    :type ttri: pyticas_tetres.ttypes.TTRouteInfo
    :type prd: pyticas.ttypes.Period
    :type ttdata: list[pyticas_tetres.ttypes.TravelTimeInfo]
    :rtype: int
    """
    lock = kwargs.get('lock', nonop_with())

    given_wzs = kwargs.get('workzones', None)
    wzs = given_wzs or wz_helper.find_workzones(prd)

    workzones = []
    for wzi in wzs:
        loc_type, distance, off_distance = loc.location(ttri.route, wzi.route1)
        r = wzi.route1
        if loc_type is None:
            # distance : distance between the most upstream rnodes
            # off-distance : 0 if it is overlapped,
            #                negative : workzone is located in upstream of the route
            #                positive : workzone is located in downstream of the route
            loc_type, distance, off_distance = loc.location(
                ttri.route, wzi.route2)
            r = wzi.route2

        if loc_type is None:
            continue

        # skip work zones too far up/downstream from the route
        if (loc_type == LOC_TYPE.DOWN
                and off_distance > WZ_DOWNSTREAM_DISTANCE_LIMIT
                or loc_type == LOC_TYPE.UP
                and abs(off_distance) > WZ_UPSTREAM_DISTANCE_LIMIT):
            continue

        workzones.append((loc_type, distance, off_distance, wzi, r))

    year = prd.start_date.year
    da_tt_wz = TTWorkZoneDataAccess(year)

    # avoid to save duplicated data
    with lock:
        is_deleted = da_tt_wz.delete_range(ttri.id,
                                           prd.start_date,
                                           prd.end_date,
                                           item_ids=[v.id for v in wzs])
        if not is_deleted or not da_tt_wz.commit():
            # rollback before closing, consistent with the other categorizers
            da_tt_wz.rollback()
            da_tt_wz.close_session()
            getLogger(__name__).warning(
                '! workzone.categorize(): fail to delete existing data')
            return -1

    dict_data = []
    for idx, tti in enumerate(ttdata):
        # work zones active at the time of this travel time record
        # (renamed from `wzs` to avoid shadowing the outer list)
        found_wzs = _find_wzs(workzones, tti.str2datetime(tti.time))
        for (loc_type, distance, off_distance, wzi, r) in found_wzs:
            dict_data.append({
                'tt_id': tti.id,
                'workzone_id': wzi.id,
                'loc_type': loc_type.value,
                'distance': distance,
                'off_distance': off_distance
            })

    if dict_data:
        with lock:
            inserted_ids = da_tt_wz.bulk_insert(dict_data,
                                                print_exception=True)
            if not inserted_ids or not da_tt_wz.commit():
                da_tt_wz.rollback()
                da_tt_wz.close_session()
                getLogger(__name__).warning(
                    '! workzone.categorize(): fail to insert categorized data')
                return -1

    da_tt_wz.close_session()
    return len(dict_data)
# Beispiel #10
# 0
def categorize(ttri, prd, ttdata, **kwargs):
    """ categorize travel time data by the weather data measured at the
    nearest weather station that has enough data for the given period

    :type ttri: pyticas_tetres.ttypes.TTRouteInfo
    :type prd: pyticas.ttypes.Period
    :type ttdata: list[pyticas_tetres.ttypes.TravelTimeInfo]
    :rtype: int
    """
    lock = kwargs.get('lock', nonop_with())

    # prepare : coordinates, target year
    lat, lon = route.center_coordinates(ttri.route)
    year = prd.start_date.year

    # nearby weather station list
    all_isd_stations = isd.get_station_list('MN', None, False)
    isd_stations = isd.find_nearby_station(lat, lon, prd.start_date.date(),
                                           all_isd_stations)
    if not isd_stations or not isd_stations[0]:
        getLogger(__name__).warning(
            '! weather.categorize(): no weather information for TTRI(%d)' %
            (ttri.id))
        return -1

    # extend the period by an hour on both sides so that boundary
    # travel time records can still be matched to weather data
    cprd = prd.clone()
    cprd.extend_start_hour(1)
    cprd.extend_end_hour(1)

    # decide nearby weather station which has data during a given period
    # by trying to read weather data
    hours = (len(cprd.get_timeline()) * prd.interval) / 60 / 60
    da_noaa = NoaaWeatherDataAccess(year)

    nearby = None
    wis = []
    # walk the stations from nearest to farthest; the original loop advanced
    # `station_idx` without a bounds check and could raise IndexError
    for (distance, candidate) in isd_stations:
        if distance > WEATHER_STATION_DISTANCE_LIMIT:
            break
        wis = da_noaa.list_by_period(candidate.usaf, candidate.wban, cprd)
        # require data for at least 60% of the hours in the period
        if wis and len(wis) >= hours * 0.6:
            nearby = candidate
            break

    if not nearby:
        getLogger(__name__).warning(
            '! weather.categorize(): no weather information for TTRI(%d)' %
            (ttri.id))
        da_noaa.close_session()
        return -1

    da_ttw = TTWeatherDataAccess(year)

    # avoid to save duplicated data
    with lock:
        is_deleted = da_ttw.delete_range(ttri.id, prd.start_date, prd.end_date)
        if not is_deleted or not da_ttw.commit():
            da_ttw.rollback()
            da_ttw.close_session()
            da_noaa.close_session()
            return -1

    # insert weather data to database
    sidx = 0

    dict_data = []
    for idx, tti in enumerate(ttdata):
        # `sidx` carries the search position forward since ttdata is time-ordered
        sidx, wd = _find_wd(wis, tti.time, sidx)
        if not wd:
            getLogger(__name__).warning(
                '! weather.categorize(): weather data is not found for (tti.time=%s, usaf=%s, wban=%s)'
                % (tti.time, wis[-1].usaf, wis[-1].wban))
            continue
        dict_data.append({'tt_id': tti.id, 'weather_id': wd.id})

    if dict_data:
        with lock:
            inserted_ids = da_ttw.bulk_insert(dict_data, print_exception=True)
            if not inserted_ids or not da_ttw.commit():
                getLogger(__name__).warning(
                    '! weather.categorize() fail to insert categorized data')
                da_ttw.rollback()
                da_ttw.close_session()
                da_noaa.close_session()
                return -1

    da_noaa.close_session()
    da_ttw.close_session()

    return len(dict_data)
# Beispiel #11
# 0
def _calculate_for_a_regime(ttri,
                            regime_type,
                            sdate,
                            edate,
                            stime,
                            etime,
                            target_days=(1, 2, 3),
                            except_dates=(),
                            remove_holiday=True,
                            **kwargs):
    """ calculate time-of-day reliabilities of the given route for one regime
    and store the results into database

    :type ttri: pyticas_tetres.ttypes.TTRouteInfo
    :type regime_type: int
    :type sdate: datetime.date
    :type edate: datetime.date
    :type stime: datetime.time
    :type etime: datetime.time
    :type target_days: tuple[int]
    :rtype: bool
    """
    # Regime Filter
    ext_filter = _ext_filter(regime_type)

    lock = kwargs.get('lock', nonop_with())

    extractor.extract_tt(ttri.id,
                         sdate,
                         edate,
                         stime,
                         etime, [ext_filter],
                         target_days=target_days,
                         remove_holiday=remove_holiday,
                         except_dates=except_dates)

    da = TODReliabilityDataAccess()

    # delete existings
    ttwis = [ttwi for ttwi in da.list_by_route(ttri.id, regime_type)]
    ttwi_ids = [v.id for v in ttwis]
    with lock:
        is_deleted = da.delete_items(ttwi_ids)
        if not is_deleted or not da.commit():
            # close the session before bailing out (previously leaked)
            da.close_session()
            return False

    tod_res = []
    cursor = datetime.datetime.combine(datetime.date.today(), stime)
    cursor += datetime.timedelta(seconds=cfg.TT_DATA_INTERVAL)
    edatetime = datetime.datetime.combine(datetime.date.today(), etime)
    dict_data = []
    # walk the time-of-day in TT_DATA_INTERVAL steps and calculate the
    # reliability measures from the extracted data at each time point
    while cursor <= edatetime:
        ctime = cursor.strftime('%H:%M:00')
        res = [
            extdata for extdata in ext_filter.whole_data
            if ctime == extdata.tti.time.strftime('%H:%M:00')
        ]
        ttr_res = reliability.calculate(ttri, res)
        tod_res.append(ttr_res)
        dict_data.append({
            'regime_type': regime_type,
            'route_id': ttri.id,
            'hour': cursor.hour,
            'minute': cursor.minute,
            'result': json.dumps(ttr_res),
        })
        cursor += datetime.timedelta(seconds=cfg.TT_DATA_INTERVAL)

    with lock:
        is_inserted = da.bulk_insert(dict_data)
        if not is_inserted or not da.commit():
            da.close_session()
            return False

    da.close_session()
    return True
# Beispiel #12
# 0
def calculate_TOD_reliabilities(ttr_id, today, **kwargs):
    """ calculate time-of-day reliabilities of the given route for every
    (weather condition, day-of-week group) regime

    :type ttr_id: int
    :type today: datetime.datetime
    """
    ttri = _tt_route(ttr_id)
    sdate, edate, stime, etime = _time_period(today)

    lock = kwargs.get('lock', nonop_with())

    # (regime type, target days-of-week) pairs;
    # day numbers follow datetime.weekday() (0=Monday .. 6=Sunday)
    regimes = (
        (TOD_REGIME_N_0, (0,)),         # Normal, Monday
        (TOD_REGIME_N_123, (1, 2, 3)),  # Normal, Tuesday-Thursday
        (TOD_REGIME_N_4, (4,)),         # Normal, Friday
        (TOD_REGIME_N_56, (5, 6)),      # Normal, Saturday-Sunday
        (TOD_REGIME_R_0, (0,)),         # Rain, Monday
        (TOD_REGIME_R_123, (1, 2, 3)),  # Rain, Tuesday-Thursday
        (TOD_REGIME_R_4, (4,)),         # Rain, Friday
        (TOD_REGIME_R_56, (5, 6)),      # Rain, Saturday-Sunday
        (TOD_REGIME_S_0, (0,)),         # Snow, Monday
        (TOD_REGIME_S_123, (1, 2, 3)),  # Snow, Tuesday-Thursday
        (TOD_REGIME_S_4, (4,)),         # Snow, Friday
        (TOD_REGIME_S_56, (5, 6)),      # Snow, Saturday-Sunday
    )
    for regime_type, target_days in regimes:
        _calculate_for_a_regime(ttri,
                                regime_type,
                                sdate,
                                edate,
                                stime,
                                etime,
                                target_days,
                                lock=lock)