Example #1
def _decide_wn2_reduction_interval(edata, usidx, ueidx, after_congested_idx):
    """

    :type edata: pyticas_ncrtes.core.etypes.ESTData
    :type after_congested_idx: int
    :rtype:
    """
    rth = 0.8
    tmp_recovered_u_th = 20
    edata.should_wn_uk_without_ffs = True
    sidx, eidx = max(usidx - setting.INTV1HOUR,
                     0), min(usidx + setting.INTV1HOUR, edata.n_data - 1)
    maxu_idx = np.argmax(edata.sus[sidx:eidx]).item() + sidx

    if edata.sratios[maxu_idx] < rth and edata.lsratios[maxu_idx] < rth:
        getLogger(__name__).debug(
            '  - temporary recovered area is not found according to normal ratio (maxu_idx=%d, r=%f)'
            % (maxu_idx, edata.normal_ratios[maxu_idx]))
        edata.should_wn_uk_without_ffs = False
        return True

    edata.wn2_interval_sidx = maxu_idx
    edata.wn2_interval_eidx = maxu_idx
    for idx in range(maxu_idx, edata.n_data):
        if edata.sus[idx] < tmp_recovered_u_th:
            break
        edata.wn2_interval_eidx = idx

    getLogger(__name__).debug(
        '   - wn2_interval (%d - %d), after_congestion_idx=%d' %
        (edata.wn2_interval_sidx, edata.wn2_interval_eidx, edata.wn2_after_congestion_idx))
    return True
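
A minimal sketch of the same forward-scan idea on synthetic data: starting at the index of the local speed maximum, the temporarily-recovered interval is extended until speed drops below a threshold. The array and the 20 mph threshold below are made-up illustration values, not taken from ESTData.

import numpy as np

def scan_recovered_interval(speeds, start_idx, u_th=20):
    """Extend [start_idx, eidx] while the speed stays at or above u_th."""
    eidx = start_idx
    for idx in range(start_idx, len(speeds)):
        if speeds[idx] < u_th:
            break
        eidx = idx
    return start_idx, eidx

speeds = np.array([15, 18, 25, 32, 35, 28, 22, 17, 12])
maxu_idx = int(np.argmax(speeds))                    # 4
print(scan_recovered_interval(speeds, maxu_idx))     # (4, 6)
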
def _update_ncrt(target_ncrt, edata):
    """

    :param target_ncrt:
    :type target_ncrt: int
    :param edata:
    :type edata: pyticas_ncrtes.core.etypes.ESTData
    """
    getLogger(__name__).debug(
        '  - update : %s %s => %s' %
        (edata.target_station.station_id, edata.ncrt, target_ncrt))
    edata.own_ncrt = edata.ncrt
    edata.ncrt = target_ncrt
def estimate(num, tsi, edata):
    """

    :type num: int
    :type tsi: pyticas_ncrtes.itypes.TargetStationInfo
    :type edata: pyticas_ncrtes.core.etypes.ESTData
    :rtype: pyticas_ncrtes.core.etypes.ESTData
    """
    if not _check_edata(tsi, edata):
        return edata

    logger = getLogger(__name__)
    logger.info('>>> Determine NCRT Process for Target Station %s' % (edata.target_station.station_id))

    try:
        _pre_process_before_determination(edata)

        has_recovered_region = wnffs_finder.find(edata)
        if not has_recovered_region:
            _chart(edata, edata.ratios, edata.lsratios, edata.sratios)
            return edata

        wn_uk.make(edata)

        ncrt_finder.find(edata)

        _chart(edata, edata.ratios, edata.lsratios, edata.sratios)

    except Exception as ex:
        logger.error(tb.traceback(ex, f_print=False))

    logger.info('<<< End of NCRT Determination Process for Target Station %s' % (edata.target_station.station_id))

    return edata
def target_station_and_snowroute_info(year):
    infra = Infra.get_infra()

    logger = getLogger(__name__)
    logger.info(
        '>>> updating relations between target station and truck route')

    from pyticas_ncrtes.da.snowroute import SnowRouteDataAccess
    from pyticas_ncrtes.da.target_station import TargetStationDataAccess

    snrDA = SnowRouteDataAccess()
    tsDA = TargetStationDataAccess()

    snow_routes = snrDA.list_by_year(year)
    target_stations = tsDA.list_by_year(year, as_model=True)

    for tidx, ts in enumerate(target_stations):
        rnode = infra.get_rnode(ts.station_id)
        if not rnode:
            continue

        for snri in snow_routes:
            if rnode in snri.route1.rnodes or rnode in snri.route2.rnodes:
                ts.snowroute_id = snri.id
                ts.snowroute_name = snri._snowroute_group.name
                if tidx and tidx % 100 == 0:
                    # commit in batches of 100 stations
                    snrDA.commit()

    snrDA.commit()
    snrDA.close()
    tsDA.close()

    logger.info(
        '<<< end of updating relations between target station and truck route')
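
The tidx check above is meant to flush the session every 100 stations rather than once per row. A generic sketch of that batching pattern with a plain SQLAlchemy session; session, rows and apply_change are placeholders, not names from this project.

BATCH_SIZE = 100

def update_in_batches(session, rows, apply_change):
    """Apply a change to each row and commit every BATCH_SIZE rows."""
    for idx, row in enumerate(rows, start=1):
        apply_change(row)
        if idx % BATCH_SIZE == 0:   # periodic commit keeps transactions small
            session.commit()
    session.commit()                # final commit for the remaining rows
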
def _reported_events(request_param):
    """
    
    :type request_param: pyticas_ncrtes.itypes.EstimationRequestInfo
    :rtype: dict[str, list[ReportedEvent]]
    """
    logger = getLogger(__name__)
    reported_events = {}
    if hasattr(request_param, 'barelane_regain_time_infos'
               ) and request_param.barelane_regain_time_infos:
        for brt in request_param.barelane_regain_time_infos:
            # logger.debug(' > truck_id=%s, start=%s, end=%s, lost=%s, regain=%s' % (
            #     brt.truckroute_id,
            #     brt.snow_start_time,
            #     brt.snow_end_time,
            #     brt.lane_lost_time,
            #     brt.barelane_regain_time
            # ))
            reported = etypes.ReportedEvent(brt.truckroute_id,
                                            brt.snow_start_time,
                                            brt.snow_end_time,
                                            brt.lane_lost_time,
                                            brt.barelane_regain_time)
            rbtlist = reported_events.get(brt.truckroute_id, [])
            rbtlist.append(reported)
            reported_events[brt.truckroute_id] = rbtlist

    return reported_events
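
The grouping above (one list of ReportedEvent per truck route id) can also be written with collections.defaultdict; a small standalone sketch with made-up route ids and payloads:

from collections import defaultdict

def group_by_route(items):
    """Group (route_id, payload) pairs into dict[str, list]."""
    grouped = defaultdict(list)
    for route_id, payload in items:
        grouped[route_id].append(payload)
    return dict(grouped)

print(group_by_route([('R1', 'a'), ('R2', 'b'), ('R1', 'c')]))
# {'R1': ['a', 'c'], 'R2': ['b']}
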
Example #6
def make(daytime_data):
    """ makes NSR function with the collected normal dry day data

    :type daytime_data: pyticas_ncrtes.core.etypes.DaytimeData
    :rtype: pyticas_ncrtes.core.etypes.DaytimeFunction
    """
    # Procedure
    # 1. validate recovery and reduction speed-density data respectively
    # 2. calibrate normal speed recovery function
    # 3. calibrate normal speed reduction function

    logger = getLogger(__name__)
    logger.debug('>> %s : try to make normal u-k function' %
                 daytime_data.station.station_id)

    recovery_uk = _filter_splited_patterns(daytime_data.recovery_uk)
    recovery_uk_origin = daytime_data.recovery_uk

    recovery_function = _segmented_function(daytime_data, recovery_uk,
                                            recovery_uk_origin)
    daytime_function = etypes.DaytimeFunction(daytime_data.station,
                                              recovery_function)

    logger.debug('  -> ffs=%s, is_valid=%s' %
                 (daytime_function.get_FFS(),
                  daytime_function.recovery_function.is_valid()))

    return daytime_function
Example #7
def find(edata):
    """
    :type edata: pyticas_ncrtes.core.etypes.ESTData
    """
    logger = getLogger(__name__)
    logger.debug(' >>> ncrt_finder.find() : wn_ffs_idx=%s, wn_ffs=%s' %
                 (edata.wn_ffs_idx, edata.wn_ffs))
    has_normal_uk_function = edata.normal_func.is_valid()

    if has_normal_uk_function and edata.wn_ffs_idx:
        logger.debug('  - determine NCRT with wet-normal uk function')
        _determine_with_wnuk(edata)
    elif has_normal_uk_function and edata.should_wn_uk_without_ffs:
        if edata.wn2_interval_sidx:
            logger.debug(
                '  - determine NCRT with wet-normal uk function (recovered-from-congested)'
            )
            # recovered from congestion
            _determine_ncrt_from_congestion(edata)
        else:
            # NCRT cannot be determined
            logger.debug(
                '  - cannot determine NCRT (recovered-from-congested)')

    elif edata.wn_ffs_idx:
        logger.debug('  - determine NCRT without wet-normal uk function')
        edata.ncrt = _adjust_with_speed(edata, edata.wn_ffs_idx)
    else:
        logger.debug('  - cannot determine NCRT')

    if edata.ncrt and edata.ncrt_search_sidx and edata.ncrt < edata.ncrt_search_sidx:
        edata.ncrt = edata.wn_ffs_idx

    logger.debug(' <<< end of ncrt_finder.find() ')
def report(case_name, edata_list, output_path):
    """

    :type case_name: str
    :type edata_list: list[pyticas_ncrtes.core.etypes.ESTData]
    :type output_path: str
    :return:
    """
    logger = getLogger(__name__)
    # preparing output folder

    chart_path = os.path.join(output_path, 'charts')
    if not os.path.exists(chart_path):
        os.makedirs(chart_path)

    speed_contour_file = os.path.join(output_path, '%s-speed.png' % case_name)
    speed_contour.write(speed_contour_file, case_name, edata_list)

    # normal_ratio_countour_file = os.path.join(output_path, '%s-normal_ratio.png' % case_name)
    # wetnormal_ratio_countour_file = os.path.join(output_path, '%s-wetnormal_ratio.png' % case_name)
    # ratio_contour.write(normal_ratio_countour_file, wetnormal_ratio_countour_file, case_name, edata_list)

    summary_file = os.path.join(output_path, '%s.xlsx' % case_name)
    summary.write(summary_file, case_name, edata_list)

    station_chart.write(chart_path, case_name, edata_list)

    logger.debug('All results are saved in %s' % output_path)
Example #9
File: setup.py  Project: MValle21/tetres
def initialize_database():
    logger = getLogger(__name__)
    logger.info('    - initialize database : add default nsr_data')

    from pyticas_ncrtes.db import conn

    conn.engine.execute("INSERT INTO config (name, content) VALUES ('version', '{}')".format(__DB_VERSION__))
Example #10
def is_dry_day(target_station, prd):
    """ is dry day?

    How-to::

        check weather through RWIS at first,
        if RWIS is not available, use weather sensor (WEATHER_DEVICE.WS35W25)

    :type target_station: pyticas.ttypes.RNodeObject
    :type prd: pyticas.ttypes.Period
    :rtype: (bool, str)
    """
    logger = getLogger(__name__)
    # wbans = [wban for (usaf, wban) in TARGET_ISD_STATIONS]
    # usafs = [usaf for (usaf, wban) in TARGET_ISD_STATIONS]
    # _weather_stations = isd.get_station_list('MN', lambda wst: wst.wban in wbans and wst.usaf in usafs)
    # isd_stations = isd.find_nearby_station(target_station.lat, target_station.lon, prd.start_date.date(), _weather_stations)
    isd_stations = weather_stations(target_station, prd)
    nearby = None
    isd_data_list = None

    cprd = prd.clone()
    cprd.extend_end_hour(2)

    # decide nearby weather station which has nsr_data during a given period
    # by trying to read weather nsr_data
    for dist, isd_station in isd_stations:

        cache_key = '%s-%s' % (isd_station.station_name, prd.get_period_string())
        if cache_key in DRY_DAY_CACHE:
            nearby, isd_data_list = DRY_DAY_CACHE[cache_key]
            break

        if dist > WEATHER_STATION_DISTANCE_LIMIT:
            break

        isd_data_list = isd.get_data(isd_station, cprd)
        """:type: list[pyticas_noaa.isd.isdtypes.ISDData] """

        if not isd_data_list:
            continue

        DRY_DAY_CACHE[cache_key] = (isd_station, isd_data_list)

        nearby = isd_station
        break

    if not nearby:
        #logger.warning('No weather information (for %s during %s)' % (target_station.station_id, prd.get_period_string()))
        return False, None

    if _is_dry(isd_data_list, prd, setting.DRY_RATE_THRESHOLD):
        return True, nearby.station_name
    else:
        return False, nearby.station_name
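
The station-selection loop above combines a cache, a distance cut-off, and a fall-through to the next candidate. A generic sketch of that pattern with placeholder data sources (candidates, fetch, cache and dist_limit are illustrative, not this project's API):

def pick_nearest_with_data(candidates, fetch, cache, dist_limit):
    """candidates: (distance, station) pairs sorted by distance; fetch(station) -> data or None."""
    for dist, station in candidates:
        if station in cache:          # reuse previously fetched data
            return station, cache[station]
        if dist > dist_limit:         # all remaining candidates are too far away
            return None, None
        data = fetch(station)
        if not data:                  # no data for this station, try the next nearest
            continue
        cache[station] = data
        return station, data
    return None, None
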
def _may_recovered_speed(edata, rth=0.9):
    """
    :type edata: pyticas_ncrtes.core.etypes.ESTData
    :rtype:
    """
    getLogger(__name__).debug(' - Snowday FFS : %s' % edata.snowday_ffs)

    normal_ffs = edata.normal_ffs
    snowday_ffs = edata.snowday_ffs

    if normal_ffs and snowday_ffs:
        return max(normal_ffs, snowday_ffs) * rth

    if normal_ffs:
        return normal_ffs * rth

    if snowday_ffs:
        return snowday_ffs * rth

    return None
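
A worked example with illustrative numbers: when both FFS values are available, the may-recovered speed is the larger one scaled by rth.

normal_ffs, snowday_ffs, rth = 62.0, 58.0, 0.9
may_recovered_speed = max(normal_ffs, snowday_ffs) * rth   # 55.8 mph
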
Example #12
def write(output_dir, name, edata_list, prefix=''):
    """

    :type output_dir: str
    :type name: str
    :type edata_list: list[pyticas_ncrtes.core.etypes.ESTData]
    """
    logger = getLogger(__name__)
    for idx, edata in enumerate(edata_list):
        logger.debug('Writing chart image of %s' %
                     edata.target_station.station_id)
        # if edata.ncrt or (not edata.pst and not edata.stable_speed_region_before_pst and not edata.stable_speed_region_after_pst):
        #     continue
        _write(idx, edata, output_dir, prefix)
Example #13
def write(filepath, name, edata_list):
    """

    :type filepath: str
    :type name: str
    :type edata_list: list[pyticas_ncrtes.core.etypes.ESTData]
    """
    logger = getLogger(__name__)
    logger.debug('Writing summary sheets of %s' % name)
    nonon_edata_list = [
        edata for edata in edata_list if edata is not None and edata.is_loaded
    ]
    results = [_get_summary_from_edata(edata) for edata in nonon_edata_list]
    _write_summary_xlsx(results, nonon_edata_list, filepath)
Example #14
def connect(DB_INFO):
    """
    :rtype: (sqlalchemy.engine.Engine, sqlalchemy.engine.Connection, sqlalchemy.orm.scoped_session)
    """
    logger = getLogger(__name__)
    logger.info('creating database connection...')
    # for SQLite
    engine = create_engine('sqlite:///' + os.path.join(get_path('db'), DB_INFO['filename']))
    connection = engine.connect()
    model.Base.metadata.bind = engine
    session_factory = sessionmaker(bind=engine)
    Session = scoped_session(session_factory)

    return (engine, connection, Session)
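
A hedged usage sketch of the returned triple; the filename and the query are placeholders only.

engine, connection, Session = connect({'filename': 'ncrtes.db'})
session = Session()
try:
    print(connection.execute('SELECT 1').scalar())   # trivial round-trip check
finally:
    session.close()
    connection.close()
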
Example #15
def _get_timepoint_data(n_data, get_func, ignore_ex=True):
    _data = [None] * n_data
    _time = get_func(as_string=False)
    if _time:
        if isinstance(_time, list):
            for _t in _time:
                try:
                    _point = edata.snow_event.time_to_index(_t)
                    _data[_point] = edata.sus[_point]
                except Exception as ex:
                    if not ignore_ex:
                        raise ex
                    else:
                        getLogger(__name__).error(str(ex))
        else:
            try:
                _point = edata.snow_event.time_to_index(_time)
                _data[_point] = edata.sus[_point]
            except Exception as ex:
                if not ignore_ex:
                    raise ex
                else:
                    getLogger(__name__).warn(str(ex))
    return _data
Example #16
    def register_service(self, server):
        """ register web services to the Flask server

        :type server: flask.Flask
        :rtype:
        """

        logger = getLogger(__name__)
        logger.info('  - registering {}'.format(self.name))

        # import API modules
        from pyticas_ncrtes.api import handlers

        for module in handlers:
            module.register_api(server)
Example #17
def _after_kt(station, uk, Kt):
    """ make exponential decay function

    :type station: pyticas.ttypes.RNodeObject
    :type uk: dict[float, float]
    :type Kt: float
    :rtype: etypes.LogFunction
    """
    fit_func = lambda x, a, b: a * x + b
    target_us, target_ks = data_util.dict2sorted_list(
        uk, key_filter=lambda k: k > Kt)

    knots = _knots(np.array(target_ks), np.array(target_us), fit_func, 1)
    knot = knots[0]
    for knot in range(int(knots[0]), int(Kt), -2):
        _target_us, _target_ks = data_util.dict2sorted_list(
            uk, key_filter=lambda k: k > knot)
        if len(_target_ks) > 50:
            target_ks, target_us = _target_ks, _target_us
            break

    popts = [90, 0.02]

    target_ks = target_ks + [200]
    target_us = target_us + [2]
    N = len(target_ks)
    sigma = np.ones(N)
    sigma[-1] = 0.01
    try:
        info, popts, rep, func = fitting.curve_fit(
            etypes.LogFunction.get_fitting_function(), target_ks, target_us,
            popts)
        return etypes.LogFunction(popts, {}, Kt)
    except Exception as ex:
        getLogger(__name__).warn('cannot calibrate log function')
        return etypes.LogFunction(None, {}, Kt)
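
The anchor point (k=200, u=2) with a tiny sigma forces the fitted curve through the high-density tail. A self-contained sketch of that weighting trick using SciPy's curve_fit and a generic exponential-decay model (the model form here is an assumption, not the exact etypes.LogFunction form):

import numpy as np
from scipy.optimize import curve_fit

def decay(k, a, b):
    # generic exponential decay, for illustration only
    return a * np.exp(-b * k)

ks = np.array([40.0, 50.0, 60.0, 80.0, 100.0, 200.0])   # last point is the anchor
us = np.array([45.0, 40.0, 35.0, 28.0, 22.0, 2.0])
sigma = np.ones(len(ks))
sigma[-1] = 0.01            # small sigma = heavy weight on the anchor point
popts, _ = curve_fit(decay, ks, us, p0=[90, 0.02], sigma=sigma)
print(popts)
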
def _snow_routes(year):
    """
    
    :type year: int 
    :rtype: (list[SnowRoute], list[pyticas_ncrtes.itypes.SnowRouteInfo])
    """
    logger = getLogger(__name__)
    da_snrg = SnowRouteGroupDataAccess()
    da_snr = SnowRouteDataAccess()
    snrgi_list = da_snrg.search([('year', year)], group_by='name')
    snri_list = []
    for snrgi in snrgi_list:
        _snri_list = da_snr.search([('snowroute_group_id', snrgi.id)])
        if _snri_list:
            snri_list.extend(_snri_list)

    snow_routes = []
    for snri in snri_list:
        # logger.debug(' > snow route : %s, %s, %s, %s' % (
        #     snri.id, snri._snowroute_group.region, snri._snowroute_group.sub_region, snri._snowroute_group.name
        # ))
        sr1 = etypes.SnowRoute(snri._snowroute_group.region,
                               snri._snowroute_group.sub_region,
                               snri._snowroute_group.name, '',
                               snri.route1.corridors()[0].route,
                               snri.route1.corridors()[0].dir,
                               snri.route1.get_stations()[0].station_id,
                               snri.route1.get_stations()[-1].station_id,
                               snri.name, snri.description)

        sr2 = etypes.SnowRoute(snri._snowroute_group.region,
                               snri._snowroute_group.sub_region,
                               snri._snowroute_group.name, '',
                               snri.route2.corridors()[0].route,
                               snri.route2.corridors()[0].dir,
                               snri.route2.get_stations()[0].station_id,
                               snri.route2.get_stations()[-1].station_id,
                               snri.name, snri.description)

        # logger.debug('   - %s' % sr1)
        # logger.debug('   - %s' % sr2)
        snow_routes.append(sr1)
        snow_routes.append(sr2)

    return snow_routes, snri_list
Example #19
def _station_data(target_station, prd, dc):
    """ return q,k,u data for given period

    :type target_station: pyticas.ttypes.RNodeObject
    :type prd: pyticas.ttypes.Period
    :type dc: function
    :rtype: (pyticas.ttypes.RNodeData, pyticas.ttypes.RNodeData, pyticas.ttypes.RNodeData)
    """
    rdr = ncrtes.get_infra().rdr
    logger = getLogger(__name__)

    us = rdr.get_speed(target_station, prd, dc)
    if not us or _is_missing_day(us.data, prd.interval):
        logger.debug('Station %s is missing at %s!!' % (target_station.station_id, prd.get_date_string()))
        return None, None, None
    ks = rdr.get_density(target_station, prd, dc)
    qs = rdr.get_average_flow(target_station, prd, dc)

    return us, ks, qs
Example #20
def connect(DB_INFO):
    """
    :rtype: (sqlalchemy.engine.Engine, sqlalchemy.engine.Connection, sqlalchemy.orm.scoped_session)
    """
    logger = getLogger(__name__)
    logger.info('creating database connection...')
    connection_string = 'postgresql+pg8000://{}:{}@{}:{}/{}'.format(
        DB_INFO['user'], DB_INFO['passwd'], DB_INFO['host'],
        DB_INFO['port'], DB_INFO['db_name'])

    engine = create_engine(connection_string, encoding='utf-8')
    connection = engine.connect()
    model.Base.metadata.bind = engine
    session_factory = sessionmaker(bind=engine)
    Session = scoped_session(session_factory)

    return (engine, connection, Session)
def adjust_ncrts(edata_list, sections):
    """

    :type edata_list: list[pyticas_ncrtes.core.etypes.ESTData]
    :type sections: list[list[pyticas_ncrtes.core.etypes.ESTData]]
    :rtype: list[pyticas_ncrtes.core.etypes.ESTData]
    """
    logger = getLogger(__name__)
    logger.debug('>>>>>> Adjust Type1 NCRTs')
    for sidx, s in enumerate(sections):
        logger.debug('>>>>>>>> Section : %s' %
                     [edata.target_station.station_id for edata in s])
        if any(s):
            _adjust_ncrt_for_a_section(s)
        logger.debug('<<<<<<<< End of Section : %s' %
                     [edata.target_station.station_id for edata in s])

    logger.debug('<<<<<< End of Adjust Type1 NCRTs')

    return edata_list
def make(target_station, normal_months, **kwargs):
    """
    
    :type target_station: pyticas.ttypes.RNodeObject 
    :type normal_months: list[tuple(int, int)]
    :rtype: pyticas_ncrtes.core.etypes.NSRFunction 
    """
    log = getLogger(__name__)
    data = kwargs.get('normal_data', None)
    if not data:
        data = nsr_data.get(target_station, normal_months, **kwargs)

    if not data:
        log.warn('Cannot collect normal data for %s' %
                 target_station.station_id)
        return None
    dt_func = daytime.make(data.daytime_data)
    nt_func = nighttime.make(data.night_data)
    return pyticas_ncrtes.core.etypes.NSRFunction(target_station,
                                                  normal_months, dt_func,
                                                  nt_func)
def _check_edata(tsi, edata):
    """

    :type tsi: pyticas_ncrtes.itypes.TargetStationInfo
    :type edata: pyticas_ncrtes.core.etypes.ESTData
    :rtype: bool
    """
    log = getLogger(__name__)
    if not edata:
        log.info('> %s : No ESTData' % tsi.station_id)
        return False

    if not edata.is_loaded:
        log.info('> %s : Data are not loaded for snow event' % tsi.station_id)
        return False

    # if not edata.search_start_idx:
    #     log.info('> %s : No Search Start Idx' % tsi.station_id)
    #     return False

    return True
def prepare_data(tsi, station, year, stime, etime, snow_routes, reported, normal_func):
    """

    :type tsi: pyticas_ncrtes.itypes.TargetStationInfo
    :type station: pyticas.ttypes.RNodeObject
    :type year: int
    :type stime: datetime.datetime
    :type etime: datetime.datetime
    :type snow_routes: list[pyticas_ncrtes.core.etypes.SnowRoute]
    :type reported: dict[str, list[pyticas_ncrtes.core.etypes.ReportedEvent]]
    :type normal_func: pyticas_ncrtes.core.etypes.NSRFunction

    :rtype: pyticas_ncrtes.core.etypes.ESTData
    """
    sevent = etypes.SnowEvent(stime, etime)
    infra = ncrtes.get_infra()
    target_station = infra.get_rnode(tsi.station_id)

    tlcDA = TargetLaneConfigDataAccess()
    tlci = tlcDA.get_by_station_id(year, station.station_id)
    if tlci:
        valid_detectors = [infra.get_detector(det_name.strip()) for det_name in tlci.detectors.split(',')]
    else:
        valid_detectors = target.get_target_detectors(station)

    if not valid_detectors:
        return None

    try:
        edata = etypes.ESTData(tsi, target_station, sevent, snow_routes, reported, normal_func)
        edata.prepare_data(detectors=valid_detectors)
        return edata

    except Exception as ex:
        log = getLogger(__name__)
        log.warning('!!!!! Error : %s : %s' % (tsi.station_id, ex))
        from pyticas.tool import tb
        tb.traceback(ex)
        return None
Example #25
def get_target_stations(corr_name):
    """ Returns potential target stations satisfying the following conditions
    
    **Conditions**
        - normal station 
            - not temporary station
            - not wavetronics station
            - not radar station
            - not velocity station
        - has target lanes
            - lane 2 or lane 3 (when lane 1 is an auxiliary lane)
        - not on curve
            - a station on a curve shows a lower-level U-K relationship compared with nearby stations

    :type corr_name: str
    :rtype: list[pyticas.ttypes.RNodeObject]
    """
    logger = getLogger(__name__)

    infra = ncrtes.get_infra()
    corr = infra.get_corridor_by_name(corr_name)
    stations = []
    for st in corr.stations:
        if not st.is_normal_station():
            continue
        if not any(lane.get_target_detectors(st)):
            continue

        up_station, dn_station = _find_up_and_down_stations(st)
        if up_station is not None:
            angle = infra.geo.angle_of_rnodes(up_station, st, dn_station)
            if angle < WITHRAW_STATION_ANGLE:
                logger.debug('Skip the station %s on curve (%.1f)' %
                             (st.station_id, angle))
                continue

        stations.append(st)

    return stations
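
The curve check relies on the angle formed at a station by its upstream and downstream neighbors. A minimal, self-contained sketch of that geometry, treating coordinates as planar (an approximation of whatever infra.geo.angle_of_rnodes does internally):

import math

def angle_at_middle(p_up, p_mid, p_dn):
    """Angle in degrees at p_mid formed by the segments to p_up and p_dn."""
    v1 = (p_up[0] - p_mid[0], p_up[1] - p_mid[1])
    v2 = (p_dn[0] - p_mid[0], p_dn[1] - p_mid[1])
    dot = v1[0] * v2[0] + v1[1] * v2[1]
    norm = math.hypot(*v1) * math.hypot(*v2)
    return math.degrees(math.acos(dot / norm))

# A nearly straight alignment gives an angle close to 180 degrees,
# while a sharp curve gives a noticeably smaller angle.
print(angle_at_middle((0.0, 0.0), (1.0, 0.05), (2.0, 0.0)))   # ~174
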
Example #26
def make(edata):
    """
    :type edata: pyticas_ncrtes.core.etypes.ESTData
    """
    logger = getLogger(__name__)

    logger.debug(' >>> wn_uk.make() ')
    is_recovered_from_congestion = _check_directly_recovered_from_congestion(
        edata)

    if is_recovered_from_congestion:
        edata.wn_ffs = None
        edata.wn_ffs_idx = None
        edata.ncrt_search_sidx = None
        edata.ncrt_search_eidx = None
        if edata.normal_func.is_valid() and edata.wn2_interval_sidx:
            logger.debug(
                '  - make wet normal pattern without wn_ffs (tmp-recovered : %d - %d) '
                % (edata.wn2_interval_sidx, edata.wn2_interval_eidx))
            edata.make_wet_normal_pattern_without_wnffs()
        else:
            logger.debug(
                '  - cannot make wet normal pattern without wn_ffs (there is no temp-recovered area)'
            )

    else:
        if edata.normal_func.is_valid():
            wn_sidx, k_at_wn_ffs = _k_at_wn_ffs(edata, edata.wn_ffs)
            edata.k_at_wn_ffs = k_at_wn_ffs
            logger.debug(
                '  - make wet normal pattern with wn_ffs (wn_ffs=%.2f, wn_ffs_idx=%d, k_at_wn_ffs=%.2f)'
                % (edata.wn_ffs, edata.wn_ffs_idx, edata.k_at_wn_ffs))
            edata.make_wet_normal_pattern(edata.wn_ffs, edata.k_at_wn_ffs)
        else:
            logger.debug(
                '  - cannot make wet normal pattern with wn_ffs (no normal uk function)'
            )

    logger.debug(' <<< end of wn_uk.make() ')
def _may_recovered_here(target_ncrt, edata):
    """

    :param target_ncrt:
    :type target_ncrt: int
    :param edata:
    :type edata: pyticas_ncrtes.core.etypes.ESTData
    """
    LOW_K = 10
    CONGESTED_SPEED = 30
    rth = 0.8
    time_limit = setting.INTV30MIN

    logger = getLogger(__name__)
    own_ncrt = edata.ncrt
    wn_sratios = edata.wn_sratios

    if wn_sratios is None or not any(wn_sratios):
        wn_sratios = np.array([_u / edata.wn_ffs for _u in edata.sus])

    # low-ratio
    if wn_sratios[target_ncrt] < rth:
        logger.debug('  -> low ratio : %s, %s' %
                     (target_ncrt, edata.target_station.station_id))
        return False

    # check low-ratios
    wn_tratios = wn_sratios[target_ncrt:own_ncrt + 1]
    sus, sks = edata.sus[target_ncrt:own_ncrt +
                         1], edata.sks[target_ncrt:own_ncrt + 1]
    losted = np.where((wn_tratios < rth) & (sus > CONGESTED_SPEED)
                      & (sks > LOW_K))
    if len(losted[0]) > time_limit:
        logger.debug('  -> time limit : %s, %s' %
                     (target_ncrt, edata.target_station.station_id))
        return False

    return True
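
The lost-interval count above is a compound boolean mask. A small standalone sketch of the same np.where pattern on made-up arrays:

import numpy as np

ratios = np.array([0.95, 0.70, 0.75, 0.92, 0.60])
speeds = np.array([55.0, 42.0, 45.0, 58.0, 25.0])
densities = np.array([18.0, 25.0, 22.0, 15.0, 40.0])

# intervals below the ratio threshold that are neither congested nor low-density
lost = np.where((ratios < 0.8) & (speeds > 30) & (densities > 10))
print(lost[0])       # [1 2]; len(lost[0]) is what gets compared to the time limit
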
Example #28
def _adjust_with_speed(edata, target_idx):
    """

    :type edata: pyticas_ncrtes.core.etypes.ESTData
    :type target_idx: int
    :rtype: int
    """
    logger = getLogger(__name__)

    sdata = data_util.smooth(edata.us, setting.SW_30MIN)
    qdata = edata.qus  #data_util.stepping(sdata, 2)

    stick = None
    qu = qdata[target_idx]
    for idx in range(target_idx, 0, -1):
        cu = qdata[idx]
        if cu != qu:
            stick = idx
            break

    if not stick:
        return target_idx

    _stick = stick
    for idx in range(stick, 0, -1):
        if sdata[idx] > qu:
            _stick = idx + 1
        else:
            break

    stick = _stick

    # for tidx in range(stick, stick+setting.INTV15MIN):
    #     if sdata[tidx] >= qu:
    #         return tidx + 1

    # fallback; with the refinement loop above commented out, the step-change index is returned as-is
    return stick
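
A toy illustration of the two backward scans above: first find where the quantized (stepped) speed changes level, then keep stepping back across samples whose smoothed speed still exceeds that level. The lists below are made-up values.

def adjust_back(smoothed, quantized, target_idx):
    level = quantized[target_idx]
    stick = None
    for idx in range(target_idx, 0, -1):      # 1st scan: step change in the quantized speed
        if quantized[idx] != level:
            stick = idx
            break
    if stick is None:
        return target_idx
    for idx in range(stick, 0, -1):           # 2nd scan: while the smoothed speed exceeds the level
        if smoothed[idx] > level:
            stick = idx + 1
        else:
            break
    return stick

quantized = [30, 30, 40, 40, 50, 50, 50]
smoothed  = [31, 33, 41, 52, 51, 50, 49]
print(adjust_back(smoothed, quantized, 6))    # 4
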
def _pre_process_before_determination(edata):
    """

    :type edata: pyticas_ncrtes.core.etypes.ESTData
    """
    logger = getLogger(__name__)

    # determine FFS
    if edata.normal_func and edata.normal_func.is_valid():
        normal_ffs = edata.normal_func.daytime_func.get_FFS()
    else:
        normal_ffs = None
    edata.normal_ffs = normal_ffs

    # determine snowday-FFS
    snowday_ffs = _snowday_ffs(edata)
    edata.snowday_ffs = snowday_ffs
    if not snowday_ffs:
        logger.warning('Snowday FFS is not found')

    # get/make normal ratios
    normal_ratios = _normal_ratios(edata, snowday_ffs)
    edata.normal_ratios = normal_ratios

    # may-recovered-speed : FFS * 0.9
    may_recovered_speed = _may_recovered_speed(edata, rth=0.9)
    if not may_recovered_speed:
        logger.debug(' - Cannot determine `may-recovered-speed` ')
        return
    edata.may_recovered_speed = may_recovered_speed

    logger.debug(' - Snowday FFS : %s' % snowday_ffs)
    logger.debug(' - may_recovered_speed : %s' % may_recovered_speed)

    # speed and ratio threshold for candidate time intervals of free-flow
    edata.ncrt_search_uth, edata.ncrt_search_rth = _uth_rth(edata)
Example #30
def sections(edata_list):
    """

    :type edata_list: list[pyticas_ncrtes.core.etypes.ESTData]
    :rtype: list[list[pyticas_ncrtes.core.etypes.ESTData]]
    """
    logger = getLogger(__name__)
    snow_route_data = stations_in_same_truck_route(edata_list)
    res = []
    for idx, edatas in enumerate(snow_route_data):
        # print('# snow_route=%d' % idx)
        _sections = _divide_section(edatas)
        _final_sections = _sections
        # _final_sections = []
        # for sidx, s in enumerate(_sections):
        #     _sub_sections = _divide_section_by_length(s)
        #     _final_sections.extend(_sub_sections)

        res.extend(_final_sections)
        # for sidx, s in enumerate(_final_sections):
        #     print('!! divide=%d' % sidx)
        #     for edata in s:
        #         print('  > ', edata.snow_route.id, ' : ', edata.target_station, ' : ', edata.target_station.lanes, edata.target_station)

    for sidx, s in enumerate(res):
        logger.debug('!! section=%d' % sidx)
        for edata in s:
            logger.debug(
                '  > truck_route=%s, station=%s lanes=%s, s_limit=%s label=%s '
                % (edata.snow_route.id, edata.target_station.station_id,
                   edata.target_station.lanes, edata.target_station.s_limit,
                   edata.target_station.label))

    # raise Exception('Here~~~')

    return res