Example #1
0
def collect(target_station, periods, **kwargs):
    """ returns nighttime traffic data

    Collects speed/density data for each period, skipping periods whose
    speed data is entirely missing, and dumps the result to an xlsx file.

    :type target_station: pyticas.ttypes.RNodeObject
    :type periods: list[pyticas.ttypes.Period]
    :rtype: (list[float], list[float], list[list[float]], list[list[float]], list[pyticas.ttypes.Period])
    """
    rdr = ncrtes.get_infra().rdr

    dc = kwargs.get('dc', None)
    valid_detectors = kwargs.get('valid_detectors', None)
    if not dc:
        dc, valid_detectors = lane.get_detector_checker(target_station)

    # no usable detector checker -> nothing can be collected
    if not dc:
        return None, None, None, None, None

    uss, kss, used_periods = [], [], []
    for prd in periods:
        us = rdr.get_speed(target_station, prd, dc)
        # skip periods whose speed data is entirely missing
        if not ncrtes.get_infra().is_missing(us.data):
            ks = rdr.get_density(target_station, prd, dc)
            uss.append(us.data)
            kss.append(ks.data)
            used_periods.append(prd)

    if not uss:
        return None, None, None, None, None

    avg_us = data_util.avg_multi(uss, only_positive=True)
    avg_ks = data_util.avg_multi(kss, only_positive=True)

    # debug output: dump the collected speed data to an xlsx file
    import xlsxwriter
    import os
    output_path = ncrtes.get_infra().get_path('ncrtes', create=True)
    filepath = os.path.join(output_path,
                            'nighttime-%s.xlsx' % target_station.station_id)
    wb = xlsxwriter.Workbook(filepath)
    ws = wb.add_worksheet('nsr_data')
    # BUGFIX: the header must list the periods actually used; periods with
    # missing data are skipped above, so labeling columns with `periods`
    # misaligned the headers whenever any period was dropped
    ws.write_row(0, 0,
                 ['time', 'avg'] + [prd.get_date_string() for prd in used_periods])
    ws.write_column(1, 0, [
        dt.strftime('%H:%M')
        for dt in periods[0].get_timeline(as_datetime=True)
    ])
    ws.write_column(1, 1, avg_us)
    # one column per used period, starting right after the average column
    for col, us in enumerate(uss, start=2):
        ws.write_column(1, col, us)
    wb.close()

    return avg_us, avg_ks, uss, kss, used_periods
Example #2
0
def get_target_stations(corr_name):
    """ Returns potential target stations satisfying the following conditions

    **Conditions**
        - normal station
            - not temporary station
            - not wavetronics station
            - not radar station
            - not velocity station
        - has target lanes
            - lane 2 or lane 3 (when lane 1 is an auxiliary lane)
        - not on curve
            - the station on curve shows lower-level of U-K comparing to nearby stations

    :type corr_name: str
    :rtype: list[pyticas.ttypes.RNodeObject]
    """
    infra = ncrtes.get_infra()
    corridor = infra.get_corridor_by_name(corr_name)

    result = []
    for station in corridor.stations:
        # must be a regular station with at least one target detector
        if not station.is_normal_station():
            continue
        if not any(lane.get_target_detectors(station)):
            continue

        # drop stations sitting on a sharp curve (angle below threshold)
        upstream, downstream = _find_up_and_down_stations(station)
        if (upstream is not None
                and infra.geo.angle_of_rnodes(upstream, station, downstream)
                < WITHRAW_STATION_ANGLE):
            continue

        result.append(station)

    return result
    def ncrtes_ts_list():
        """Return the target-station list for a year/corridor as a JSON response.

        Reads ``year`` and ``corridor_name`` from the request form, ensures a
        WinterSeason record exists, then builds one TargetStationInfo entry per
        station of the corridor (creating transient entries for stations that
        have no stored record). Stations without any detector are dropped.
        """
        try:
            year = int(request.form.get('year'))
            corridor_name = request.form.get('corridor_name')
            months = get_normal_months_from_year(year)
            wsDA = WinterSeasonDataAccess()
            wsi = wsDA.get_by_year(year)
            if not wsi:
                # first access for this year: create the WinterSeason record
                wsi = itypes.WinterSeasonInfo()
                wsi.set_months(months)
                wsi.name = 'WinterSeason %s-%s' % (months[0][0], months[-1][0])
                wsDA.insert(wsi, autocommit=True)
            wsDA.close()

            tsDA = TargetStationDataAccess()
            ts_list = tsDA.list_by_corridor_name(year, corridor_name)
            tsDA.close()

            snrDA = SnowRouteDataAccess()
            snow_routes = snrDA.list_by_year(year)
            snrDA.close()

            tlcDA = TargetLaneConfigDataAccess()
            tlc_list = tlcDA.list_by_corridor_name(year, corridor_name)
            tlcDA.close()

            # sort (from upstream to downstream)
            infra = ncrtes.get_infra()
            corr = infra.get_corridor_by_name(corridor_name)

            res = []
            for idx, st in enumerate(corr.stations):
                tsi = _find_target_station_info(st.station_id, ts_list)
                tlci = _find_target_lane_info(st.station_id, tlc_list)
                print(idx, st.station_id)
                if not tsi:
                    # no stored info for this station -> build a transient entry
                    snow_route = _find_snow_route(st.station_id, snow_routes)
                    tsi = itypes.TargetStationInfo()
                    tsi.winterseason_id = wsi.id if wsi else None
                    tsi.station_id = st.station_id
                    tsi.snowroute_id = snow_route.id if snow_route else None
                    tsi.snowroute_name = snow_route._snowroute_group.name if snow_route else None
                    tsi.corridor_name = st.corridor.name
                    tsi.normal_function_id = None
                # lane-config override wins; otherwise use default target detectors
                # (this was duplicated in both branches before)
                tsi.detectors = tlci.detectors if tlci else ','.join(
                    [det.name for det in lane.get_target_detectors(st)])
                res.append(tsi)

            # drop stations without any usable detector
            res = [v for v in res if v.detectors]

        except Exception:
            # BUGFIX: typo in user-facing message ("occured" -> "occurred")
            return prot.response_error(
                'exception occurred when retrieving data')

        return prot.response_success({'list': res})
def cache_file_path(station_id, months):
    """ returns nsr_data path

    :type station_id: str
    :type months: list[(int, int)]
    :rtype: str
    """
    # one directory per span of distinct years, e.g. 'ncrtes/2015-2016/<DATA_DIR>'
    unique_years = {'%d' % y for (y, m) in months}
    year_part = '-'.join(sorted(unique_years))
    rel_dir = 'ncrtes/%s/%s' % (year_part, DATA_DIR)
    base_dir = ncrtes.get_infra().get_path(rel_dir, create=True)
    return os.path.join(base_dir, '%s.json' % station_id)
def _output_path(prd, corr_name):
    """Return (creating it if needed) the wet-normal output directory.

    The directory name combines the period's start date and the corridor name.

    :type prd: pyticas.ttypes.Period
    :type corr_name: str
    :rtype: str
    """
    infra = ncrtes.get_infra()
    base_dir = infra.get_path('ncrtes', create=True)
    date_str = prd.start_date.strftime('%Y-%m-%d')
    out_dir = os.path.join(base_dir, 'wetnormal',
                           '%s - %s' % (date_str, corr_name))
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    return out_dir
Example #6
0
def _output_path(*filename):
    """Return a path under the ncrtes 'debug' output directory.

    With no arguments, returns the debug directory itself; with path
    components, returns the joined file path, creating the parent
    directory if necessary.
    """
    import os
    from pyticas_ncrtes import ncrtes

    infra = ncrtes.get_infra()
    debug_dir = os.path.join(infra.get_path('ncrtes', create=True), 'debug')
    if not os.path.exists(debug_dir):
        os.makedirs(debug_dir)

    if not filename:
        return debug_dir

    target = os.path.join(debug_dir, *filename)
    parent = os.path.dirname(target)
    if not os.path.exists(parent):
        os.makedirs(parent)
    return target
def _station_data(target_station, prd, dc):
    """ return q,k,u data for given period

    :type target_station: pyticas.ttypes.RNodeObject
    :type prd: pyticas.ttypes.Period
    :type dc: function
    :rtype: (pyticas.ttypes.RNodeData, pyticas.ttypes.RNodeData, pyticas.ttypes.RNodeData)
    """
    reader = ncrtes.get_infra().rdr
    log = getLogger(__name__)

    speed = reader.get_speed(target_station, prd, dc)
    # a day with missing speed data is unusable -> bail out early
    if not speed or _is_missing_day(speed.data, prd.interval):
        log.debug('Station %s is missing at %s!!' % (target_station.station_id, prd.get_date_string()))
        return None, None, None

    density = reader.get_density(target_station, prd, dc)
    flow = reader.get_average_flow(target_station, prd, dc)
    return speed, density, flow
def _read_snow_uk(station_id):
    """Load the snow-day U-K (speed keyed by density) map for a station.

    Returns None when no cached JSON file exists for the station.

    :type station_id: str
    :rtype: dict[float, float]
    """
    import os
    import json
    from pyticas_ncrtes import ncrtes
    infra = ncrtes.get_infra()
    json_path = os.path.join(infra.get_path('ncrtes', create=True), 'uk-snow',
                             '%s.json' % station_id)
    if not os.path.exists(json_path):
        return None
    # BUGFIX: use a context manager so the file handle is closed
    # (was `json.load(open(json_path, 'r'))`, which leaked the handle)
    with open(json_path, 'r') as f:
        data = json.load(f)
    uk = {}
    for k, u in data.items():
        k = float(k)
        # nudge duplicate density keys slightly so no entry is silently lost
        while k in uk:
            k += 0.000001
        uk[k] = u
    return uk
def prepare_data(tsi, station, year, stime, etime, snow_routes, reported, normal_func):
    """Build an ESTData object for one target station and snow event.

    :type tsi: pyticas_ncrtes.itypes.TargetStationInfo
    :type station: pyticas.ttypes.RNodeObject
    :type year: int
    :type stime: datetime.datetime
    :type etime: datetime.datetime
    :type snow_routes: list[pyticas_ncrtes.core.etypes.SnowRoute]
    :type reported: dict[str, list[pyticas_ncrtes.core.etypes.ReportedEvent]]
    :type normal_func: pyticas_ncrtes.core.etypes.NSRFunction

    :rtype: pyticas_ncrtes.core.etypes.ESTData
    """
    sevent = etypes.SnowEvent(stime, etime)
    infra = ncrtes.get_infra()
    target_station = infra.get_rnode(tsi.station_id)

    tlcDA = TargetLaneConfigDataAccess()
    tlci = tlcDA.get_by_station_id(year, station.station_id)
    # BUGFIX: close the data-access session (it was leaked before;
    # every other DataAccess in this module is closed after use)
    tlcDA.close()

    if tlci:
        # stored lane configuration overrides the default detector set
        valid_detectors = [infra.get_detector(det_name.strip()) for det_name in tlci.detectors.split(',')]
    else:
        valid_detectors = target.get_target_detectors(station)

    if not valid_detectors:
        return None

    try:
        edata = etypes.ESTData(tsi, target_station, sevent, snow_routes, reported, normal_func)
        edata.prepare_data(detectors=valid_detectors)
        return edata

    except Exception as ex:
        # best-effort: log and skip this station rather than aborting the batch
        log = getLogger(__name__)
        log.warning('!!!!! Error : %s : %s' % (tsi.station_id, ex))
        from pyticas.tool import tb
        tb.traceback(ex)
        return None
Example #10
0
def _write(num, edata, output_path, prefix=''):
    """Render the estimation-result chart for one station/event.

    Draws a 3-panel figure (time series, smoothed Q-K, smoothed U-K),
    annotates the detected time points (SRST/LST/SIST/NCRT/PST/NFRT and
    reported times), then either shows it or saves it as a PNG depending
    on REPORT_MODE.

    :type num: int
    :type edata: pyticas_ncrtes.core.etypes.ESTData
    :type output_path: str
    :type prefix: str
    """
    logger = getLogger(__name__)
    if edata is None or not edata.is_loaded:
        # BUGFIX: parenthesize the conditional expression — previously the
        # ternary bound to the whole formatted string, so the message was
        # just 'N/A' when edata was None
        logger.debug('  - Data is not loaded : %s' %
                     (edata.target_station.station_id if edata else 'N/A'))
        return

    sw = setting.SW_3HOURS
    sks, sus = data_util.smooth(edata.ks, sw), data_util.smooth(edata.us, sw)

    hills = hill_helper.create_uk_hills(edata.ks, edata.us, sks, sus)

    long_title = '%s (%s[prj_id=%s, s_limit=%d, label=%s], %s)' % (
        edata.snow_event.snow_period.get_date_string(),
        edata.target_station.station_id, edata.snow_route.id
        if edata.snow_route else 'N/A', edata.target_station.s_limit,
        edata.target_station.label, edata.target_station.corridor.name)

    title = '%s (%s, %s)' % (edata.snow_event.snow_period.get_date_string(),
                             edata.target_station.station_id,
                             edata.target_station.corridor.name)

    if REPORT_MODE:
        fig = plt.figure(figsize=(16, 9), dpi=100, facecolor='white')
    else:
        fig = plt.figure(dpi=100, facecolor='white')
    ax1 = plt.subplot(211)
    ax2 = plt.subplot(223)
    ax3 = plt.subplot(224)

    # speed limit (blue) and wet-normal free-flow speed (red) reference lines
    ax1.axhline(y=edata.target_station.s_limit, c='b')
    if edata.wn_ffs:
        ax1.axhline(y=edata.wn_ffs, c='r')

    #ax1.plot(edata.us, marker='', label='Speed', c='#3794D5')
    ax1.plot(edata.ks, marker='', label='Density', c='#ADE2CD')

    if edata.normal_func:
        nt_us = edata.normal_func.nighttime_func.speeds(
            edata.snow_event.data_period.get_timeline(as_datetime=True))
    else:
        nt_us = [None] * edata.n_data

    ax1.plot(nt_us, c='k', label='Nighttime Speed')

    # snow start and end time (grey vertical lines)
    sstime = edata.snow_event.time_to_index(edata.snow_event.snow_start_time)
    setime = edata.snow_event.time_to_index(edata.snow_event.snow_end_time)
    ax1.axvline(x=sstime, c='#948D90')
    ax1.axvline(x=setime, c='#948D90')

    # draw chart using UK-Hills: each hill gets a distinct color/marker pair
    n_data = len(edata.us)
    ecs = ['#FF0000', '#FF7F00', '#FFFF00', '#00FF00', '#0000FF', '#9400D3']
    markers = ['o', 'x', 'd', '^', '<', '>', 'v', 's', '*', '+']
    ls = '-'
    used = []
    for gidx, hill in enumerate(hills):
        lw = 1
        c = ecs[gidx % len(ecs)]
        marker = '|'
        # pick the first color+marker combination not used yet
        for midx in range(len(markers)):
            marker_stick = '%s-%s' % (c, markers[midx])
            if marker_stick not in used:
                marker = markers[midx]
                used.append(marker_stick)
                break

        sidx, eidx = hill.sidx, hill.eidx
        _sidx, _eidx = sidx, eidx
        ax2.plot(edata.sks[_sidx:_eidx + 1],
                 edata.sus[_sidx:_eidx + 1] * edata.sks[_sidx:_eidx + 1],
                 c=c,
                 marker=marker,
                 ms=4,
                 zorder=2)
        ax3.plot(edata.sks[_sidx:_eidx + 1],
                 edata.sus[_sidx:_eidx + 1],
                 c=c,
                 marker=marker,
                 ms=4,
                 zorder=2)

        # overlay this hill's raw speed segment on the time-series panel
        _ks, _us = [None] * len(edata.ks), [None] * len(edata.us)
        for idx in range(sidx, eidx + 1):
            if idx >= n_data - 1:
                break
            _ks[idx] = edata.ks[idx]
            _us[idx] = edata.us[idx]

        ax1.plot(_us, lw=lw, ls=ls, c=c)

    # Normal and Wet-Normal Avg UK over a fixed density range
    _recv_ks = np.array(list(range(5, 120)))

    if edata.normal_func:
        uk_function = edata.normal_func.daytime_func.get_uk_function()
        if uk_function and uk_function.is_valid():
            ax2.plot(_recv_ks,
                     np.array(uk_function.speeds(_recv_ks)) * _recv_ks,
                     ls=':',
                     lw=2,
                     label='Normal Avg. QK')
            ax3.plot(_recv_ks,
                     uk_function.speeds(_recv_ks),
                     ls=':',
                     lw=2,
                     label='Normal Avg. UK')

            if uk_function._wn_uk:
                ax2.plot(_recv_ks,
                         np.array(uk_function.wet_normal_speeds(_recv_ks)) *
                         _recv_ks,
                         ls=':',
                         lw=2,
                         label='Wet-Normal QK')
                ax3.plot(_recv_ks,
                         uk_function.wet_normal_speeds(_recv_ks),
                         ls=':',
                         lw=2,
                         label='Wet-Normal UK')

    # marker sizes (largest first so overlapping markers stay visible)
    ms = [14, 13, 12, 11, 10, 8, 7]
    us = data_util.smooth(edata.us, 15)

    # srst
    if edata.srst is not None:
        data = np.array([None] * len(us))
        data[edata.srst] = us[edata.srst]
        ax1.plot(data, marker='o', ms=ms[0], c='#4ED8FF',
                 label='SRST')  # light blue circle

    # lst
    if edata.lst is not None:
        data = np.array([None] * len(us))
        data[edata.lst] = us[edata.lst]
        ax1.plot(data, marker='o', ms=ms[1], c='#FFAD2A',
                 label='LST')  # orange circle

    # sist
    if edata.sist is not None:
        data = np.array([None] * len(us))
        data[edata.sist] = us[edata.sist]
        ax1.plot(data, marker='o', ms=ms[2], c='#68BD20',
                 label='SIST')  # orange green

    # ncrt
    if edata.ncrt is not None:
        data = np.array([None] * len(us))
        data[edata.ncrt] = us[edata.ncrt]
        ax1.plot(data, marker='o', ms=ms[3], c='#FF0512',
                 label='NCRT')  # navy blue

    # pst
    if edata.pst:
        data = np.array([None] * len(us))
        data[edata.pst] = us[edata.pst]
        ax1.plot(data, marker='o', ms=ms[4], c='#9535CC',
                 label='PST')  # purple diamod

    # nfrt
    if edata.nfrt:
        data = np.array([None] * len(us))
        data[edata.nfrt] = us[edata.nfrt]
        ax1.plot(data, marker='*', ms=ms[4], c='#0072CC',
                 label='NFRT')  # light blue

    # reported times
    for rp in edata.rps:
        data = np.array([None] * len(us))
        data[rp] = us[rp]
        ax1.plot(data, marker='^', ms=ms[6], c='#FF0512',
                 label='RBRT')  # red circle

    # shrink ax1 to make room for the legend on the right
    box = ax1.get_position()
    ax1.set_position([box.x0, box.y0, box.width * 0.9, box.height])
    ax1.legend(loc='center left',
               bbox_to_anchor=(1, 0.5),
               numpoints=1,
               prop={'size': 12})

    # pick axis limits from the first step above the observed maxima
    # NOTE(review): assumes the data never exceeds the largest step value,
    # otherwise np.where(...) is empty and indexing [0] raises — confirm
    ulist = np.array([90, 100, 110, 120, 150, 200, 300])
    klist = np.array([80, 100, 150, 200, 250, 300, 350, 400, 450, 500, 600])
    qlist = np.array([0, 2000, 3000, 3500, 4000, 4500, 5000, 6000, 7000])
    _maxq = max(edata.sks * edata.sus)
    _maxk = max(edata.sks)
    _maxu = max(edata.sus)
    maxq = qlist[np.where(qlist > _maxq)][0]
    maxu = ulist[np.where(ulist > _maxu)][0]
    maxk = klist[np.where(klist > _maxk)][0]

    ax1.set_title(long_title)
    ax2.set_title(title + ' - Smoothed Q-K')
    ax3.set_title(title + ' - Smoothed U-K')
    ax1.set_ylabel('speed, density')
    ax1.set_xlabel('time')
    ax1.set_ylim(ymin=0, ymax=maxu)
    ax1.set_xlim(xmin=0, xmax=len(edata.sus))
    ax2.set_ylabel('flow')
    ax2.set_xlabel('density')

    ax2.set_ylim(ymin=0, ymax=maxq)
    ax2.set_xlim(xmin=0, xmax=maxk)
    ax3.set_ylim(ymin=0, ymax=maxu)
    ax3.set_xlim(xmin=0, xmax=maxk)
    ax3.set_ylabel('speed')
    ax3.set_xlabel('density')

    ax1.grid()
    ax2.grid()
    ax3.grid()
    ax2.legend(prop={'size': 12})
    ax3.legend(prop={'size': 12})

    if not REPORT_MODE:
        plt.show()
    else:
        import datetime
        timeline = edata.snow_event.data_period.get_timeline(as_datetime=True)
        timeline = [
            t - datetime.timedelta(seconds=setting.DATA_INTERVAL)
            for t in timeline
        ]

        # x-tick labels every 2 hours
        ntimes = [
            t.strftime('%H:%M') for idx, t in enumerate(timeline)
            if idx % (2 * 3600 / setting.DATA_INTERVAL) == 0
        ]
        loc_times = [
            idx for idx, t in enumerate(timeline)
            if idx % (2 * 3600 / setting.DATA_INTERVAL) == 0
        ]
        ax1.set_xticks(loc_times)
        ax1.set_xticklabels(ntimes, rotation=90)

        plt.tight_layout()
        postfix = ' (rp)' if edata.reported_events else ''
        file_path = os.path.join(
            output_path, '(%03d) %s%s%s.png' %
            (num, prefix, edata.target_station.station_id, postfix))
        fig.savefig(file_path, dpi=100, bbox_inches='tight')

    plt.close(fig)

    # debug-only xlsx dump, disabled by default
    if 0:
        wb = xlsxwriter.Workbook(
            os.path.join(
                ncrtes.get_infra().get_path('tmp', create=True),
                '%s %s (night).xlsx' %
                (edata.snow_event.snow_period.get_date_string(),
                 edata.target_station.station_id)))
        ws = wb.add_worksheet('night-data')
        prd = edata.snow_event.data_period.clone()
        prd.interval = setting.DATA_INTERVAL

        ws.write_column(0, 0, ['time'] + prd.get_timeline(as_datetime=False))
        ws.write_column(0, 1, ['speed'] + edata.us.tolist())
        ws.write_column(0, 2,
                        ['nighttime average speed'] + edata.night_us.tolist())
        ws.write_column(0, 3, ['smoothed speed'] + edata.sus.tolist())
        ws.write_column(0, 6,
                        ['nighttime ratio'] + edata.night_ratios.tolist())

        wb.close()
Example #11
0
    def ncrtes_target_station_identification():
        """Run target-station identification for a year in parallel workers.

        Reads ``year`` from the request form, ensures a WinterSeason record
        exists for the season, distributes the stations of all regular
        corridors round-robin over worker processes, waits for completion,
        then refreshes station/snow-route linkage.
        """
        from pyticas_ncrtes import ncrtes
        from pyticas_ncrtes.service import target_station_identification
        from multiprocessing import Process
        from pyticas import ticas
        from pyticas_ncrtes.app import NCRTESApp
        from pyticas_ncrtes.service.truck_route_updater import target_station_and_snowroute_info

        year = int(request.form.get('year'))
        months = get_normal_months_from_year(year)
        infra = ncrtes.get_infra()

        ws = WinterSeasonDataAccess()
        wsi = ws.get_by_months(months)
        if not wsi:
            # first run for this season: create the WinterSeason record
            wsi = itypes.WinterSeasonInfo()
            wsi.set_months(months)
            wsi.name = 'WinterSeason %s-%s' % (months[0][0], months[-1][0])
            ws.insert(wsi, autocommit=True)
        ws.close()

        #######################
        data_path = ticas._TICAS_.data_path
        n_process = 5
        stations = [[] for _ in range(n_process)]

        for cidx, corr in enumerate(infra.get_corridors()):
            # skip CD roads, reversible lanes and RTMC-internal corridors
            if not corr.dir or corr.is_CD() or corr.is_Rev() or corr.is_RTMC():
                continue

            for sidx, st in enumerate(corr.stations):
                if not st.is_normal_station() or not st.detectors:
                    continue
                # round-robin assignment to balance the worker load
                stations[sidx % n_process].append(st.station_id)

        procs = []
        for idx in range(n_process):
            p = Process(target=target_station_identification.run,
                        args=(idx + 1, stations[idx], months, data_path,
                              NCRTESApp.DB_INFO))
            p.start()
            procs.append(p)

        # BUGFIX: join the workers once; the duplicated second join loop
        # (left behind by commented-out code) has been removed
        for p in procs:
            p.join()

        target_station_and_snowroute_info(year)

        return jsonify({'code': 1, 'message': 'success'})
def _prepare_est_data(stations, **kwargs):
    """Build ESTData objects for a list of target stations.

    Recognized kwargs: case_name, reported_events, msg_queue, from_station,
    to_station, for_normalday, origin_prd, nd_offset, snow_routes, prd, year.

    :type stations: list[pyticas_ncrtes.itypes.TargetStationInfo]
    :type kwargs:
    :rtype: (list[pyticas_ncrtes.core.etypes.ESTData], list[pyticas_ncrtes.itypes.TargetStationInfo], list[int])
    """

    case_name = kwargs.get('case_name', None)
    reported_events = kwargs.get('reported_events', None)
    msg_queue = kwargs.get('msg_queue', None)
    from_station = kwargs.get('from_station', None)
    to_station = kwargs.get('to_station', None)
    for_normalday = kwargs.get('for_normalday', None)
    origin_prd = kwargs.get('origin_prd', None)
    nd_offset = kwargs.get('nd_offset', None)
    snow_routes = kwargs.get('snow_routes', None)
    prd = kwargs.get('prd', None)
    year = kwargs.get('year', None)

    logger = getLogger(__name__)
    infra = ncrtes.get_infra()

    # BUGFIX: msg_queue defaults to None but was used unconditionally,
    # raising AttributeError when omitted; fall back to a no-op notifier
    _notify = msg_queue.put if msg_queue is not None else (lambda msg: None)

    sidx_list, tsi_list, edata_list = [], [], []
    _notify('Case : %s ' % case_name)
    logger.debug(' > start : %s' % case_name)

    # for test: range limiting via from_station / to_station
    f_found = False
    f_to_found = False

    # iterate target stations to prepare ESTData list
    for sidx, tsi in enumerate(stations):

        if f_to_found:
            break

        # for test: skip stations until from_station is reached
        if from_station:
            if tsi.station_id == from_station:
                f_found = True
            if not f_found:
                continue

        # target station object
        station = infra.get_rnode(tsi.station_id)

        if not station.is_normal_station() or not station.detectors:
            continue

        # for test: stop after processing to_station
        if to_station and tsi.station_id == to_station:
            f_to_found = True

        # for test: substitute a normal-day period for the event period
        if for_normalday:
            prd = normalday.get_normal_period(station, origin_prd, nd_offset)
            reported_events = {}

        _notify('  - Station : %s ' % tsi.station_id)
        logger.debug('   - Station : %s' % tsi.station_id)

        try:
            # Normal Average UK Function (may be absent for this station)
            if tsi._normal_function:
                nf = tsi._normal_function.func
            else:
                nf = None

            edata = est.prepare_data(tsi, station, year, prd.start_date,
                                     prd.end_date, snow_routes,
                                     reported_events, nf)

        except Exception as ex:
            # best-effort: log and move on to the next station
            logger.debug(tb.traceback(ex, f_print=False))
            continue

        if not edata:
            continue

        edata_list.append(edata)
        tsi_list.append(tsi)
        sidx_list.append(sidx)

    return edata_list, tsi_list, sidx_list
def run(pid, stations, months, data_path, db_info):
    """ target station identification main process

    Parameters
    ===========
        - pid : process identification for multi-processing
        - stations : station list
        - months : month list
        - data_path : TICAS data path

    :type pid: int
    :type stations: list[str]
    :type months: list[(int, int)]
    :type data_path : str
    :type db_info: dict
    :return:
    """
    if db_info:
        # worker process: set up its own Infra and DB connection
        Infra.initialize(data_path)
        infra = Infra.get_infra()

        # BUGFIX: identity comparison with None (was `== None`)
        if conn.Session is None:
            conn.connect(db_info)
    else:
        infra = ncrtes.get_infra()

    logger = getLogger(__name__)
    logger.info('starting target station identification')

    wsDA = WinterSeasonDataAccess()
    nfDA = NormalFunctionDataAccess()
    tsDA = TargetStationDataAccess()

    # process start time
    stime = time.time()
    n_stations = len(stations)
    for sidx, st in enumerate(stations):
        station = infra.get_rnode(st)
        logger.info('# PID=%d, SIDX=%d/%d, STATION=%s' % (pid, sidx, n_stations, st))
        try:
            nf = nsrf.get_normal_function(station, months, wsDA=wsDA, nfDA=nfDA, tsDA=tsDA, autocommit=True)
            if nf and nf.is_valid():
                logger.info('  - %s is valid' % station.station_id)
            else:
                logger.debug('  - %s is not valid (nf=%s)' % (station.station_id, nf))

        except Exception as ex:
            # one failing station must not abort the whole batch
            logger.warning(tb.traceback(ex, False))

    wsDA.close()
    nfDA.close()
    tsDA.close()

    etime = time.time()

    logger.info('end of target station identification (elapsed time=%s)' % timeutil.human_time(seconds=(etime - stime)))
def _chart(edata, ratios, lsratios, sratios):
    """Render debug charts (U-K curve and time series) for one ESTData.

    No-op unless DEBUG_MODE is enabled.

    :type edata: pyticas_ncrtes.core.etypes.ESTData
    :type ratios:  numpy.ndarray
    :type lsratios:  numpy.ndarray
    :type sratios:  numpy.ndarray
    """
    if not DEBUG_MODE:
        return

    import matplotlib.pyplot as plt

    infra = ncrtes.get_infra()
    # kept for the (currently commented-out) savefig call below
    output_path = infra.get_path('ncrtes/ut', create=True)

    # figure 1: smoothed U-K curve against normal / wet-normal references
    plt.figure(facecolor='white', figsize=(16, 8))
    plt.plot(edata.lsks, edata.lsus)
    _ks = list(range(5, 100))
    _nus = edata.normal_func.daytime_func.recovery_speeds(_ks)
    _wnus = edata.normal_func.daytime_func.recovery_function.wet_normal_speeds(_ks)
    plt.plot(edata.lsks, edata.lsus)
    plt.plot(_ks, _nus, c='k')
    plt.plot(_ks, _wnus, c='orange')
    plt.grid()

    # figure 2: time series with detected time points and thresholds
    plt.figure(facecolor='white', figsize=(16, 8))
    ax1 = plt.subplot(111)
    ax1.plot(edata.us, c='#aaaaaa')
    ax1.plot(edata.ks, c='#90A200')
    ax1.plot(edata.night_us, c='k')
    ax1.plot(edata.qus, c='r')
    ax1.plot(edata.sus, c='#0B7CCC')

    if edata.wn_ffs:
        ax1.axhline(y=edata.wn_ffs, c='r', label='wn_ffs') # r

    if edata.snowday_ffs:
        ax1.axhline(y=edata.snowday_ffs, c='#E5E900', label='snowday_ffs') # light yellow

    ax1.axhline(y=edata.target_station.s_limit, c='b')
    if edata.ncrt_search_uth:
        ax1.axhline(y=edata.ncrt_search_uth, c='k')

    if edata.may_recovered_speed:
        ax1.axhline(y=edata.may_recovered_speed, c='k')

    # snow event boundaries
    ax1.axvline(x=edata.snow_event.snow_start_index, c='#EAEAEA', ls='-.')
    ax1.axvline(x=edata.snow_event.snow_end_index, c='#EAEAEA', ls='-.')

    if edata.ncrt_search_sidx:
        ax1.axvline(x=edata.ncrt_search_sidx, c='b', ls='-.')
    if edata.ncrt_search_eidx:
        ax1.axvline(x=edata.ncrt_search_eidx, c='b', ls='-.')

    if edata.wn_ffs_idx:
        ax1.axvline(x=edata.wn_ffs_idx, c='g')

    if edata.ncrt:
        ax1.axvline(x=edata.ncrt, c='r', lw=2, label='ncrt')

    # secondary axis for ratio curves (created lazily)
    ax2 = None
    if ratios is not None and any(ratios):
        ax2 = ax1.twinx()
        ax2.plot(lsratios, c='#00CC04', label='normal-ratios')
        ax2.set_ylim(ymax=1.4, ymin=0.5)

    if edata.wn_ffs and edata.wn_sratios is not None and any(edata.wn_sratios):
        if not ax2:
            ax2 = ax1.twinx()
        ax2.plot(edata.wn_sratios, c='#CC07B7', label='wn-ratios')
        ax2.plot(edata.wn_qratios, c='k', label='wn-qatios')

    plt.title('%s (%s, s_limit=%d)' % (
        edata.target_station.station_id,
        edata.target_station.corridor.name,
        edata.target_station.s_limit
    ))

    plt.grid()

    # merge the legends of both axes when a twin axis exists
    if ax2:
        h1, l1 = ax1.get_legend_handles_labels()
        h2, l2 = ax2.get_legend_handles_labels()
        ax1.legend(h1+h2, l1+l2)
    else:
        ax1.legend()

    filename = '%s.png' % edata.target_station.station_id
    # plt.savefig(os.path.join(output_path, filename))
    # BUGFIX: plt.show() was called twice in a row (blocking the process
    # on two consecutive windows); show once
    plt.show()
Example #15
0
def _write_chart_and_data(ddata, uk_origin, Kl, seg_func):
    """ Save a debug u-k chart (PNG) and the fitted speed curve data (XLSX).

    Does nothing unless the module-level ``DEBUG_MODE`` flag is set.

    :param ddata: daily-data holder providing `station`, `periods` and
        `not_congested_periods` (project type; declared outside this chunk)
    :type uk_origin: dict[float, float]
    :param Kl: threshold density shown in the chart title (the title labels
        it "Kt")
    :param seg_func: segmented speed function providing `funcs` and `speed(k)`
    """
    if not DEBUG_MODE:
        return

    import matplotlib.pyplot as plt
    plt.figure()

    # original u-k observations as grey crosses
    _us_origin, _ks_origin = data_util.dict2sorted_list(uk_origin)
    ks_origin = np.array(_ks_origin)
    us_origin = np.array(_us_origin)

    plt.scatter(ks_origin, us_origin, marker='x', color='#999999')

    # NOTE: the original code also iterated `seg_func.funcs`, sampling each
    # segment's speeds only to feed a commented-out `plt.scatter(...)`, and
    # contained a bare no-op `isinstance(...)` statement; that loop had no
    # observable effect and has been removed.

    # sample the combined segmented function at integer densities
    _ks = list(range(10, 200))
    _us = [seg_func.speed(k) for k in _ks]

    plt.plot(_ks, data_util.smooth(_us, 11), c='b', lw=2)

    plt.xlim(xmin=0, xmax=160)
    plt.ylim(ymin=0, ymax=120)
    plt.title('%s (Kt=%.2f, SL=%s, label=%s)' %
              (ddata.station.station_id, Kl, ddata.station.s_limit,
               ddata.station.label))
    plt.grid()
    periods = sorted(ddata.periods + ddata.not_congested_periods,
                     key=lambda prd: prd.start_date)
    ffpath = _output_path(
        'normal_function', '(%d-%d) %s.png' %
        (periods[0].start_date.year, periods[-1].end_date.year,
         ddata.station.station_id))
    plt.tight_layout()
    plt.savefig(ffpath, dpi=100, bbox_inches='tight')
    # plt.show()
    plt.close()

    # dump the sampled curve next to the chart so it can be inspected in Excel
    import xlsxwriter
    import os
    from pyticas_ncrtes import ncrtes
    infra = ncrtes.get_infra()
    output_dir = infra.get_path('ncrtes/normal-data-set', create=True)
    data_file = os.path.join(
        output_dir, '(%d-%d) %s (func).xlsx' %
        (periods[0].start_date.year, periods[-1].end_date.year,
         ddata.station.station_id))
    wb = xlsxwriter.Workbook(data_file)
    ws = wb.add_worksheet('uk')
    ws.write_column(0, 0, ['k'] + _ks)
    ws.write_column(0, 1, ['u'] + _us)
    wb.close()
    def ncrtes_ts_update():
        """ Update the detector (lane) configuration of a target station.

        Reads `year`, `station_id` and `detectors` (comma-separated detector
        names) from the request form and persists them as the station's
        TargetLaneConfig, re-deriving the normal function when the detector
        set changed.  NOTE(review): looks like a Flask view — the route
        decorator is outside this chunk; confirm there.
        """
        year = int(request.form.get('year'))
        station_id = request.form.get('station_id')
        detectors = request.form.get('detectors')

        infra = ncrtes.get_infra()
        # raw form value kept for the change check below, before `detectors`
        # is re-bound to a stripped list
        str_detectors = str(detectors)

        # ---- input validation ------------------------------------------
        if not year:
            return prot.response_fail('Year Info must be entered.')

        if not station_id:
            return prot.response_fail('Station Info must be entered.')

        if not detectors:
            return prot.response_fail('Detectors must be entered.')

        station = infra.get_rnode(station_id)
        if not station:
            return prot.response_fail('Station %s does not exists.' %
                                      station_id)

        # every submitted detector must exist and belong to this station
        station_detectors = [det.name for det in station.detectors]
        detectors = [x.strip() for x in detectors.split(',')]
        for det_name in detectors:
            det = infra.get_detector(det_name)
            if not det:
                return prot.response_fail('Detector %s does not exists.' %
                                          det_name)
            if det.name not in station_detectors:
                return prot.response_fail(
                    'Detector %s does not exists on the station.' % det_name)

        # ---- ensure a winter-season record exists for this year --------
        wsDA = WinterSeasonDataAccess()
        wsi = wsDA.get_by_year(year)
        if not wsi:
            months = get_normal_months_from_year(year)
            wsi = itypes.WinterSeasonInfo()
            wsi.set_months(months)
            wsi.name = 'WinterSeason %s-%s' % (months[0][0], months[-1][0])
            wsDA.insert(wsi, autocommit=True)
        wsDA.close()

        # existing target-station record (may be None)
        tsDA = TargetStationDataAccess()
        ex_tsi = tsDA.get_by_station_id(year, station_id)
        tsDA.close()

        # ---- lane-config record: load or create ------------------------
        tlcDA = TargetLaneConfigDataAccess()
        ex_tlci = tlcDA.get_by_station_id(year, station_id)
        if not ex_tlci:
            ex_tlci = itypes.TargetLaneConfigInfo()
            ex_tlci.winterseason_id = wsi.id
            ex_tlci.station_id = station_id
            ex_tlci.detectors = ''
            ex_tlci.corridor_name = station.corridor.name
            model = tlcDA.insert(ex_tlci, autocommit=True)
            ex_tlci.id = model.id

        # update normal function
        # NOTE(review): `str_detectors` is the raw (unstripped) form value
        # while the stored value is a stripped join, so whitespace-only
        # differences still trigger a recompute; `nf`/`nfi` are never read
        # afterwards — confirm whether _update_normal_function is called
        # purely for its side effects.
        if ex_tsi and ex_tlci.detectors != str_detectors:
            target_station = infra.get_rnode(ex_tlci.station_id)
            valid_detectors = [
                infra.get_detector(det_name) for det_name in detectors
            ]
            normal_months = get_normal_months_from_year(year)
            nf, nfi = None, None
            try:
                nf, nfi = _update_normal_function(target_station,
                                                  normal_months,
                                                  valid_detectors)
            except Exception as ex:
                # best-effort: log the failure and still save the lane config
                from pyticas.tool import tb
                tb.traceback(ex)

        # query to update database
        ex_tlci.detectors = ','.join(detectors)
        updated = tlcDA.update(ex_tlci.id, ex_tlci.get_dict(), autocommit=True)

        tlcDA.close()
        if updated:
            return prot.response_success(obj=ex_tlci.id)
        else:
            return prot.response_fail("fail to update (id={})".format(
                ex_tlci.id))
def _est_groups(year,
                request_param,
                snow_routes,
                snri_list,
                from_station=None,
                to_station=None):
    """ Build groups of target stations to estimate.

    Groups are keyed by 'user-defined', corridor name, or
    '<snowroute-id>-route1/2' depending on which selector the request
    provides; stations without a snow-route id are dropped at the end.

    :type year: int
    :type request_param: pyticas_ncrtes.itypes.EstimationRequestInfo
    :type snow_routes: list[etypes.SnowRoute]
    :type snri_list: list[pyticas_ncrtes.itypes.SnowRouteInfo]
    :type from_station: str
    :type to_station: str
    :rtype: dict[str, list[pyticas_ncrtes.itypes.TargetStationInfo]]
    """
    infra = ncrtes.get_infra()
    logger = getLogger(__name__)

    tsDA = TargetStationDataAccess()
    nfDA = NormalFunctionDataAccess()

    # make station groups by corridor or snow route
    est_groups = {}
    TS_CACHE = {}

    if hasattr(request_param,
               'target_stations') and request_param.target_stations:

        # explicit station list supplied by the user
        target_stations = [
            tsDA.get_by_station_id(year, st)
            for st in request_param.target_stations
        ]
        est_groups['user-defined'] = [tsi for tsi in target_stations if tsi]

    elif request_param.target_corridors:

        logger.debug('!estimate > prepare station list for each corridor')

        for corr_name in request_param.target_corridors:

            # TODO: for test
            if 'I-94(EB)' == corr_name:
                from_station = 'S1115'
                to_station = 'S1051'
            elif 'I-94(WB)' == corr_name:
                from_station = 'S1058'
                to_station = 'S1111'

            logger.debug(' -> %s' % corr_name)

            tsis = tsDA.list_by_corridor_name(year, corr_name)
            if not tsis:
                logger.debug('!!! no target stations for %s' % corr_name)
                continue

            # TODO: for test
            # keep only the stations between `from_station` and `to_station`
            # in the DB-returned order
            f_found, f_to_found = False, False
            _tsis = []
            for tsi in tsis:

                if f_to_found:
                    break

                # for test
                if from_station:
                    if tsi.station_id == from_station:
                        f_found = True
                    if not f_found:
                        continue

                if to_station and tsi.station_id == to_station:
                    f_to_found = True

                _tsis.append(tsi)

            if from_station:
                tsis = _tsis
            ###############

            # order the target stations by their position along the corridor
            corr = infra.get_corridor_by_name(corr_name)
            st_indices = {
                st.station_id: idx
                for idx, st in enumerate(corr.stations)
            }
            stsis = sorted(tsis, key=lambda tsi: st_indices[tsi.station_id])

            if from_station:
                sidx = st_indices.get(from_station)
                eidx = st_indices.get(to_station)
                # BUG FIX: the original condition was
                # `>= sidx or <= sidx` (always True) and `if sidx and eidx`
                # skipped corridor index 0; keep stations in [sidx, eidx]
                if sidx is not None and eidx is not None:
                    stsis = [
                        tsi for tsi in stsis
                        if sidx <= st_indices[tsi.station_id] <= eidx
                    ]

            # remove `station`s in curve
            candidates = target.station.get_target_stations(corr_name)
            candidate_stations = list(candidates)
            candidate_station_ids = [st.station_id for st in candidates]

            _group = [
                tsi for tsi in stsis if tsi.snowroute_name
                and tsi.station_id in candidate_station_ids
            ]
            _group_st_ids = [
                tsi.station_id for tsi in _group if tsi._normal_function
            ]

            res = []
            for st in candidate_stations:

                # no target station record yet: synthesize one from the
                # candidate station if a normal function exists for it
                if st.station_id not in _group_st_ids:
                    tsi = TargetStationInfo()
                    tsi.station_id = st.station_id
                    tsi.detectors = ','.join(
                        [det.name for det in target.get_target_detectors(st)])

                    snri = _find_snow_route(st, snri_list)
                    if snri:
                        tsi.snowroute_name = snri.name
                        tsi.snowroute_id = snri.id

                    nfi = nfDA.get_by_station(year, st.station_id)
                    if nfi:
                        tsi.normal_function_id = nfi.id
                        tsi._normal_function = nfi
                        res.append(tsi)
                else:
                    # target station
                    _sts = [
                        tsi for tsi in stsis if tsi.station_id == st.station_id
                    ]
                    res.append(_sts[0])

            est_groups[corr_name] = res

    elif request_param.target_snow_routes:
        logger.debug('!estimate > prepare station list for each snow route')
        for snow_route_id in request_param.target_snow_routes:
            logger.debug(' -> snow_route.id=%d' % snow_route_id)
            snri, target_stations1, target_stations2 = _get_stations_from_snowrouteinfos(
                year, snow_route_id, snri_list, tsDA, TS_CACHE)

            if not snri:
                # logger.warn() is deprecated; use warning()
                logger.warning('Cannot find snow-route information')
                continue
            if not target_stations1:
                logger.debug(
                    '!!! no target stations for route1 of snowroute.id=%d' %
                    snow_route_id)
            else:
                est_groups['%d-route1' % snri.id] = target_stations1

            if not target_stations2:
                logger.debug(
                    '!!! no target stations for route2 of snowroute.id=%d' %
                    snow_route_id)
            else:
                est_groups['%d-route2' % snri.id] = target_stations2

    tsDA.close()
    nfDA.close()  # FIX: was never closed in the original

    # Remove the target stations having normal-function adjusted from other target station
    # for k in est_groups.keys():
    #     est_groups[k] = [ tsi for tsi in est_groups[k] if not tsi._normal_function or not tsi._normal_function.adjusted_from]

    # drop stations that are not associated with a snow route
    for k in est_groups.keys():
        est_groups[k] = [tsi for tsi in est_groups[k] if tsi.snowroute_id]

    return est_groups
def estimate(request_param, msg_queue=None, **kwargs):
    """ Run the NCRT (Normal Condition Recovery Time) estimation process.

    Groups the target stations (by user list, corridor, or snow route),
    prepares per-station estimation data, runs the estimator and phase
    detection, then writes a report per group.

    NOTE(review): despite the declared :rtype:, this function returns
    ``False`` when there are no target stations and otherwise falls off the
    end (``None``) — callers should not rely on the documented return value.

    :type request_param: pyticas_ncrtes.itypes.EstimationRequestInfo
    :type msg_queue: queue.Queue
    :rtype: list[pyticas_ncrtes.est.etypes.SnowData]
    """

    logger = getLogger(__name__)

    # optional tuning knobs passed through from the caller
    from_station = kwargs.get('from_station', None)
    to_station = kwargs.get('to_station', None)
    for_normalday = kwargs.get('normalday', None)
    nd_offset = kwargs.get('nd_offset', 2)

    stime = time.time()

    infra = ncrtes.get_infra()
    # fall back to a throwaway queue so progress messages can always be put
    msg_queue = msg_queue or Queue()

    logger.info('@@@ NCRT estimation process start')

    # prepare reported events
    reported_events = _reported_events(request_param)

    # prepare time period and route information
    year = _get_year(request_param.snow_start_time)
    snow_routes, snri_list = _snow_routes(year)
    prd = _snow_event(request_param)

    # keep an untouched copy; `prd` may be modified downstream
    origin_prd = prd.clone()

    # make station groups by corridor or snow route
    # to run by the the group
    logger.info('year : %d' % year)
    est_groups = _est_groups(year, request_param, snow_routes, snri_list,
                             from_station, to_station)

    if not est_groups:
        logger.info('! No target stations to estimate')
        return False

    # run estimation process for each station group
    logger.debug('!estimate > run estimation process for each station group')

    for case_name, stations in est_groups.items():

        msg_queue.put('Case : %s ' % case_name)
        logger.debug(' > start : %s' % case_name)

        # iterate target stations to prepare ESTData list
        edata_list, tsi_list, sidx_list = _prepare_est_data(
            stations,
            prd=prd,
            year=year,
            reported_events=reported_events,
            snow_routes=snow_routes,
            msg_queue=msg_queue,
            from_station=from_station,
            to_station=to_station,
            for_normalday=for_normalday,
            origin_prd=origin_prd,
            nd_offset=nd_offset)

        # logger.debug(' > finding NCRT by u')
        # for edata in edata_list:
        #     ncrt_by_u.estimate(edata)
        # logger.debug('end...')
        # return

        logger.debug(' > creating sections')
        sections = section.sections(edata_list)

        logger.debug(' > finding interested points')
        # determine u-k change points by snowing
        # bysnowing.find(edata_list, sections)

        logger.debug(' > start estimating')
        for idx, edata in enumerate(edata_list):
            # Run estimation process
            # logger.debug(' ==> %s' % edata.target_station.station_id)
            est.estimate(sidx_list[idx], tsi_list[idx], edata)

        # adjust NCRT of individual stations
        logger.debug(' > sectionwide adjust NCRTs')

        # TODO: enable this...
        ncrt_sectionwide.adjust_ncrts(edata_list, sections)

        # remove single-station section without NCRT (type1)
        edata_list, sections = ncrt_sectionwide.remove_section_with_single_station(
            edata_list, sections)

        # determine NCRT (type2) using section-wide data
        # logger.debug(' > sectionwide alternatives')
        # ncrt_sectionwide.determine(edata_list, sections)

        # determine phase change points
        logger.debug(' > phase change points')
        for idx, edata in enumerate(edata_list):
            try:
                phase.determine(edata)
            except Exception as ex:
                # best-effort: skip stations whose phase detection fails
                tb.traceback(ex)
                continue

        output_path = os.path.join(infra.get_path('ncrtes',
                                                  create=True), 'whole_data',
                                   prd.start_date.strftime('%Y-%m-%d'),
                                   case_name)
        est.report(case_name, edata_list, output_path)

        # NOTE(review): this elapsed-time log is inside the per-group loop,
        # so it fires once per case and always measures from the overall
        # start time — confirm whether it was meant to sit after the loop.
        etime = time.time()

        logger.info('@@@ End of NCRT estimation process (elapsed time=%s)' %
                    timeutil.human_time(seconds=(etime - stime)))
def dumps_function_without_commit(ncr_func,
                                  months,
                                  wsDA,
                                  nfDA,
                                  tsDA,
                                  autocommit=False):
    """ Persist a normal function (and its target-station record) through the
    given data-access objects, committing only when `autocommit` is set.

    :type ncr_func: pyticas_ncrtes.core.etypes.NSRFunction
    :type months: list[(int, int)]
    :type wsDA: WinterSeasonDataAccess
    :type nfDA: NormalFunctionDataAccess
    :type tsDA: TargetStationDataAccess
    :rtype: itypes.NormalFunctionInfo
    """
    # make sure a winter-season record exists for the given months
    season = wsDA.get_by_months(months)
    if not season:
        season = itypes.WinterSeasonInfo()
        season.set_months(months)
        season.name = 'WinterSeason %s-%s' % (months[0][0], months[-1][0])
        wsDA.insert(season, autocommit=True)

    # replace any previous normal-function record for this station/season
    nfi = itypes.NormalFunctionInfo()
    nfi.func = ncr_func
    nfi.station_id = ncr_func.station.station_id
    nfi.winterseason_id = season.id

    prev = nfDA.get_by_station(season.id, ncr_func.station.station_id)
    if prev:
        nfDA.delete(prev.id, autocommit=True)

    nfDA.insert(nfi, autocommit=autocommit)

    # only valid functions get (or refresh) a target-station record
    if ncr_func.is_valid():
        tsi = itypes.TargetStationInfo()
        tsi.station_id = ncr_func.station.station_id
        tsi.winterseason_id = season.id
        tsi.corridor_name = ncr_func.station.corridor.name
        tsi.sroute_id = None
        tsi.normal_function_id = nfi.id

        infra = ncrtes.get_infra()
        target_dets = get_target_detectors(infra.get_rnode(tsi.station_id))
        det_names = (','.join([det.name for det in target_dets])
                     if target_dets else None)

        existing = tsDA.get_by_station_id(season.year, tsi.station_id,
                                          as_model=True)
        if existing:
            # update the detector list of the existing row in place
            existing.detectors = det_names
        else:
            tsi.detectors = det_names
            tsDA.insert(tsi)
        if autocommit:
            tsDA.commit()

    return nfi
def collect(target_station, periods, **kwargs):
    """ return recovery and reduction pattern UK data set

    Collects daily u-k data over the given normal-season periods, keeps the
    recovery-side patterns, filters out abnormal stations/points, and dumps
    the resulting u-k set to an xlsx file for inspection.

    :type target_station: pyticas.ttypes.RNodeObject
    :type periods: list[pyticas.ttypes.Period]

    :rtype: (list[pyticas.ttypes.Period],
             dict[float, float], dict[float, float], dict[float, float],
             list[list[float]], list[list[float]],
             list[pyticas.ttypes.RNodeData], list[pyticas.ttypes.RNodeData],
             list[pyticas.ttypes.DetectorObject],
             list[pyticas.ttypes.Period],
            dict[float, float])
    """
    # Procedure
    # 1. check malfunctioned detector in a given target station
    # 2. iterate for all time periods
    #    2.1 call _collect_data_a_data with a time period
    #    2.2 save the daily data from 2.1
    # NOTE(review): `wb` is a module-level global holding the open workbook;
    # confirm no other function writes to it concurrently.
    global wb

    logger = getLogger(__name__)
    # NOTE(review): `allow_not_congested` is read but never used below
    allow_not_congested = kwargs.get('allow_not_congested', False)
    # detector checker may be injected by the caller; otherwise derive it
    dc, valid_detectors = kwargs.get('dc', None), kwargs.get('valid_detectors', None)
    if not dc:
        dc, valid_detectors = lane.get_detector_checker(target_station)

    # no usable detectors -> nothing to collect (11-tuple of None, matching
    # the normal return arity)
    if not dc:
        return None, None, None, None, None, None, None, None, None, None, None

    n_count = 0
    uss, kss, rnode_data_ks, rnode_data_us, used_periods, all_hills = ([] for _ in range(6))
    all_patterns, recovery_patterns, reduction_patterns = {}, {}, {}

    # open the xlsx dump up front; every exit path below closes it
    import xlsxwriter
    import os
    from pyticas_ncrtes import ncrtes
    infra = ncrtes.get_infra()
    output_dir = infra.get_path('ncrtes/normal-data-set', create=True)
    data_file = os.path.join(output_dir, '(%d-%d) %s.xlsx' % (periods[0].start_date.year, periods[-1].end_date.year, target_station.station_id))

    wb = xlsxwriter.Workbook(data_file)

    not_congested_periods, not_congested_patterns = [], {}

    for prd in periods:

        all_uk, recovery_uk, reduction_uk, u_all, k_all, us, ks, hills_a_day, is_not_congested = _collect_data_a_day(target_station, prd, dc)

        # skip days without a usable recovery pattern
        if not recovery_uk:
            continue

        # days without congestion are accumulated separately; `_unique_key`
        # presumably perturbs duplicate density keys so dict entries are not
        # overwritten — confirm against its definition
        if is_not_congested:
            not_congested_periods.append(prd)
            for rk in recovery_uk.keys():
                _tk = rk if rk in not_congested_patterns else _unique_key(rk, list(not_congested_patterns.keys()))
                if not _tk:
                    continue
                not_congested_patterns[_tk] = recovery_uk[rk]
            continue

        # keep the day's raw data only when both pattern sides exist
        if recovery_uk and reduction_uk:
            uss.append(u_all)
            kss.append(k_all)
            rnode_data_us.append(us)
            rnode_data_ks.append(ks)

        n_count += 1

        # merge the day's recovery pattern into the accumulated dict
        for rk in recovery_uk.keys():
            _tk = rk if rk in recovery_patterns else _unique_key(rk, list(recovery_patterns.keys()))
            if not _tk:
                continue
            recovery_patterns[_tk] = recovery_uk[rk]

        used_periods.append(prd)


    ks = list(recovery_patterns.keys())
    us = [ recovery_patterns[_k] for _k in ks ]

    # stations with abnormal free-flow speed: dump their data for later
    # inspection and bail out with the all-None tuple
    if has_abnormal_ffs(target_station, recovery_patterns):
        logger.debug('!! has abnormal ffs : station=%s, s_limit=%s' % (target_station.station_id, target_station.s_limit))

        all_uk = dict(not_congested_patterns)
        all_uk.update(recovery_patterns)

        ks = list(all_uk.keys())
        us = [ all_uk[_k] for _k in ks ]

        ws = wb.add_worksheet('uk')
        ws.write_column(0, 0, ['k'] + ks)
        ws.write_column(0, 1, ['u'] + us)
        wb.close()
        return None, None, None, None, None, None, None, None, None, None, None

    # drop outlier points, then rebuild the pattern dict from the survivors
    ks, us = filter_abnormal_data(target_station, ks, us)
    recovery_patterns = { _k : us[idx] for idx, _k in enumerate(ks) }
    # NOTE(review): all three pattern names alias the SAME dict object here,
    # so mutating one return value mutates them all — confirm callers treat
    # them as read-only
    reduction_patterns = recovery_patterns
    all_patterns = recovery_patterns
    ws = wb.add_worksheet('uk')
    ws.write_column(0, 0, ['k'] + ks)
    ws.write_column(0, 1, ['u'] + us)
    wb.close()

    return (used_periods, all_patterns, recovery_patterns, reduction_patterns,
            uss, kss, rnode_data_us, rnode_data_ks, valid_detectors,
            not_congested_periods, not_congested_patterns)