Example #1
def test6():
    # Fail
    # A large date span combined with seconds granularity overflows, even though
    # prevent_overflow=True tries to prevent it by using the smallest date (01JAN2019) as the julianBaseDate
    pathname = "/IRREGULAR/TIMESERIES/PARAM//IR-DAY/Ex14_Test6/"
    T = ['01JAN2019 01:00', '01JAN2455 00:00']
    tsc = TimeSeriesContainer()
    tsc.pathname = pathname
    tsc.interval = -1
    tsc.granularity = 1
    tsc.times = T
    tsc.values = [2019, 5000]
    tsc.numberValues = 2
    fid.put_ts(tsc, prevent_overflow=True)
    #
    ts = fid.read_ts(pathname, regular=False)
    print(ts.pytimes)
    print(ts.values)
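
These test snippets reference a module-level fid handle that is opened before the tests run. A minimal sketch of that setup, assuming the standard pydsstools imports and an illustrative file name (example14.dss is hypothetical):

from pydsstools.heclib.dss import HecDss
from pydsstools.core import TimeSeriesContainer

dss_file = "example14.dss"  # hypothetical file name for illustration

# Open the DSS file once; the test functions above use this module-level handle
fid = HecDss.Open(dss_file)

test6()

fid.close()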
Example #2
def test7():
    # Pass
    # Writing one time-value pair at a time with prevent_overflow=True is the safest approach
    pathname = "/IRREGULAR/TIMESERIES/PARAM//IR-DECADE/Ex14_Test7/"
    T = ['01JAN2019 02:01:05', '01JAN5000 01:02:06']
    V = [2019, 5000]
    for t, v in zip(T, V):
        tsc = TimeSeriesContainer()
        tsc.pathname = pathname
        tsc.interval = -1
        tsc.granularity = 60
        tsc.times = [t]
        tsc.values = [v]
        tsc.numberValues = 1
        fid.put_ts(tsc, prevent_overflow=True)

    ts = fid.read_ts(pathname, regular=False)
    print(ts.times)
    print(ts.pytimes)
    print(ts.values)
    return tsc, ts
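
The read-back does not have to cover the full record. A small sketch, assuming read_ts accepts the same window argument for irregular records that the pydsstools README shows for regular ones (the window bounds below are illustrative):

pathname = "/IRREGULAR/TIMESERIES/PARAM//IR-DECADE/Ex14_Test7/"
# Restrict the read to an illustrative one-year window
ts = fid.read_ts(pathname,
                 window=("01JAN2019 00:00:00", "31DEC2019 23:00:00"),
                 regular=False)
print(ts.pytimes)
print(ts.values)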
Example #3
import numpy as np
import pandas as pd

from pydsstools.heclib.dss import HecDss
from pydsstools.core import TimeSeriesContainer


def zstat2dss(zs_list, basin, ds, dss_file):
    dates = [i.grid.date for i in zs_list]
    sbasin_avg = [np.round(i.sbasin_avg, 4) for i in zs_list]
    sbasin_vol = [i.sbasin_vol for i in zs_list]
    basin_avg = [np.round(i.basin_avg, 4) for i in zs_list]
    basin_vol = [i.basin_vol for i in zs_list]
    names = [i.sbasin_names for i in zs_list]

    date_rav = np.ravel(np.repeat(dates, len(zs_list[0].sbasin_names)))
    sbasin_avg_rav = np.ravel(sbasin_avg)
    sbasin_vol_rav = np.ravel(sbasin_vol)
    names_rav = np.ravel(names)

    idx = pd.MultiIndex.from_tuples(zip(date_rav, names_rav),
                                    names=['date', 'name'])
    sbasin = pd.DataFrame(index=idx,
                          data={
                              'mean_swe': sbasin_avg_rav,
                              'vol': sbasin_vol_rav
                          })
    sbasin = sbasin.sort_index(level=0)

    idx = pd.MultiIndex.from_tuples(zip(
        dates, np.ravel(np.repeat('Total_Basin', len(dates)))),
                                    names=['date', 'name'])
    tbasin = pd.DataFrame(index=idx,
                          data={
                              'mean_swe': basin_avg,
                              'vol': basin_vol
                          })
    tbasin = tbasin.sort_index(level=0)

    idx = pd.date_range(
        sbasin.index.get_level_values(0).min(),
        sbasin.index.get_level_values(0).max())

    # Create/initialize the DSS file as a version 6 file before writing
    fid = HecDss.Open(dss_file, version=6)
    fid.close()

    for name, group in sbasin.groupby(level=1):

        #group.loc[:, 'wy'] = np.where(group.index.get_level_values(0).month>9,group.index.get_level_values(0).year+1,group.index.get_level_values(0).year)
        group.index = group.index.droplevel(1)

        #group.index = group.index.sort_values()
        group = group.reindex(idx, fill_value=-901.0)  # -901.0 is the HEC-DSS missing-value flag
        group.index = group.index + pd.DateOffset(hours=12)

        start_date = group.index.min().strftime('%d%b%Y %H:%M:%S')
        print(start_date)
        pname = '/{0}/{1}/AVG_SWE//1DAY/{2}/'.format(
            basin,
            name.upper().replace(' ', '_'), ds)

        print(pname)
        tsc = TimeSeriesContainer()
        tsc.granularity = 60  # 60 seconds, i.e., minute granularity
        tsc.numberValues = group.mean_swe.size
        tsc.startDateTime = start_date
        tsc.pathname = pname
        tsc.units = "M"
        tsc.type = "INST-VAL"
        tsc.interval = 1
        # must be a positive integer for a regular time series;
        # the actual interval is implied by the E part of the pathname
        tsc.values = group.mean_swe.values
        # values may be a list, array, or numpy array

        fid = HecDss.Open(dss_file)
        fid.deletePathname(tsc.pathname)
        status = fid.put(tsc)
        fid.close()

        pname = '/{0}/{1}/VOL//1DAY/{2}/'.format(
            basin,
            name.upper().replace(' ', '_'), ds)
        print(pname)

        tsc = TimeSeriesContainer()
        tsc.granularity = 60  # 60 seconds, i.e., minute granularity
        tsc.numberValues = group.index.size
        tsc.startDateTime = start_date
        tsc.pathname = pname
        tsc.units = "CUBIC_METERS"
        tsc.type = "INST-VAL"
        tsc.interval = 1
        # must be a positive integer for a regular time series;
        # the actual interval is implied by the E part of the pathname
        tsc.values = group.vol.values
        # values may be a list, array, or numpy array

        fid = HecDss.Open(dss_file)
        fid.deletePathname(tsc.pathname)
        status = fid.put(tsc)
        fid.close()

    for name, group in tbasin.groupby(level=1):
        #group.loc[:, 'wy'] = np.where(group.index.get_level_values(0).month>9,group.index.get_level_values(0).year+1,group.index.get_level_values(0).year)
        group.index = group.index.droplevel(1)

        #group.index = group.index.sort_values()

        group = group.reindex(idx, fill_value=-901.0)
        group.index = group.index + pd.DateOffset(hours=12)
        start_date = group.index.min().strftime('%d%b%Y %H:%M:%S')
        print(start_date)
        pname = '/{0}/{1}/AVG_SWE//1DAY/{2}/'.format(
            basin,
            name.upper().replace(' ', '_'), ds)

        print(pname)
        tsc = TimeSeriesContainer()
        tsc.granularity = 60  # 60 seconds, i.e., minute granularity
        tsc.numberValues = group.mean_swe.size
        tsc.startDateTime = start_date
        tsc.pathname = pname
        tsc.units = "M"
        tsc.type = "INST-VAL"
        tsc.interval = 1
        # must be a positive integer for a regular time series;
        # the actual interval is implied by the E part of the pathname
        tsc.values = group.mean_swe.values
        # values may be a list, array, or numpy array

        fid = HecDss.Open(dss_file)
        fid.deletePathname(tsc.pathname)
        status = fid.put(tsc)
        fid.close()

        pname = '/{0}/{1}/VOL//1DAY/{2}/'.format(
            basin,
            name.upper().replace(' ', '_'), ds)
        print(pname)

        tsc = TimeSeriesContainer()
        tsc.granularity = 60  # 60 seconds, i.e., minute granularity
        tsc.numberValues = group.index.size
        tsc.startDateTime = start_date
        tsc.pathname = pname
        tsc.units = "CUBIC_METERS"
        tsc.type = "INST-VAL"
        tsc.interval = 1
        # must be a positive integer for a regular time series;
        # the actual interval is implied by the E part of the pathname
        tsc.values = group.vol.values
        # values may be a list, array, or numpy array

        fid = HecDss.Open(dss_file)
        fid.deletePathname(tsc.pathname)
        status = fid.put(tsc)
        fid.close()
    return sbasin, tbasin
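
For context, a hypothetical sketch of how zstat2dss might be called. The ZonalStat and Grid dataclasses below are stand-ins for whatever zonal-statistics objects the real workflow produces; they only expose the attributes the function reads (grid.date, sbasin_names, sbasin_avg, sbasin_vol, basin_avg, basin_vol), and the basin, ds, and file names are made up:

from dataclasses import dataclass
from typing import List

import pandas as pd


@dataclass
class Grid:
    date: pd.Timestamp  # valid date of the SWE grid


@dataclass
class ZonalStat:
    grid: Grid
    sbasin_names: List[str]   # sub-basin names
    sbasin_avg: List[float]   # mean SWE per sub-basin (m)
    sbasin_vol: List[float]   # SWE volume per sub-basin (m^3)
    basin_avg: float          # basin-wide mean SWE (m)
    basin_vol: float          # basin-wide SWE volume (m^3)


# Three days of made-up statistics for two sub-basins
zs_list = [
    ZonalStat(grid=Grid(date=pd.Timestamp(2020, 1, d)),
              sbasin_names=["UPPER", "LOWER"],
              sbasin_avg=[0.12 * d, 0.08 * d],
              sbasin_vol=[1.0e6 * d, 7.5e5 * d],
              basin_avg=0.10 * d,
              basin_vol=1.75e6 * d)
    for d in (1, 2, 3)
]

sbasin, tbasin = zstat2dss(zs_list, basin="DEMO_BASIN",
                           ds="SNODAS", dss_file="swe_stats.dss")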