コード例 #1
0
def load_CR_climax_daily_data(fname, start_date, end_date, anom=False):
    """Load the daily Climax, CO cosmic-ray record from a plain-text file.

    Parameters
    ----------
    fname : str
        Path to a whitespace-delimited file with one value per day.
    start_date, end_date : datetime.date
        Period to keep in the returned field (passed to ``select_date``).
    anom : bool
        If True, remove the seasonal cycle via ``anomalise()``.

    Returns
    -------
    tuple
        ``(g, g_surr, seasonality)`` where ``g`` is the DataField, ``g_surr``
        a SurrogateField copy (or None when ``NUM_SURR == 0``) and
        ``seasonality`` the tuple returned by ``get_seasonality(True)``
        (or None).
    """
    raw = np.loadtxt(fname)

    # The record starts on 1 Jan 1994 and is sampled daily, so the ordinal
    # time axis is just consecutive integers -- no date loop needed.
    first = date(1994, 1, 1).toordinal()
    time = [first + t for t in range(raw.shape[0])]

    # Fixed: these were Python 2 print statements (`print raw.shape`),
    # which are syntax errors under Python 3.
    print(raw.shape)
    print(len(time))
    g = DataField(data=np.array(raw), time=np.array(time))
    g.location = 'Climax, CO cosmic data'

    g.select_date(start_date, end_date)

    if anom:
        g.anomalise()

    if NUM_SURR != 0:
        # Keep a surrogate copy of the deseasonalised field, then restore
        # the seasonality in the original field.
        g_surr = SurrogateField()
        seasonality = g.get_seasonality(True)
        g_surr.copy_field(g)

        g.return_seasonality(seasonality[0], seasonality[1], seasonality[2])
    else:
        g_surr, seasonality = None, None

    return g, g_surr, seasonality
コード例 #2
0
def load_nino34_wavelet_phase(start_date, end_date, anom=True,
                              fname='/home/nikola/Work/phd/data/nino34monthly.txt'):
    """Return the continuous-wavelet phase of the monthly NINO3.4 index.

    Parameters
    ----------
    start_date, end_date : datetime.date
        Period selected from the monthly series before the transform.
    anom : bool
        If True, anomalise (deseasonalise) the series first.
    fname : str
        Path to the NINO3.4 table: one row per year, column 0 the year,
        columns 1-12 the monthly values.  Parameterized (with the original
        path as default) so the loader is usable outside the author's box.

    Returns
    -------
    np.ndarray
        Instantaneous phase (radians) at the period ``PERIOD`` (years).
    """
    raw = np.loadtxt(fname)
    data = []
    time = []
    # Flatten the year-by-month table into one monthly series stamped with
    # the ordinal of the first day of each month.
    for y in range(raw.shape[0]):
        for m in range(1, 13):
            data.append(float(raw[y, m]))
            time.append(date(int(raw[y, 0]), m, 1).toordinal())

    g = DataField(data=np.array(data), time=np.array(time))
    g.location = "NINO3.4"
    g.select_date(start_date, end_date)

    if anom:
        g.anomalise()

    k0 = 6.  # wavenumber of Morlet wavelet used in analysis
    fourier_factor = (4 * np.pi) / (k0 + np.sqrt(2 + np.power(k0, 2)))
    per = PERIOD * 12  # period of interest in months (monthly sampling)
    s0 = per / fourier_factor  # corresponding wavelet scale

    # Single-scale transform (dj=0, j1=0) at scale s0.  Pass ``k0`` through
    # instead of repeating the literal 6. so the two cannot drift apart.
    wave, _, _, _ = wvlt.continous_wavelet(g.data,
                                           1,
                                           False,
                                           wvlt.morlet,
                                           dj=0,
                                           s0=s0,
                                           j1=0,
                                           k0=k0)
    phase = np.arctan2(np.imag(wave), np.real(wave))[0, :]

    return phase
コード例 #3
0
def load_neutron_NESDIS_data(fname, start_date, end_date, anom=True):
    """Load a monthly NESDIS neutron-monitor table and fill missing values.

    The file (two header rows skipped) has one row per year: column 0 is
    the year, columns 1-12 the monthly counts; 9999. marks a missing month.

    Bug fix: the original filled a missing month from ``raw[year, month±1/2]``,
    which for January averaged in the year number itself (``month - 1 == 0``
    reads column 0) plus a wrapped December via negative indexing, and
    raised IndexError for November/December (``month + 2 > 12``).
    Neighbours are now taken from the flattened monthly series, so they
    correctly cross year boundaries; at the series edges only the in-range
    neighbours are averaged.

    Returns ``(g, g_surr, seasonality)`` analogous to the other loaders
    (``g_surr``/``seasonality`` are None when ``NUM_SURR == 0``).
    """
    raw = np.loadtxt(fname, skiprows=2)
    data = []
    time = []
    for year in range(raw.shape[0]):
        for month in range(1, 13):
            data.append(float(raw[year, month]))
            time.append(date(int(raw[year, 0]), month, 1).toordinal())

    # Second pass: replace each missing value with the mean of up to four
    # surrounding months (two before, two after) in the flat series.
    # Like the original, a neighbour that is itself missing is not excluded
    # (though earlier-filled values are reused here).
    data = np.array(data)
    for i in np.where(data == 9999.)[0]:
        neighbours = [data[j] for j in (i - 2, i - 1, i + 1, i + 2)
                      if 0 <= j < data.shape[0]]
        data[i] = np.mean(neighbours)

    g = DataField(data=data, time=np.array(time))
    # Derive the station name from a fixed position in the path.
    # NOTE(review): assumes a specific directory-prefix length -- verify
    # against the callers' ``fname`` convention.
    g.location = ('%s cosmic data' % (fname[32].upper() + fname[33:-4]))

    g.select_date(start_date, end_date)

    if anom:
        g.anomalise()

    if NUM_SURR != 0:
        g_surr = SurrogateField()
        seasonality = g.get_seasonality()
        g_surr.copy_field(g)

        g.return_seasonality(seasonality[0], seasonality[1], None)
    else:
        g_surr, seasonality = None, None

    return g, g_surr, seasonality
コード例 #4
0
def load_cosmic_data(fname,
                     start_date,
                     end_date,
                     anom=True,
                     daily=False,
                     corrected=True):
    """Load the Oulu cosmic-ray series from a text file.

    Each valid data line is space separated, starts with a 'YYYY MM...'
    date, and carries the corrected count in field 4 and the uncorrected
    count in field 5.

    Parameters
    ----------
    fname : str
        Path to the data file.
    start_date, end_date : datetime.date
        Period kept in the returned field.
    anom : bool
        If True, anomalise the series.
    daily : bool
        True for daily sampling, False for monthly.
    corrected : bool
        Use the corrected counts (field 4) when True, else field 5.

    Returns
    -------
    tuple
        ``(g, g_surr, seasonality)`` as in the other loaders.
    """
    # corrected stands for if use corrected data or not
    from dateutil.relativedelta import relativedelta

    # Close the file deterministically instead of relying on GC.
    with open(fname) as f:
        lines = f.read().split('\n')

    data = []
    time = []
    # First line fixes the start date; step is one day or one month.
    d = date(int(lines[0][:4]), int(lines[0][5:7]), 1)
    delta = timedelta(days=1) if daily else relativedelta(months=+1)
    for line in lines:
        row = line.split(' ')
        if len(row) < 6:  # skip empty / malformed lines
            continue
        time.append(d.toordinal())
        data.append(float(row[4]) if corrected else float(row[5]))
        d += delta

    g = DataField(data=np.array(data), time=np.array(time))
    g.location = 'Oulu cosmic data'

    g.select_date(start_date, end_date)

    if anom:
        g.anomalise()

    # Fixed: the original assigned ``g.data = X[:, 0].copy()`` here, but no
    # ``X`` exists in this scope -- a leftover from another script that
    # would always raise NameError; removed.

    if NUM_SURR != 0:
        g_surr = SurrogateField()
        seasonality = g.get_seasonality(True)
        g_surr.copy_field(g)

        g.return_seasonality(seasonality[0], seasonality[1], seasonality[2])
    else:
        g_surr, seasonality = None, None

    return g, g_surr, seasonality
コード例 #5
0
ファイル: AR_model.py プロジェクト: Kianqunki/multi-scale
## -----------------
# Analysis settings for the AR-model experiment.
ANOMALISE = True
PERIOD = 8  # years, period of wavelet
WINDOW_LENGTH = 16384  # 13462, 16384
WINDOW_SHIFT = 1  # years, delta in the sliding window analysis
MEANS = True  # if True, compute conditional means, if False, compute conditional variance
WORKERS = 4
NUM_SURR = 50  # how many surrs will be used to evaluate
SURR_TYPE = 'MF'
diff_ax = (0, 8)  # means -> 0, 2, var -> 1, 8
mean_ax = (-1, 1)  # means -> -1, 1.5, var -> 9, 18

# Station temperature record from TG_STAID000027.txt, anomalised.
g = load_station_data('TG_STAID000027.txt', date(1834, 7, 28),
                      date(2014, 1, 1), ANOMALISE)
g_working = DataField()
g_surrs = DataField()

# Length of the loaded time series; the AR process below matches it.
TS_LEN = g.data.shape[0]

# map coeffs to numpy array
# NOTE(review): A_COEFFS is only assigned when RANDOM_COEFFS is truthy;
# the unconditional use below assumes it is defined earlier in the file
# otherwise -- verify, else this raises NameError.
if RANDOM_COEFFS:
    A_COEFFS = []
    for i in range(k):
        # Draw each AR coefficient uniformly from (-1, 1).
        A_COEFFS.append((2 * np.random.rand(1) - 1)[0])
a_coeffs = np.array(A_COEFFS)

# initialize first k time points to noise
ts = np.zeros((TS_LEN, ))
for i in range(k):
    ts[i] = np.random.normal(0, SIGMA_NOISE, 1)
コード例 #6
0
    axplot = [2.5, 5.5]
elif MOMENT == 'skewness':
    func = sts.skew
    axplot = [-0.5, 1]
elif MOMENT == 'kurtosis':
    func = sts.kurtosis
    axplot = [0, 5]

# load data - at least 32k of data because of surrogates
# 00047 - Hamburg, 00054 - Potsdam
g = load_station_data('TG_STAID000027.txt', date(1834, 4, 28),
                      date(2013, 10, 1), ANOMALISE)
if AMPLITUDE:
    # Non-anomalised copy of the same record for the amplitude analysis.
    g_amp = load_station_data('TG_STAID000027.txt', date(1834, 4, 28),
                              date(2013, 10, 1), False)
g_data = DataField()

print(
    "[%s] Wavelet analysis in progress with %d year window shifted by %d year(s)..."
    % (str(datetime.now()), WINDOW_LENGTH, WINDOW_SHIFT))
# Translate the period of interest (PERIOD years) into the Morlet
# wavelet scale s0 via the Fourier factor.
k0 = 6.  # wavenumber of Morlet wavelet used in analysis
y = 365.25  # year in days
fourier_factor = (4 * np.pi) / (k0 + np.sqrt(2 + np.power(k0, 2)))
period = PERIOD * y  # period of interest in days
s0 = period / fourier_factor  # get scale
# wavelet - data
wave, _, _, _ = wavelet_analysis.continous_wavelet(g.data,
                                                   1,
                                                   False,
                                                   wavelet_analysis.morlet,
                                                   dj=0,
コード例 #7
0
                      date(2015, 1, 1),
                      ANOMALISE)  # 15-01-1924 if 32k, 28-04-1834 if 64k
if AMPLITUDE:
    g_amp = load_station_data('../data/TG_STAID000027.txt', date(1775, 1, 1),
                              date(2015, 1, 1), False)

## HAMBURG -- TG_STAID000047, POTSDAM -- TG_STAID000054
# g = load_station_data('../data/TG_STAID000054.txt', date(1893,1,1), date(2014,1,1), ANOMALISE) # 15-01-1924 if 32k, 28-04-1834 if 64k
# if AMPLITUDE:
#     g_amp = load_station_data('../data/TG_STAID000054.txt', date(1893,1,1), date(2014, 1, 1), False)

# ERA
#g = load_bin_data('../data/ERA_time_series_50.0N_15.0E.bin', date(1958,4,28), date(2013,10,1), ANOMALISE)
# ECA
#g = load_bin_data('../data/ECA&D_time_series_50.1N_14.4E.bin', date(1950,4,28), date(2013,10,1), ANOMALISE)
g_working = DataField()
g_surrs = DataField()
if AMPLITUDE:
    g_working_amp = DataField()
    g_surrs_amp = DataField()
if MOMENT == 'mean':
    func = np.mean
    if AMPLITUDE:
        diff_ax = (0, 2)  # means -> 0, 2, var -> 1, 8
        mean_ax = (18, 22)  # means -> -1, 1.5, var -> 9, 18
    else:
        diff_ax = (0, 5)
        mean_ax = (-1, 1.5)
elif MOMENT == 'std':
    func = np.var
    diff_ax = (1, 15)
コード例 #8
0
#     "grid" : "2.5/2.5",
#     "time" : "00/06/12/18", ## daily
#     "date" : "20010101/to/20131231",
#     "area" : "50/-15/30/5", ## north/west/south/east
#     "type" : "an",
#     "class" : "e4",
#     "format" : "netcdf",
#     "padding" : "0",
#     "target" : "test.nc"
#    })

#==============================================================================

# load ERA-40 as g1 and ERA-Interim as g2
print("[%s] Loading data..." % (str(datetime.now())))
g1 = DataField()
g2 = DataField()

g1.load('Spain.ERA.58-01.nc', 't2m', 'ERA-40')
g2.load('Spain.ERA.01-13.nc', 't2m', 'ERA-40')

# concatenate: the two records overlap at g1's last time stamp; ``idx`` is
# the first g2 index strictly after it, so only the non-overlapping tail
# of g2 is appended.
# NOTE(review): ``idx`` is a length-1 ndarray used as a slice bound --
# relies on numpy accepting that; confirm exactly one match exists.
last = g1.time[-1]
idx = np.where(g2.time == last)[0] + 1
ndays = g1.time.shape[0] + g2.time[idx:].shape[0]

# Joint (time, lat, lon) arrays covering both periods on g1's grid.
data = np.zeros((ndays, g1.lats.shape[0], g1.lons.shape[0]))
time = np.zeros((ndays, ))

data[:g1.time.shape[0], ...] = g1.data
data[g1.time.shape[0]:, ...] = g2.data[idx:]
コード例 #9
0
ファイル: quick_render.py プロジェクト: Kianqunki/multi-scale
    plt.title(title)
    cbar = plt.colorbar(format=r"%2.2f",
                        shrink=0.75,
                        ticks=np.arange(mi, ma + step, (ma - mi) / 8),
                        aspect=25,
                        drawedges=False)
    cbar.set_label(cbar_label)
    cbar_obj = plt.getp(cbar.ax.axes, 'yticklabels')
    plt.setp(cbar_obj, fontsize=10, color=(.1, .1, .1))
    if filename != None:
        plt.savefig(filename)
    else:
        plt.show()


g = DataField()
# Gridded daily mean temperature (file naming suggests E-OBS -- verify).
g.load('tg_0.25deg_reg_v9.0.nc', 'tg')
means = True  # render yearly means
daily = False  # unused in this branch

if means:
    # Walk the daily field one calendar year at a time and render a map
    # per year, saved under imgs/.
    idx = 0
    y = 1950
    while idx < g.data.shape[0]:
        # NOTE(review): idx2 is the index of 1 Jan of year ``y``; on the
        # first pass the slice [0:idx2] covers everything *before* 1950 --
        # confirm the data actually starts at 1950-01-01.
        idx2 = g.find_date_ndx(date(y, 1, 1))
        render_geo_field(g.data[idx:idx2, ...], g.lats, g.lons, None, None,
                         False, 'Yearly mean temperature %s' % str(y),
                         'temperature [$^{\circ}C$]',
                         'imgs/temp_mean%s.png' % str(y))
        y += 1
        idx = idx2
コード例 #10
0
ev_start_year = 1861 if USE_SURR else 1802

for MIDDLE_YEAR in range(ev_start_year, 1988):

    if USE_SURR:

        result_temp_surr = np.zeros((NUM_SURR, 8, 2))
        for surr in range(NUM_SURR):
            sg.construct_surrogates_with_residuals()
            sg.add_seasonality(mean[:-1], var[:-1], trend[:-1])  # so SAT data
            g.data = sg.surr_data.copy()
            tg_sat = g.copy_data()
            g.time = g.time[:-1]
            g.anomalise()

            g_temp = DataField()
            tg_temp = tg_sat.copy()
            sy = int(MIDDLE_YEAR - (WINDOW_LENGTH / year) / 2)
            g_temp.data = g.data.copy()
            g_temp.time = g.time.copy()
            start = g_temp.find_date_ndx(date(sy - 4, sm, sd))
            end = start + 16384 if WINDOW_LENGTH < 16000 else start + 32768

            g_temp.data = g_temp.data[start:end]
            g_temp.time = g_temp.time[start:end]
            tg_temp = tg_temp[start:end]

            k0 = 6.  # wavenumber of Morlet wavelet used in analysis
            fourier_factor = (4 * np.pi) / (k0 + np.sqrt(2 + np.power(k0, 2)))
            period = PERIOD * year  # frequency of interest
            s0 = period / fourier_factor  # get scale
コード例 #11
0
#                             [LON - 5, LON + 5], False, parts = 3)
# Grid points (lat, lon) whose time series are extracted and pickled.
GRID_POINTS = [[50, 15], [50, 12.5], [52.5, 12.5], [52.5, 15]]

for lat, lon in GRID_POINTS:

    # Load a small (+-1 degree) monthly NCEP window around the grid point
    # at the requested pressure level; no anomalisation here.
    g = load_NCEP_data_monthly('../data/air.mon.mean.levels.nc',
                               'air',
                               date(1948, 1, 1),
                               date(2014, 1, 1), [lat - 1, lat + 1],
                               [lon - 1, lon + 1],
                               level=LEVEL,
                               anom=False)

    # Fixed: was a Python 2 print statement (`print g.data.shape`),
    # inconsistent with the print() call at the end of this script.
    print(g.data.shape)

    # Nearest grid indices to the target coordinates.
    # NOTE(review): this uses the module-level LAT/LON, not the loop's
    # lat/lon -- every window is centred on (lat, lon), so this may pick a
    # window edge; confirm LAT/LON vs lat/lon is intentional.
    lat_arg = np.argmin(np.abs(LAT - g.lats))
    lon_arg = np.argmin(np.abs(LON - g.lons))

    ts = g.data[:, lat_arg, lon_arg].copy()
    time = g.time.copy()
    loc = ("GRID | lat: %.1f, lon: %.1f" % (g.lats[lat_arg], g.lons[lon_arg]))
    g_grid = DataField(data=ts, time=time)
    g_grid.location = loc

    # Pickle the extracted series for later analysis (cPickle -> this
    # script targets Python 2).
    with open("%s_time_series_%.1fN_%.1fE.bin" % ('NCEP30hPa', lat, lon),
              'wb') as f:
        cPickle.dump({'g': g_grid}, f, protocol=cPickle.HIGHEST_PROTOCOL)

print("[%s] Dumped time-series from %.1f N and %.1f E." %
      (str(datetime.now()), g.lats[lat_arg], g.lons[lon_arg]))
コード例 #12
0
                           date(2015, 1, 1),
                           None,
                           None,
                           0,
                           dataset="NCEP",
                           sampling='monthly',
                           anom=False)

pool = Pool(NUM_WORKERS)
# Wavelet at the 1-year period over the whole network, then extract
# continuous phase and its fluctuations, in parallel.
net.wavelet(1, 'y', pool=pool, cut=1)
net.get_continuous_phase(pool=pool)
net.get_phase_fluctuations(rewrite=True, pool=pool)
pool.close()
pool.join()

# Monthly WeMO index: drop the first column (presumably the year) and
# flatten the 12 monthly columns into one series from January 1821.
nao = DataField()
raw = np.loadtxt("%sWeMO.monthly.1821-2013.txt" % (path_to_data))
raw = raw[:, 1:]
nao.data = raw.reshape(-1)
nao.create_time_array(date_from=date(1821, 1, 1), sampling='m')
nao.select_date(date(1949, 1, 1), date(2014, 1, 1))
nao.anomalise()
# Indices of January-March samples only; the data itself is untouched.
jfm_index = nao.select_months([1, 2, 3], apply_to_data=False)

# Average the JFM months of each year into one annual index value.
jfm_nao = nao.data[jfm_index]
_, _, y = nao.extract_day_month_year()
y = y[jfm_index]
ann_nao = []
for year in np.unique(y):
    ann_nao.append(np.mean(jfm_nao[np.where(year == y)[0]]))
コード例 #13
0
def _corrs_surrs_ind(args):
    """Worker: correlate one FT surrogate of the index with the network.

    ``args`` is unused; it only satisfies the pool.map calling convention.
    Reads the module-level ``index_data`` and ``net``.
    """
    surrogate = DataField()
    surrogate.data = get_single_FT_surrogate(index_data.data)
    return get_corrs(net, surrogate)
コード例 #14
0
# Continuous phase and phase fluctuations of the network, in parallel.
net.get_continuous_phase(pool=pool)
net.get_phase_fluctuations(rewrite=True, pool=pool)
pool.close()
pool.join()

# index_correlations = {}
# index_datas = {}

# # SURROGATES
# for index, ndx_type, start_date, end_year in zip(INDICES, DATE_TYPE, START_DATES, END_YEARS):
# load index
# print index

# if index != 'NINO3.4':
# Monthly NAO station index: drop the first column (presumably the year)
# and flatten the 12 monthly columns into one series from January 1865.
index_data = DataField()
raw = np.loadtxt("%sNAO.station.monthly.1865-2016.txt" % (path_to_data))
raw = raw[:, 1:]
index_data.data = raw.reshape(-1)
index_data.create_time_array(date_from=date(1865, 1, 1), sampling='m')
index_data.select_date(date(1951, 1, 1), date(2014, 1, 1))
index_data.anomalise()
# Correlate the network's phase fluctuations with the anomalised index.
index_correlations = get_corrs(net, index_data)

# with open("20CRtemp-phase-fluct-corr-with-%sindex-1950-2014.bin" % index, "wb") as f:
# cPickle.dump({('%scorrs' % index) : index_correlations[index].reshape(np.prod(index_correlations[index].shape))}, f)

# # plotting
# tit = ("ECA&D annual phase SSA RC fluctuations x %s correlations" % index)
# fname = ("../scale-nets/ECAD-SAT-annual-phase-fluc-SSA-RC-%scorrs.png" % index)
# net.quick_render(field_to_plot = index_correlations[index], tit = tit, symm = True, whole_world = False, fname = fname)
コード例 #15
0
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec


AMPLITUDE = True  # also analyse the non-anomalised (amplitude) series
PERIOD = 8  # years, period of the wavelet of interest
BINS = 8  # number of phase bins
# Optional season filter / explicit station list; None selects the
# defaults used further below.
SEASON = None  # [[12, 1, 2], [6, 7, 8]]
STATIONS = None  # ['TG_STAID000047.txt', 'TG_STAID000054.txt']


# PEP 8 fix: compare to None with ``is``, not ``==``.
if STATIONS is None:
    # Single-station mode: load the TG_STAID000027 record anomalised, and
    # optionally the raw series as well for the amplitude analysis.
    g = load_station_data('../data/TG_STAID000027.txt', date(1958, 1, 1), date(2013, 11, 10), True)
    if AMPLITUDE:
        g_amp = load_station_data('../data/TG_STAID000027.txt', date(1958, 1, 1), date(2013, 11, 10), False)
    g_data = DataField()
else:
    # Multi-station mode: create module-level variables g0, g1, ... per
    # station.  NOTE(review): writing through locals() only works because
    # at module level locals() is globals(); avoid inside functions.
    for i in range(len(STATIONS)):
        locals()['g' + str(i)] = load_station_data(STATIONS[i], date(1924,1,15), date(2013,10,1), True)
        if AMPLITUDE:
            locals()['g_amp' + str(i)] = load_station_data(STATIONS[i], date(1924,1,15), date(2013, 10, 1), False)
        locals()['g_data' + str(i)] = DataField()

# Translate the period of interest (PERIOD years) into the Morlet
# wavelet scale s0 via the Fourier factor.
k0 = 6. # wavenumber of Morlet wavelet used in analysis
y = 365.25 # year in days
fourier_factor = (4 * np.pi) / (k0 + np.sqrt(2 + np.power(k0,2)))
period = PERIOD * y # period of interest in days
s0 = period / fourier_factor # get scale 
# wavelet - data    
if STATIONS == None:
    wave, _, _, _ = wavelet_analysis.continous_wavelet(g.data, 1, False, wavelet_analysis.morlet, dj = 0, s0 = s0, j1 = 0, k0 = k0) # perform wavelet