def load_cosmic_data(fname,
                     start_date,
                     end_date,
                     anom=True,
                     daily=False,
                     corrected=True):
    # 'corrected' chooses the corrected data column (row[4]) instead of the uncorrected one (row[5])
    from datetime import date, timedelta
    from dateutil.relativedelta import relativedelta

    with open(fname) as f:
        raw = f.read()
    lines = raw.split('\n')
    data = []
    time = []
    d = date(int(lines[0][:4]), int(lines[0][5:7]), 1)
    if not daily:
        delta = relativedelta(months=+1)
    else:
        delta = timedelta(days=1)
    for line in lines:
        row = line.split(' ')
        if len(row) < 6:
            continue
        time.append(d.toordinal())
        if corrected:
            data.append(float(row[4]))
        else:
            data.append(float(row[5]))
        d += delta

    g = DataField(data=np.array(data), time=np.array(time))
    g.location = 'Oulu cosmic data'

    g.select_date(start_date, end_date)

    if anom:
        g.anomalise()

    # X is taken from module scope here; it is not defined inside this function
    g.data = X[:, 0].copy()

    if NUM_SURR != 0:
        g_surr = SurrogateField()
        seasonality = g.get_seasonality(True)
        g_surr.copy_field(g)

        g.return_seasonality(seasonality[0], seasonality[1], seasonality[2])
    else:
        g_surr, seasonality = None, None

    return g, g_surr, seasonality
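
A minimal call sketch, for illustration only: the file name and dates below are placeholders, and it assumes that DataField, SurrogateField, numpy as np, and the module-level names NUM_SURR and X used inside the function are already defined, as in the surrounding script.

from datetime import date

g, g_surr, seasonality = load_cosmic_data('oulu_cosmic_monthly.dat',
                                          date(1965, 1, 1), date(2009, 1, 1),
                                          anom=True, daily=False, corrected=True)
print(g.data.shape, g.time.shape)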
Example #3
start_year = date.fromordinal(g.time[0]).year + 4
sm = date.fromordinal(g.time[0]).month
sd = date.fromordinal(g.time[0]).day

start_idx = 0
end_idx = to_wavelet

_, _, idx = g.get_data_of_precise_length(WINDOW_LENGTH,
                                         date.fromordinal(g.time[4 * y]), None,
                                         False)
first_mid_year = date.fromordinal(g.time[idx[0] + WINDOW_LENGTH // 2]).year

while end_idx < g.data.shape[0]:

    # data
    g_working.data = g.data[start_idx:end_idx].copy()
    g_working.time = g.time[start_idx:end_idx].copy()
    if np.all(np.isnan(g_working.data) == False):
        wave, _, _, _ = wavelet_analysis.continous_wavelet(
            g_working.data,
            1,
            False,
            wavelet_analysis.morlet,
            dj=0,
            s0=s0,
            j1=0,
            k0=k0)  # perform wavelet
        phase = np.arctan2(np.imag(wave),
                           np.real(wave))  # get phases from oscillatory modes
        start_cut = date(start_year + cnt * WINDOW_SHIFT, sm, sd)
        idx = g_working.get_data_of_precise_length(WINDOW_LENGTH, start_cut, None, False)
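
The phase extraction above relies on the project's wavelet_analysis module. As a self-contained illustration of the same idea (instantaneous phase and amplitude of an oscillatory mode), here is a sketch using scipy.signal.hilbert on a toy signal; it is analogous to, not identical with, the wavelet approach used in these scripts.

import numpy as np
from scipy.signal import hilbert

t = np.arange(3650)
signal = np.sin(2 * np.pi * t / 365.25)   # toy annual cycle, daily sampling
analytic = hilbert(signal)                # analytic signal
phase = np.angle(analytic)                # instantaneous phase in (-pi, pi]
amplitude = np.abs(analytic)              # instantaneous amplitude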
Example #4
    fit_x = np.vstack([reconstruction, np.ones(reconstruction.shape[0])]).T
    m, c = np.linalg.lstsq(fit_x, g_amp.data)[0]
    amplitude = m * amplitude + c
    print(
        "Oscillatory series fitted to SAT data with coeff. %.3f and intercept %.3f"
        % (m, c))

cond_means = np.zeros((BINS, ))


def get_equidistant_bins(num):
    return np.array(np.linspace(-np.pi, np.pi, num + 1))


start_cut = date(1958, 1, 1)
g_data.data, g_data.time, idx = g.get_data_of_precise_length(
    '16k', start_cut, None, False)
phase = phase[0, idx[0]:idx[1]]
if AMPLITUDE:
    amplitude = amplitude[idx[0]:idx[1]]
# subselect season
if EVAL_SEASON:
    ndx_season = g_data.select_months(season)
    phase = phase[ndx_season]
    if AMPLITUDE:
        amplitude = amplitude[ndx_season]

phase_bins = get_equidistant_bins(BINS)  # equidistant bins

if AMPLITUDE:
    data_mom = func(amplitude)
else:
    data_mom = func(g_data.data)  # counterpart of the AMPLITUDE branch above
Example #5
start_idx = 0
end_idx = to_wavelet

_, _, idx = g.get_data_of_precise_length(WINDOW_LENGTH, date.fromordinal(g.time[4 * y]), None, False)
first_mid_year = date.fromordinal(g.time[idx[0] + WINDOW_LENGTH // 2]).year
last_mid_year = first_mid_year
if PLOT_PHASE:
    phase_total = []
if PLOT_PHASE and not BEGIN:
    last_day = g.get_date_from_ndx(4 * y)

while end_idx < g.data.shape[0]:

    # data
    g_working.data = g.data[start_idx:end_idx].copy()
    g_working.time = g.time[start_idx:end_idx].copy()
    if AMPLITUDE:
        g_working_amp.data = g_amp.data[start_idx:end_idx].copy()
    if np.all(np.isnan(g_working.data) == False):
        wave, _, _, _ = wavelet_analysis.continous_wavelet(
            g_working.data, 1, False, wavelet_analysis.morlet, dj=0, s0=s0, j1=0, k0=k0
        )  # perform wavelet
        phase = np.arctan2(np.imag(wave), np.real(wave))  # get phases from oscillatory modes

        if AMPLITUDE:
            wave, _, _, _ = wavelet_analysis.continous_wavelet(
                g_working_amp.data, 1, False, wavelet_analysis.morlet, dj=0, s0=s0_amp, j1=0, k0=k0
            )  # perform wavelet
            amplitude = np.sqrt(np.power(np.real(wave), 2) + np.power(np.imag(wave), 2))
            amplitude = amplitude[0, :]

Example #6
net = ScaleSpecificNetwork('%sair.mon.mean.levels.nc' % path_to_data, 'air',
                           date(1948, 1, 1), date(2016, 1, 1), None, None, 0,
                           dataset="NCEP", sampling='monthly', anom=False)

pool = Pool(NUM_WORKERS)
net.wavelet(1, 'y', pool = pool, cut = 1)
net.get_continuous_phase(pool = pool)
net.get_phase_fluctuations(rewrite = True, pool = pool)
pool.close()
pool.join()

nao = DataField()
raw = np.loadtxt("%sNAO.station.monthly.1865-2016.txt" % (path_to_data))
raw = raw[:, 1:]
nao.data = raw.reshape(-1)
nao.create_time_array(date_from = date(1865, 1, 1), sampling = 'm')
nao.select_date(date(1949, 1, 1), date(2015, 1, 1))
nao.anomalise()
jfm_index = nao.select_months([1,2,3], apply_to_data = False)

jfm_nao = nao.data[jfm_index]
_, _, y = nao.extract_day_month_year()
y = y[jfm_index]
ann_nao = []
for year in np.unique(y):
    ann_nao.append(np.mean(jfm_nao[np.where(year == y)[0]]))
    
ann_nao = np.array(ann_nao)

ann_phase_fluc = np.zeros([ann_nao.shape[0]] + list(net.get_spatial_dims()))
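
The array ann_phase_fluc is allocated but not filled in this excerpt. A plausible continuation, sketched here purely as an illustration (how ann_phase_fluc gets filled depends on ScaleSpecificNetwork internals not shown, and two spatial dimensions are assumed), is a per-gridpoint correlation of the annual JFM index with the annual phase-fluctuation field:

corrs = np.empty(ann_phase_fluc.shape[1:])
for i in range(corrs.shape[0]):
    for j in range(corrs.shape[1]):
        corrs[i, j] = np.corrcoef(ann_nao, ann_phase_fluc[:, i, j])[0, 1]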
Example #7
    if USE_SURR:

        result_temp_surr = np.zeros((NUM_SURR, 8, 2))
        for surr in range(NUM_SURR):
            sg.construct_surrogates_with_residuals()
            sg.add_seasonality(mean[:-1], var[:-1], trend[:-1])  # add the seasonality back so the surrogate mimics the SAT data
            g.data = sg.surr_data.copy()
            tg_sat = g.copy_data()
            g.time = g.time[:-1]
            g.anomalise()

            g_temp = DataField()
            tg_temp = tg_sat.copy()
            sy = int(MIDDLE_YEAR - (WINDOW_LENGTH / year) / 2)
            g_temp.data = g.data.copy()
            g_temp.time = g.time.copy()
            start = g_temp.find_date_ndx(date(sy - 4, sm, sd))
            end = start + 16384 if WINDOW_LENGTH < 16000 else start + 32768

            g_temp.data = g_temp.data[start:end]
            g_temp.time = g_temp.time[start:end]
            tg_temp = tg_temp[start:end]

            k0 = 6.  # wavenumber of Morlet wavelet used in analysis
            fourier_factor = (4 * np.pi) / (k0 + np.sqrt(2 + np.power(k0, 2)))
            period = PERIOD * year  # frequency of interest
            s0 = period / fourier_factor  # get scale
            wave, _, _, _ = wvlt.continous_wavelet(g_temp.data, 1, False, wvlt.morlet,
                                                   dj=0, s0=s0, j1=0, k0=k0)  # perform wavelet
            phase = np.arctan2(np.imag(wave), np.real(wave))  # get phases from oscillatory modes
Example #8
        resq.put([cond_temp, amp_diff, data_diff, data_diff])



# plt.figure(figsize=(20,10))
# plt.plot(amplitude, color = '#867628', linewidth = 2)
# plt.plot(g_amp.data, color = '#004739', linewidth = 1)
# plt.show()

cond_means = np.zeros((BINS, 2))

start_cut = date(1962,1,1) # 1958, 1, 1
# l = int(16384 - 8*y)
l = 17532
g_data.data, g_data.time, idx = g.get_data_of_precise_length(l, start_cut, None, False) # 16k
phase = phase[0, idx[0] : idx[1]]
amplitude = amplitude[idx[0] : idx[1]]
# amp_to_plot = amp_to_plot[idx[0] : idx[1]]
# amp_to_plot = g_data.copy_data()


phase_bins = get_equidistant_bins(BINS)

for i in range(cond_means.shape[0]):
    ndx = ((phase >= phase_bins[i]) & (phase <= phase_bins[i+1]))
    cond_means[i, 0] = np.mean(amplitude[ndx])
    cond_means[i, 1] = np.mean(g_data.data[ndx])
    # cond_means[i] = np.mean(g_data.data[ndx])
    # if SURR:
    #     cond_means[i, 1] = np.mean(amplitude2[ndx])
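
For illustration, an essentially equivalent vectorized way to compute the same conditional means via np.digitize (note that the explicit loop above also counts values lying exactly on a bin edge in both neighbouring bins, which this version does not):

bin_idx = np.clip(np.digitize(phase, phase_bins) - 1, 0, BINS - 1)
cond_means_alt = np.array([[np.mean(amplitude[bin_idx == b]),
                            np.mean(g_data.data[bin_idx == b])]
                           for b in range(BINS)])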
Example #9
if STATIONS is None:
    if SEASON is None:
        cond_means = np.zeros((BINS, 2, 1))
    else:
        cond_means = np.zeros((BINS, 2, 2))
else:
    cond_means = np.zeros((BINS, 2, 2))

def get_equidistant_bins(num):
    return np.array(np.linspace(-np.pi, np.pi, num+1))

# start_cut = date(1958,1,1)
start_cut = date(1962,1,1)
l = 17532
if STATIONS is None:
    g_data.data, g_data.time, idx = g.get_data_of_precise_length(l, start_cut, None, False)
    print(g_data.get_date_from_ndx(0), g_data.get_date_from_ndx(-1))
    phase = phase[0, idx[0] : idx[1]]
    if AMPLITUDE:
        amplitude = amplitude[idx[0] : idx[1]]
else:
    for i in range(len(STATIONS)):
        locals()['g_data' + str(i)].data, locals()['g_data' + str(i)].time, idx = locals()['g' + str(i)].get_data_of_precise_length('16k', start_cut, None, False)
        locals()['phase' + str(i)] = locals()['phase' + str(i)][0, idx[0] : idx[1]]
        if AMPLITUDE:
            locals()['amplitude' + str(i)] = locals()['amplitude' + str(i)][idx[0] : idx[1]]

phase_bins = get_equidistant_bins(BINS)
mons = {0: 'J', 1: 'F', 2: 'M', 3: 'A', 4: 'M', 5: 'J', 6: 'J', 7: 'A', 8: 'S', 9: 'O', 10: 'N', 11: 'D'}
if SEASON is not None:
    idx = 0
Example #11
                           0,
                           dataset="NCEP",
                           sampling='monthly',
                           anom=False)

pool = Pool(NUM_WORKERS)
net.wavelet(1, 'y', pool=pool, cut=1)
net.get_continuous_phase(pool=pool)
net.get_phase_fluctuations(rewrite=True, pool=pool)
pool.close()
pool.join()

nao = DataField()
raw = np.loadtxt("%sWeMO.monthly.1821-2013.txt" % (path_to_data))
raw = raw[:, 1:]
nao.data = raw.reshape(-1)
nao.create_time_array(date_from=date(1821, 1, 1), sampling='m')
nao.select_date(date(1949, 1, 1), date(2014, 1, 1))
nao.anomalise()
jfm_index = nao.select_months([1, 2, 3], apply_to_data=False)

jfm_nao = nao.data[jfm_index]
_, _, y = nao.extract_day_month_year()
y = y[jfm_index]
ann_nao = []
for year in np.unique(y):
    ann_nao.append(np.mean(jfm_nao[np.where(year == y)[0]]))

ann_nao = np.array(ann_nao)

ann_phase_fluc = np.zeros([ann_nao.shape[0]] + list(net.get_spatial_dims()))
Example #12
import calendar

ts = OscillatoryTimeSeries('TG_STAID000027.txt', date(1834, 7, 28),
                           date(2014, 1, 1), False)
sg = SurrogateField()
g = DataField()

daily_var = np.zeros((365, 3))
mean, var_data, trend = ts.g.get_seasonality(True)
sg.copy_field(ts.g)

# MF: multifractal surrogates
sg.construct_multifractal_surrogates()
sg.add_seasonality(mean, var_data, trend)

g.data = sg.surr_data.copy()
g.time = sg.time.copy()

_, var_surr_MF, _ = g.get_seasonality(True)

# FT: Fourier-transform surrogates
sg.construct_fourier_surrogates_spatial()
sg.add_seasonality(mean, var_data, trend)

g.data = sg.surr_data.copy()
g.time = sg.time.copy()

_, var_surr_FT, _ = g.get_seasonality(True)

delta = timedelta(days=1)
d = date(1895, 1, 1)
Example #13
def _corrs_surrs_ind(args):
    index_surr = DataField()
    index_surr.data = get_single_FT_surrogate(index_data.data)
    index_correlations_surrs = get_corrs(net, index_surr)

    return index_correlations_surrs
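
A sketch of how this worker might be driven, assuming (as in the other examples) that NUM_SURR, NUM_WORKERS, net, index_data, get_corrs and numpy as np are defined at module level; the pool.map call itself is illustrative and not taken from the source:

from multiprocessing import Pool

pool = Pool(NUM_WORKERS)
# each call builds one FT surrogate of the index and correlates it with the network field
surr_corrs = pool.map(_corrs_surrs_ind, range(NUM_SURR))
pool.close()
pool.join()
surr_corrs = np.array(surr_corrs)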
Example #14
pool.close()
pool.join()

# index_correlations = {}
# index_datas = {}

# # SURROGATES
# for index, ndx_type, start_date, end_year in zip(INDICES, DATE_TYPE, START_DATES, END_YEARS):
# load index
# print index

# if index != 'NINO3.4':
index_data = DataField()
raw = np.loadtxt("%sNAO.station.monthly.1865-2016.txt" % (path_to_data))
raw = raw[:, 1:]
index_data.data = raw.reshape(-1)
index_data.create_time_array(date_from=date(1865, 1, 1), sampling='m')
index_data.select_date(date(1951, 1, 1), date(2014, 1, 1))
index_data.anomalise()
index_correlations = get_corrs(net, index_data)

# with open("20CRtemp-phase-fluct-corr-with-%sindex-1950-2014.bin" % index, "wb") as f:
# cPickle.dump({('%scorrs' % index) : index_correlations[index].reshape(np.prod(index_correlations[index].shape))}, f)

# # plotting
# tit = ("ECA&D annual phase SSA RC fluctuations x %s correlations" % index)
# fname = ("../scale-nets/ECAD-SAT-annual-phase-fluc-SSA-RC-%scorrs.png" % index)
# net.quick_render(field_to_plot = index_correlations[index], tit = tit, symm = True, whole_world = False, fname = fname)

# def _corrs_surrs(args):
#     index_correlations_surrs = {}