Example #1
def load_neutron_NESDIS_data(fname, start_date, end_date, anom=True):

    raw = np.loadtxt(fname, skiprows=2)
    data = []
    time = []
    for year in range(raw.shape[0]):
        for month in range(1, 13):
            dat = float(raw[year, month])
            if dat == 9999.:
                # missing value: fill with the mean of the two preceding and
                # two following months (assumes gaps never fall in the first
                # or last two months of a row, where this would index out of
                # range)
                dat = (float(raw[year, month - 2]) + float(
                    raw[year, month - 1]) + float(raw[year, month + 1]) +
                       float(raw[year, month + 2])) / 4.
            data.append(dat)
            time.append(date(int(raw[year, 0]), month, 1).toordinal())

    g = DataField(data=np.array(data), time=np.array(time))
    # the station name is sliced from fixed character positions in the path
    g.location = ('%s cosmic data' % (fname[32].upper() + fname[33:-4]))

    g.select_date(start_date, end_date)

    if anom:
        g.anomalise()

    if NUM_SURR != 0:
        # get_seasonality() strips the seasonal cycle from g and returns its
        # parameters; the deseasonalised data are copied into the surrogate
        # container and the cycle is then restored to g
        g_surr = SurrogateField()
        seasonality = g.get_seasonality()
        g_surr.copy_field(g)

        g.return_seasonality(seasonality[0], seasonality[1], None)
    else:
        g_surr, seasonality = None, None

    return g, g_surr, seasonality
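
A minimal usage sketch for the loader above, assuming DataField, SurrogateField and the module-level NUM_SURR constant come from the surrounding project. Because g.location is sliced from fixed character positions, the hypothetical path below is padded so the station name starts at index 32:

from datetime import date

fname = '/home/nikola/Work/phd/data/nesd/oulu.txt'  # hypothetical, padded path
g, g_surr, seasonality = load_neutron_NESDIS_data(fname, date(1964, 1, 1),
                                                  date(2009, 1, 1), anom=True)
print(g.location)  # 'Oulu cosmic data'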
Example #2
def load_CR_climax_daily_data(fname, start_date, end_date, anom=False):
    from datetime import date, timedelta

    raw = np.loadtxt(fname)
    time = []
    datenow = date(1994, 1, 1)  # the daily series is assumed to start 1994-01-01
    delta = timedelta(days=1)
    for t in range(raw.shape[0]):
        time.append(datenow.toordinal())

        datenow += delta

    print(raw.shape)
    print(len(time))
    g = DataField(data=np.array(raw), time=np.array(time))
    g.location = 'Climax, CO cosmic data'

    g.select_date(start_date, end_date)

    if anom:
        g.anomalise()

    if NUM_SURR != 0:
        g_surr = SurrogateField()
        seasonality = g.get_seasonality(True)
        g_surr.copy_field(g)

        g.return_seasonality(seasonality[0], seasonality[1], seasonality[2])
    else:
        g_surr, seasonality = None, None

    return g, g_surr, seasonality
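
Usage is analogous; the time axis is hard-wired to start at 1994-01-01 with daily steps, so the selected window must fall inside the file's coverage (the file name is hypothetical):

from datetime import date

g, g_surr, seasonality = load_CR_climax_daily_data('climax_daily.txt',
                                                   date(1994, 1, 1),
                                                   date(2006, 1, 1))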
Example #3
def load_nino34_wavelet_phase(start_date, end_date, anom=True):
    raw = np.loadtxt('/home/nikola/Work/phd/data/nino34monthly.txt')
    data = []
    time = []
    for y in range(raw.shape[0]):
        for m in range(1, 13):
            dat = float(raw[y, m])
            data.append(dat)
            time.append(date(int(raw[y, 0]), m, 1).toordinal())

    g = DataField(data=np.array(data), time=np.array(time))
    g.location = "NINO3.4"
    g.select_date(start_date, end_date)

    if anom:
        g.anomalise()

    k0 = 6.  # wavenumber of Morlet wavelet used in analysis
    fourier_factor = (4 * np.pi) / (k0 + np.sqrt(2 + np.power(k0, 2)))
    per = PERIOD * 12  # period of interest in months (PERIOD is in years)
    s0 = per / fourier_factor  # get scale

    wave, _, _, _ = wvlt.continous_wavelet(g.data,
                                           1,
                                           False,
                                           wvlt.morlet,
                                           dj=0,
                                           s0=s0,
                                           j1=0,
                                           k0=6.)
    phase = np.arctan2(np.imag(wave), np.real(wave))[0, :]

    return phase
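
The period-to-scale conversion above is the standard one for the Morlet wavelet: fourier_factor = 4π / (k0 + √(2 + k0²)) ≈ 1.033 for k0 = 6, so the scale s0 is nearly equal to the period. A quick stand-alone check (PERIOD stands in for the module-level constant, in years):

import numpy as np

k0 = 6.
fourier_factor = (4 * np.pi) / (k0 + np.sqrt(2 + k0 ** 2))
print(round(fourier_factor, 3))  # 1.033

PERIOD = 5  # stand-in: period of interest in years
s0 = (PERIOD * 12) / fourier_factor  # scale in months for monthly sampling
print(round(s0, 2))  # 58.08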
Example #4
def load_cosmic_data(fname,
                     start_date,
                     end_date,
                     anom=True,
                     daily=False,
                     corrected=True):
    # `corrected` selects the corrected count-rate column instead of the raw one
    from dateutil.relativedelta import relativedelta

    with open(fname) as f:
        lines = f.read().split('\n')
    data = []
    time = []
    # the first line is assumed to begin with the start date as 'YYYY?MM'
    d = date(int(lines[0][:4]), int(lines[0][5:7]), 1)
    if daily:
        delta = timedelta(days=1)
    else:
        delta = relativedelta(months=+1)
    for line in lines:
        row = line.split(' ')
        if len(row) < 6:
            continue
        time.append(d.toordinal())
        if corrected:
            data.append(float(row[4]))  # corrected count rate
        else:
            data.append(float(row[5]))  # uncorrected count rate
        d += delta

    g = DataField(data=np.array(data), time=np.array(time))
    g.location = 'Oulu cosmic data'

    g.select_date(start_date, end_date)

    if anom:
        g.anomalise()

    # g.data = X[:, 0].copy()  # disabled: X is undefined in this scope

    if NUM_SURR != 0:
        g_surr = SurrogateField()
        seasonality = g.get_seasonality(True)
        g_surr.copy_field(g)

        g.return_seasonality(seasonality[0], seasonality[1], seasonality[2])
    else:
        g_surr, seasonality = None, None

    return g, g_surr, seasonality
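
A usage sketch, assuming an Oulu-style station file whose first line begins with the date as 'YYYY?MM' and whose rows carry the corrected and uncorrected count rates in the fifth and sixth space-separated columns (the file name is hypothetical):

from datetime import date

g, g_surr, seasonality = load_cosmic_data('oulu_monthly.dat',
                                          date(1964, 4, 1), date(2009, 1, 1),
                                          anom=True, daily=False,
                                          corrected=True)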
Example #5
# index_correlations = {}
# index_datas = {}

# # SURROGATES
# for index, ndx_type, start_date, end_year in zip(INDICES, DATE_TYPE, START_DATES, END_YEARS):
    # load index
    # print index

    # if index != 'NINO3.4':
index_data = DataField()
raw = np.loadtxt("%sNAO.station.monthly.1865-2016.txt" % (path_to_data))
raw = raw[:, 1:]
index_data.data = raw.reshape(-1)
index_data.create_time_array(date_from = date(1865, 1, 1), sampling = 'm')
index_data.select_date(date(1951, 1, 1), date(2014, 1, 1))
index_data.anomalise()
index_correlations = get_corrs(net, index_data)

    # with open("20CRtemp-phase-fluct-corr-with-%sindex-1950-2014.bin" % index, "wb") as f:
        # cPickle.dump({('%scorrs' % index) : index_correlations[index].reshape(np.prod(index_correlations[index].shape))}, f)

    # # plotting
    # tit = ("ECA&D annual phase SSA RC fluctuations x %s correlations" % index)
    # fname = ("../scale-nets/ECAD-SAT-annual-phase-fluc-SSA-RC-%scorrs.png" % index)
    # net.quick_render(field_to_plot = index_correlations[index], tit = tit, symm = True, whole_world = False, fname = fname)


# def _corrs_surrs(args):
#     index_correlations_surrs = {}
#     surr_field.construct_fourier_surrogates()
#     surr_field.add_seasonality(a[0], a[1], a[2])
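
get_corrs is project-specific and not shown here; as a rough, generic illustration of what a per-gridpoint correlation map involves, a self-contained numpy sketch (all names hypothetical):

import numpy as np

def corr_map(field, index):
    """Pearson correlation of a (time, lat, lon) field with a 1-D index."""
    f = field - field.mean(axis=0)
    i = index - index.mean()
    num = (f * i[:, None, None]).sum(axis=0)
    den = np.sqrt((f ** 2).sum(axis=0) * (i ** 2).sum())
    return num / den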
Example #6
# fragment: g1 and g2 are two previously loaded fields, spliced at index idx
time[:g1.time.shape[0]] = g1.time
time[g1.time.shape[0]:] = g2.time[idx:]

# get daily values from 6-hourly values
data_new = np.zeros((ndays // 4, g1.lats.shape[0], g1.lons.shape[0]))
time_new = np.zeros((ndays // 4))
for i in range(data_new.shape[0]):
    # average all four 6-hourly samples of day i (slice end is exclusive)
    data_new[i, ...] = np.mean(data[4 * i:4 * i + 4, ...], axis=0)
    time_new[i] = time[4 * i]

# wrap everything into a single DataField
g = DataField(data=data_new, lons=g1.lons, lats=g1.lats, time=time_new)
del g1, g2

# anomalise
g.select_date(date(1969, 2, 22), date(2014, 1, 1))
g.anomalise()
print("[%s] Data loaded. Now performing wavelet analysis..." %
      str(datetime.now()))

MEANS = True
WORKERS = 3
num_surr = 1000

k0 = 6.  # wavenumber of Morlet wavelet used in analysis
y = 365.25  # year in days
fourier_factor = (4 * np.pi) / (k0 + np.sqrt(2 + np.power(k0, 2)))
period = 8 * y  # period of interest: 8 years, in days
s0 = period / fourier_factor  # get scale

cond_means = np.zeros((8, ))
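
The 6-hourly-to-daily averaging earlier in this example can also be written as a single reshape, a common numpy idiom when the record length is an exact multiple of four; a minimal equivalent with toy dimensions:

import numpy as np

data = np.random.rand(8, 3, 4)  # two days of 6-hourly fields (toy sizes)
daily = data.reshape(-1, 4, 3, 4).mean(axis=1)  # average each block of four
print(daily.shape)  # (2, 3, 4)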
Example #7
net = ScaleSpecificNetwork('%sair.mon.mean.levels.nc' % path_to_data, 'air', 
                            date(1948,1,1), date(2016,1,1), None, None, 0, dataset = "NCEP", sampling = 'monthly', anom = False)

pool = Pool(NUM_WORKERS)
net.wavelet(1, 'y', pool = pool, cut = 1)
net.get_continuous_phase(pool = pool)
net.get_phase_fluctuations(rewrite = True, pool = pool)
pool.close()
pool.join()

nao = DataField()
raw = np.loadtxt("%sNAO.station.monthly.1865-2016.txt" % (path_to_data))
raw = raw[:, 1:]
nao.data = raw.reshape(-1)
nao.create_time_array(date_from = date(1865, 1, 1), sampling = 'm')
nao.select_date(date(1949, 1, 1), date(2015, 1, 1))
nao.anomalise()
jfm_index = nao.select_months([1,2,3], apply_to_data = False)

jfm_nao = nao.data[jfm_index]
_, _, y = nao.extract_day_month_year()
y = y[jfm_index]
ann_nao = []
for year in np.unique(y):
    ann_nao.append(np.mean(jfm_nao[np.where(year == y)[0]]))

ann_nao = np.array(ann_nao)

ann_phase_fluc = np.zeros([ann_nao.shape[0]] + list(net.get_spatial_dims()))
for lat in range(net.lats.shape[0]):
    for lon in range(net.lons.shape[0]):
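
The annual JFM mean above groups the index values by calendar year; the same grouping as a compact stand-alone sketch with toy data:

import numpy as np

jfm_nao = np.array([0.5, 0.1, -0.2, 0.4, 0.0, -0.3])  # toy JFM index values
y = np.array([1949, 1949, 1949, 1950, 1950, 1950])    # matching years
ann_nao = np.array([jfm_nao[y == year].mean() for year in np.unique(y)])
print(ann_nao)  # [0.13333333 0.03333333]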
Example #8
                           sampling='monthly',
                           anom=False)

pool = Pool(NUM_WORKERS)
net.wavelet(1, 'y', pool=pool, cut=1)
net.get_continuous_phase(pool=pool)
net.get_phase_fluctuations(rewrite=True, pool=pool)
pool.close()
pool.join()

nao = DataField()
raw = np.loadtxt("%sWeMO.monthly.1821-2013.txt" % (path_to_data))
raw = raw[:, 1:]
nao.data = raw.reshape(-1)
nao.create_time_array(date_from=date(1821, 1, 1), sampling='m')
nao.select_date(date(1949, 1, 1), date(2014, 1, 1))
nao.anomalise()
jfm_index = nao.select_months([1, 2, 3], apply_to_data=False)

jfm_nao = nao.data[jfm_index]
_, _, y = nao.extract_day_month_year()
y = y[jfm_index]
ann_nao = []
for year in np.unique(y):
    ann_nao.append(np.mean(jfm_nao[np.where(year == y)[0]]))

ann_nao = np.array(ann_nao)

ann_phase_fluc = np.zeros([ann_nao.shape[0]] + list(net.get_spatial_dims()))
for lat in range(net.lats.shape[0]):
    for lon in range(net.lons.shape[0]):