Esempio n. 1
0
def load_neutron_NESDIS_data(fname, start_date, end_date, anom=True):
    """Load monthly NESDIS neutron-monitor data into a DataField.

    The file is a whitespace table with two header rows, then one row per
    year: column 0 is the year, columns 1-12 the monthly values. The value
    9999. marks missing data and is replaced by the mean of its valid
    neighbouring months.

    Parameters
    ----------
    fname : str
        Path to the data file; the station name for ``g.location`` is
        sliced out of the path at fixed offsets (fragile, kept as-is).
    start_date, end_date : datetime.date
        Range passed to ``DataField.select_date``.
    anom : bool
        If True, anomalise the series.

    Returns
    -------
    tuple
        ``(g, g_surr, seasonality)``; the last two are None when
        ``NUM_SURR == 0``.
    """
    raw = np.loadtxt(fname, skiprows=2)
    data = []
    time = []
    for year in range(raw.shape[0]):
        for month in range(1, 13):
            dat = float(raw[year, month])
            if dat == 9999.:
                # Fill a missing month with the mean of its valid in-range
                # neighbours. The original indexed month-2..month+2
                # unconditionally, which runs past the row for month >= 11
                # (IndexError) and wraps to column -1 for month == 1, and
                # could average in neighbouring 9999 sentinels.
                neighbours = [float(raw[year, m])
                              for m in (month - 2, month - 1,
                                        month + 1, month + 2)
                              if 1 <= m <= 12 and float(raw[year, m]) != 9999.]
                if neighbours:
                    dat = sum(neighbours) / float(len(neighbours))
                # else: no valid neighbours -- leave the sentinel in place
            data.append(dat)
            time.append(date(int(raw[year, 0]), month, 1).toordinal())

    g = DataField(data=np.array(data), time=np.array(time))
    g.location = ('%s cosmic data' % (fname[32].upper() + fname[33:-4]))

    g.select_date(start_date, end_date)

    if anom:
        g.anomalise()

    if NUM_SURR != 0:
        # Keep a surrogate copy of the deseasonalised field, then restore
        # the seasonality on the returned field (trend slot unused: None).
        g_surr = SurrogateField()
        seasonality = g.get_seasonality()
        g_surr.copy_field(g)

        g.return_seasonality(seasonality[0], seasonality[1], None)
    else:
        g_surr, seasonality = None, None

    return g, g_surr, seasonality
Esempio n. 2
0
def load_CR_climax_daily_data(fname, start_date, end_date, anom=False):
    """Load the daily Climax, CO cosmic-ray series into a DataField.

    One value per row is read from *fname*; dates are assigned
    consecutively starting at 1994-01-01.

    Parameters
    ----------
    fname : str
        Path to the single-column data file.
    start_date, end_date : datetime.date
        Range passed to ``DataField.select_date``.
    anom : bool
        If True, anomalise the series.

    Returns
    -------
    tuple
        ``(g, g_surr, seasonality)``; the last two are None when
        ``NUM_SURR == 0``.
    """
    # (unused ``relativedelta`` import removed)
    raw = np.loadtxt(fname)
    time = []
    datenow = date(1994, 1, 1)
    delta = timedelta(days=1)
    for _ in range(raw.shape[0]):
        time.append(datenow.toordinal())
        datenow += delta

    # Parenthesized so these debug prints run under both Python 2 and 3
    # (identical output for a single expression).
    print(raw.shape)
    print(len(time))
    g = DataField(data=np.array(raw), time=np.array(time))
    g.location = 'Climax, CO cosmic data'

    g.select_date(start_date, end_date)

    if anom:
        g.anomalise()

    if NUM_SURR != 0:
        # Keep a surrogate copy of the deseasonalised field, then restore
        # mean, variance and trend on the returned field.
        g_surr = SurrogateField()
        seasonality = g.get_seasonality(True)
        g_surr.copy_field(g)

        g.return_seasonality(seasonality[0], seasonality[1], seasonality[2])
    else:
        g_surr, seasonality = None, None

    return g, g_surr, seasonality
Esempio n. 3
0
def load_neutron_NESDIS_data(fname, start_date, end_date, anom=True):
    """Load monthly NESDIS neutron-monitor data into a DataField.

    The file is a whitespace table with two header rows, then one row per
    year: column 0 is the year, columns 1-12 the monthly values. The value
    9999. marks missing data and is replaced by the mean of its valid
    neighbouring months.

    Parameters
    ----------
    fname : str
        Path to the data file; the station name for ``g.location`` is
        sliced out of the path at fixed offsets (fragile, kept as-is).
    start_date, end_date : datetime.date
        Range passed to ``DataField.select_date``.
    anom : bool
        If True, anomalise the series.

    Returns
    -------
    tuple
        ``(g, g_surr, seasonality)``; the last two are None when
        ``NUM_SURR == 0``.
    """
    raw = np.loadtxt(fname, skiprows=2)
    data = []
    time = []
    for year in range(raw.shape[0]):
        for month in range(1, 13):
            dat = float(raw[year, month])
            if dat == 9999.:
                # Fill a missing month with the mean of its valid in-range
                # neighbours. The original indexed month-2..month+2
                # unconditionally, which runs past the row for month >= 11
                # (IndexError) and wraps to column -1 for month == 1, and
                # could average in neighbouring 9999 sentinels.
                neighbours = [float(raw[year, m])
                              for m in (month - 2, month - 1,
                                        month + 1, month + 2)
                              if 1 <= m <= 12 and float(raw[year, m]) != 9999.]
                if neighbours:
                    dat = sum(neighbours) / float(len(neighbours))
                # else: no valid neighbours -- leave the sentinel in place
            data.append(dat)
            time.append(date(int(raw[year, 0]), month, 1).toordinal())

    g = DataField(data=np.array(data), time=np.array(time))
    g.location = ('%s cosmic data' % (fname[32].upper() + fname[33:-4]))

    g.select_date(start_date, end_date)

    if anom:
        g.anomalise()

    if NUM_SURR != 0:
        # Keep a surrogate copy of the deseasonalised field, then restore
        # the seasonality on the returned field (trend slot unused: None).
        g_surr = SurrogateField()
        seasonality = g.get_seasonality()
        g_surr.copy_field(g)

        g.return_seasonality(seasonality[0], seasonality[1], None)
    else:
        g_surr, seasonality = None, None

    return g, g_surr, seasonality
Esempio n. 4
0
def load_CR_climax_daily_data(fname, start_date, end_date, anom=False):
    """Load the daily Climax, CO cosmic-ray series into a DataField.

    One value per row is read from *fname*; dates are assigned
    consecutively starting at 1994-01-01.

    Parameters
    ----------
    fname : str
        Path to the single-column data file.
    start_date, end_date : datetime.date
        Range passed to ``DataField.select_date``.
    anom : bool
        If True, anomalise the series.

    Returns
    -------
    tuple
        ``(g, g_surr, seasonality)``; the last two are None when
        ``NUM_SURR == 0``.
    """
    # (unused ``relativedelta`` import removed)
    raw = np.loadtxt(fname)
    time = []
    datenow = date(1994, 1, 1)
    delta = timedelta(days=1)
    for _ in range(raw.shape[0]):
        time.append(datenow.toordinal())
        datenow += delta

    # Parenthesized so these debug prints run under both Python 2 and 3
    # (identical output for a single expression).
    print(raw.shape)
    print(len(time))
    g = DataField(data=np.array(raw), time=np.array(time))
    g.location = 'Climax, CO cosmic data'

    g.select_date(start_date, end_date)

    if anom:
        g.anomalise()

    if NUM_SURR != 0:
        # Keep a surrogate copy of the deseasonalised field, then restore
        # mean, variance and trend on the returned field.
        g_surr = SurrogateField()
        seasonality = g.get_seasonality(True)
        g_surr.copy_field(g)

        g.return_seasonality(seasonality[0], seasonality[1], seasonality[2])
    else:
        g_surr, seasonality = None, None

    return g, g_surr, seasonality
Esempio n. 5
0
def load_nino34_wavelet_phase(start_date, end_date, anom=True):
    """Return the Morlet-wavelet phase of the monthly NINO3.4 index.

    Reads the monthly table (column 0 = year, columns 1-12 = months),
    restricts it to [start_date, end_date], optionally anomalises, then
    runs a single-scale continuous wavelet transform at the scale
    corresponding to PERIOD years and returns the phase time series.
    """
    raw = np.loadtxt('/home/nikola/Work/phd/data/nino34monthly.txt')
    data = [float(raw[row, col])
            for row in range(raw.shape[0]) for col in range(1, 13)]
    time = [date(int(raw[row, 0]), col, 1).toordinal()
            for row in range(raw.shape[0]) for col in range(1, 13)]

    g = DataField(data=np.array(data), time=np.array(time))
    g.location = "NINO3.4"
    g.select_date(start_date, end_date)

    if anom:
        g.anomalise()

    central_wavenumber = 6.  # wavenumber of Morlet wavelet used in analysis
    fourier_factor = (4 * np.pi) / (
        central_wavenumber + np.sqrt(2 + np.power(central_wavenumber, 2)))
    months_of_interest = PERIOD * 12  # frequency of interest
    scale = months_of_interest / fourier_factor  # get scale

    wave, _, _, _ = wvlt.continous_wavelet(g.data, 1, False, wvlt.morlet,
                                           dj=0, s0=scale, j1=0, k0=6.)
    # Phase at the single analysed scale.
    return np.arctan2(np.imag(wave), np.real(wave))[0, :]
Esempio n. 6
0
def load_nino34_wavelet_phase(start_date, end_date, anom=True):
    """Return the Morlet-wavelet phase of the monthly NINO3.4 index.

    Reads the monthly table (column 0 = year, columns 1-12 = months),
    restricts it to [start_date, end_date], optionally anomalises, then
    runs a single-scale continuous wavelet transform at the scale
    corresponding to PERIOD years and returns the phase time series.
    """
    raw = np.loadtxt('/home/nikola/Work/phd/data/nino34monthly.txt')
    data = []
    time = []
    for row in range(raw.shape[0]):
        year = int(raw[row, 0])
        for col in range(1, 13):
            data.append(float(raw[row, col]))
            time.append(date(year, col, 1).toordinal())

    g = DataField(data=np.array(data), time=np.array(time))
    g.location = "NINO3.4"
    g.select_date(start_date, end_date)

    if anom:
        g.anomalise()

    central_wavenumber = 6.  # wavenumber of Morlet wavelet used in analysis
    fourier_factor = (4 * np.pi) / (
        central_wavenumber + np.sqrt(2 + np.power(central_wavenumber, 2)))
    scale = (PERIOD * 12) / fourier_factor  # scale for the period of interest

    wave, _, _, _ = wvlt.continous_wavelet(g.data, 1, False, wvlt.morlet,
                                           dj=0, s0=scale, j1=0, k0=6.)
    # Phase at the single analysed scale.
    return np.arctan2(np.imag(wave), np.real(wave))[0, :]
Esempio n. 7
0
def load_cosmic_data(fname,
                     start_date,
                     end_date,
                     anom=True,
                     daily=False,
                     corrected=True):
    """Load the Oulu cosmic-ray data file into a DataField.

    Each data line must have at least 6 space-separated fields; the date
    of the first sample is parsed from the first line (``YYYY?MM`` at the
    start), and subsequent samples are spaced monthly or daily.

    Parameters
    ----------
    fname : str
        Path to the data file.
    start_date, end_date : datetime.date
        Range passed to ``DataField.select_date``.
    anom : bool
        If True, anomalise the series.
    daily : bool
        Daily (True) or monthly (False) sample spacing.
    corrected : bool
        Use column 4 (corrected) instead of column 5 of each row.
    """
    from dateutil.relativedelta import relativedelta

    # Close the file deterministically (original leaked the handle).
    with open(fname) as f:
        lines = f.read().split('\n')

    data = []
    time = []
    d = date(int(lines[0][:4]), int(lines[0][5:7]), 1)
    delta = timedelta(days=1) if daily else relativedelta(months=+1)
    for line in lines:
        row = line.split(' ')
        if len(row) < 6:
            # skip header / malformed / empty lines
            continue
        time.append(d.toordinal())
        data.append(float(row[4]) if corrected else float(row[5]))
        d += delta

    g = DataField(data=np.array(data), time=np.array(time))
    g.location = 'Oulu cosmic data'

    g.select_date(start_date, end_date)

    if anom:
        g.anomalise()

    # NOTE(review): the original then did ``g.data = X[:, 0].copy()`` where
    # X is undefined in this scope (NameError at runtime). Removed as
    # leftover debug code -- confirm no module-level X was intended.

    if NUM_SURR != 0:
        # Keep a surrogate copy of the deseasonalised field, then restore
        # mean, variance and trend on the returned field.
        g_surr = SurrogateField()
        seasonality = g.get_seasonality(True)
        g_surr.copy_field(g)

        g.return_seasonality(seasonality[0], seasonality[1], seasonality[2])
    else:
        g_surr, seasonality = None, None

    return g, g_surr, seasonality
Esempio n. 8
0
def load_cosmic_data(fname, start_date, end_date, anom=True, daily=False, corrected=True):
    """Load the Oulu cosmic-ray data file into a DataField.

    Each data line must have at least 6 space-separated fields; the date
    of the first sample is parsed from the first line (``YYYY?MM`` at the
    start), and subsequent samples are spaced monthly or daily.

    Parameters
    ----------
    fname : str
        Path to the data file.
    start_date, end_date : datetime.date
        Range passed to ``DataField.select_date``.
    anom : bool
        If True, anomalise the series.
    daily : bool
        Daily (True) or monthly (False) sample spacing.
    corrected : bool
        Use column 4 (corrected) instead of column 5 of each row.
    """
    from dateutil.relativedelta import relativedelta

    # Close the file deterministically (original leaked the handle).
    with open(fname) as f:
        lines = f.read().split('\n')

    data = []
    time = []
    d = date(int(lines[0][:4]), int(lines[0][5:7]), 1)
    delta = timedelta(days=1) if daily else relativedelta(months=+1)
    for line in lines:
        row = line.split(' ')
        if len(row) < 6:
            # skip header / malformed / empty lines
            continue
        time.append(d.toordinal())
        data.append(float(row[4]) if corrected else float(row[5]))
        d += delta

    g = DataField(data=np.array(data), time=np.array(time))
    g.location = 'Oulu cosmic data'

    g.select_date(start_date, end_date)

    if anom:
        g.anomalise()

    # NOTE(review): the original then did ``g.data = X[:, 0].copy()`` where
    # X is undefined in this scope (NameError at runtime). Removed as
    # leftover debug code -- confirm no module-level X was intended.

    if NUM_SURR != 0:
        # Keep a surrogate copy of the deseasonalised field, then restore
        # mean, variance and trend on the returned field.
        g_surr = SurrogateField()
        seasonality = g.get_seasonality(True)
        g_surr.copy_field(g)

        g.return_seasonality(seasonality[0], seasonality[1], seasonality[2])
    else:
        g_surr, seasonality = None, None

    return g, g_surr, seasonality
Esempio n. 9
0
# Dump the single-grid-point time series nearest (LAT, LON) from a small
# NCEP reanalysis window around each grid point below. Relies on
# module-level LEVEL, LAT, LON, load_NCEP_data_monthly, DataField, cPickle.
GRID_POINTS = [[50, 15], [50, 12.5], [52.5, 12.5], [52.5, 15]]

for lat, lon in GRID_POINTS:

    g = load_NCEP_data_monthly(
        "../data/air.mon.mean.levels.nc",
        "air",
        date(1948, 1, 1),
        date(2014, 1, 1),
        [lat - 1, lat + 1],
        [lon - 1, lon + 1],
        level=LEVEL,
        anom=False,
    )

    # Parenthesized so this debug print runs under both Python 2 and 3.
    print(g.data.shape)

    # Indices of the grid node nearest the requested coordinates.
    lat_arg = np.argmin(np.abs(LAT - g.lats))
    lon_arg = np.argmin(np.abs(LON - g.lons))

    ts = g.data[:, lat_arg, lon_arg].copy()
    time = g.time.copy()
    loc = "GRID | lat: %.1f, lon: %.1f" % (g.lats[lat_arg], g.lons[lon_arg])
    g_grid = DataField(data=ts, time=time)
    g_grid.location = loc

    with open("%s_time_series_%.1fN_%.1fE.bin" % ("NCEP30hPa", lat, lon), "wb") as f:
        cPickle.dump({"g": g_grid}, f, protocol=cPickle.HIGHEST_PROTOCOL)

# NOTE(review): reports only the coordinates of the last loop iteration.
print ("[%s] Dumped time-series from %.1f N and %.1f E." % (str(datetime.now()), g.lats[lat_arg], g.lons[lon_arg]))
Esempio n. 10
0
#                             [LON - 5, LON + 5], False, parts = 3)
# Dump the single-grid-point time series nearest (LAT, LON) from a small
# NCEP reanalysis window around each grid point below. Relies on
# module-level LEVEL, LAT, LON, load_NCEP_data_monthly, DataField, cPickle.
GRID_POINTS = [[50, 15], [50, 12.5], [52.5, 12.5], [52.5, 15]]

for lat, lon in GRID_POINTS:

    g = load_NCEP_data_monthly('../data/air.mon.mean.levels.nc',
                               'air',
                               date(1948, 1, 1),
                               date(2014, 1, 1), [lat - 1, lat + 1],
                               [lon - 1, lon + 1],
                               level=LEVEL,
                               anom=False)

    # Parenthesized so this debug print runs under both Python 2 and 3.
    print(g.data.shape)

    # Indices of the grid node nearest the requested coordinates.
    lat_arg = np.argmin(np.abs(LAT - g.lats))
    lon_arg = np.argmin(np.abs(LON - g.lons))

    ts = g.data[:, lat_arg, lon_arg].copy()
    time = g.time.copy()
    loc = ("GRID | lat: %.1f, lon: %.1f" % (g.lats[lat_arg], g.lons[lon_arg]))
    g_grid = DataField(data=ts, time=time)
    g_grid.location = loc

    with open("%s_time_series_%.1fN_%.1fE.bin" % ('NCEP30hPa', lat, lon),
              'wb') as f:
        cPickle.dump({'g': g_grid}, f, protocol=cPickle.HIGHEST_PROTOCOL)

# NOTE(review): reports only the coordinates of the last loop iteration.
print("[%s] Dumped time-series from %.1f N and %.1f E." %
      (str(datetime.now()), g.lats[lat_arg], g.lons[lon_arg]))