def load_CR_climax_daily_data(fname, start_date, end_date, anom=False):
    """Load the Climax, CO daily cosmic-ray record from a plain-text file.

    Builds a daily time axis starting at 1 Jan 1994 (one ordinal per data
    row), wraps the series in a DataField, restricts it to the requested
    window and optionally removes the seasonal cycle.

    Parameters
    ----------
    fname : str
        Path of a whitespace-separated file readable by ``np.loadtxt``.
    start_date, end_date : datetime.date
        Window passed to ``DataField.select_date``.
    anom : bool
        If True, anomalise the series.

    Returns
    -------
    (g, g_surr, seasonality)
        ``g`` is the DataField; ``g_surr`` a SurrogateField copy of the
        deseasonalised data for surrogate generation; ``seasonality`` the
        (mean, var, trend) removed from ``g``.  The latter two are None
        when the module-level ``NUM_SURR`` is 0.
    """
    raw = np.loadtxt(fname)
    time = []
    # one sample per day, starting 1 Jan 1994 -- TODO confirm against data file
    datenow = date(1994, 1, 1)
    delta = timedelta(days=1)
    for t in range(raw.shape[0]):
        time.append(datenow.toordinal())
        datenow += delta
    # sanity output: number of samples vs. length of constructed time axis
    print(raw.shape)
    print(len(time))
    g = DataField(data=np.array(raw), time=np.array(time))
    g.location = 'Climax, CO cosmic data'
    g.select_date(start_date, end_date)
    if anom:
        g.anomalise()
    if NUM_SURR != 0:
        g_surr = SurrogateField()
        # deseasonalise (with detrending), copy the clean field for the
        # surrogates, then restore the seasonal cycle on the original
        seasonality = g.get_seasonality(True)
        g_surr.copy_field(g)
        g.return_seasonality(seasonality[0], seasonality[1], seasonality[2])
    else:
        g_surr, seasonality = None, None
    return g, g_surr, seasonality
def load_neutron_NESDIS_data(fname, start_date, end_date, anom=True):
    """Load a monthly NESDIS neutron-monitor record.

    The file (after 2 header rows) has one row per year: column 0 is the
    year, columns 1-12 the monthly values; 9999. marks missing data.
    A missing month is filled with the mean of its surrounding months in
    the same row.

    Parameters
    ----------
    fname : str
        Path of the data file.  Characters ``fname[32:-4]`` label the
        station -- presumably a fixed directory layout; TODO confirm.
    start_date, end_date : datetime.date
        Window passed to ``DataField.select_date``.
    anom : bool
        If True, anomalise the monthly series.

    Returns
    -------
    (g, g_surr, seasonality)
        As in the other loaders; the surrogate helpers are None when the
        module-level ``NUM_SURR`` is 0.
    """
    raw = np.loadtxt(fname, skiprows=2)
    data = []
    time = []
    for year in range(raw.shape[0]):
        for month in range(1, 13):
            dat = float(raw[year, month])
            if dat == 9999.:
                # Fill the gap with the mean of neighbouring months,
                # clamped to the valid month columns 1..12.  The previous
                # unclamped version read the year column / wrapped to the
                # last column for January gaps and indexed out of bounds
                # for November/December gaps.  Neighbours that are
                # themselves missing (9999.) are excluded.
                neighbours = [m for m in (month - 2, month - 1, month + 1, month + 2)
                              if 1 <= m <= 12]
                vals = [float(raw[year, m]) for m in neighbours
                        if float(raw[year, m]) != 9999.]
                dat = sum(vals) / len(vals) if vals else 9999.
            data.append(dat)
            time.append(date(int(raw[year, 0]), month, 1).toordinal())
    g = DataField(data=np.array(data), time=np.array(time))
    g.location = ('%s cosmic data' % (fname[32].upper() + fname[33:-4]))
    g.select_date(start_date, end_date)
    if anom:
        g.anomalise()
    if NUM_SURR != 0:
        g_surr = SurrogateField()
        # monthly data: seasonality without detrending (trend slot is None)
        seasonality = g.get_seasonality()
        g_surr.copy_field(g)
        g.return_seasonality(seasonality[0], seasonality[1], None)
    else:
        g_surr, seasonality = None, None
    return g, g_surr, seasonality
def load_neutron_NESDIS_data(fname, start_date, end_date, anom=True):
    """Load a monthly NESDIS neutron-monitor record.

    The file (after 2 header rows) has one row per year: column 0 is the
    year, columns 1-12 the monthly values; 9999. marks missing data.
    A missing month is filled with the mean of its surrounding months in
    the same row.

    Parameters
    ----------
    fname : str
        Path of the data file.  Characters ``fname[32:-4]`` label the
        station -- presumably a fixed directory layout; TODO confirm.
    start_date, end_date : datetime.date
        Window passed to ``DataField.select_date``.
    anom : bool
        If True, anomalise the monthly series.

    Returns
    -------
    (g, g_surr, seasonality)
        As in the other loaders; the surrogate helpers are None when the
        module-level ``NUM_SURR`` is 0.
    """
    raw = np.loadtxt(fname, skiprows=2)
    data = []
    time = []
    for year in range(raw.shape[0]):
        for month in range(1, 13):
            dat = float(raw[year, month])
            if dat == 9999.:
                # Fill the gap with the mean of neighbouring months,
                # clamped to the valid month columns 1..12.  The previous
                # unclamped version read the year column / wrapped to the
                # last column for January gaps and indexed out of bounds
                # for November/December gaps.  Neighbours that are
                # themselves missing (9999.) are excluded.
                neighbours = [m for m in (month - 2, month - 1, month + 1, month + 2)
                              if 1 <= m <= 12]
                vals = [float(raw[year, m]) for m in neighbours
                        if float(raw[year, m]) != 9999.]
                dat = sum(vals) / len(vals) if vals else 9999.
            data.append(dat)
            time.append(date(int(raw[year, 0]), month, 1).toordinal())
    g = DataField(data=np.array(data), time=np.array(time))
    g.location = ('%s cosmic data' % (fname[32].upper() + fname[33:-4]))
    g.select_date(start_date, end_date)
    if anom:
        g.anomalise()
    if NUM_SURR != 0:
        g_surr = SurrogateField()
        # monthly data: seasonality without detrending (trend slot is None)
        seasonality = g.get_seasonality()
        g_surr.copy_field(g)
        g.return_seasonality(seasonality[0], seasonality[1], None)
    else:
        g_surr, seasonality = None, None
    return g, g_surr, seasonality
def load_CR_climax_daily_data(fname, start_date, end_date, anom=False):
    """Load the Climax, CO daily cosmic-ray record from a plain-text file.

    Builds a daily time axis starting at 1 Jan 1994 (one ordinal per data
    row), wraps the series in a DataField, restricts it to the requested
    window and optionally removes the seasonal cycle.

    Parameters
    ----------
    fname : str
        Path of a whitespace-separated file readable by ``np.loadtxt``.
    start_date, end_date : datetime.date
        Window passed to ``DataField.select_date``.
    anom : bool
        If True, anomalise the series.

    Returns
    -------
    (g, g_surr, seasonality)
        ``g`` is the DataField; ``g_surr`` a SurrogateField copy of the
        deseasonalised data for surrogate generation; ``seasonality`` the
        (mean, var, trend) removed from ``g``.  The latter two are None
        when the module-level ``NUM_SURR`` is 0.
    """
    raw = np.loadtxt(fname)
    time = []
    # one sample per day, starting 1 Jan 1994 -- TODO confirm against data file
    datenow = date(1994, 1, 1)
    delta = timedelta(days=1)
    for t in range(raw.shape[0]):
        time.append(datenow.toordinal())
        datenow += delta
    # sanity output: number of samples vs. length of constructed time axis
    print(raw.shape)
    print(len(time))
    g = DataField(data=np.array(raw), time=np.array(time))
    g.location = 'Climax, CO cosmic data'
    g.select_date(start_date, end_date)
    if anom:
        g.anomalise()
    if NUM_SURR != 0:
        g_surr = SurrogateField()
        # deseasonalise (with detrending), copy the clean field for the
        # surrogates, then restore the seasonal cycle on the original
        seasonality = g.get_seasonality(True)
        g_surr.copy_field(g)
        g.return_seasonality(seasonality[0], seasonality[1], seasonality[2])
    else:
        g_surr, seasonality = None, None
    return g, g_surr, seasonality
def load_cosmic_data(fname, start_date, end_date, anom=True, daily=False, corrected=True):
    """Load the Oulu cosmic-ray record at monthly or daily resolution.

    The first timestamp is parsed from the "YYYY.MM" prefix of the first
    line; every subsequent data line (at least 6 space-separated fields)
    advances the date by one month (default) or one day.

    Parameters
    ----------
    fname : str
        Path of the text data file.
    start_date, end_date : datetime.date
        Window passed to ``DataField.select_date``.
    anom : bool
        If True, anomalise the series.
    daily : bool
        If True, the file is daily; otherwise monthly.
    corrected : bool
        If True use field 4 (corrected count rate), else field 5.

    Returns
    -------
    (g, g_surr, seasonality)
        As in the other loaders; the surrogate helpers are None when the
        module-level ``NUM_SURR`` is 0.
    """
    from dateutil.relativedelta import relativedelta
    with open(fname) as f:
        raw = f.read()
    lines = raw.split('\n')
    data = []
    time = []
    d = date(int(lines[0][:4]), int(lines[0][5:7]), 1)
    if not daily:
        delta = relativedelta(months=+1)
    else:
        delta = timedelta(days=1)
    for line in lines:
        row = line.split(' ')
        if len(row) < 6:
            continue  # skip headers / short lines
        time.append(d.toordinal())
        if corrected:
            data.append(float(row[4]))
        else:
            data.append(float(row[5]))
        d += delta
    g = DataField(data=np.array(data), time=np.array(time))
    g.location = 'Oulu cosmic data'
    g.select_date(start_date, end_date)
    if anom:
        g.anomalise()
    # NOTE(review): the original assigned ``g.data = X[:, 0].copy()`` here,
    # but no name ``X`` is defined in this scope -- it would raise NameError.
    # Removed as leftover debug code.
    if NUM_SURR != 0:
        g_surr = SurrogateField()
        seasonality = g.get_seasonality(True)
        g_surr.copy_field(g)
        g.return_seasonality(seasonality[0], seasonality[1], seasonality[2])
    else:
        g_surr, seasonality = None, None
    return g, g_surr, seasonality
def load_cosmic_data(fname, start_date, end_date, anom=True, daily=False, corrected=True):
    """Load the Oulu cosmic-ray record at monthly or daily resolution.

    The first timestamp is parsed from the "YYYY.MM" prefix of the first
    line; every subsequent data line (at least 6 space-separated fields)
    advances the date by one month (default) or one day.

    Parameters
    ----------
    fname : str
        Path of the text data file.
    start_date, end_date : datetime.date
        Window passed to ``DataField.select_date``.
    anom : bool
        If True, anomalise the series.
    daily : bool
        If True, the file is daily; otherwise monthly.
    corrected : bool
        If True use field 4 (corrected count rate), else field 5.

    Returns
    -------
    (g, g_surr, seasonality)
        As in the other loaders; the surrogate helpers are None when the
        module-level ``NUM_SURR`` is 0.
    """
    from dateutil.relativedelta import relativedelta
    with open(fname) as f:
        raw = f.read()
    lines = raw.split('\n')
    data = []
    time = []
    d = date(int(lines[0][:4]), int(lines[0][5:7]), 1)
    if not daily:
        delta = relativedelta(months=+1)
    else:
        delta = timedelta(days=1)
    for line in lines:
        row = line.split(' ')
        if len(row) < 6:
            continue  # skip headers / short lines
        time.append(d.toordinal())
        if corrected:
            data.append(float(row[4]))
        else:
            data.append(float(row[5]))
        d += delta
    g = DataField(data=np.array(data), time=np.array(time))
    g.location = 'Oulu cosmic data'
    g.select_date(start_date, end_date)
    if anom:
        g.anomalise()
    # NOTE(review): the original assigned ``g.data = X[:, 0].copy()`` here,
    # but no name ``X`` is defined in this scope -- it would raise NameError.
    # Removed as leftover debug code.
    if NUM_SURR != 0:
        g_surr = SurrogateField()
        seasonality = g.get_seasonality(True)
        g_surr.copy_field(g)
        g.return_seasonality(seasonality[0], seasonality[1], seasonality[2])
    else:
        g_surr, seasonality = None, None
    return g, g_surr, seasonality
# surrogates if NUM_SURR != 0: surr_completed = 0 diffs = np.zeros((NUM_SURR, )) mean_vars = np.zeros_like(diffs) g_surrs.data = g.data[start_idx:end_idx].copy() g_surrs.time = g.time[start_idx:end_idx].copy() if np.all(np.isnan(g_surrs.data) == False): # construct the job queue jobQ = Queue() resQ = Queue() for i in range(NUM_SURR): jobQ.put(1) for i in range(WORKERS): jobQ.put(None) a = g_surrs.get_seasonality(DETREND=True) sg = SurrogateField() sg.copy_field(g_surrs) if SURR_TYPE == 'AR': sg.prepare_AR_surrogates() workers = [ Process(target=_cond_difference_surrogates, args=(sg, g_surrs, a, start_cut, jobQ, resQ)) for iota in range(WORKERS) ] for w in workers: w.start() while surr_completed < NUM_SURR: # get result diff, meanVar = resQ.get() diffs[surr_completed] = diff
# NOTE(review): script fragment (collapsed formatting reconstructed); this
# chunk is truncated mid-expression in the final `workers = [Process(...`
# list build.  Relies on module-level g_amp, g_surrs, g_surrs_amp, CONDITION,
# AMPLITUDE, NUM_SURR, WORKERS, SURR_TYPE and start_idx/end_idx.
g_surrs_amp.time = g_amp.time[start_idx:end_idx].copy()
if CONDITION:
    total_surrogates_condition = []
if np.all(np.isnan(g_surrs.data) == False):
    # construct the job queue
    jobQ = Queue()
    resQ = Queue()
    if CONDITION:
        for i in range(3 * NUM_SURR):
            jobQ.put(1)         # three jobs per surrogate when conditioning
    else:
        for i in range(NUM_SURR):
            jobQ.put(1)
    for i in range(WORKERS):
        jobQ.put(None)          # poison pill, one per worker
    a = g_surrs.get_seasonality(DETREND=True)
    sg = SurrogateField()
    sg.copy_field(g_surrs)
    if AMPLITUDE:
        # separate surrogate field for the amplitude series
        a_amp = g_surrs_amp.get_seasonality(True)
        sg_amp = SurrogateField()
        sg_amp.copy_field(g_surrs_amp)
    else:
        sg_amp = None
        a_amp = None
    if SURR_TYPE == 'AR':
        sg.prepare_AR_surrogates()
        if AMPLITUDE:
            sg_amp.prepare_AR_surrogates()
    workers = [
        Process(target=_cond_difference_surrogates,
# NOTE(review): script fragment; it starts mid-way through nested loops (the
# leading `else:` belongs to an `if MEANS:` visible only in a sibling chunk)
# and is truncated inside the continous_wavelet(...) call at the end.
# Indentation below is reconstructed from context -- verify against the
# original script.
                else:
                    cond_means[iota] = np.var(g.data[ndx], ddof=1)
            difference[i, j] = cond_means.max() - cond_means.min(
            )  # append difference to list
            mean_var[i, j] = np.mean(cond_means)

print(
    "[%s] Wavelet analysis done. Now computing wavelet for MF surrogates in parallel..."
    % str(datetime.now()))
surrogates_difference = np.zeros([num_surr] + list(difference.shape))
surrogates_mean_var = np.zeros_like(surrogates_difference)
surr_completed = 0
# copy the data field and remember its (detrended) seasonality so each
# surrogate can be re-seasonalised after construction
sg = SurrogateField()
sg.copy_field(g)
mean, var, trend = g.get_seasonality(DETREND=True)


def _cond_difference_surrogates(sg, jobq, resq):
    # Worker: consume one job token per multifractal surrogate, compute the
    # wavelet at every grid point; closes over mean/var/trend above.
    while jobq.get() is not None:
        difference = np.zeros((sg.lats.shape[0], sg.lons.shape[0]))
        mean_var = np.zeros_like(difference)
        sg.construct_multifractal_surrogates()
        sg.add_seasonality(mean, var, trend)
        for i in range(sg.lats.shape[0]):
            for j in range(sg.lons.shape[0]):
                wave, _, _, _ = wavelet_analysis.continous_wavelet(
                    sg.surr_data[:, i, j], 1, False, wavelet_analysis.morlet,
# NOTE(review): script fragment (collapsed formatting reconstructed); this
# chunk is truncated mid-expression in the final `workers = [Process(`
# list build.  Relies on module-level g_amp, g_surrs, g_surrs_amp, CONDITION,
# AMPLITUDE, NUM_SURR, WORKERS, SURR_TYPE and start_idx/end_idx.
g_surrs_amp.time = g_amp.time[start_idx:end_idx].copy()
if CONDITION:
    total_surrogates_condition = []
if np.all(np.isnan(g_surrs.data) == False):
    # construct the job queue
    jobQ = Queue()
    resQ = Queue()
    if CONDITION:
        for i in range(3 * NUM_SURR):
            jobQ.put(1)         # three jobs per surrogate when conditioning
    else:
        for i in range(NUM_SURR):
            jobQ.put(1)
    for i in range(WORKERS):
        jobQ.put(None)          # poison pill, one per worker
    a = g_surrs.get_seasonality(DETREND=True)
    sg = SurrogateField()
    sg.copy_field(g_surrs)
    if AMPLITUDE:
        # separate surrogate field for the amplitude series
        a_amp = g_surrs_amp.get_seasonality(True)
        sg_amp = SurrogateField()
        sg_amp.copy_field(g_surrs_amp)
    else:
        sg_amp = None
        a_amp = None
    if SURR_TYPE == "AR":
        sg.prepare_AR_surrogates()
        if AMPLITUDE:
            sg_amp.prepare_AR_surrogates()
    workers = [
        Process(
# NOTE(review): script fragment; it starts mid-way through nested loops and
# ends right after the phase-bin index computation.  Indentation below is
# reconstructed from context -- verify against the original script.
                if MEANS:
                    cond_means[iota] = np.mean(g.data[ndx])
                else:
                    cond_means[iota] = np.var(g.data[ndx], ddof = 1)
            difference[i, j] = cond_means.max() - cond_means.min() # append difference to list
            mean_var[i, j] = np.mean(cond_means)

print("[%s] Wavelet analysis done. Now computing wavelet for MF surrogates in parallel..." % str(datetime.now()))
surrogates_difference = np.zeros([num_surr] + list(difference.shape))
surrogates_mean_var = np.zeros_like(surrogates_difference)
surr_completed = 0
# copy the data field and remember its (detrended) seasonality so each
# surrogate can be re-seasonalised after construction
sg = SurrogateField()
sg.copy_field(g)
mean, var, trend = g.get_seasonality(DETREND = True)


def _cond_difference_surrogates(sg, jobq, resq):
    # Worker: consume one job token per multifractal surrogate; computes the
    # wavelet phase at every grid point and bins the data by phase.
    # Closes over mean/var/trend, s0, k0, cond_means and helpers above.
    while jobq.get() is not None:
        difference = np.zeros((sg.lats.shape[0], sg.lons.shape[0]))
        mean_var = np.zeros_like(difference)
        sg.construct_multifractal_surrogates()
        sg.add_seasonality(mean, var, trend)
        for i in range(sg.lats.shape[0]):
            for j in range(sg.lons.shape[0]):
                wave, _, _, _ = wavelet_analysis.continous_wavelet(sg.surr_data[:, i, j], 1, False, wavelet_analysis.morlet, dj = 0, s0 = s0, j1 = 0, k0 = k0) # perform wavelet
                phase = np.arctan2(np.imag(wave), np.real(wave)) # get phases from oscillatory modes
                for iota in range(cond_means.shape[0]):
                    # get conditional means for current phase range
                    #phase_bins = get_equiquantal_bins(phase_temp) # equiquantal bins
                    phase_bins = get_equidistant_bins() # equidistant bins
                    ndx = ((phase[0,:] >= phase_bins[iota]) & (phase[0,:] <= phase_bins[iota+1]))
# surrogates if NUM_SURR != 0: surr_completed = 0 diffs = np.zeros((NUM_SURR,)) mean_vars = np.zeros_like(diffs) g_surrs.data = g.data[start_idx : end_idx].copy() g_surrs.time = g.time[start_idx : end_idx].copy() if np.all(np.isnan(g_surrs.data) == False): # construct the job queue jobQ = Queue() resQ = Queue() for i in range(NUM_SURR): jobQ.put(1) for i in range(WORKERS): jobQ.put(None) a = g_surrs.get_seasonality(DETREND = True) sg = SurrogateField() sg.copy_field(g_surrs) if SURR_TYPE == 'AR': sg.prepare_AR_surrogates() workers = [Process(target = _cond_difference_surrogates, args = (sg, g_surrs, a, start_cut, jobQ, resQ)) for iota in range(WORKERS)] for w in workers: w.start() while surr_completed < NUM_SURR: # get result diff, meanVar = resQ.get() diffs[surr_completed] = diff mean_vars[surr_completed] = meanVar surr_completed += 1 for w in workers: w.join()
# NOTE(review): script fragment comparing the seasonal variance of the data
# with multifractal (MF) and Fourier (FT) surrogates; the chunk is truncated
# inside the daily loop at the end.  Relies on module-level ts (with ts.g a
# DataField) defined elsewhere.
sg = SurrogateField()
g = DataField()

# per-day variance table; presumably columns are data / MF / FT -- verify
daily_var = np.zeros((365,3))
mean, var_data, trend = ts.g.get_seasonality(True)
sg.copy_field(ts.g)

#MF
sg.construct_multifractal_surrogates()
sg.add_seasonality(mean, var_data, trend)
g.data = sg.surr_data.copy()
g.time = sg.time.copy()
_, var_surr_MF, _ = g.get_seasonality(True)

#FT
sg.construct_fourier_surrogates_spatial()
sg.add_seasonality(mean, var_data, trend)
g.data = sg.surr_data.copy()
g.time = sg.time.copy()
_, var_surr_FT, _ = g.get_seasonality(True)

# walk the calendar from 1 Jan 1895 one day at a time
delta = timedelta(days = 1)
d = date(1895,1,1)
for i in range(daily_var.shape[0]):
    ndx = ts.g.find_date_ndx(d)
# NOTE(review): script fragment; the first line below is the tail of a call
# whose beginning lies outside this chunk, and the fragment is truncated
# inside the daily loop at the end.  Compares seasonal variance of the data
# with multifractal (MF) and Fourier (FT) surrogates; relies on module-level
# ts (with ts.g a DataField) defined elsewhere.
                          date(2014, 1, 1), False)
sg = SurrogateField()
g = DataField()

# per-day variance table; presumably columns are data / MF / FT -- verify
daily_var = np.zeros((365, 3))
mean, var_data, trend = ts.g.get_seasonality(True)
sg.copy_field(ts.g)

#MF
sg.construct_multifractal_surrogates()
sg.add_seasonality(mean, var_data, trend)
g.data = sg.surr_data.copy()
g.time = sg.time.copy()
_, var_surr_MF, _ = g.get_seasonality(True)

#FT
sg.construct_fourier_surrogates_spatial()
sg.add_seasonality(mean, var_data, trend)
g.data = sg.surr_data.copy()
g.time = sg.time.copy()
_, var_surr_FT, _ = g.get_seasonality(True)

# walk the calendar from 1 Jan 1895 one day at a time
delta = timedelta(days=1)
d = date(1895, 1, 1)
for i in range(daily_var.shape[0]):
    ndx = ts.g.find_date_ndx(d)