def FIRE_HiRes_L1_L2(datafile, ephemfile):
    """Convert a FIRE HiRes L1 data file to L2 by merging in ephemeris data.

    Reads the L1 data and ephemeris JSON-headed ASCII files, trims the data
    to the ephemeris coverage, interpolates selected ephemeris quantities
    (plus geodetic lat/lon) onto the data epochs, and writes the merged
    result to a '<date>-HiRes_L2.txt' file next to the input.

    Parameters
    ----------
    datafile : str
        Path to the L1 HiRes JSON-headed ASCII file.
    ephemfile : str
        Path to the ephemeris JSON-headed ASCII file.

    Returns
    -------
    data : SpaceData
        The merged L2 data (also written to disk).
    """
    full_data = dm.readJSONheadedASCII(datafile)
    ephem = dm.readJSONheadedASCII(ephemfile)
    data = Trim_data_file(full_data, ephem)
    ephem_fields = ['Lsimple', 'CDMAG_MLT']
    # Common time base for interpolation: TAI seconds for both series.
    dt = spt.Ticktock(data['Epoch']).TAI
    et = spt.Ticktock(ephem['DateTime']).TAI
    for field in ephem_fields:
        print(field)  # progress indicator (py3 print; was a py2 print stmt)
        nx = tb.interpol(dt, et, ephem[field])
        data[field] = dm.dmarray(nx)
    # Geodetic latitude/longitude are the two columns of Rgeod_LatLon.
    ephem_lat = ephem['Rgeod_LatLon'][:, 0]
    ephem_lon = ephem['Rgeod_LatLon'][:, 1]
    data['Lat'] = dm.dmarray(tb.interpol(dt, et, ephem_lat))
    data['Lon'] = dm.dmarray(tb.interpol(dt, et, ephem_lon))
    # BUG FIX: original called np.zeros(n_lines, 12), which passes 12 as the
    # dtype argument and raises TypeError -- the shape must be a tuple.
    # (eflux is currently unused; presumably a placeholder for future L2
    # energy-flux computation -- TODO confirm before removing.)
    n_lines = len(data['Epoch'])
    eflux = np.zeros((n_lines, 12))
    # Output name: strip the L1-specific suffix (23 chars) from the input
    # name and append the ephemeris date plus the L2 tag.
    day = ephem['DateTime'][0][0:10]
    outfile = datafile[:-23] + day + '-HiRes_L2.txt'
    dm.toJSONheadedASCII(outfile, data)
    return data
def test_interpol_baddata(self):
    """interpol should give known results in presence of fill values"""
    xvals = numpy.arange(10)
    yvals = numpy.arange(10)
    targets = numpy.arange(5) + 0.5
    # plain interpolation with no fill handling
    expected = array([0.5, 1.5, 2.5, 3.5, 4.5])
    numpy.testing.assert_equal(expected, tb.interpol(targets, xvals, yvals))
    # points bracketing the bad value (2) come back masked
    expected = numpy.ma.masked_array(
        [0.5, 1.5, 2.5, 3.5, 4.5],
        mask=[False, True, True, False, False],
        fill_value=1e+20)
    numpy.testing.assert_equal(
        expected, tb.interpol(targets, xvals, yvals, baddata=2))
    # bad value sitting at the end of the array
    expected = array([1.0, 9.0])
    numpy.testing.assert_equal(
        expected, tb.interpol([-1, 12], xvals, yvals, baddata=0))
def test_interpol_arb(self):
    """Interpol should give known results for arbitrary float/int wrapping"""
    # wrap at an arbitrary integer period
    yy = list(range(14)) * 2
    xx = list(range(len(yy)))
    want = [1.5, 10.5, 13.5]
    numpy.testing.assert_almost_equal(
        want, tb.interpol([1.5, 10.5, 13.5], xx, yy, wrap=14).compressed())
    # regression: same data, no wrap needed
    want = [1.5, 10.5, 1.5]
    numpy.testing.assert_almost_equal(
        want, tb.interpol([1.5, 10.5, 15.5], xx, yy))
    # a float wrap period behaves exactly like the int one
    want = [1.5, 10.5, 13.5]
    numpy.testing.assert_almost_equal(
        want, tb.interpol([1.5, 10.5, 13.5], xx, yy, wrap=14.0).compressed())
    # longitude-style wrap given explicitly as 360.0
    yy = list(range(360)) * 2
    xx = list(range(len(yy)))
    want = numpy.ma.masked_array([1.5, 10.5, 359.5],
                                 mask=False, fill_value=1e+20)
    numpy.testing.assert_equal(
        want, tb.interpol([1.5, 10.5, 359.5], xx, yy, wrap=360.0))
def test_interpol_keywords(self):
    """Interpol should give known results with hour and lon keyword wrapping"""
    # 'hour' keyword wraps at 24
    yy = list(range(24)) * 2
    xx = list(range(len(yy)))
    want = numpy.ma.masked_array([1.5, 10.5, 23.5],
                                 mask=False, fill_value=1e+20)
    numpy.testing.assert_equal(
        want, tb.interpol([1.5, 10.5, 23.5], xx, yy, wrap='hour'))
    # regression: same data, no wrap needed
    want = numpy.ma.masked_array([1.5, 10.5, 1.5],
                                 mask=False, fill_value=1e+20)
    numpy.testing.assert_equal(
        want, tb.interpol([1.5, 10.5, 1.5], xx, yy))
    # 'lon' keyword wraps at 360
    yy = list(range(360)) * 2
    xx = list(range(len(yy)))
    want = numpy.ma.masked_array([1.5, 10.5, 359.5],
                                 mask=False, fill_value=1e+20)
    numpy.testing.assert_equal(
        want, tb.interpol([1.5, 10.5, 359.5], xx, yy, wrap='lon'))
    # regression: same data, no wrap needed
    want = numpy.ma.masked_array([1.5, 10.5, 10.5],
                                 mask=False, fill_value=1e+20)
    numpy.testing.assert_equal(
        want, tb.interpol([1.5, 10.5, 370.5], xx, yy))
def main(infiles=None):
    """Plot Burton and O'Brien Dst predictions for an ensemble of IMF inputs.

    Loads the reference run's IMF input and SWMF log, reads IMF files for
    each ensemble member, drives the Dst_Burton and Dst_OBrien models from
    Kyoto Sym-H initial conditions, and plots predictions (grey) against the
    SWMF Dst (crimson) on two stacked panels.

    Parameters
    ----------
    infiles : list or None
        If None, all 'Event5Ensembles/run_???' members are globbed.
        Otherwise a list of run *directories* (NOTE(review): entries are
        joined with 'IMF.dat', so callers pass directories, not file paths).
    """
    # read SWMF ImfInput file
    # Originally written to examine data from simulations
    # by Morley, Welling and Woodroffe (2018). See data at
    # Zenodo (https://doi.org/10.5281/zenodo.1324562)
    infilename = 'Event5Ensembles/run_orig/IMF.dat'
    eventIMF = bats.ImfInput(filename=infilename)
    data1 = bats.LogFile('Event5Ensembles/run_orig/GM/IO2/log_e20100404-190000.log')
    # #read IMF for 10 events...
    # infiles = ['Event5Ensembles/run_{:03d}/IMF.dat'.format(n)
    #            for n in [32,4,36,10,13,17,18,20,24,29]]
    # read IMF files for all ensemble members
    if infiles is None:
        infiles = glob.glob('Event5Ensembles/run_???/IMF.dat')
        subsetlabel = False
    else:
        # got list of run directories from Dst/Kp plotter
        subsetlabel = True
        infiles = [os.path.join(d, 'IMF.dat') for d in infiles]
        nsubset = len(infiles)
    eventlist = [bats.ImfInput(filename=inf) for inf in infiles]
    tstart = eventIMF['time'][0]
    tstop = eventIMF['time'][-1]
    # observed Sym-H from Kyoto covering the event window; sym['sym-h'][0]
    # below seeds both empirical Dst models with the observed initial value
    sym = kyo.KyotoSym(lines=bats.kyoto.symfetch(tstart, tstop))
    fig = plt.figure(figsize=(10, 5))
    ax1 = fig.add_subplot(211)  # Burton panel
    ax2 = fig.add_subplot(212)  # O'Brien panel
    for ev in eventlist:
        # random grey level per member so traces are distinguishable
        gco = '{}'.format(np.random.randint(5, 50)/100.0)
        # dt=1/60: IMF inputs are presumably 1-minute cadence in hours --
        # TODO confirm against the model functions' expected units
        pred01B = Dst_Burton(sym['sym-h'][0], ev['ux'], ev['bz'], dt=1./60)
        pred01O = Dst_OBrien(sym['sym-h'][0], ev['ux'], ev['bz'], dt=1./60)
        ax1.plot(ev['time'], pred01B, c=gco, alpha=0.5)
        ax2.plot(ev['time'], pred01O, c=gco, alpha=0.5)
    # ax1.plot(sym['time'], sym['sym-h'], lw=1.5, c='crimson', label='Sym-H')
    # interpolate the simulated Dst onto the IMF input epochs (RDT time base)
    evtime = spt.Ticktock(eventIMF['time']).RDT
    datime = spt.Ticktock(data1['time']).RDT
    simDst = tb.interpol(evtime, datime, data1['dst'])
    # ax1.plot(eventIMF['time'], simDst+11-(7.26*eventIMF['pram']), lw=1.5, c='seagreen', label='Sym-H (Press.Corr.)')
    # ax2.plot(sym['time'], sym['sym-h'], lw=1.5, c='crimson')
    # ax2.plot(eventIMF['time'], simDst+11-(7.26*eventIMF['pram']), lw=1.5, c='seagreen', label='Sym-H (Press.Corr.)')
    ax1.plot(data1['time'], data1['dst'], linewidth=1.5, color='crimson',
             alpha=0.65, label='SWMF')
    ax2.plot(data1['time'], data1['dst'], linewidth=1.5, color='crimson',
             alpha=0.65)
    ax1.legend()
    splot.applySmartTimeTicks(ax1, [tstart, tstop])
    splot.applySmartTimeTicks(ax2, [tstart, tstop], dolabel=True)
    ax1.set_ylabel('Sym-H [nT]')
    ax2.set_ylabel('Sym-H [nT]')
    ax1.text(0.05, 0.05, "Burton et al.", transform=ax1.transAxes)
    ax2.text(0.05, 0.05, "O'Brien et al.", transform=ax2.transAxes)
def FIRE_Context_L1_L2(datafile, ephemfile):
    """Convert a FIRE Context L1 data file to L2 by merging in ephemeris data.

    Reads the L1 data and ephemeris files, trims the data to the ephemeris
    coverage, interpolates each ephemeris field (scalar or multi-column)
    onto the data epochs, despikes and calibrates the two Context channels
    into energy flux, and writes the result to a '<date>-Context_L2.txt'
    JSON-headed ASCII file.

    Parameters
    ----------
    datafile : str
        Path to the L1 Context JSON-headed ASCII file.
    ephemfile : str
        Path to the ephemeris JSON-headed ASCII file.

    Returns
    -------
    data : SpaceData
        The merged L2 data (also written to disk).
    """
    full_data = dm.readJSONheadedASCII(datafile)
    ephem = dm.readJSONheadedASCII(ephemfile)
    # second read keeps an untouched copy purely for its per-variable attrs
    meta = dm.readJSONheadedASCII(ephemfile)
    data = Trim_data_file(full_data, ephem)
    ephem_fields = test_ephem_list
    # Common time base for interpolation: TAI seconds for both series.
    dt = spt.Ticktock(data['Epoch']).TAI
    et = spt.Ticktock(ephem['DateTime']).TAI
    for field in ephem_fields:
        dim = np.size(ephem[field][0])
        print(field, dim)  # progress indicator (py3 print; was a py2 stmt)
        if dim > 1:
            # multi-column variable: interpolate each column separately
            nx = np.empty([len(dt), dim])
            for j in range(dim):
                nx[:, j] = tb.interpol(dt, et, ephem[field][:, j])
        else:
            nx = tb.interpol(dt, et, ephem[field])
        data[field] = dm.dmarray(nx, attrs=meta[field].attrs)
    # Context columns: 0 = collimated detector, 1 = surface detector.
    # Copies so despiking/calibration leave data['Context'] untouched.
    col = deepcopy(data['Context'][:, 0])
    sur = deepcopy(data['Context'][:, 1])
    despike(col, 250, 10)  # NOTE(review): assumed in-place -- confirm
    despike(sur, 250, 10)
    # counts -> energy flux; constants presumably from detector geometry
    # and accumulation time -- TODO confirm calibration source
    col = (col / (6 * 9)) * 1033
    sur = (sur / (6 * 23)) * 772
    data['col'] = dm.dmarray(col, attrs={
        'Description': 'Collimated Detector Energy Flux',
        'SCALE_TYPE': 'log'})
    data['sur'] = dm.dmarray(sur, attrs={
        'Description': 'Surface Detector Energy Flux',
        'SCALE_TYPE': 'log'})
    # Output name: strip the L1-specific suffix (25 chars) from the input
    # name and append the ephemeris date plus the L2 tag.
    day = ephem['DateTime'][0][0:10]
    outfile = datafile[:-25] + day + '-Context_L2.txt'
    order = ['Epoch', 'col', 'sur', 'Context']
    # order = ['Epoch', 'Context']
    order.extend(ephem_fields)
    # BUG FIX: 'order' was built but never used; pass it so the output
    # columns are written in the intended sequence.
    dm.toJSONheadedASCII(outfile, data, order=order)
    return data