Example #1
def __init__(self):
    self.t = None
    self.coords = coordinate_structure()
    self.data = None
    self.N = None
    self.N_E = None
    self.S_E = None
    self.S = None
    self.L = None
    self.consts_list = None
    self.LK = None
    self.I0 = None
    self.RES_DT = None
    self.RES_FINT = None
    self.NUM_T = None
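
A minimal usage sketch, assuming this __init__ belongs to a flux container class (here called flux_obj, a hypothetical name) holding the same field names that Example #7 below fills through a plain dict; all values are hypothetical:

import numpy as np

obj = flux_obj()
obj.I0 = -100000.0                      # hypothetical peak current
obj.RES_FINT, obj.NUM_T = 30.0, 600     # hypothetical integration window / step count
obj.t = np.linspace(0, obj.RES_FINT, obj.NUM_T)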
Example #2
    def compute_at(self, in_time):
        # Get terminator
        delta = 1
        lons, lats, tau, dec = self.daynight_terminator(in_time, delta, -180, 179)
        
        # Rotate to geomagnetic
        cs = coordinate_structure(lats, lons, np.zeros_like(lats), 'geographic')
        cs.transform_to('geomagnetic')

        # Hack: interp1d won't extrapolate past the bounds of cs.lon(), which ends
        # near 178 rather than 180, so fill with a value so far out of range that
        # nearest_index will never select it. Without this we get little slivers
        # outside the fill region. (A proper extrapolator is sketched after this
        # example.)
        interpolator = interp1d(cs.lon(), cs.lat(), bounds_error=False, fill_value=-100000)
        lats = interpolator(lons)

        lats2 = np.arange(-90,90, delta,dtype=np.float32)
        self.grid_lats = lats2
        nlons = len(lons); nlats = len(lats2)
        lons2, lats2 = np.meshgrid(lons,lats2)
        lats = lats[np.newaxis,:]*np.ones((nlats,nlons),dtype=np.float32)
        daynight = np.ones(lons2.shape, np.int8)
        if dec > 0: # NH summer
            daynight = np.where(lats2>lats,0, daynight)
        else: # NH winter
            daynight = np.where(lats2<lats,0, daynight)
    
        self.grid_lons = lons
        self.in_time   = in_time
        self.daynight = daynight
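
The comment above asks for a proper extrapolator. A minimal sketch, assuming scipy >= 0.17 (where interp1d accepts fill_value='extrapolate') and a monotonically increasing cs.lon():

from scipy.interpolate import interp1d

# Linear extrapolation past +/-178 degrees removes the -100000 sentinel entirely
interpolator = interp1d(cs.lon(), cs.lat(), kind='linear',
                        bounds_error=False, fill_value='extrapolate')
lats = interpolator(lons)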
Example #3
I0 = -200000.0

t = np.linspace(0,30,600)

N = []
for ind, val in enumerate(L):

    # One feature row per time step: [lat, LP, I0, val, t_i]
    a = np.outer([lat, LP, I0, val], np.ones(600))
    b = t
    X = np.vstack([a, b]).T

    tmp = S.predict(X)
    print(np.shape(tmp))
    N.append(tmp)

print(np.shape(N))
N = np.maximum(0.0, N)

N_log = np.maximum(-100, np.log10(N))


cs = coordinate_structure(L,[-10],[0],'L_dipole')
cs.transform_to('geomagnetic')
print(cs.lat())
plt.figure()
plt.pcolor(t, np.flipud(cs.lat()), N_log)
plt.colorbar()
plt.show()
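
Two small cleanups, sketched here rather than taken from the original: np.column_stack builds the same feature matrix in one call, and np.errstate silences the divide-by-zero warning that np.log10 emits before the -100 clip takes effect:

# One row [lat, LP, I0, val, t_i] per time step; equivalent to np.vstack([a, b]).T
X = np.column_stack([np.tile([lat, LP, I0, val], (len(t), 1)), t])

# np.log10(0) returns -inf (then clipped to -100) but warns; suppress the warning
with np.errstate(divide='ignore'):
    N_log = np.maximum(-100, np.log10(N))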
Example #4
def build_database(input_dir='outputs', output_filename='database.pkl', t_new_step=None, num_L=33, old_format=False):

    ev2joule = (1.60217657)*1e-19 # Joules / ev
    joule2millierg = 10*1e10 

    d = os.listdir(input_dir)

    database = dict()

    ins = sorted([f for f in d if 'in_' in f])

    # Parse constants file
    if old_format:
        consts_file = os.path.join(input_dir, "codesrc/consts.h")
    else:
        consts_file = os.path.join(input_dir, "consts.h")
    
    sc = load_sim_constants(consts_file, old_format = old_format)

    in_lats = np.array(sorted([int(i[3:]) for i in ins]))
    print "in lats:", in_lats

    # Should we downsample?
    if t_new_step is not None:
        t = np.arange(0,sc.T_MAX, step=t_new_step) # New time vector
        intervals = np.round(t*sc.NUM_STEPS/sc.T_MAX).astype(int) # Intervals to sum between
    else:
        t = np.arange(0,sc.T_MAX, step=sc.T_STEP)

    # Electron flux arrays -- [in_lats x out_lats x t]
    N_arr = np.zeros([len(in_lats), num_L, len(t)])
    S_arr = np.zeros([len(in_lats), num_L, len(t)])

    # Energy flux arrays  --  [in_lats x out_lats x t]
    N_e_arr = np.zeros([len(in_lats), num_L, len(t)])
    S_e_arr = np.zeros([len(in_lats), num_L, len(t)])

    # Energy at center of bins:
    E_centers = (1e-3)*pow(10, sc.E_EXP_BOT + (sc.DE_EXP/2.0) + sc.DE_EXP*np.arange(0,sc.NUM_E))
    # Bin widths in keV (Jacob's analytical expression; matches np.diff(sc.E_tot_arr)
    # -- see the check sketched after this example)
    dE = (1e-3)*(pow(10, sc.E_EXP_BOT + (sc.DE_EXP/2.0)) *
                np.exp(sc.DE_EXP*np.arange(0,sc.NUM_E)/np.log10(np.e)) *
                sc.DE_EXP / np.log10(np.e))

    print(dE)
    for in_lat_ind, in_lat in enumerate(in_lats):
        # Load some actual data!
        cur_dir = os.path.join(input_dir, "in_%g" % in_lat)
        N, S, L = load_phi_files(dataDir=cur_dir, sc=sc)
        
        NUM_L = len(L)

        # New version (6.2016) returns N, S ~ [n_E x n_T x n_L]
        # Units are [counts / (cm^2 keV sec)]  (I think).
        #
        # Integrate over energy bins (in keV) to get counts/(cm^2 sec)

        # Electron flux totals; dimensions are [n_L x n_T]
        N_el_totals = np.inner(N.swapaxes(0,2), dE)
        S_el_totals = np.inner(S.swapaxes(0,2), dE)

        # Integrate (N * E) dE to get total energy, ev/(cm^2 sec)
        # Convert from eV to millierg ~1.602*10^-9
        N_energy_totals = np.inner(N.swapaxes(0,2), dE*E_centers)*ev2joule*joule2millierg
        S_energy_totals = np.inner(S.swapaxes(0,2), dE*E_centers)*ev2joule*joule2millierg

        # Downsample data if t_new_step is provided:
        if t_new_step is not None:
            for t_ind, Tp in enumerate(zip(intervals[:-1], intervals[1:])):
                
                # Integrate (N dT) over each new step, then divide by the new bin width:
                # multiply by T_STEP to get total electrons per bin, then divide by the new timestep.
                N_arr[in_lat_ind,:,t_ind] = np.sum(N_el_totals[:,Tp[0]:Tp[1]],axis=1)*sc.T_STEP/t_new_step
                S_arr[in_lat_ind,:,t_ind] = np.sum(S_el_totals[:,Tp[0]:Tp[1]],axis=1)*sc.T_STEP/t_new_step
                
                N_e_arr[in_lat_ind,:,t_ind] = np.sum(N_energy_totals[:,Tp[0]:Tp[1]],axis=1)*sc.T_STEP/t_new_step
                S_e_arr[in_lat_ind,:,t_ind] = np.sum(S_energy_totals[:,Tp[0]:Tp[1]],axis=1)*sc.T_STEP/t_new_step

        else:
            N_arr[in_lat_ind,:,:] = N_el_totals
            S_arr[in_lat_ind,:,:] = S_el_totals

            N_e_arr[in_lat_ind,:,:] = N_energy_totals
            S_e_arr[in_lat_ind,:,:] = S_energy_totals


        # Convert output L-shells to geomagnetic latitude
        coords = coordinate_structure(L, [0], [100], 'L_dipole')
        coords.transform_to('geomagnetic')

    # If downsampling, overwrite the stored value of T_STEP
    if t_new_step is not None:
        sc.T_STEP_old = sc.T_STEP
        sc.T_STEP = t_new_step

    # print "range (counts N):",np.min(N_arr),np.max(N_arr)
    # print "range (counts S):",np.min(S_arr),np.max(S_arr)
    # print "range (energy N):",np.min(N_e_arr),np.max(N_e_arr)
    # print "range (energy s):",np.min(S_e_arr),np.max(S_e_arr)

    database['N_el'] = N_arr        # el/(cm^2 sec)
    database['S_el'] = S_arr
    database['N_energy'] = N_e_arr  # mErg/(cm^2 sec)
    database['S_energy'] = S_e_arr
    database['t'] = t
    database['L'] = L
    database['in_lats'] = in_lats
    database['out_lats']= coords.lat()
    database['consts']  = sc

    print "Saving database"
    with open(output_filename,'wb') as f:
        pickle.dump(database, f)
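
A quick check of the dE comment above (a sketch with hypothetical constant values, not from the original repo): the analytic expression reduces to E_center * ln(10) * DE_EXP, the log-midpoint approximation to the exact edge difference np.diff(edges), with relative error of roughly (DE_EXP*ln10)**2 / 24:

import numpy as np

E_EXP_BOT, DE_EXP, NUM_E = 1.0, 0.01, 128      # hypothetical values
edges = 10.0 ** (E_EXP_BOT + DE_EXP * np.arange(NUM_E + 1))
centers = 10.0 ** (E_EXP_BOT + DE_EXP / 2.0 + DE_EXP * np.arange(NUM_E))
dE_analytic = centers * np.log(10.0) * DE_EXP  # same form as the expression above
print(np.max(np.abs(dE_analytic / np.diff(edges) - 1.0)))  # ~2e-5 for DE_EXP=0.01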
Example #5
def __init__(self, tle1, tle2, name):
    self.tle_rec = ephem.readtle(name, tle1, tle2)
    self.curr_time = None
    self.name = name
    # Long, Lat! XY on a map, but isn't pleasant to say out loud.
    self.coords = coordinate_structure()
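
A usage sketch, assuming the standard pyephem API (after compute(), sublat and sublong hold the sub-satellite point in radians):

import datetime
import ephem
import numpy as np

tle_rec = ephem.readtle(name, tle1, tle2)   # the same TLE strings passed to __init__
tle_rec.compute(datetime.datetime(2015, 11, 1))
print(np.degrees(tle_rec.sublat), np.degrees(tle_rec.sublong))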
Example #6
inTime = "2015-11-01T00:25:00"
td = datetime.timedelta(seconds=30)  # Maximum time to look back for lightning (a pulse can last up to a minute, but how patient are we, really?)
plottime = datetime.datetime.strptime(inTime,  "%Y-%m-%dT%H:%M:%S")

print(plottime)

# Get satellite location
sat.compute(plottime)
sat.coords.transform_to('geomagnetic')

# Get flashes within timeframe:
flashes, flash_times = gld.load_flashes(plottime, td)
lats = [f[lat_ind] for f in flashes]
lons = [f[lon_ind] for f in flashes]

flash_coords = coordinate_structure(lats, lons, np.zeros(np.size(lats)), 'geographic')


# (Use these to make a nice grid)
lats = np.linspace(-90,90,90)
lons = np.linspace(-180,180,90)
flash_coords = coordinate_structure(lats, lons, [0],'geomagnetic')

print "%g flashes (pre-filter)" % flash_coords.len()
atten_factors = longitude_scaling(flash_coords, sat.coords)
mask = atten_factors < 24

# Alternative masks:
#   mask = np.abs(flash_coords.lon() - sat.coords.lon()) < 20
#   mask = ionoAbsorp(flash_coords.lat(), 4000) < 10
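
A sketch of applying the mask, assuming coordinate_structure exposes lat() and lon() as numpy arrays (as the alternative masks above suggest):

kept_lats = flash_coords.lat()[mask]
kept_lons = flash_coords.lon()[mask]
print("%g of %g flashes kept" % (np.sum(mask), flash_coords.len()))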
Example #7
def build_database(input_dir_name='outputs', output_filename='database.pkl'):

    ev2joule = (1.60217657)*1e-19 # Joules / ev
    joule2millierg = 10*1e10 

    print ""
    rootDir = os.getcwd() + '/' + input_dir_name + '/'
    d = os.listdir(rootDir)

    runs = sorted([f for f in d if 'run_' in f])
    print(runs)

    database = dict()

    for r in runs:
        d = os.listdir(rootDir + "/" + r)
        ins = sorted([f for f in d if 'in_' in f])
        print "Run: ", r    

        consts_file = r + "/codesrc/consts.h"

        # Parse consts.h: exec each numeric "#define NAME value" into the module
        # namespace (passing globals() keeps the names visible under Python 3);
        # lines that don't evaluate to a number are silently skipped.
        consts_list = []
        with open(rootDir + consts_file, 'r') as f:
            for line in f:
                if "#define" in line and not line.startswith("//"):
                    l = line.split()
                    try:
                        exec('%s=%f' % (l[1], eval(l[2])), globals())
                        consts_list.append(l)
                    except Exception:
                        pass
        for i in ins:

            # Load some actual data!
                inp_lat = int(i[3:])
                NUM_T = int(RES_FINT / RES_DT)
                obj = dict()

                N, S, L = load_phi_files(input_dir_name + "/" + r + "/" + i, num_E=NUM_E, num_T=NUM_T)
                
                obj['consts_list'] = consts_list
                NUM_L = len(L)

                obj['NUM_T'] = NUM_T
                obj['I0'] = I0
                obj['LK'] = LK
                obj['RES_DT'] = RES_DT
                obj['RES_FINT'] = RES_FINT

                # Total electron counts summed over energy bins
                N_totals = np.sum(N, axis=1)
                S_totals = np.sum(S, axis=1)

                # Load each (lat, Lk, I0, L-shell) vector separately
                coords = coordinate_structure(L,[-10],[0],'L_dipole')
                coords.transform_to('geomagnetic')


                # Total flux vs time and latitude
                obj['N'] = N_totals
                obj['S'] = S_totals

                # 3D array, flux vs time, latitude, energy
                obj['N_E'] = N
                obj['S_E'] = S

                obj['coords'] = coords
                obj['t'] = np.linspace(0, RES_FINT, NUM_T)
                key = inp_lat
                database[key] = obj


                print(inp_lat)

    print("Saving database")
    with open(output_filename,'wb') as f:
        pickle.dump(database,f,pickle.HIGHEST_PROTOCOL)
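
The exec()-based parsing above pushes #define values into the module namespace, which is hard to audit. A sketch of a dict-based alternative, under the same one-numeric-value-per-#define assumption about consts.h:

def parse_consts(path):
    """Collect numeric '#define NAME value' entries from a C header into a dict."""
    consts = {}
    with open(path) as f:
        for line in f:
            line = line.strip()
            if line.startswith('#define'):
                parts = line.split()
                if len(parts) >= 3:
                    try:
                        # Allow defines that reference earlier defines
                        consts[parts[1]] = float(eval(parts[2], {}, consts))
                    except Exception:
                        pass  # non-numeric define; skip
    return consts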