# NOTE: this listing collects functions from several pyspedas/pydivide modules.
# The imports below cover the names referenced throughout; exact import paths
# can differ between pyspedas versions, and module-local helpers (e.g.
# mms_eis_pad_spinavg, mms_feeps_active_eyes, mms_feeps_pitch_angles,
# mms_feeps_pad_spinavg, param_list, get_inst_obs_labels, range_select,
# orbit_time) are assumed to be importable from their sibling modules.
import os
import math
import logging
import warnings
import builtins

import numpy as np
import pandas as pd
import pytplot
import pyspedas
from numpy import nanmean
from scipy.io import readsav
from pytplot import get_data, store_data, options, tplot_names
from pyspedas import tnames
from pyspedas.utilities.download import download
from pyspedas.utilities.time_double import time_double
from pyspedas.analysis.tinterpol import tinterpol
from pyspedas.mms.mms_config import CONFIG


def mms_eis_spin_avg(probe='1', species='proton', data_units='flux',
                     datatype='extof', data_rate='srvy', suffix=''):
    """
    This function will spin-average the EIS spectrograms, and is automatically called from mms_load_eis

    Parameters:
        probe: str
            probe #, e.g., '4' for MMS4

        species: str
            species for calculation, e.g., 'proton'

        data_units: str
            'flux'

        datatype: str
            'extof' or 'phxtof'

        data_rate: str
            instrument data rate, e.g., 'srvy' or 'brst'

        suffix: str
            suffix of the loaded data

    Returns:
        List of tplot variables created.
    """
    if data_rate == 'brst':
        prefix = 'mms' + probe + '_epd_eis_brst_'
    else:
        prefix = 'mms' + probe + '_epd_eis_'

    spin_times, spin_nums = get_data(prefix + datatype + '_spin' + suffix)

    if spin_nums is not None:
        # indices of the last sample in each spin
        spin_starts = np.where(spin_nums[1:] > spin_nums[:-1])[0]

        telescopes = tnames(prefix + datatype + '_' + species + '_*' + data_units + '_t?' + suffix)

        out_vars = []

        for scope in range(0, 6):
            this_scope = telescopes[scope]
            flux_times, flux_data, energies = get_data(this_scope)

            spin_avg_flux = np.zeros([len(spin_starts), len(energies)])
            current_start = 0

            for spin_idx in range(0, len(spin_starts)):
                with warnings.catch_warnings():
                    warnings.simplefilter("ignore", category=RuntimeWarning)
                    # note: the original listing indexed this row as spin_idx - 1,
                    # which writes the first spin into the last row; spin_idx is
                    # the intended row
                    spin_avg_flux[spin_idx, :] = np.nanmean(
                        flux_data[current_start:spin_starts[spin_idx] + 1, :], axis=0)
                current_start = spin_starts[spin_idx] + 1

            store_data(this_scope + '_spin' + suffix,
                       data={'x': flux_times[spin_starts], 'y': spin_avg_flux, 'v': energies})
            options(this_scope + '_spin' + suffix, 'spec', True)
            options(this_scope + '_spin' + suffix, 'ylog', True)
            options(this_scope + '_spin' + suffix, 'zlog', True)
            options(this_scope + '_spin' + suffix, 'Colormap', 'jet')
            out_vars.append(this_scope + '_spin' + suffix)
        return out_vars
    else:
        print('Error, problem finding EIS spin variable to calculate spin-averages')
        return None
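# Example usage (illustrative sketch; mms_load_eis / pyspedas.mms.eis normally
# calls this for you, and matching EIS data must already be loaded):
#
#     pyspedas.mms.eis(trange=['2015-10-16', '2015-10-17'], probe='1', datatype='extof')
#     spin_vars = mms_eis_spin_avg(probe='1', species='proton', datatype='extof')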
def gyrophase(trange=['2017-07-11', '2017-07-12'], probe='1', data_rate='srvy',
              level='l2', datatype='electron'):
    """
    Calculates FEEPS gyrophase angles
    """
    mec_vars = pyspedas.mms.mec(trange=trange, probe=probe, data_rate=data_rate)

    qeci2sm = get_data('mms'+probe+'_mec_quat_eci_to_sm')
    qeci2bcs = get_data('mms'+probe+'_mec_quat_eci_to_bcs')
    rsun = get_data('mms'+probe+'_mec_r_sun_de421_eci')

    rsunbcs = np.zeros((len(rsun.times), 3))
    rduskbcs = np.zeros((len(rsun.times), 3))
    rduskeci = np.zeros((1, 3))
    rdusksm = [0, 1, 0]

    for i in range(len(rsun.times)):
        q = qeci2bcs.y[i, :]
        # Quaternion rotation matrix:
        s = 1  # these quaternions are unit-qs
        R = np.array([[1 - 2*s*(q[2]**2 + q[3]**2), 2*s*(q[1]*q[2] - q[3]*q[0]), 2*s*(q[1]*q[3] + q[2]*q[0])],  # ECI to BCS
                      [2*s*(q[1]*q[2] + q[3]*q[0]), 1 - 2*s*(q[1]**2 + q[3]**2), 2*s*(q[2]*q[3] - q[1]*q[0])],
                      [2*s*(q[1]*q[3] - q[2]*q[0]), 2*s*(q[2]*q[3] + q[1]*q[0]), 1 - 2*s*(q[1]**2 + q[2]**2)]])
        R = R.T
        rsunbcs[i, :] = np.array([R[0,0]*rsun.y[i,0] + R[1,0]*rsun.y[i,1] + R[2,0]*rsun.y[i,2],
                                  R[0,1]*rsun.y[i,0] + R[1,1]*rsun.y[i,1] + R[2,1]*rsun.y[i,2],
                                  R[0,2]*rsun.y[i,0] + R[1,2]*rsun.y[i,1] + R[2,2]*rsun.y[i,2]])

        # now make second vector for gyroplane reference, dusk direction (+Y in SM)
        q = qeci2sm.y[i, :]
        # Quaternion rotation matrix:
        s = 1  # these quaternions are unit-qs
        R2 = np.array([[1 - 2*s*(q[2]**2 + q[3]**2), 2*s*(q[1]*q[2] - q[3]*q[0]), 2*s*(q[1]*q[3] + q[2]*q[0])],  # ECI to SM
                       [2*s*(q[1]*q[2] + q[3]*q[0]), 1 - 2*s*(q[1]**2 + q[3]**2), 2*s*(q[2]*q[3] - q[1]*q[0])],
                       [2*s*(q[1]*q[3] - q[2]*q[0]), 2*s*(q[2]*q[3] + q[1]*q[0]), 1 - 2*s*(q[1]**2 + q[2]**2)]])
        # going from SM to ECI, so invert R:
        R2 = np.linalg.inv(R2)  # SM to ECI
        R2 = R2.T
        rduskeci = [R2[0,0]*rdusksm[0] + R2[1,0]*rdusksm[1] + R2[2,0]*rdusksm[2],
                    R2[0,1]*rdusksm[0] + R2[1,1]*rdusksm[1] + R2[2,1]*rdusksm[2],
                    R2[0,2]*rdusksm[0] + R2[1,2]*rdusksm[1] + R2[2,2]*rdusksm[2]]
        # Now convert to BCS:
        rduskbcs[i, :] = np.array([R[0,0]*rduskeci[0] + R[1,0]*rduskeci[1] + R[2,0]*rduskeci[2],
                                   R[0,1]*rduskeci[0] + R[1,1]*rduskeci[1] + R[2,1]*rduskeci[2],
                                   R[0,2]*rduskeci[0] + R[1,2]*rduskeci[1] + R[2,2]*rduskeci[2]])

    saved = store_data('mms'+probe+'_mec_r_sun_bcs', data={'x': rsun.times, 'y': rsunbcs})
    saved = store_data('mms'+probe+'_mec_r_dusk_bcs', data={'x': rsun.times, 'y': rduskbcs})

    # Rotation matrices for FEEPS coord system (FCS) into body coordinate system (BCS):
    Ttop = np.array([[1./np.sqrt(2.), -1./np.sqrt(2.), 0],
                     [1./np.sqrt(2.), 1./np.sqrt(2.), 0],
                     [0, 0, 1]]).T
    Tbot = np.array([[-1./np.sqrt(2.), -1./np.sqrt(2.), 0],
                     [-1./np.sqrt(2.), 1./np.sqrt(2.), 0],
                     [0, 0, -1]]).T

    # Telescope vectors in FCS, in eye order 1-12
    # (electrons: eyes 1-5 and 9-12; ions: eyes 6-8):
    fcs_vectors = [[0.347, -0.837, 0.423],    # V1
                   [0.347, -0.837, -0.423],   # V2
                   [0.837, -0.347, 0.423],    # V3
                   [0.837, -0.347, -0.423],   # V4
                   [-0.087, 0.000, 0.996],    # V5
                   [0.104, 0.180, 0.978],     # V6
                   [0.654, -0.377, 0.656],    # V7
                   [0.654, -0.377, -0.656],   # V8
                   [0.837, 0.347, 0.423],     # V9
                   [0.837, 0.347, -0.423],    # V10
                   [0.347, 0.837, 0.423],     # V11
                   [0.347, 0.837, -0.423]]    # V12

    # Telescope vectors in the body coordinate system; the factor of -1 accounts
    # for the 180 deg shift between particle velocity and telescope normal direction:
    def fcs_to_bcs(T, V):
        return [-1.*(T[0,0]*V[0] + T[1,0]*V[1] + T[2,0]*V[2]),
                -1.*(T[0,1]*V[0] + T[1,1]*V[1] + T[2,1]*V[2]),
                -1.*(T[0,2]*V[0] + T[1,2]*V[1] + T[2,2]*V[2])]

    top_bcs = [fcs_to_bcs(Ttop, V) for V in fcs_vectors]  # Vt1bcs ... Vt12bcs
    bot_bcs = [fcs_to_bcs(Tbot, V) for V in fcs_vectors]  # Vb1bcs ... Vb12bcs
    telescope_bcs = top_bcs + bot_bcs  # 24 telescopes: top 1-12, then bottom 1-12

    fgm_vars = pyspedas.mms.fgm(trange=[time_double(trange[0])-600, time_double(trange[1])+600],
                                probe=probe, data_rate=data_rate)

    # interpolate the FGM var to the MEC var timestamps
    tinterpol('mms'+probe+'_fgm_b_bcs_' + data_rate + '_l2_bvec',
              'mms'+probe+'_mec_r_sun_bcs',
              newname='mms'+probe+'_fgm_b_bcs_' + data_rate + '_l2_bvec_int')

    B = get_data('mms'+probe+'_fgm_b_bcs_' + data_rate + '_l2_bvec_int')

    # Now calculate gyrophase
    # Sun vector perp to B:
    Sperp = np.zeros((1, 3))
    # Dusk vector perp to B:
    Dperp = np.zeros((1, 3))
    # Telescope vectors perp to B:
    Tperp = np.zeros((len(rsunbcs[:, 0]), 3, 24))
    # Gyrophase:
    phi = np.zeros((len(rsunbcs[:, 0]), 24))

    for i in range(len(rsunbcs[:, 0])):
        uB = B.y[i,:]/np.sqrt(B.y[i,0]**2 + B.y[i,1]**2 + B.y[i,2]**2)
        Sperp = np.cross(np.cross(uB, rsunbcs[i, :]/np.sqrt(np.nansum(rsunbcs[i, :]**2))), uB)
        Dperp = np.cross(np.cross(uB, rduskbcs[i, :]/np.sqrt(np.nansum(rduskbcs[i, :]**2))), uB)

        for j, Vbcs in enumerate(telescope_bcs):
            Tperp[i, :, j] = np.cross(np.cross(uB, Vbcs), uB)

        for j in range(24):
            th1 = np.arccos(np.nansum(Tperp[i,:,j] * Sperp)/(np.sqrt(np.nansum(Tperp[i,:,j]**2))*np.sqrt(np.nansum(Sperp**2))))
            th2 = np.arccos(np.nansum(Tperp[i,:,j] * Dperp)/(np.sqrt(np.nansum(Tperp[i,:,j]**2))*np.sqrt(np.nansum(Dperp**2))))
            if th1 <= np.pi/2.0 and th2 < np.pi/2:
                phi[i, j] = 2*np.pi - th1
            if th1 < np.pi/2.0 and th2 >= np.pi/2.0:
                phi[i, j] = th1
            if th1 > np.pi/2.0 and th2 <= np.pi/2.0:
                phi[i, j] = 270.0*np.pi/180.0 - th2
            if th1 >= np.pi/2.0 and th2 > np.pi/2.0:
                phi[i, j] = th1

    saved = store_data('mms'+probe+'_epd_feeps_'+data_rate+'_gyrophase',
                       data={'x': rsun.times, 'y': phi*180./np.pi})
    options('mms'+probe+'_epd_feeps_'+data_rate+'_gyrophase', 'yrange', [0, 360.0])

    # Gyrophase always returns on time stamps from MEC data; get those closest to the FEEPS time stamps:
    eyes = mms_feeps_active_eyes(trange, probe, data_rate, datatype, level)
    sensor_types = ['top', 'bottom']

    feepst = get_data('mms'+probe+'_epd_feeps_'+data_rate+'_'+level+'_'+datatype+'_spinsectnum')

    indt = np.zeros(len(feepst.times), dtype='int32')
    gpd = get_data('mms'+probe+'_epd_feeps_'+data_rate+'_gyrophase')

    for i in range(len(feepst.times)):
        indt[i] = np.argwhere(np.abs(gpd.times - feepst.times[i]) == np.min(np.abs(gpd.times - feepst.times[i]))).flatten()[0]

    # Gyrophase always returns all 24 FEEPS telescopes; downselect based on species:
    iT = np.array([np.array(eyes[sensor_types[0]])-1, np.array(eyes[sensor_types[0]])+11]).flatten().tolist()
    gp_data = np.zeros((len(gpd.times[indt]), len(iT)))

    for i in range(len(iT)):
        gp_data[:, i] = gpd.y[indt, iT[i]]

    saved = store_data('mms'+probe+'_epd_feeps_'+data_rate+'_'+level+'_'+datatype+'_gyrophase',
                       data={'x': gpd.times[indt], 'y': gp_data})

    if saved:
        options('mms'+probe+'_epd_feeps_'+data_rate+'_'+level+'_'+datatype+'_gyrophase', 'yrange', [0.0, 360.0])
        return 'mms'+probe+'_epd_feeps_'+data_rate+'_'+level+'_'+datatype+'_gyrophase'
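# Example usage (illustrative sketch; the FEEPS spin-sector variable referenced
# above must already be loaded -- MEC and FGM support data are loaded internally):
#
#     pyspedas.mms.feeps(trange=['2017-07-11', '2017-07-12'], probe='1', datatype='electron')
#     gp_var = gyrophase(trange=['2017-07-11', '2017-07-12'], probe='1',
#                        data_rate='srvy', level='l2', datatype='electron')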
def mms_load_brst_segments(trange=None, suffix=''):
    '''
    This function loads the burst segment intervals

    Parameters:
        trange : list of str
            time range of interest [starttime, endtime] with the format
            ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
            ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']

    Returns:
        Tuple containing (start_times, end_times)
    '''
    if trange is None:
        logging.error('Error; no trange specified.')
        return None

    tr = time_double(trange)

    save_file = os.path.join(CONFIG['local_data_dir'], 'mms_brst_intervals.sav')
    brst_file = download(remote_file='http://www.spedas.org/mms/mms_brst_intervals.sav',
                         local_file=save_file)

    if len(brst_file) == 0:
        logging.error('Error downloading burst intervals sav file')
        return None

    try:
        intervals = readsav(save_file)
    except FileNotFoundError:
        logging.error('Error loading burst intervals sav file: ' + save_file)
        return None

    unix_start = intervals['brst_intervals'].start_times[0]
    unix_end = intervals['brst_intervals'].end_times[0]

    sorted_idxs = np.argsort(unix_start)
    unix_start = unix_start[sorted_idxs]
    unix_end = unix_end[sorted_idxs]

    times_in_range = (unix_start >= tr[0] - 300.0) & (unix_start <= tr[1] + 300.0)

    unix_start = unix_start[times_in_range]
    unix_end = unix_end[times_in_range]

    # +10 second offset added; there appears to be an extra 10
    # seconds of data, consistently, not included in the range here
    unix_end = [end_time + 10.0 for end_time in unix_end]

    bar_x = []
    bar_y = []

    for start_time, end_time in zip(unix_start, unix_end):
        if end_time >= tr[0] and start_time <= tr[1]:
            bar_x.extend([start_time, start_time, end_time, end_time])
            bar_y.extend([np.nan, 0., 0., np.nan])

    vars_created = store_data('mms_bss_burst' + suffix, data={'x': bar_x, 'y': bar_y})

    if not vars_created:
        logging.error('Error creating burst segment intervals tplot variable')
        return None

    options('mms_bss_burst' + suffix, 'panel_size', 0.09)
    options('mms_bss_burst' + suffix, 'thick', 2)
    options('mms_bss_burst' + suffix, 'Color', 'green')
    options('mms_bss_burst' + suffix, 'border', False)
    options('mms_bss_burst' + suffix, 'yrange', [-0.001, 0.001])
    options('mms_bss_burst' + suffix, 'legend_names', ['Burst'])
    options('mms_bss_burst' + suffix, 'ytitle', '')

    return (unix_start, unix_end)
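# Example usage (illustrative sketch):
#
#     starts, ends = mms_load_brst_segments(trange=['2015-10-16', '2015-10-17'])
#     # the 'mms_bss_burst' bar variable can then be included in a tplot() call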
def mms_fpi_set_metadata(probe, data_rate, datatype, level, suffix=''):
    """
    This function updates the metadata for FPI data products

    Parameters:
        probe : str or list of str
            probe or list of probes, valid values for MMS probes are ['1','2','3','4'].

        data_rate : str or list of str
            instrument data rates for FPI include 'brst' and 'fast'. The default is 'fast'.

        datatype : str or list of str
            FPI datatype, e.g., 'des-moms' or 'dis-moms'.

        level : str
            indicates level of data processing. the default if no level is specified is 'l2'

        suffix: str
            The tplot variable names will be given this suffix. By default, no suffix is added.
    """
    if not isinstance(probe, list):
        probe = [probe]
    if not isinstance(data_rate, list):
        data_rate = [data_rate]
    if not isinstance(datatype, list):
        datatype = [datatype]
    if not isinstance(level, list):
        level = [level]

    probe = [str(p) for p in probe]

    tvars = set(tplot_names())

    for this_probe in probe:
        for this_dr in data_rate:
            for this_lvl in level:
                for this_dtype in datatype:
                    if this_dtype == 'des-moms':
                        # DES energy spectrograms
                        for direction in ['par', 'anti', 'perp', 'omni']:
                            var = 'mms' + this_probe + '_des_energyspectr_' + direction + '_' + this_dr + suffix
                            if var in tvars:
                                options(var, 'ytitle', 'MMS' + this_probe + ' DES (eV)')
                                options(var, 'ylog', True)
                                options(var, 'zlog', True)
                                options(var, 'Colormap', 'jet')
                                options(var, 'ztitle', '[keV/(cm^2 s sr keV)]')

                        # DES pitch angle distributions
                        for en_bin in ['lowen', 'miden', 'highen']:
                            var = 'mms' + this_probe + '_des_pitchangdist_' + en_bin + '_' + this_dr + suffix
                            if var in tvars:
                                options(var, 'zlog', True)
                                options(var, 'Colormap', 'jet')
                                options(var, 'ytitle', 'MMS' + this_probe + ' DES (deg)')
                                options(var, 'ztitle', '[keV/(cm^2 s sr keV)]')

                        # DES bulk velocities
                        for coord in ['DBCS', 'GSE']:
                            var = 'mms' + this_probe + '_des_bulkv_' + coord.lower() + '_' + this_dr + suffix
                            if var in tvars:
                                options(var, 'color', ['b', 'g', 'r'])
                                options(var, 'legend_names', ['Vx ' + coord, 'Vy ' + coord, 'Vz ' + coord])
                                options(var, 'ytitle', 'MMS' + this_probe + ' DES velocity (km/s)')

                        # DES number density
                        var = 'mms' + this_probe + '_des_numberdensity_' + this_dr + suffix
                        if var in tvars:
                            options(var, 'ytitle', 'MMS' + this_probe + ' DES density (cm^-3)')

                    elif this_dtype == 'dis-moms':
                        # DIS omni-directional energy spectrogram
                        var = 'mms' + this_probe + '_dis_energyspectr_omni_' + this_dr + suffix
                        if var in tvars:
                            options(var, 'ytitle', 'MMS' + this_probe + ' DIS (eV)')
                            options(var, 'ylog', True)
                            options(var, 'zlog', True)
                            options(var, 'Colormap', 'jet')
                            options(var, 'ztitle', '[keV/(cm^2 s sr keV)]')

                        # DIS bulk velocities
                        for coord in ['DBCS', 'GSE']:
                            var = 'mms' + this_probe + '_dis_bulkv_' + coord.lower() + '_' + this_dr + suffix
                            if var in tvars:
                                options(var, 'color', ['b', 'g', 'r'])
                                options(var, 'legend_names', ['Vx ' + coord, 'Vy ' + coord, 'Vz ' + coord])
                                options(var, 'ytitle', 'MMS' + this_probe + ' DIS velocity (km/s)')

                        # DIS number density
                        var = 'mms' + this_probe + '_dis_numberdensity_' + this_dr + suffix
                        if var in tvars:
                            options(var, 'ytitle', 'MMS' + this_probe + ' DIS density (cm^-3)')
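# Example usage (illustrative sketch; pyspedas.mms.fpi normally calls this for
# you after loading the data):
#
#     pyspedas.mms.fpi(trange=['2015-10-16', '2015-10-17'], probe='1', datatype='des-moms')
#     mms_fpi_set_metadata(probe='1', data_rate='fast', datatype='des-moms', level='l2')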
def mms_eis_omni(probe, species='proton', datatype='extof', suffix='', data_units='flux', data_rate='srvy'):
    """
    This function will calculate the omni-directional EIS spectrograms, and is automatically called from mms_load_eis

    Parameters:
        probe: str
            probe #, e.g., '4' for MMS4

        species: str
            species for calculation, e.g., 'proton'

        data_units: str
            'flux'

        datatype: str
            'extof' or 'phxtof'

        data_rate: str
            instrument data rate, e.g., 'srvy' or 'brst'

        suffix: str
            suffix of the loaded data

    Returns:
        Name of tplot variable created.
    """
    probe = str(probe)
    species_str = datatype + '_' + species

    if data_rate == 'brst':
        prefix = 'mms' + probe + '_epd_eis_brst_'
    else:
        prefix = 'mms' + probe + '_epd_eis_'

    telescopes = tnames(pattern=prefix + species_str + '_*' + data_units + '_t?' + suffix)

    if len(telescopes) == 6:
        time, data, energies = get_data(telescopes[0])
        flux_omni = np.zeros((len(time), len(energies)))

        for t in telescopes:
            time, data, energies = get_data(t)
            flux_omni = flux_omni + data

        out_var = prefix + species_str + '_' + data_units + '_omni' + suffix
        store_data(out_var, data={'x': time, 'y': flux_omni / 6., 'v': energies})
        options(out_var, 'spec', 1)
        options(out_var, 'ylog', 1)
        options(out_var, 'zlog', 1)
        options(out_var, 'yrange', [14, 45])
        options(out_var, 'Colormap', 'jet')
        return out_var
    else:
        print('Error, problem finding the telescopes to calculate omni-directional spectrograms')
        return None
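# Example usage (illustrative sketch; mms_load_eis normally calls this for you):
#
#     pyspedas.mms.eis(trange=['2015-10-16', '2015-10-17'], probe='1', datatype='extof')
#     omni_var = mms_eis_omni('1', species='proton', datatype='extof')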
def corona(iuvs, sameplot=True, density=True, radiance=True, orbit_num=None,
           species=None, log=False, title='IUVS Corona Observations', qt=True, exec_qt=True):
    '''
    Plot IUVS Corona Scan data against spacecraft altitude.

    Parameters:
        iuvs : dict
            iuvs kp data structure/dictionary read from file(s)
        orbit_num : list of int
            The orbit numbers to plot from the IUVS data structure
        species : list of str
            The species to plot.  Values can be
            Density - CO2, CO2+, O, N2, C, N, H
            Radiance - CO2pUVD, CO, H, O_1304, O_1356, O_2972, C_1561, C_1657, N_1493, N2, NO
        radiance : bool
            If true, plots the radiance
        density : bool
            If true, plots the density
        sameplot : bool
            if True, put all curves on same axes
            if False, generate new axes for each plot
        title : str
            The title to give the plot
        log : bool
            If true, displays the y axis on a log scale
        qt : bool
            If true, plots with qt.  Else creates an HTML page with bokeh.
        exec_qt : bool
            If False, does not run the event loop for pyqtgraph.

    Returns : None

    Examples:
        >>> # Plot N2 density vs spacecraft altitude for orbit 2726.
        >>> insitu, iuvs = pydivide.read(input_time=['2016-02-01', '2016-02-28'])
        >>> pydivide.corona(iuvs, species='N2', orbit_num=2726, log=True, density=True, radiance=False, qt=False)
    '''
    density_names_to_plot = []
    density_legend_names = []
    dplot = 0
    radiance_names_to_plot = []
    radiance_legend_names = []
    rplot = 0

    if not isinstance(species, builtins.list):
        species = [species]
    if not isinstance(orbit_num, builtins.list):
        orbit_num = [orbit_num]

    if orbit_num != [None]:
        restrict_orbit = True
    else:
        restrict_orbit = False

    if species != [None]:
        restrict_species = True
    else:
        restrict_species = False

    xmin = []
    xmax = []

    for orbit in iuvs:
        for obs in orbit:
            if obs.lower() == 'corona_lores_high':
                if restrict_orbit and int(orbit[obs]['orbit_number']) not in orbit_num:
                    continue
                if density:
                    x = np.array(orbit[obs]['density']['ALTITUDE'])
                    for var in orbit[obs]['density']:
                        if var.lower() != "altitude":
                            if restrict_species and var not in species:
                                continue
                            if not np.isnan(orbit[obs]['density'][var]).all():
                                xmin.append(np.min(x))
                                xmax.append(np.max(x))
                                density_names_to_plot.append(obs + '_density_' + var + '_' + str(orbit[obs]['orbit_number']))
                                density_legend_names.append('Orbit ' + str(orbit[obs]['orbit_number']) + ' ' + var + ' density')
                                data = np.array(orbit[obs]['density'][var])
                                alts = x[~np.isnan(data)]
                                data = data[~np.isnan(data)]
                                fake_times = np.arange(len(alts))
                                pytplot.store_data(density_names_to_plot[dplot], data={'x': fake_times, 'y': data})
                                pytplot.store_data(density_names_to_plot[dplot] + "_alt", data={'x': fake_times, 'y': alts})
                                pytplot.options(density_names_to_plot[dplot], "link", ['alt', density_names_to_plot[dplot] + "_alt"])
                                pytplot.options(density_names_to_plot[dplot], 'alt', 1)
                                dplot += 1
                if radiance:
                    x = np.array(orbit[obs]['radiance']['ALTITUDE'])
                    for var in orbit[obs]['radiance']:
                        if var.lower() != "altitude":
                            if restrict_species and var not in species:
                                continue
                            if not np.isnan(orbit[obs]['radiance'][var]).all():
                                xmin.append(np.min(x))
                                xmax.append(np.max(x))
                                radiance_names_to_plot.append(obs + '_radiance_' + var + '_' + str(orbit[obs]['orbit_number']))
                                radiance_legend_names.append('Orbit ' + str(orbit[obs]['orbit_number']) + ' ' + var + ' radiance')
                                data = np.array(orbit[obs]['radiance'][var])
                                alts = x[~np.isnan(data)]
                                data = data[~np.isnan(data)]
                                fake_times = np.arange(len(alts))
                                pytplot.store_data(radiance_names_to_plot[rplot], data={'x': fake_times, 'y': data})
                                pytplot.store_data(radiance_names_to_plot[rplot] + "_alt", data={'x': fake_times, 'y': alts})
                                pytplot.options(radiance_names_to_plot[rplot], "link", ['alt', radiance_names_to_plot[rplot] + "_alt"])
                                pytplot.options(radiance_names_to_plot[rplot], 'alt', 1)
                                rplot += 1

    if radiance and rplot == 0:
        print("There is no corona radiance data in the given IUVS variable")
        radiance = False
    if density and dplot == 0:
        print("There is no corona density data in the given IUVS variable")
        density = False

    list_of_plots = []
    if sameplot:
        if density:
            pytplot.store_data('corona_lores_high_density', data=density_names_to_plot)
            list_of_plots.append('corona_lores_high_density')
            pytplot.options('corona_lores_high_density', 'alt', 1)
            if log:
                pytplot.options('corona_lores_high_density', 'ylog', 1)
            pytplot.options('corona_lores_high_density', 'legend_names', density_legend_names)
        if radiance:
            pytplot.store_data('corona_lores_high_radiance', data=radiance_names_to_plot)
            list_of_plots.append('corona_lores_high_radiance')
            pytplot.options('corona_lores_high_radiance', 'alt', 1)
            if log:
                pytplot.options('corona_lores_high_radiance', 'ylog', 1)
            pytplot.options('corona_lores_high_radiance', 'legend_names', radiance_legend_names)
    else:
        i = 0
        for d in density_names_to_plot:
            list_of_plots.append(d)
            pytplot.options(d, 'ytitle', density_legend_names[i])
            if log:
                pytplot.options(d, 'ylog', 1)
            i += 1
        i = 0
        for r in radiance_names_to_plot:
            list_of_plots.append(r)
            pytplot.options(r, 'ytitle', radiance_legend_names[i])
            if log:
                pytplot.options(r, 'ylog', 1)
            i += 1

    pytplot.tplot_options('alt_range', [np.min(xmin), np.max(xmax)])
    pytplot.tplot_options('title', title)
    pytplot.tplot_options('wsize', [1000, 400 * len(list_of_plots)])
    pytplot.tplot(list_of_plots, bokeh=not qt, exec_qt=exec_qt)
    pytplot.del_data(list_of_plots)
    return
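# Example usage (illustrative sketch, following the docstring example):
#
#     insitu, iuvs = pydivide.read(input_time=['2016-02-01', '2016-02-28'])
#     corona(iuvs, species=['N2'], density=True, radiance=False, log=True, qt=False)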
def mms_eis_pad(scopes=['0', '1', '2', '3', '4', '5'], probe='1', level='l2',
                data_rate='srvy', datatype='extof', species='proton',
                data_units='flux', energy=[55, 800], size_pabin=15, suffix=''):
    """
    Calculate pitch angle distributions using data from the MMS Energetic Ion Spectrometer (EIS)

    Parameters
    ----------
        scopes: list of str
            telescope #s to include in the calculation

        probe: str
            probe #, e.g., '4' for MMS4

        level: str
            data level for calculation (default: 'l2')

        data_rate: str
            instrument data rate, e.g., 'srvy' or 'brst' (default: 'srvy')

        datatype: str
            'extof' or 'phxtof' (default: 'extof')

        species: str
            species for calculation (default: 'proton')

        data_units: str
            'flux' or 'cps' (default: 'flux')

        energy: list of float
            energy range to include in the calculation (default: [55, 800])

        size_pabin: int
            size of the pitch angle bins, in degrees (default: 15)

        suffix: str
            suffix of the loaded data

    Returns:
        Names of tplot variables created.
    """
    # allow the user to input probe, datatype and species as a string or a list of strings
    if not isinstance(probe, list):
        probe = [probe]
    if not isinstance(datatype, list):
        datatype = [datatype]
    if not isinstance(species, list):
        species = [species]

    if data_units == 'cps':
        units_label = '1/s'
    else:
        units_label = '1/(cm^2-sr-s-keV)'

    if len(scopes) == 1:
        # note: scopes is a list, so index into it (the original listing
        # concatenated the list itself, which raises a TypeError)
        scope_suffix = '_t' + scopes[0] + suffix
    elif len(scopes) == 6:
        scope_suffix = '_omni' + suffix

    # set up the number of pa bins to create
    n_pabins = 180. / size_pabin
    pa_label = [180. * n_pabin / n_pabins + size_pabin / 2. for n_pabin in range(0, int(n_pabins))]

    # Account for angular response (finite field of view) of instruments
    pa_halfang_width = 10.0  # deg
    delta_pa = size_pabin / 2.

    out_vars = []

    # the probes will need to be strings beyond this point
    probe = [str(p) for p in probe]

    logging.info('Calculating the EIS pitch angle distribution; this may take several minutes')

    for probe_id in probe:
        prefix = 'mms' + probe_id + '_epd_eis_' + data_rate + '_' + level + '_'
        for datatype_id in datatype:
            pa_data = get_data(prefix + datatype_id + '_pitch_angle_t0' + suffix)
            if pa_data is None:
                logging.error('No ' + data_rate + ' ' + datatype_id + ' data is currently loaded for MMS' + probe_id + ' for the selected time period')
                return

            for species_id in species:
                pa_times, pa_data = get_data(prefix + datatype_id + '_pitch_angle_t0' + suffix)
                pa_file = np.zeros([len(pa_times), len(scopes)])

                omni_times, omni_data, omni_energies = get_data(prefix + datatype_id + '_' + species_id + '_' + data_units + '_omni' + suffix)

                erange = get_data(prefix + datatype_id + '_' + species_id + '_energy_range' + suffix)

                # flag energy channels whose lower/upper bounds fall inside the requested range
                inchan_check_low = np.zeros(len(omni_energies))
                inchan_check_hi = np.zeros(len(omni_energies))
                inchan_check_low[np.where((erange[:, 0] >= energy[0]) & (erange[:, 0] <= energy[1]))[0]] = 1
                inchan_check_hi[np.where((erange[:, 1] >= energy[0]) & (erange[:, 1] <= energy[1]))[0]] = 1

                these_energies = np.where((inchan_check_low > 0) | (inchan_check_hi > 0))[0]

                if sum(inchan_check_low) == 0 or sum(inchan_check_hi) == 0:
                    logging.error('Energy range selected is not covered by the detector for ' + datatype_id + ' ' + species_id + ' ' + data_units)
                    continue

                flux_file = np.full([len(pa_times), len(scopes), len(these_energies)], np.nan)
                pa_flux = np.full([len(pa_times), int(n_pabins), len(these_energies)], np.nan)
                pa_num_in_bin = np.zeros([len(pa_times), int(n_pabins), len(these_energies)])

                for t, scope in enumerate(scopes):
                    pa_times, pa_data = get_data(prefix + datatype_id + '_pitch_angle_t' + scope + suffix)
                    pa_file[:, t] = pa_data

                    # use wild cards to figure out what this variable name should be for telescope 0
                    this_variable = tnames(prefix + datatype_id + '_' + species_id + '*_' + data_units + '_t0' + suffix)
                    if level == 'l2' or level == 'l1b':
                        pvalue = this_variable[0].split('_')[7]
                    else:
                        pvalue = ''

                    # get flux from each detector
                    flux_times, flux_data, flux_energies = get_data(prefix + datatype_id + '_' + species_id + '_' + pvalue + '_' + data_units + '_t' + scope + suffix)

                    # get energy range of interest
                    e = flux_energies[these_energies]
                    flux_file[:, t, :] = flux_data[:, these_energies]

                # CREATE PAD VARIABLES FOR EACH ENERGY CHANNEL IN USER-DEFINED ENERGY RANGE
                for i, flux_time in enumerate(flux_times):
                    for j in range(0, int(n_pabins)):
                        for ee in range(0, len(these_energies)):
                            ind = np.where((pa_file[i, :] + pa_halfang_width >= pa_label[j] - delta_pa) & (pa_file[i, :] - pa_halfang_width < pa_label[j] + delta_pa))[0]
                            if ind.size != 0:
                                pa_flux[i, j, ee] = nanmean(flux_file[i, ind, ee], axis=0)

                for ee in range(0, len(these_energies)):
                    energy_string = str(int(erange[these_energies[ee], 0])) + '_' + str(int(erange[these_energies[ee], 1])) + 'keV'
                    new_name = prefix + datatype_id + '_' + energy_string + '_' + species_id + '_' + data_units + scope_suffix + '_pad'

                    store_data(new_name, data={'x': flux_times, 'y': pa_flux[:, :, ee], 'v': pa_label})
                    options(new_name, 'ylog', False)
                    options(new_name, 'zlog', True)
                    options(new_name, 'spec', True)
                    options(new_name, 'Colormap', 'jet')
                    options(new_name, 'ztitle', units_label)
                    options(new_name, 'ytitle', 'MMS' + str(probe_id) + ' ' + datatype_id + ' PA (deg)')
                    out_vars.append(new_name)

                try:
                    store_data(prefix + datatype_id + '_' + species_id + '_' + data_units + scope_suffix + '_pads',
                               data={'x': flux_times, 'y': pa_flux, 'v1': pa_label, 'v2': omni_energies[these_energies]})
                    out_vars.append(prefix + datatype_id + '_' + species_id + '_' + data_units + scope_suffix + '_pads')
                except ValueError:
                    # kludge to avoid crash in case of single energy
                    logging.error('Problem creating: ' + prefix + datatype_id + '_' + species_id + '_' + data_units + scope_suffix + '_pads')

                # only create the integral PAD variable if the user-defined energy range covers more than 1 EIS energy channel
                if these_energies.size == 1:
                    continue

                # CREATE PAD VARIABLE INTEGRATED OVER USER-DEFINED ENERGY RANGE
                energy_range_string = str(int(erange[these_energies[0], 0])) + '-' + str(int(erange[these_energies[-1], 1])) + 'keV'
                new_name = prefix + datatype_id + '_' + energy_range_string + '_' + species_id + '_' + data_units + scope_suffix + '_pad'

                avg_pa_flux = np.full([len(flux_times), int(n_pabins)], np.nan)

                for tt in range(0, len(flux_times)):
                    for bb in range(0, int(n_pabins)):
                        with warnings.catch_warnings():
                            warnings.simplefilter("ignore", category=RuntimeWarning)
                            avg_pa_flux[tt, bb] = nanmean(pa_flux[tt, bb, :])

                store_data(new_name, data={'x': flux_times, 'y': avg_pa_flux, 'v': pa_label})
                options(new_name, 'ylog', False)
                options(new_name, 'zlog', True)
                options(new_name, 'spec', True)
                options(new_name, 'Colormap', 'jet')
                options(new_name, 'ztitle', units_label)
                options(new_name, 'ytitle', 'MMS' + str(probe_id) + ' ' + datatype_id + ' PA (deg)')
                out_vars.append(new_name)

                spin_avg_pads = mms_eis_pad_spinavg(scopes=scopes, probe=probe_id, data_rate=data_rate,
                                                    level=level, datatype=datatype_id,
                                                    data_units=data_units, species=species_id,
                                                    energy=energy, size_pabin=size_pabin, suffix=suffix)
                for spin_avg_pad in spin_avg_pads:
                    out_vars.append(spin_avg_pad)

    return out_vars
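# Example usage (illustrative sketch; requires loaded EIS data plus the omni
# variables created by mms_eis_omni for the same probe/rate):
#
#     pyspedas.mms.eis(trange=['2015-10-16', '2015-10-17'], probe='1')
#     pad_vars = mms_eis_pad(probe='1', species='proton', energy=[55, 800], size_pabin=15)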
def mms_feeps_pad(bin_size=16.3636, probe='1', energy=[70, 600], level='l2', suffix='',
                  datatype='electron', data_units='intensity', data_rate='srvy',
                  angles_from_bfield=False):
    """
    This function will calculate pitch angle distributions using data from the
    MMS Fly's Eye Energetic Particle Sensor (FEEPS)

    Parameters:
        probe: str
            probe #, e.g., '4' for MMS4

        data_units: str
            'intensity'

        datatype: str
            'electron' or 'ion'

        data_rate: str
            instrument data rate, e.g., 'srvy' or 'brst'

        level: str
            data level

        suffix: str
            suffix of the loaded data

        energy: list of float
            energy range to include in the calculation

        bin_size: float
            size of the pitch angle bins

        angles_from_bfield: bool
            calculate the pitch angles from the B-field data instead of reading from the CDFs

    Returns:
        List of tplot variables created.
    """
    # account for angular response (finite field of view) of instruments:
    # electrons can use +/- 21.4 deg on each pitch angle as average response angle;
    # ions can start with +/- 10 deg, but both need to be further refined
    if datatype == 'electron':
        dangresp = 21.4  # deg
    elif datatype == 'ion':
        dangresp = 10.0  # deg

    if energy[0] < 32.0:
        logging.error('Please select a starting energy of 32 keV or above')
        return

    units_label = ''
    if data_units == 'intensity':
        units_label = '1/(cm^2-sr-s-keV)'
    elif data_units == 'counts':
        units_label = '[counts/s]'

    if not isinstance(probe, str):
        probe = str(probe)

    prefix = 'mms' + probe
    n_pabins = 180/bin_size
    pa_bins = [180.*pa_bin/n_pabins for pa_bin in range(0, int(n_pabins)+1)]
    pa_label = [180.*pa_bin/n_pabins+bin_size/2. for pa_bin in range(0, int(n_pabins))]

    if data_rate == 'brst' and not angles_from_bfield:
        # v5.5+ = mms1_epd_feeps_srvy_l2_electron_pitch_angle
        pa_times, pa_data = get_data(prefix+'_epd_feeps_'+data_rate+'_'+level+'_'+datatype+'_pitch_angle'+suffix)
    else:
        pa_var, idx_maps = mms_feeps_pitch_angles(probe=probe, level=level, data_rate=data_rate, datatype=datatype, suffix=suffix)
        pa_times, pa_data = get_data(pa_var)

    if pa_data is None:
        logging.error("Error, couldn't find the PA variable")
        return

    eyes = mms_feeps_active_eyes([pa_times.min(), pa_times.max()], probe, data_rate, datatype, level)

    pa_data_map = {}

    if data_rate == 'srvy':
        if datatype == 'electron':
            pa_data_map['top-electron'] = idx_maps['electron-top']
            pa_data_map['bottom-electron'] = idx_maps['electron-bottom']
        if datatype == 'ion':
            pa_data_map['top-ion'] = idx_maps['ion-top']
            pa_data_map['bottom-ion'] = idx_maps['ion-bottom']
    elif data_rate == 'brst':
        # note: the following are indices of the top/bottom sensors in pa_data;
        # they should be consistent with pa_dlimits.labels
        pa_data_map['top-electron'] = [0, 1, 2, 3, 4, 5, 6, 7, 8]
        pa_data_map['bottom-electron'] = [9, 10, 11, 12, 13, 14, 15, 16, 17]
        # and ions:
        pa_data_map['top-ion'] = [0, 1, 2]
        pa_data_map['bottom-ion'] = [3, 4, 5]

    sensor_types = ['top', 'bottom']

    if datatype == 'electron':
        dflux = np.zeros([len(pa_times), len(pa_data_map['top-electron'])+len(pa_data_map['bottom-electron'])])
        dpa = np.zeros([len(pa_times), len(pa_data_map['top-electron'])+len(pa_data_map['bottom-electron'])])
    elif datatype == 'ion':
        dflux = np.zeros([len(pa_times), len(pa_data_map['top-ion'])+len(pa_data_map['bottom-ion'])])
        dpa = np.zeros([len(pa_times), len(pa_data_map['top-ion'])+len(pa_data_map['bottom-ion'])])

    for s_type in sensor_types:
        pa_map = pa_data_map[s_type+'-'+datatype]
        particle_idxs = [eye-1 for eye in eyes[s_type]]
        for isen, sensor_num in enumerate(particle_idxs):
            var_name = 'mms'+str(probe)+'_epd_feeps_'+data_rate+'_'+level+'_'+datatype+'_'+s_type+'_'+data_units+'_sensorid_'+str(sensor_num+1)+'_clean_sun_removed'+suffix
            times, data, energies = get_data(var_name)
            data[data == 0] = np.nan  # remove any 0s before averaging
            if np.isnan(energies[0]):  # assumes all energies are NaNs if the first is
                continue
            # energy indices to use:
            indx = np.where((energies >= energy[0]) & (energies <= energy[1]))
            with warnings.catch_warnings():
                warnings.simplefilter("ignore", category=RuntimeWarning)
                dflux[:, pa_map[isen]] = nanmean(data[:, indx[0]], axis=1)
            dpa[:, pa_map[isen]] = pa_data[:, pa_map[isen]]

    # we need to replace the 0.0s left in after populating dpa with NaNs; these
    # 0.0s are left in there because these points aren't covered by sensors loaded
    # for this datatype/data_rate
    dpa[dpa == 0] = np.nan

    pa_flux = np.zeros([len(pa_times), int(n_pabins)])
    delta_pa = (pa_bins[1]-pa_bins[0])/2.0

    # Now loop through PA bins and time, find the telescopes where there is data in those bins and average it up!
    for pa_idx, pa_time in enumerate(pa_times):
        for ipa in range(0, int(n_pabins)):
            if not np.isnan(dpa[pa_idx, :][0]):
                with warnings.catch_warnings():
                    warnings.simplefilter("ignore", category=RuntimeWarning)
                    ind = np.where((dpa[pa_idx, :] + dangresp >= pa_label[ipa]-delta_pa) & (dpa[pa_idx, :]-dangresp < pa_label[ipa]+delta_pa))
                    if ind[0].size != 0:
                        if len(ind[0]) > 1:
                            pa_flux[pa_idx, ipa] = nanmean(dflux[pa_idx, ind[0]], axis=0)
                        else:
                            pa_flux[pa_idx, ipa] = dflux[pa_idx, ind[0]]

    pa_flux[pa_flux == 0] = np.nan  # fill any missed bins with NaN

    en_range_string = str(int(energy[0])) + '-' + str(int(energy[1])) + 'keV'
    new_name = 'mms'+probe+'_epd_feeps_'+data_rate+'_'+level+'_'+datatype+'_'+data_units+'_'+en_range_string+'_pad'+suffix

    store_data(new_name, data={'x': times, 'y': pa_flux, 'v': pa_label})
    options(new_name, 'ylog', False)
    options(new_name, 'zlog', True)
    options(new_name, 'spec', True)
    options(new_name, 'Colormap', 'jet')
    options(new_name, 'ztitle', units_label)
    options(new_name, 'ytitle', 'MMS' + str(probe) + ' ' + datatype + ' PA (deg)')

    # create the spin-averaged PAD
    spin_avg_var = mms_feeps_pad_spinavg(probe=probe, data_units=data_units, datatype=datatype,
                                         data_rate=data_rate, level=level, suffix=suffix,
                                         energy=energy, bin_size=bin_size)

    return [new_name, spin_avg_var]
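# Example usage (illustrative sketch; the '..._clean_sun_removed' sensor
# variables are produced by the FEEPS load routine):
#
#     pyspedas.mms.feeps(trange=['2015-10-16', '2015-10-17'], probe='1', datatype='electron')
#     pad_vars = mms_feeps_pad(probe='1', datatype='electron', energy=[70, 600])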
def cal_fit(probe='a', no_cal=False):
    """
    Converts raw FIT parameter data into physical quantities.

    Warning: this function is in a debug state.
    Currently, it assumes that the "th?_fit" variable is already loaded

    Parameters:
        probe: str
            Spacecraft probe letter ('a', 'b', 'c', 'd' and/or 'e')
        no_cal: bool
            If true, do not apply the boom shortening factor or Ex offset defaults

    Returns:
        th?_fgs tplot variable
    """
    import math
    import numpy as np
    from pytplot import get_data, store_data, tplot_names, options
    from pyspedas.utilities.download import download
    from pyspedas.themis.config import CONFIG
    from pyspedas.utilities.time_double import time_float_one
    from copy import deepcopy
    from numpy.linalg import inv

    # calibration parameters
    lv12 = 49.6  # m
    lv34 = 40.4  # m
    lv56 = 5.6   # m

    # these values provide better agreement with IDL
    lv12 = 49.599997
    lv34 = 40.400003
    lv56 = 5.59999981

    # calibration table
    cpar = {"e12": {"cal_par_time": '2002-01-01/00:00:00',
                    "Ascale": -15000.0 / (lv12 * 2. ** 15.),
                    "Bscale": -15000.0 / (lv12 * 2. ** 15.),
                    "Cscale": -15000.0 / (lv12 * 2. ** 15.),
                    "theta": 0.0,
                    "sigscale": 15000. / (lv12 * 2. ** 15.),
                    "Zscale": -15000. / (lv56 * 2. ** 15.),
                    "units": 'mV/m'},
            "e34": {"cal_par_time": '2002-01-01/00:00:00',
                    "Ascale": -15000.0 / (lv34 * 2. ** 15.),
                    "Bscale": -15000.0 / (lv34 * 2. ** 15.),
                    "Cscale": -15000.0 / (lv34 * 2. ** 15.),
                    "theta": 0.0,
                    "sigscale": 15000. / (lv34 * 2. ** 15.),
                    "Zscale": -15000. / (lv56 * 2. ** 15.),
                    "units": 'mV/m'},
            "b": {"cal_par_time": '2002-01-01/00:00:00',
                  "Ascale": 1.,
                  "Bscale": 1.,
                  "Cscale": 1.,
                  "theta": 0.0,
                  "sigscale": 1.,
                  "Zscale": 1.,
                  "units": 'nT'}}

    # tplot options
    color_str = ['blue', 'green', 'red']
    color_str2 = ['magenta', 'blue', 'cyan', 'green', 'orange']
    b_str = ['Bx', 'By', 'Bz']
    e_str = ['Ex', 'Ey', 'Ez']
    b_units = cpar['b']['units']
    e_units = cpar['e12']['units']
    b_units_str = f'[{b_units}]'
    e_units_str = f'[{e_units}]'

    b_data_att = {'units': b_units, 'cal_par_time': cpar['b']['cal_par_time'],
                  'data_type': 'calibrated', 'coord_sys': 'dsl'}
    b_data_att_sigma = {'units': b_units}
    e_data_att = {'units': e_units, 'cal_par_time': cpar['e12']['cal_par_time'],
                  'data_type': 'calibrated', 'coord_sys': 'dsl'}
    e_data_att_sigma = {'units': e_units}

    b_opt_dict = {'legend_names': b_str, 'ysubtitle': b_units_str, 'color': color_str, 'alpha': 1}
    e_opt_dict = {'legend_names': e_str, 'ysubtitle': e_units_str, 'color': color_str, 'alpha': 1}
    # TODO: tplot does not show 5th legend name
    b_opt_dict2 = {'legend_names': ['A', 'B', 'C', 'Sig', '<Bz>'], 'ysubtitle': b_units_str, 'color': color_str2, 'alpha': 1}
    e_opt_dict2 = {'legend_names': ['A', 'B', 'C', 'Sig', '<Ez>'], 'ysubtitle': e_units_str, 'color': color_str2, 'alpha': 1}

    # Get list of tplot variables
    tnames = tplot_names(True)  # True for quiet output

    # Get data from th?_fit variable
    tvar = 'th' + probe + '_fit'

    # B-field fit (FGM) processing

    # TODO: Check tvar existence
    if tvar not in tnames:
        return

    # Using deep copy to create an independent instance
    d = deepcopy(get_data(tvar))  # NOTE: Indexes are not the same as in SPEDAS, e.g. 27888x2x5

    # establish probe number in cal tables
    sclist = {'a': 0, 'b': 1, 'c': 2, 'd': 3, 'e': 4, 'f': -1}  # for probe 'f' no flatsat FGM cal files
    # TODO: Add processing of probe f
    scn = sclist[probe]

    # Rotation vectors
    rotBxy_angles = [29.95, 29.95, 29.95, 29.95, 29.95]  # vassilis 6/2/2007: deg to rotate FIT on spin plane to match DSL on 5/4
    rotBxy = rotBxy_angles[scn]  # vassilis 4/28: probably should be part of CAL table as well...
    cs = math.cos(rotBxy * math.pi / 180)  # vassilis
    sn = math.sin(rotBxy * math.pi / 180)  # vassilis

    adc2nT = 50000. / 2. ** 24  # vassilis 2007-04-03

    # B-field fit (FGM)
    i = 1

    d.y[:, i, 0] = cpar['b']['Ascale'] * d.y[:, i, 0] * adc2nT  # vassilis
    d.y[:, i, 1] = cpar['b']['Bscale'] * d.y[:, i, 1] * adc2nT  # vassilis
    d.y[:, i, 2] = cpar['b']['Cscale'] * d.y[:, i, 2] * adc2nT  # vassilis
    d.y[:, i, 3] = cpar['b']['sigscale'] * d.y[:, i, 3] * adc2nT  # vassilis
    d.y[:, i, 4] = cpar['b']['Zscale'] * d.y[:, i, 4] * adc2nT  # vassilis

    # Calculating Bzoffset using thx+'/l1/fgm/0000/'+thx+'_fgmcal.txt'
    thx = 'th' + probe
    remote_name = thx + '/l1/fgm/0000/' + thx + '_fgmcal.txt'
    calfile = download(remote_file=remote_name,
                       remote_path=CONFIG['remote_data_dir'],
                       local_path=CONFIG['local_data_dir'],
                       no_download=False)
    # TODO: Add file check
    caldata = np.loadtxt(calfile[0], converters={0: time_float_one})
    # TODO: In SPEDAS we check if data is already calibrated

    # Limit to the time of interest
    caltime = caldata[:, 0]
    t1 = np.nonzero(caltime < d.times.min())[0]  # left time
    t2 = np.nonzero(caltime <= d.times.max())[0]  # right time

    Bzoffset = np.zeros(d.times.shape)
    if t1.size + t2.size > 1:  # Time range exists in the file
        tidx = np.arange(t1[-1], t2[-1] + 1)
        caltime = caltime[tidx]
        offi = caldata[tidx, 1:4]   # col 1-3
        cali = caldata[tidx, 4:13]  # col 4-13
        # spinperii = caldata[tidx, 13]  # col 14 not in use
        flipxz = -1 * np.fliplr(np.identity(3))
        # SPEDAS: offi2 = invert(transpose([cali[istart, 0:2], cali[istart, 3:5], cali[istart, 6:8]]) ## flipxz) ## offi[istart, *]
        for t in range(0, caltime.size):
            offi2 = inv(np.c_[cali[t, 0:3], cali[t, 3:6], cali[t, 6:9]].T @ flipxz) @ offi[t, :]
            tidx = d.times >= caltime[t]
            Bzoffset[tidx] = offi2[2]  # last element

    Bxprime = cs * d.y[:, i, 1] + sn * d.y[:, i, 2]
    Byprime = -sn * d.y[:, i, 1] + cs * d.y[:, i, 2]
    Bzprime = -d.y[:, i, 4] - Bzoffset  # vassilis 4/28 (SUBTRACTING offset from spin-axis POSITIVE direction)

    # d is a namedtuple and does not support direct copy by value
    dprime = deepcopy(d)
    dprime.y[:, i, 1] = Bxprime  # vassilis DSL
    dprime.y[:, i, 2] = Byprime  # vassilis DSL
    dprime.y[:, i, 4] = Bzprime  # vassilis DSL

    # Create fgs variable and remove nans
    fgs = dprime.y[:, i, [1, 2, 4]]
    idx = ~np.isnan(fgs[:, 0])  # TODO: check this criteria. IDL returns fewer points
    fgs_data = {'x': d.times[idx], 'y': fgs[idx, :]}

    # Save fgs tplot variable
    tvar = 'th' + probe + '_fgs'
    store_data(tvar, fgs_data, attr_dict=b_data_att)
    options(tvar, opt_dict=b_opt_dict)

    # Save fgs_sigma variable
    fit_sigma_data = {'x': d.times[idx], 'y': d.y[idx, i, 3]}
    tvar = 'th' + probe + '_fgs_sigma'
    store_data(tvar, fit_sigma_data, attr_dict=b_data_att_sigma)
    options(tvar, opt_dict=b_opt_dict)

    # Save bfit variable
    bfit_data = {'x': d.times[:], 'y': d.y[:, i, :].squeeze()}
    tvar = 'th' + probe + '_fit_bfit'
    store_data(tvar, bfit_data, attr_dict=b_data_att)
    options(tvar, opt_dict=b_opt_dict2)

    # E-field fit (EFI) processing

    # Get data from th?_fit_code variable
    tvar = 'th' + probe + '_fit_code'
    d_code = None  # Blank variable, if d_code is not used
    if tvar in tnames:
        i = 0
        d_code = get_data(tvar)
        e12_ss = (d_code.y[:, i] == int("e1", 16)) | (d_code.y[:, i] == int("e5", 16))
        e34_ss = (d_code.y[:, i] == int("e3", 16)) | (d_code.y[:, i] == int("e7", 16))
    else:
        # Default values (if no code)
        ne12 = d.times.size
        e12_ss = np.ones(ne12, dtype=bool)   # create index arrays
        e34_ss = np.zeros(ne12, dtype=bool)

    # Save 'efs' datatype before "hard wired" calibrations.
    # An EFI-style calibration is performed below.
    i = 0
    efs = d.y[:, i, [1, 2, 4]]
    # Locate samples with non-NaN data values. Save the indices in
    # efsx_good, then at the end of calibration, pull the "good"
    # indices out of the calibrated efs[] array to make the thx_efs
    # tplot variable.
    efsx_good = ~np.isnan(efs[:, 0])  # TODO: check this criteria.

    if np.any(efsx_good):  # TODO: include processing of 'efs' where efsx_fixed is used
        if np.any(e34_ss):  # rotate efs 90 degrees if necessary, if e34 was used in spinfit
            efs[e34_ss, :] = d.y[e34_ss, i, [2, 1, 4]]
            efs[e34_ss, 0] = -efs[e34_ss, 0]

    efsz = d.y[:, i, 4]  # save Ez separately, for the possibility that it's the SC potential

    # Use cpar to calibrate
    if np.any(e12_ss):
        d.y[e12_ss, i, 0] = cpar["e12"]["Ascale"] * d.y[e12_ss, i, 0]
        d.y[e12_ss, i, 1] = cpar["e12"]["Bscale"] * d.y[e12_ss, i, 1]
        d.y[e12_ss, i, 2] = cpar["e12"]["Cscale"] * d.y[e12_ss, i, 2]
        d.y[e12_ss, i, 3] = cpar["e12"]["sigscale"] * d.y[e12_ss, i, 3]
        d.y[e12_ss, i, 4] = cpar["e12"]["Zscale"] * d.y[e12_ss, i, 4]
    if np.any(e34_ss):
        d.y[e34_ss, i, 0] = cpar["e34"]["Ascale"] * d.y[e34_ss, i, 0]
        d.y[e34_ss, i, 1] = cpar["e34"]["Bscale"] * d.y[e34_ss, i, 1]
        d.y[e34_ss, i, 2] = cpar["e34"]["Cscale"] * d.y[e34_ss, i, 2]
        d.y[e34_ss, i, 3] = cpar["e34"]["sigscale"] * d.y[e34_ss, i, 3]
        d.y[e34_ss, i, 4] = cpar["e34"]["Zscale"] * d.y[e34_ss, i, 4]

    # save fit_efit variable
    fit_efit_data = {'x': d.times, 'y': d.y[:, i, :]}
    tvar = 'th' + probe + '_fit_efit'
    store_data(tvar, fit_efit_data, attr_dict=e_data_att)
    options(tvar, opt_dict=e_opt_dict2)

    # thx_efs and thx_efs_sigma:
    # Calibrate efs data by applying E12 calibration factors, not despinning, then applying despun (spin-dependent)
    # calibration factors from E12 (the spin-independent offset is subtracted on-board):

    # Load calibration file, e.g. tha/l1/eff/0000/tha_efi_calib_params.txt
    remote_name = thx + '/l1/eff/0000/' + thx + '_efi_calib_params.txt'
    eficalfile = download(remote_file=remote_name,
                          remote_path=CONFIG['remote_data_dir'],
                          local_path=CONFIG['local_data_dir'],
                          no_download=False)
    # TODO: Add file check
    colnums = {"time": [0], "edc_offset": [14, 15, 16], "edc_gain": [17, 18, 19],
               "BOOM_LENGTH": [26, 27, 28], "BOOM_SHORTING_FACTOR": [29, 30, 31],
               "DSC_OFFSET": [32, 33, 34]}  # List of columns to be loaded
    collist = list()
    [collist.extend(cnum) for cnum in colnums.values()]
    collist.sort()  # ensure that the list of columns is sorted
    eficaltxt = np.loadtxt(eficalfile[0], skiprows=1, max_rows=1,
                           converters={0: time_float_one}, usecols=collist)
    eficaldata = {"time": eficaltxt[0], "gain": eficaltxt[4:7], "offset": eficaltxt[1:4],
                  "boom_length": eficaltxt[7:10], "boom_shorting_factor": eficaltxt[10:13],
                  "dsc_offset": eficaltxt[13:16]}

    # Boom
    exx = eficaldata["boom_length"]
    if not no_cal:
        exx *= eficaldata["boom_shorting_factor"]

    # Calibrate E field
    # Calibrate Ex and Ey spinfits that are derived from E12 only!
    if np.any(e12_ss):
        efs[e12_ss, 0:2] = -1000. * eficaldata["gain"][0] * efs[e12_ss, 0:2] / exx[0]
    if np.any(e34_ss):
        efs[e34_ss, 0:2] = -1000. * eficaldata["gain"][1] * efs[e34_ss, 0:2] / exx[1]
    # Calibrate Ez spinfit by itself:
    efs[:, 2] = -1000. * eficaldata["gain"][2] * efs[:, 2] / exx[2]

    # DC Offset
    if not no_cal:
        efs -= eficaldata["dsc_offset"]

    # Here, if the fit_code is 'e5'x (229) then efs[*,2] contains the spacecraft potential, so set all of those values
    # to NaN, jmm, 19-Apr-2010
    # Or if the fit_code is 'e7'x (231), this will also be including the SC potential, jmm, 22-Oct-2010
    if d_code is not None:
        sc_port = (d_code.y[:, i] == int("e5", 16)) | (d_code.y[:, i] == int("e7", 16))
        if np.any(sc_port):
            efs[sc_port, 2] = np.nan

    # save efs variable
    efs_data = {'x': d.times[efsx_good], 'y': efs[efsx_good, :]}  # efs[efsx_good,*]
    tvar = 'th' + probe + '_efs'
    store_data(tvar, efs_data, attr_dict=e_data_att)
    options(tvar, opt_dict=e_opt_dict)

    # save efs_sigma variable
    efs_sigma_data = {'x': d.times[efsx_good], 'y': d.y[efsx_good, i, 3]}  # d.y[efsx_good, 3, idx]
    tvar = 'th' + probe + '_efs_sigma'
    store_data(tvar, efs_sigma_data, attr_dict=e_data_att_sigma)
    options(tvar, opt_dict=e_opt_dict)

    # save efs_0
    efs_0_data = deepcopy(efs)
    efs_0_data[:, 2] = 0
    efs_0 = {'x': d.times[efsx_good], 'y': efs_0_data[efsx_good, :]}
    tvar = 'th' + probe + '_efs_0'
    store_data(tvar, efs_0, attr_dict=e_data_att_sigma)
    options(tvar, opt_dict=e_opt_dict)

    # calculate efs_dot0
    Ez = (efs[:, 0]*fgs[:, 0] + efs[:, 1]*fgs[:, 1])/(-1*fgs[:, 2])
    angle = np.arccos(fgs[:, 2]/np.sqrt(np.sum(fgs**2, axis=1)))*180/np.pi
    angle80 = angle > 80
    if np.any(angle80):
        Ez[angle80] = np.nan
    efx_dot0_data = deepcopy(efs)
    efx_dot0_data[:, 2] = Ez

    # save efs_dot0
    efs_dot0 = {'x': d.times[efsx_good], 'y': efx_dot0_data[efsx_good, :]}
    tvar = 'th' + probe + '_efs_dot0'
    store_data(tvar, efs_dot0, attr_dict=e_data_att_sigma)
    options(tvar, opt_dict=e_opt_dict)
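# Example usage (illustrative sketch; th?_fit (and optionally th?_fit_code)
# must be loaded first, e.g. from the THEMIS L1 'fit' data):
#
#     pyspedas.themis.fit(probe='a', level='l1')
#     cal_fit(probe='a')
#     # creates tha_fgs, tha_fgs_sigma, tha_fit_bfit, tha_fit_efit, tha_efs, ...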
def map2d(kp, parameter=None, time=None, list=False, color_table=None, subsolar=False, mso=False, map_limit=None, basemap=None, alpha=None, title='MAVEN Mars', qt=True): if list: x = param_list(kp) for param in x: print(param) return # Check for orbit num rather than time string if isinstance(time, builtins.list): if isinstance(time[0], int): time = orbit_time(time[0], time[1]) elif isinstance(time, int): time = orbit_time(time) # Check existence of parameter if parameter is None: print("Must provide an index (or name) for param to be plotted.") return # Store instrument and observation of parameter(s) in lists inst = [] obs = [] if type(parameter) is int or type(parameter) is str: a, b = get_inst_obs_labels(kp, parameter) inst.append(a) obs.append(b) nparam = 1 else: nparam = len(parameter) for param in parameter: a, b = get_inst_obs_labels(kp, param) inst.append(a) obs.append(b) inst_obs = builtins.list(zip(inst, obs)) # Check the time variable if time is not None: kp = range_select(kp, time) # Generate the altitude array if mso: x = kp['SPACECRAFT']['MSO_X'].to_numpy() y = kp['SPACECRAFT']['MSO_Y'].to_numpy() z = kp['SPACECRAFT']['MSO_Z'].to_numpy() r = np.sqrt((x**2) + (y**2) + (z**2)) lat = (90 - np.arccos(z / r) * (180 / math.pi)) lon = (np.arctan2(y, x) * (180 / math.pi)) + 180 else: lon = kp['SPACECRAFT']['SUB_SC_LONGITUDE'] lat = kp['SPACECRAFT']['SUB_SC_LATITUDE'] alt = kp['SPACECRAFT']['ALTITUDE'] # Cycle through the parameters, plotting each according to # the given keywords names_to_plot = [] iplot = 0 # subplot indexes on 1 for inst, obs in inst_obs: # First, generate the dependent array from data y = kp[inst][obs] if subsolar and not mso: pytplot.store_data('sc_lon', data={'x': kp['Time'], 'y': lon}) pytplot.store_data('sc_lat', data={'x': kp['Time'], 'y': lat}) pytplot.store_data('%s.%s' % (inst, obs), data={ 'x': kp['Time'], 'y': y }) pytplot.options('%s.%s' % (inst, obs), 'link', ['lon', 'sc_lon']) pytplot.options('%s.%s' % (inst, obs), 'link', ['lat', 'sc_lat']) pytplot.options('%s.%s' % (inst, obs), 'map', 1) pytplot.store_data( 'ss_lon', data={ 'x': kp['Time'], 'y': kp['SPACECRAFT']['SUBSOLAR_POINT_GEO_LONGITUDE'] }) pytplot.store_data( 'ss_lat', data={ 'x': kp['Time'], 'y': kp['SPACECRAFT']['SUBSOLAR_POINT_GEO_LATITUDE'] }) pytplot.store_data('subsolar', data={'x': kp['Time'], 'y': alt}) pytplot.options('subsolar', 'link', ['lon', 'ss_lon']) pytplot.options('subsolar', 'link', ['lat', 'ss_lat']) pytplot.options('subsolar', 'map', 1) names_to_plot.append('%s.%s.%s' % (inst, obs, 'subsolar')) pytplot.store_data(names_to_plot[iplot], data=['%s.%s' % (inst, obs), 'subsolar']) pytplot.options(names_to_plot[iplot], 'map', 1) pytplot.options(names_to_plot[iplot], 'colormap', ['magma', 'yellow']) else: names_to_plot.append('%s.%s' % (inst, obs)) pytplot.store_data('sc_lon', data={'x': kp['Time'], 'y': lon}) pytplot.store_data('sc_lat', data={'x': kp['Time'], 'y': lat}) pytplot.store_data(names_to_plot[iplot], data={ 'x': kp['Time'], 'y': y }) pytplot.options(names_to_plot[iplot], 'link', ['lon', 'sc_lon']) pytplot.options(names_to_plot[iplot], 'link', ['lat', 'sc_lat']) pytplot.options(names_to_plot[iplot], 'map', 1) if basemap: if basemap == 'mola': map_file = os.path.join(os.path.dirname(__file__), 'basemaps', 'MOLA_color_2500x1250.jpg') elif basemap == 'mola_bw': map_file = os.path.join(os.path.dirname(__file__), 'basemaps', 'MOLA_BW_2500x1250.jpg') elif basemap == 'mdim': map_file = os.path.join(os.path.dirname(__file__), 'basemaps', 'MDIM_2500x1250.jpg') elif basemap == 'elevation': map_file = os.path.join(os.path.dirname(__file__), 'basemaps', 'MarsElevation_2500x1250.jpg') elif basemap == 'mag': map_file = os.path.join(os.path.dirname(__file__), 'basemaps', 'MAG_Connerny_2005.jpg') else: map_file = basemap pytplot.options(names_to_plot[iplot], 'basemap', map_file) if alpha: pytplot.options(names_to_plot[iplot], 'alpha', alpha) iplot = iplot + 1 pytplot.tplot_options('title', title) pytplot.tplot_options('wsize', [1000, 500 * (iplot)]) pytplot.tplot(names_to_plot, bokeh=not qt) pytplot.del_data('ss_lon') pytplot.del_data('ss_lat') pytplot.del_data('sc_lon') pytplot.del_data('sc_lat') pytplot.del_data(names_to_plot) return
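A hypothetical usage example for map2d, assuming pydivide.read returns the (insitu, iuvs) pair shown elsewhere in this codebase and that 'SWIA.HPLUS_DENSITY' is a valid parameter name for the chosen day:

import pydivide

insitu, iuvs = pydivide.read(input_time=['2016-02-18', '2016-02-19'])
# ground-track map of the parameter on the MOLA basemap, with the subsolar point overplotted
pydivide.map2d(insitu, parameter='SWIA.HPLUS_DENSITY', basemap='mola', subsolar=True)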
def plot(kp, parameter=None, time=None, errors=None, SamePlot=True, list=False, title='', qt=True): ''' Plot the provided data as a time series. For now, do not accept any error bar information. If time is not provided plot entire data set. Input: kp: insitu kp data structure/dictionary read from file(s) Time: Two-element list of strings or integers indicating the range of Time to be plotted. At present, there are no checks on whether provided Times are within provided data Parameter: The parameter(s) to be plotted. Can be provided as integers (by index) or strings (by name: inst.obs). If a single parameter is provided, it must be an int or str. If several are provided it must be a list. A list may contain a mixture of data types. Errors: **Not Yet Implemented** Will be the Parameter(s) to use for the generation of error bars in the created plots. Since each inst.obs *may* define its own unique usage of the 'quality flag', this will be a parameter-dependent determination, requiring an add'l routine. SamePlot: if True, put all curves on same axes if False, generate new axes for each plot SubPlot: if True, stack plots with common x axis if False and nplots > 1, make several distinct plots Output: None -> Generates plot(s) as requested. But since there is no plot object returned, cannot alter any plot subsequently (yet) ToDo: Provide mechanism for calculating and plotting error bars ''' if list: x = param_list(kp) for param in x: print(param) return # Check for orbit num rather than time string if isinstance(time, builtins.list): if isinstance(time[0], int): time = orbit_time(time[0], time[1]) elif isinstance(time, int): time = orbit_time(time) # Check existence of parameter if parameter is None: print("Must provide an index (or name) for param to be plotted.") return # Store instrument and observation of parameter(s) in lists inst = [] obs = [] if type(parameter) is int or type(parameter) is str: a, b = get_inst_obs_labels(kp, parameter) inst.append(a) obs.append(b) nparam = 1 else: nparam = len(parameter) for param in parameter: a, b = get_inst_obs_labels(kp, param) inst.append(a) obs.append(b) inst_obs = builtins.list(zip(inst, obs)) # Cycle through the parameters, plotting each according to # the given keywords # iplot = 1 # subplot indexes on 1 y_list = [] legend_names = [] for inst_temp, obs_temp in inst_obs: # First, generate the dependent array from data y = kp[inst_temp][obs_temp] if SamePlot: y_list.append(y) legend_names.append(obs_temp) else: pytplot.store_data(obs_temp, data={'x': kp['Time'], 'y': y}) # Add descriptive plot title pytplot.options(obs_temp, 'ytitle', '%s.%s' % (inst_temp, obs_temp)) # Increment plot number iplot = iplot + 1 if time is not None: pytplot.xlim(time[0], time[1]) if SamePlot: pytplot_name = ''.join(legend_names) result = pd.concat(y_list, axis=1).reindex(y_list[0].index) pytplot.store_data(pytplot_name, data={'x': kp['Time'], 'y': result}) pytplot.options(pytplot_name, 'legend_names', legend_names) pytplot.tplot_options('title', title) pytplot.tplot_options('wsize', [1000, 300]) pytplot.tplot(pytplot_name, bokeh=not qt) pytplot.del_data(pytplot_name) else: pytplot.tplot_options('title', title) pytplot.tplot_options('wsize', [1000, 300 * (iplot - 1)]) pytplot.tplot(obs, bokeh=not qt) pytplot.del_data(obs) return
def mms_edp_set_metadata(probe, data_rate, level, suffix=''): """ This function updates the metadata for EDP data products Parameters: probe : str or list of str probe or list of probes, valid values for MMS probes are ['1','2','3','4']. data_rate : str or list of str instrument data rate for EDP level : str indicates level of data processing. the default if no level is specified is 'l2' suffix: str The tplot variable names will be given this suffix. By default, no suffix is added. """ if not isinstance(probe, list): probe = [probe] if not isinstance(data_rate, list): data_rate = [data_rate] if not isinstance(level, list): level = [level] instrument = 'edp' tvars = set(tnames()) for this_probe in probe: for this_dr in data_rate: for this_lvl in level: if 'mms' + str( this_probe ) + '_' + instrument + '_dce_gse_' + this_dr + '_' + this_lvl + suffix in tvars: options( 'mms' + str(this_probe) + '_' + instrument + '_dce_gse_' + this_dr + '_' + this_lvl + suffix, 'ytitle', 'MMS' + str(this_probe) + ' EDP DCE') options( 'mms' + str(this_probe) + '_' + instrument + '_dce_gse_' + this_dr + '_' + this_lvl + suffix, 'color', ['b', 'g', 'r']) options( 'mms' + str(this_probe) + '_' + instrument + '_dce_gse_' + this_dr + '_' + this_lvl + suffix, 'legend_names', ['Ex GSE', 'Ey GSE', 'Ez GSE']) if 'mms' + str( this_probe ) + '_' + instrument + '_dce_dsl_' + this_dr + '_' + this_lvl + suffix in tvars: options( 'mms' + str(this_probe) + '_' + instrument + '_dce_dsl_' + this_dr + '_' + this_lvl + suffix, 'ytitle', 'MMS' + str(this_probe) + ' EDP DCE') options( 'mms' + str(this_probe) + '_' + instrument + '_dce_dsl_' + this_dr + '_' + this_lvl + suffix, 'color', ['b', 'g', 'r']) options( 'mms' + str(this_probe) + '_' + instrument + '_dce_dsl_' + this_dr + '_' + this_lvl + suffix, 'legend_names', ['Ex DSL', 'Ey DSL', 'Ez DSL']) if 'mms' + str( this_probe ) + '_' + instrument + '_hfesp_' + this_dr + '_' + this_lvl + suffix in tvars: options( 'mms' + str(this_probe) + '_' + instrument + '_hfesp_' + this_dr + '_' + this_lvl + suffix, 'ytitle', 'MMS' + str(this_probe) + ' EDP HFesp [Hz]') options( 'mms' + str(this_probe) + '_' + instrument + '_hfesp_' + this_dr + '_' + this_lvl + suffix, 'ztitle', '(V/m)^2/Hz') options( 'mms' + str(this_probe) + '_' + instrument + '_hfesp_' + this_dr + '_' + this_lvl + suffix, 'ylog', True) options( 'mms' + str(this_probe) + '_' + instrument + '_hfesp_' + this_dr + '_' + this_lvl + suffix, 'zlog', True) options( 'mms' + str(this_probe) + '_' + instrument + '_hfesp_' + this_dr + '_' + this_lvl + suffix, 'spec', True) options( 'mms' + str(this_probe) + '_' + instrument + '_hfesp_' + this_dr + '_' + this_lvl + suffix, 'Colormap', 'jet')
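The branches above rebuild the same tplot variable name repeatedly; a small hypothetical helper (not part of the module) makes the naming pattern explicit:

def edp_var_name(probe, product, data_rate, level, suffix=''):
    # e.g. edp_var_name('1', 'dce_gse', 'srvy', 'l2') -> 'mms1_edp_dce_gse_srvy_l2'
    return 'mms' + str(probe) + '_edp_' + product + '_' + data_rate + '_' + level + suffix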
def mepi_tof(trange=['2017-03-27', '2017-03-28'], datatype='flux', level='l2', suffix='', get_support_data=False, varformat=None, varnames=[], downloadonly=False, notplot=False, no_update=False, uname=None, passwd=None, time_clip=False, ror=True): """ This function loads data from the MEP-i experiment from the Arase mission Parameters: trange : list of str time range of interest [starttime, endtime] with the format ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss'] datatype: str Data type; Valid options: level: str Data level; Valid options: suffix: str The tplot variable names will be given this suffix. By default, no suffix is added. get_support_data: bool Data with an attribute "VAR_TYPE" with a value of "support_data" will be loaded into tplot. By default, only loads in data with a "VAR_TYPE" attribute of "data". varformat: str The file variable formats to load into tplot. Wildcard character "*" is accepted. By default, all variables are loaded in. varnames: list of str List of variable names to load (if not specified, all data variables are loaded) downloadonly: bool Set this flag to download the CDF files, but not load them into tplot variables notplot: bool Return the data in hash tables instead of creating tplot variables no_update: bool If set, only load data from your local cache time_clip: bool Time clip the variables to exactly the range specified in the trange keyword ror: bool If set, print PI info and rules of the road Returns: List of tplot variables created. """ initial_notplot_flag = False if notplot: initial_notplot_flag = True file_res = 3600. * 24 prefix = 'erg_mepi_' + level + '_tof' + datatype + '_' pathformat = 'satellite/erg/mepi/'+level+'/tof/%Y/%m/erg_mepi_' + \ level+'_tof'+datatype+'_%Y%m%d_v??_??.cdf' loaded_data = load(pathformat=pathformat, trange=trange, level=level, datatype=datatype, file_res=file_res, prefix=prefix, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update, uname=uname, passwd=passwd) if (len(loaded_data) > 0) and ror: try: if isinstance(loaded_data, list): if downloadonly: cdf_file = cdflib.CDF(loaded_data[-1]) gatt = cdf_file.globalattsget() else: gatt = get_data(loaded_data[-1], metadata=True)['CDF']['GATT'] elif isinstance(loaded_data, dict): gatt = loaded_data[list(loaded_data.keys())[-1]]['CDF']['GATT'] # --- print PI info and rules of the road print(' ') print( '**************************************************************************' ) print(gatt["LOGICAL_SOURCE_DESCRIPTION"]) print('') print('PI: ', gatt['PI_NAME']) print("Affiliation: " + gatt["PI_AFFILIATION"]) print('') print('- The rules of the road (RoR) common to the ERG project:') print( ' https://ergsc.isee.nagoya-u.ac.jp/data_info/rules_of_the_road.shtml.en' ) print( '- RoR for MEP-i data: https://ergsc.isee.nagoya-u.ac.jp/mw/index.php/ErgSat/Mepi' ) print('') print('Contact: erg_mep_info at isee.nagoya-u.ac.jp') print( '**************************************************************************' ) except Exception: print('Printing PI info and rules of the road failed') if initial_notplot_flag or downloadonly: return loaded_data if 'flux' in datatype: original_suffix_list = [ 'FPDU', 'FHE2DU', 'FHEDU', 'FOPPDU', 'FODU', 'FO2PDU', 'count_raw_P', 'count_raw_HE2', 'count_raw_HE', 'count_raw_OPP', 'count_raw_O', 'count_raw_O2P' ] tplot_names_list = [] for i in range(len(original_suffix_list)): 
tplot_names_list.append(prefix + original_suffix_list[i] + suffix) if tplot_names_list[i] in loaded_data: ylim(tplot_names_list[i], 4, 190) # set spectrogram plot option options(tplot_names_list, 'Spec', 1) # set y axis to logscale options(tplot_names_list, 'ylog', 1) # set ysubtitle options(tplot_names_list, 'ysubtitle', '[keV/q]') # set ztitle options(tplot_names_list[:6], 'ztitle', '[/s-cm^{2}-sr-keV/q]') options(tplot_names_list[6:], 'ztitle', '[cnt/smpl]') # set z axis to logscale options(tplot_names_list, 'zlog', 1) elif 'raw' in datatype: # set spectrogram plot option options(loaded_data, 'Spec', 1) # set z axis to logscale options(loaded_data, 'zlog', 1) return loaded_data
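A hypothetical usage example, assuming MEP-i TOF flux CDFs are available for this date (ERG science center credentials may be required):

mepi_vars = mepi_tof(trange=['2017-03-27', '2017-03-28'], datatype='flux', level='l2')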
def fullplot(instruments=None, level='l2', type=None, start_date='2014-01-01', end_date='2014-01-02', tplot_names='', filenames=None, insitu=None, parameter='', auto_yes=True, ylog=False, zlog=False): ''' Plot any insitu Level 2 or KP data from MAVEN. Downloads any files found via PySPEDAS and loads them into memory via PyTplot. Then creates an interactive plot window including spectrogram slicer, MAVEN's location and orbit in MSO coordinates, and MAVEN's location in GEO coordinates, especially relative to the crustal magnetic fields. Parameters: instruments: str/list of str Instruments from which you want to download data. Accepted values are any combination of: sta, swi, swe, lpw, euv, ngi, iuv, mag, sep, rse type: str/list of str The observation/file type of the instruments to load. If None, all file types are loaded. Otherwise, a file will only be loaded into tplot if its descriptor matches one of the strings in this field. See the instrument SIS for more detail on types. tplot_names : list of str The tplot names to plot. Also not needed, use only if the variables are already loaded into memory. For example, if you want to load in data with this fullplot procedure but modify the variables with pytplot.options or the pytplot.tplot_math routines, you can re-plot the data by specifying the specific pytplot variables. filenames: str/list of str ['yyyy-mm-dd'] List of files to load start_date: str String that is the start date for downloading data (YYYY-MM-DD), or the orbit number end_date: str String that is the end date for downloading data (YYYY-MM-DD), or the orbit number insitu : dict insitu kp data structure/dictionary read from file(s). This is not required, only needed if you want to plot variables from this data structure. parameter : list of str/int If the above insitu data structure is given, this variable will be the parameters to plot (see the pydivide.plot function) Types: =================== ==================================== Instrument Level 2 Observation Type/File Type =================== ==================================== EUV bands LPW lpiv, lpnt, mrgscpot, we12, we12burstlf, we12bursthf, we12burstmf, wn, wspecact, wspecpas STATIC 2a, c0, c2, c4, c6, c8, ca, cc, cd, ce, cf, d0, d1, d4, d6, d7, d8, d9, da, db SEP s1-raw-svy-full, s1-cal-svy-full, s2-raw-svy-full, s2-cal-svy-full SWEA coarsearc3d, coarsesvy3d, finearc3d, finesvy3d, onboardsvymom, onboardsvyspec SWIA arc3d, arcpad, svy3d, svypad, svyspec MAG ss, pc, pl, ss1s, pc1s, pl1s =================== ==================================== Returns : None Examples: >>> # Plots EUV bands, LPW LP-NT, and MAG SS (1s) data on Jan 01 2015 >>> pydivide.fullplot(instruments=['euv', 'lpw', 'mag'], type=['bands', 'lpnt', 'ss1s'], start_date='2015-01-01', end_date='2015-01-02') ''' import os import pyspedas import pytplot from pyqtgraph.Qt import QtCore, QtGui if insitu is not None: pydivide.plot(insitu, parameter=parameter, exec_qt=False) pytplot.options('mvn_kp::spacecraft::altitude', 'map', 1) map_file = os.path.join(os.path.dirname(__file__), 'basemaps', 'MAG_Connerny_2005.jpg') pytplot.options('mvn_kp::spacecraft::altitude', 'basemap', map_file) pytplot.tplot('mvn_kp::spacecraft::altitude', exec_qt=False, window_name='PYDIVIDE_MAP2D', pos_2d=True, pos_3d=True) elif tplot_names == '': tplot_names = pyspedas.maven_load(filenames=filenames, instruments=instruments, level=level, type=type, start_date=start_date, end_date=end_date, auto_yes=auto_yes) if ylog: for t in tplot_names: pytplot.options(t, 'ylog', 1) if zlog: for t in tplot_names: 
pytplot.options(t, 'zlog', 1) pytplot.options('mvn_kp::spacecraft::altitude', 'map', 1) map_file = os.path.join(os.path.dirname(__file__), 'basemaps', 'MAG_Connerny_2005.jpg') pytplot.options('mvn_kp::spacecraft::altitude', 'basemap', map_file) pytplot.tplot(tplot_names, pos_2d=True, pos_3d=True, interactive=True, exec_qt=False, window_name='PYDIVIDE_PLOT') pytplot.tplot('mvn_kp::spacecraft::altitude', exec_qt=False, window_name='PYDIVIDE_MAP2D', extra_functions=[], extra_function_args=[]) else: pytplot.options('mvn_kp::spacecraft::altitude', 'map', 1) map_file = os.path.join(os.path.dirname(__file__), 'basemaps', 'MAG_Connerny_2005.jpg') pytplot.options('mvn_kp::spacecraft::altitude', 'basemap', map_file) pytplot.tplot(tplot_names, pos_2d=True, pos_3d=True, interactive=True, exec_qt=False, window_name='PYDIVIDE_PLOT') pytplot.tplot('mvn_kp::spacecraft::altitude', exec_qt=False, window_name='PYDIVIDE_MAP2D', extra_functions=[], extra_function_args=[]) app = QtGui.QApplication([]) win = QtGui.QMainWindow() app.setStyle("Fusion") plot_splitter = QtGui.QSplitter(QtCore.Qt.Vertical, frameShape=QtGui.QFrame.StyledPanel, frameShadow=QtGui.QFrame.Plain) ancillary_splitter = QtGui.QSplitter(QtCore.Qt.Vertical, frameShape=QtGui.QFrame.StyledPanel, frameShadow=QtGui.QFrame.Plain) main_splitter = QtGui.QSplitter(QtCore.Qt.Horizontal, frameShape=QtGui.QFrame.StyledPanel, frameShadow=QtGui.QFrame.Plain) main_splitter.addWidget(plot_splitter) main_splitter.addWidget(ancillary_splitter) for i, plot_name in enumerate(pytplot.pytplotWindow_names): if plot_name == 'PYDIVIDE_PLOT': plot_splitter.addWidget(pytplot.pytplotWindows[i]) for i, plot_name in enumerate(pytplot.pytplotWindow_names): if plot_name == 'PYDIVIDE_MAP2D': plot_splitter.addWidget(pytplot.pytplotWindows[i]) for i, plot_name in enumerate(pytplot.pytplotWindow_names): if plot_name == 'Spec_Slice': ancillary_splitter.addWidget(pytplot.pytplotWindows[i]) for i, plot_name in enumerate(pytplot.pytplotWindow_names): if plot_name == '2D_MARS': ancillary_splitter.addWidget(pytplot.pytplotWindows[i]) for i, plot_name in enumerate(pytplot.pytplotWindow_names): if plot_name == '3D_MARS': ancillary_splitter.addWidget(pytplot.pytplotWindows[i]) main_splitter.show() #This section will be for implementing IUVS KP data ''' import pyqtgraph.opengl as gl iuvs_data = gl.GLLinePlotItem() for i, plot_name in enumerate(pytplot.pytplotWindow_names): if plot_name == '3D_MARS': pytplot.pytplotWindows[i].centralWidget().addItem(iuvs_data) import math insitu, iuvs = pydivide.read(input_time='2016-02-18') time = iuvs[0]['periapse1']['time_start'] lat = np.radians(90 - iuvs[0]['periapse1']['lat']) lon = np.radians(iuvs[0]['periapse1']['lon']) alt = np.array(iuvs[0]['periapse1']['density']['ALTITUDE']) + 3389.5 # determine transformation matrix time = pytplot.tplot_utilities.str_to_int(time) iuvs_time = np.abs(insitu['Time'].values - time).argmin() print(iuvs_time) rotmat = np.array([[insitu['SPACECRAFT']['T11'][iuvs_time], insitu['SPACECRAFT']['T12'][iuvs_time], insitu['SPACECRAFT']['T13'][iuvs_time]], [insitu['SPACECRAFT']['T21'][iuvs_time], insitu['SPACECRAFT']['T22'][iuvs_time], insitu['SPACECRAFT']['T23'][iuvs_time]], [insitu['SPACECRAFT']['T31'][iuvs_time], insitu['SPACECRAFT']['T32'][iuvs_time], insitu['SPACECRAFT']['T33'][iuvs_time]]]) mso_coords = [] for a in alt: x = math.cos(lat) * math.cos(lon) * a y = math.cos(lat) * math.sin(lon) * a z = math.sin(lat) * a mso_coords.append(np.matmul(rotmat, np.array([x,y,z]))) mso_coords = np.array(mso_coords) 
print(mso_coords) iuvs_data.setData(pos=mso_coords, width=10) ''' import sys if (sys.flags.interactive != 1) or not hasattr(QtCore, 'PYQT_VERSION'): app.exec_()
def load_data(filenames=None, instruments=None, level='l2', type=None, insitu=True, iuvs=False, start_date='2014-01-01', end_date='2020-01-01', update_prefs=False, only_update_prefs=False, local_dir=None, list_files=False, new_files=True, exclude_orbit_file=False, download_only=False, varformat=None, prefix='', suffix='', get_support_data=False): """ This function downloads MAVEN data and loads it into tplot variables, if applicable. """ # 1. Get a list of MAVEN files queried from the above search parameters maven_files = maven_filenames(filenames, instruments, level, insitu, iuvs, start_date, end_date, update_prefs, only_update_prefs, local_dir) # If we are not asking for KP data, this flag ensures only ancillary data is loaded in from the KP files if instruments != 'kp': ancillary_only = True else: ancillary_only = False # Convert to list, but leave None alone so the type filter below can be skipped if type is not None and not isinstance(type, list): type = [type] # Keep track of what files are downloaded files_to_load = [] # Loop through all instruments, download files locally if needed for instr in maven_files.keys(): bn_files_to_load = [] if maven_files[instr]: s = maven_files[instr][0] data_dir = maven_files[instr][1] public = maven_files[instr][2] # Add to list of files to load for f in s: # Filter by type if type is not None and instr != 'kp': file_type_match = False desc = l2_regex.match(f).group("description") for t in type: if t in desc: file_type_match = True if not file_type_match: continue # Check if the files are KP data if instr == 'kp': full_path = create_dir_if_needed(f, data_dir, 'insitu') else: full_path = create_dir_if_needed(f, data_dir, level) bn_files_to_load.append(f) files_to_load.append(os.path.join(full_path, f)) if list_files: for f in s: print(f) return if new_files: if instr == 'kp': s = get_new_files(bn_files_to_load, data_dir, instr, 'insitu') else: s = get_new_files(bn_files_to_load, data_dir, instr, level) if len(s) == 0: continue print("Your request will download a total of: " + str(len(s)) + " files for instrument " + str(instr)) print('Would you like to proceed with the download? ') valid_response = False cancel = False while not valid_response: response = (input('(y/n) > ')) if response == 'y' or response == 'Y': valid_response = True cancel = False elif response == 'n' or response == 'N': print('Cancelled download. Returning...') valid_response = True cancel = True else: print('Invalid input. Please answer with y or n.') if cancel: continue i = 0 display_progress(i, len(s)) for f in s: i = i + 1 if instr == 'kp': full_path = create_dir_if_needed(f, data_dir, 'insitu') else: full_path = create_dir_if_needed(f, data_dir, level) get_file_from_site(f, public, full_path) display_progress(i, len(s)) # 2. Load files into tplot if files_to_load: # Flatten out downloaded files from list of lists of filenames if isinstance(files_to_load[0], list): files_to_load = [ item for sublist in files_to_load for item in sublist ] # Sort the downloaded files by extension; only CDF, STS, and KP (.tab) files are loaded cdf_files = [f for f in files_to_load if '.cdf' in f] sts_files = [f for f in files_to_load if '.sts' in f] kp_files = [f for f in files_to_load if '.tab' in f] loaded_tplot_vars = [] if not download_only: for f in cdf_files: # Loop through CDF files desc = l2_regex.match(os.path.basename(f)).group("description") if desc != '' and suffix == '': created_vars = pytplot.cdf_to_tplot( f, varformat=varformat, get_support_data=get_support_data, prefix=prefix, suffix=desc, merge=True) else: created_vars = pytplot.cdf_to_tplot( f, varformat=varformat, get_support_data=get_support_data, prefix=prefix, suffix=suffix, merge=True) # Specifically for SWIA and SWEA data, make sure the plots have log axes and are spectrograms instr = l2_regex.match(os.path.basename(f)).group("instrument") if instr in ["swi", "swe"]: pytplot.options(created_vars, 'ylog', 1) pytplot.options(created_vars, 'zlog', 1) pytplot.options(created_vars, 'spec', 1) loaded_tplot_vars.append(created_vars) for f in sts_files: # Loop through STS (Mag) files desc = l2_regex.match(os.path.basename(f)).group("description") if desc != '' and suffix == '': loaded_tplot_vars.append( pytplot.sts_to_tplot(f, prefix=prefix, suffix=desc, merge=True)) else: loaded_tplot_vars.append( pytplot.sts_to_tplot(f, prefix=prefix, suffix=suffix, merge=True)) # Remove the Decimal Day column, not really useful for tvar in loaded_tplot_vars: if "DDAY_" in tvar: pytplot.del_data(tvar) del tvar # Flatten out the list and only grab the unique tplot variables flat_list = list( set([ item for sublist in loaded_tplot_vars for item in sublist ])) # Load in KP data specifically for all of the Ancillary data (position, attitude, Ls, etc) kp_data_loaded = maven_kp_to_tplot(filename=kp_files, ancillary_only=ancillary_only) # Link all created tplot variables to the corresponding KP data for tvar in flat_list: pytplot.link(tvar, "mvn_kp::spacecraft::altitude", link_type='alt') pytplot.link(tvar, "mvn_kp::spacecraft::mso_x", link_type='x') pytplot.link(tvar, "mvn_kp::spacecraft::mso_y", link_type='y') pytplot.link(tvar, "mvn_kp::spacecraft::mso_z", link_type='z') pytplot.link(tvar, "mvn_kp::spacecraft::geo_x", link_type='geo_x') pytplot.link(tvar, "mvn_kp::spacecraft::geo_y", link_type='geo_y') pytplot.link(tvar, "mvn_kp::spacecraft::geo_z", link_type='geo_z') pytplot.link(tvar, "mvn_kp::spacecraft::sub_sc_longitude", link_type='lon') pytplot.link(tvar, "mvn_kp::spacecraft::sub_sc_latitude", link_type='lat') # Link all created KP data to the ancillary KP data for tvar in kp_data_loaded: pytplot.link(tvar, "mvn_kp::spacecraft::altitude", link_type='alt') pytplot.link(tvar, "mvn_kp::spacecraft::mso_x", link_type='x') pytplot.link(tvar, "mvn_kp::spacecraft::mso_y", link_type='y') pytplot.link(tvar, "mvn_kp::spacecraft::mso_z", link_type='z') pytplot.link(tvar, "mvn_kp::spacecraft::geo_x", link_type='geo_x') pytplot.link(tvar, "mvn_kp::spacecraft::geo_y", link_type='geo_y') pytplot.link(tvar, "mvn_kp::spacecraft::geo_z", link_type='geo_z') pytplot.link(tvar, "mvn_kp::spacecraft::sub_sc_longitude", link_type='lon') pytplot.link(tvar, "mvn_kp::spacecraft::sub_sc_latitude", link_type='lat') # Return the list of unique tplot variables return flat_list
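A hypothetical call, assuming SWIA onboard-survey moment files exist for the requested day:

tvars = load_data(instruments='swi', type='onboardsvymom', start_date='2015-01-01', end_date='2015-01-02')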
def mgf(trange=['2017-03-27', '2017-03-28'], datatype='8sec', level='l2', suffix='', get_support_data=False, varformat=None, downloadonly=False, notplot=False, no_update=False, uname=None, passwd=None, time_clip=False): """ This function loads data from the MGF experiment from the Arase mission Parameters: trange : list of str time range of interest [starttime, endtime] with the format ['YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss'] datatype: str Data type; Valid options: level: str Data level; Valid options: suffix: str The tplot variable names will be given this suffix. By default, no suffix is added. get_support_data: bool Data with an attribute "VAR_TYPE" with a value of "support_data" will be loaded into tplot. By default, only loads in data with a "VAR_TYPE" attribute of "data". varformat: str The file variable formats to load into tplot. Wildcard character "*" is accepted. By default, all variables are loaded in. downloadonly: bool Set this flag to download the CDF files, but not load them into tplot variables notplot: bool Return the data in hash tables instead of creating tplot variables no_update: bool If set, only load data from your local cache time_clip: bool Time clip the variables to exactly the range specified in the trange keyword Returns: List of tplot variables created. """ if datatype == '8s' or datatype == '8': datatype = '8sec' elif datatype == '64': datatype = '64hz' elif datatype == '128': datatype = '128hz' elif datatype == '256': datatype = '256hz' loaded_data = load(instrument='mgf', trange=trange, level=level, datatype=datatype, suffix=suffix, get_support_data=get_support_data, varformat=varformat, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update, uname=uname, passwd=passwd) if loaded_data is None or loaded_data == [] or notplot or downloadonly: return loaded_data clip('erg_mgf_' + level + '_mag_' + datatype + '_dsi' + suffix, -1e+6, 1e6) clip('erg_mgf_' + level + '_mag_' + datatype + '_gse' + suffix, -1e+6, 1e6) clip('erg_mgf_' + level + '_mag_' + datatype + '_gsm' + suffix, -1e+6, 1e6) clip('erg_mgf_' + level + '_mag_' + datatype + '_sm' + suffix, -1e+6, 1e6) # set yrange times, bdata = get_data('erg_mgf_' + level + '_mag_' + datatype + '_dsi' + suffix) ylim('erg_mgf_' + level + '_mag_' + datatype + '_dsi' + suffix, np.nanmin(bdata), np.nanmax(bdata)) times, bdata = get_data('erg_mgf_' + level + '_mag_' + datatype + '_gse' + suffix) ylim('erg_mgf_' + level + '_mag_' + datatype + '_gse' + suffix, np.nanmin(bdata), np.nanmax(bdata)) times, bdata = get_data('erg_mgf_' + level + '_mag_' + datatype + '_gsm' + suffix) ylim('erg_mgf_' + level + '_mag_' + datatype + '_gsm' + suffix, np.nanmin(bdata), np.nanmax(bdata)) times, bdata = get_data('erg_mgf_' + level + '_mag_' + datatype + '_sm' + suffix) ylim('erg_mgf_' + level + '_mag_' + datatype + '_sm' + suffix, np.nanmin(bdata), np.nanmax(bdata)) # set labels options('erg_mgf_' + level + '_mag_' + datatype + '_dsi' + suffix, 'legend_names', ['Bx', 'By', 'Bz']) options('erg_mgf_' + level + '_mag_' + datatype + '_gse' + suffix, 'legend_names', ['Bx', 'By', 'Bz']) options('erg_mgf_' + level + '_mag_' + datatype + '_gsm' + suffix, 'legend_names', ['Bx', 'By', 'Bz']) options('erg_mgf_' + level + '_mag_' + datatype + '_sm' + suffix, 'legend_names', ['Bx', 'By', 'Bz']) # set color of the labels options('erg_mgf_' + level + '_mag_' + datatype + '_dsi' + suffix, 'Color', ['b', 'g', 'r']) options('erg_mgf_' + level + '_mag_' + datatype + '_gse' + suffix, 'Color', ['b', 'g', 'r']) options('erg_mgf_' + level + '_mag_' + datatype + '_gsm' + suffix, 'Color', ['b', 'g', 'r']) options('erg_mgf_' + level + '_mag_' + datatype + '_sm' + suffix, 'Color', ['b', 'g', 'r']) return loaded_data
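A hypothetical usage example through the public wrapper, assuming the routine is exposed as pyspedas.erg.mgf:

import pyspedas
from pytplot import tplot

mgf_vars = pyspedas.erg.mgf(trange=['2017-03-27', '2017-03-28'], datatype='8sec')
tplot('erg_mgf_l2_mag_8sec_dsi')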
def mms_feeps_pad_spinavg(probe='1', data_units='intensity', datatype='electron', data_rate='srvy', level='l2', suffix='', energy=[70, 600], bin_size=16.3636): """ This function will spin-average the FEEPS pitch angle distributions Parameters: probe: str probe #, e.g., '4' for MMS4 data_units: str 'intensity' or 'count_rate' datatype: str 'electron' or 'ion' data_rate: str instrument data rate, e.g., 'srvy' or 'brst' level: str data level, e.g., 'l2' suffix: str suffix of the loaded data energy: list of float energy range to include in the calculation bin_size: float size of the pitch angle bins Returns: Name of tplot variable created. """ units_label = '' if data_units == 'intensity': units_label = '1/(cm^2-sr-s-keV)' elif data_units == 'counts': units_label = '[counts/s]' prefix = 'mms'+str(probe)+'_epd_feeps_' n_pabins = 180./bin_size new_bins = [180.*i/n_pabins for i in range(int(n_pabins)+1)] # get the spin sectors # v5.5+ = mms1_epd_feeps_srvy_l1b_electron_spinsectnum sector_times, spin_sectors = get_data(prefix + data_rate + '_' + level + '_' + datatype + '_spinsectnum' + suffix) spin_starts = [spin_end + 1 for spin_end in np.where(spin_sectors[:-1] >= spin_sectors[1:])[0]] en_range_string = str(int(energy[0])) + '-' + str(int(energy[1])) + 'keV' var_name = prefix + data_rate + '_' + level + '_' + datatype + '_' + data_units + '_' + en_range_string + '_pad' + suffix times, data, angles = get_data(var_name) spin_avg_flux = np.zeros([len(spin_starts), len(angles)]) rebinned_data = np.zeros([len(spin_starts), int(n_pabins)+1]) spin_times = np.zeros(len(spin_starts)) # the following is for rebinning and interpolating to new_bins srx = [float(len(angles))/(int(n_pabins)+1)*(x + 0.5) - 0.5 for x in range(int(n_pabins)+1)] current_start = 0 for spin_idx in range(0, len(spin_starts)): with warnings.catch_warnings(): warnings.simplefilter("ignore", category=RuntimeWarning) spin_avg_flux[spin_idx, :] = nanmean(data[current_start:spin_starts[spin_idx]+1, :], axis=0) spin_times[spin_idx] = times[current_start] # rebin and interpolate to new_bins # this is meant to replicate the functionality of congrid in the IDL routine spin_avg_interp = scipy.interpolate.interp1d(np.arange(len(spin_avg_flux[spin_idx, :])), spin_avg_flux[spin_idx, :], fill_value='extrapolate') rebinned_data[spin_idx, :] = spin_avg_interp(srx) # we want to take the end values instead of extrapolating # again, to match the functionality of congrid in IDL rebinned_data[spin_idx, 0] = spin_avg_flux[spin_idx, 0] rebinned_data[spin_idx, -1] = spin_avg_flux[spin_idx, -1] current_start = spin_starts[spin_idx] + 1 # store_data(var_name + '_spin' + suffix, data={'x': spin_times, 'y': spin_avg_flux, 'v': angles}) store_data(var_name + '_spin' + suffix, data={'x': spin_times, 'y': rebinned_data, 'v': new_bins}) options(var_name + '_spin' + suffix, 'spec', True) options(var_name + '_spin' + suffix, 'ylog', False) options(var_name + '_spin' + suffix, 'zlog', True) options(var_name + '_spin' + suffix, 'Colormap', 'jet') options(var_name + '_spin' + suffix, 'ztitle', units_label) options(var_name + '_spin' + suffix, 'ytitle', 'MMS' + str(probe) + ' ' + datatype + ' PA (deg)') return var_name + '_spin' + suffix
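The spin boundaries above are found by watching the spin sector counter roll over: a spin starts one sample after the counter stops increasing. A toy illustration of the same expression:

import numpy as np

spin_sectors = np.array([0, 1, 2, 3, 0, 1, 2, 3, 0])  # toy sector counter
spin_starts = np.where(spin_sectors[:-1] >= spin_sectors[1:])[0] + 1
print(spin_starts)  # [4 8]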
def mms_load_fpi_calc_pad(probe='1', level='sitl', datatype='', data_rate='', suffix='', autoscale=True): """ Calculates the omni-directional pitch angle distribution (summed and averaged) from the individual tplot variables Parameters: probe: str probe, valid values for MMS probes are ['1','2','3','4']. level: str indicates level of data processing. the default if no level is specified is 'sitl' datatype: str Valid datatypes for FPI are: Quicklook: ['des', 'dis'] SITL: '' (none; loads both electron and ion data from single CDF) L1b/L2: ['des-dist', 'dis-dist', 'dis-moms', 'des-moms'] data_rate: str instrument data rates for FPI include 'brst' and 'fast'. The default is 'fast'. suffix: str The tplot variable names will be given this suffix. By default, no suffix is added. autoscale: bool If set, use the default zrange; otherwise, use the min and max of the data for the zrange Returns: List of tplot variables created. """ out_vars = [] if isinstance(datatype, str): if datatype == '*' or datatype == '': if level.lower() == 'ql': datatype = ['des', 'dis'] else: datatype = ['des-dist', 'dis-dist'] if isinstance(datatype, str): datatype = [datatype] for dtype in datatype: species = dtype[1] if level.lower() == 'sitl': spec_str_format = 'PitchAngDist' obs_str_format = '_fpi_' + species else: spec_str_format = 'pitchAngDist' obs_str_format = '_d' + species + 's_' obsstr = 'mms' + str(probe) + obs_str_format if level.lower() == 'l2': spec_str_format = 'pitchangdist' pad_vars = [ obsstr + spec_str_format + '_' + erange + 'en_' + data_rate + suffix for erange in ['low', 'mid', 'high'] ] else: pad_vars = [ obsstr + spec_str_format + '_' + erange + 'En' + suffix for erange in ['low', 'mid', 'high'] ] pad_avg_name = obsstr + 'PitchAngDist_avg' + suffix low_en = get_data(pad_vars[0]) mid_en = get_data(pad_vars[1]) high_en = get_data(pad_vars[2]) if low_en is None or mid_en is None or high_en is None: v3_low_pad = tnames(pad_vars[0].lower() + '_' + data_rate) v3_mid_pad = tnames(pad_vars[1].lower() + '_' + data_rate) v3_high_pad = tnames(pad_vars[2].lower() + '_' + data_rate) if v3_low_pad == [] or v3_mid_pad == [] or v3_high_pad == []: continue low_en = get_data(v3_low_pad[0]) mid_en = get_data(v3_mid_pad[0]) high_en = get_data(v3_high_pad[0]) pad_avg_name = pad_avg_name.lower() e_pad_sum = low_en.y + mid_en.y + high_en.y e_pad_avg = e_pad_sum / 3.0 if level == 'l2': pad_avg_name = pad_avg_name.lower() if species == 'e': species_str = 'electron' elif species == 'i': species_str = 'ion' if level == 'ql': store_data(obsstr + 'PitchAngDist_sum' + suffix, data={ 'x': low_en.times, 'y': e_pad_sum, 'v': low_en.v }) options( obsstr + 'PitchAngDist_sum' + suffix, 'ytitle', 'MMS' + str(probe) + ' \\ ' + species_str + ' \\ PAD \\ SUM') options(obsstr + 'PitchAngDist_sum' + suffix, 'yrange', [0, 180]) options(obsstr + 'PitchAngDist_sum' + suffix, 'zlog', True) options(obsstr + 'PitchAngDist_sum' + suffix, 'spec', True) options(obsstr + 'PitchAngDist_sum' + suffix, 'Colormap', 'jet') out_vars.append(obsstr + 'PitchAngDist_sum' + suffix) store_data(pad_avg_name, data={ 'x': low_en.times, 'y': e_pad_avg, 'v': low_en.v }) options(pad_avg_name, 'ztitle', 'eV/(cm^2 s sr eV)') options(pad_avg_name, 'ytitle', 'MMS' + str(probe) + ' \\ ' + species_str + ' \\ PAD \\ AVG') options(pad_avg_name, 'yrange', [0, 180]) options(pad_avg_name, 'zlog', True) options(pad_avg_name, 'spec', True) options(pad_avg_name, 'Colormap', 'jet') out_vars.append(pad_avg_name) return out_vars
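A hypothetical usage example; this routine is normally invoked automatically by the FPI load routine, but it can be rerun after the PAD variables are in memory:

import pyspedas

pyspedas.mms.fpi(trange=['2015-10-16', '2015-10-17'], datatype='des-dist', level='l2', data_rate='fast')
pad_vars = mms_load_fpi_calc_pad(probe='1', level='l2', datatype='des-dist', data_rate='fast')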
def twavpol(tvarname, prefix='', nopfft=-1, steplength=-1, bin_freq=-1): """Apply wavpol to a pytplot variable. Creates multiple pytplot variables: '_powspec','_degpol', '_waveangle', '_elliptict', '_helict', '_pspec3_x', '_pspec3_y', '_pspec3_z' Parameters ---------- tvarname : string Name of pytplot variable. prefix : string, optional Prefix for pytplot variables created. nopfft : int, optional Number of points in FFT. The default is 256. steplength : int, optional The shift, in points, between successive FFT intervals. The default is -1, which means nopfft/2. bin_freq : int, optional Number of bins in the frequency domain. The default is 3. Returns ------- result : bool Returns 1 if completed successfully. Returns 0 if it encountered problems and exited. """ if prefix == '': prefix = tvarname all_names = tnames(tvarname) if len(all_names) < 1: print('twavpol error: No valid pytplot variables match tvarname.') return 0 xdata = get_data(tvarname) ct = xdata.times if len(ct) < 2: print('twavpol error: Time variable does not have enough points.') return 0 bfield = xdata.y if bfield.ndim != 2: print('twavpol error: Data should have 2 dimensions.') return 0 b1 = bfield[:, 0] b2 = bfield[:, 1] b3 = bfield[:, 2] if (len(ct) != len(b1) or len(ct) != len(b2) or len(ct) != len(b3)): print('twavpol error: Number of time elements does not match ' + 'number of magnetic field elements.') return 0 # Apply wavpol. (timeline, freqline, powspec, degpol, waveangle, elliptict, helict, pspec3, err_flag) = wavpol(ct, b1, b2, b3, nopfft=nopfft, steplength=steplength, bin_freq=bin_freq) if err_flag == 1: print('twavpol error: There were errors while applying wavpol.') return 0 # Store new pytplot variables as spectrograms. vt = prefix+'_powspec' store_data(vt, data={'x': timeline, 'y': powspec, 'v': freqline}) options(vt, 'spec', 1) vt = prefix+'_degpol' store_data(vt, data={'x': timeline, 'y': degpol, 'v': freqline}) options(vt, 'spec', 1) vt = prefix+'_waveangle' store_data(vt, data={'x': timeline, 'y': waveangle, 'v': freqline}) options(vt, 'spec', 1) vt = prefix+'_elliptict' store_data(vt, data={'x': timeline, 'y': elliptict, 'v': freqline}) options(vt, 'spec', 1) vt = prefix+'_helict' store_data(vt, data={'x': timeline, 'y': helict, 'v': freqline}) options(vt, 'spec', 1) # Take the three components of pspec3. vt = prefix+'_pspec3_x' store_data(vt, data={'x': timeline, 'y': pspec3[:, :, 0], 'v': freqline}) options(vt, 'spec', 1) vt = prefix+'_pspec3_y' store_data(vt, data={'x': timeline, 'y': pspec3[:, :, 1], 'v': freqline}) options(vt, 'spec', 1) vt = prefix+'_pspec3_z' store_data(vt, data={'x': timeline, 'y': pspec3[:, :, 2], 'v': freqline}) options(vt, 'spec', 1) return 1
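A hypothetical usage example, assuming a three-component field variable is already loaded (the THEMIS SCM variable name here is illustrative):

import pyspedas
from pytplot import tplot

pyspedas.themis.scm(probe='a', trange=['2017-03-27', '2017-03-28'])
if twavpol('tha_scf_gse'):
    tplot(['tha_scf_gse_powspec', 'tha_scf_gse_degpol'])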
def mms_feeps_gpd( trange=['2017-07-11/22:30', '2017-07-11/22:35'], probe='2', data_rate='brst', level='l2', datatype='electron', data_units='intensity', bin_size=15, # deg energy=[50, 500]): """ Calculate gyrophase distributions using data from the MMS Fly's Eye Energetic Particle Sensor (FEEPS) Parameters ---------- trange: list of str time range of interest probe: str probe #, e.g., '4' for MMS4 data_units: str 'intensity' datatype: str 'electron' or 'ion' data_rate: str instrument data rate, e.g., 'srvy' or 'brst' level: str data level energy: list of float energy range to include in the calculation bin_size: float size of the gyrophase bins, in degrees Returns -------- Tplot variable containing the gyrophase distribution Notes ------ Based on IDL code by Drew Turner (10 Oct 2017): mms_feeps_gpd.pro """ if isinstance(probe, int): probe = str(probe) feeps_data = pyspedas.mms.feeps(trange=trange, data_rate=data_rate, probe=probe, level=level) if len(feeps_data) == 0: print('Problem loading FEEPS data for this time range.') return # Account for angular response (finite field of view) of instruments # elec can use +/-21.4 deg on each pitch angle as average response angle; ions can start with +/-10 deg, but both need to be further refined if datatype == 'electron': dAngResp = 21.4 # [deg] if datatype == 'ion': dAngResp = 10.0 # [deg] bin_size = float(bin_size) n_bins = 360.0 / bin_size gyro_bins = 360. * np.arange(n_bins + 1) / n_bins gyro_centers = 360. * np.arange(n_bins) / n_bins + (gyro_bins[1] - gyro_bins[0]) / 2. # calculate the gyrophase angles from the magnetic field data gyro_vars = mms_feeps_getgyrophase(trange=trange, probe=probe, data_rate=data_rate, level=level, datatype=datatype) gyro_data = get_data('mms' + str(probe) + '_epd_feeps_' + data_rate + '_' + level + '_' + datatype + '_gyrophase') if gyro_data is None or gyro_vars is None: print('Problem calculating gyrophase angles.') return eyes = mms_feeps_active_eyes(trange, probe, data_rate, datatype, level) data_map = {} if data_rate == 'srvy': # From Allison Jaynes @ LASP: The 6,7,8 sensors (out of 12) are ions, # so in the pitch angle array, the 5,6,7 columns (counting from zero) will be the ion pitch angles. 
# for electrons: if datatype == 'electron': data_map['top-electron'] = eyes['top'] - 1 data_map['bottom-electron'] = eyes['bottom'] - 1 elif datatype == 'ion': data_map['top-ion'] = eyes['top'] - 1 data_map['bottom-ion'] = eyes['bottom'] - 1 elif data_rate == 'brst': # note: the following are indices of the top/bottom sensors in pa_data # they should be consistent with pa_dlimits.labels data_map['top-electron'] = [0, 1, 2, 3, 4, 5, 6, 7, 8] data_map['bottom-electron'] = [9, 10, 11, 12, 13, 14, 15, 16, 17] # and ions: data_map['top-ion'] = [0, 1, 2] data_map['bottom-ion'] = [3, 4, 5] sensor_types = ['top', 'bottom'] # First, initialize arrays for flux (dflux) and pitch angles (dpa) compiled from all sensors: if datatype == 'electron': dflux = np.zeros( (len(gyro_data.times), len(data_map['top-electron']) + len(data_map['bottom-electron']))) elif datatype == 'ion': dflux = np.zeros( (len(gyro_data.times), len(data_map['top-ion']) + len(data_map['bottom-ion']))) dpa = np.zeros(dflux.shape) for sensor_type in sensor_types: pa_map = data_map[sensor_type + '-' + datatype] particle_idxs = np.array(eyes[sensor_type]) - 1 for isen in range(len(particle_idxs)): # loop through sensors # get the data var_name = 'mms' + str( probe ) + '_epd_feeps_' + data_rate + '_' + level + '_' + datatype + '_' + sensor_type + '_' + data_units + '_sensorid_' + str( particle_idxs[isen] + 1) + '_clean_sun_removed' data = get_data(var_name) if data is None: print('Data not found: ' + var_name) continue data.y[data.y == 0.0] = np.nan # remove any 0s before averaging # Energy indices to use: indx = np.argwhere((data.v <= energy[1]) & (data.v >= energy[0])) if len(indx) == 0: print( 'Energy range selected is not covered by the detector for FEEPS ' + datatype + ' data') continue dflux[:, pa_map[isen]] = np.nanmean(data.y[:, indx], axis=1).flatten() dpa[:, pa_map[isen]] = gyro_data.y[:, pa_map[isen]].flatten() # we need to replace the 0.0s left in after populating dpa with NaNs; these # 0.0s are left in there because these points aren't covered by sensors loaded # for this datatype/data_rate dpa[dpa == 0.0] = np.nan gyro_flux = np.zeros((len(gyro_data.times), int(n_bins))) delta_gyro = (gyro_bins[1] - gyro_bins[0]) / 2.0 # Now loop through PA bins and time, find the telescopes where there is data in those bins and average it up! for it in range(len(dpa[:, 0])): for ipa in range(int(n_bins)): ind = np.argwhere( (dpa[it, :] + dAngResp >= gyro_centers[ipa] - delta_gyro) & (dpa[it, :] - dAngResp < gyro_centers[ipa] + delta_gyro)) if len(ind) > 0: gyro_flux[it, ipa] = np.nanmean(dflux[it, ind], axis=0).flatten() # fill any missed bins with NAN gyro_flux[gyro_flux == 0.0] = np.nan en_range_string = str(int(energy[0])) + '-' + str(int(energy[1])) + 'keV' new_name = 'mms' + str( probe ) + '_epd_feeps_' + data_rate + '_' + level + '_' + datatype + '_' + data_units + '_' + en_range_string + '_gpd' saved = store_data(new_name, data={ 'x': gyro_data.times, 'y': gyro_flux, 'v': gyro_centers }) if saved: options(new_name, 'spec', True) options(new_name, 'zlog', False) return new_name
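A hypothetical usage example for a burst-mode electron gyrophase distribution:

from pytplot import tplot

gpd_var = mms_feeps_gpd(trange=['2017-07-11/22:30', '2017-07-11/22:35'], probe='4', data_rate='brst', datatype='electron', energy=[50, 500])
if gpd_var is not None:
    tplot(gpd_var)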
data = alldata[1] #################################################################################### # After working with the data, we can store our own data as a new pytplot variable. store_data("tha_new_vel", data={'x': time, 'y': data}) #################################################################################### # Preparing for the plots, we define the y-axis limits for the two panels. ylim('tha_pos', -23000.0, 81000.0) ylim('tha_new_vel', -8.0, 12.0) #################################################################################### # We give a title to the plot and labels for the y-axis panels. tplot_options('title', 'THEMIS tha position and velocity, 2015-12-31') options('tha_pos', 'ytitle', 'Position') options('tha_new_vel', 'ytitle', 'Velocity') #################################################################################### # We plot the position and the velocity using the pyqtgraph library (the default). tplot(["tha_pos", "tha_new_vel"]) #################################################################################### # A new window will open, containing the following plot: # # .. image:: http://themis.ssl.berkeley.edu/images/pyspedas_demo1.png # :alt: Themis tha position and velocity # #################################################################################### # Load and plot GMAG data
def hapi(trange=None, server=None, dataset=None, parameters='', suffix='', prefix='', catalog=False): """ Loads data from a HAPI server into pytplot variables Parameters ----------- trange: list of str or list of float Time range to load the data for server: str HAPI server to load the data from dataset: str HAPI dataset to load parameters: str or list of str Parameters in the dataset to load; default is to load them all prefix: str Prefix to append to the tplot variables suffix: str Suffix to append to the tplot variables catalog: bool If True, returns the server's catalog of datasets Returns ------- List of tplot variables created. """ if server is None: print('Error, no server specified; example servers include:') print('- https://cdaweb.gsfc.nasa.gov/hapi') print('- https://pds-ppi.igpp.ucla.edu/hapi') print('- http://planet.physics.uiowa.edu/das/das2Server/hapi') print('- https://iswa.gsfc.nasa.gov/IswaSystemWebApp/hapi') print('- http://lasp.colorado.edu/lisird/hapi') return if catalog: catalog = load_hapi(server) items = [] if 'catalog' in catalog.keys(): items = catalog['catalog'] print('Available datasets: ') for item in items: if 'title' in item.keys(): print(item['id'] + ': ' + item['title']) else: print(item['id']) return if dataset is None: print( 'Error, no dataset specified; please see the catalog for a list of available data sets.' ) return if trange is None: print('Error, no trange specified') return if isinstance(parameters, list): parameters = ','.join(parameters) opts = {'logging': False} with warnings.catch_warnings(): warnings.simplefilter('ignore', category=ResourceWarning) warnings.filterwarnings('ignore', message='Unverified HTTPS request') data, hapi_metadata = load_hapi(server, dataset, parameters, trange[0], trange[1], **opts) out_vars = [] # loop through the parameters in this dataset params = hapi_metadata['parameters'] for param in params[1:]: spec = False param_name = param.get('name') print('Loading ' + prefix + param_name + suffix) # load the data only for this parameter try: with warnings.catch_warnings(): warnings.simplefilter('ignore', category=ResourceWarning) warnings.filterwarnings('ignore', message='Unverified HTTPS request') data, hapi_metadata = load_hapi(server, dataset, param_name, trange[0], trange[1], **opts) except: continue timestamps = [datapoint[0] for datapoint in data] unixtimes = [ time_double(timestamp.decode('utf-8')) for timestamp in timestamps ] param_type = hapi_metadata['parameters'][1].get('type') if param_type is None: param_type = 'double' data_size = hapi_metadata['parameters'][1].get('size') if data_size is None: single_line = True try: if param_type == 'double': single_line = isinstance(data[0][1], np.float64) elif param_type == 'integer': single_line = isinstance(data[0][1], np.int32) except IndexError: continue if single_line: data_out = np.zeros((len(data))) else: try: data_out = np.zeros((len(data), len(data[0][1]))) except TypeError: continue for idx, datapoint in enumerate(data): if single_line: data_out[idx] = datapoint[1] else: data_out[idx, :] = datapoint[1] data_out = data_out.squeeze() # check for fill values fill_value = hapi_metadata['parameters'][1].get('fill') if fill_value is not None: if param_type == 'double': fill_value = float(fill_value) data_out[data_out == fill_value] = np.nan elif param_type == 'integer': # NaN is only floating point, so we replace integer fill # values with 0 instead of NaN fill_value = int(fill_value) data_out[data_out == fill_value] = 0 bins = param.get('bins') if bins is not None: 
centers = bins[0].get('centers') if centers is not None: spec = True data_table = {'x': unixtimes, 'y': data_out} if spec: data_table['v'] = centers saved = store_data(prefix + param_name + suffix, data=data_table) metadata = get_data(prefix + param_name + suffix, metadata=True) metadata['HAPI'] = hapi_metadata if spec: options(prefix + param_name + suffix, 'spec', True) if saved: out_vars.append(prefix + param_name + suffix) # wait for a second before going to the next variable # to avoid hitting the server too quickly sleep(1) return out_vars
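A hypothetical usage example against the CDAWeb HAPI server; dataset availability should be confirmed first with catalog=True:

omni_vars = hapi(trange=['2003-10-20', '2003-11-30'], server='https://cdaweb.gsfc.nasa.gov/hapi', dataset='OMNI_HRO2_1MIN')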
def mms_eis_omni(probe, species='proton', datatype='extof', suffix='', data_units='flux', data_rate='srvy'): """ This function will calculate the omni-directional EIS spectrograms, and is automatically called from mms_load_eis Parameters: probe: str probe #, e.g., '4' for MMS4 species: str species for calculation (default: 'proton') datatype: str 'extof' or 'phxtof' (default: 'extof') suffix: str suffix of the loaded data data_units: str 'flux' or 'cps' (default: 'flux') data_rate: str instrument data rate, e.g., 'srvy' or 'brst' (default: 'srvy') Returns: Name of tplot variable created. """ probe = str(probe) species_str = datatype + '_' + species if data_rate == 'brst': prefix = 'mms' + probe + '_epd_eis_brst_' else: prefix = 'mms' + probe + '_epd_eis_' if data_units == 'flux': units_label = '1/(cm^2-sr-s-keV)' elif data_units == 'cps': units_label = '1/s' elif data_units == 'counts': units_label = 'counts' telescopes = tnames(pattern=prefix + species_str + '_*' + data_units + '_t?' + suffix) if len(telescopes) == 6: scope_data = get_data(telescopes[0]) if len(scope_data) <= 2: print("Error, couldn't find energy table for the variable: " + telescopes[0]) return None time, data, energies = scope_data flux_omni = np.zeros((len(time), len(energies))) for t in telescopes: time, data, energies = get_data(t) flux_omni = flux_omni + data store_data(prefix + species_str + '_' + data_units + '_omni' + suffix, data={ 'x': time, 'y': flux_omni / 6., 'v': energies }) options(prefix + species_str + '_' + data_units + '_omni' + suffix, 'spec', 1) options(prefix + species_str + '_' + data_units + '_omni' + suffix, 'ylog', 1) options(prefix + species_str + '_' + data_units + '_omni' + suffix, 'zlog', 1) options(prefix + species_str + '_' + data_units + '_omni' + suffix, 'ztitle', units_label) options( prefix + species_str + '_' + data_units + '_omni' + suffix, 'ytitle', 'MMS' + probe + ' ' + datatype + ' ' + species + ' Energy [keV]') options(prefix + species_str + '_' + data_units + '_omni' + suffix, 'yrange', [14, 45]) options(prefix + species_str + '_' + data_units + '_omni' + suffix, 'Colormap', 'jet') # create new variable with omni energy limits energy_minus = get_data(prefix + species_str + '_t0_energy_dminus' + suffix) energy_gm = get_data(prefix + species_str + '_t0_energy' + suffix) energy_plus = get_data(prefix + species_str + '_t0_energy_dplus' + suffix) if isinstance(energy_minus, np.ndarray) and isinstance( energy_plus, np.ndarray): # transpose is used here to make the variable match the variable in IDL store_data(prefix + species_str + '_energy_range' + suffix, data={ 'y': np.array([ energy_gm - energy_minus, energy_gm + energy_plus ]).transpose() }) return prefix + species_str + '_' + data_units + '_omni' + suffix else: print( 'Error, problem finding the telescopes to calculate omni-directional spectrograms' ) return None
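A hypothetical usage example; the EIS variables must be loaded first, and the routine is normally called automatically by mms_load_eis:

import pyspedas

pyspedas.mms.eis(trange=['2015-10-16', '2015-10-17'], datatype='extof')
omni_var = mms_eis_omni('1', species='proton', datatype='extof', data_units='flux', data_rate='srvy')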
def dst(trange=None, time_clip=True, remote_data_dir='http://wdc.kugi.kyoto-u.ac.jp/', suffix=''): """ Loads Dst data from the Kyoto servers. Parameters ----------- trange: list of str Time range to load time_clip: bool If set, clip the data to the requested trange remote_data_dir: str Remote data server at Kyoto suffix: str Suffix to append to the output variable's name Acknowledgment ---------------- The DST data are provided by the World Data Center for Geomagnetism, Kyoto, and are not for redistribution (http://wdc.kugi.kyoto-u.ac.jp/). Furthermore, we thank the geomagnetic observatories (Kakioka [JMA], Honolulu and San Juan [USGS], Hermanus [RSA], Alibag [IIG]), NiCT, INTERMAGNET, and many others for their cooperation to make the Dst index available. Returns -------- Name of the tplot variable created. """ file_names = dailynames(file_format='%Y%m/index.html', trange=trange) times = [] data = [] datatype = '' # Final files for filename in file_names: html_text = requests.get(remote_data_dir + 'dst_final/' + filename).text file_times, file_data = parse_html(html_text, year=filename[:4], month=filename[4:6]) times.extend(file_times) data.extend(file_data) if len(file_times) != 0: datatype = 'Final' # Provisional files for filename in file_names: html_text = requests.get(remote_data_dir + 'dst_provisional/' + filename).text file_times, file_data = parse_html(html_text, year=filename[:4], month=filename[4:6]) times.extend(file_times) data.extend(file_data) if len(file_times) != 0: datatype = 'Provisional' # Real Time files for filename in file_names: html_text = requests.get(remote_data_dir + 'dst_realtime/' + filename).text file_times, file_data = parse_html(html_text, year=filename[:4], month=filename[4:6]) times.extend(file_times) data.extend(file_data) if len(file_times) != 0: datatype = 'Real Time' if len(times) == 0: print('No data found.') return store_data('kyoto_dst' + suffix, data={'x': times, 'y': data}) if time_clip: tclip('kyoto_dst' + suffix, trange[0], trange[1], suffix='') options('kyoto_dst' + suffix, 'ytitle', 'Dst (' + datatype + ')') print( '**************************************************************************************' ) print( 'The DST data are provided by the World Data Center for Geomagnetism, Kyoto, and' ) print( ' are not for redistribution (http://wdc.kugi.kyoto-u.ac.jp/). Furthermore, we thank' ) print( ' the geomagnetic observatories (Kakioka [JMA], Honolulu and San Juan [USGS], Hermanus' ) print( ' [RSA], Alibag [IIG]), NiCT, INTERMAGNET, and many others for their cooperation to' ) print(' make the Dst index available.') print( '**************************************************************************************' ) return 'kyoto_dst' + suffix
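A hypothetical usage example loading and plotting a month of Dst:

from pytplot import tplot

dst_var = dst(trange=['2015-03-01', '2015-03-31'])
if dst_var is not None:
    tplot(dst_var)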
def mms_feeps_spin_avg(probe='1', data_units='intensity', datatype='electron', data_rate='srvy', level='l2', suffix=''): """ This function will spin-average the omni-directional FEEPS energy spectra Parameters: probe: str probe #, e.g., '4' for MMS4 data_units: str 'intensity' or 'count_rate' datatype: str 'electron' or 'ion' data_rate: str instrument data rate, e.g., 'srvy' or 'brst' level: str data level, e.g., 'l2' suffix: str suffix of the loaded data Returns: Name of tplot variable created. """ units_label = '' if data_units == 'intensity': units_label = '1/(cm^2-sr-s-keV)' elif data_units == 'counts': units_label = '[counts/s]' if datatype == 'electron': lower_en = 71.0 else: lower_en = 78.0 prefix = 'mms'+str(probe)+'_epd_feeps_' # get the spin sectors # v5.5+ = mms1_epd_feeps_srvy_l1b_electron_spinsectnum sector_times, spin_sectors = get_data(prefix + data_rate + '_' + level + '_' + datatype + '_spinsectnum' + suffix) spin_starts = [spin_end + 1 for spin_end in np.where(spin_sectors[:-1] >= spin_sectors[1:])[0]] var_name = prefix + data_rate + '_' + level + '_' + datatype + '_' + data_units + '_omni' times, data, energies = get_data(var_name + suffix) spin_avg_flux = np.zeros([len(spin_starts), len(energies)]) current_start = spin_starts[0] for spin_idx in range(1, len(spin_starts)-1): with warnings.catch_warnings(): warnings.simplefilter("ignore", category=RuntimeWarning) spin_avg_flux[spin_idx-1, :] = nanmean(data[current_start:spin_starts[spin_idx]+1, :], axis=0) current_start = spin_starts[spin_idx] + 1 store_data(var_name + '_spin' + suffix, data={'x': times[spin_starts], 'y': spin_avg_flux, 'v': energies}) options(var_name + '_spin' + suffix, 'spec', True) options(var_name + '_spin' + suffix, 'ylog', True) options(var_name + '_spin' + suffix, 'zlog', True) options(var_name + '_spin' + suffix, 'yrange', [lower_en, 600.0]) options(var_name + '_spin' + suffix, 'Colormap', 'jet') options(var_name + '_spin' + suffix, 'ztitle', units_label) options(var_name + '_spin' + suffix, 'ytitle', 'MMS' + str(probe) + ' ' + datatype + ' (keV)') return var_name + '_spin' + suffix
def tdpwrspc(varname, newname=None, nboxpoints=256, nshiftpoints=128, binsize=3, nohanning=False, noline=False, notperhz=False, notmvariance=False): """ Compute power spectra for a tplot variable. Parameters ---------- varname: str Name of pytplot variable. newname: str, optional Name of new pytplot variable to save data to. nboxpoints: int, optional The number of points to use for the hanning window. The default is 256. nshiftpoints: int, optional The number of points to shift for each spectrum. The default is 128. binsize: int, optional Size for binning of the data along the frequency domain. The default is 3. nohanning: bool, optional If True, no hanning window is applied to the input. The default is False. noline: bool, optional If True, no straight line is subtracted. The default is False. notperhz: bool, optional If True, the output units are the square of the input units. The default is False. notmvariance: bool, optional If True, replace the output spectrum with NaNs for any windows that have variable cadence. The default is False. Returns ------- str Name of new pytplot variable. """ if newname is None: newname = varname + '_dpwrspc' data_tuple = get_data(varname) if data_tuple is not None: if data_tuple[1][0].shape != (): split_vars = split_vec(varname) out_vars = [] for var in split_vars: out_vars.append( tdpwrspc(var, newname=var + '_dpwrspc', nboxpoints=nboxpoints, nshiftpoints=nshiftpoints, binsize=binsize, nohanning=nohanning, noline=noline, notperhz=notperhz, notmvariance=notmvariance)) return out_vars else: pwrspc = dpwrspc(data_tuple[0], data_tuple[1], nboxpoints=nboxpoints, nshiftpoints=nshiftpoints, binsize=binsize, nohanning=nohanning, noline=noline, notperhz=notperhz, notmvariance=notmvariance) if pwrspc is not None: store_data(newname, data={ 'x': pwrspc[0], 'y': pwrspc[2], 'v': pwrspc[1] }) options(newname, 'spec', True) options(newname, 'ylog', True) options(newname, 'zlog', True) options(newname, 'Colormap', 'jet') # options(newname, 'yrange', [0.01, 16]) return newname
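A hypothetical usage example; for a 3-vector input the variable is split into components and a list of spectrum names is returned (the THEMIS FGM variable name is illustrative):

import pyspedas
from pytplot import tplot

pyspedas.themis.fgm(probe='c', trange=['2007-03-23', '2007-03-24'])
spec_vars = tdpwrspc('thc_fgs_gse', nboxpoints=256, nshiftpoints=128)
tplot(spec_vars)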
import numpy as np
import pandas as pd
from pytplot import store_data, options
from pyspedas import tclip  # time-clip helper; import path may vary with pyspedas version


def mms_load_eph_tplot(filenames, level='def', probe='1', datatypes=['pos', 'vel'], suffix='', trange=None):
    """
    Helper routine for loading state data (ASCII files from the SDC);
    not meant to be called directly; see pyspedas.mms.state instead
    """
    prefix = 'mms' + probe
    time_values = []
    date_values = []
    x_values = []
    y_values = []
    z_values = []
    vx_values = []
    vy_values = []
    vz_values = []

    for file in filenames:
        # whitespace-delimited ASCII; the 14-line header is skipped
        # (newer pandas prefers sep=r'\s+' over delim_whitespace=True)
        rows = pd.read_csv(file, delim_whitespace=True, header=None, skiprows=14)
        times = rows.shape[0] - 1
        for time_idx in range(0, times):
            # these files can overlap, so avoid duplicates
            if rows[0][time_idx] in date_values:
                continue
            time_values.append(pd.to_datetime(rows[0][time_idx], format='%Y-%j/%H:%M:%S.%f').timestamp())
            x_values.append(rows[2][time_idx])
            y_values.append(rows[3][time_idx])
            z_values.append(rows[4][time_idx])
            vx_values.append(rows[5][time_idx])
            vy_values.append(rows[6][time_idx])
            vz_values.append(rows[7][time_idx])
            date_values.append(rows[0][time_idx])

    if 'pos' in datatypes:
        store_data(prefix + '_' + level + 'eph_pos' + suffix,
                   data={'x': time_values,
                         'y': np.transpose(np.array([x_values, y_values, z_values]))})
        tclip(prefix + '_' + level + 'eph_pos' + suffix, trange[0], trange[1], suffix='')
        options(prefix + '_' + level + 'eph_pos' + suffix, 'ytitle', 'MMS' + str(probe) + ' position [km]')
        options(prefix + '_' + level + 'eph_pos' + suffix, 'legend_names', ['X ECI', 'Y ECI', 'Z ECI'])
        options(prefix + '_' + level + 'eph_pos' + suffix, 'color', ['b', 'g', 'r'])

    if 'vel' in datatypes:
        store_data(prefix + '_' + level + 'eph_vel' + suffix,
                   data={'x': time_values,
                         'y': np.transpose(np.array([vx_values, vy_values, vz_values]))})
        tclip(prefix + '_' + level + 'eph_vel' + suffix, trange[0], trange[1], suffix='')
        options(prefix + '_' + level + 'eph_vel' + suffix, 'ytitle', 'MMS' + str(probe) + ' velocity [km/s]')
        options(prefix + '_' + level + 'eph_vel' + suffix, 'legend_names', ['Vx ECI', 'Vy ECI', 'Vz ECI'])
        options(prefix + '_' + level + 'eph_vel' + suffix, 'color', ['b', 'g', 'r'])
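# Example usage -- since this is an internal helper, the supported entry point is
# the state load routine, which downloads the SDC ASCII files and then calls
# mms_load_eph_tplot on them; the variable names follow prefix + level + 'eph_...':
if __name__ == '__main__':
    import pyspedas
    from pytplot import tplot

    pyspedas.mms.state(trange=['2020-06-01', '2020-06-02'], probe='1',
                       datatypes=['pos', 'vel'])
    tplot(['mms1_defeph_pos', 'mms1_defeph_vel'])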
import logging
import warnings

import numpy as np
from numpy import nanmean
from pytplot import get_data, store_data, options
from pyspedas import tnames


def mms_eis_spin_avg(probe='1', species='proton', data_units='flux', datatype='extof', data_rate='srvy', level='l2', suffix=''):
    """
    This function will spin-average the EIS spectrograms, and is automatically called from mms_load_eis

    Parameters
    ----------
    probe: str
        probe #, e.g., '4' for MMS4
    species: str
        species for calculation (default: 'proton')
    data_units: str
        'flux' or 'cps' (default: 'flux')
    datatype: str
        'extof' or 'phxtof' (default: 'extof')
    data_rate: str
        instrument data rate, e.g., 'srvy' or 'brst' (default: 'srvy')
    level: str
        data level ['l1a', 'l1b', 'l2pre', 'l2' (default)]
    suffix: str
        suffix of the loaded data

    Returns
    -------
    list of str
        List of tplot variables created.
    """
    prefix = 'mms' + probe + '_epd_eis_' + data_rate + '_' + level + '_'

    if data_units == 'flux':
        units_label = '1/(cm^2-sr-s-keV)'
    elif data_units == 'cps':
        units_label = '1/s'
    elif data_units == 'counts':
        units_label = 'counts'

    spin_data = get_data(prefix + datatype + '_spin' + suffix)

    if spin_data is None:
        logging.error('Error, problem finding EIS spin variable to calculate spin-averages')
        return

    spin_times, spin_nums = spin_data

    if spin_nums is not None:
        # a new spin starts wherever the spin number increments
        spin_starts = [spin_start for spin_start in np.where(spin_nums[1:] > spin_nums[:-1])[0]]

        telescopes = tnames(prefix + datatype + '_' + species + '_*' + data_units + '_t?' + suffix)

        if len(telescopes) != 6:
            logging.error('Problem calculating the spin-average for species: ' + species + ' (' + datatype + ')')
            return None

        out_vars = []

        for scope in range(0, 6):
            this_scope = telescopes[scope]
            scope_data = get_data(this_scope)

            if len(scope_data) <= 2:
                logging.error("Error, couldn't find energy table for the variable: " + this_scope)
                continue

            flux_times, flux_data, energies = scope_data

            spin_avg_flux = np.zeros([len(spin_starts), len(energies)])
            current_start = 0

            for spin_idx in range(0, len(spin_starts)):
                with warnings.catch_warnings():
                    # ignore all-NaN slice warnings from nanmean
                    warnings.simplefilter("ignore", category=RuntimeWarning)
                    spin_avg_flux[spin_idx, :] = nanmean(flux_data[current_start:spin_starts[spin_idx] + 1, :], axis=0)
                current_start = spin_starts[spin_idx] + 1

            store_data(this_scope + '_spin', data={'x': flux_times[spin_starts], 'y': spin_avg_flux, 'v': energies})
            options(this_scope + '_spin', 'ztitle', units_label)
            options(this_scope + '_spin', 'ytitle', 'MMS' + probe + ' ' + datatype + ' ' + species + ' (spin) Energy [keV]')
            options(this_scope + '_spin', 'spec', True)
            options(this_scope + '_spin', 'ylog', True)
            options(this_scope + '_spin', 'zlog', True)
            options(this_scope + '_spin', 'Colormap', 'jet')
            out_vars.append(this_scope + '_spin')

        return out_vars
    else:
        logging.error('Error, problem finding EIS spin variable to calculate spin-averages')
        return None
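# Example usage -- a minimal sketch; mms_load_eis runs this automatically, but it
# can be re-run by hand, e.g. for a different species after loading the data:
if __name__ == '__main__':
    import pyspedas
    from pytplot import tplot

    pyspedas.mms.eis(trange=['2017-07-11', '2017-07-12'], probe='1', datatype='extof')
    spin_vars = mms_eis_spin_avg(probe='1', species='proton', data_units='flux',
                                 datatype='extof', data_rate='srvy', level='l2')
    if spin_vars is not None:
        tplot(spin_vars)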
from pytplot import options
from pyspedas import tnames


def mms_fgm_set_metadata(probe, data_rate, level, instrument, suffix=''):
    """
    This function updates the metadata for FGM data products

    Parameters:
        probe : str or list of str
            probe or list of probes, valid values for MMS probes are ['1','2','3','4'].

        data_rate : str or list of str
            instrument data rates for FGM include 'brst', 'fast', 'slow', 'srvy'.
            The default is 'srvy'.

        level : str
            indicates level of data processing. The default if no level is specified is 'l2'.

        instrument : str
            instrument; probably 'fgm'

        suffix: str
            The tplot variable names will be given this suffix. By default, no suffix is added.
    """
    if not isinstance(probe, list):
        probe = [probe]
    if not isinstance(data_rate, list):
        data_rate = [data_rate]
    if not isinstance(level, list):
        level = [level]

    tvars = set(tnames())

    for this_probe in probe:
        for this_dr in data_rate:
            for this_lvl in level:
                # the four near-identical GSE/GSM/DMPA/BCS blocks collapse into one loop
                for coord in ['gse', 'gsm', 'dmpa', 'bcs']:
                    tvar = 'mms' + str(this_probe) + '_' + instrument + '_b_' + coord + '_' + this_dr + '_' + this_lvl + suffix
                    if tvar in tvars:
                        coord_label = coord.upper()
                        options(tvar, 'ytitle', 'MMS' + str(this_probe) + ' FGM')
                        options(tvar, 'color', ['b', 'g', 'r', '#000000'])
                        options(tvar, 'legend_names', ['Bx ' + coord_label, 'By ' + coord_label, 'Bz ' + coord_label, 'B total'])
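# Example usage -- normally invoked at the end of the FGM load routine; shown
# stand-alone here as a sketch (the load call below creates the variables whose
# metadata is then updated):
if __name__ == '__main__':
    import pyspedas

    pyspedas.mms.fgm(trange=['2017-07-11', '2017-07-12'], probe='1', data_rate='srvy')
    mms_fgm_set_metadata(probe='1', data_rate='srvy', level='l2', instrument='fgm')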
from pytplot import options
from pyspedas import tnames


def mms_eis_set_metadata(tplotnames, data_rate='srvy', datatype='extof', suffix=''):
    """
    This function updates the metadata for the EIS data products

    Parameters
    ----------
    tplotnames : list of str
        list of tplot variables loaded by the load routine
    data_rate : str
        Data rate
    datatype : str
        EIS datatype loaded (extof or phxtof)
    suffix: str
        The tplot variable names will be given this suffix. By default, no suffix is added.
    """
    if datatype == 'extof':
        options(tnames('*_extof_proton_flux_omni*'), 'yrange', [55, 1000])
        # options(tnames('*_extof_alpha_flux_omni*'), 'yrange', [80, 650])  # removed in the latest files as of 3 Aug 2021
        options(tnames('*_extof_helium_flux_omni*'), 'yrange', [80, 650])
        options(tnames('*_extof_oxygen_flux_omni*'), 'yrange', [145, 950])
        options(tnames('*_extof_proton_flux_omni*'), 'x_interp', True)
        options(tnames('*_extof_proton_flux_omni*'), 'y_interp', True)
        options(tnames('*_extof_helium_flux_omni*'), 'x_interp', True)
        options(tnames('*_extof_helium_flux_omni*'), 'y_interp', True)
        options(tnames('*_extof_oxygen_flux_omni*'), 'x_interp', True)
        options(tnames('*_extof_oxygen_flux_omni*'), 'y_interp', True)
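# Example usage -- a sketch; note that the body above keys off global tname
# patterns, so the tplotnames argument is effectively informational here:
if __name__ == '__main__':
    import pyspedas
    from pyspedas import tnames

    pyspedas.mms.eis(trange=['2017-07-11', '2017-07-12'], probe='1', datatype='extof')
    mms_eis_set_metadata(tnames('mms1_epd_eis_*'), data_rate='srvy', datatype='extof')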