def ex_wavelet(plot=True):
    """Demonstrate how to use wavelets with pyspedas.

    Builds a synthetic sine wave whose period changes in the middle,
    applies a complex wavelet transform, and optionally plots both the
    wave and the resulting power spectrum.
    """
    # Start from a clean tplot state.
    pytplot.del_data()

    # Synthesize the test signal: period-32 sine everywhere, except a
    # period-64 sine spliced into the middle section [1000, 3000).
    samples = np.arange(4000.)
    signal = np.sin(2 * np.pi * samples / 32.)
    slow_wave = np.sin(2 * np.pi * samples / 64.)
    signal[1000:3000] = slow_wave[1000:3000]

    tvar = 'sin_wav'
    seconds = time_float('2010-01-01') + 10 * samples
    pytplot.store_data(tvar, data={'x': seconds, 'y': signal})

    # Complex wavelet transformation.
    # Also try wavename='gaus1' and compare the results.
    transformed = wavelet(tvar, wavename='cmorl0.5-1.0')
    power_name = transformed[0]

    # Configure the spectrogram panel.
    pytplot.options(power_name, 'colormap', 'jet')
    pytplot.options(power_name, 'ylog', True)
    pytplot.options(power_name, 'ytitle', power_name)
    pytplot.ylim(power_name, 0.001, 1.0)

    if plot:
        pytplot.tplot([tvar, power_name])

    return 1
def test_altitude_plot():
    """Exercise altitude-linked plotting of a GOES netCDF file."""
    pytplot.netcdf_to_tplot(
        current_directory + "/testfiles/g15_xrs_2s_20170619_20170619.nc",
        time='time_tag')
    # Fabricate a monotonically increasing "altitude" series on the same
    # time axis as A_COUNT, then link the two variables together.
    count_times = pytplot.data_quants['A_COUNT'].coords['time'].values
    pytplot.store_data(
        'altitude',
        data={'x': count_times,
              'y': np.arange(0, len(count_times), step=1)})
    pytplot.link('A_COUNT', 'altitude')
    pytplot.xlim('2017-06-19 02:00:00', '2017-06-19 04:00:00')
    pytplot.ylim("A_COUNT", 17000, 18000)
    # Two time bars: one via an RGB tuple, one via a named color.
    pytplot.timebar('2017-06-19 03:00:00', "A_COUNT",
                    color=(100, 255, 0), thick=3)
    pytplot.timebar('2017-06-19 03:30:00', "A_COUNT", color='g')
    pytplot.options("A_COUNT", 'alt', 1)
    # Render with both backends in testing mode.
    pytplot.tplot(2, testing=True)
    pytplot.tplot(2, testing=True, bokeh=True)
def ex_spectra(plot=True):
    """Download THEMIS data and create a plot."""
    # Start from a clean tplot state.
    pytplot.del_data()

    # Download THEMIS state and SST data for 2015-12-31.
    trange = ['2015-12-31 00:00:00', '2015-12-31 23:59:59']
    pyspedas.themis.state(probe='a', trange=trange)
    pyspedas.themis.sst(probe='a', trange=trange,
                        varnames=['tha_psif_en_eflux'])

    # Configure axis limits, colormap, and title.
    pytplot.ylim('tha_pos', -23000.0, 81000.0)
    pytplot.ylim('tha_psif_en_eflux', 10000.0, 4000000.0)
    pytplot.options('tha_psif_en_eflux', 'colormap', 'jet')
    pytplot.tplot_options('title', 'tha 2015-12-31')

    # Plot the line panel and the spectrogram panel together.
    if plot:
        pytplot.tplot(['tha_pos', 'tha_psif_en_eflux'])

    # Signal that the example finished without problems.
    return 1
def ex_spectra():
    """Load THEMIS SST data and display it as a spectrogram."""
    # Start from a clean tplot state.
    pytplot.del_data()

    # Download THEMIS SST level-2 data for 2015-12-31.
    pyspedas.load_data('themis', ['2015-12-31'], ['tha'], 'sst', 'l2')

    # Configure title, y-axis limits, and colormap.
    pytplot.tplot_options('title', 'tha_psif_en_eflux 2015-12-31')
    pytplot.ylim('tha_psif_en_eflux', 10000.0, 10000000.0)
    pytplot.options('tha_psif_en_eflux', 'colormap', 'viridis')

    # Plot the spectrogram.
    pytplot.tplot(['tha_psif_en_eflux'])
def ex_create():
    """Create a sine-wave variable, then clip, deflag, interpolate, plot."""
    # Start from a clean tplot state.
    pytplot.del_data()

    # Build one period of a sine wave sampled at 101 points.
    phase = [2.0 / 100.0 * numpy.pi * step for step in range(0, 101)]
    start = pyspedas.time_float('2017-01-01')
    x = [start + 60.0 / (2 * numpy.pi) * 60.0 * p for p in phase]
    y = [1000.0 * numpy.sin(p) for p in phase]

    # Store the variable.
    pytplot.store_data('sinx', data={'x': x, 'y': y})

    # Clip values outside [-800, 800].
    pyspedas.tclip('sinx', -800.0, 800.0)
    # Remove NaN values.
    pyspedas.tdeflag('sinx-clip')
    # Interpolate back onto the original time axis.
    pyspedas.tinterpol(['sinx-clip-deflag'], ['sinx'], 'quadratic')

    # Same y-range on every processing stage, then plot all four.
    for name in ('sinx', 'sinx-clip', 'sinx-clip-deflag',
                 'sinx-clip-deflag-itrp'):
        pytplot.ylim(name, -1100.0, 1100.0)
    pytplot.tplot_options('title', 'Interpolation example')
    pytplot.tplot(
        ['sinx', 'sinx-clip', 'sinx-clip-deflag', 'sinx-clip-deflag-itrp'])
def test_goes_read():
    """Read a GOES netCDF file and exercise basic plotting options."""
    pytplot.netcdf_to_tplot(
        current_directory + "/testfiles/g15_xrs_2s_20170619_20170619.nc",
        time='time_tag')
    pytplot.xlim('2017-06-19 02:00:00', '2017-06-19 04:00:00')
    pytplot.ylim("B_COUNT", 17000, 18000)
    # Two time bars: one via an RGB tuple, one via a named color.
    pytplot.timebar('2017-06-19 03:00:00', "B_COUNT",
                    color=(100, 255, 0), thick=3)
    pytplot.timebar('2017-06-19 03:30:00', "B_COUNT", color='g')
    pytplot.options("B_COUNT", 'ylog', 1)
    # Combine two existing variables into a pseudo-variable.
    pytplot.store_data("BCOUNTFLUX", data=["B_COUNT", "B_FLUX"])
    # Plot by panel index, with panel 6 rendered as a variable label.
    pytplot.tplot([1, 2, 3, 4, 5, 7], var_label=6, testing=True)
def ex_create(plot=True):
    """Show how to create and plot pytplot variables."""
    # Start from a clean tplot state.
    pytplot.del_data()

    # Build one period of a sine wave sampled at 101 points.
    phase = [2.0 / 100.0 * numpy.pi * step for step in range(0, 101)]
    start = time_float('2017-01-01')
    x = [start + 60.0 / (2 * numpy.pi) * 60.0 * p for p in phase]
    y = [1000.0 * numpy.sin(p) for p in phase]

    # Store the variable.
    pytplot.store_data('sinx', data={'x': x, 'y': y})

    # Clip y values outside [-800, 800].
    pyspedas.yclip('sinx', -800.0, 800.0)
    # Remove NaN values.
    pyspedas.tdeflag('sinx-clip')
    # Interpolate back onto the original time axis.
    pyspedas.tinterpol(['sinx-clip-deflag'], 'sinx', 'quadratic')

    # Same y-range on every processing stage.
    for name in ('sinx', 'sinx-clip', 'sinx-clip-deflag',
                 'sinx-clip-deflag-itrp'):
        pytplot.ylim(name, -1100.0, 1100.0)
    pytplot.tplot_options('title', 'Interpolation example')

    if plot:
        pytplot.tplot(
            ['sinx', 'sinx-clip', 'sinx-clip-deflag',
             'sinx-clip-deflag-itrp'])

    # Signal that the example finished without problems.
    return 1
def ex_basic():
    """Load THEMIS state data, copy it into a new variable, and plot."""
    # Start from a clean tplot state.
    pytplot.del_data()

    # Download THEMIS state data for 2015-12-31.
    trange = ['2015-12-31 00:00:00', '2016-01-01 12:00:00']
    pyspedas.load_data('themis', trange, ['tha'], 'state', 'l1')

    # Pull the position data out into plain python variables.
    alldata = pytplot.get_data("tha_pos")
    times = alldata[0]
    values = alldata[1]

    # Store a new pytplot variable from the extracted arrays.
    pytplot.store_data("tha_position", data={'x': times, 'y': values})

    # Define the y-axis limits.
    pytplot.ylim('tha_pos', -23000.0, 81000.0)
    pytplot.ylim('tha_position', -23000.0, 81000.0)
    pytplot.ylim('tha_vel', -8.0, 12.0)

    # Plot position and velocity using the pyqtgraph library (default).
    pytplot.tplot(["tha_pos", "tha_position", "tha_vel"])
def ex_basic(plot=True):
    """Download and plot THEMIS data."""
    # Start from a clean tplot state.
    pytplot.del_data()

    # Download THEMIS state data for 2015-12-31.
    trange = ['2015-12-31 00:00:00', '2016-01-01 12:00:00']
    pyspedas.themis.state(probe='a', trange=trange, time_clip=True)

    # Pull the position data out into plain python variables.
    alldata = pytplot.get_data("tha_pos")
    times = alldata[0]
    values = alldata[1]
    # Here we could work on the data before saving a new tplot variable.

    # Store a new pytplot variable.
    pytplot.store_data("tha_position", data={'x': times, 'y': values})

    # Define the y-axis limits.
    pytplot.ylim('tha_pos', -23000.0, 81000.0)
    pytplot.ylim('tha_position', -23000.0, 81000.0)
    pytplot.ylim('tha_vel', -8.0, 12.0)

    # Give a title to the plot and labels for the y-axis panels.
    pytplot.tplot_options('title', 'tha position and velocity, 2015-12-31')
    pytplot.options('tha_pos', 'ytitle', 'Position')
    pytplot.options('tha_vel', 'ytitle', 'Velocity')

    # Plot position and velocity using the pyqtgraph library (default);
    # pass bokeh=True to pytplot.tplot to use the bokeh backend instead.
    if plot:
        pytplot.tplot(["tha_pos", "tha_position", "tha_vel"])

    # Signal that the example finished without problems.
    return 1
def pwe_hfa(trange=['2017-04-01', '2017-04-02'],
            datatype='spec',
            mode='low',
            level='l2',
            suffix='',
            get_support_data=False,
            varformat=None,
            varnames=None,
            downloadonly=False,
            notplot=False,
            no_update=False,
            uname=None,
            passwd=None,
            time_clip=False,
            ror=True):
    """
    This function loads data from the PWE experiment from the Arase mission

    Parameters:
        trange : list of str
            time range of interest [starttime, endtime] with the format
            'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
            ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']

        datatype: str
            Data type; Valid options:

        level: str
            Data level; Valid options:

        suffix: str
            The tplot variable names will be given this suffix.
            By default, no suffix is added.

        get_support_data: bool
            Data with an attribute "VAR_TYPE" with a value of "support_data"
            will be loaded into tplot. By default, only loads in data with a
            "VAR_TYPE" attribute of "data".

        varformat: str
            The file variable formats to load into tplot. Wildcard character
            "*" is accepted. By default, all variables are loaded in.

        varnames: list of str
            List of variable names to load (if not specified, i.e. None,
            all data variables are loaded)

        downloadonly: bool
            Set this flag to download the CDF files, but not load them into
            tplot variables

        notplot: bool
            Return the data in hash tables instead of creating tplot variables

        no_update: bool
            If set, only load data from your local cache

        time_clip: bool
            Time clip the variables to exactly the range specified in the
            trange keyword

        ror: bool
            If set, print PI info and rules of the road

    Returns:
        List of tplot variables created.
    """
    # Remember the caller's notplot request; `load` may alter behavior later.
    initial_notplot_flag = False
    if notplot:
        initial_notplot_flag = True

    # BUGFIX: the default used to be a mutable `[]`, shared across calls.
    if varnames is None:
        varnames = []

    file_res = 3600. * 24

    # Build file-path pattern and tplot-name prefix for the requested level.
    # (Previously the 'l2' test was duplicated across two `if` statements.)
    if level == 'l2':
        prefix = 'erg_pwe_hfa_' + level + '_' + mode + '_'
        pathformat = 'satellite/erg/pwe/hfa/'+level+'/'+datatype+'/'+mode + \
            '/%Y/%m/erg_pwe_hfa_'+level+'_'+datatype+'_'+mode+'_%Y%m%d_v??_??.cdf'
    elif level == 'l3':
        prefix = 'erg_pwe_hfa_' + level + '_1min_'
        pathformat = 'satellite/erg/pwe/hfa/'+level + \
            '/%Y/%m/erg_pwe_hfa_'+level+'_1min_%Y%m%d_v??_??.cdf'

    loaded_data = load(pathformat=pathformat, trange=trange, level=level,
                       datatype=datatype, file_res=file_res, prefix=prefix,
                       suffix=suffix, get_support_data=get_support_data,
                       varformat=varformat, varnames=varnames,
                       downloadonly=downloadonly, notplot=notplot,
                       time_clip=time_clip, no_update=no_update,
                       uname=uname, passwd=passwd)

    if (len(loaded_data) > 0) and ror:
        # Best-effort: print the PI info and rules of the road.
        try:
            if isinstance(loaded_data, list):
                if downloadonly:
                    cdf_file = cdflib.CDF(loaded_data[-1])
                    gatt = cdf_file.globalattsget()
                else:
                    gatt = get_data(loaded_data[-1], metadata=True)['CDF']['GATT']
            elif isinstance(loaded_data, dict):
                gatt = loaded_data[list(loaded_data.keys())[-1]]['CDF']['GATT']

            # --- print PI info and rules of the road
            print(' ')
            print(' ')
            print('**************************************************************************')
            print(gatt["LOGICAL_SOURCE_DESCRIPTION"])
            print('')
            print('Information about ERG PWE HFA')
            print('')
            print('PI: ', gatt['PI_NAME'])
            print("Affiliation: " + gatt["PI_AFFILIATION"])
            print('')
            print('RoR of ERG project common: https://ergsc.isee.nagoya-u.ac.jp/data_info/rules_of_the_road.shtml.en')
            print('RoR of PWE/HFA: https://ergsc.isee.nagoya-u.ac.jp/mw/index.php/ErgSat/Pwe/Hfa')
            print('')
            print('Contact: erg_pwe_info at isee.nagoya-u.ac.jp')
            print('**************************************************************************')
        except Exception:
            # BUGFIX: was a bare `except:`; never fail the load over printing.
            print('Printing PI info and rules of the road failed')

    if initial_notplot_flag or downloadonly:
        return loaded_data

    if (level == 'l2') and (mode == 'low') and (not notplot):
        # set spectrogram plot option
        options(prefix + 'spectra_eu' + suffix, 'Spec', 1)
        options(prefix + 'spectra_ev' + suffix, 'Spec', 1)
        options(prefix + 'spectra_bgamma' + suffix, 'Spec', 1)
        options(prefix + 'spectra_esum' + suffix, 'Spec', 1)
        options(prefix + 'spectra_er' + suffix, 'Spec', 1)
        options(prefix + 'spectra_el' + suffix, 'Spec', 1)
        options(prefix + 'spectra_e_mix' + suffix, 'Spec', 1)
        options(prefix + 'spectra_e_ar' + suffix, 'Spec', 1)

        if prefix + 'spectra_er' + suffix in loaded_data:
            # remove minus values in y array
            clip(prefix + 'spectra_er' + suffix, 0., 5000.)
        if prefix + 'spectra_el' + suffix in loaded_data:
            # remove minus values in y array
            clip(prefix + 'spectra_el' + suffix, 0., 5000.)
        if prefix + 'spectra_eu' + suffix in loaded_data:
            # remove minus values in y array
            clip(prefix + 'spectra_eu' + suffix, 0., 5000.)
            # set ylim / zlim
            ylim(prefix + 'spectra_eu' + suffix, 2.0, 10000.0)
            zlim(prefix + 'spectra_eu' + suffix, 1e-10, 1e-3)
        if prefix + 'spectra_ev' + suffix in loaded_data:
            # remove minus values in y array
            clip(prefix + 'spectra_ev' + suffix, 0., 5000.)
            # set ylim / zlim
            ylim(prefix + 'spectra_ev' + suffix, 2.0, 10000.0)
            zlim(prefix + 'spectra_ev' + suffix, 1e-10, 1e-3)
        if prefix + 'spectra_bgamma' + suffix in loaded_data:
            # set ylim / zlim
            ylim(prefix + 'spectra_bgamma' + suffix, 2.0, 200.0)
            zlim(prefix + 'spectra_bgamma' + suffix, 1e-4, 1e+2)
        if prefix + 'spectra_esum' + suffix in loaded_data:
            # set ylim / zlim
            ylim(prefix + 'spectra_esum' + suffix, 2.0, 10000.0)
            zlim(prefix + 'spectra_esum' + suffix, 1e-10, 1e-3)
        if prefix + 'spectra_e_ar' + suffix in loaded_data:
            # set ylim / zlim
            ylim(prefix + 'spectra_e_ar' + suffix, 2.0, 10000.0)
            zlim(prefix + 'spectra_e_ar' + suffix, -1, 1)

        # set y axis to logscale
        options(prefix + 'spectra_eu' + suffix, 'ylog', 1)
        options(prefix + 'spectra_ev' + suffix, 'ylog', 1)
        options(prefix + 'spectra_bgamma' + suffix, 'ylog', 1)
        options(prefix + 'spectra_esum' + suffix, 'ylog', 1)
        options(prefix + 'spectra_er' + suffix, 'ylog', 1)
        options(prefix + 'spectra_el' + suffix, 'ylog', 1)
        options(prefix + 'spectra_e_mix' + suffix, 'ylog', 1)
        options(prefix + 'spectra_e_ar' + suffix, 'ylog', 1)

        # set z axis to logscale
        # NOTE(review): 'spectra_e_ar' is deliberately left linear here;
        # its z range above is [-1, 1] — confirm against the IDL original.
        options(prefix + 'spectra_eu' + suffix, 'zlog', 1)
        options(prefix + 'spectra_ev' + suffix, 'zlog', 1)
        options(prefix + 'spectra_bgamma' + suffix, 'zlog', 1)
        options(prefix + 'spectra_esum' + suffix, 'zlog', 1)
        options(prefix + 'spectra_er' + suffix, 'zlog', 1)
        options(prefix + 'spectra_el' + suffix, 'zlog', 1)
        options(prefix + 'spectra_e_mix' + suffix, 'zlog', 1)

        # set ytitle
        options(prefix + 'spectra_eu' + suffix, 'ytitle', 'ERG PWE/HFA (EU)')
        options(prefix + 'spectra_ev' + suffix, 'ytitle', 'ERG PWE/HFA (EV)')
        options(prefix + 'spectra_esum' + suffix, 'ytitle', 'ERG PWE/HFA (ESUM)')
        options(prefix + 'spectra_e_ar' + suffix, 'ytitle', 'ERG PWE/HFA (E_AR)')
        options(prefix + 'spectra_bgamma' + suffix, 'ytitle', 'ERG PWE/HFA (BGAMMA)')

        # set ysubtitle
        options(prefix + 'spectra_eu' + suffix, 'ysubtitle', 'frequency [Hz]')
        options(prefix + 'spectra_ev' + suffix, 'ysubtitle', 'frequency [Hz]')
        options(prefix + 'spectra_esum' + suffix, 'ysubtitle', 'frequency [Hz]')
        options(prefix + 'spectra_e_ar' + suffix, 'ysubtitle', 'frequency [Hz]')
        options(prefix + 'spectra_bgamma' + suffix, 'ysubtitle', 'frequency [Hz]')

        # set ztitle
        options(prefix + 'spectra_eu' + suffix, 'ztitle', 'mV^2/m^2/Hz')
        options(prefix + 'spectra_ev' + suffix, 'ztitle', 'mV^2/m^2/Hz')
        options(prefix + 'spectra_esum' + suffix, 'ztitle', 'mV^2/m^2/Hz')
        options(prefix + 'spectra_e_ar' + suffix, 'ztitle', 'LH:-1/RH:+1')
        options(prefix + 'spectra_bgamma' + suffix, 'ztitle', 'pT^2/Hz')

        # change colormap option
        options(prefix + 'spectra_eu' + suffix, 'Colormap', 'jet')
        options(prefix + 'spectra_ev' + suffix, 'Colormap', 'jet')
        options(prefix + 'spectra_bgamma' + suffix, 'Colormap', 'jet')
        options(prefix + 'spectra_esum' + suffix, 'Colormap', 'jet')
        options(prefix + 'spectra_er' + suffix, 'Colormap', 'jet')
        options(prefix + 'spectra_el' + suffix, 'Colormap', 'jet')
        options(prefix + 'spectra_e_mix' + suffix, 'Colormap', 'jet')
        options(prefix + 'spectra_e_ar' + suffix, 'Colormap', 'jet')

    elif level == 'l3':
        # set ytitle  (BUGFIX: corrected misspelled 'eletctorn' label)
        options(prefix + 'Fuhr' + suffix, 'ytitle', 'UHR frequency [Mhz]')
        options(prefix + 'ne_mgf' + suffix, 'ytitle', 'electron density [/cc]')

        # set y axis to logscale
        options(prefix + 'Fuhr' + suffix, 'ylog', 1)
        options(prefix + 'ne_mgf' + suffix, 'ylog', 1)

    return loaded_data
def lepi(trange=['2017-07-01', '2017-07-02'],
         datatype='omniflux',
         level='l2',
         suffix='',
         get_support_data=False,
         varformat=None,
         varnames=None,
         downloadonly=False,
         notplot=False,
         no_update=False,
         uname=None,
         passwd=None,
         time_clip=False,
         ror=True,
         version=None):
    """
    This function loads data from the LEP-i experiment from the Arase mission

    Parameters:
        trange : list of str
            time range of interest [starttime, endtime] with the format
            'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
            ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']

        datatype: str
            Data type; Valid options:

        level: str
            Data level; Valid options:

        suffix: str
            The tplot variable names will be given this suffix.
            By default, no suffix is added.

        get_support_data: bool
            Data with an attribute "VAR_TYPE" with a value of "support_data"
            will be loaded into tplot. By default, only loads in data with a
            "VAR_TYPE" attribute of "data".

        varformat: str
            The file variable formats to load into tplot. Wildcard character
            "*" is accepted. By default, all variables are loaded in.

        varnames: list of str
            List of variable names to load (if not specified, i.e. None,
            all data variables are loaded)

        downloadonly: bool
            Set this flag to download the CDF files, but not load them into
            tplot variables

        notplot: bool
            Return the data in hash tables instead of creating tplot variables

        no_update: bool
            If set, only load data from your local cache

        time_clip: bool
            Time clip the variables to exactly the range specified in the
            trange keyword

        ror: bool
            If set, print PI info and rules of the road

        version: str
            Set this value to specify the version of cdf files
            (such as "v03_00")

    Returns:
        List of tplot variables created.
    """
    # Remember the caller's notplot request.
    initial_notplot_flag = False
    if notplot:
        initial_notplot_flag = True

    # BUGFIX: the default used to be a mutable `[]`, shared across calls.
    if varnames is None:
        varnames = []

    # Level 3 only exists as pitch-angle data.
    if level == 'l3':
        datatype = 'pa'

    file_res = 3600. * 24
    prefix = 'erg_lepi_' + level + '_' + datatype + '_'
    pathformat = 'satellite/erg/lepi/'+level+'/'+datatype + \
        '/%Y/%m/erg_lepi_'+level+'_'+datatype+'_%Y%m%d_'
    if version is None:
        pathformat += 'v??_??.cdf'
    else:
        pathformat += version + '.cdf'

    loaded_data = load(pathformat=pathformat, trange=trange,
                       file_res=file_res, prefix=prefix, suffix=suffix,
                       get_support_data=get_support_data,
                       varformat=varformat, varnames=varnames,
                       downloadonly=downloadonly, notplot=notplot,
                       time_clip=time_clip, no_update=no_update,
                       uname=uname, passwd=passwd)

    if (len(loaded_data) > 0) and ror:
        # Best-effort: print the PI info and rules of the road.
        try:
            if isinstance(loaded_data, list):
                if downloadonly:
                    cdf_file = cdflib.CDF(loaded_data[-1])
                    gatt = cdf_file.globalattsget()
                else:
                    gatt = get_data(loaded_data[-1], metadata=True)['CDF']['GATT']
            elif isinstance(loaded_data, dict):
                gatt = loaded_data[list(loaded_data.keys())[-1]]['CDF']['GATT']

            # --- print PI info and rules of the road
            print(' ')
            print('**************************************************************************')
            print(gatt["LOGICAL_SOURCE_DESCRIPTION"])
            print('')
            print('Information about ERG LEPi')
            print('')
            print('PI: ', gatt['PI_NAME'])
            print("Affiliation: " + gatt["PI_AFFILIATION"])
            print('')
            print('RoR of ERG project common: https://ergsc.isee.nagoya-u.ac.jp/data_info/rules_of_the_road.shtml.en')
            print('RoR of LEPi L2: https://ergsc.isee.nagoya-u.ac.jp/mw/index.php/ErgSat/Lepi')
            print('RoR of ERG/LEPi: https://ergsc.isee.nagoya-u.ac.jp/mw/index.php/ErgSat/Lepi#Rules_of_the_Road')
            print('')
            print('Contact: erg_lepi_info at isee.nagoya-u.ac.jp')
            print('**************************************************************************')
        except Exception:
            # BUGFIX: was a bare `except:`; never fail the load over printing.
            print('Printing PI info and rules of the road failed')

    if initial_notplot_flag or downloadonly:
        return loaded_data

    if (datatype == 'omniflux') and (level == 'l2'):
        tplot_variables = []
        # BUGFIX: flag_FPDO must exist even when FPDO was not loaded;
        # previously the `if flag_FPDO:` guard below raised NameError.
        flag_FPDO = False

        if prefix + 'FPDO' + suffix in loaded_data:
            # Keep the raw variable, then re-store with the two highest
            # energy bins dropped.
            store_data(prefix + 'FPDO' + suffix,
                       newname=prefix + 'FPDO_raw' + suffix)
            loaded_data.append(prefix + 'FPDO_raw' + suffix)
            get_data_vars = get_data(prefix + 'FPDO_raw' + suffix)
            flag_FPDO = store_data(prefix + 'FPDO' + suffix,
                                   data={'x': get_data_vars[0],
                                         'y': get_data_vars[1][:, :-2],
                                         'v': get_data_vars[2][:-2]})
            tplot_variables.append(prefix + 'FPDO' + suffix)

        if prefix + 'FHEDO' + suffix in loaded_data:
            store_data(prefix + 'FHEDO' + suffix,
                       newname=prefix + 'FHEDO_raw' + suffix)
            loaded_data.append(prefix + 'FHEDO_raw' + suffix)
            get_data_vars = get_data(prefix + 'FHEDO_raw' + suffix)
            store_data(prefix + 'FHEDO' + suffix,
                       data={'x': get_data_vars[0],
                             'y': get_data_vars[1][:, :-2],
                             'v': get_data_vars[2][:-2]})
            tplot_variables.append(prefix + 'FHEDO' + suffix)

        if prefix + 'FODO' + suffix in loaded_data:
            store_data(prefix + 'FODO' + suffix,
                       newname=prefix + 'FODO_raw' + suffix)
            loaded_data.append(prefix + 'FODO_raw' + suffix)
            get_data_vars = get_data(prefix + 'FODO_raw' + suffix)
            store_data(prefix + 'FODO' + suffix,
                       data={'x': get_data_vars[0],
                             'y': get_data_vars[1][:, :-2],
                             'v': get_data_vars[2][:-2]})
            tplot_variables.append(prefix + 'FODO' + suffix)

        # remove minus values of y array
        if flag_FPDO:
            clip(prefix + 'FPDO' + suffix, 0., 2.e+16)

        # set ytitle / ysubtitle
        options(tplot_variables, 'ytitle', 'LEPi\nomniflux\nLv2\nEnergy')
        options(tplot_variables, 'ysubtitle', '[keV/q]')
        # set spectrogram plot option and log y axis
        options(tplot_variables, 'Spec', 1)
        options(tplot_variables, 'ylog', 1)
        for i in range(len(tplot_variables)):
            # set ylim / zlim
            ylim(tplot_variables[i], 0.01, 25.0)
            zlim(tplot_variables[i], 1e+1, 1e+9)
        # set ztitle and log z axis
        options(tplot_variables, 'ztitle', '[/cm^2-str-s-keV]')
        options(tplot_variables, 'zlog', 1)
        # change colormap option
        options(tplot_variables, 'Colormap', 'jet')

    elif (datatype == '3dflux') and (level == 'l2') and (not notplot):
        # Differential fluxes, energy fluxes, and raw counts — each with a
        # '_sub' companion variable.
        vns_fidu = ['FPDU', 'FPDU_sub', 'FHEDU',
                    'FHEDU_sub', 'FODU', 'FODU_sub']
        vns_fiedu = ['FPEDU', 'FPEDU_sub', 'FHEEDU',
                     'FHEEDU_sub', 'FOEDU', 'FOEDU_sub']
        vns_cnt = ['FPDU_COUNT_RAW', 'FPDU_COUNT_RAW_sub',
                   'FHEDU_COUNT_RAW', 'FHEDU_COUNT_RAW_sub',
                   'FODU_COUNT_RAW', 'FODU_COUNT_RAW_sub']
        vns_list = vns_fidu + vns_fiedu + vns_cnt

        v2_array_for_not_sub = np.arange(8)
        v2_array_for_sub = np.arange(7) + 8
        v3_array = np.arange(16)

        for vns_pattarn in vns_list:
            t_plot_name = prefix + vns_pattarn + suffix
            if t_plot_name in loaded_data:
                # Preserve the untouched data, then re-store the first 30
                # energy channels with explicit v2/v3 support arrays.
                tplot_copy(t_plot_name, t_plot_name + '_raw')
                get_data_vars_temporal = get_data(t_plot_name)
                meta_data_in = get_data(t_plot_name, metadata=True)
                if 'sub' in t_plot_name:
                    store_data(t_plot_name,
                               data={'x': get_data_vars_temporal[0],
                                     'y': get_data_vars_temporal[1][:, 0:30, :, :],
                                     'v1': get_data_vars_temporal[2][0:30],
                                     'v2': v2_array_for_sub,
                                     'v3': v3_array},
                               attr_dict=meta_data_in)
                else:
                    store_data(t_plot_name,
                               data={'x': get_data_vars_temporal[0],
                                     'y': get_data_vars_temporal[1][:, 0:30, :, :],
                                     'v1': get_data_vars_temporal[2][0:30],
                                     'v2': v2_array_for_not_sub,
                                     'v3': v3_array},
                               attr_dict=meta_data_in)
                ylim(t_plot_name, 0.01, 30.)
                options(t_plot_name, 'ylog', 1)
                options(t_plot_name, 'zlog', 1)

        # Kept disabled in order to match the result of part_product of IDL:
        # if prefix + 'FPDU' + suffix in loaded_data:
        #     clip(prefix + 'FPDU' + suffix, -1.0e+10, 1.0e+10)
        # if prefix + 'FHEDU' + suffix in loaded_data:
        #     clip(prefix + 'FHEDU' + suffix, -1.0e+10, 1.0e+10)
        # if prefix + 'FODU' + suffix in loaded_data:
        #     clip(prefix + 'FODU' + suffix, -1.0e+10, 1.0e+10)

    elif level == 'l3':
        tplot_variables = []
        # BUGFIX: initialize so the FHEDU/FODU slices below work even when
        # an earlier species is absent; previously this raised NameError
        # whenever FPDU was not loaded.
        tplot_variables_length = 0

        if prefix + 'FPDU' + suffix in loaded_data:
            tplot_variables.append(prefix + 'FPDU' + suffix)
            get_data_vars = get_data(prefix + 'FPDU' + suffix)
            ylim(prefix + 'FPDU' + suffix, 0, 180)
            zlim(prefix + 'FPDU' + suffix, 1e2, 1e5)
            options(prefix + 'FPDU' + suffix, 'spec', 1)
            ytitle_keV_array = np.round(np.nan_to_num(get_data_vars[2]), 2)
            # One pitch-angle spectrogram per energy bin.
            for i in range(get_data_vars[1].shape[1]):
                tplot_name = prefix + 'pabin_' + \
                    str(i).zfill(2) + '_FPDU' + suffix
                store_data(tplot_name,
                           data={'x': get_data_vars[0],
                                 'y': get_data_vars[1][:, i, :],
                                 'v': get_data_vars[3]})
                options(tplot_name, 'spec', 1)
                ylim(tplot_name, 0, 180)
                zlim(tplot_name, 1e2, 1e5)
                options(tplot_name, 'ytitle',
                        'ERG LEP-i P\n' + str(ytitle_keV_array[i]) +
                        ' keV\nPitch angle')
                tplot_variables.append(tplot_name)
            # Skip the main variable (already in loaded_data); add the bins.
            loaded_data += tplot_variables[tplot_variables_length + 1:]
            tplot_variables_length = len(tplot_variables)

        if prefix + 'FHEDU' + suffix in loaded_data:
            tplot_variables.append(prefix + 'FHEDU' + suffix)
            get_data_vars = get_data(prefix + 'FHEDU' + suffix)
            options(prefix + 'FHEDU' + suffix, 'spec', 1)
            ylim(prefix + 'FHEDU' + suffix, 0, 180)
            zlim(prefix + 'FHEDU' + suffix, 1e2, 1e5)
            ytitle_keV_array = np.round(np.nan_to_num(get_data_vars[2]), 2)
            for i in range(get_data_vars[1].shape[1]):
                tplot_name = prefix + 'pabin_' + \
                    str(i).zfill(2) + '_FHEDU' + suffix
                store_data(tplot_name,
                           data={'x': get_data_vars[0],
                                 'y': get_data_vars[1][:, i, :],
                                 'v': get_data_vars[3]})
                options(tplot_name, 'spec', 1)
                ylim(tplot_name, 0, 180)
                zlim(tplot_name, 1e2, 1e5)
                # NOTE(review): label says 'P' for the He+ bins too — looks
                # copy-pasted from the FPDU branch; confirm intended label.
                options(tplot_name, 'ytitle',
                        'ERG LEP-i P\n' + str(ytitle_keV_array[i]) +
                        ' keV\nPitch angle')
                tplot_variables.append(tplot_name)
            loaded_data += tplot_variables[tplot_variables_length + 1:]
            tplot_variables_length = len(tplot_variables)

        if prefix + 'FODU' + suffix in loaded_data:
            tplot_variables.append(prefix + 'FODU' + suffix)
            get_data_vars = get_data(prefix + 'FODU' + suffix)
            options(prefix + 'FODU' + suffix, 'spec', 1)
            ylim(prefix + 'FODU' + suffix, 0, 180)
            zlim(prefix + 'FODU' + suffix, 1e2, 1e5)
            ytitle_keV_array = np.round(np.nan_to_num(get_data_vars[2]), 2)
            for i in range(get_data_vars[1].shape[1]):
                tplot_name = prefix + 'pabin_' + \
                    str(i).zfill(2) + '_FODU' + suffix
                store_data(tplot_name,
                           data={'x': get_data_vars[0],
                                 'y': get_data_vars[1][:, i, :],
                                 'v': get_data_vars[3]})
                options(tplot_name, 'spec', 1)
                ylim(tplot_name, 0, 180)
                zlim(tplot_name, 1e2, 1e5)
                # NOTE(review): label says 'P' for the O+ bins too — looks
                # copy-pasted from the FPDU branch; confirm intended label.
                options(tplot_name, 'ytitle',
                        'ERG LEP-i P\n' + str(ytitle_keV_array[i]) +
                        ' keV\nPitch angle')
                tplot_variables.append(tplot_name)
            loaded_data += tplot_variables[tplot_variables_length + 1:]

        # Common display options for every pitch-angle variable created.
        options(tplot_variables, 'zlog', 1)
        options(tplot_variables, 'ysubtitle', 'PA [deg]')
        options(tplot_variables, 'colormap', 'jet')
        options(tplot_variables, 'ztitle', '[/s-cm^{2}-sr-keV/q]')

    return loaded_data
def standards(kp, list_plots=False, all_plots=False, euv=False, mag_mso=False, mag_geo=False, mag_cone=False, mag_dir=False, ngims_neutral=False, ngims_ions=False, eph_angle=False, eph_geo=False, eph_mso=False, swea=False, sep_ion=False, sep_electron=False, wave=False, plasma_den=False, plasma_temp=False, swia_h_vel=False, static_h_vel=False, static_o2_vel=False, static_flux=False, static_energy=False, sun_bar=False, solar_wind=False, ionosphere=False, sc_pot=False, altitude=False, title='Standard Plots', qt=True): ''' Generate all or a subset of 25 standardized plots, created from insitu KP data on the MAVEN SDC website Parameters: kp : dict insitu kp data structure/dictionary read from file(s) mag_mso: bool magnetic field, MSO coordinates mag_geo: bool magnetic field, geographic coordinates mag_cone: bool magnetic clock and cone angles, MSO coordinates mag_dir: bool magnetic field, radial/horizontal/northward/eastward components ngims_neutral: bool neutral atmospheric component densities ngims_ions: bool ionized atmospheric component densities eph_angle: bool spacecraft ephemeris information eph_geo: bool spacecraft position, geographic coordinates eph_mso: bool spacecraft position, MSO coordinates swea: bool electron parallel/anti-parallel fluxes sep_ion: bool ion energy flux sep_electron: bool electron energy flux wave: bool electric field wave power plasma_den: bool plasma density plasma_temp: bool plasma temperature swia_h_vel: bool H+ flow velocity, SWIA MSO coordinates static_h_vel: bool H+ flow velocity, STATIC MSO coordinates static_o2_vel: bool O2+ flow velocity, STATIC MSO coordinates static_flux: bool H+/He++ and pick-up ion omni-directional flux static_energy: bool H+/He++ and pick-up ion characteristic energy sun_bar: bool MAVEN sunlight indicator solar_wind: bool solar wind dynamic pressure ionosphere: bool electron spectrum shape parameter altitude: bool spacecraft altitude sc_pot: bool spacecraft potential list : bool Lists all Key Parameters 
instead of plotting title : str The Title to give the plot qt : bool If true, plots with qt. Else creates an HTML page with bokeh. exec_qt : bool If False, does not run the event loop for pyqtgraph. Returns : None Examples : >>> # Solar Orbital coordinates (x, y, z, magnitude), standard spacecraft ephemeris >>> # information (sub-spacecraft lat/lon, subsolar lat/lon, local solar time, solar >>> # zenith angle, Mars season) >>> # omni-directional flux. >>> insitu,iuvs = pydivide.read(input_time=['2017-06-19','2017-06-20']) >>> pydivide.standards(insitu, mag_mso=True, eph_angle=True, title='Example Title') ''' main_title = title if all_plots: euv = True mag_mso = True mag_geo = True mag_cone = True mag_dir = True ngims_neutral = True ngims_ions = True eph_angle = True eph_geo = True eph_mso = True swea = True sep_ion = True sep_electron = True wave = True plasma_den = True plasma_temp = True swia_h_vel = True static_h_vel = True static_o2_vel = True static_flux = True static_energy = True sun_bar = True solar_wind = True ionosphere = True sc_pot = True if list_plots: print("all: Generate all 25 plots") print("euv: EUV irradiance in each of three bands") print("mag_mso: Magnetic field, MSO coordinates") print("mag_geo: Magnetic field, Geographic coordinates") print("mag_cone: Magnetic clock and cone angles, MSO coordinates") print("mag_dir: Magnetic field: radial, horizontal, northward, and eastward components") print("ngims_neutral: Neutral atmospheric component densities") print("ngims_ions: Ionized atmospheric component densities") print("eph_angle: Spacecraft ephemeris information") print("eph_geo: Spacecraft position in geographic coordinates") print("eph_mso: Spacecraft position in MSO coordinates") print("swea: electron parallel/anti-parallel fluxes") print("sep_ion: Ion Energy fluxes") print("sep_electron: Electron Energy fluxes") print("wave: Electric field wave power") print("plasma_den: Plasma densities") print("plasma_temp: Plasma Temperatures") 
print("swia_h_vel: H+ Flow velocity in MSO coordinates from SWIA") print("static_h_vel: H+ flow velocity in MSO coordinates from STATIC") print("static_o2_vel: O2+ flow velocity in MSO coords from STATIC") print("static_flux: H+/He++ and Pick-up Ion omni-directional fluxes") print("static_energy: H+/He++ and Pick-up Ion characteristic energies") print("sun_bar: Indication of whether MAVEn is in sunlight") print("solar_wind: solar wind dynamic pressure") print("ionosphere: Electron Spectrum shape parameter") print("sc_pot: Spacecraft potential") return # Set up the plots to be underneath each other max_num_plots = sum([euv, mag_mso, mag_geo, mag_cone, mag_dir, ngims_neutral, ngims_ions, eph_angle, eph_geo, eph_mso, swea, sep_ion, sep_electron, wave, plasma_den, plasma_temp, swia_h_vel, static_h_vel, static_o2_vel, static_flux, static_energy, sun_bar, solar_wind, ionosphere, sc_pot]) if max_num_plots == 0: print("Please specify a plot to generate.") return # The number plot we're plotting in the figure current_plot_number = 0 names_to_plot = [] pytplot.xlim(float(kp['Time'][0]), float(kp['Time'][-1])) if euv: title = "EUV" try: if 'EUV' not in kp.keys(): raise Exception("NoDataException") euv_dataframe = kp['EUV'].loc[:, [param_dict['EUV Irradiance Lyman-alpha'], param_dict['EUV Irradiance 17-22 nm'], param_dict['EUV Irradiance 0.1-7.0 nm']]] pytplot.store_data(title, data={'x': kp['Time'], 'y': euv_dataframe}) pytplot.options(title, 'legend_names', ['EUV Irradiance Lyman-alpha', 'EUV Irradiance 17-22 nm', 'EUV Irradiance 0.1-7.0 nm']) names_to_plot.append(title) current_plot_number += 1 except Exception as x: if str(x) == "NoDataException": print("EUV is not in the Key Parameter Data Structure, " + title + " will not be plotted") if mag_mso: title = "MAG MSO" try: if 'MAG' not in kp.keys(): raise Exception("NoDataException") mag_mso_dataframe = kp['MAG'].loc[:, [param_dict['Magnetic Field MSO X'], param_dict['Magnetic Field MSO Y'], param_dict['Magnetic Field MSO 
Z']]] mag_mso_dataframe['Magnetic Field Magnitude MSO'] = ((kp['MAG'][param_dict['Magnetic Field MSO X']] * kp['MAG'][param_dict['Magnetic Field MSO X']]) + (kp['MAG'][param_dict['Magnetic Field MSO Y']] * kp['MAG'][param_dict['Magnetic Field MSO Y']]) + (kp['MAG'][param_dict['Magnetic Field MSO Z']] * kp['MAG'][param_dict['Magnetic Field MSO Z']])).apply( math.sqrt) pytplot.store_data(title, data={'x': kp['Time'], 'y': mag_mso_dataframe}) pytplot.options(title, 'legend_names', ['Magnetic Field MSO X', 'Magnetic Field MSO Y', 'Magnetic Field MSO Z', 'Magnetic Field Magnitude MSO']) names_to_plot.append(title) current_plot_number += 1 except Exception as x: if str(x) == "NoDataException": print("MAG is not in the Key Parameter Data Structure, " + title + " will not be plotted") if mag_geo: title = "MAG GEO" try: if 'MAG' not in kp.keys(): raise Exception("NoDataException") mag_geo_dataframe = kp['MAG'].loc[:, [param_dict['Magnetic Field GEO X'], param_dict['Magnetic Field GEO Y'], param_dict['Magnetic Field GEO Z']]] mag_geo_dataframe['Magnetic Field Magnitude GEO'] = ((kp['MAG'][param_dict['Magnetic Field GEO X']] * kp['MAG'][param_dict['Magnetic Field GEO X']]) + (kp['MAG'][param_dict['Magnetic Field GEO Y']] * kp['MAG'][param_dict['Magnetic Field GEO Y']]) + (kp['MAG'][param_dict['Magnetic Field GEO Z']] * kp['MAG'][param_dict['Magnetic Field GEO Z']])).apply( math.sqrt) pytplot.store_data(title, data={'x': kp['Time'], 'y': mag_geo_dataframe}) pytplot.options(title, 'legend_names', ['Magnetic Field GEO X', 'Magnetic Field GEO Y', 'Magnetic Field GEO Z', 'Magnetic Field Magnitude GEO']) names_to_plot.append(title) current_plot_number += 1 except Exception as x: if str(x) == "NoDataException": print("MAG is not in the Key Parameter Data Structure, " + title + " will not be plotted") if mag_cone: title = "MAG Cone" try: if 'MAG' not in kp.keys(): raise Exception("NoDataException") # Note, this plot ends up different from the IDL version, because of the way IDL 
calculates arctans. # Important, or not? mag_cone_dataframe_temp = kp['MAG'].loc[:, [param_dict['Magnetic Field MSO X'], param_dict['Magnetic Field MSO Y'], param_dict['Magnetic Field MSO Z']]] mag_cone_dataframe_temp['Clock Angle'] = (mag_cone_dataframe_temp[param_dict['Magnetic Field MSO X']] / mag_cone_dataframe_temp[param_dict['Magnetic Field MSO Y']] ).apply(math.atan) * 57.295776 mag_cone_dataframe_temp['Cone Angle'] = \ ((mag_cone_dataframe_temp[param_dict['Magnetic Field MSO X']].apply(abs)) / (((kp['MAG'][param_dict['Magnetic Field MSO X']] * kp['MAG'][param_dict['Magnetic Field MSO X']]) + (kp['MAG'][param_dict['Magnetic Field MSO Y']] * kp['MAG'][param_dict['Magnetic Field MSO Y']]) + (kp['MAG'][param_dict['Magnetic Field MSO Z']] * kp['MAG'][param_dict['Magnetic Field MSO Z']])).apply(math.sqrt))).apply(math.acos) * 57.295776 mag_cone_dataframe = mag_cone_dataframe_temp.loc[:, ['Clock Angle', 'Cone Angle']] pytplot.store_data(title, data={'x': kp['Time'], 'y': mag_cone_dataframe}) pytplot.options(title, 'legend_names', ['Magnetic Clock Angle', 'Magnetic Cone Angle']) names_to_plot.append(title) current_plot_number += 1 except Exception as x: if str(x) == "NoDataException": print("MAG is not in the Key Parameter Data Structure, " + title + " will not be plotted") if mag_dir: title = "MAG Direction" try: if 'MAG' not in kp.keys(): raise Exception("NoDataException") clat = (kp['SPACECRAFT']['SUB_SC_LATITUDE'] * 3.14159265 / 180).apply(math.cos) slat = (kp['SPACECRAFT']['SUB_SC_LATITUDE'] * 3.14159265 / 180).apply(math.sin) clon = (kp['SPACECRAFT']['SUB_SC_LONGITUDE'] * 3.14159265 / 180).apply(math.cos) slon = (kp['SPACECRAFT']['SUB_SC_LONGITUDE'] * 3.14159265 / 180).apply(math.sin) mag_rad_series = (kp['MAG'][param_dict['Magnetic Field GEO X']] * clon * clat) + \ (kp['MAG'][param_dict['Magnetic Field GEO Y']] * slon * clat) + \ (kp['MAG'][param_dict['Magnetic Field GEO Z']] * slat) mag_dir_dataframe = mag_rad_series.to_frame(name='Radial') 
mag_dir_dataframe['Eastward'] = (kp['MAG'][param_dict['Magnetic Field GEO X']] * slon * -1) + \ (kp['MAG'][param_dict['Magnetic Field GEO Y']] * clon) mag_dir_dataframe['Northward'] = (kp['MAG'][param_dict['Magnetic Field GEO X']] * clon * slat * -1) + \ (kp['MAG'][param_dict['Magnetic Field GEO Y']] * slon * slat * -1) + \ (kp['MAG'][param_dict['Magnetic Field GEO Z']] * clat) pytplot.store_data(title, data={'x': kp['Time'], 'y': mag_dir_dataframe}) pytplot.options(title, 'legend_names', ['Radial', 'Eastward', 'Northward']) names_to_plot.append(title) current_plot_number += 1 except Exception as x: if str(x) == "NoDataException": print("MAG is not in the Key Parameter Data Structure, " + title + " will not be plotted") if ngims_neutral: title = "NGIMS Neutrals" try: if 'NGIMS' not in kp.keys(): raise Exception("NoDataException") ngims_neutrals_dataframe = kp['NGIMS'].loc[:, [param_dict['Density He'], param_dict['Density O'], param_dict['Density CO'], param_dict['Density N2'], param_dict['Density NO'], param_dict['Density Ar'], param_dict['Density CO2']]] pytplot.store_data(title, data={'x': kp['Time'], 'y': ngims_neutrals_dataframe}) pytplot.options(title, 'legend_names', ['Density He', 'Density O', 'Density CO', 'Density N2', 'Density NO', 'Density Ar', 'Density CO2']) pytplot.options(title, 'ylog', 1) names_to_plot.append(title) current_plot_number += 1 except Exception as x: if str(x) == "NoDataException": print("NGIMS is not in the Key Parameter Data Structure, " + title + " will not be plotted") if ngims_ions: title = "NGIMS IONS" try: if 'NGIMS' not in kp.keys(): raise Exception("NoDataException") ngims_ion_dataframe = kp['NGIMS'].loc[:, [param_dict['Density 32+'], param_dict['Density 44+'], param_dict['Density 30+'], param_dict['Density 16+'], param_dict['Density 28+'], param_dict['Density 12+'], param_dict['Density 17+'], param_dict['Density 14+']]] pytplot.store_data(title, data={'x': kp['Time'], 'y': ngims_ion_dataframe}) pytplot.options(title, 
'legend_names', ['Density 32+', 'Density 44+', 'Density 30+', 'Density 16+', 'Density 28+', 'Density 12+', 'Density 17+', 'Density 14+']) pytplot.options(title, 'ylog', 1) names_to_plot.append(title) current_plot_number += 1 except Exception as x: if str(x) == "NoDataException": print("NGIMS is not in the Key Parameter Data Structure, " + title + " will not be plotted") if eph_angle: title = "Spacecraft Ephemeris Information" try: if 'SPACECRAFT' not in kp.keys(): raise Exception("NoDataException") # This plot makes no sense. Why is Local Time plotted here, when it is not a measurement in degrees? # Why is Mars season/Subsolar Latitude plotted when they are essentially straight lines? sc_eph_dataframe = kp['SPACECRAFT'].loc[:, ['SUB_SC_LONGITUDE', 'SUB_SC_LATITUDE', 'SZA', 'LOCAL_TIME', 'MARS_SEASON', 'SUBSOLAR_POINT_GEO_LONGITUDE', 'SUBSOLAR_POINT_GEO_LATITUDE']] pytplot.store_data(title, data={'x': kp['Time'], 'y': sc_eph_dataframe}) pytplot.options(title, 'legend_names', ['GEO Longitude', 'GEO Latitude', 'Solar Zenith Angle', 'Local Time', 'Mars Season (Ls)', 'Subsolar Point GEO Longitude', 'Subsolar Point GEO Latitude']) names_to_plot.append(title) current_plot_number += 1 except Exception as x: if str(x) == "NoDataException": print("SPACECRAFT is not in the Key Parameter Data Structure, " + title + " will not be plotted") if eph_geo: title = "Spacecraft positon in GEO Coordinates" try: if 'SPACECRAFT' not in kp.keys(): raise Exception("NoDataException") sc_pos_dataframe = kp['SPACECRAFT'].loc[:, ['GEO_X', 'GEO_Y', 'GEO_Z', 'ALTITUDE']] pytplot.store_data(title, data={'x': kp['Time'], 'y': sc_pos_dataframe}) pytplot.options(title, 'legend_names', ['GEO X', 'GEO Y', 'GEO Z', 'Altitude Aeroid']) names_to_plot.append(title) current_plot_number += 1 except Exception as x: if str(x) == "NoDataException": print("SPACECRAFT is not in the Key Parameter Data Structure, " + title + " will not be plotted") if eph_mso: title = "Spacecraft positon in MSO Coordinates" try: 
if 'SPACECRAFT' not in kp.keys(): raise Exception("NoDataException") sc_pos_mso_dataframe = kp['SPACECRAFT'].loc[:, 'MSO_X', 'MSO_Y', 'MSO_Z', 'ALTITUDE'] pytplot.store_data(title, data={'x': kp['Time'], 'y': sc_pos_mso_dataframe}) pytplot.options(title, 'legend_names', ['MSO X', 'MSO Y', 'MSO Z', 'Altitude Aeroid']) names_to_plot.append(title) current_plot_number += 1 except Exception as x: if str(x) == "NoDataException": print("LPW is not in the Key Parameter Data Structure, " + title + " will not be plotted") if swea: title = "SWEA" try: if 'SWEA' not in kp.keys(): raise Exception("NoDataException") swea_dataframe = kp['SWEA'].loc[:, [param_dict['Flux, e- Parallel (5-100 ev)'], param_dict['Flux, e- Parallel (100-500 ev)'], param_dict['Flux, e- Parallel (500-1000 ev)'], param_dict['Flux, e- Anti-par (5-100 ev)'], param_dict['Flux, e- Anti-par (100-500 ev)'], param_dict['Flux, e- Anti-par (500-1000 ev)'], param_dict['Electron Spectrum Shape']]] pytplot.store_data(title, data={'x': kp['Time'], 'y': swea_dataframe}) pytplot.options(title, 'legend_names', ['Flux, e- Parallel (5-100 ev)', 'Flux, e- Parallel (100-500 ev)', 'Flux, e- Parallel (500-1000 ev)', 'Flux, e- Anti-par (5-100 ev)', 'Flux, e- Anti-par (100-500 ev)', 'Flux, e- Anti-par (500-1000 ev)', 'Electron Spectrum Shape']) names_to_plot.append(title) current_plot_number += 1 except Exception as x: if str(x) == "NoDataException": print("SWEA is not in the Key Parameter Data Structure, " + title + " will not be plotted") if sep_ion: title = "SEP Ions" try: if 'SEP' not in kp.keys(): raise Exception("NoDataException") # Need to fill in the NaNs as zero, otherwise the Sum will equal all Nans sep_ion_dataframe = kp['SEP'].loc[:, [param_dict['Ion Flux FOV 1 F'], param_dict['Ion Flux FOV 1 R'], param_dict['Ion Flux FOV 2 F'], param_dict['Ion Flux FOV 2 R']]].fillna(0) sep_ion_dataframe['Sum'] = sep_ion_dataframe[param_dict['Ion Flux FOV 1 F']] + \ sep_ion_dataframe[param_dict['Ion Flux FOV 1 R']] + \ 
sep_ion_dataframe[param_dict['Ion Flux FOV 2 F']] + \ sep_ion_dataframe[param_dict['Ion Flux FOV 2 R']] pytplot.store_data(title, data={'x': kp['Time'], 'y': sep_ion_dataframe}) pytplot.options(title, 'legend_names', ['Ion Flux FOV 1 F', 'Ion Flux FOV 1 R', 'Ion Flux FOV 2 F', 'Ion Flux FOV 2 R', 'Sum']) names_to_plot.append(title) current_plot_number += 1 except Exception as x: if str(x) == "NoDataException": print("SEP is not in the Key Parameter Data Structure, " + title + " will not be plotted") if sep_electron: title = "SEP Electrons" try: if 'SEP' not in kp.keys(): raise Exception("NoDataException") sep_electron_dataframe = kp['SEP'].loc[:, [param_dict['Electron Flux FOV 1 F'], param_dict['Electron Flux FOV 1 R'], param_dict['Electron Flux FOV 2 F'], param_dict['Electron Flux FOV 2 R']]].fillna(0) sep_electron_dataframe['Sum'] = sep_electron_dataframe[param_dict['Electron Flux FOV 1 F']] + \ sep_electron_dataframe[param_dict['Electron Flux FOV 1 R']] + \ sep_electron_dataframe[param_dict['Electron Flux FOV 2 F']] + \ sep_electron_dataframe[param_dict['Electron Flux FOV 2 R']] pytplot.store_data(title, data={'x': kp['Time'], 'y': sep_electron_dataframe}) pytplot.options(title, 'legend_names', ['Electron Flux FOV 1 F', 'Electron Flux FOV 1 R', 'Electron Flux FOV 2 F', 'Electron Flux FOV 2 R', 'Sum']) names_to_plot.append(title) current_plot_number += 1 except Exception as x: if str(x) == "NoDataException": print("SEP is not in the Key Parameter Data Structure, " + title + " will not be plotted") if wave: title = "E-Field" try: if 'LPW' not in kp.keys(): raise Exception("NoDataException") wave_dataframe = kp['LPW'].loc[:, [param_dict['E-field Power 2-100 Hz'], param_dict['E-field Power 100-800 Hz'], param_dict['E-field Power 0.8-1.0 Mhz']]] wave_dataframe['RMS Deviation'] = kp['MAG'].loc[:, [param_dict['Magnetic Field RMS Dev']]] pytplot.store_data(title, data={'x': kp['Time'], 'y': wave_dataframe}) pytplot.options(title, 'legend_names', ['E-field Power 2-100 
Hz', 'E-field Power 100-800 Hz', 'E-field Power 0.8-1.0 Mhz', 'RMS Deviation']) pytplot.options(title, 'ylog', 1) names_to_plot.append(title) current_plot_number += 1 except Exception as x: if str(x) == "NoDataException": print("LPW is not in the Key Parameter Data Structure, " + title + " will not be plotted") if plasma_den: title = "Plasma Density" try: if 'SWIA' not in kp.keys() or 'STATIC' not in kp.keys() or 'LPW' not in kp.keys() \ or 'SWEA' not in kp.keys(): raise Exception("NoDataException") plasma_den_dataframe = kp['STATIC'].loc[:, [param_dict['H+ Density'], param_dict['O+ Density'], param_dict['O2+ Density']]] plasma_den_dataframe['SWIA H+ Density'] = kp['SWIA'].loc[:, [param_dict['H+ Density']]] plasma_den_dataframe['Solar Wind Electron Density'] = \ kp['SWEA'].loc[:, [param_dict['Solar Wind Electron Density']]] plasma_den_dataframe['Electron Density'] = kp['LPW'].loc[:, param_dict[['Electron Density']]] pytplot.store_data(title, data={'x': kp['Time'], 'y': plasma_den_dataframe}) pytplot.options(title, 'legend_names', ['H+ Density', 'O+ Density', 'O2+ Density', 'SWIA H+ Density', 'Solar Wind Electron Density', 'Electron Density']) pytplot.options(title, 'ylog', 1) names_to_plot.append(title) current_plot_number += 1 except Exception as x: if str(x) == "NoDataException": print("One or all of SWIA/STATIC/LPW/SWEA are not in the Key Parameter Data Structure, " + title + " will not be plotted") if plasma_temp: title = "Plasma Temperature" try: if 'SWIA' not in kp.keys() or 'STATIC' not in kp.keys() or 'LPW' not in kp.keys() \ or 'SWEA' not in kp.keys(): raise Exception("NoDataException") plasma_temp_dataframe = kp['STATIC'].loc[:, [param_dict['H+ Temperature'], param_dict['O+ Temperature'], param_dict['O2+ Temperature']]] plasma_temp_dataframe['SWIA H+ Temperature'] = kp['SWIA'].loc[:, [param_dict['H+ Temperature']]] plasma_temp_dataframe['Solar Wind Electron Temperature'] = \ kp['SWEA'].loc[:, [param_dict['Solar Wind Electron Temperature']]] 
plasma_temp_dataframe['Electron Temperature'] = kp['LPW'].loc[:, [param_dict['Electron Temperature']]] pytplot.store_data(title, data={'x': kp['Time'], 'y': plasma_temp_dataframe}) pytplot.options(title, 'legend_names', ['H+ Temperature', 'O+ Temperature', 'O2+ Temperature', 'SWIA H+ Temperature', 'Solar Wind Electron Temperature', 'Electron Temperature']) pytplot.options(title, 'ylog', 1) names_to_plot.append(title) current_plot_number += 1 except Exception as x: if str(x) == "NoDataException": print("One or all of SWIA/STATIC/LPW/SWEA are not in the Key Parameter Data Structure, " + title + " will not be plotted") if swia_h_vel: title = "SWIA H+ Velocity" try: if 'SWIA' not in kp.keys(): raise Exception("NoDataException") swia_h_vel_dataframe = kp['SWIA'].loc[:, [param_dict['H+ Flow Velocity MSO X'], param_dict['H+ Flow Velocity MSO Y'], param_dict['H+ Flow Velocity MSO Z']]] swia_h_vel_dataframe['Magnitude'] = ((kp['SWIA'][param_dict['H+ Flow Velocity MSO X']] * kp['SWIA'][param_dict['H+ Flow Velocity MSO X']]) + (kp['SWIA'][param_dict['H+ Flow Velocity MSO Y']] * kp['SWIA'][param_dict['H+ Flow Velocity MSO Y']]) + (kp['SWIA'][param_dict['H+ Flow Velocity MSO Z']] * kp['SWIA'][param_dict['H+ Flow Velocity MSO Z']])).apply(math.sqrt) pytplot.store_data(title, data={'x': kp['Time'], 'y': swia_h_vel_dataframe}) pytplot.options(title, 'legend_names', ['H+ Flow Velocity MSO X', 'H+ Flow Velocity MSO Y', 'H+ Flow Velocity MSO Z', 'Magnitude']) names_to_plot.append(title) current_plot_number += 1 except Exception as x: if str(x) == "NoDataException": print("SWIA is not in the Key Parameter Data Structure, " + title + " will not be plotted") if static_h_vel: title = "STATIC H+ Velocity" try: if 'STATIC' not in kp.keys(): raise Exception("NoDataException") # This is more like a direction, not a velocity. The values are between 0 and 1. 
static_h_vel_dataframe = kp['STATIC'].loc[:, [param_dict['H+ Direction MSO X'], param_dict['H+ Direction MSO Y'], param_dict['H+ Direction MSO Z']]] static_h_vel_dataframe['Magnitude'] = ((kp['STATIC'][param_dict['H+ Direction MSO X']] * kp['STATIC'][param_dict['H+ Direction MSO X']]) + (kp['STATIC'][param_dict['H+ Direction MSO Y']] * kp['STATIC'][param_dict['H+ Direction MSO Y']]) + (kp['STATIC'][param_dict['H+ Direction MSO Z']] * kp['STATIC'][param_dict['H+ Direction MSO Z']])).apply(math.sqrt) pytplot.store_data(title, data={'x': kp['Time'], 'y': static_h_vel_dataframe}) pytplot.options(title, 'legend_names', ['H+ Direction MSO X', 'H+ Direction MSO Y', 'H+ Direction MSO Z', 'Magnitude']) names_to_plot.append(title) current_plot_number += 1 except Exception as x: if str(x) == "NoDataException": print("STATIC is not in the Key Parameter Data Structure, " + title + " will not be plotted") if static_o2_vel: title = "STATIC O2+ Velocity" try: if 'STATIC' not in kp.keys(): raise Exception("NoDataException") static_o2_vel_dataframe = kp['STATIC'].loc[:, [param_dict['O2+ Flow Velocity MSO X'], param_dict['O2+ Flow Velocity MSO Y'], param_dict['O2+ Flow Velocity MSO Z']]] static_o2_vel_dataframe['Magnitude'] = ((kp['STATIC'][param_dict['O2+ Flow Velocity MSO X']] * kp['STATIC'][param_dict['O2+ Flow Velocity MSO X']]) + (kp['STATIC'][param_dict['O2+ Flow Velocity MSO Y']] * kp['STATIC'][param_dict['O2+ Flow Velocity MSO Y']]) + (kp['STATIC'][param_dict['O2+ Flow Velocity MSO Z']] * kp['STATIC'][param_dict['O2+ Flow Velocity MSO Z']])).apply( math.sqrt) pytplot.store_data(title, data={'x': kp['Time'], 'y': static_o2_vel_dataframe}) pytplot.options(title, 'legend_names', ['O2+ Flow Velocity MSO X', 'O2+ Flow Velocity MSO Y', 'O2+ Flow Velocity MSO Z', 'Magnitude']) names_to_plot.append(title) current_plot_number += 1 except Exception as x: if str(x) == "NoDataException": print("STATIC is not in the Key Parameter Data Structure, " + title + " will not be plotted") if 
static_flux: title = "STATIC Flux" try: if 'STATIC' not in kp.keys(): raise Exception("NoDataException") # In the IDL Toolkit, it only plots O2PLUS_FLOW_VELOCITY_MSO_X/Y. I'm assuming this is incorrect. # I have no idea what the right values to plot are. current_plot_number += 1 except Exception as x: if str(x) == "NoDataException": print("STATIC is not in the Key Parameter Data Structure, " + title + " will not be plotted") if static_energy: title = "STATIC Characteristic Energies" try: if 'STATIC' not in kp.keys(): raise Exception("NoDataException") sta_char_eng_dataframe = kp['STATIC'].loc[:, [param_dict['H+ Energy'], param_dict['He++ Energy'], param_dict['O+ Energy'], param_dict['O2+ Energy']]] pytplot.store_data(title, data={'x': kp['Time'], 'y': sta_char_eng_dataframe}) pytplot.options(title, 'legend_names', ['H+ Energy', 'He++ Energy', 'O+ Energy', 'O2+ Energy']) pytplot.options(title, 'ylog', 1) names_to_plot.append(title) current_plot_number += 1 except Exception as x: if str(x) == "NoDataException": print("STATIC is not in the Key Parameter Data Structure, " + title + " will not be plotted") if sun_bar: title = "Sunbar" try: if 'SPACECRAFT' not in kp.keys(): raise Exception("NoDataException") # Shows whether or not MAVEN is in the sun # 1 if True # 0 if False # Could there be a more efficient way of doing this? 
radius_mars = 3396.0 sun_bar_series = ((kp['SPACECRAFT']['MSO_Y'] * kp['SPACECRAFT']['MSO_Y']) + (kp['SPACECRAFT']['MSO_Z'] * kp['SPACECRAFT']['MSO_Z'])).apply(math.sqrt) sun_bar_series.name = "Sunlit/Eclipsed" index = 0 for mso_x in kp['SPACECRAFT']['MSO_X']: if mso_x < 0: if sun_bar_series[index] < radius_mars: sun_bar_series[index] = 0 else: sun_bar_series[index] = 1 else: sun_bar_series[index] = 1 index += 1 pytplot.store_data(title, data={'x': kp['Time'], 'y': sun_bar_series}) pytplot.ylim(title, -0.1, 1.1) names_to_plot.append(title) current_plot_number = current_plot_number + 1 except Exception as x: if str(x) == "NoDataException": print("SPACECRAFT is not in the Key Parameter Data Structure, " + title + " will not be plotted") if solar_wind: title = "Solar Wind" try: if 'SWIA' not in kp.keys(): raise Exception("NoDataException") solar_wind_dataframe = kp['SWIA'].loc[:, [param_dict['Solar Wind Dynamic Pressure']]] pytplot.store_data(title, data={'x': kp['Time'], 'y': solar_wind_dataframe}) pytplot.options(title, 'ylog', 1) names_to_plot.append(title) current_plot_number += 1 except Exception as x: if str(x) == "NoDataException": print("SWIA is not in the Key Parameter Data Structure, " + title + " will not be plotted") if ionosphere: title = "Ionosphere" try: if 'SWEA' not in kp.keys(): raise Exception("NoDataException") # Need to convert to float first, not sure why it is not already ionosphere_dataframe = kp['SWEA'].loc[:, [param_dict['Electron Spectrum Shape']]] ionosphere_dataframe['Electron Spectrum Shape'] = \ ionosphere_dataframe[param_dict['Electron Spectrum Shape']].apply(float) pytplot.store_data(title, data={'x': kp['Time'], 'y': ionosphere_dataframe}) pytplot.options(title, 'ylog', 1) names_to_plot.append(title) current_plot_number += 1 except Exception as x: if str(x) == "NoDataException": print("SWEA is not in the Key Parameter Data Structure, " + title + " will not be plotted") if sc_pot: title = "Spacecraft Potential" try: if 'LPW' not in 
kp.keys(): raise Exception("NoDataException") sc_pot_dataframe = kp['LPW'].loc[:, [param_dict['Spacecraft Potential']]] pytplot.store_data(title, data={'x': kp['Time'], 'y': sc_pot_dataframe}) names_to_plot.append(title) current_plot_number += 1 except Exception as x: if str(x) == "NoDataException": print("LPW is not in the Key Parameter Data Structure, " + title + " will not be plotted") if altitude: title = "Spacecraft Altitude" try: altitude_dataframe = kp['SPACECRAFT'].loc[:, ['ALTITUDE']] pytplot.store_data(title, data={'x': kp['Time'], 'y': altitude_dataframe}) names_to_plot.append(title) current_plot_number += 1 except Exception as x: if str(x) == "NoDataException": print("LPW is not in the Key Parameter Data Structure, " + title + " will not be plotted") # Show the plot pytplot.tplot_options('wsize', [1000, 300 * current_plot_number]) pytplot.tplot_options('title', main_title) pytplot.tplot(names_to_plot, bokeh=not qt) pytplot.del_data(names_to_plot) return
def xep(trange=['2017-06-01', '2017-06-02'],
        datatype='omniflux',
        level='l2',
        suffix='',
        get_support_data=False,
        varformat=None,
        varnames=None,
        downloadonly=False,
        notplot=False,
        no_update=False,
        uname=None,
        passwd=None,
        time_clip=False,
        ror=True):
    """
    This function loads data from the XEP-e experiment from the Arase mission

    Parameters:
        trange : list of str
            time range of interest [starttime, endtime] with the format
            'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
            ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']

        datatype: str
            Data type; Valid options:

        level: str
            Data level; Valid options:

        suffix: str
            The tplot variable names will be given this suffix.  By default,
            no suffix is added.

        get_support_data: bool
            Data with an attribute "VAR_TYPE" with a value of "support_data"
            will be loaded into tplot.  By default, only loads in data with a
            "VAR_TYPE" attribute of "data".

        varformat: str
            The file variable formats to load into tplot.  Wildcard character
            "*" is accepted.  By default, all variables are loaded in.

        varnames: list of str
            List of variable names to load (if not specified,
            all data variables are loaded)

        downloadonly: bool
            Set this flag to download the CDF files, but not load them into
            tplot variables

        notplot: bool
            Return the data in hash tables instead of creating tplot variables

        no_update: bool
            If set, only load data from your local cache

        time_clip: bool
            Time clip the variables to exactly the range specified in the trange keyword

        ror: bool
            If set, print PI info and rules of the road

    Returns:
        List of tplot variables created.

    """
    # Bug fix: the original used a mutable default argument (varnames=[]),
    # which is shared between calls; use a None sentinel instead.
    if varnames is None:
        varnames = []

    # Remember the caller's notplot choice; notplot is forced on below for
    # the datatypes whose variables must be re-shaped before store_data().
    initial_notplot_flag = False
    if notplot:
        initial_notplot_flag = True

    if (datatype == 'omniflux') or (datatype == '2dflux'):
        # to avoid failure of creation Tplot variables (at store_data.py) of xep
        notplot = True

    file_res = 3600. * 24  # one file per day
    prefix = 'erg_xep_' + level + '_'
    pathformat = 'satellite/erg/xep/'+level+'/'+datatype + \
        '/%Y/%m/erg_xep_'+level+'_'+datatype+'_%Y%m%d_v??_??.cdf'

    loaded_data = load(pathformat=pathformat, trange=trange, level=level,
                       datatype=datatype, file_res=file_res, prefix=prefix,
                       suffix=suffix, get_support_data=get_support_data,
                       varformat=varformat, varnames=varnames,
                       downloadonly=downloadonly, notplot=notplot,
                       time_clip=time_clip, no_update=no_update,
                       uname=uname, passwd=passwd)

    if (len(loaded_data) > 0) and ror:
        try:
            # Pull the CDF global attributes out of whichever container
            # load() returned (list of tplot names, or a notplot dict).
            if isinstance(loaded_data, list):
                if downloadonly:
                    cdf_file = cdflib.CDF(loaded_data[-1])
                    gatt = cdf_file.globalattsget()
                else:
                    gatt = get_data(loaded_data[-1], metadata=True)['CDF']['GATT']
            elif isinstance(loaded_data, dict):
                gatt = loaded_data[list(loaded_data.keys())[-1]]['CDF']['GATT']

            # --- print PI info and rules of the road
            print(' ')
            print(
                '**************************************************************************')
            print(gatt["LOGICAL_SOURCE_DESCRIPTION"])
            print('')
            print('Information about ERG XEP')
            print('')
            print('PI: ', gatt['PI_NAME'])
            print("Affiliation: "+gatt["PI_AFFILIATION"])
            print('')
            print('RoR of ERG project common: https://ergsc.isee.nagoya-u.ac.jp/data_info/rules_of_the_road.shtml.en')
            print('RoR of XEP: https://ergsc.isee.nagoya-u.ac.jp/mw/index.php/ErgSat/Xep')
            print('')
            print('Contact: erg_xep_info at isee.nagoya-u.ac.jp')
            print(
                '**************************************************************************')
        except Exception:
            # Bug fix: the original bare "except:" would also swallow
            # KeyboardInterrupt/SystemExit; catch ordinary exceptions only.
            print('printing PI info and rules of the road was failed')

    if initial_notplot_flag or downloadonly:
        return loaded_data

    if isinstance(loaded_data, dict):

        if datatype == 'omniflux':
            tplot_variables = []
            tvar = prefix + 'FEDO_SSD' + suffix
            if tvar in loaded_data:
                # Collapse the two rows of energy-bin bounds into one energy
                # axis per channel via their geometric mean.
                v_vars_min = loaded_data[tvar]['v'][0]
                v_vars_max = loaded_data[tvar]['v'][1]
                v_vars = np.sqrt(v_vars_min * v_vars_max)  # Geometric mean

                store_data(tvar,
                           data={'x': loaded_data[tvar]['x'],
                                 'y': loaded_data[tvar]['y'],
                                 'v': v_vars},
                           attr_dict={'CDF': loaded_data[tvar]['CDF']})
                tplot_variables.append(tvar)

            if tvar in tplot_variables:
                # remove minus values of y array
                clip(tvar, 0., 5000.)
                # spectrogram plot with log-scaled y and z axes
                options(tvar, 'Spec', 1)
                options(tvar, 'ylog', 1)
                options(tvar, 'yrange', [4.0e+02, 4.5e+03])
                options(tvar, 'zlog', 1)
                options(tvar, 'zrange', [1.0e-01, 1.0e+3])
                options(tvar, 'Colormap', 'jet')
                options(tvar, 'ztitle', '[/cm^{2}-str-s-keV]')
                options(tvar, 'ytitle', 'XEP\nomniflux\nLv2\nEnergy')
                options(tvar, 'ysubtitle', '[keV]')
                ylim(tvar, 4.0e+02, 4.5e+03)
                zlim(tvar, 1.0e-01, 1.0e+3)

            return tplot_variables

        if datatype == '2dflux':
            tplot_variables = []
            tvar = prefix + 'FEDU_SSD' + suffix
            if tvar in loaded_data:
                store_data(tvar,
                           data={'x': loaded_data[tvar]['x'],
                                 'y': loaded_data[tvar]['y'],
                                 # Geometric mean of the energy-bin bounds.
                                 'v1': np.sqrt(loaded_data[tvar]['v'][:, 0] *
                                               loaded_data[tvar]['v'][:, 1]),
                                 'v2': list(range(16))},  # [0, 1, 2, .., 15]
                           attr_dict={'CDF': loaded_data[tvar]['CDF']})
                tplot_variables.append(tvar)

            if tvar in tplot_variables:
                # Clip obvious fill/overflow values.
                clip(tvar, -1.0e+10, 1.0e+10)

            return tplot_variables

    return loaded_data
def test_math():
    """Exercise the pytplot.tplot_math routines end-to-end on a MAVEN EUV CDF.

    Order matters throughout: each step consumes tplot variables created by
    earlier steps, and every result is rendered with tplot(testing=True) to
    confirm it plots without error.
    """
    # Load the EUV bands CDF shipped alongside the tests.
    pytplot.cdf_to_tplot(os.path.dirname(os.path.realpath(__file__)) +
                         "/testfiles/mvn_euv_l2_bands_20170619_v09_r03.cdf")
    pytplot.tplot_names()

    # Split the 3-component variable into _x/_y/_z scalars.
    pytplot.tplot_math.split_vec('mvn_euv_calib_bands')
    pytplot.tplot('mvn_euv_calib_bands_x', testing=True)

    # Basic arithmetic between tplot variables.
    pytplot.tplot_math.subtract('mvn_euv_calib_bands_x',
                                'mvn_euv_calib_bands_y', new_tvar='s')
    pytplot.tplot('s', testing=True)
    pytplot.tplot_math.add('s', 'mvn_euv_calib_bands_x', new_tvar='a')
    pytplot.tplot(['mvn_euv_calib_bands_x', 'a'], testing=True)
    pytplot.tplot_math.subtract('mvn_euv_calib_bands_x',
                                'mvn_euv_calib_bands_z', new_tvar='m')
    pytplot.tplot('m', testing=True)
    pytplot.tplot_math.divide('m', 'mvn_euv_calib_bands_z', new_tvar='d')
    pytplot.tplot('d', testing=True)

    # Reductions and resolution changes.
    pytplot.add_across('mvn_euv_calib_bands', new_tvar='data_summed')
    pytplot.tplot('mvn_euv_calib_bands', testing=True)
    pytplot.avg_res_data('data_summed', res=120)
    pytplot.tplot('data_summed', testing=True)

    # Flag removal, flattening, and re-joining split components.
    pytplot.deflag('mvn_euv_calib_bands', 0, new_tvar='deflagged')
    pytplot.tplot('deflagged', testing=True)
    pytplot.flatten('mvn_euv_calib_bands')
    pytplot.tplot('data_flattened', testing=True)
    pytplot.join_vec(['mvn_euv_calib_bands_x', 'mvn_euv_calib_bands_y',
                      'mvn_euv_calib_bands_z'], new_tvar='data2')
    pytplot.tplot('data2', testing=True)

    # Power spectrum and derivative.
    pytplot.pwr_spec('mvn_euv_calib_bands_x')
    pytplot.tplot('mvn_euv_calib_bands_x_pwrspec', testing=True)
    pytplot.derive('mvn_euv_calib_bands_x')

    # A "pseudo-variable" built from a list of existing variables.
    pytplot.store_data("data3", data=['mvn_euv_calib_bands_x',
                                      'mvn_euv_calib_bands_y',
                                      'mvn_euv_calib_bands_z'])
    pytplot.tplot('data3', testing=True)

    # Load a SWEA spectrogram CDF and resample EUV data onto its time base.
    pytplot.cdf_to_tplot(os.path.dirname(os.path.realpath(__file__)) +
                         "/testfiles/mvn_swe_l2_svyspec_20170619_v04_r04.cdf")
    pytplot.resample('mvn_euv_calib_bands_y',
                     pytplot.data_quants['diff_en_fluxes'].coords['time'].values,
                     new_tvar='data_3_resampled')
    pytplot.tplot('data_3_resampled', testing=True)

    # Spectrogram math: multiply by the bin values, then sum column ranges.
    pytplot.options('diff_en_fluxes', 'spec', 1)
    pytplot.spec_mult('diff_en_fluxes')
    pytplot.add_across('diff_en_fluxes_specmult', new_tvar='tot_en_flux',
                       column_range=[[0, 10], [10, 20], [20, 30]])
    pytplot.options('diff_en_fluxes', 'ylog', 1)
    pytplot.options('diff_en_fluxes', 'zlog', 1)
    pytplot.options('tot_en_flux', 'ylog', 1)
    pytplot.ylim('tot_en_flux', 1, 100)
    pytplot.tplot(['diff_en_fluxes', 'tot_en_flux'], testing=True)

    # Mix a summed-spectrogram component with a line variable.
    pytplot.split_vec('tot_en_flux')
    pytplot.add('tot_en_flux_x', 'mvn_euv_calib_bands_y', new_tvar='weird_data')
    pytplot.tplot('weird_data', testing=True)
def mgf(trange=['2017-03-27', '2017-03-28'],
        datatype='8sec',
        level='l2',
        suffix='',
        get_support_data=False,
        varformat=None,
        downloadonly=False,
        notplot=False,
        no_update=False,
        uname=None,
        passwd=None,
        time_clip=False):
    """
    This function loads data from the MGF experiment from the Arase mission

    Parameters:
        trange : list of str
            time range of interest [starttime, endtime] with the format
            'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
            ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']

        datatype: str
            Data type; Valid options:

        level: str
            Data level; Valid options:

        suffix: str
            The tplot variable names will be given this suffix.  By default,
            no suffix is added.

        get_support_data: bool
            Data with an attribute "VAR_TYPE" with a value of "support_data"
            will be loaded into tplot.  By default, only loads in data with a
            "VAR_TYPE" attribute of "data".

        varformat: str
            The file variable formats to load into tplot.  Wildcard character
            "*" is accepted.  By default, all variables are loaded in.

        downloadonly: bool
            Set this flag to download the CDF files, but not load them into
            tplot variables

        notplot: bool
            Return the data in hash tables instead of creating tplot variables

        no_update: bool
            If set, only load data from your local cache

        time_clip: bool
            Time clip the variables to exactly the range specified in the trange keyword

    Returns:
        List of tplot variables created.

    """
    # Normalize shorthand datatype aliases to the canonical names used in
    # file paths and tplot variable names.
    _aliases = {'8s': '8sec', '8': '8sec',
                '64': '64hz', '128': '128hz', '256': '256hz'}
    datatype = _aliases.get(datatype, datatype)

    loaded_data = load(instrument='mgf', trange=trange, level=level,
                       datatype=datatype, suffix=suffix,
                       get_support_data=get_support_data,
                       varformat=varformat, downloadonly=downloadonly,
                       notplot=notplot, time_clip=time_clip,
                       no_update=no_update, uname=uname, passwd=passwd)

    # Nothing to post-process when no data was loaded, or when the caller
    # asked for raw hash tables / download only.  (Bug fix: the original
    # compared with "== None"; None checks should use identity, and
    # "not loaded_data" also covers the empty list.)
    if not loaded_data or notplot or downloadonly:
        return loaded_data

    # The same post-processing applies to each coordinate system's variable.
    for coord in ('dsi', 'gse', 'gsm', 'sm'):
        tvar = 'erg_mgf_' + level + '_mag_' + datatype + '_' + coord + suffix
        # Clip fill/overflow values far outside any physical field magnitude.
        clip(tvar, -1e+6, 1e6)
        # Set the y range from the (clipped) data extent, ignoring NaNs.
        times, bdata = get_data(tvar)
        ylim(tvar, np.nanmin(bdata), np.nanmax(bdata))
        # Label the three components and give them distinct colors.
        options(tvar, 'legend_names', ['Bx', 'By', 'Bz'])
        options(tvar, 'Color', ['b', 'g', 'r'])

    return loaded_data
#################################################################################### # Get data from pytplot object into python variables. # This is useful when we want to work on the data using standard python libraries. alldata = get_data("tha_vel") time = alldata[0] data = alldata[1] #################################################################################### # After working with the data, we can store a new pytplot variable. # We can store any data in the pytplot object. store_data("tha_new_vel", data={'x': time, 'y': data}) #################################################################################### # Preparing for the plots, we define the y-axis limits. ylim('tha_pos', -23000.0, 81000.0) ylim('tha_new_vel', -8.0, 12.0) #################################################################################### # We plot the position and the velocity using the pyqtgraph library (the default). # Another option is to plot using the bokeh library. tplot(["tha_pos", "tha_new_vel"]) #################################################################################### # Load and plot GMAG data #################################################################################### #################################################################################### # Delete any existing pytplot variables, and define a time range. del_data() time_range = ['2015-12-31 00:00:00', '2015-12-31 23:59:59']
def orb(trange=['2017-03-27', '2017-03-28'],
        datatype='def',
        level='l2',
        model="op",
        suffix='',
        get_support_data=False,
        varformat=None,
        varnames=[],
        downloadonly=False,
        notplot=False,
        no_update=False,
        uname=None,
        passwd=None,
        time_clip=False,
        version=None,
        ror=True):
    """
    This function loads orbit data from the Arase mission

    Parameters:
        trange : list of str
            time range of interest [starttime, endtime] with the format
            'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
            ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']

        datatype: str
            Data type; Valid options:

        level: str
            Data level; Valid options:

        model: str
            Field model for the L3 products; "op", "t89" or "ts04"

        suffix: str
            The tplot variable names will be given this suffix.  By default,
            no suffix is added.

        get_support_data: bool
            Data with an attribute "VAR_TYPE" with a value of "support_data"
            will be loaded into tplot.  By default, only loads in data with a
            "VAR_TYPE" attribute of "data".

        varformat: str
            The file variable formats to load into tplot.  Wildcard character
            "*" is accepted.  By default, all variables are loaded in.

        varnames: list of str
            List of variable names to load (if not specified,
            all data variables are loaded)

        downloadonly: bool
            Set this flag to download the CDF files, but not load them into
            tplot variables

        notplot: bool
            Return the data in hash tables instead of creating tplot variables

        no_update: bool
            If set, only load data from your local cache

        time_clip: bool
            Time clip the variables to exactly the range specified in the
            trange keyword

        version: str
            Set this value to specify the version of cdf files (such as "v03")

        ror: bool
            If set, print PI info and rules of the road

    Returns:
        List of tplot variables created.
    """
    # Remember whether the caller asked for hash tables; "notplot" is also
    # forwarded to load() below, so the original value must be preserved.
    initial_notplot_flag = False
    if notplot:
        initial_notplot_flag = True

    file_res = 3600. * 24  # one CDF file per day

    prefix = 'erg_orb_' + level + '_'
    # Predicted-orbit L2 products carry the prediction type in the prefix.
    if (datatype in ["pre", "spre", "mpre", "lpre"]) and (level == 'l2'):
        prefix = 'erg_orb_' + datatype + '_' + level + '_'

    # Build the remote path pattern for the requested level/datatype/model.
    if level == 'l3':
        if model == 'op':
            pathformat = 'satellite/erg/orb/'+level + \
                '/opq/%Y/%m/erg_orb_'+level+'_op_%Y%m%d_'
        else:
            pathformat = 'satellite/erg/orb/'+level+'/'+model + \
                '/%Y/%m/erg_orb_'+level+'_'+model+'_%Y%m%d_'
    elif level == 'l2':
        if datatype == 'def':
            pathformat = 'satellite/erg/orb/' + datatype + \
                '/%Y/erg_orb_' + level + '_%Y%m%d_'
        else:
            pathformat = 'satellite/erg/orb/' + datatype + \
                '/%Y/erg_orb_' + datatype + '_'+level+'_%Y%m%d_'
    if version is None:
        pathformat += 'v??.cdf'  # match any version on disk/server
    else:
        pathformat += version + '.cdf'

    loaded_data = load(pathformat=pathformat, trange=trange, level=level,
                       datatype=datatype, file_res=file_res, prefix=prefix,
                       suffix=suffix, get_support_data=get_support_data,
                       varformat=varformat, varnames=varnames,
                       downloadonly=downloadonly, notplot=notplot,
                       time_clip=time_clip, no_update=no_update,
                       uname=uname, passwd=passwd, version=version)

    if (len(loaded_data) > 0) and ror:
        try:
            # Fetch the CDF global attributes from the last loaded item.
            if isinstance(loaded_data, list):
                if downloadonly:
                    cdf_file = cdflib.CDF(loaded_data[-1])
                    gatt = cdf_file.globalattsget()
                else:
                    gatt = get_data(loaded_data[-1], metadata=True)['CDF']['GATT']
            elif isinstance(loaded_data, dict):
                gatt = loaded_data[list(loaded_data.keys())[-1]]['CDF']['GATT']

            # --- print PI info and rules of the road
            print(' ')
            print('**************************************************************************')
            print(gatt["LOGICAL_SOURCE_DESCRIPTION"])
            print('')
            if level == 'l3':
                print('Information about ERG L3 orbit')
            elif level == 'l2':
                print('Information about ERG orbit')
            print('')
            # print('PI: ', gatt['PI_NAME'])  # not need?
            # print("Affiliation: "+gatt["PI_AFFILIATION"])  # not need?
            print('')
            print('RoR of ERG project common: https://ergsc.isee.nagoya-u.ac.jp/data_info/rules_of_the_road.shtml.en')
            print('')
            print('Contact: erg-sc-core at isee.nagoya-u.ac.jp')
            print('**************************************************************************')
        except Exception:
            # Was a bare "except:"; narrowed so KeyboardInterrupt/SystemExit
            # are not silently swallowed. Printing the RoR banner is
            # best-effort and must not abort the load.
            print('Failed to print PI info and rules of the road')

    if initial_notplot_flag or downloadonly:
        return loaded_data

    remove_duplicated_tframe(tnames(prefix + '*pos*'))

    if (level == 'l2') and (datatype == 'def'):
        # remove -1.0e+30 fill values
        if prefix + 'pos_Lm' + suffix in loaded_data:
            clip(prefix + 'pos_Lm' + suffix, -1e+6, 1e6)
            _, bdata = get_data(prefix + 'pos_Lm' + suffix)
            ylim(prefix + 'pos_Lm' + suffix, np.nanmin(bdata), np.nanmax(bdata))

        # set labels
        options(prefix + 'pos_gse' + suffix, 'legend_names', ['X', 'Y', 'Z'])
        options(prefix + 'pos_gsm' + suffix, 'legend_names', ['X', 'Y', 'Z'])
        options(prefix + 'pos_sm' + suffix, 'legend_names', ['X', 'Y', 'Z'])
        options(prefix + 'pos_rmlatmlt' + suffix, 'legend_names',
                ['Re', 'MLAT', 'MLT'])
        options(prefix + 'pos_eq' + suffix, 'legend_names', ['Req', 'MLT'])
        options(prefix + 'pos_iono_north' + suffix, 'legend_names',
                ['GLAT', 'GLON'])
        options(prefix + 'pos_iono_south' + suffix, 'legend_names',
                ['GLAT', 'GLON'])
        options(prefix + 'pos_blocal' + suffix, 'legend_names', ['X', 'Y', 'Z'])
        options(prefix + 'pos_blocal_mag' + suffix, 'legend_names',
                ['B(model)_at_ERG'])
        # options(prefix + 'pos_blocal_mag' + suffix, 'legend_names', ['B(model)\n_at_ERG'])  # Can't break?
        options(prefix + 'pos_beq' + suffix, 'legend_names', ['X', 'Y', 'Z'])
        options(prefix + 'pos_Lm' + suffix, 'legend_names',
                ['90deg', '60deg', '30deg'])
        options(prefix + 'vel_gse' + suffix, 'legend_names',
                ['X[km/s]', 'Y[km/s]', 'Z[km/s]'])
        options(prefix + 'vel_gsm' + suffix, 'legend_names',
                ['X[km/s]', 'Y[km/s]', 'Z[km/s]'])
        options(prefix + 'vel_sm' + suffix, 'legend_names',
                ['X[km/s]', 'Y[km/s]', 'Z[km/s]'])

        # set color
        options(prefix + 'pos_gse' + suffix, 'Color', ['b', 'g', 'r'])
        options(prefix + 'pos_gsm' + suffix, 'Color', ['b', 'g', 'r'])
        options(prefix + 'pos_sm' + suffix, 'Color', ['b', 'g', 'r'])
        options(prefix + 'pos_rmlatmlt' + suffix, 'Color', ['b', 'g', 'r'])
        options(prefix + 'pos_blocal' + suffix, 'Color', ['b', 'g', 'r'])
        options(prefix + 'pos_beq' + suffix, 'Color', ['b', 'g', 'r'])
        options(prefix + 'pos_Lm' + suffix, 'Color', ['b', 'g', 'r'])
        options(prefix + 'vel_gse' + suffix, 'Color', ['b', 'g', 'r'])
        options(prefix + 'vel_gsm' + suffix, 'Color', ['b', 'g', 'r'])
        options(prefix + 'vel_sm' + suffix, 'Color', ['b', 'g', 'r'])

        # set y axis to logscale
        options(prefix + 'pos_blocal_mag' + suffix, 'ylog', 1)
        options(prefix + 'pos_beq' + suffix, 'ylog', 1)
    elif (datatype in ["pre", "spre", "mpre", "lpre"]) and (level == 'l2'):
        # set labels
        options(prefix + 'pos_gse' + suffix, 'legend_names', ['X', 'Y', 'Z'])
        options(prefix + 'pos_gsm' + suffix, 'legend_names', ['X', 'Y', 'Z'])
        options(prefix + 'pos_sm' + suffix, 'legend_names', ['X', 'Y', 'Z'])
        options(prefix + 'pos_rmlatmlt' + suffix, 'legend_names',
                ['Re', 'MLAT', 'MLT'])
        options(prefix + 'pos_eq' + suffix, 'legend_names', ['Req', 'MLT'])
        options(prefix + 'pos_iono_north' + suffix, 'legend_names',
                ['GLAT', 'GLON'])
        options(prefix + 'pos_iono_south' + suffix, 'legend_names',
                ['GLAT', 'GLON'])
        options(prefix + 'pos_blocal' + suffix, 'legend_names', ['X', 'Y', 'Z'])
        options(prefix + 'pos_blocal_mag' + suffix, 'legend_names',
                'B(' + datatype + ')\n_at ERG')
        options(prefix + 'pos_beq' + suffix, 'legend_names', ['X', 'Y', 'Z'])
        options(prefix + 'pos_Lm' + suffix, 'legend_names',
                ['90deg', '60deg', '30deg'])
        options(prefix + 'vel_gse' + suffix, 'legend_names',
                ['X[km/s]', 'Y[km/s]', 'Z[km/s]'])
        options(prefix + 'vel_gsm' + suffix, 'legend_names',
                ['X[km/s]', 'Y[km/s]', 'Z[km/s]'])
        options(prefix + 'vel_sm' + suffix, 'legend_names',
                ['X[km/s]', 'Y[km/s]', 'Z[km/s]'])

        # set color
        options(prefix + 'pos_gse' + suffix, 'Color', ['b', 'g', 'r'])
        options(prefix + 'pos_gsm' + suffix, 'Color', ['b', 'g', 'r'])
        options(prefix + 'pos_sm' + suffix, 'Color', ['b', 'g', 'r'])
        options(prefix + 'pos_rmlatmlt' + suffix, 'Color', ['b', 'g', 'r'])
        options(prefix + 'pos_blocal' + suffix, 'Color', ['b', 'g', 'r'])
        options(prefix + 'pos_beq' + suffix, 'Color', ['b', 'g', 'r'])
        options(prefix + 'pos_Lm' + suffix, 'Color', ['b', 'g', 'r'])
        options(prefix + 'vel_gse' + suffix, 'Color', ['b', 'g', 'r'])
        options(prefix + 'vel_gsm' + suffix, 'Color', ['b', 'g', 'r'])
        options(prefix + 'vel_sm' + suffix, 'Color', ['b', 'g', 'r'])

        # set y axis to logscale
        options(prefix + 'pos_blocal_mag' + suffix, 'ylog', 1)
        options(prefix + 'pos_beq' + suffix, 'ylog', 1)
    elif level == 'l3':
        # remove -1.0e+30 fill values: only 1-D variables (time + values)
        # are clipped here; spectrogram-like variables have a third element.
        for i in range(len(loaded_data)):
            get_data_vars = get_data(loaded_data[i])
            if len(get_data_vars) < 3:
                if np.nanmin(get_data_vars[1]) < -1.0e+29:
                    clip(loaded_data[i], -1e+6, 1e6)
                    _, bdata = get_data(loaded_data[i])
                    ylim(loaded_data[i], np.nanmin(bdata), np.nanmax(bdata))
        if model in ["op", "t89", "ts04"]:
            if model == "ts04":
                # variable names embed the upper-case model string
                model = model.upper()

            # set ytitle
            options(prefix + 'pos_lmc_' + model + suffix, 'ytitle',
                    f'Lmc ({model})')
            options(prefix + 'pos_lstar_' + model + suffix, 'ytitle',
                    f'Lstar ({model})')
            options(prefix + 'pos_I_' + model + suffix, 'ytitle',
                    f'I ({model})')
            options(prefix + 'pos_blocal_' + model + suffix, 'ytitle',
                    f'Blocal ({model})')
            options(prefix + 'pos_beq_' + model + suffix, 'ytitle',
                    f'Beq ({model})')
            options(prefix + 'pos_eq_' + model + suffix, 'ytitle',
                    f'Eq_pos ({model})')
            options(prefix + 'pos_iono_north_' + model + suffix, 'ytitle',
                    f'footprint_north ({model})')
            options(prefix + 'pos_iono_south_' + model + suffix, 'ytitle',
                    f'footprint_south ({model})')

            # set ysubtitle
            options(prefix + 'pos_lmc_' + model + suffix, 'ysubtitle',
                    '[dimensionless]')
            options(prefix + 'pos_lstar_' + model + suffix, 'ysubtitle',
                    '[dimensionless]')
            options(prefix + 'pos_I_' + model + suffix, 'ysubtitle', '[Re]')
            options(prefix + 'pos_blocal_' + model + suffix, 'ysubtitle',
                    '[nT]')
            options(prefix + 'pos_beq_' + model + suffix, 'ysubtitle', '[nT]')
            options(prefix + 'pos_eq_' + model + suffix, 'ysubtitle',
                    '[Re Hour]')
            options(prefix + 'pos_iono_north_' + model + suffix, 'ysubtitle',
                    '[deg. deg.]')
            options(prefix + 'pos_iono_south_' + model + suffix, 'ysubtitle',
                    '[deg. deg.]')

            # set ylabels
            options(prefix + 'pos_lmc_' + model + suffix, 'legend_names', [
                '90deg', '80deg', '70deg', '60deg', '50deg', '40deg',
                '30deg', '20deg', '10deg'
            ])
            options(prefix + 'pos_lstar_' + model + suffix, 'legend_names', [
                '90deg', '80deg', '70deg', '60deg', '50deg', '40deg',
                '30deg', '20deg', '10deg'
            ])
            options(prefix + 'pos_I_' + model + suffix, 'legend_names', [
                '90deg', '80deg', '70deg', '60deg', '50deg', '40deg',
                '30deg', '20deg', '10deg'
            ])
            options(prefix + 'pos_blocal_' + model + suffix, 'legend_names',
                    '|B|')
            options(prefix + 'pos_beq_' + model + suffix, 'legend_names',
                    '|B|')
            options(prefix + 'pos_eq_' + model + suffix, 'legend_names',
                    ['Re', 'MLT'])
            options(prefix + 'pos_iono_north_' + model + suffix,
                    'legend_names', ['GLAT', 'GLON'])
            options(prefix + 'pos_iono_south_' + model + suffix,
                    'legend_names', ['GLAT', 'GLON'])

            # set y axis to logscale
            options(prefix + 'pos_blocal_' + model + suffix, 'ylog', 1)
            options(prefix + 'pos_beq_' + model + suffix, 'ylog', 1)

    return loaded_data
def mgf(trange=['2017-03-27', '2017-03-28'],
        datatype='8sec',
        level='l2',
        suffix='',
        get_support_data=False,
        varformat=None,
        varnames=[],
        downloadonly=False,
        notplot=False,
        no_update=False,
        uname=None,
        passwd=None,
        time_clip=False,
        ror=True,
        coord='dsi',
        version=None):
    """
    This function loads data from the MGF experiment from the Arase mission

    Parameters:
        trange : list of str
            time range of interest [starttime, endtime] with the format
            'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
            ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']

        datatype: str
            Data type; Valid options:

        level: str
            Data level; Valid options:

        suffix: str
            The tplot variable names will be given this suffix.  By default,
            no suffix is added.

        get_support_data: bool
            Data with an attribute "VAR_TYPE" with a value of "support_data"
            will be loaded into tplot.  By default, only loads in data with a
            "VAR_TYPE" attribute of "data".

        varformat: str
            The file variable formats to load into tplot.  Wildcard character
            "*" is accepted.  By default, all variables are loaded in.

        varnames: list of str
            List of variable names to load (if not specified,
            all data variables are loaded)

        downloadonly: bool
            Set this flag to download the CDF files, but not load them into
            tplot variables

        notplot: bool
            Return the data in hash tables instead of creating tplot variables

        no_update: bool
            If set, only load data from your local cache

        time_clip: bool
            Time clip the variables to exactly the range specified in the
            trange keyword

        ror: bool
            If set, print PI info and rules of the road

        coord: str
            "sm", "dsi", "gse", "gsm", "sgi"

        version: str
            Set this value to specify the version of cdf files
            (such as "v03.03", "v03.04", ...)

    Returns:
        List of tplot variables created.
    """
    # Remember whether the caller asked for hash tables; "notplot" is also
    # forwarded to load() below, so the original value must be preserved.
    initial_notplot_flag = False
    if notplot:
        initial_notplot_flag = True

    # Normalize shorthand datatype aliases to the canonical names used in
    # the remote file paths.
    if datatype == '8s' or datatype == '8':
        datatype = '8sec'
    elif datatype == '64':
        datatype = '64hz'
    elif datatype == '128':
        datatype = '128hz'
    elif datatype == '256':
        datatype = '256hz'

    prefix = 'erg_mgf_'+level+'_'
    if datatype == '8sec':
        file_res = 3600. * 24  # daily files for 8-sec data
        pathformat = 'satellite/erg/mgf/'+level+'/'+datatype + \
            '/%Y/%m/erg_mgf_'+level+'_'+datatype+'_%Y%m%d_'
    else:
        file_res = 3600.  # hourly files for high-rate data
        pathformat = 'satellite/erg/mgf/'+level+'/'+datatype + \
            '/%Y/%m/erg_mgf_'+level+'_'+datatype+'_' + coord + '_%Y%m%d%H_'
    if version is None:
        pathformat += 'v??.??.cdf'
    else:
        pathformat += version + '.cdf'

    # FIX: "varnames" was accepted but never forwarded to load(), so
    # selective loading silently loaded everything; now forwarded like in
    # the other loaders of this module.
    loaded_data = load(pathformat=pathformat, file_res=file_res,
                       trange=trange, level=level, datatype=datatype,
                       prefix=prefix, suffix=suffix,
                       get_support_data=get_support_data,
                       varformat=varformat, varnames=varnames,
                       downloadonly=downloadonly, notplot=notplot,
                       time_clip=time_clip, no_update=no_update,
                       uname=uname, passwd=passwd)

    if (loaded_data is None) or (loaded_data == []):
        return loaded_data

    if (len(loaded_data) > 0) and ror:
        try:
            # Fetch the CDF global attributes from the last loaded item.
            if isinstance(loaded_data, list):
                if downloadonly:
                    cdf_file = cdflib.CDF(loaded_data[-1])
                    gatt = cdf_file.globalattsget()
                else:
                    gatt = get_data(loaded_data[-1], metadata=True)['CDF']['GATT']
            elif isinstance(loaded_data, dict):
                gatt = loaded_data[list(loaded_data.keys())[-1]]['CDF']['GATT']

            # --- print PI info and rules of the road
            print(' ')
            print('**************************************************************************')
            print(gatt["LOGICAL_SOURCE_DESCRIPTION"])
            print('')
            print('Information about ERG MGF')
            print('')
            print('PI: ', gatt['PI_NAME'])
            print("Affiliation: "+gatt["PI_AFFILIATION"])
            print('')
            print('RoR of ERG project common: https://ergsc.isee.nagoya-u.ac.jp/data_info/rules_of_the_road.shtml.en')
            print('RoR of MGF L2: https://ergsc.isee.nagoya-u.ac.jp/mw/index.php/ErgSat/Mgf')
            print('Contact: erg_mgf_info at isee.nagoya-u.ac.jp')
            print('**************************************************************************')
        except Exception:
            # Was a bare "except:"; narrowed so KeyboardInterrupt/SystemExit
            # are not silently swallowed. The banner is best-effort only.
            print('Failed to print PI info and rules of the road')

    if initial_notplot_flag or downloadonly:
        return loaded_data

    if datatype == '8sec':
        # remove -1.0e+30 fill values
        clip(prefix + 'mag_'+datatype+'_dsi'+suffix, -1e+6, 1e6)
        clip(prefix + 'mag_'+datatype+'_gse'+suffix, -1e+6, 1e6)
        clip(prefix + 'mag_'+datatype+'_gsm'+suffix, -1e+6, 1e6)
        clip(prefix + 'mag_'+datatype+'_sm'+suffix, -1e+6, 1e6)
        clip(prefix + 'magt_'+datatype+suffix, -1e+6, 1e6)
        clip(prefix + 'rmsd_'+datatype+'_dsi'+suffix, -1e+6, +1e+6)
        clip(prefix + 'rmsd_'+datatype+'_gse'+suffix, -1e+6, +1e+6)
        clip(prefix + 'rmsd_'+datatype+'_gsm'+suffix, -1e+6, +1e+6)
        clip(prefix + 'rmsd_'+datatype+'_sm'+suffix, -1e+6, +1e+6)
        clip(prefix + 'rmsd_'+datatype+suffix, 0., 80.)
        clip(prefix + 'dyn_rng_'+datatype+suffix, -120., +1e+6)
        clip(prefix + 'igrf_'+datatype+'_dsi'+suffix, -1e+6, +1e+6)
        clip(prefix + 'igrf_'+datatype+'_gse'+suffix, -1e+6, +1e+6)
        clip(prefix + 'igrf_'+datatype+'_gsm'+suffix, -1e+6, +1e+6)
        clip(prefix + 'igrf_'+datatype+'_sm'+suffix, -1e+6, +1e+6)

        # set yrange from the (clipped) data
        _, bdata = get_data(prefix + 'mag_'+datatype+'_dsi'+suffix)
        ylim(prefix + 'mag_'+datatype+'_dsi'+suffix,
             np.nanmin(bdata), np.nanmax(bdata))
        _, bdata = get_data(prefix + 'mag_'+datatype+'_gse'+suffix)
        ylim(prefix + 'mag_'+datatype+'_gse'+suffix,
             np.nanmin(bdata), np.nanmax(bdata))
        _, bdata = get_data(prefix + 'mag_'+datatype+'_gsm'+suffix)
        ylim(prefix + 'mag_'+datatype+'_gsm'+suffix,
             np.nanmin(bdata), np.nanmax(bdata))
        _, bdata = get_data(prefix + 'mag_'+datatype+'_sm'+suffix)
        ylim(prefix + 'mag_'+datatype+'_sm'+suffix,
             np.nanmin(bdata), np.nanmax(bdata))
        _, bdata = get_data(prefix + 'magt_'+datatype+suffix)
        ylim(prefix + 'magt_'+datatype+suffix,
             np.nanmin(bdata), np.nanmax(bdata))
        _, bdata = get_data(prefix + 'rmsd_'+datatype+suffix)
        ylim(prefix + 'rmsd_'+datatype+suffix,
             np.nanmin(bdata), np.nanmax(bdata))
        _, bdata = get_data(prefix + 'rmsd_'+datatype+'_dsi'+suffix)
        ylim(prefix + 'rmsd_'+datatype+'_dsi'+suffix,
             np.nanmin(bdata), np.nanmax(bdata))
        _, bdata = get_data(prefix + 'rmsd_'+datatype+'_gse'+suffix)
        ylim(prefix + 'rmsd_'+datatype+'_gse'+suffix,
             np.nanmin(bdata), np.nanmax(bdata))
        _, bdata = get_data(prefix + 'rmsd_'+datatype+'_gsm'+suffix)
        ylim(prefix + 'rmsd_'+datatype+'_gsm'+suffix,
             np.nanmin(bdata), np.nanmax(bdata))
        _, bdata = get_data(prefix + 'rmsd_'+datatype+'_sm'+suffix)
        ylim(prefix + 'rmsd_'+datatype+'_sm'+suffix,
             np.nanmin(bdata), np.nanmax(bdata))
        # NOTE(review): the next two lines duplicate the scalar rmsd ylim
        # above; kept for behavioral fidelity (the call is idempotent).
        _, bdata = get_data(prefix + 'rmsd_'+datatype+suffix)
        ylim(prefix + 'rmsd_'+datatype+suffix,
             np.nanmin(bdata), np.nanmax(bdata))
        _, bdata = get_data(prefix + 'quality_'+datatype+suffix)
        ylim(prefix + 'quality_'+datatype+suffix,
             np.nanmin(bdata), np.nanmax(bdata))
        _, bdata = get_data(prefix + 'quality_'+datatype+'_gc'+suffix)
        ylim(prefix + 'quality_'+datatype+'_gc' + suffix,
             np.nanmin(bdata), np.nanmax(bdata))

        # set labels
        options(prefix + 'mag_'+datatype+'_dsi'+suffix,
                'legend_names', ['Bx', 'By', 'Bz'])
        options(prefix + 'mag_'+datatype+'_gse'+suffix,
                'legend_names', ['Bx', 'By', 'Bz'])
        options(prefix + 'mag_'+datatype+'_gsm'+suffix,
                'legend_names', ['Bx', 'By', 'Bz'])
        options(prefix + 'mag_'+datatype+'_sm'+suffix,
                'legend_names', ['Bx', 'By', 'Bz'])
        options(prefix + 'rmsd_'+datatype+'_dsi'+suffix,
                'legend_names', ['Bx', 'By', 'Bz'])
        options(prefix + 'rmsd_'+datatype+'_gse'+suffix,
                'legend_names', ['Bx', 'By', 'Bz'])
        options(prefix + 'rmsd_'+datatype+'_gsm'+suffix,
                'legend_names', ['Bx', 'By', 'Bz'])
        options(prefix + 'rmsd_'+datatype+'_sm'+suffix,
                'legend_names', ['Bx', 'By', 'Bz'])
        options(prefix + 'igrf_'+datatype+'_dsi'+suffix,
                'legend_names', ['Bx', 'By', 'Bz'])
        options(prefix + 'igrf_'+datatype+'_gse'+suffix,
                'legend_names', ['Bx', 'By', 'Bz'])
        options(prefix + 'igrf_'+datatype+'_gsm'+suffix,
                'legend_names', ['Bx', 'By', 'Bz'])
        options(prefix + 'igrf_'+datatype+'_sm'+suffix,
                'legend_names', ['Bx', 'By', 'Bz'])

        # set color of the labels
        options(prefix + 'mag_'+datatype+'_dsi' + suffix,
                'Color', ['b', 'g', 'r'])
        options(prefix + 'mag_'+datatype+'_gse' + suffix,
                'Color', ['b', 'g', 'r'])
        options(prefix + 'mag_'+datatype+'_gsm' + suffix,
                'Color', ['b', 'g', 'r'])
        options(prefix + 'mag_'+datatype+'_sm' + suffix,
                'Color', ['b', 'g', 'r'])
        options(prefix + 'rmsd_'+datatype+'_dsi' + suffix,
                'Color', ['b', 'g', 'r'])
        options(prefix + 'rmsd_'+datatype+'_gse' + suffix,
                'Color', ['b', 'g', 'r'])
        options(prefix + 'rmsd_'+datatype+'_gsm' + suffix,
                'Color', ['b', 'g', 'r'])
        options(prefix + 'rmsd_'+datatype+'_sm' + suffix,
                'Color', ['b', 'g', 'r'])
        options(prefix + 'quality_'+datatype+suffix,
                'Color', ['r', 'g', 'b'])
        options(prefix + 'igrf_'+datatype+'_dsi' + suffix,
                'Color', ['b', 'g', 'r'])
        options(prefix + 'igrf_'+datatype+'_gse' + suffix,
                'Color', ['b', 'g', 'r'])
        options(prefix + 'igrf_'+datatype+'_gsm' + suffix,
                'Color', ['b', 'g', 'r'])
        options(prefix + 'igrf_'+datatype+'_sm' + suffix,
                'Color', ['b', 'g', 'r'])
    else:
        # High-rate data: only one coordinate system is loaded per call.
        # remove -1.0e+30 fill values
        clip(prefix + 'mag_'+datatype+'_' + coord + suffix, -1e+6, 1e6)

        # set yrange
        _, bdata = get_data(prefix + 'mag_'+datatype+'_' + coord + suffix)
        ylim(prefix + 'mag_'+datatype+'_' + coord + suffix,
             np.nanmin(bdata), np.nanmax(bdata))

        # set labels
        options(prefix + 'mag_'+datatype+'_' + coord + suffix,
                'legend_names', ['Bx', 'By', 'Bz'])

        # set color of the labels
        options(prefix + 'mag_'+datatype+'_' + coord + suffix,
                'Color', ['b', 'g', 'r'])

    return loaded_data
def mepe(trange=['2017-03-27', '2017-03-28'],
         datatype='omniflux',
         level='l2',
         suffix='',
         get_support_data=False,
         varformat=None,
         varnames=[],
         downloadonly=False,
         notplot=False,
         no_update=False,
         uname=None,
         passwd=None,
         time_clip=False,
         ror=True):
    """
    This function loads data from the MEP-e experiment from the Arase mission

    Parameters:
        trange : list of str
            time range of interest [starttime, endtime] with the format
            'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
            ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']

        datatype: str
            Data type; Valid options:

        level: str
            Data level; Valid options:

        suffix: str
            The tplot variable names will be given this suffix.  By default,
            no suffix is added.

        get_support_data: bool
            Data with an attribute "VAR_TYPE" with a value of "support_data"
            will be loaded into tplot.  By default, only loads in data with a
            "VAR_TYPE" attribute of "data".

        varformat: str
            The file variable formats to load into tplot.  Wildcard character
            "*" is accepted.  By default, all variables are loaded in.

        varnames: list of str
            List of variable names to load (if not specified,
            all data variables are loaded)

        downloadonly: bool
            Set this flag to download the CDF files, but not load them into
            tplot variables

        notplot: bool
            Return the data in hash tables instead of creating tplot variables

        no_update: bool
            If set, only load data from your local cache

        time_clip: bool
            Time clip the variables to exactly the range specified in the
            trange keyword

        ror: bool
            If set, print PI info and rules of the road

    Returns:
        List of tplot variables created.
    """
    # Remember whether the caller asked for hash tables; "notplot" is also
    # forwarded to load() below, so the original value must be preserved.
    initial_notplot_flag = False
    if notplot:
        initial_notplot_flag = True

    # L3 products only exist as 3-D flux; force the matching datatype.
    if level == 'l3':
        datatype = '3dflux'

    file_res = 3600. * 24  # one CDF file per day
    prefix = 'erg_mepe_'+level + '_' + datatype + '_'
    pathformat = 'satellite/erg/mepe/'+level+'/'+datatype + \
        '/%Y/%m/erg_mepe_'+level+'_'+datatype+'_%Y%m%d_v??_??.cdf'

    loaded_data = load(pathformat=pathformat, trange=trange, level=level,
                       datatype=datatype, file_res=file_res, prefix=prefix,
                       suffix=suffix, get_support_data=get_support_data,
                       varformat=varformat, varnames=varnames,
                       downloadonly=downloadonly, notplot=notplot,
                       time_clip=time_clip, no_update=no_update,
                       uname=uname, passwd=passwd)

    if (len(loaded_data) > 0) and ror:
        try:
            # Fetch the CDF global attributes from the last loaded item.
            if isinstance(loaded_data, list):
                if downloadonly:
                    cdf_file = cdflib.CDF(loaded_data[-1])
                    gatt = cdf_file.globalattsget()
                else:
                    gatt = get_data(loaded_data[-1], metadata=True)['CDF']['GATT']
            elif isinstance(loaded_data, dict):
                gatt = loaded_data[list(loaded_data.keys())[-1]]['CDF']['GATT']

            # --- print PI info and rules of the road
            print(' ')
            print('**************************************************************************')
            print(gatt["LOGICAL_SOURCE_DESCRIPTION"])
            print('')
            print('PI: ', gatt['PI_NAME'])
            print("Affiliation: "+gatt["PI_AFFILIATION"])
            print('')
            print('- The rules of the road (RoR) common to the ERG project:')
            print(' https://ergsc.isee.nagoya-u.ac.jp/data_info/rules_of_the_road.shtml.en')
            print('- RoR for MEP-e data: https://ergsc.isee.nagoya-u.ac.jp/mw/index.php/ErgSat/Mepe')
            print('')
            print('Contact: erg_mep_info at isee.nagoya-u.ac.jp')
            print('**************************************************************************')
        except Exception:
            # Was a bare "except:"; narrowed so KeyboardInterrupt/SystemExit
            # are not silently swallowed. The banner is best-effort only.
            print('Failed to print PI info and rules of the road')

    if initial_notplot_flag or downloadonly:
        return loaded_data

    if datatype == 'omniflux':
        # set spectrogram plot option
        options(prefix + 'FEDO' + suffix, 'Spec', 1)
        # set y axis to logscale
        options(prefix + 'FEDO' + suffix, 'ylog', 1)
        # set ytitle
        options(prefix + 'FEDO' + suffix, 'ytitle', 'ERG\nMEP-e\nFEDO\nEnergy')
        # set ysubtitle
        options(prefix + 'FEDO' + suffix, 'ysubtitle', '[keV]')
        # set ylim
        ylim(prefix + 'FEDO' + suffix, 6., 100.)
        # set z axis to logscale
        options(prefix + 'FEDO' + suffix, 'zlog', 1)
        # set ztitle
        options(prefix + 'FEDO' + suffix, 'ztitle', '[/s-cm^{2}-sr-keV]')
        # change colormap option
        options(prefix + 'FEDO' + suffix, 'Colormap', 'jet')
    elif (datatype == '3dflux') and (level == 'l2'):
        # set spectrogram plot option
        options(prefix + 'FEDU' + suffix, 'Spec', 1)
        options(prefix + 'FEDU_n' + suffix, 'Spec', 1)
        options(prefix + 'FEEDU' + suffix, 'Spec', 1)
        options(prefix + 'count_raw' + suffix, 'Spec', 1)
        # set y axis to logscale
        options(prefix + 'FEDU' + suffix, 'ylog', 1)
        options(prefix + 'FEDU_n' + suffix, 'ylog', 1)
        options(prefix + 'FEEDU' + suffix, 'ylog', 1)
        options(prefix + 'count_raw' + suffix, 'ylog', 1)
        # set ysubtitle
        options(prefix + 'FEDU' + suffix, 'ysubtitle', '[keV]')
        options(prefix + 'FEDU_n' + suffix, 'ysubtitle', '[keV]')
        options(prefix + 'count_raw' + suffix, 'ysubtitle', '[keV]')
        # set ylim
        ylim(prefix + 'FEDU' + suffix, 6., 100.)
        ylim(prefix + 'FEDU_n' + suffix, 6., 100.)
        ylim(prefix + 'count_raw' + suffix, 6., 100.)
        # set z axis to logscale
        options(prefix + 'FEDU' + suffix, 'zlog', 1)
        options(prefix + 'FEDU_n' + suffix, 'zlog', 1)
        options(prefix + 'FEEDU' + suffix, 'zlog', 1)
        options(prefix + 'count_raw' + suffix, 'zlog', 1)
        # set ztitle
        options(prefix + 'FEDU' + suffix, 'ztitle', '[/s-cm^{2}-sr-keV]')
        options(prefix + 'FEDU_n' + suffix, 'ztitle', '[/s-cm^{2}-sr-keV]')
        # change colormap option
        options(prefix + 'FEDU' + suffix, 'Colormap', 'jet')
        options(prefix + 'FEDU_n' + suffix, 'Colormap', 'jet')
        options(prefix + 'FEEDU' + suffix, 'Colormap', 'jet')
        options(prefix + 'count_raw' + suffix, 'Colormap', 'jet')

    return loaded_data
def pwe_wfc(trange=['2017-04-01/12:00:00', '2017-04-01/13:00:00'],
            datatype='waveform',
            mode='65khz',
            level='l2',
            suffix='',
            coord='sgi',
            component='all',
            get_support_data=False,
            varformat=None,
            varnames=[],
            downloadonly=False,
            notplot=False,
            no_update=False,
            uname=None,
            passwd=None,
            time_clip=False,
            ror=True):
    """
    This function loads data from the PWE experiment from the Arase mission

    Parameters:
        trange : list of str
            time range of interest [starttime, endtime] with the format
            'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
            ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']

        datatype: str
            Data type; Valid options: 'waveform' or 'spec' (only these two
            branches are handled below)

        mode: str
            Sampling mode used in the remote file names (e.g. '65khz')

        level: str
            Data level; Valid options:

        suffix: str
            The tplot variable names will be given this suffix.  By default,
            no suffix is added.

        coord: str
            Coordinate label embedded in the waveform file names
            (default 'sgi')

        component: str
            Which components to load: 'all', 'e' (electric) or 'b' (magnetic)

        get_support_data: bool
            Data with an attribute "VAR_TYPE" with a value of "support_data"
            will be loaded into tplot.  By default, only loads in data with a
            "VAR_TYPE" attribute of "data".

        varformat: str
            The file variable formats to load into tplot.  Wildcard character
            "*" is accepted.  By default, all variables are loaded in.

        varnames: list of str
            List of variable names to load (if not specified,
            all data variables are loaded)

        downloadonly: bool
            Set this flag to download the CDF files, but not load them into
            tplot variables

        notplot: bool
            Return the data in hash tables instead of creating tplot variables

        no_update: bool
            If set, only load data from your local cache

        time_clip: bool
            Time clip the variables to exactly the range specified in the
            trange keyword

        ror: bool
            If set, print PI info and rules of the road

    Returns:
        List of tplot variables created.
    """
    # Remember whether the caller asked for hash tables; "notplot" is also
    # forwarded to load() below, so the original value must be preserved.
    initial_notplot_flag = False
    if notplot:
        initial_notplot_flag = True

    file_res = 3600.  # hourly CDF files
    if level == 'l2':
        prefix = 'erg_pwe_wfc_' + level + '_' + mode + '_'

    # One load() call per component; results are collected in a list of
    # lists (or dicts when notplot is set).
    loaded_data = []
    if level == 'l2':
        if datatype == 'waveform':
            tplot_name_list = []
            # NOTE(review): if component is not 'all'/'e'/'b',
            # component_list stays undefined and the loop below raises
            # NameError — consider validating the argument.
            if component == 'all':
                component_list = ['e', 'b']
            elif (component == 'e') or (component == 'b'):
                component_list = [component]
            for com in component_list:
                prefix = 'erg_pwe_wfc_' + level + '_' + com + '_' + mode + '_'
                pathformat = 'satellite/erg/pwe/wfc/'+level+'/'+datatype+'/%Y/%m/erg_pwe_wfc_' + \
                    level+'_'+com+'_'+datatype+'_'+mode+'_'+coord+'_%Y%m%d%H_v??_??.cdf'
                loaded_data.append(
                    load(pathformat=pathformat, trange=trange, level=level,
                         datatype=datatype, file_res=file_res, prefix=prefix,
                         suffix=suffix, get_support_data=get_support_data,
                         varformat=varformat, varnames=varnames,
                         downloadonly=downloadonly, notplot=notplot,
                         time_clip=time_clip, no_update=no_update,
                         uname=uname, passwd=passwd))
                # Record the tplot variable names created for this component
                # so the waveform post-processing below can find them.
                if com == 'e':
                    tplot_name_list += [
                        prefix + 'Ex_waveform', prefix + 'Ey_waveform'
                    ]
                elif com == 'b':
                    tplot_name_list += [
                        prefix + 'Bx_waveform', prefix + 'By_waveform',
                        prefix + 'Bz_waveform'
                    ]
        elif datatype == 'spec':
            prefix_list = []
            component_suffix_list = []
            if component == 'all':
                component_list = ['e', 'b']
            elif (component == 'e') or (component == 'b'):
                component_list = [component]
            for com in component_list:
                prefix = 'erg_pwe_wfc_' + level + '_' + com + '_' + mode + '_'
                # Spectral files have no coordinate token in the name.
                pathformat = 'satellite/erg/pwe/wfc/'+level+'/'+datatype+'/%Y/%m/erg_pwe_wfc_' + \
                    level+'_'+com+'_'+datatype+'_'+mode+'_%Y%m%d%H_v??_??.cdf'
                loaded_data.append(
                    load(pathformat=pathformat, trange=trange, level=level,
                         datatype=datatype, file_res=file_res, prefix=prefix,
                         suffix=suffix, get_support_data=get_support_data,
                         varformat=varformat, varnames=varnames,
                         downloadonly=downloadonly, notplot=notplot,
                         time_clip=time_clip, no_update=no_update,
                         uname=uname, passwd=passwd))
                prefix_list.append(prefix)
                component_suffix_list.append(com.upper() + '_spectra')

    if (len(loaded_data) > 0) and ror:
        try:
            # Fetch the CDF global attributes from the last item of the last
            # per-component load result.
            if isinstance(loaded_data, list):
                if downloadonly:
                    cdf_file = cdflib.CDF(loaded_data[-1][-1])
                    gatt = cdf_file.globalattsget()
                elif notplot:
                    gatt = loaded_data[-1][list(
                        loaded_data[-1].keys())[-1]]['CDF']['GATT']
                else:
                    gatt = get_data(loaded_data[-1][-1],
                                    metadata=True)['CDF']['GATT']

            # --- print PI info and rules of the road
            print(' ')
            print(' ')
            print(
                '**************************************************************************'
            )
            print(gatt["LOGICAL_SOURCE_DESCRIPTION"])
            print('')
            print('Information about ERG PWE WFC')
            print('')
            print('PI: ', gatt['PI_NAME'])
            print("Affiliation: " + gatt["PI_AFFILIATION"])
            print('')
            print(
                'RoR of ERG project common: https://ergsc.isee.nagoya-u.ac.jp/data_info/rules_of_the_road.shtml.en'
            )
            print(
                'RoR of PWE/WFC: https://ergsc.isee.nagoya-u.ac.jp/mw/index.php/ErgSat/Pwe/Wfc'
            )
            print('')
            print('Contact: erg_pwe_info at isee.nagoya-u.ac.jp')
            print(
                '**************************************************************************'
            )
        except:
            # NOTE(review): bare "except:" swallows every exception,
            # including KeyboardInterrupt; consider "except Exception:".
            print('printing PI info and rules of the road was failed')

    if initial_notplot_flag or downloadonly:
        return loaded_data

    if datatype == 'spec':
        trange_in_float = time_float(trange)
        for i in range(len(prefix_list)):
            t_plot_name = prefix_list[i] + component_suffix_list[i]
            # Spectrogram display options.
            options(t_plot_name, 'spec', 1)
            options(t_plot_name, 'colormap', 'jet')
            options(t_plot_name, 'ylog', 1)
            options(t_plot_name, 'zlog', 1)
            options(t_plot_name, 'ysubtitle', '[Hz]')
            ylim(t_plot_name, 32., 2e4)
            if 'E_spectra' in component_suffix_list[i]:
                zlim(t_plot_name, 1e-9, 1e-2)
                options(t_plot_name, 'ztitle', '[mV^2/m^2/Hz]')
                options(t_plot_name, 'ytitle', 'E\nspectra')
            elif 'B_spectra' in component_suffix_list[i]:
                zlim(t_plot_name, 1e-4, 1e2)
                options(t_plot_name, 'ztitle', '[pT^2/Hz]')
                options(t_plot_name, 'ytitle', 'B\nspectra')

            # Clip the stored data to the requested trange.  t_min_index /
            # t_max_index bracket the requested window; -1 is used as an
            # "up to the end" sentinel in the slices below.
            get_data_vars = get_data(t_plot_name)
            time_array = get_data_vars[0]
            if time_array[0] <= trange_in_float[0]:
                t_ge_indices = np.where(time_array <= trange_in_float[0])
                t_min_index = t_ge_indices[0][-1]
            else:
                t_min_index = 0
            if trange_in_float[1] <= time_array[-1]:
                t_le_indices = np.where(trange_in_float[1] <= time_array)
                t_max_index = t_le_indices[0][0]
            else:
                t_max_index = -1
            # Guarantee a non-empty slice when both indices coincide.
            if t_min_index == t_max_index:
                t_max_index = +1
            if (t_min_index != 0) or (t_max_index != -1):
                # Keep the full-range data under a "_all_loaded_time_range"
                # name and overwrite the original variable with the clipped
                # window.
                meta_data = get_data(t_plot_name, metadata=True)
                store_data(t_plot_name,
                           newname=t_plot_name + '_all_loaded_time_range')
                store_data(t_plot_name,
                           data={
                               'x': time_array[t_min_index:t_max_index],
                               'y': get_data_vars[1][t_min_index:t_max_index],
                               'v': get_data_vars[2]
                           },
                           attr_dict=meta_data)
                # Re-apply the z-axis options lost by store_data.
                options(t_plot_name, 'zlog', 1)
                if 'E_spectra' in t_plot_name:
                    zlim(t_plot_name, 1e-9, 1e-2)
                elif 'B_spectra' in t_plot_name:
                    zlim(t_plot_name, 1e-4, 1e2)

    if datatype == 'waveform':
        trange_in_float = time_float(trange)
        yn = ''
        all_time_range_flag = False
        # Degenerate time range: interactively offer the full range instead.
        if trange_in_float[1] - trange_in_float[0] <= 0.:
            yn = input('Invalid time range. Use full time range ?:[y/n] ')
            if yn == 'y':
                all_time_range_flag = True
                t_min_index = 0
                t_max_index = -1
            else:
                return
        for t_plot_name in tplot_name_list:
            get_data_vars = get_data(t_plot_name)
            dl_in = get_data(t_plot_name, metadata=True)
            time_array = get_data_vars[0]
            if not all_time_range_flag:
                # Same bracket-the-window index logic as the 'spec' branch.
                if time_array[0] <= trange_in_float[0]:
                    t_ge_indices = np.where(time_array <= trange_in_float[0])
                    t_min_index = t_ge_indices[0][-1]
                else:
                    t_min_index = 0
                if trange_in_float[1] <= time_array[-1]:
                    t_le_indices = np.where(trange_in_float[1] <= time_array)
                    t_max_index = t_le_indices[0][0]
                else:
                    t_max_index = -1
                if t_min_index == t_max_index:
                    t_max_index = +1
            # Replace fill values (-1.0e+30) with NaN before flattening.
            data = np.where(get_data_vars[1] <= -1e+30, np.nan,
                            get_data_vars[1])
            # Expand the 2-D (record x sample) waveform into a flat 1-D
            # series: each record's start time plus the per-sample offsets
            # dt (dt is in milliseconds, hence the 1e-3 factor).
            dt = get_data_vars[2]
            ndt = dt.size
            ndata = (t_max_index - t_min_index) * ndt
            time_new = (np.tile(time_array[t_min_index:t_max_index],
                                (ndt, 1)).T + dt * 1e-3).reshape(ndata)
            data_new = data[t_min_index:t_max_index].reshape(ndata)
            store_data(t_plot_name,
                       data={
                           'x': time_new,
                           'y': data_new
                       },
                       attr_dict=dl_in)
            options(t_plot_name, 'ytitle', '\n'.join(t_plot_name.split('_')))
            # ylim settings because pytplot.timespan() doesn't affect in ylim.
            # May be it will be no need in future.
            if not all_time_range_flag:
                # Recompute the window indices on the expanded time axis and
                # scale the y-axis to the data actually shown.
                if time_new[0] <= trange_in_float[0]:
                    t_min_index = np.where(
                        (time_new <= trange_in_float[0]))[0][-1]
                else:
                    t_min_index = 0
                if trange_in_float[1] <= time_new[-1]:
                    t_max_index = np.where(
                        trange_in_float[1] <= time_new)[0][0]
                else:
                    t_max_index = -1
                ylim_min = np.nanmin(data_new[t_min_index:t_max_index])
                ylim_max = np.nanmax(data_new[t_min_index:t_max_index])
                ylim(t_plot_name, ylim_min, ylim_max)

    return loaded_data
def lepe(trange=['2017-04-04', '2017-04-05'],
         datatype='omniflux',
         level='l2',
         suffix='',
         get_support_data=False,
         varformat=None,
         varnames=[],
         downloadonly=False,
         notplot=False,
         no_update=False,
         uname=None,
         passwd=None,
         time_clip=False,
         ror=True,
         version=None,
         only_fedu=False,
         et_diagram=False):
    """
    This function loads data from the LEP-e experiment from the Arase mission

    Parameters:
        trange : list of str
            time range of interest [starttime, endtime] with the format
            'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
            ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']

        datatype: str
            Data type; Valid options: 'omniflux', '3dflux' (level 'l2').
            Forced to 'pa' when level='l3'.

        level: str
            Data level; Valid options: 'l2', 'l3'

        suffix: str
            The tplot variable names will be given this suffix.
            By default, no suffix is added.

        get_support_data: bool
            Data with an attribute "VAR_TYPE" with a value of "support_data"
            will be loaded into tplot. By default, only loads in data with a
            "VAR_TYPE" attribute of "data".

        varformat: str
            The file variable formats to load into tplot. Wildcard character
            "*" is accepted. By default, all variables are loaded in.

        varnames: list of str
            List of variable names to load (if not specified,
            all data variables are loaded)

        downloadonly: bool
            Set this flag to download the CDF files, but not load them into
            tplot variables

        notplot: bool
            Return the data in hash tables instead of creating tplot variables

        no_update: bool
            If set, only load data from your local cache

        time_clip: bool
            Time clip the variables to exactly the range specified
            in the trange keyword

        ror: bool
            If set, print PI info and rules of the road

        version: str
            Set this value to specify the version of cdf files (such as "v02_02")

        only_fedu: bool
            If set, do not make the erg_lepe_l3_pa_enech_??(??:01,02,..32)_FEDU
            tplot variables

        et_diagram: bool
            If set, make the erg_lepe_l3_pa_pabin_??(??:01,02,..16)_FEDU
            tplot variables

    Returns:
        List of tplot variables created.
    """
    # Remember the caller's notplot request; notplot is forced on below for
    # some datatypes, and we must honor the original value when returning.
    initial_notplot_flag = False
    if notplot:
        initial_notplot_flag = True

    # L3 files only exist as pitch-angle ('pa') products.
    if level == 'l3':
        datatype = 'pa'

    if ((level == 'l2') and (datatype == 'omniflux')) or \
            ((level == 'l2') and (datatype == '3dflux')) or \
            (level == 'l3'):
        # to avoid failure of creation plot variables (at store_data.py) of lepe
        notplot = True

    file_res = 3600. * 24  # daily files
    prefix = 'erg_lepe_'+level+'_' + datatype + '_'
    pathformat = 'satellite/erg/lepe/'+level+'/'+datatype + \
        '/%Y/%m/erg_lepe_'+level+'_'+datatype+'_%Y%m%d_'
    if version is None:
        pathformat += 'v??_??.cdf'  # wildcard: pick up any available version
    else:
        pathformat += version + '.cdf'

    loaded_data = load(pathformat=pathformat, trange=trange, level=level,
                       datatype=datatype, file_res=file_res, prefix=prefix,
                       suffix=suffix, get_support_data=get_support_data,
                       varformat=varformat, varnames=varnames,
                       downloadonly=downloadonly, notplot=notplot,
                       time_clip=time_clip, no_update=no_update,
                       uname=uname, passwd=passwd)

    if (len(loaded_data) > 0) and ror:
        try:
            # Global attributes come from the CDF file directly when only
            # downloading, otherwise from the stored tplot metadata.
            if isinstance(loaded_data, list):
                if downloadonly:
                    cdf_file = cdflib.CDF(loaded_data[-1])
                    gatt = cdf_file.globalattsget()
                else:
                    gatt = get_data(loaded_data[-1], metadata=True)['CDF']['GATT']
            elif isinstance(loaded_data, dict):
                gatt = loaded_data[list(loaded_data.keys())[-1]]['CDF']['GATT']

            # --- print PI info and rules of the road
            print(' ')
            print('**************************************************************************')
            print(gatt["LOGICAL_SOURCE_DESCRIPTION"])
            print('')
            print('Information about ERG LEPe')
            print('')
            print('PI: ', gatt['PI_NAME'])
            print("Affiliation: "+gatt["PI_AFFILIATION"])
            print('')
            print('RoR of ERG project common: https://ergsc.isee.nagoya-u.ac.jp/data_info/rules_of_the_road.shtml.en')
            if level == 'l2':
                print('RoR of LEPe L2: https://ergsc.isee.nagoya-u.ac.jp/mw/index.php/ErgSat/Lepe')
            if level == 'l3':
                print('RoR of LEPe L3: https://ergsc.isee.nagoya-u.ac.jp/mw/index.php/ErgSat/Lepe')
                print('RoR of MGF L2: https://ergsc.isee.nagoya-u.ac.jp/mw/index.php/ErgSat/Mgf')
            print('')
            print('Contact: erg_lepe_info at isee.nagoya-u.ac.jp')
            print('**************************************************************************')
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are not swallowed; RoR printing is best-effort only.
            print('printing PI info and rules of the road was failed')

    if initial_notplot_flag or downloadonly:
        return loaded_data

    if (isinstance(loaded_data, dict)) and (len(loaded_data) > 0):
        if (level == 'l2') and (datatype == 'omniflux'):
            tplot_variables = []

            # Energy bin centers: arithmetic mean of the bin min/max rows.
            v_array = (loaded_data[prefix + 'FEDO' + suffix]['v'][:, 0, :]
                       + loaded_data[prefix + 'FEDO' + suffix]['v'][:, 1, :]) / 2.
            # change minus values to NaN
            v_array = np.where(v_array < 0., np.nan, v_array)
            # Drop records whose energy table is entirely NaN.
            all_nan_v_indices_array = np.where(
                np.all(np.isnan(v_array), axis=1))[0]

            store_data(prefix + 'FEDO' + suffix,
                       data={'x': np.delete(loaded_data[prefix + 'FEDO' + suffix]['x'],
                                            all_nan_v_indices_array, axis=0),
                             'y': np.delete(loaded_data[prefix + 'FEDO' + suffix]['y'],
                                            all_nan_v_indices_array, axis=0),
                             'v': np.delete(v_array, all_nan_v_indices_array, 0)},
                       attr_dict={'CDF':loaded_data[prefix + 'FEDO' + suffix]['CDF']})
            tplot_variables.append(prefix + 'FEDO' + suffix)

            # set spectrogram plot option
            options(prefix + 'FEDO' + suffix, 'Spec', 1)
            # change minus values to NaN in y array
            clip(prefix + 'FEDO' + suffix, 0.,
                 np.nanmax(loaded_data[prefix + 'FEDO' + suffix]['y']))
            # set y axis to logscale
            options(prefix + 'FEDO' + suffix, 'ylog', 1)
            # set ylim
            ylim(prefix + 'FEDO' + suffix, 19, 21*1e3)
            # set ytitle
            options(prefix + 'FEDO' + suffix, 'ytitle', 'ERG\nLEP-e\nFEDO\nEnergy')
            # set ysubtitle
            options(prefix + 'FEDO' + suffix, 'ysubtitle', '[eV]')
            # set z axis to logscale
            options(prefix + 'FEDO' + suffix, 'zlog', 1)
            # set zlim
            zlim(prefix + 'FEDO' + suffix, 1, 1e6)
            # set ztitle
            options(prefix + 'FEDO' + suffix, 'ztitle', '[/cm^{2}-str-s-eV]')
            # change colormap option
            options(prefix + 'FEDO' + suffix, 'Colormap', 'jet')

            return tplot_variables

        if (level == 'l2') and (datatype == '3dflux'):
            tplot_variables = []
            other_variables_dict = {}

            if prefix + 'FEDU' + suffix in loaded_data:
                store_data(prefix + 'FEDU' + suffix,
                           data={'x': loaded_data[prefix + 'FEDU' + suffix]['x'],
                                 'y': loaded_data[prefix + 'FEDU' + suffix]['y'],
                                 'v1': np.sqrt(loaded_data[prefix + 'FEDU' + suffix]['v'][:, 0, :]
                                               * loaded_data[prefix + 'FEDU' + suffix]['v'][:, 1, :]),  # geometric mean
                                 'v2': ['01', '02', '03', '04', '05', 'A', 'B',
                                        '18', '19', '20', '21', '22'],
                                 'v3': [i for i in range(16)]},
                           attr_dict={'CDF':loaded_data[prefix + 'FEDU' + suffix]['CDF']})
                tplot_variables.append(prefix + 'FEDU' + suffix)
                options(prefix + 'FEDU' + suffix, 'spec', 1)
                if prefix + 'FEDU' + suffix in tplot_variables:
                    clip(prefix + 'FEDU' + suffix, 0,
                         np.nanmax(loaded_data[prefix + 'FEDU' + suffix]['y']))
                ylim(prefix + 'FEDU' + suffix, 19, 21*1e3)
                zlim(prefix + 'FEDU' + suffix, 1, 1e6)
                options(prefix + 'FEDU' + suffix, 'zlog', 1)
                options(prefix + 'FEDU' + suffix, 'ylog', 1)
                options(prefix + 'FEDU' + suffix, 'ysubtitle', '[eV]')
                # Pass the count-rate products through untouched, appended as a
                # dict so the caller can still reach them.
                other_variables_dict[prefix + 'Count_Rate' + suffix] = \
                    loaded_data[prefix + 'Count_Rate' + suffix]
                other_variables_dict[prefix + 'Count_Rate_BG' + suffix] = \
                    loaded_data[prefix + 'Count_Rate_BG' + suffix]
                tplot_variables.append(other_variables_dict)

            return tplot_variables

        if level == 'l3':
            tplot_variables = []

            if prefix + 'FEDU' + suffix in loaded_data:
                store_data(prefix + 'FEDU' + suffix,
                           data={'x': loaded_data[prefix + 'FEDU' + suffix]['x'],
                                 'y': loaded_data[prefix + 'FEDU' + suffix]['y'],
                                 'v1': (loaded_data[prefix + 'FEDU' + suffix]['v1'][:, 0, :]
                                        + loaded_data[prefix + 'FEDU' + suffix]['v1'][:, 1, :]) / 2.,  # arithmetic mean
                                 'v2': loaded_data[prefix + 'FEDU' + suffix]['v2']},
                           attr_dict={'CDF':loaded_data[prefix + 'FEDU' + suffix]['CDF']})
                tplot_variables.append(prefix + 'FEDU' + suffix)
                options(prefix + 'FEDU' + suffix, 'spec', 1)
                if prefix + 'FEDU' + suffix in tplot_variables:
                    clip(prefix + 'FEDU' + suffix, 0,
                         np.nanmax(loaded_data[prefix + 'FEDU' + suffix]['y']))
                ylim(prefix + 'FEDU' + suffix, 19, 21*1e3)
                zlim(prefix + 'FEDU' + suffix, 1, 1e6)
                options(prefix + 'FEDU' + suffix, 'zlog', 1)
                options(prefix + 'FEDU' + suffix, 'ylog', 1)
                options(prefix + 'FEDU' + suffix, 'ysubtitle', '[eV]')

                FEDU_get_data = get_data(prefix + 'FEDU' + suffix)
                FEDU_CDF_data = loaded_data[prefix + 'FEDU' + suffix]['CDF']

                if not only_fedu:
                    # Energy labels for the per-channel ytitles.
                    ytitle_eV_array = np.round(
                        np.nan_to_num(FEDU_get_data[2][0, :]), 2)
                    # processing for erg_lepe_l3_pa_enech_??(??:01,01,..32)_FEDU
                    for i in range(FEDU_get_data[1].shape[1]):
                        tplot_name = prefix + 'enech_' + \
                            str(i + 1).zfill(2) + '_FEDU' + suffix
                        store_data(tplot_name,
                                   data={'x': FEDU_get_data[0],
                                         'y': FEDU_get_data[1][:, i, :],
                                         'v': FEDU_get_data[3]},
                                   attr_dict={'CDF':FEDU_CDF_data})
                        options(tplot_name, 'spec', 1)
                        ylim(tplot_name, 0, 180)
                        zlim(tplot_name, 1, 1e6)
                        options(tplot_name, 'ytitle', 'ERG LEP-e\n'
                                + str(ytitle_eV_array[i]) + ' eV\nPitch angle')
                        tplot_variables.append(tplot_name)
                    # [1:] skips the FEDU variable itself (already configured).
                    options(tplot_variables[1:], 'zlog', 1)
                    options(tplot_variables[1:], 'ysubtitle', '[deg]')
                    options(tplot_variables[1:], 'yrange', [0, 180])
                    options(tplot_variables[1:], 'colormap', 'jet')
                    options(tplot_variables[1:], 'ztitle', '[/s-cm^{2}-sr-keV/q]')

                if et_diagram:
                    ytitle_deg_array = np.round(
                        np.nan_to_num(FEDU_get_data[3]), 3)
                    # Drop records whose energy table is entirely NaN.
                    all_nan_v_indices_array = np.where(
                        np.all(np.isnan(FEDU_get_data[2]), axis=1))[0]
                    x_all_nan_deleted_array = np.delete(
                        FEDU_get_data[0], all_nan_v_indices_array, axis=0)
                    y_all_nan_deleted_array = np.delete(
                        FEDU_get_data[1], all_nan_v_indices_array, axis=0)
                    v_all_nan_deleted_array = np.delete(
                        FEDU_get_data[2], all_nan_v_indices_array, axis=0)
                    # processing for erg_lepe_l3_pa_pabin_??(??:01,01,..16)_FEDU
                    for i in range(FEDU_get_data[1].shape[2]):
                        tplot_name = prefix + 'pabin_' + \
                            str(i + 1).zfill(2) + '_FEDU' + suffix
                        store_data(tplot_name,
                                   data={'x': x_all_nan_deleted_array,
                                         'y': y_all_nan_deleted_array[:, :, i],
                                         'v': v_all_nan_deleted_array},
                                   attr_dict={'CDF':FEDU_CDF_data})
                        options(tplot_name, 'spec', 1)
                        ylim(tplot_name, 19, 21*1e3)
                        zlim(tplot_name, 1, 1e6)
                        options(tplot_name, 'ytitle', 'ERG LEP-e\n'
                                + str(ytitle_deg_array[i]) + ' deg\nEnergy')
                        tplot_variables.append(tplot_name)
                    # Configure only the pabin variables just appended.
                    options(
                        tplot_variables[-FEDU_get_data[1].shape[2]:], 'ysubtitle', '[eV]')
                    options(
                        tplot_variables[-FEDU_get_data[1].shape[2]:], 'zlog', 1)
                    options(
                        tplot_variables[-FEDU_get_data[1].shape[2]:], 'ylog', 1)
                    options(
                        tplot_variables[-FEDU_get_data[1].shape[2]:], 'colormap', 'jet')
                    options(
                        tplot_variables[-FEDU_get_data[1].shape[2]:], 'ztitle', '[/s-cm^{2}-sr-eV]')

            return tplot_variables

    return loaded_data
def hep(trange=['2017-03-27', '2017-03-28'],
        datatype='omniflux',
        level='l2',
        suffix='',
        get_support_data=False,
        varformat=None,
        varnames=[],
        downloadonly=False,
        notplot=False,
        no_update=False,
        uname=None,
        passwd=None,
        time_clip=False,
        ror=True,
        version=None):
    """
    This function loads data from the HEP experiment from the Arase mission

    Parameters:
        trange : list of str
            time range of interest [starttime, endtime] with the format
            'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
            ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']

        datatype: str
            Data type; Valid options: 'omniflux', '3dflux' (level 'l2');
            level 'l3' always loads the pitch-angle ('pa') files.

        level: str
            Data level; Valid options: 'l2', 'l3'

        suffix: str
            The tplot variable names will be given this suffix.
            By default, no suffix is added.

        get_support_data: bool
            Data with an attribute "VAR_TYPE" with a value of "support_data"
            will be loaded into tplot. By default, only loads in data with a
            "VAR_TYPE" attribute of "data".

        varformat: str
            The file variable formats to load into tplot. Wildcard character
            "*" is accepted. By default, all variables are loaded in.

        varnames: list of str
            List of variable names to load (if not specified,
            all data variables are loaded)

        downloadonly: bool
            Set this flag to download the CDF files, but not load them into
            tplot variables

        notplot: bool
            Return the data in hash tables instead of creating tplot variables

        no_update: bool
            If set, only load data from your local cache

        time_clip: bool
            Time clip the variables to exactly the range specified
            in the trange keyword

        ror: bool
            If set, print PI info and rules of the road

        version: str
            Set this value to specify the version of cdf files
            (such as "v01_02", "v01_03", ...)

    Returns:
        List of tplot variables created.
    """
    file_res = 3600. * 24  # daily files
    prefix = 'erg_hep_'+level+'_'

    if level == 'l2':
        pathformat = 'satellite/erg/hep/'+level+'/'+datatype + \
            '/%Y/%m/erg_hep_'+level+'_'+datatype + '_%Y%m%d_'
        if version is None:
            pathformat += 'v??_??.cdf'  # wildcard: pick up any available version
        else:
            pathformat += version + '.cdf'
    if level == 'l3':
        # L3 only exists as pitch-angle products.
        pathformat = 'satellite/erg/hep/'+level + \
            '/pa/%Y/%m/erg_hep_'+level+'_pa_%Y%m%d_'
        if version is None:
            pathformat += 'v??_??.cdf'
        else:
            pathformat += version + '.cdf'

    # Remember the caller's notplot request; notplot is forced on below for
    # some datatypes, and we must honor the original value when returning.
    initial_notplot_flag = False
    if notplot:
        initial_notplot_flag = True

    if ((level == 'l2') and (datatype == 'omniflux')) or (datatype == '3dflux') or (level == 'l3'):
        # to avoid failure of creation plot variables (at store_data.py) of hep
        notplot = True

    loaded_data = load(pathformat=pathformat, trange=trange, level=level,
                       datatype=datatype, file_res=file_res, prefix=prefix,
                       suffix=suffix, get_support_data=get_support_data,
                       varformat=varformat, varnames=varnames,
                       downloadonly=downloadonly, notplot=notplot,
                       time_clip=time_clip, no_update=no_update,
                       uname=uname, passwd=passwd, version=version)

    if (len(loaded_data) > 0) and ror:
        try:
            # Global attributes come from the CDF file directly when only
            # downloading, otherwise from the stored tplot metadata.
            if isinstance(loaded_data, list):
                if downloadonly:
                    cdf_file = cdflib.CDF(loaded_data[-1])
                    gatt = cdf_file.globalattsget()
                else:
                    gatt = get_data(loaded_data[-1], metadata=True)['CDF']['GATT']
            elif isinstance(loaded_data, dict):
                gatt = loaded_data[list(loaded_data.keys())[-1]]['CDF']['GATT']

            # --- print PI info and rules of the road
            print(' ')
            print('**************************************************************************')
            print(gatt["LOGICAL_SOURCE_DESCRIPTION"])
            print('')
            print('PI: ', gatt['PI_NAME'])
            print("Affiliation: "+gatt["PI_AFFILIATION"])
            print('')
            print('- The rules of the road (RoR) common to the ERG project:')
            print('       https://ergsc.isee.nagoya-u.ac.jp/data_info/rules_of_the_road.shtml.en')
            print('- RoR for HEP data: https://ergsc.isee.nagoya-u.ac.jp/mw/index.php/ErgSat/Hep')
            if level == 'l3':
                print('- RoR for MGF data: https://ergsc.isee.nagoya-u.ac.jp/mw/index.php/ErgSat/Mgf')
            print('')
            print('Contact: erg_hep_info at isee.nagoya-u.ac.jp')
            print('**************************************************************************')
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are not swallowed; RoR printing is best-effort only.
            print('printing PI info and rules of the road was failed')

    if initial_notplot_flag or downloadonly:
        return loaded_data

    if isinstance(loaded_data, dict):

        if (level == 'l2') and (datatype == 'omniflux'):
            tplot_variables = []

            if prefix + 'FEDO_L' + suffix in loaded_data:
                v_vars_min = loaded_data[prefix + 'FEDO_L' + suffix]['v'][0]
                v_vars_max = loaded_data[prefix + 'FEDO_L' + suffix]['v'][1]
                # log average of energy bins
                v_vars = np.power(
                    10., (np.log10(v_vars_min) + np.log10(v_vars_max)) / 2.)
                store_data(prefix + 'FEDO_L' + suffix,
                           data={'x': loaded_data[prefix + 'FEDO_L' + suffix]['x'],
                                 'y': loaded_data[prefix + 'FEDO_L' + suffix]['y'],
                                 'v': v_vars},
                           attr_dict={'CDF':loaded_data[prefix + 'FEDO_L' + suffix]['CDF']})
                tplot_variables.append(prefix + 'FEDO_L' + suffix)

            if prefix + 'FEDO_H' + suffix in loaded_data:
                v_vars_min = loaded_data[prefix + 'FEDO_H' + suffix]['v'][0]
                v_vars_max = loaded_data[prefix + 'FEDO_H' + suffix]['v'][1]
                # log average of energy bins
                v_vars = np.power(
                    10., (np.log10(v_vars_min) + np.log10(v_vars_max)) / 2.)
                store_data(prefix + 'FEDO_H' + suffix,
                           data={'x': loaded_data[prefix + 'FEDO_H' + suffix]['x'],
                                 'y': loaded_data[prefix + 'FEDO_H' + suffix]['y'],
                                 'v': v_vars},
                           attr_dict={'CDF':loaded_data[prefix + 'FEDO_H' + suffix]['CDF']})
                tplot_variables.append(prefix + 'FEDO_H' + suffix)

            # remove minus valuse of y array
            if prefix + 'FEDO_L' + suffix in tplot_variables:
                clip(prefix + 'FEDO_L' + suffix, 0., 1.0e+10)
            if prefix + 'FEDO_H' + suffix in tplot_variables:
                clip(prefix + 'FEDO_H' + suffix, 0., 1.0e+10)

            # set spectrogram plot option
            options(prefix + 'FEDO_L' + suffix, 'Spec', 1)
            options(prefix + 'FEDO_H' + suffix, 'Spec', 1)
            # set y axis to logscale
            options(prefix + 'FEDO_L' + suffix, 'ylog', 1)
            options(prefix + 'FEDO_H' + suffix, 'ylog', 1)
            # set yrange
            options(prefix + 'FEDO_L' + suffix, 'yrange', [3.0e+01, 2.0e+03])
            options(prefix + 'FEDO_H' + suffix, 'yrange', [7.0e+01, 2.0e+03])
            # set ytitle
            options(prefix + 'FEDO_L' + suffix, 'ytitle', 'HEP-L\nomniflux\nLv2\nEnergy')
            options(prefix + 'FEDO_H' + suffix, 'ytitle', 'HEP-H\nomniflux\nLv2\nEnergy')
            # set ysubtitle
            options(prefix + 'FEDO_L' + suffix, 'ysubtitle', '[keV]')
            options(prefix + 'FEDO_H' + suffix, 'ysubtitle', '[keV]')
            # set ylim
            if prefix + 'FEDO_L' + suffix in tplot_variables:
                ylim(prefix + 'FEDO_L' + suffix, 30, 1800)
            if prefix + 'FEDO_H' + suffix in tplot_variables:
                ylim(prefix + 'FEDO_H' + suffix, 500, 2048)
            # set z axis to logscale
            options(prefix + 'FEDO_L' + suffix, 'zlog', 1)
            options(prefix + 'FEDO_H' + suffix, 'zlog', 1)
            # set zrange
            options(prefix + 'FEDO_L' + suffix, 'zrange', [1.0e-15, 1.0e+06])
            options(prefix + 'FEDO_H' + suffix, 'zrange', [1.0e-10, 1.0e+5])
            # set ztitle
            options(prefix + 'FEDO_L' + suffix, 'ztitle', '[/cm^{2}-str-s-keV]')
            options(prefix + 'FEDO_H' + suffix, 'ztitle', '[/cm^{2}-str-s-keV]')
            # set zlim
            if prefix + 'FEDO_L' + suffix in tplot_variables:
                zlim(prefix + 'FEDO_L' + suffix, 1e+0, 1e+5)
            if prefix + 'FEDO_H' + suffix in tplot_variables:
                zlim(prefix + 'FEDO_H' + suffix, 1e+0, 1e+5)
            # change colormap option
            options(prefix + 'FEDO_L' + suffix, 'Colormap', 'jet')
            options(prefix + 'FEDO_H' + suffix, 'Colormap', 'jet')

            return tplot_variables

        if (level == 'l2') and (datatype == '3dflux'):
            tplot_variables = []
            v2_array = [i for i in range(15)]

            if prefix + 'FEDU_L' + suffix in loaded_data:
                store_data(prefix + 'FEDU_L' + suffix,
                           data={'x': loaded_data[prefix + 'FEDU_L' + suffix]['x'],
                                 'y': loaded_data[prefix + 'FEDU_L' + suffix]['y'],
                                 'v1': np.sqrt(loaded_data[prefix + 'FEDU_L' + suffix]['v'][0, :]
                                               * loaded_data[prefix + 'FEDU_L' + suffix]['v'][1, :]),  # geometric mean for 'v1'
                                 'v2': v2_array},
                           attr_dict={'CDF':loaded_data[prefix + 'FEDU_L' + suffix]['CDF']})
                tplot_variables.append(prefix + 'FEDU_L' + suffix)
                clip(prefix + 'FEDU_L' + suffix, -1.0e+10, 1.0e+10)

            if prefix + 'FEDU_H' + suffix in loaded_data:
                store_data(prefix + 'FEDU_H' + suffix,
                           data={'x': loaded_data[prefix + 'FEDU_H' + suffix]['x'],
                                 'y': loaded_data[prefix + 'FEDU_H' + suffix]['y'],
                                 'v1': np.sqrt(loaded_data[prefix + 'FEDU_H' + suffix]['v'][0, :]
                                               * loaded_data[prefix + 'FEDU_H' + suffix]['v'][1, :]),  # geometric mean for 'v1'
                                 'v2': v2_array},
                           attr_dict={'CDF':loaded_data[prefix + 'FEDU_H' + suffix]['CDF']})
                tplot_variables.append(prefix + 'FEDU_H' + suffix)
                clip(prefix + 'FEDU_H' + suffix, -1.0e+10, 1.0e+10)

            return tplot_variables

        if level == 'l3':  # implementation for level = 'l3'
            tplot_variables = []

            if prefix + 'FEDU_L' + suffix in loaded_data:
                L_energy_array_ave = np.sqrt(
                    loaded_data[prefix + 'FEDU_L' + suffix]['v1'][0, :]
                    * loaded_data[prefix + 'FEDU_L' + suffix]['v1'][1, :])  # geometric mean for 'v1'
                # get energy [keV] array for ytitle options
                L_energy_array = np.trunc(L_energy_array_ave).astype(int)
                non_negative_y_array = np.where(
                    loaded_data[prefix + 'FEDU_L' + suffix]['y'] < 0.,
                    np.nan, loaded_data[prefix + 'FEDU_L' + suffix]['y'])
                store_data(prefix + 'FEDU_L' + suffix,
                           data={'x': loaded_data[prefix + 'FEDU_L' + suffix]['x'],
                                 'y': non_negative_y_array,
                                 'v1': L_energy_array_ave,
                                 'v2': loaded_data[prefix + 'FEDU_L' + suffix]['v2']},
                           attr_dict={'CDF':loaded_data[prefix + 'FEDU_L' + suffix]['CDF']})
                options(prefix + 'FEDU_L' + suffix, 'spec', 1)
                # set ylim
                ylim(prefix + 'FEDU_L' + suffix, 0, 180)
                # set zlim
                zlim(prefix + 'FEDU_L' + suffix, 1e+2, 1e+6)
                tplot_variables.append(prefix + 'FEDU_L' + suffix)

                # make Tplot Variables of erg_hep_l3_FEDU_L_paspec_ene?? (??: 00, 01, 02, ..., 15)
                for i in range(loaded_data[prefix + 'FEDU_L' + suffix]['y'].shape[1]):
                    tplot_name = prefix + 'FEDU_L_paspec_ene' + \
                        str(i).zfill(2) + suffix
                    store_data(tplot_name,
                               data={'x': loaded_data[prefix + 'FEDU_L' + suffix]['x'],
                                     'y': non_negative_y_array[:, i, :],
                                     'v': loaded_data[prefix + 'FEDU_L' + suffix]['v2']},
                               attr_dict={'CDF':loaded_data[prefix + 'FEDU_L' + suffix]['CDF']})
                    options(tplot_name, 'spec', 1)
                    # set ylim
                    ylim(tplot_name, 0, 180)
                    # set zlim
                    zlim(tplot_name, 1e+2, 1e+6)
                    # set ytitle
                    options(
                        tplot_name, 'ytitle', f'HEP-L\nEne{str(i).zfill(2)}\n{L_energy_array[i]} keV')
                    tplot_variables.append(tplot_name)

            if prefix + 'FEDU_H' + suffix in loaded_data:
                H_energy_array_ave = np.sqrt(
                    loaded_data[prefix + 'FEDU_H' + suffix]['v1'][0, :]
                    * loaded_data[prefix + 'FEDU_H' + suffix]['v1'][1, :])  # geometric mean for 'v1'
                # get energy [keV] array for ytitle options
                H_energy_array = np.trunc(H_energy_array_ave).astype(int)
                non_negative_y_array = np.where(
                    loaded_data[prefix + 'FEDU_H' + suffix]['y'] < 0.,
                    np.nan, loaded_data[prefix + 'FEDU_H' + suffix]['y'])
                store_data(prefix + 'FEDU_H' + suffix,
                           data={'x': loaded_data[prefix + 'FEDU_H' + suffix]['x'],
                                 'y': non_negative_y_array,
                                 'v1': H_energy_array_ave,
                                 'v2': loaded_data[prefix + 'FEDU_H' + suffix]['v2']},
                           attr_dict={'CDF':loaded_data[prefix + 'FEDU_H' + suffix]['CDF']})
                options(prefix + 'FEDU_H' + suffix, 'spec', 1)
                # set ylim
                ylim(prefix + 'FEDU_H' + suffix, 0, 180)
                # set zlim
                zlim(prefix + 'FEDU_H' + suffix, 1e+1, 1e+4)
                tplot_variables.append(prefix + 'FEDU_H' + suffix)

                # make Tplot Variables of erg_hep_l3_FEDU_H_paspec_ene?? (??: 00, 01, 02, ..., 10)
                for i in range(loaded_data[prefix + 'FEDU_H' + suffix]['y'].shape[1]):
                    tplot_name = prefix + 'FEDU_H_paspec_ene' + \
                        str(i).zfill(2) + suffix
                    store_data(tplot_name,
                               data={'x': loaded_data[prefix + 'FEDU_H' + suffix]['x'],
                                     'y': non_negative_y_array[:, i, :],
                                     'v': loaded_data[prefix + 'FEDU_H' + suffix]['v2']},
                               attr_dict={'CDF':loaded_data[prefix + 'FEDU_H' + suffix]['CDF']})
                    options(tplot_name, 'spec', 1)
                    # set ylim
                    ylim(tplot_name, 0, 180)
                    # set zlim
                    zlim(tplot_name, 1e+1, 1e+4)
                    # set ytitle
                    options(
                        tplot_name, 'ytitle', f'HEP-H\nEne{str(i).zfill(2)}\n{H_energy_array[i]} keV')
                    tplot_variables.append(tplot_name)

            # set z axis to logscale
            options(tplot_variables, 'zlog', 1)
            # change colormap option
            options(tplot_variables, 'colormap', 'jet')
            # set ysubtitle
            options(tplot_variables, 'ysubtitle', 'PA [deg]')
            # set ztitle
            options(tplot_variables, 'ztitle', '[/keV/cm^{2}/sr/s]')

            return tplot_variables

    return loaded_data
def pwe_efd(trange=['2017-04-01', '2017-04-02'],
            datatype='E_spin',
            level='l2',
            suffix='',
            coord='dsi',
            get_support_data=False,
            varformat=None,
            varnames=[],
            downloadonly=False,
            notplot=False,
            no_update=False,
            uname=None,
            passwd=None,
            time_clip=False,
            ror=True):
    """
    This function loads data from the PWE experiment from the Arase mission

    Parameters:
        trange : list of str
            time range of interest [starttime, endtime] with the format
            'YYYY-MM-DD','YYYY-MM-DD'] or to specify more or less than a day
            ['YYYY-MM-DD/hh:mm:ss','YYYY-MM-DD/hh:mm:ss']

        datatype: str
            Data type; Valid options include 'E_spin' (default), 'pot',
            'pot8Hz', 'spec', and waveform types containing '64' or '256'
            (loaded as E64Hz / E256Hz files).

        level: str
            Data level; Valid options: 'l2'

        suffix: str
            The tplot variable names will be given this suffix.
            By default, no suffix is added.

        get_support_data: bool
            Data with an attribute "VAR_TYPE" with a value of "support_data"
            will be loaded into tplot. By default, only loads in data with a
            "VAR_TYPE" attribute of "data".

        varformat: str
            The file variable formats to load into tplot. Wildcard character
            "*" is accepted. By default, all variables are loaded in.

        varnames: list of str
            List of variable names to load (if not specified,
            all data variables are loaded)

        downloadonly: bool
            Set this flag to download the CDF files, but not load them into
            tplot variables

        notplot: bool
            Return the data in hash tables instead of creating tplot variables

        no_update: bool
            If set, only load data from your local cache

        time_clip: bool
            Time clip the variables to exactly the range specified
            in the trange keyword

        ror: bool
            If set, print PI info and rules of the road

    Returns:
        List of tplot variables created.
    """
    # Remember the caller's notplot request before any internal changes.
    initial_notplot_flag = False
    if notplot:
        initial_notplot_flag = True

    file_res = 3600. * 24  # daily files
    prefix = 'erg_pwe_efd_' + level + '_'

    if ('64' in datatype) or ('256' in datatype):
        # Waveform products: file/variable names depend on sampling mode and
        # coordinate system.
        if '64' in datatype:
            mode = '64Hz'
        elif '256' in datatype:
            mode = '256Hz'
        md = 'E' + mode
        pathformat = 'satellite/erg/pwe/efd/'+level+'/'+md + \
            '/%Y/%m/erg_pwe_efd_'+level+'_'+md+'_'+coord+'_%Y%m%d_v??_??.cdf'
        prefix += md + '_' + coord + '_'
        if coord == 'wpt':
            component = ['Eu_waveform', 'Ev_waveform']
        elif coord == 'dsi':
            component = [
                'Ex_waveform', 'Ey_waveform',
                'Eu_offset' + '_' + mode, 'Ev_offset' + '_' + mode
            ]
    else:
        pathformat = 'satellite/erg/pwe/efd/'+level+'/'+datatype + \
            '/%Y/%m/erg_pwe_efd_'+level+'_'+datatype+'_%Y%m%d_v??_??.cdf'
        prefix += datatype + '_'

        if 'spin' in datatype:
            component = ['Eu', 'Ev', 'Eu1', 'Ev1', 'Eu2', 'Ev2']
            labels = ['Ex', 'Ey']

        if datatype == 'pot':
            component = ['Vu1', 'Vu2', 'Vv1', 'Vv2']

        if datatype == 'pot8Hz':
            component = [
                'Vu1_waveform_8Hz', 'Vu2_waveform_8Hz',
                'Vv1_waveform_8Hz', 'Vv2_waveform_8Hz'
            ]

    loaded_data = load(pathformat=pathformat, trange=trange, level=level,
                       datatype=datatype, file_res=file_res, prefix=prefix,
                       suffix=suffix, get_support_data=get_support_data,
                       varformat=varformat, varnames=varnames,
                       downloadonly=downloadonly, notplot=notplot,
                       time_clip=time_clip, no_update=no_update,
                       uname=uname, passwd=passwd)

    if (len(loaded_data) > 0) and ror:
        try:
            # Global attributes come from the CDF file directly when only
            # downloading, otherwise from the stored tplot metadata.
            if isinstance(loaded_data, list):
                if downloadonly:
                    cdf_file = cdflib.CDF(loaded_data[-1])
                    gatt = cdf_file.globalattsget()
                else:
                    gatt = get_data(loaded_data[-1], metadata=True)['CDF']['GATT']
            elif isinstance(loaded_data, dict):
                gatt = loaded_data[list(loaded_data.keys())[-1]]['CDF']['GATT']

            # --- print PI info and rules of the road
            print(' ')
            print(' ')
            print('**************************************************************************')
            print(gatt["LOGICAL_SOURCE_DESCRIPTION"])
            print('')
            print('Information about ERG PWE EFD')
            print('')
            print('PI: ', gatt['PI_NAME'])
            print("Affiliation: " + gatt["PI_AFFILIATION"])
            print('')
            print('RoR of ERG project common: https://ergsc.isee.nagoya-u.ac.jp/data_info/rules_of_the_road.shtml.en')
            print('RoR of PWE/EFD: https://ergsc.isee.nagoya-u.ac.jp/mw/index.php/ErgSat/Pwe/Efd')
            print('')
            print('Contact: erg_pwe_info at isee.nagoya-u.ac.jp')
            print('**************************************************************************')
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are not swallowed; RoR printing is best-effort only.
            print('printing PI info and rules of the road was failed')

    if initial_notplot_flag or downloadonly:
        return loaded_data

    time_min_max = time_float(trange)

    if 'spin' in datatype:
        for elem in component:
            t_plot_name = prefix + elem + '_dsi'
            options(t_plot_name, 'ytitle', elem + ' vector in DSI')
            options(t_plot_name, 'legend_names', labels)
            # ylim settings because pytplot.timespan() doesn't affect in ylim.
            # May be it will be no need in future.
            get_data_vars = get_data(t_plot_name)
            if get_data_vars[0][0] < time_min_max[0]:
                min_time_index = np.where(
                    (get_data_vars[0] <= time_min_max[0]))[0][-1]
            else:
                min_time_index = 0
            if time_min_max[1] < get_data_vars[0][-1]:
                max_time_index = np.where(
                    time_min_max[1] <= get_data_vars[0])[0][0]
            else:
                max_time_index = -1
            ylim_min = np.nanmin(
                get_data_vars[1][min_time_index:max_time_index])
            ylim_max = np.nanmax(
                get_data_vars[1][min_time_index:max_time_index])
            ylim(t_plot_name, ylim_min, ylim_max)

    if ('64' in datatype) or ('256' in datatype) or (datatype == 'pot8Hz'):
        for elem in component:
            t_plot_name = prefix + elem
            get_data_vars = get_data(t_plot_name)
            dl_in = get_data(t_plot_name, metadata=True)
            time1 = get_data_vars[0]
            # Fill values (<= -1e+30) become NaN so min/max below ignore them.
            data = np.where(get_data_vars[1] <= -1e+30,
                            np.nan, get_data_vars[1])
            dt = get_data_vars[2]
            ndt = dt.size
            ndata = data.size
            # Expand each record's sub-sample offsets (dt, in ms) into a flat
            # 1-D time axis, and flatten the waveform samples to match.
            time_new = (np.tile(time1, (ndt, 1)).T + dt * 1e-3).reshape(ndata)
            data_new = data.reshape(ndata)
            store_data(t_plot_name, data={
                'x': time_new,
                'y': data_new
            }, attr_dict=dl_in)
            options(t_plot_name, 'ytitle', '\n'.join(t_plot_name.split('_')))
            # ylim settings because pytplot.timespan() doesn't affect in ylim.
            # May be it will be no need in future.
            if time_new[0] < time_min_max[0]:
                min_time_index = np.where(
                    (time_new <= time_min_max[0]))[0][-1]
            else:
                min_time_index = 0
            if time_min_max[1] < time_new[-1]:
                max_time_index = np.where(
                    time_min_max[1] <= time_new)[0][0]
            else:
                max_time_index = -1
            ylim_min = np.nanmin(data_new[min_time_index:max_time_index])
            ylim_max = np.nanmax(data_new[min_time_index:max_time_index])
            ylim(t_plot_name, ylim_min, ylim_max)

    if datatype == 'pot':
        for elem in component:
            t_plot_name = prefix + elem
            options(t_plot_name, 'ytitle', elem + ' potential')

    if datatype == 'spec':
        options(tnames(prefix + '*spectra*'), 'spec', 1)
        options(tnames(prefix + '*spectra*'), 'colormap', 'jet')
        options(tnames(prefix + '*spectra*'), 'zlog', 1)
        ylim(prefix + 'spectra', 0, 100)
        zlim(prefix + 'spectra', 1e-6, 1e-2)
        options(tnames(prefix + '*spectra*'), 'ysubtitle', '[Hz]')
        options(tnames(prefix + '*spectra*'), 'ztitle', '[mV^2/m^2/Hz]')
        for t_plot_name in (tnames(prefix + '*spectra*')):
            options(t_plot_name, 'ytitle', '\n'.join(t_plot_name.split('_')))

    return loaded_data