def load(trange=['2020-06-01', '2020-06-02'], instrument='mag', datatype='rtn-normal', mode=None, level='l2', suffix='', get_support_data=False, varformat=None, varnames=[], downloadonly=False, notplot=False, no_update=False, time_clip=False):
    """
    This function loads data from the Solar Orbiter mission; this function is not meant
    to be called directly; instead, see the wrappers:
        pyspedas.solo.mag
        pyspedas.solo.epd
        pyspedas.solo.rpw
        pyspedas.solo.swa
    """
    # Defaults for L2, L3 data
    science_or_low_latency = 'science'
    date_format = '%Y%m%d'
    cdf_version = '??'
    res = 24*3600.

    if level == 'll02':
        science_or_low_latency = 'low_latency'
        date_format = '%Y%m%dt%H%M??-*'
        cdf_version = '???'
        res = 60.0

    if instrument == 'mag':
        pathformat = instrument+'/'+science_or_low_latency+'/'+level+'/'+datatype+'/%Y/solo_'+level+'_'+instrument+'-'+datatype+'_'+date_format+'_v'+cdf_version+'.cdf'
    elif instrument == 'epd':
        pathformat = instrument+'/'+science_or_low_latency+'/'+level+'/'+datatype+'/'+mode+'/%Y/solo_'+level+'_'+instrument+'-'+datatype+'-'+mode+'_'+date_format+'_v'+cdf_version+'.cdf'
    elif instrument == 'rpw':
        pathformat = instrument+'/'+science_or_low_latency+'/'+level+'/'+datatype+'/%Y/solo_'+level+'_'+instrument+'-'+datatype+'_'+date_format+'_v'+cdf_version+'.cdf'
    elif instrument == 'swa':
        if level == 'l2' or level == 'll02':
            if datatype == 'pas-eflux' or datatype == 'pas-grnd-mom' or datatype == 'pas-vdf':
                pathformat = instrument+'/'+science_or_low_latency+'/'+level+'/'+datatype+'/%Y/solo_'+level+'_'+instrument+'-'+datatype+'_'+date_format+'_v'+cdf_version+'.cdf'
            else:
                date_format = '%Y%m%dt%H%M??-*'
                res = 60.0
                pathformat = instrument+'/'+science_or_low_latency+'/'+level+'/'+datatype+'/%Y/solo_'+level+'_'+instrument+'-'+datatype+'_'+date_format+'_v'+cdf_version+'.cdf'
        elif level == 'l1':
            if datatype == 'his-pha' or datatype == 'his-sensorrates' or datatype == 'pas-3d' or datatype == 'pas-cal' or datatype == 'pas-mom':
                pathformat = instrument+'/'+science_or_low_latency+'/'+level+'/'+datatype+'/%Y/solo_'+level+'_'+instrument+'-'+datatype+'_'+date_format+'_v'+cdf_version+'.cdf'
            else:
                date_format = '%Y%m%dt%H%M??-*'
                res = 60.0
                pathformat = instrument+'/'+science_or_low_latency+'/'+level+'/'+datatype+'/%Y/solo_'+level+'_'+instrument+'-'+datatype+'_'+date_format+'_v'+cdf_version+'.cdf'

    # find the full remote path names using the trange
    remote_names = dailynames(file_format=pathformat, trange=trange, res=res)

    out_files = []
    files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'], no_download=no_update)
    if files is not None:
        for file in files:
            out_files.append(file)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
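# Usage sketch (an assumption, since the wrappers are not defined in this file): the
# pyspedas.solo.mag wrapper named in the docstring is expected to forward its keywords
# to load() with instrument='mag'. Roughly:
#
#     import pyspedas
#     mag_vars = pyspedas.solo.mag(trange=['2020-06-01', '2020-06-02'], datatype='rtn-normal', level='l2')
#     print(mag_vars)  # names of the tplot variables created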
def load(trange=['2013-11-5', '2013-11-6'], instrument='mgf', datatype='k0', suffix='', get_support_data=False, varformat=None, varnames=[], downloadonly=False, notplot=False, no_update=False, time_clip=False):
    """
    This function loads data from the Geotail mission; this function is not meant
    to be called directly; instead, see the wrappers:
        pyspedas.geotail.mgf
        pyspedas.geotail.efd
        pyspedas.geotail.lep
        pyspedas.geotail.cpi
        pyspedas.geotail.epi
        pyspedas.geotail.pwi
    """
    tvars_created = []

    if instrument == 'mgf':
        if datatype == 'k0':
            pathformat = 'mgf/mgf_k0/%Y/ge_' + datatype + '_mgf_%Y%m%d_v??.cdf'
        elif datatype == 'eda3sec' or datatype == 'edb3sec':
            pathformat = 'mgf/' + datatype + '_mgf/%Y/ge_' + datatype + '_mgf_%Y%m%d_v??.cdf'
    elif instrument == 'efd':
        pathformat = instrument + '/' + instrument + '_' + datatype + '/%Y/ge_' + datatype + '_' + instrument + '_%Y%m%d_v??.cdf'
    elif instrument == 'lep':
        if datatype == 'k0':
            pathformat = 'lep/lep_k0/%Y/ge_' + datatype + '_lep_%Y%m%d_v??.cdf'
    elif instrument == 'cpi':
        pathformat = instrument + '/' + instrument + '_' + datatype + '/%Y/ge_' + datatype + '_' + instrument + '_%Y%m%d_v??.cdf'
    elif instrument == 'epi':
        pathformat = 'epic/' + instrument + '_' + datatype + '/%Y/ge_' + datatype + '_' + instrument + '_%Y%m%d_v??.cdf'
    elif instrument == 'pwi':
        pathformat = instrument + '/' + instrument + '_' + datatype + '/%Y/ge_' + datatype + '_' + instrument + '_%Y%m%d_v??.cdf'

    # find the full remote path names using the trange
    remote_names = dailynames(file_format=pathformat, trange=trange)

    out_files = []
    files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'], no_download=no_update)
    if files is not None:
        for file in files:
            out_files.append(file)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
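# Usage sketch (an assumption about the public wrapper, which is not defined in this
# file): pyspedas.geotail.mgf is expected to call load() with instrument='mgf', e.g.
#
#     import pyspedas
#     mgf_vars = pyspedas.geotail.mgf(trange=['2013-11-5', '2013-11-6'], datatype='k0')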
def load(trange=['2017-03-27', '2017-03-28'], pathformat=None, instrument='mgf', datatype='8sec', mode=None, site=None, model=None, level='l2', prefix='', suffix='', file_res=24 * 3600., get_support_data=False, varformat=None, varnames=[], downloadonly=False, notplot=False, no_update=False, uname=None, passwd=None, time_clip=False, version=None):
    """
    This function is not meant to be called directly; please see the instrument specific wrappers:
        pyspedas.erg.mgf()
        pyspedas.erg.hep()
        pyspedas.erg.orb()
        pyspedas.erg.lepe()
        pyspedas.erg.lepi()
        pyspedas.erg.mepe()
        pyspedas.erg.mepi()
        pyspedas.erg.pwe_ofa()
        pyspedas.erg.pwe_efd()
        pyspedas.erg.pwe_hfa()
        pyspedas.erg.xep()
    """
    # find the full remote path names using the trange
    remote_names = dailynames(file_format=pathformat, trange=trange, res=file_res)

    out_files = []
    files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'], no_download=no_update, last_version=True, username=uname, password=passwd)
    if files is not None:
        for file in files:
            out_files.append(file)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files, prefix=prefix, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, notplot=notplot)

    if notplot:
        if len(out_files) > 0:
            cdf_file = cdflib.CDF(out_files[-1])
            cdf_info = cdf_file.cdf_info()
            all_cdf_variables = cdf_info['rVariables'] + cdf_info['zVariables']
            gatt = cdf_file.globalattsget()
            for var in all_cdf_variables:
                t_plot_name = prefix + var + suffix
                if t_plot_name in tvars:
                    vatt = cdf_file.varattsget(var)
                    tvars[t_plot_name]['CDF'] = {'VATT': vatt, 'GATT': gatt, 'FILENAME': out_files[-1]}
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
def mms_load_att_tplot(filenames, level='def', probe='1', datatypes=['spinras', 'spindec'], suffix='', trange=None):
    """
    Helper routine for loading state data (ASCII files from the SDC);
    not meant to be called directly; see pyspedas.mms.state instead
    """
    prefix = 'mms' + probe
    file_times = []
    file_lras = []
    file_ldecs = []
    tvalues = {}

    logging.info('Loading attitude files can take some time; please be patient...')

    for file in filenames:
        logging.info('Loading ' + file)
        rows = pd.read_csv(file, delim_whitespace=True, header=None, skiprows=49)
        times = rows.shape[0] - 1
        time_values = np.empty(times)
        lra_values = np.empty(times)
        ldec_values = np.empty(times)

        for time_idx in range(0, times):
            time_values[time_idx] = pd.to_datetime(rows[0][time_idx], format='%Y-%jT%H:%M:%S.%f').timestamp()
            # these files can overlap, so avoid duplicates
            if tvalues.get(time_values[time_idx]):
                continue
            tvalues[time_values[time_idx]] = 1
            lra_values[time_idx] = rows[13][time_idx]
            ldec_values[time_idx] = rows[14][time_idx]

        file_times.append(time_values)
        file_lras.append(lra_values)
        file_ldecs.append(ldec_values)

    file_times_array = np.concatenate(file_times)
    file_lras_array = np.concatenate(file_lras)
    file_ldecs_array = np.concatenate(file_ldecs)

    file_times_sorted_idx = np.argsort(file_times_array)
    file_times_sorted = file_times_array[file_times_sorted_idx]
    file_lras_sorted = file_lras_array[file_times_sorted_idx]
    file_ldecs_sorted = file_ldecs_array[file_times_sorted_idx]

    file_times_uniq = np.unique(file_times_sorted, return_index=True)
    file_lras_out = file_lras_sorted[file_times_uniq[1]]
    file_ldecs_out = file_ldecs_sorted[file_times_uniq[1]]

    if 'spinras' in datatypes:
        store_data(prefix + '_' + level + 'att_spinras' + suffix, data={'x': file_times_uniq[0], 'y': file_lras_out})
        tclip(prefix + '_' + level + 'att_spinras' + suffix, trange[0], trange[1], suffix='')

    if 'spindec' in datatypes:
        store_data(prefix + '_' + level + 'att_spindec' + suffix, data={'x': file_times_uniq[0], 'y': file_ldecs_out})
        tclip(prefix + '_' + level + 'att_spindec' + suffix, trange[0], trange[1], suffix='')
def load(trange=['2018-11-5', '2018-11-6'], probe='1', instrument='fgm', datatype='up', suffix='', get_support_data=False, varformat=None, varnames=[], downloadonly=False, notplot=False, no_update=False, time_clip=False):
    """
    This function loads data from the Cluster mission; this function is not meant
    to be called directly; instead, see the wrappers:
        pyspedas.cluster.fgm
        pyspedas.cluster.aspoc
        pyspedas.cluster.cis
        pyspedas.cluster.dwp
        pyspedas.cluster.edi
        pyspedas.cluster.efw
        pyspedas.cluster.peace
        pyspedas.cluster.rapid
        pyspedas.cluster.staff
        pyspedas.cluster.wbd
        pyspedas.cluster.whi
    """
    if not isinstance(probe, list):
        probe = [probe]

    probe = [str(prb) for prb in probe]  # these will need to be strings from now on

    out_files = []
    res = 24 * 3600

    if instrument != 'wbd':
        # note: can't use last_version with WBD data due to using wild cards for the times (and not just in the version)
        last_version = True
    else:
        last_version = False

    for prb in probe:
        if instrument == 'fgm':
            if datatype == 'cp':
                pathformat = 'c' + prb + '/cp/%Y/c' + prb + '_cp_fgm_spin_%Y%m%d_v??.cdf'
            else:
                pathformat = 'c' + prb + '/' + datatype + '/' + instrument + '/%Y/c' + prb + '_' + datatype + '_' + instrument + '_%Y%m%d_v??.cdf'
        elif instrument == 'aspoc':
            pathformat = 'c' + prb + '/' + datatype + '/asp/%Y/c' + prb + '_' + datatype + '_asp_%Y%m%d_v??.cdf'
        elif instrument == 'cis':
            pathformat = 'c' + prb + '/' + datatype + '/' + instrument + '/%Y/c' + prb + '_' + datatype + '_' + instrument + '_%Y%m%d_v??.cdf'
        elif instrument == 'dwp':
            pathformat = 'c' + prb + '/' + datatype + '/' + instrument + '/%Y/c' + prb + '_' + datatype + '_' + instrument + '_%Y%m%d_v??.cdf'
        elif instrument == 'edi':
            pathformat = 'c' + prb + '/' + datatype + '/' + instrument + '/%Y/c' + prb + '_' + datatype + '_' + instrument + '_%Y%m%d_v??.cdf'
        elif instrument == 'efw':
            pathformat = 'c' + prb + '/' + datatype + '/' + instrument + '/%Y/c' + prb + '_' + datatype + '_' + instrument + '_%Y%m%d_v??.cdf'
        elif instrument == 'peace':
            pathformat = 'c' + prb + '/' + datatype + '/pea/%Y/c' + prb + '_' + datatype + '_pea_%Y%m%d_v??.cdf'
        elif instrument == 'rapid':
            pathformat = 'c' + prb + '/' + datatype + '/rap/%Y/c' + prb + '_' + datatype + '_rap_%Y%m%d_v??.cdf'
        elif instrument == 'staff':
            pathformat = 'c' + prb + '/' + datatype + '/sta/%Y/c' + prb + '_' + datatype + '_sta_%Y%m%d_v??.cdf'
        elif instrument == 'whi':
            pathformat = 'c' + prb + '/' + datatype + '/' + instrument + '/%Y/c' + prb + '_' + datatype + '_' + instrument + '_%Y%m%d_v??.cdf'
        elif instrument == 'wbd':
            pathformat = 'c' + prb + '/' + instrument + '/%Y/%m/c' + prb + '_' + datatype + '_' + instrument + '_%Y%m%d%H%M_v??.cdf'
            res = 600.0

        # find the full remote path names using the trange
        remote_names = dailynames(file_format=pathformat, trange=trange, res=res)

        files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'], no_download=no_update, last_version=last_version)
        if files is not None:
            for file in files:
                out_files.append(file)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
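# Usage sketch (an assumption about the public wrapper, which is not defined in this
# file): pyspedas.cluster.fgm is expected to call load() with instrument='fgm', e.g.
#
#     import pyspedas
#     fgm_vars = pyspedas.cluster.fgm(trange=['2018-11-5', '2018-11-6'], probe='1')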
def load(trange=['1997-01-03', '1997-01-04'], instrument='mfe', datatype='k0', suffix='', get_support_data=False, varformat=None, varnames=[], downloadonly=False, notplot=False, no_update=False, time_clip=False):
    """
    This function loads data from the Polar mission; this function is not meant
    to be called directly; instead, see the wrappers:
        pyspedas.polar.mfe
        pyspedas.polar.efi
        pyspedas.polar.pwi
        pyspedas.polar.hydra
        pyspedas.polar.tide
        pyspedas.polar.timas
        pyspedas.polar.cammice
        pyspedas.polar.ceppad
        pyspedas.polar.uvi
        pyspedas.polar.vis
        pyspedas.polar.pixie
        pyspedas.polar.orbit
    """
    if instrument == 'mfe':
        pathformat = instrument + '/' + instrument + '_' + datatype + '/%Y/po_' + datatype + '_' + instrument + '_%Y%m%d_v??.cdf'
    elif instrument == 'efi':
        pathformat = instrument + '/' + instrument + '_' + datatype + '/%Y/po_' + datatype + '_' + instrument + '_%Y%m%d_v??.cdf'
    elif instrument == 'pwi':
        pathformat = instrument + '/' + instrument + '_' + datatype + '/%Y/po_' + datatype + '_' + instrument + '_%Y%m%d_v??.cdf'
    elif instrument == 'hydra':
        pathformat = instrument + '/' + instrument + '_' + datatype + '/%Y/po_' + datatype + '_hyd_%Y%m%d_v??.cdf'
    elif instrument == 'tide':
        pathformat = instrument + '/' + instrument + '_' + datatype + '/%Y/po_' + datatype + '_tid_%Y%m%d_v??.cdf'
    elif instrument == 'timas':
        pathformat = instrument + '/' + instrument + '_' + datatype + '/%Y/po_' + datatype + '_tim_%Y%m%d_v??.cdf'
    elif instrument == 'cammice':
        pathformat = instrument + '/' + instrument + '_' + datatype + '/%Y/po_' + datatype + '_cam_%Y%m%d_v??.cdf'
    elif instrument == 'ceppad':
        pathformat = instrument + '/' + instrument + '_' + datatype + '/%Y/po_' + datatype + '_cep_%Y%m%d_v??.cdf'
    elif instrument == 'uvi':
        pathformat = instrument + '/' + instrument + '_' + datatype + '/%Y/po_' + datatype + '_' + instrument + '_%Y%m%d_v??.cdf'
    elif instrument == 'vis':
        pathformat = instrument + '/' + instrument + '_' + datatype + '/%Y/po_' + datatype + '_' + instrument + '_%Y%m%d_v??.cdf'
    elif instrument == 'pixie':
        pathformat = instrument + '/' + instrument + '_' + datatype + '/%Y/po_' + datatype + '_pix_%Y%m%d_v??.cdf'
    elif instrument == 'spha':
        pathformat = 'orbit/' + instrument + '_' + datatype + '/%Y/po_' + datatype + '_' + instrument + '_%Y%m%d_v??.cdf'

    # find the full remote path names using the trange
    remote_names = dailynames(file_format=pathformat, trange=trange)

    out_files = []
    files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'], no_download=no_update)
    if files is not None:
        for file in files:
            out_files.append(file)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
def load(trange=['2013-11-5', '2013-11-6'], instrument='fgm', datatype='k0', suffix='', get_support_data=False, varformat=None, downloadonly=False, notplot=False, no_update=False, varnames=[], time_clip=False):
    """
    This function loads data from the ACE mission; this function is not meant
    to be called directly; instead, see the wrappers:
        pyspedas.ace.mfi
        pyspedas.ace.swe
        pyspedas.ace.epam
        pyspedas.ace.cris
        pyspedas.ace.sis
        pyspedas.ace.uleis
        pyspedas.ace.sepica
        pyspedas.ace.swics
    """
    if instrument == 'fgm':
        pathformat = 'mag/level_2_cdaweb/mfi_' + datatype + '/%Y/ac_' + datatype + '_mfi_%Y%m%d_v??.cdf'
    elif instrument == 'swe':
        pathformat = 'swepam/level_2_cdaweb/swe_' + datatype + '/%Y/ac_' + datatype + '_swe_%Y%m%d_v??.cdf'
    elif instrument == 'epm':
        pathformat = 'epam/level_2_cdaweb/epm_' + datatype + '/%Y/ac_' + datatype + '_epm_%Y%m%d_v??.cdf'
    elif instrument == 'cris':
        pathformat = 'cris/level_2_cdaweb/cris_' + datatype + '/%Y/ac_' + datatype + '_cris_%Y%m%d_v??.cdf'
    elif instrument == 'sis':
        pathformat = 'sis/level_2_cdaweb/sis_' + datatype + '/%Y/ac_' + datatype + '_sis_%Y%m%d_v??.cdf'
    elif instrument == 'ule':
        pathformat = 'uleis/level_2_cdaweb/ule_' + datatype + '/%Y/ac_' + datatype + '_ule_%Y%m%d_v??.cdf'
    elif instrument == 'sep':
        pathformat = 'sepica/level_2_cdaweb/sep_' + datatype + '/%Y/ac_' + datatype + '_sep_%Y%m%d_v??.cdf'
    elif instrument == 'swics':
        filename_dtype = datatype.split('_')[1] + '_' + datatype.split('_')[0]
        pathformat = 'swics/level_2_cdaweb/' + datatype + '/%Y/ac_' + filename_dtype + '_%Y%m%d_v??.cdf'

    # find the full remote path names using the trange
    remote_names = dailynames(file_format=pathformat, trange=trange)

    out_files = []
    files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'], no_download=no_update)
    if files is not None:
        for file in files:
            out_files.append(file)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
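# Usage sketch (an assumption about the public wrapper, which is not defined in this
# file): the magnetometer wrapper is listed in the docstring as pyspedas.ace.mfi, and
# it is expected to call load() with the internal instrument key 'fgm', e.g.
#
#     import pyspedas
#     mfi_vars = pyspedas.ace.mfi(trange=['2013-11-5', '2013-11-6'], datatype='k0')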
def mms_load_data_spdf(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srvy', level='l2', instrument='fgm', datatype='', varformat=None, suffix='', get_support_data=False, time_clip=False, no_update=False, center_measurement=False, available=False, notplot=False, latest_version=False, major_version=False, min_version=None, cdf_version=None, varnames=[]):
    """
    This function loads MMS data from NASA SPDF into pyTplot variables

    This function is not meant to be called directly. Please see the individual
    load routines for documentation and use.
    """
    tvars_created = []

    if not isinstance(probe, list):
        probe = [probe]
    if not isinstance(data_rate, list):
        data_rate = [data_rate]
    if not isinstance(level, list):
        level = [level]
    if not isinstance(datatype, list):
        datatype = [datatype]

    for prb in probe:
        for lvl in level:
            for drate in data_rate:
                if drate == 'brst':
                    time_format = '%Y%m%d%H%M??'
                    file_res = 60.
                else:
                    time_format = '%Y%m%d'
                    file_res = 24 * 3600.

                for dtype in datatype:
                    remote_path = 'mms' + prb + '/' + instrument + '/' + drate + '/' + lvl + '/'

                    if instrument == 'fgm':
                        pathformat = remote_path + '%Y/%m/mms' + prb + '_fgm_' + drate + '_' + lvl + '_' + time_format + '_v*.cdf'
                    elif instrument == 'aspoc':
                        pathformat = remote_path + '%Y/%m/mms' + prb + '_aspoc_' + drate + '_' + lvl + '_' + time_format + '_v*.cdf'
                    elif instrument == 'edi':
                        pathformat = remote_path + dtype + '/%Y/%m/mms' + prb + '_edi_' + drate + '_' + lvl + '_' + dtype + '_' + time_format + '_v*.cdf'
                    elif instrument == 'fpi':
                        if drate != 'brst':
                            time_format = '%Y%m%d??0000'
                        pathformat = remote_path + dtype + '/%Y/%m/mms' + prb + '_fpi_' + drate + '_' + lvl + '_' + dtype + '_' + time_format + '_v*.cdf'
                    elif instrument == 'epd-eis':
                        pathformat = remote_path + dtype + '/%Y/%m/mms' + prb + '_epd-eis_' + drate + '_' + lvl + '_' + dtype + '_' + time_format + '_v*.cdf'
                    elif instrument == 'feeps':
                        if drate != 'brst':
                            time_format = '%Y%m%d000000'
                        pathformat = remote_path + dtype + '/%Y/%m/mms' + prb + '_feeps_' + drate + '_' + lvl + '_' + dtype + '_' + time_format + '_v*.cdf'
                    elif instrument == 'hpca':
                        time_format = '%Y%m%d??????'
                        pathformat = remote_path + dtype + '/%Y/%m/mms' + prb + '_hpca_' + drate + '_' + lvl + '_' + dtype + '_' + time_format + '_v*.cdf'
                    elif instrument == 'mec':
                        pathformat = remote_path + dtype + '/%Y/%m/mms' + prb + '_mec_' + drate + '_' + lvl + '_' + dtype + '_' + time_format + '_v*.cdf'
                    elif instrument == 'scm':
                        pathformat = remote_path + dtype + '/%Y/%m/mms' + prb + '_scm_' + drate + '_' + lvl + '_' + dtype + '_' + time_format + '_v*.cdf'
                    elif instrument == 'dsp':
                        pathformat = remote_path + dtype + '/%Y/%m/mms' + prb + '_dsp_' + drate + '_' + lvl + '_' + dtype + '_' + time_format + '_v*.cdf'
                    elif instrument == 'edp':
                        pathformat = remote_path + dtype + '/%Y/%m/mms' + prb + '_edp_' + drate + '_' + lvl + '_' + dtype + '_' + time_format + '_v*.cdf'

                    if drate == 'brst':
                        if isinstance(trange[0], float):
                            trange = [trange[0] - 300., trange[1]]
                        else:
                            trange = [time_double(trange[0]) - 300., trange[1]]

                    # find the full remote path names using the trange
                    remote_names = dailynames(file_format=pathformat, trange=trange, res=file_res)

                    out_files = []
                    files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'])
                    if files is not None:
                        for file in files:
                            out_files.append(file)

                    out_files = sorted(out_files)

                    filtered_out_files = mms_file_filter(out_files, latest_version=latest_version, major_version=major_version, min_version=min_version, version=cdf_version)

                    tvars = cdf_to_tplot(filtered_out_files, varformat=varformat, varnames=varnames, get_support_data=get_support_data, suffix=suffix, center_measurement=center_measurement, notplot=notplot)

                    if tvars is not None:
                        tvars_created.extend(tvars)

    if time_clip:
        for new_var in tvars_created:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars_created
def load(trange=['2020-06-01', '2020-06-02'], instrument='mag', datatype='rtn-normal', mode=None, level='l2', suffix='', get_support_data=False, varformat=None, varnames=[], downloadonly=False, notplot=False, no_update=False, time_clip=False):
    """
    This function loads data from the Solar Orbiter mission; this function is not meant
    to be called directly; instead, see the wrappers:
        pyspedas.solo.mag
        pyspedas.solo.epd
        pyspedas.solo.rpw
        pyspedas.solo.swa
    """
    if instrument == 'mag':
        pathformat = instrument + '/science/' + level + '/' + datatype + '/%Y/solo_' + level + '_' + instrument + '-' + datatype + '_%Y%m%d_v??.cdf'
    elif instrument == 'epd':
        pathformat = instrument + '/science/' + level + '/' + datatype + '/' + mode + '/%Y/solo_' + level + '_' + instrument + '-' + datatype + '-' + mode + '_%Y%m%d_v??.cdf'
    elif instrument == 'rpw':
        pathformat = instrument + '/science/' + level + '/' + datatype + '/%Y/solo_' + level + '_' + instrument + '-' + datatype + '_%Y%m%d_v??.cdf'
    elif instrument == 'swa':
        if datatype == 'pas-eflux':
            pathformat = instrument + '/science/' + level + '/' + datatype + '/%Y/solo_' + level + '_' + instrument + '-' + datatype + '_%Y%m%d_v??.cdf'

    # find the full remote path names using the trange
    remote_names = dailynames(file_format=pathformat, trange=trange)

    out_files = []
    files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'], no_download=no_update)
    if files is not None:
        for file in files:
            out_files.append(file)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
def load(trange=['2018-10-16', '2018-10-17'], instrument='mag', datatype='h0', suffix='', get_support_data=False, varformat=None, downloadonly=False, notplot=False, no_update=False, time_clip=False):
    """
    This function loads DSCOVR data into tplot variables; this function is not meant
    to be called directly; instead, see the wrappers:
        dscovr.mag: Fluxgate Magnetometer data
        dscovr.fc: Faraday Cup data
        dscovr.orb: Ephemeris data
        dscovr.att: Attitude data
        dscovr.all: Load all data
    """
    remote_path = datatype + '/' + instrument + '/%Y/'

    if instrument == 'mag':
        if datatype == 'h0':
            pathformat = remote_path + 'dscovr_h0_mag_%Y%m%d_v??.cdf'
    elif instrument == 'faraday_cup':
        if datatype == 'h1':
            pathformat = remote_path + 'dscovr_h1_fc_%Y%m%d_v??.cdf'
    elif instrument == 'pre_or':
        if datatype == 'orbit':
            pathformat = remote_path + 'dscovr_orbit_pre_%Y%m%d_v??.cdf'
    elif instrument == 'def_at':
        if datatype == 'orbit':
            pathformat = remote_path + 'dscovr_at_def_%Y%m%d_v??.cdf'

    # find the full remote path names using the trange
    remote_names = dailynames(file_format=pathformat, trange=trange)

    out_files = []
    for remote_file in remote_names:
        files = download(remote_file=remote_file, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'], no_download=no_update)
        if files is not None:
            for file in files:
                out_files.append(file)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files, suffix=suffix, merge=True, get_support_data=get_support_data, varformat=varformat, notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
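# Usage sketch (an assumption about the public wrapper, which is not defined in this
# file): the dscovr.mag wrapper named in the docstring is expected to call load()
# with instrument='mag', e.g.
#
#     import pyspedas
#     mag_vars = pyspedas.dscovr.mag(trange=['2018-10-16', '2018-10-17'])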
def load(trange=['2013-11-5', '2013-11-6'], datatype='1min', level='hro2', suffix='', get_support_data=False, get_ignore_data=False, varformat=None, varnames=[], downloadonly=False, notplot=False, no_update=False, time_clip=True):
    """
    This function loads OMNI (Combined 1AU IP Data; Magnetic and Solar Indices) data;
    this function is not meant to be called directly; instead, see the wrapper:
        pyspedas.omni.data
    """
    if 'min' in datatype:
        pathformat = level + '_' + datatype + '/%Y/omni_' + level + '_' + datatype + '_%Y%m01_v??.cdf'
    elif 'hour' in datatype:
        pathformat = 'hourly/%Y/omni2_h0_mrg1hr_%Y%m01_v??.cdf'
    else:
        raise TypeError("%r is not a valid datatype" % datatype)

    # find the full remote path names using the trange
    remote_names = dailynames(file_format=pathformat, trange=trange)

    out_files = []
    files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'], no_download=no_update)
    if files is not None:
        for file in files:
            out_files.append(file)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    with warnings.catch_warnings():
        # for some reason, OMNI CDFs throw ERFA warnings (likely while converting
        # times inside astropy); we're ignoring these here
        # see: https://github.com/astropy/astropy/issues/9603
        warnings.simplefilter('ignore', astropy.utils.exceptions.ErfaWarning)
        tvars = cdf_to_tplot(out_files, suffix=suffix, get_support_data=get_support_data, get_ignore_data=get_ignore_data, varformat=varformat, varnames=varnames, notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
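# Usage sketch (an assumption about the public wrapper, which is not defined in this
# file): pyspedas.omni.data is expected to forward these keywords to load(), e.g.
#
#     import pyspedas
#     omni_vars = pyspedas.omni.data(trange=['2013-11-5', '2013-11-6'], datatype='1min', level='hro2')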
def load(trange=['2013-11-5', '2013-11-6'], probe='15', instrument='fgm', datatype='1min', suffix='', downloadonly=False, no_update=False, time_clip=False):
    """
    This function loads data from the GOES mission; this function is not meant
    to be called directly; instead, see the wrappers:
        pyspedas.goes.fgm
        pyspedas.goes.eps
        pyspedas.goes.epead
        pyspedas.goes.maged
        pyspedas.goes.magpd
        pyspedas.goes.hepad
        pyspedas.goes.xrs
    """
    if not isinstance(probe, list):
        probe = [probe]

    fullavgpath = ['full', 'avg']
    # boolean index: averaged datatypes ('1min', '5min') live under 'avg', everything else under 'full'
    goes_path_dir = fullavgpath[datatype == '1min' or datatype == '5min']

    for prb in probe:
        remote_path = goes_path_dir + '/%Y/%m/goes' + str(prb) + '/netcdf/'

        if instrument == 'fgm':
            if datatype == '512ms':  # full, unaveraged data
                pathformat = remote_path + 'g' + str(prb) + '_magneto_512ms_%Y%m%d_%Y%m%d.nc'
            elif datatype == '1min':  # 1 min averages
                pathformat = remote_path + 'g' + str(prb) + '_magneto_1m_%Y%m01_%Y%m??.nc'
            elif datatype == '5min':  # 5 min averages
                pathformat = remote_path + 'g' + str(prb) + '_magneto_5m_%Y%m01_%Y%m??.nc'
        elif instrument == 'eps':
            # energetic particle sensor -- only valid for GOES-08 through GOES-12, only averaged data available
            if datatype == '1min':
                pathformat = remote_path + 'g' + str(prb) + '_eps_1m_%Y%m01_%Y%m??.nc'
            else:
                pathformat = remote_path + 'g' + str(prb) + '_eps_5m_%Y%m01_%Y%m??.nc'
        elif instrument == 'epead':
            # electron, proton, alpha detector -- only valid on GOES-13, 14, 15
            if datatype == '1min':
                pathformat = [remote_path + 'g' + str(prb) + '_epead_e13ew_1m_%Y%m01_%Y%m??.nc',
                              remote_path + 'g' + str(prb) + '_epead_p17ew_1m_%Y%m01_%Y%m??.nc',
                              remote_path + 'g' + str(prb) + '_epead_a16ew_1m_%Y%m01_%Y%m??.nc']
            elif datatype == '5min':
                pathformat = [remote_path + 'g' + str(prb) + '_epead_e13ew_5m_%Y%m01_%Y%m??.nc',
                              remote_path + 'g' + str(prb) + '_epead_p17ew_5m_%Y%m01_%Y%m??.nc',
                              remote_path + 'g' + str(prb) + '_epead_a16ew_5m_%Y%m01_%Y%m??.nc']
            else:
                pathformat = [remote_path + 'g' + str(prb) + '_epead_e1ew_4s_%Y%m%d_%Y%m%d.nc',
                              remote_path + 'g' + str(prb) + '_epead_e2ew_16s_%Y%m%d_%Y%m%d.nc',
                              remote_path + 'g' + str(prb) + '_epead_e3ew_16s_%Y%m%d_%Y%m%d.nc',
                              remote_path + 'g' + str(prb) + '_epead_p1ew_8s_%Y%m%d_%Y%m%d.nc',
                              remote_path + 'g' + str(prb) + '_epead_p27e_32s_%Y%m%d_%Y%m%d.nc',
                              remote_path + 'g' + str(prb) + '_epead_p27w_32s_%Y%m%d_%Y%m%d.nc',
                              remote_path + 'g' + str(prb) + '_epead_a16e_32s_%Y%m%d_%Y%m%d.nc',
                              remote_path + 'g' + str(prb) + '_epead_a16w_32s_%Y%m%d_%Y%m%d.nc']
        elif instrument == 'maged':
            # magnetospheric electron detector -- only valid on GOES 13, 14, 15
            if datatype == '1min':
                pathformat = remote_path + 'g' + str(prb) + '_maged_19me15_1m_%Y%m01_%Y%m??.nc'
            elif datatype == '5min':
                pathformat = remote_path + 'g' + str(prb) + '_maged_19me15_5m_%Y%m01_%Y%m??.nc'
            else:
                channels = ['me1', 'me2', 'me3', 'me4', 'me5']
                resolution = ['2', '2', '4', '16', '32']
                pathformat = []
                for idx, channel in enumerate(channels):
                    pathformat.append(remote_path + 'g' + str(prb) + '_maged_19' + channel + '_' + resolution[idx] + 's_%Y%m%d_%Y%m%d.nc')
        elif instrument == 'magpd':
            # magnetospheric proton detector -- only valid on GOES 13, 14, 15
            if datatype == '1min':
                pathformat = remote_path + 'g' + str(prb) + '_magpd_19mp15_1m_%Y%m01_%Y%m??.nc'
            elif datatype == '5min':
                pathformat = remote_path + 'g' + str(prb) + '_magpd_19mp15_5m_%Y%m01_%Y%m??.nc'
            else:
                channels = ['mp1', 'mp2', 'mp3', 'mp4', 'mp5']
                resolution = ['16', '16', '16', '32', '32']
                pathformat = []
                for idx, channel in enumerate(channels):
                    pathformat.append(remote_path + 'g' + str(prb) + '_magpd_19' + channel + '_' + resolution[idx] + 's_%Y%m%d_%Y%m%d.nc')
        elif instrument == 'hepad':
            # high energy proton and alpha detector -- valid for GOES 08-15
            if datatype == '1min':
                pathformat = [remote_path + 'g' + str(prb) + '_hepad_ap_1m_%Y%m01_%Y%m??.nc',
                              remote_path + 'g' + str(prb) + '_hepad_s15_1m_%Y%m01_%Y%m??.nc']
            elif datatype == '5min':
                pathformat = [remote_path + 'g' + str(prb) + '_hepad_ap_5m_%Y%m01_%Y%m??.nc',
                              remote_path + 'g' + str(prb) + '_hepad_s15_5m_%Y%m01_%Y%m??.nc']
            else:
                pathformat = [remote_path + 'g' + str(prb) + '_hepad_ap_32s_%Y%m%d_%Y%m%d.nc',
                              remote_path + 'g' + str(prb) + '_hepad_s15_4s_%Y%m%d_%Y%m%d.nc']
        elif instrument == 'xrs':
            # x-ray sensor -- valid for GOES 08-15
            if datatype == '1min':
                pathformat = remote_path + 'g' + str(prb) + '_xrs_1m_%Y%m01_%Y%m??.nc'
            elif datatype == '5min':
                pathformat = remote_path + 'g' + str(prb) + '_xrs_5m_%Y%m01_%Y%m??.nc'
            else:
                pathformat = remote_path + 'g' + str(prb) + '_xrs_2s_%Y%m%d_%Y%m%d.nc'

        # find the full remote path names using the trange
        if isinstance(pathformat, list):
            remote_names = []
            for path in pathformat:
                remote_names.extend(dailynames(file_format=path, trange=trange))
        else:
            remote_names = dailynames(file_format=pathformat, trange=trange)

        out_files = []
        files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'], no_download=no_update)
        if files is not None:
            for file in files:
                out_files.append(file)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = netcdf_to_tplot(out_files, suffix=suffix, merge=True, time='time_tag')

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
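# Usage sketch (an assumption about the public wrapper, which is not defined in this
# file): pyspedas.goes.fgm is expected to call load() with instrument='fgm', e.g.
#
#     import pyspedas
#     fgm_vars = pyspedas.goes.fgm(trange=['2013-11-5', '2013-11-6'], probe='15', datatype='1min')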
def mms_load_eph_tplot(filenames, level='def', probe='1', datatypes=['pos', 'vel'], suffix='', trange=None):
    """
    Helper routine for loading state data (ASCII files from the SDC);
    not meant to be called directly; see pyspedas.mms.state instead
    """
    prefix = 'mms' + probe
    time_values = []
    date_values = []
    x_values = []
    y_values = []
    z_values = []
    vx_values = []
    vy_values = []
    vz_values = []

    for file in filenames:
        rows = pd.read_csv(file, delim_whitespace=True, header=None, skiprows=14)
        times = rows.shape[0] - 1
        for time_idx in range(0, times):
            # these files can overlap, so avoid duplicates
            if rows[0][time_idx] in date_values:
                continue
            time_values.append(pd.to_datetime(rows[0][time_idx], format='%Y-%j/%H:%M:%S.%f').timestamp())
            x_values.append(rows[2][time_idx])
            y_values.append(rows[3][time_idx])
            z_values.append(rows[4][time_idx])
            vx_values.append(rows[5][time_idx])
            vy_values.append(rows[6][time_idx])
            vz_values.append(rows[7][time_idx])
            date_values.append(rows[0][time_idx])

    if 'pos' in datatypes:
        store_data(prefix + '_' + level + 'eph_pos' + suffix, data={'x': time_values, 'y': np.transpose(np.array([x_values, y_values, z_values]))})
        tclip(prefix + '_' + level + 'eph_pos' + suffix, trange[0], trange[1], suffix='')
        options(prefix + '_' + level + 'eph_pos' + suffix, 'ytitle', 'MMS' + str(probe) + ' position [km]')
        options(prefix + '_' + level + 'eph_pos' + suffix, 'legend_names', ['X ECI', 'Y ECI', 'Z ECI'])
        options(prefix + '_' + level + 'eph_pos' + suffix, 'color', ['b', 'g', 'r'])

    if 'vel' in datatypes:
        store_data(prefix + '_' + level + 'eph_vel' + suffix, data={'x': time_values, 'y': np.transpose(np.array([vx_values, vy_values, vz_values]))})
        tclip(prefix + '_' + level + 'eph_vel' + suffix, trange[0], trange[1], suffix='')
        options(prefix + '_' + level + 'eph_vel' + suffix, 'ytitle', 'MMS' + str(probe) + ' velocity [km/s]')
        options(prefix + '_' + level + 'eph_vel' + suffix, 'legend_names', ['Vx ECI', 'Vy ECI', 'Vz ECI'])
        options(prefix + '_' + level + 'eph_vel' + suffix, 'color', ['b', 'g', 'r'])
def load(trange=['2018-11-5', '2018-11-6'], instrument='fields', datatype='mag_rtn',
         spec_types=None,  # for DFB AC spectral data
         level='l2', suffix='', get_support_data=False, varformat=None, varnames=[],
         downloadonly=False, notplot=False, no_update=False, time_clip=False):
    """
    This function loads Parker Solar Probe data into tplot variables; this function is not meant
    to be called directly; instead, see the wrappers:
        psp.fields: FIELDS data
        psp.spc: Solar Probe Cup data
        psp.spe: SWEAP/SPAN-e data
        psp.spi: SWEAP/SPAN-i data
        psp.epihi: ISoIS/EPI-Hi data
        psp.epilo: ISoIS/EPI-Lo data
        psp.epi: ISoIS/EPI (merged Hi-Lo) data
    """
    # remote path formats are going to be all lowercase
    datatype = datatype.lower()

    file_resolution = 24 * 3600.

    if instrument == 'fields':
        # 4_per_cycle and 1min are daily, not 6h like the full resolution 'mag_(rtn|sc)'
        if datatype == 'mag_rtn_1min' or datatype == 'mag_sc_1min':
            pathformat = instrument + '/' + level + '/' + datatype + '/%Y/psp_fld_' + level + '_' + datatype + '_%Y%m%d_v??.cdf'
        elif datatype == 'mag_rtn_4_per_cycle' or datatype == 'mag_rtn_4_sa_per_cyc':
            pathformat = instrument + '/' + level + '/mag_rtn_4_per_cycle/%Y/psp_fld_' + level + '_mag_rtn_4_sa_per_cyc_%Y%m%d_v??.cdf'
        elif datatype == 'mag_sc_4_per_cycle' or datatype == 'mag_sc_4_sa_per_cyc':
            pathformat = instrument + '/' + level + '/mag_sc_4_per_cycle/%Y/psp_fld_' + level + '_mag_sc_4_sa_per_cyc_%Y%m%d_v??.cdf'
        elif datatype == 'rfs_hfr' or datatype == 'rfs_lfr' or datatype == 'rfs_burst' or datatype == 'f2_100bps':
            pathformat = instrument + '/' + level + '/' + datatype + '/%Y/psp_fld_' + level + '_' + datatype + '_%Y%m%d_v??.cdf'
        elif datatype == 'dfb_dc_spec' or datatype == 'dfb_ac_spec' or datatype == 'dfb_dc_xspec' or datatype == 'dfb_ac_xspec':
            out_vars = []
            for item in spec_types:
                loaded_data = load(trange=trange, instrument=instrument, datatype=datatype + '_' + item, level=level, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update)
                if loaded_data != []:
                    out_vars.extend(loaded_data)
            return out_vars
        elif datatype[:12] == 'dfb_dc_spec_' or datatype[:12] == 'dfb_ac_spec_' or datatype[:13] == 'dfb_dc_xspec_' or datatype[:13] == 'dfb_ac_xspec_':
            if datatype[:13] == 'dfb_dc_xspec_' or datatype[:13] == 'dfb_ac_xspec_':
                dtype_tmp = datatype[:12]
                stype_tmp = datatype[13:]
            else:
                dtype_tmp = datatype[:11]
                stype_tmp = datatype[12:]
            pathformat = instrument + '/' + level + '/' + dtype_tmp + '/' + stype_tmp + '/%Y/psp_fld_' + level + '_' + datatype + '_%Y%m%d_v??.cdf'
        else:
            pathformat = instrument + '/' + level + '/' + datatype + '/%Y/psp_fld_' + level + '_' + datatype + '_%Y%m%d%H_v??.cdf'
            file_resolution = 6 * 3600.
    elif instrument == 'spc':
        pathformat = 'sweap/spc/' + level + '/' + datatype + '/%Y/psp_swp_spc_' + datatype + '_%Y%m%d_v??.cdf'
    elif instrument == 'spe':
        pathformat = 'sweap/spe/' + level + '/' + datatype + '/%Y/psp_swp_sp?_*_%Y%m%d_v??.cdf'
    elif instrument == 'spi':
        pathformat = 'sweap/spi/' + level + '/' + datatype + '/%Y/psp_swp_spi_*_%Y%m%d_v??.cdf'
    elif instrument == 'epihi':
        pathformat = 'isois/epihi/' + level + '/' + datatype + '/%Y/psp_isois-epihi_' + level + '*_%Y%m%d_v??.cdf'
    elif instrument == 'epilo':
        pathformat = 'isois/epilo/' + level + '/' + datatype + '/%Y/psp_isois-epilo_' + level + '*_%Y%m%d_v??.cdf'
    elif instrument == 'epi':
        pathformat = 'isois/merged/' + level + '/' + datatype + '/%Y/psp_isois_' + level + '-' + datatype + '_%Y%m%d_v??.cdf'

    # find the full remote path names using the trange
    remote_names = dailynames(file_format=pathformat, trange=trange, res=file_resolution)

    out_files = []
    files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'], no_download=no_update)
    if files is not None:
        for file in files:
            out_files.append(file)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
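# Usage sketch (an assumption about the public wrapper, which is not defined in this
# file): psp.fields, listed in the docstring, is expected to call load() with
# instrument='fields', e.g.
#
#     import pyspedas
#     fields_vars = pyspedas.psp.fields(trange=['2018-11-5', '2018-11-6'], datatype='mag_rtn', level='l2')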
def load(trange=['2018-11-5', '2018-11-6'], instrument='fields', datatype='mag_rtn', level='l2', suffix='', get_support_data=False, varformat=None, varnames=[], downloadonly=False, notplot=False, no_update=False, time_clip=False):
    """
    This function loads Parker Solar Probe data into tplot variables; this function is not meant
    to be called directly; instead, see the wrappers:
        psp.fields: FIELDS data
        psp.spc: Solar Probe Cup data
        psp.spe: SWEAP/SPAN-e data
        psp.spi: SWEAP/SPAN-i data
        psp.epihi: ISoIS/EPI-Hi data
        psp.epilo: ISoIS/EPI-Lo data
        psp.epi: ISoIS/EPI (merged Hi-Lo) data
    """
    file_resolution = 24*3600.

    if instrument == 'fields':
        pathformat = instrument + '/' + level + '/' + datatype + '/%Y/psp_fld_' + level + '_' + datatype + '_%Y%m%d%H_v??.cdf'
        file_resolution = 6*3600.
    elif instrument == 'spc':
        pathformat = 'sweap/spc/' + level + '/' + datatype + '/%Y/psp_swp_spc_' + datatype + '_%Y%m%d_v??.cdf'
    elif instrument == 'spe':
        pathformat = 'sweap/spe/' + level + '/' + datatype + '/%Y/psp_swp_sp?_*_%Y%m%d_v??.cdf'
    elif instrument == 'spi':
        pathformat = 'sweap/spi/' + level + '/' + datatype + '/%Y/psp_swp_spi_*_%Y%m%d_v??.cdf'
    elif instrument == 'epihi':
        pathformat = 'isois/epihi/' + level + '/' + datatype + '/%Y/psp_isois-epihi_' + level + '*_%Y%m%d_v??.cdf'
    elif instrument == 'epilo':
        pathformat = 'isois/epilo/' + level + '/' + datatype + '/%Y/psp_isois-epilo_' + level + '*_%Y%m%d_v??.cdf'
    elif instrument == 'epi':
        pathformat = 'isois/merged/' + level + '/' + datatype + '/%Y/psp_isois_' + level + '-' + datatype + '_%Y%m%d_v??.cdf'

    # find the full remote path names using the trange
    remote_names = dailynames(file_format=pathformat, trange=trange, res=file_resolution)

    out_files = []
    files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'], no_download=no_update)
    if files is not None:
        for file in files:
            out_files.append(file)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
def load(trange=['2013-11-5', '2013-11-6'], instrument='fgm', probe='c', level='l2',
         stations=None,  # ground mag data
         greenland=None,  # also for ground mag data
         suffix='', get_support_data=False, varformat=None, varnames=[],
         downloadonly=False, notplot=False, no_update=False, time_clip=False):
    """
    This function loads data from the THEMIS mission; this function is not meant
    to be called directly; instead, see the wrappers:
        pyspedas.themis.fgm
        pyspedas.themis.fit
        pyspedas.themis.efi
        pyspedas.themis.scm
        pyspedas.themis.fft
        pyspedas.themis.fbk
        pyspedas.themis.esa
        pyspedas.themis.sst
        pyspedas.themis.mom
        pyspedas.themis.gmom
        pyspedas.themis.gmag
        pyspedas.themis.state
    """
    if not isinstance(probe, list):
        probe = [probe]

    out_files = []

    for prb in probe:
        if instrument == 'fgm':
            pathformat = 'th' + prb + '/' + level + '/' + instrument + '/%Y/th' + prb + '_' + level + '_' + instrument + '_%Y%m%d_v??.cdf'
        elif instrument == 'fit':
            pathformat = 'th' + prb + '/' + level + '/' + instrument + '/%Y/th' + prb + '_' + level + '_' + instrument + '_%Y%m%d_v??.cdf'
        elif instrument == 'efi':
            if level == 'l2':
                pathformat = 'th' + prb + '/' + level + '/' + instrument + '/%Y/th' + prb + '_' + level + '_' + instrument + '_%Y%m%d_v??.cdf'
            elif level == 'l1':
                pathformat = ['th' + prb + '/' + level + '/vaf/%Y/th' + prb + '_' + level + '_vaf_%Y%m%d_v??.cdf',
                              'th' + prb + '/' + level + '/vap/%Y/th' + prb + '_' + level + '_vap_%Y%m%d_v??.cdf',
                              'th' + prb + '/' + level + '/vaw/%Y/th' + prb + '_' + level + '_vaw_%Y%m%d_v??.cdf',
                              'th' + prb + '/' + level + '/vbf/%Y/th' + prb + '_' + level + '_vbf_%Y%m%d_v??.cdf',
                              'th' + prb + '/' + level + '/vbp/%Y/th' + prb + '_' + level + '_vbp_%Y%m%d_v??.cdf',
                              'th' + prb + '/' + level + '/vbw/%Y/th' + prb + '_' + level + '_vbw_%Y%m%d_v??.cdf',
                              'th' + prb + '/' + level + '/eff/%Y/th' + prb + '_' + level + '_eff_%Y%m%d_v??.cdf',
                              'th' + prb + '/' + level + '/efw/%Y/th' + prb + '_' + level + '_efw_%Y%m%d_v??.cdf',
                              'th' + prb + '/' + level + '/efp/%Y/th' + prb + '_' + level + '_efp_%Y%m%d_v??.cdf']
        elif instrument == 'scm':
            if level == 'l2':
                pathformat = 'th' + prb + '/' + level + '/' + instrument + '/%Y/th' + prb + '_' + level + '_' + instrument + '_%Y%m%d_v??.cdf'
            elif level == 'l1':
                pathformat = ['th' + prb + '/' + level + '/scp/%Y/th' + prb + '_' + level + '_scp_%Y%m%d_v??.cdf',
                              'th' + prb + '/' + level + '/scf/%Y/th' + prb + '_' + level + '_scf_%Y%m%d_v??.cdf',
                              'th' + prb + '/' + level + '/scw/%Y/th' + prb + '_' + level + '_scw_%Y%m%d_v??.cdf']
        elif instrument == 'fft':
            if level == 'l2':
                pathformat = 'th' + prb + '/' + level + '/' + instrument + '/%Y/th' + prb + '_' + level + '_' + instrument + '_%Y%m%d_v??.cdf'
            elif level == 'l1':
                pathformat = ['th' + prb + '/' + level + '/fff_16/%Y/th' + prb + '_' + level + '_fff_16_%Y%m%d_v??.cdf',
                              'th' + prb + '/' + level + '/fff_32/%Y/th' + prb + '_' + level + '_fff_32_%Y%m%d_v??.cdf',
                              'th' + prb + '/' + level + '/fff_64/%Y/th' + prb + '_' + level + '_fff_64_%Y%m%d_v??.cdf',
                              'th' + prb + '/' + level + '/ffp_16/%Y/th' + prb + '_' + level + '_ffp_16_%Y%m%d_v??.cdf',
                              'th' + prb + '/' + level + '/ffp_32/%Y/th' + prb + '_' + level + '_ffp_32_%Y%m%d_v??.cdf',
                              'th' + prb + '/' + level + '/ffp_64/%Y/th' + prb + '_' + level + '_ffp_64_%Y%m%d_v??.cdf',
                              'th' + prb + '/' + level + '/ffw_16/%Y/th' + prb + '_' + level + '_ffw_16_%Y%m%d_v??.cdf',
                              'th' + prb + '/' + level + '/ffw_32/%Y/th' + prb + '_' + level + '_ffw_32_%Y%m%d_v??.cdf',
                              'th' + prb + '/' + level + '/ffw_64/%Y/th' + prb + '_' + level + '_ffw_64_%Y%m%d_v??.cdf']
        elif instrument == 'fbk':
            pathformat = 'th' + prb + '/' + level + '/' + instrument + '/%Y/th' + prb + '_' + level + '_' + instrument + '_%Y%m%d_v??.cdf'
        elif instrument == 'esa':
            pathformat = 'th' + prb + '/' + level + '/' + instrument + '/%Y/th' + prb + '_' + level + '_' + instrument + '_%Y%m%d_v??.cdf'
        elif instrument == 'sst':
            pathformat = 'th' + prb + '/' + level + '/' + instrument + '/%Y/th' + prb + '_' + level + '_' + instrument + '_%Y%m%d_v??.cdf'
        elif instrument == 'mom':
            pathformat = 'th' + prb + '/' + level + '/' + instrument + '/%Y/th' + prb + '_' + level + '_' + instrument + '_%Y%m%d_v??.cdf'
        elif instrument == 'gmom':
            pathformat = 'th' + prb + '/' + level + '/' + instrument + '/%Y/th' + prb + '_' + level + '_' + instrument + '_%Y%m%d_v??.cdf'
        elif instrument == 'state':
            pathformat = 'th' + prb + '/' + level + '/' + instrument + '/%Y/th' + prb + '_' + level + '_' + instrument + '_%Y%m%d.cdf'
        elif instrument == 'gmag':
            if stations is None:
                logging.error('No stations specified')
                return
            else:
                pathformat = []
                for site, in_greenland in zip(stations, greenland):
                    if in_greenland:
                        pathformat.append('thg/greenland_gmag/' + level + '/mag/' + site + '/%Y/thg_' + level + '_mag_' + site + '_%Y%m%d_v??.cdf')
                    else:
                        pathformat.append('thg/' + level + '/mag/' + site + '/%Y/thg_' + level + '_mag_' + site + '_%Y%m%d_v??.cdf')

        if not isinstance(pathformat, list):
            pathformat = [pathformat]

        for file_format in pathformat:
            # find the full remote path names using the trange
            remote_names = dailynames(file_format=file_format, trange=trange)

            files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'], no_download=no_update, last_version=True)
            if files is not None:
                for file in files:
                    out_files.append(file)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files, suffix=suffix, merge=True, get_support_data=get_support_data, varformat=varformat, varnames=varnames, notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
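# Usage sketch (an assumption about the public wrapper, which is not defined in this
# file): pyspedas.themis.fgm is expected to call load() with instrument='fgm', e.g.
#
#     import pyspedas
#     fgm_vars = pyspedas.themis.fgm(trange=['2013-11-5', '2013-11-6'], probe='c', level='l2')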
def load(trange=['2013-11-5', '2013-11-6'], probe='a', instrument='mag', level='l2', datatype='8hz', coord='RTN', suffix='', get_support_data=False, varformat=None, downloadonly=False, notplot=False, no_update=False, time_clip=False):
    """
    This function loads data from the STEREO mission; this function is not meant
    to be called directly; instead, see the wrappers:
        pyspedas.stereo.mag
        pyspedas.stereo.plastic
    """
    out_files = []

    if not isinstance(probe, list):
        probe = [probe]

    if datatype == '32hz':
        burst = 'B'
    else:
        burst = ''

    for prb in probe:
        if prb == 'a':
            direction = 'ahead'
        elif prb == 'b':
            direction = 'behind'

        if instrument == 'mag':
            pathformat = 'impact/level1/'+direction+'/mag/'+coord+'/%Y/%m/ST'+prb.upper()+'_L1_MAG'+burst+'_'+coord+'_%Y%m%d_V??.cdf'
        elif instrument == 'plastic':
            CONFIG['remote_data_dir'] = 'http://stereo-ssc.nascom.nasa.gov/data/ins_data/'
            if level == 'l2':
                pathformat = 'plastic/level2/Protons/Derived_from_1D_Maxwellian/'+direction+'/'+datatype+'/%Y/ST'+prb.upper()+'_L2_PLA_1DMax_'+datatype+'_%Y%m%d_V??.cdf'

        # find the full remote path names using the trange
        remote_names = dailynames(file_format=pathformat, trange=trange)

        for remote_file in remote_names:
            files = download(remote_file=remote_file, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'], no_download=no_update)
            if files is not None:
                for file in files:
                    out_files.append(file)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files, suffix=suffix, merge=True, get_support_data=get_support_data, varformat=varformat, notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
def load_csa(trange=['2001-02-01', '2001-02-03'], probes=['C1'], datatypes=['CP_CIS-CODIF_HS_H1_MOMENTS'], downloadonly=False, time_clip=True, suffix='', get_support_data=False, varformat=None, varnames=[], notplot=False):
    """Load data using the Cluster Science Data archive.

    Parameters:
        trange : list of str
            Time range [start, end].
        probes : list of str
            List of Cluster probes. Use ['*'] to load all. See cl_master_probes().
        datatypes : list of str
            List of Cluster data types. Use ['*'] to load all. See cl_master_datatypes().
        downloadonly: bool
            If true, do not use cdf_to_tplot.
        time_clip: bool
            If true, apply time clip to data.
        suffix: str (for pytplot)
            The tplot variable names will be given this suffix. By default, no suffix is added.
        get_support_data: bool (for pytplot)
            Data with an attribute "VAR_TYPE" with a value of "support_data" will be loaded into
            tplot. By default, only loads in data with a "VAR_TYPE" attribute of "data".
        varformat : str (for pytplot)
            The file variable formats to load into tplot. Wildcard character "*" is accepted.
            By default, all variables are loaded in.
        varnames: str or list of str (for pytplot)
            Load these variables only. If [] or ['*'], then load everything.
        notplot: bool (for pytplot)
            If True, then data are returned in a hash table instead of being stored in tplot
            variables (useful for debugging, and access to multi-dimensional data products)

    Returns:
        List of tplot variables created (unless notplot keyword is used).
    """
    # Empty output in case of errors.
    tvars = []

    # Start and end dates
    start_date = cl_format_time(trange[0])
    end_date = cl_format_time(trange[1])

    # Delivery format
    delivery_format = 'CDF_ISTP'
    # Delivery interval
    delivery_interval = 'ALL'

    # TODO: Create a function that can resolve wildcards,
    # similar to IDL spedas ssl_check_valid_name
    # my_datatypes = ssl_check_valid_name(uc_datatypes, master_datatypes)
    # my_probes = ssl_check_valid_name(uc_probes, master_probes)

    if not probes:  # list is empty
        return tvars
    elif probes[0] == '*':  # load all probes
        probes = cl_master_probes()

    if not datatypes:  # list is empty
        return tvars
    elif datatypes[0] == '*':  # load all datatypes
        datatypes = cl_master_datatypes()

    # Construct the query string
    base_url = 'https://csa.esac.esa.int/csa-sl-tap/data?'
    query_string = ('retrieval_type=PRODUCT&START_DATE=' + start_date +
                    '&END_DATE=' + end_date +
                    '&DELIVERY_FORMAT=' + delivery_format +
                    '&DELIVERY_INTERVAL=' + delivery_interval +
                    '&NON_BROWSER')

    for p in probes:
        for d in datatypes:
            query_string += '&DATASET_ID=' + p + '_' + d

    # Encode the url (urllib.parse.quote)
    url = base_url + query_string

    local_path = CONFIG['local_data_dir']
    Path(local_path).mkdir(parents=True, exist_ok=True)
    out_gz = local_path + 'temp_cluster_file.tar.gz'  # Temp file name

    # Download the file.
    print("Downloading Cluster data, please wait....")
    try:
        r = requests.get(url, allow_redirects=True)
        r.raise_for_status()
    except requests.exceptions.HTTPError as err:
        print("Download HTTP error: ", err)
        return tvars
    except requests.exceptions.RequestException as e:
        print("Download error: ", e)
        return tvars
    print("Download complete.")

    # Open the downloaded file.
    with open(out_gz, 'wb') as w:
        w.write(r.content)

    # Extract the tar archive.
    tar = tarfile.open(out_gz, "r:gz")
    f = tar.getnames()
    tar.extractall(path=local_path)
    tar.close()

    # Remove the tar.gz file but keep the extracted.
    os.remove(out_gz)

    # Get unique set of files.
    f_set = set(f)

    # File list with full path.
    out_files = [local_path + s for s in list(f_set)]
    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    # Load data into tplot
    try:
        tvars = cdf_to_tplot(out_files, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, notplot=notplot)
    except IndexError as e:
        print("cdf_to_tplot cannot load Cluster cdf file.")
        print("File: ", out_files[0])
        print("IndexError:", e)
        return tvars
    except TypeError as e:
        print("cdf_to_tplot cannot load Cluster cdf file.")
        print("File: ", out_files[0])
        print("TypeError:", e)
        return tvars

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
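# Usage sketch: unlike the other loaders here, load_csa() is documented above and reads
# as a public entry point, so it can be called with the documented keywords, e.g.
#
#     csa_vars = load_csa(trange=['2001-02-01', '2001-02-03'], probes=['C1'],
#                         datatypes=['CP_CIS-CODIF_HS_H1_MOMENTS'])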
def load(trange=['2013-11-5', '2013-11-6'], instrument='dcb', datatype='', level='l2', suffix='', get_support_data=False, varformat=None, varnames=[], downloadonly=False, notplot=False, no_update=False, time_clip=False):
    """
    This function loads data from the FAST mission; this function is not meant
    to be called directly; instead, see the wrappers:
        pyspedas.fast.dcb
        pyspedas.fast.acb
        pyspedas.fast.esa
        pyspedas.fast.teams
    """
    file_resolution = 24 * 3600.

    if instrument == 'dcb':
        if level == 'k0':
            pathformat = 'dcf/' + level + '/%Y/fa_k0_dcf_%Y%m%d_v??.cdf'
        else:
            pathformat = 'dcf/' + level + '/' + instrument + '/%Y/%m/fast_hr_' + instrument + '_%Y%m%d%H????_v??.cdf'
            file_resolution = 3600.
    if instrument == 'acb':
        pathformat = 'acf/' + level + '/%Y/fa_' + level + '_acf_%Y%m%d_v??.cdf'
    elif instrument == 'esa':
        pathformat = instrument + '/' + level + '/' + datatype + '/%Y/%m/fa_' + instrument + '_' + level + '_' + datatype + '_%Y%m%d??????_*_v??.cdf'
    if instrument == 'teams':
        pathformat = 'teams/' + level + '/%Y/fa_' + level + '_tms_%Y%m%d_v??.cdf'

    # find the full remote path names using the trange
    remote_names = dailynames(file_format=pathformat, trange=trange, res=file_resolution)

    out_files = []
    files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'], no_download=no_update)
    if files is not None:
        for file in files:
            out_files.append(file)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
def load(trange=['2009-01-01', '2009-01-02'], instrument='vhm', datatype='1min', suffix='', get_support_data=False, varformat=None, varnames=[], downloadonly=False, notplot=False, no_update=False, time_clip=False):
    """
    This function is not meant to be called directly; please see the instrument
    specific wrappers in __init__.py
    """
    if instrument == 'vhm':
        pathformat = 'mag_cdaweb/vhm_'+datatype+'/%Y/uy_'+datatype+'_vhm_%Y%m%d_v??.cdf'
    elif instrument == 'swoops':
        if datatype in ['bai_m0', 'bai_m1', 'bae_m0']:
            pathformat = 'plasma/swoops_cdaweb/'+datatype+'/%Y/uy_'+datatype.split('_')[1]+'_'+datatype.split('_')[0]+'_%Y%m%d_v??.cdf'
        else:
            pathformat = 'plasma/swoops_cdaweb/'+datatype+'/%Y/uy_'+datatype+'_%Y0101_v??.cdf'
    elif instrument == 'swics':
        pathformat = 'plasma/swics_cdaweb/'+datatype+'/%Y/uy_'+datatype.split('_')[1]+'_'+datatype.split('_')[0]+'_%Y%m%d_v??.cdf'
    elif instrument == 'urap':
        pathformat = 'radio/urap_cdaweb/'+datatype+'/%Y/uy_'+datatype.split('_')[1]+'_'+datatype.split('_')[0]+'_%Y%m%d_v??.cdf'
    elif instrument == 'epac':
        if datatype == 'epac_m1':
            pathformat = 'particle/epac_cdaweb/'+datatype+'/%Y/uy_m1_epa_%Y%m%d_v??.cdf'
    elif instrument == 'hiscale':
        pathformat = 'particle/hiscale_cdaweb/'+datatype+'/%Y/uy_'+datatype.split('_')[1]+'_'+datatype.split('_')[0]+'_%Y%m%d_v??.cdf'
    elif instrument == 'cospin':
        pathformat = 'particle/cospin_cdaweb/'+datatype+'/%Y/uy_m0_'+datatype+'_%Y%m%d_v??.cdf'
    elif instrument == 'grb':
        pathformat = 'gamma/grb_cdaweb/'+datatype+'/%Y/uy_'+datatype.split('_')[1]+'_'+datatype.split('_')[0]+'_%Y%m%d_v??.cdf'

    # find the full remote path names using the trange
    remote_names = dailynames(file_format=pathformat, trange=trange)

    out_files = []
    files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'], no_download=no_update)
    if files is not None:
        for file in files:
            out_files.append(file)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
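# Usage sketch (an assumption: the wrapper names below are not given in this file, only
# "the instrument specific wrappers in __init__.py"): a VHM/FGM wrapper such as
# pyspedas.ulysses.vhm would be expected to call load() with instrument='vhm', e.g.
#
#     import pyspedas
#     vhm_vars = pyspedas.ulysses.vhm(trange=['2009-01-01', '2009-01-02'], datatype='1min')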
def load(trange=['2018-11-5', '2018-11-6'], probe='1', instrument='lad', datatype='', suffix='', get_support_data=False, varformat=None, downloadonly=False, notplot=False, no_update=False, time_clip=False):
    """
    This function loads TWINS data; this function is not meant to be called directly;
    instead, see the wrappers:
        pyspedas.twins.lad
        pyspedas.twins.ephemeris
        pyspedas.twins.imager
    """
    if not isinstance(probe, list):
        probe = [probe]

    probe = [str(prb) for prb in probe]

    out_files = []

    for prb in probe:
        if instrument == 'lad':
            pathformat = 'twins' + prb + '/' + instrument + '/%Y/twins' + prb + '_l1_lad_%Y%m%d_v??.cdf'
        elif instrument == 'imager':
            pathformat = 'twins' + prb + '/' + instrument + '/%Y/twins' + prb + '_l1_imager_%Y%m%d??_v??.cdf'
        elif instrument == 'ephemeris':
            pathformat = 'twins' + prb + '/' + instrument + '/' + datatype + '/%Y/twins' + prb + '_' + datatype + '_def_%Y%m%d_v??.cdf'

        # find the full remote path names using the trange
        remote_names = dailynames(file_format=pathformat, trange=trange)

        for remote_file in remote_names:
            files = download(remote_file=remote_file, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'], no_download=no_update)
            if files is not None:
                for file in files:
                    out_files.append(file)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files, suffix=suffix, merge=True, get_support_data=get_support_data, varformat=varformat, notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
def load(trange=['1998-04-06', '1998-04-07'], instrument='mam', datatype='pp', suffix='', get_support_data=False, varformat=None, varnames=[], downloadonly=False, notplot=False, no_update=False, time_clip=False): """ This function loads data from the Equator-S mission; this function is not meant to be called directly; instead, see the wrappers: pyspedas.equator_s.mam pyspedas.equator_s.edi pyspedas.equator_s.esa (3DA) pyspedas.equator_s.epi pyspedas.equator_s.ici pyspedas.equator_s.pcd pyspedas.equator_s.sfd """ if instrument == 'mam': pathformat = datatype + '/' + instrument + '/%Y/eq_' + datatype + '_' + instrument + '_%Y%m%d_v??.cdf' elif instrument == 'edi': pathformat = datatype + '/' + instrument + '/%Y/eq_' + datatype + '_' + instrument + '_%Y%m%d_v??.cdf' elif instrument == '3da': pathformat = datatype + '/' + instrument + '/%Y/eq_' + datatype + '_' + instrument + '_%Y%m%d_v??.cdf' elif instrument == 'epi': pathformat = datatype + '/' + instrument + '/%Y/eq_' + datatype + '_' + instrument + '_%Y%m%d_v??.cdf' elif instrument == 'ici': pathformat = datatype + '/' + instrument + '/%Y/eq_' + datatype + '_' + instrument + '_%Y%m%d_v??.cdf' elif instrument == 'pcd': pathformat = datatype + '/' + instrument + '/%Y/eq_' + datatype + '_' + instrument + '_%Y%m%d_v??.cdf' elif instrument == 'sfd': pathformat = datatype + '/' + instrument + '/%Y/eq_' + datatype + '_' + instrument + '_%Y%m%d_v??.cdf' # find the full remote path names using the trange remote_names = dailynames(file_format=pathformat, trange=trange) out_files = [] files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'], no_download=no_update) if files is not None: for file in files: out_files.append(file) out_files = sorted(out_files) if downloadonly: return out_files tvars = cdf_to_tplot(out_files, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, notplot=notplot) if notplot: return tvars if time_clip: for new_var in tvars: tclip(new_var, trange[0], trange[1], suffix='') return tvars
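# Usage sketch (added for illustration; not part of the original module). Every instrument
# branch above builds the same 'eq_<datatype>_<instrument>' path, so a basic call only needs
# the defaults. Assumes the pyspedas.equator_s.mam wrapper named in the docstring forwards
# these keyword arguments to load().
def _example_equator_s_mam():
    import pyspedas
    from pytplot import tplot
    # Load prime-parameter ('pp') fluxgate magnetometer data for the default day
    mam_vars = pyspedas.equator_s.mam(trange=['1998-04-06', '1998-04-07'], time_clip=True)
    tplot(mam_vars)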
def load(trange=['2013-11-5', '2013-11-6'], probe='15', instrument='fgm', datatype='1min', suffix='', downloadonly=False, no_update=False, time_clip=False): """ This function loads data from the GOES mission; this function is not meant to be called directly; instead, see the wrappers: pyspedas.goes.fgm """ if not isinstance(probe, list): probe = [probe] goes_path_dir = 'avg' if datatype in ('1min', '5min') else 'full' for prb in probe: remote_path = goes_path_dir + '/%Y/%m/goes' + str(prb) + '/netcdf/' if instrument == 'fgm': if datatype == '512ms': # full, unaveraged data pathformat = remote_path + 'g' + str(prb) + '_magneto_512ms_%Y%m%d_%Y%m%d.nc' elif datatype == '1min': # 1 min averages pathformat = remote_path + 'g' + str(prb) + '_magneto_1m_%Y%m01_%Y%m??.nc' elif datatype == '5min': # 5 min averages pathformat = remote_path + 'g' + str(prb) + '_magneto_5m_%Y%m01_%Y%m??.nc' # find the full remote path names using the trange remote_names = dailynames(file_format=pathformat, trange=trange) out_files = [] files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'], no_download=no_update) if files is not None: for file in files: out_files.append(file) out_files = sorted(out_files) if downloadonly: return out_files tvars = netcdf_to_tplot(out_files, suffix=suffix, merge=True, time='time_tag') if time_clip: for new_var in tvars: tclip(new_var, trange[0], trange[1], suffix='') return tvars
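# Usage sketch (added for illustration; not part of the original module). GOES FGM files are
# netCDF rather than CDF, so variables come from netcdf_to_tplot and are merged across probes;
# '1min' and '5min' data live under the 'avg' tree, '512ms' under 'full'. Assumes the
# pyspedas.goes.fgm wrapper named in the docstring forwards these keyword arguments to load().
def _example_goes_fgm():
    import pyspedas
    from pytplot import tplot
    # Load 1-minute averaged magnetometer data from GOES-15 and clip to the time range
    mag_vars = pyspedas.goes.fgm(trange=['2013-11-5', '2013-11-6'], probe='15',
                                 datatype='1min', time_clip=True)
    tplot(mag_vars)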
def load(trange=['2018-11-5', '2018-11-6'], probe='a', instrument='emfisis', level='l3', datatype='magnetometer', suffix='', cadence='4sec', # for EMFISIS mag data coord='sm', # for EMFISIS mag data wavetype='waveform', # for EMFISIS waveform data rel='rel04', # for ECT data get_support_data=False, varformat=None, varnames=[], downloadonly=False, notplot=False, no_update=False, time_clip=False): """ This function loads Van Allen Probes (RBSP) data; this function is not meant to be called directly; instead, see the wrappers: pyspedas.rbsp.emfisis pyspedas.rbsp.rbspice pyspedas.rbsp.efw pyspedas.rbsp.mageis pyspedas.rbsp.hope pyspedas.rbsp.rept pyspedas.rbsp.rps """ if not isinstance(probe, list): probe = [probe] out_files = [] for prb in probe: if instrument == 'emfisis': if datatype == 'density' or datatype == 'housekeeping' or datatype == 'wna-survey': pathformat = 'rbsp' + prb + '/' + level + '/' + instrument + '/' + datatype + '/%Y/rbsp-' + prb + '_' + datatype + '_' + instrument + '-' + level + '_%Y%m%d_v*.cdf' elif datatype == 'wfr' or datatype == 'hfr': pathformat = 'rbsp' + prb + '/' + level + '/' + instrument + '/' + datatype + '/' + wavetype + '/%Y/rbsp-' + prb + '_' + datatype + '-' + wavetype + '_' + instrument + '-' + level + '_%Y%m%d*_v*.cdf' else: if level == 'l2' and datatype == 'magnetometer': pathformat = 'rbsp' + prb + '/' + level + '/' + instrument + '/' + datatype + '/uvw/%Y/rbsp-' + prb + '_' + datatype + '_uvw_' + instrument + '-' + level + '_%Y%m%d*_v*.cdf' else: pathformat = 'rbsp' + prb + '/' + level + '/' + instrument + '/' + datatype + '/' + cadence + '/' + coord + '/%Y/rbsp-' + prb + '_' + datatype + '_' + cadence + '-' + coord + '_' + instrument + '-' + level + '_%Y%m%d_v*.cdf' elif instrument == 'rbspice': pathformat = 'rbsp' + prb + '/' + level + '/' + instrument + '/' + datatype + '/%Y/rbsp-' + prb + '-' + instrument + '_lev-' + str(level[-1]) + '?' + datatype + '_%Y%m%d_v*.cdf' elif instrument == 'efw': if level == 'l3': pathformat = 'rbsp' + prb + '/' + level + '/' + instrument + '/%Y/rbsp' + prb + '_' + instrument + '-' + level + '_%Y%m%d_v??.cdf' else: pathformat = 'rbsp' + prb + '/' + level + '/' + instrument + '/' + datatype + '/%Y/rbsp' + prb + '_' + instrument + '-' + level + '_' + datatype + '_%Y%m%d_v??.cdf' elif instrument == 'mageis': pathformat = 'rbsp' + prb + '/' + level + '/ect/' + instrument + '/sectors/' + rel + '/%Y/rbsp' + prb + '_' + rel + '_ect-mageis-' + level + '_%Y%m%d_v*.cdf' elif instrument == 'hope': if datatype == 'moments': pathformat = 'rbsp' + prb + '/' + level + '/ect/' + instrument + '/' + datatype + '/' + rel + '/%Y/rbsp' + prb + '_' + rel + '_ect-hope-mom-' + level + '_%Y%m%d_v*.cdf' elif datatype == 'pitchangle': pathformat = 'rbsp' + prb + '/' + level + '/ect/' + instrument + '/' + datatype + '/' + rel + '/%Y/rbsp' + prb + '_' + rel + '_ect-hope-pa-' + level + '_%Y%m%d_v*.cdf' elif datatype == 'spinaverage': pathformat = 'rbsp' + prb + '/' + level + '/ect/' + instrument + '/' + datatype + '/' + rel + '/%Y/rbsp' + prb + '_' + rel + '_ect-hope-sci-' + level + 'sa_%Y%m%d_v*.cdf' elif instrument == 'rept': pathformat = 'rbsp' + prb + '/' + level + '/ect/' + instrument + '/sectors/' + rel + '/%Y/rbsp' + prb + '_' + rel + '_ect-rept-sci-' + level + '_%Y%m%d_v*.cdf' elif instrument == 'rps': if datatype == 'rps-1min': pathformat = 'rbsp' + prb + '/' + level + '/rps/psbr-rps-1min/%Y/rbsp' + prb + '_' + level + '-1min_psbr-rps_%Y%m%d_v*.cdf' elif datatype == 'rps': pathformat = 'rbsp' + prb + '/' + level + '/rps/psbr-rps/%Y/rbsp' + prb + '_' + level + '_psbr-rps_%Y%m%d_v*.cdf' # find the full remote path names using the trange remote_names = dailynames(file_format=pathformat, trange=trange) files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'], no_download=no_update) if files is not None: for file in files: out_files.append(file) out_files = sorted(out_files) if downloadonly: return out_files tvars = cdf_to_tplot(out_files, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, notplot=notplot) if notplot: return tvars if time_clip: for new_var in tvars: tclip(new_var, trange[0], trange[1], suffix='') return tvars
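# Usage sketch (added for illustration; not part of the original module). For EMFISIS
# magnetometer data the remote path is keyed by cadence and coordinate system, so those
# keywords select which file gets downloaded. Assumes the pyspedas.rbsp.emfisis wrapper
# named in the docstring forwards these keyword arguments to load(); the 'gsm' coordinate
# choice is an assumption about what the archive provides.
def _example_rbsp_emfisis():
    import pyspedas
    from pytplot import tplot
    # 4-second L3 magnetometer data from probe A in GSM coordinates
    emfisis_vars = pyspedas.rbsp.emfisis(trange=['2018-11-5', '2018-11-6'], probe='a',
                                         level='l3', cadence='4sec', coord='gsm',
                                         time_clip=True)
    tplot(emfisis_vars)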
def load(trange=['2017-03-27', '2017-03-28'], instrument='mgf', datatype='8sec', mode=None, site=None, level='l2', suffix='', get_support_data=False, varformat=None, varnames=[], downloadonly=False, notplot=False, no_update=False, uname=None, passwd=None, time_clip=False): """ This function is not meant to be called directly; please see the instrument specific wrappers: pyspedas.erg.mgf() pyspedas.erg.hep() pyspedas.erg.orb() pyspedas.erg.lepe() pyspedas.erg.lepi() pyspedas.erg.mepe() pyspedas.erg.mepi() pyspedas.erg.pwe_ofa() pyspedas.erg.pwe_efd() pyspedas.erg.pwe_hfa() pyspedas.erg.xep() """ prefix = 'erg_' + instrument + '_' + level + '_' file_res = 24 * 3600. if instrument == 'mgf': pathformat = 'satellite/erg/' + instrument + '/' + level + '/' + datatype + '/%Y/%m/erg_' + instrument + '_' + level + '_' + datatype + '_%Y%m%d_v??.??.cdf' elif instrument == 'hep': pathformat = 'satellite/erg/' + instrument + '/' + level + '/' + datatype + '/%Y/%m/erg_' + instrument + '_' + level + '_' + datatype + '_%Y%m%d_v??_??.cdf' elif instrument == 'orb': pathformat = 'satellite/erg/' + instrument + '/' + level + '/opq/%Y/%m/erg_' + instrument + '_' + level + '_op_%Y%m%d_v??.cdf' elif instrument == 'lepe': pathformat = 'satellite/erg/' + instrument + '/' + level + '/' + datatype + '/%Y/%m/erg_' + instrument + '_' + level + '_' + datatype + '_%Y%m%d_v??_??.cdf' elif instrument == 'lepi': pathformat = 'satellite/erg/' + instrument + '/' + level + '/' + datatype + '/%Y/%m/erg_' + instrument + '_' + level + '_' + datatype + '_%Y%m%d_v??_??.cdf' elif instrument == 'mepe': pathformat = 'satellite/erg/' + instrument + '/' + level + '/' + datatype + '/%Y/%m/erg_' + instrument + '_' + level + '_' + datatype + '_%Y%m%d_v??_??.cdf' elif instrument == 'mepi': pathformat = 'satellite/erg/' + instrument + '/' + level + '/' + datatype + '/%Y/%m/erg_' + instrument + '_' + level + '_' + datatype + '_%Y%m%d_v??_??.cdf' elif instrument == 'pwe_ofa': pathformat = 'satellite/erg/pwe/ofa/' + level + '/' + datatype + '/%Y/%m/erg_' + instrument + '_' + level + '_' + datatype + '_%Y%m%d_v??_??.cdf' elif instrument == 'pwe_efd': pathformat = 'satellite/erg/pwe/efd/' + level + '/' + datatype + '/%Y/%m/erg_' + instrument + '_' + level + '_' + datatype + '_%Y%m%d_v??_??.cdf' elif instrument == 'pwe_hfa': pathformat = 'satellite/erg/pwe/hfa/' + level + '/' + datatype + '/' + mode + '/%Y/%m/erg_' + instrument + '_' + level + '_' + datatype + '_' + mode + '_%Y%m%d_v??_??.cdf' elif instrument == 'xep': pathformat = 'satellite/erg/' + instrument + '/' + level + '/' + datatype + '/%Y/%m/erg_' + instrument + '_' + level + '_' + datatype + '_%Y%m%d_v??_??.cdf' # find the full remote path names using the trange remote_names = dailynames(file_format=pathformat, trange=trange, res=file_res) out_files = [] files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'], no_download=no_update, last_version=True, username=uname, password=passwd) if files is not None: for file in files: out_files.append(file) out_files = sorted(out_files) if downloadonly: return out_files tvars = cdf_to_tplot(out_files, prefix=prefix, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, notplot=notplot) if notplot: return tvars if time_clip: for new_var in tvars: tclip(new_var, trange[0], trange[1], suffix='') return tvars
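# Usage sketch (added for illustration; not part of the original module). Loaded variables
# are prefixed with 'erg_<instrument>_<level>_', and the download call can pass a username
# and password through for access-restricted products. Assumes the pyspedas.erg.mgf()
# wrapper named in the docstring forwards these keyword arguments to load().
def _example_erg_mgf():
    import pyspedas
    from pytplot import tplot
    # Load 8-second L2 MGF data; add uname=/passwd= here if the ERG server requires credentials
    mgf_vars = pyspedas.erg.mgf(trange=['2017-03-27', '2017-03-28'], datatype='8sec',
                                time_clip=True)
    tplot(mgf_vars)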
def load(trange=['2004-11-5', '2004-11-6'], instrument='lena', datatype='k0', suffix='', get_support_data=False, varformat=None, downloadonly=False, notplot=False, no_update=False, time_clip=False): """ This function loads IMAGE data; this function is not meant to be called directly; instead, see the wrappers: pyspedas.image.lena pyspedas.image.mena pyspedas.image.hena pyspedas.image.rpi pyspedas.image.euv pyspedas.image.fuv """ if instrument == 'lena': pathformat = instrument+'/'+instrument+'_'+datatype+'/%Y/im_'+datatype+'_'+instrument+'_%Y%m%d_v??.cdf' elif instrument == 'mena': pathformat = instrument+'/'+instrument+'_'+datatype+'/%Y/im_'+datatype+'_'+instrument+'_%Y%m%d_v??.cdf' elif instrument == 'hena': pathformat = instrument+'/'+instrument+'_'+datatype+'/%Y/im_'+datatype+'_'+instrument+'_%Y%m%d_v??.cdf' elif instrument == 'rpi': pathformat = instrument+'/'+instrument+'_'+datatype+'/%Y/im_'+datatype+'_'+instrument+'_%Y%m%d_v??.cdf' elif instrument == 'euv': pathformat = instrument+'/'+instrument+'_'+datatype+'/%Y/im_'+datatype+'_'+instrument+'_%Y%m%d_v??.cdf' elif instrument == 'fuv': pathformat = instrument+'/wic_'+datatype+'/%Y/im_'+datatype+'_wic_%Y%m%d_v??.cdf' elif instrument == 'orbit': if datatype == 'def_or': pathformat = instrument+'/def_or/%Y/im_or_def_%Y%m%d_v??.cdf' elif datatype == 'pre_or': pathformat = instrument+'/pre_or/%Y/im_or_pre_%Y%m%d_v??.cdf' # find the full remote path names using the trange remote_names = dailynames(file_format=pathformat, trange=trange) out_files = [] files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'], no_download=no_update) if files is not None: for file in files: out_files.append(file) out_files = sorted(out_files) if downloadonly: return out_files tvars = cdf_to_tplot(out_files, suffix=suffix, merge=True, get_support_data=get_support_data, varformat=varformat, notplot=notplot) if notplot: return tvars if time_clip: for new_var in tvars: tclip(new_var, trange[0], trange[1], suffix='') return tvars
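# Usage sketch (added for illustration; not part of the original module). With notplot=True
# the loader returns the CDF contents as a dictionary instead of registering tplot variables,
# which is useful for custom processing. Assumes the pyspedas.image.lena wrapper named in the
# docstring forwards these keyword arguments to load().
def _example_image_lena():
    import pyspedas
    # Return LENA K0 data as a dict keyed by variable name rather than creating tplot variables
    lena_data = pyspedas.image.lena(trange=['2004-11-5', '2004-11-6'], notplot=True)
    print(list(lena_data.keys()))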
def load(trange=['2013-11-5', '2013-11-6'], instrument='fgm', datatype='h0', suffix='', get_support_data=False, varformat=None, varnames=[], downloadonly=False, notplot=False, no_update=False, time_clip=False): """ This function loads data from the WIND mission; this function is not meant to be called directly; instead, see the wrappers: pyspedas.wind.mfi pyspedas.wind.swe pyspedas.wind.sms pyspedas.wind.threedp pyspedas.wind.waves pyspedas.wind.orbit """ if instrument == 'fgm': pathformat = 'mfi/mfi_' + datatype + '/%Y/wi_' + datatype + '_mfi_%Y%m%d_v??.cdf' elif instrument == 'swe': pathformat = 'swe/swe_' + datatype + '/%Y/wi_' + datatype + '_swe_%Y%m%d_v??.cdf' elif instrument == 'sms': pathformat = 'sms/' + datatype + '/sms_' + datatype + '/%Y/wi_' + datatype + '_sms_%Y%m%d_v??.cdf' elif instrument == 'waves': pathformat = 'waves/wav_' + datatype + '/%Y/wi_' + datatype + '_wav_%Y%m%d_v??.cdf' elif instrument == 'orbit': pathformat = 'orbit/' + datatype + '/%Y/wi_' + datatype.split('_')[1] + '_' + datatype.split('_')[0] + '_%Y%m%d_v??.cdf' elif instrument == '3dp': if datatype == '3dp_emfits_e0': pathformat = '3dp/' + datatype + '/%Y/wi_' + datatype.split('_')[1] + '_' + datatype.split('_')[2] + '_' + datatype.split('_')[0] + '_%Y%m%d_v??.cdf' else: pathformat = '3dp/' + datatype + '/%Y/wi_' + datatype.split('_')[1] + '_' + datatype.split('_')[0] + '_%Y%m%d_v??.cdf' # find the full remote path names using the trange remote_names = dailynames(file_format=pathformat, trange=trange) out_files = [] files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'], no_download=no_update, last_version=True) if files is not None: for file in files: out_files.append(file) out_files = sorted(out_files) if downloadonly: return out_files tvars = cdf_to_tplot(out_files, suffix=suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, notplot=notplot) if notplot: return tvars if time_clip: for new_var in tvars: tclip(new_var, trange[0], trange[1], suffix='') return tvars
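# Usage sketch (added for illustration; not part of the original module). The varnames keyword
# restricts loading to the listed CDF variables; 'BGSE' is assumed here to be the GSE magnetic
# field vector in the H0 MFI files. Assumes the pyspedas.wind.mfi wrapper named in the
# docstring forwards these keyword arguments to load().
def _example_wind_mfi():
    import pyspedas
    from pytplot import tplot
    # Load only the (assumed) GSE field vector from the H0 MFI CDFs and clip to the range
    mfi_vars = pyspedas.wind.mfi(trange=['2013-11-5', '2013-11-6'], datatype='h0',
                                 varnames=['BGSE'], time_clip=True)
    tplot(mfi_vars)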
def load(trange=['2019-02-01', '2019-02-02'], site=None, suffix='', get_support_data=False, varformat=None, varnames=[], downloadonly=False, notplot=False, no_update=False, time_clip=False): """ This function loads Magnetic Induction Coil Array (MICA) data; this function is not meant to be called directly; instead, see the wrapper: pyspedas.mica.induction """ if site is None: print('A valid MICA site code name must be entered.') print('Current site codes include: ') print('NAL, LYR, LOR, ISR, SDY, IQA, SNK, MCM, SPA, JBS, NEV, HAL, PG2[3,4,5]') return pathformat = site.upper() + '/%Y/%m/mica_ulf_' + site.lower() + '_%Y%m%d_v??.cdf' # find the full remote path names using the trange remote_names = dailynames(file_format=pathformat, trange=trange) out_files = [] files = download(remote_file=remote_names, remote_path=CONFIG['remote_data_dir'], local_path=CONFIG['local_data_dir'], no_download=no_update) if files is not None: for file in files: out_files.append(file) out_files = sorted(out_files) if downloadonly: return out_files tvars = cdf_to_tplot(out_files, suffix='_' + site.upper() + suffix, get_support_data=get_support_data, varformat=varformat, varnames=varnames, notplot=notplot) if notplot: return tvars if time_clip: for new_var in tvars: tclip(new_var, trange[0], trange[1], suffix='') return tvars
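# Usage sketch (added for illustration; not part of the original module). A site code from the
# list printed above is required, and the loader appends '_<SITE>' to every variable name.
# Assumes the pyspedas.mica.induction wrapper named in the docstring forwards these keyword
# arguments to load().
def _example_mica_induction():
    import pyspedas
    from pytplot import tplot
    # Load ULF induction-coil data from the NAL site for the default time range
    nal_vars = pyspedas.mica.induction(site='NAL', trange=['2019-02-01', '2019-02-02'],
                                       time_clip=True)
    tplot(nal_vars)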