def test_dailynames(self):
    """Test dailynames function.

    Uses assertEqual/assertIsNone rather than assertTrue(a == b) so a
    failure reports the differing values instead of just 'False is not true'.
    """
    # hourly resolution file names
    self.assertEqual(
        dailynames(trange=['2015-12-1', '2015-12-1/2:00'], hour_res=True),
        ['2015120100', '2015120101'])
    # daily resolution; end of range is exclusive
    self.assertEqual(dailynames(trange=['2015-12-1', '2015-12-3']),
                     ['20151201', '20151202'])
    # reversed range still yields the start day
    self.assertEqual(dailynames(trange=['2015-12-3', '2015-12-2']),
                     ['20151203'])
    # no trange supplied -> None
    self.assertIsNone(dailynames())
    # yearly file format
    self.assertEqual(
        dailynames(trange=['2015-12-3', '2019-12-2'], file_format='%Y'),
        ['2015', '2016', '2017', '2018', '2019'])
    # monthly file format
    self.assertEqual(
        dailynames(trange=['2015-1-1', '2015-3-2'], file_format='%Y%m'),
        ['201501', '201502', '201503'])
    # directory-style format
    self.assertEqual(
        dailynames(trange=['2015-1-1', '2015-3-2'], file_format='/%Y/%m/'),
        ['/2015/01/', '/2015/02/', '/2015/03/'])
    # custom resolution in seconds (hourly)
    self.assertEqual(
        dailynames(trange=['2015-1-1', '2015-1-1/3:00'], file_format='%H',
                   res=60.0),
        ['00', '01', '02'])
    # 10-minute resolution
    self.assertEqual(
        dailynames(trange=['2015-1-1/2:00', '2015-1-1/3:00'],
                   file_format='%M', res=600.),
        ['00', '10', '20', '30', '40', '50'])
def load(trange=['2019-02-01', '2019-02-02'],
         site=None,
         suffix='',
         get_support_data=False,
         varformat=None,
         downloadonly=False,
         notplot=False,
         no_update=False,
         time_clip=False):
    """
    This function loads Magnetic Induction Coil Array (MICA) data;
    this function is not meant to be called directly; instead, see the
    wrapper:
        pyspedas.mica.induction
    """
    # A site code is required to build the remote path; bail out early.
    # Fixed: compare to None with an identity check (`is None`), not `== None`.
    if site is None:
        print('A valid MICA site code name must be entered.')
        print('Current site codes include: ')
        print('NAL, LYR, LOR, ISR, SDY, IQA, SNK, MCM, SPA, JBS, NEV, HAL, PG2[3,4,5]')
        return

    pathformat = site.upper() + '/%Y/%m/mica_ulf_' + site.lower() + '_%Y%m%d_v??.cdf'

    # find the full remote path names using the trange
    remote_names = dailynames(file_format=pathformat, trange=trange)

    out_files = []

    files = download(remote_file=remote_names,
                     remote_path=CONFIG['remote_data_dir'],
                     local_path=CONFIG['local_data_dir'],
                     no_download=no_update)
    if files is not None:
        for file in files:
            out_files.append(file)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    # the site code is folded into the variable suffix so multiple sites
    # can be loaded side by side
    tvars = cdf_to_tplot(out_files,
                         suffix='_' + site.upper() + suffix,
                         merge=True,
                         get_support_data=get_support_data,
                         varformat=varformat,
                         notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
def load(trange=['2013-11-5', '2013-11-6'],
         datatype='1min',
         level='hro2',
         suffix='',
         get_support_data=False,
         get_ignore_data=False,
         varformat=None,
         varnames=[],
         downloadonly=False,
         notplot=False,
         no_update=False,
         time_clip=True):
    """
    This function loads OMNI (Combined 1AU IP Data; Magnetic and Solar
    Indices) data; this function is not meant to be called directly;
    instead, see the wrapper:
        pyspedas.omni.data
    """
    # Build the remote file-name pattern for the requested cadence.
    if 'min' in datatype:
        pathformat = (level + '_' + datatype + '/%Y/omni_' + level + '_'
                      + datatype + '_%Y%m01_v??.cdf')
    elif 'hour' in datatype:
        pathformat = 'hourly/%Y/omni2_h0_mrg1hr_%Y%m01_v??.cdf'
    else:
        raise TypeError("%r are invalid keyword arguments" % datatype)

    # expand the pattern over the requested time range
    remote_names = dailynames(file_format=pathformat, trange=trange)

    files = download(remote_file=remote_names,
                     remote_path=CONFIG['remote_data_dir'],
                     local_path=CONFIG['local_data_dir'],
                     no_download=no_update)

    out_files = sorted(files) if files is not None else []

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files, suffix=suffix,
                         get_support_data=get_support_data,
                         get_ignore_data=get_ignore_data,
                         varformat=varformat, varnames=varnames,
                         notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
def load(trange=['2018-11-5', '2018-11-6'],
         probe=['noaa19'],
         instrument='sem',
         datatype='*',
         suffix='',
         get_support_data=False,
         varformat=None,
         varnames=[],
         downloadonly=False,
         notplot=False,
         no_update=False,
         time_clip=False):
    """
    This function loads POES Space Environment Monitor data.
    This function is not meant to be called directly; instead, see the
    wrapper:
        pyspedas.poes.sem
    """
    # Guard against unsupported instruments up front; previously an unknown
    # instrument left `pathformat` unbound and raised a NameError below.
    if instrument != 'sem':
        print('Unsupported POES instrument: ' + str(instrument) + '; valid option: sem')
        return

    if not isinstance(probe, list):
        probe = [probe]

    out_files = []

    for prb in probe:
        pathformat = (prb + '/sem2_fluxes-2sec/%Y/' + prb
                      + '_poes-sem2_fluxes-2sec_%Y%m%d_v??.cdf')

        # find the full remote path names using the trange
        remote_names = dailynames(file_format=pathformat, trange=trange)

        files = download(remote_file=remote_names,
                         remote_path=CONFIG['remote_data_dir'],
                         local_path=CONFIG['local_data_dir'],
                         no_download=no_update)
        if files is not None:
            for file in files:
                out_files.append(file)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files, suffix=suffix,
                         get_support_data=get_support_data,
                         varformat=varformat, varnames=varnames,
                         notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
def load(trange=['2020-06-01', '2020-06-02'],
         instrument='mag',
         datatype='rtn-normal',
         mode=None,
         level='l2',
         suffix='',
         get_support_data=False,
         varformat=None,
         varnames=[],
         downloadonly=False,
         notplot=False,
         no_update=False,
         time_clip=False):
    """
    This function loads data from the Solar Orbiter mission; this function
    is not meant to be called directly; instead, see the wrappers:
        pyspedas.solo.mag
        pyspedas.solo.epd
        pyspedas.solo.rpw
        pyspedas.solo.swa
    """
    # common science-directory prefix shared by every instrument
    science_dir = instrument + '/science/' + level + '/' + datatype

    if instrument in ('mag', 'rpw'):
        pathformat = (science_dir + '/%Y/solo_' + level + '_' + instrument
                      + '-' + datatype + '_%Y%m%d_v??.cdf')
    elif instrument == 'epd':
        # EPD products are further split by mode
        pathformat = (science_dir + '/' + mode + '/%Y/solo_' + level + '_'
                      + instrument + '-' + datatype + '-' + mode
                      + '_%Y%m%d_v??.cdf')
    elif instrument == 'swa':
        if datatype == 'pas-eflux':
            pathformat = (science_dir + '/%Y/solo_' + level + '_'
                          + instrument + '-' + datatype + '_%Y%m%d_v??.cdf')

    # expand the pattern over the requested time range
    remote_names = dailynames(file_format=pathformat, trange=trange)

    out_files = []

    files = download(remote_file=remote_names,
                     remote_path=CONFIG['remote_data_dir'],
                     local_path=CONFIG['local_data_dir'],
                     no_download=no_update)
    if files is not None:
        out_files.extend(files)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files, suffix=suffix,
                         get_support_data=get_support_data,
                         varformat=varformat, varnames=varnames,
                         notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
def load(trange=['2013-11-5', '2013-11-6'],
         instrument='dcb',
         datatype='',
         level='l2',
         suffix='',
         get_support_data=False,
         varformat=None,
         downloadonly=False,
         notplot=False,
         no_update=False,
         time_clip=False):
    """
    This function loads data from the FAST mission; this function is not
    meant to be called directly; instead, see the wrappers:
        pyspedas.fast.dcb
        pyspedas.fast.acb
        pyspedas.fast.esa
        pyspedas.fast.teams
    """
    # daily files by default; DCB files are hourly
    file_resolution = 24 * 3600.

    if instrument == 'dcb':
        pathformat = ('dcf/' + level + '/' + instrument + '/%Y/%m/fast_hr_'
                      + instrument + '_%Y%m%d%H????_v??.cdf')
        file_resolution = 3600.
    elif instrument == 'acb':
        pathformat = 'acf/' + level + '/%Y/fa_' + level + '_acf_%Y%m%d_v??.cdf'
    elif instrument == 'esa':
        pathformat = (instrument + '/' + level + '/' + datatype
                      + '/%Y/%m/fa_' + instrument + '_' + level + '_'
                      + datatype + '_%Y%m%d??????_*_v??.cdf')
    elif instrument == 'teams':
        pathformat = 'teams/' + level + '/%Y/fa_' + level + '_tms_%Y%m%d_v??.cdf'

    # find the full remote path names using the trange
    remote_names = dailynames(file_format=pathformat, trange=trange,
                              res=file_resolution)

    out_files = []
    for remote_file in remote_names:
        files = download(remote_file=remote_file,
                         remote_path=CONFIG['remote_data_dir'],
                         local_path=CONFIG['local_data_dir'],
                         no_download=no_update)
        if files is not None:
            out_files.extend(files)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files, suffix=suffix, merge=True,
                         get_support_data=get_support_data,
                         varformat=varformat, notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
def load(trange=['2013-11-5', '2013-11-6'],
         instrument='reptile',
         datatype='flux',
         level='l2',
         suffix='',
         get_support_data=False,
         varformat=None,
         downloadonly=False,
         notplot=False,
         no_update=False,
         time_clip=False):
    """
    This function loads data from the CSSWE mission; this function is not
    meant to be called directly; instead, see the wrapper:
        pyspedas.csswe.reptile
    """
    if instrument == 'reptile':
        pathformat = (level + '/' + instrument + '/' + datatype
                      + '/%Y/csswe_' + instrument + '_6sec-' + datatype
                      + '-' + level + '_%Y%m%d_v??.cdf')

    # expand the file-name pattern over the requested time range
    remote_names = dailynames(file_format=pathformat, trange=trange)

    out_files = []
    for remote_file in remote_names:
        files = download(remote_file=remote_file,
                         remote_path=CONFIG['remote_data_dir'],
                         local_path=CONFIG['local_data_dir'],
                         no_download=no_update)
        if files is not None:
            out_files.extend(files)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files, suffix=suffix, merge=True,
                         get_support_data=get_support_data,
                         varformat=varformat, notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
def load(trange=None,
         resolution=10,
         dtype=None,
         no_download=False,
         downloadonly=False,
         out_type='np',
         save_pickle=False):
    """
    This function loads SECS/EICS data; this function is not meant to be
    called directly; instead, see the wrapper:
        pyspedas.secs.data

    Parameters
    ----------
    trange : list of str
        Time range of interest, passed through to dailynames.
    resolution : int
        Resolution (seconds) used to enumerate the unzipped .dat file names.
    dtype : str
        'EICS' or 'SECS'; any other value raises TypeError.
    no_download : bool
        If True, do not fetch files from the remote server.
    downloadonly : bool
        If True, return the list of downloaded archives without reading them.
    out_type : str
        Output format forwarded to read_data_files (e.g. 'np').
    save_pickle : bool
        Forwarded to read_data_files.
    """
    if dtype == 'EICS' or dtype == 'SECS':
        pathformat_prefix = dtype + '/%Y/%m/'
        pathformat_zip = pathformat_prefix + dtype + '%Y%m%d.zip'
        # .zip.gz archives exist for 2007 only
        pathformat_gz = pathformat_prefix + dtype + '%Y%m%d.zip.gz'  # only 2007!
        pathformat_unzipped = pathformat_prefix + '%d/' + dtype + '%Y%m%d_%H%M%S.dat'
    else:
        raise TypeError("%r are invalid keyword arguments" % dtype)

    # find the full remote path names using the trange
    remote_names = dailynames(file_format=pathformat_zip, trange=trange)
    remote_names_gz = dailynames(file_format=pathformat_gz, trange=trange)
    # slice [-15:-11] selects the YYYY of '...YYYYMMDD.zip.gz'; keep only
    # 2007, the sole year distributed as .zip.gz
    remote_names_gz = [s for s in remote_names_gz if s[-15:-11] == '2007']

    out_files = []
    out_files_zip = []

    files_zip = download(remote_file=remote_names,
                         remote_path=CONFIG['remote_data_dir'],
                         local_path=CONFIG['local_data_dir'],
                         no_download=no_download)
    files_gz = download(remote_file=remote_names_gz,
                        remote_path=CONFIG['remote_data_dir'],
                        local_path=CONFIG['local_data_dir'],
                        no_download=no_download)
    files_zip = files_zip + files_gz

    if files_zip is not None:
        for rf_zip_zero in files_zip:
            if rf_zip_zero.endswith('.gz'):
                # decompress 'foo.zip.gz' -> 'foo.zip' before extraction
                rf_zip = rf_zip_zero[0:-3]
                # unzip .gz file to .zip file
                with gzip.open(rf_zip_zero, 'rb') as f_in:
                    with open(rf_zip, 'wb') as f_out:
                        shutil.copyfileobj(f_in, f_out)
            elif rf_zip_zero.endswith('.zip'):
                rf_zip = rf_zip_zero
            else:
                rf_zip = rf_zip_zero
            out_files_zip.append(rf_zip)

            # Build the extraction target '<...>/%Y/MM/DD' from the date
            # embedded in the archive name ('...<DTYPE>YYYYMMDD.zip':
            # [-8:-6] is MM, [-6:-4] is DD).
            foldername_unzipped = rf_zip[0:-19] + rf_zip[-8:-6] + '/' + rf_zip[
                -6:-4]

            if not os.path.isdir(foldername_unzipped):
                logging.info('Start unzipping: ' + rf_zip + ' ------')
                with zipfile.ZipFile(rf_zip, 'r') as zip_ref:
                    zip_ref.extractall(rf_zip[0:-16])
                if not os.path.isdir(foldername_unzipped):
                    # for the case of unzipping directly without the %d folder made.
                    # make %d folder
                    os.makedirs(foldername_unzipped)
                    # move .dat files into the day folder
                    sourcepath = rf_zip[0:-16]
                    sourcefiles = os.listdir(sourcepath)
                    destinationpath = foldername_unzipped
                    logging.info('start to move files: --------------')
                    for file in sourcefiles:
                        # [-16:-4] is the '<DTYPE>YYYYMMDD' stamp of this archive
                        if rf_zip[-16:-4] in file and file.endswith('.dat'):
                            shutil.move(os.path.join(sourcepath, file),
                                        os.path.join(destinationpath, file))
            else:
                logging.info('Unzipped folder: ' + foldername_unzipped +
                             ' existed, skip unzipping ------')

    if files_zip is not None:
        for file in files_zip:
            out_files.append(file)
    out_files = sorted(out_files)

    if out_files_zip is not None:
        # de-duplicate archives that appeared as both .zip and .zip.gz
        out_files_zip = list(set(out_files_zip))
        out_files_zip = sorted(out_files_zip)

    if downloadonly:
        return out_files_zip  # out_files

    # enumerate the expected unzipped .dat names over the requested range
    remote_names_unzipped = dailynames(file_format=pathformat_unzipped,
                                       trange=trange,
                                       res=resolution)
    """
    files_unzipped = download(remote_file=remote_names_unzipped,
                              remote_path=CONFIG['remote_data_dir'],
                              local_path=CONFIG['local_data_dir'], no_download=True)
    """
    # keep only .dat names whose date stamp matches a downloaded archive
    remote_names_unzipped_existed = [
        rnud for rnud in remote_names_unzipped for ofz in out_files_zip
        if ofz[-16:-4] in rnud
    ]
    remote_names_unzipped = remote_names_unzipped_existed
    out_files_unzipped = [
        CONFIG['local_data_dir'] + rf_res for rf_res in remote_names_unzipped
    ]
    out_files_unzipped = sorted(out_files_unzipped)

    if out_files_unzipped == []:
        data_vars = []
    else:
        data_vars = pyspedas.secs.read_data_files(out_files=out_files_unzipped,
                                                  dtype=dtype,
                                                  out_type=out_type,
                                                  save_pickle=save_pickle)

    return data_vars  # tvars
def load(trange=['2018-11-5', '2018-11-6'],
         probe='1',
         instrument='lad',
         datatype='',
         suffix='',
         get_support_data=False,
         varformat=None,
         downloadonly=False,
         notplot=False,
         no_update=False,
         time_clip=False):
    """
    This function loads TWINS data; this function is not meant to be called
    directly; instead, see the wrappers:
        pyspedas.twins.lad
        pyspedas.twins.ephemeris
        pyspedas.twins.imager
    """
    if not isinstance(probe, list):
        probe = [probe]

    # probe designators appear as strings inside the remote paths
    probe = [str(prb) for prb in probe]

    out_files = []

    for prb in probe:
        spacecraft = 'twins' + prb
        if instrument == 'lad':
            pathformat = (spacecraft + '/' + instrument + '/%Y/' + spacecraft
                          + '_l1_lad_%Y%m%d_v??.cdf')
        elif instrument == 'imager':
            pathformat = (spacecraft + '/' + instrument + '/%Y/' + spacecraft
                          + '_l1_imager_%Y%m%d??_v??.cdf')
        elif instrument == 'ephemeris':
            pathformat = (spacecraft + '/' + instrument + '/' + datatype
                          + '/%Y/' + spacecraft + '_' + datatype
                          + '_def_%Y%m%d_v??.cdf')

        # find the full remote path names using the trange
        remote_names = dailynames(file_format=pathformat, trange=trange)

        for remote_file in remote_names:
            files = download(remote_file=remote_file,
                             remote_path=CONFIG['remote_data_dir'],
                             local_path=CONFIG['local_data_dir'],
                             no_download=no_update)
            if files is not None:
                out_files.extend(files)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files, suffix=suffix, merge=True,
                         get_support_data=get_support_data,
                         varformat=varformat, notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
def load(trange=['2013-11-5', '2013-11-6'],
         instrument='fgm',
         datatype='k0',
         suffix='',
         get_support_data=False,
         varformat=None,
         downloadonly=False,
         notplot=False,
         no_update=False,
         varnames=[],
         time_clip=False):
    """
    This function loads data from the ACE mission; this function is not
    meant to be called directly; instead, see the wrappers:
        pyspedas.ace.mfi
        pyspedas.ace.swe
        pyspedas.ace.epam
        pyspedas.ace.cris
        pyspedas.ace.sis
        pyspedas.ace.uleis
        pyspedas.ace.sepica
        pyspedas.ace.swics
    """
    # (remote subdirectory, file-name abbreviation) per instrument code;
    # SWICS is handled separately because its file name reorders the datatype.
    path_parts = {
        'fgm': ('mag', 'mfi'),
        'swe': ('swepam', 'swe'),
        'epm': ('epam', 'epm'),
        'cris': ('cris', 'cris'),
        'sis': ('sis', 'sis'),
        'ule': ('uleis', 'ule'),
        'sep': ('sepica', 'sep'),
    }

    if instrument == 'swics':
        filename_dtype = datatype.split('_')[1] + '_' + datatype.split('_')[0]
        pathformat = ('swics/level_2_cdaweb/' + datatype + '/%Y/ac_'
                      + filename_dtype + '_%Y%m%d_v??.cdf')
    elif instrument in path_parts:
        subdir, abbrev = path_parts[instrument]
        pathformat = (subdir + '/level_2_cdaweb/' + abbrev + '_' + datatype
                      + '/%Y/ac_' + datatype + '_' + abbrev
                      + '_%Y%m%d_v??.cdf')

    # find the full remote path names using the trange
    remote_names = dailynames(file_format=pathformat, trange=trange)

    out_files = []

    files = download(remote_file=remote_names,
                     remote_path=CONFIG['remote_data_dir'],
                     local_path=CONFIG['local_data_dir'],
                     no_download=no_update)
    if files is not None:
        out_files.extend(files)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files, suffix=suffix,
                         get_support_data=get_support_data,
                         varformat=varformat, varnames=varnames,
                         notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
def load(trange=['1997-01-03', '1997-01-04'],
         instrument='mfe',
         datatype='k0',
         suffix='',
         get_support_data=False,
         varformat=None,
         varnames=[],
         downloadonly=False,
         notplot=False,
         no_update=False,
         time_clip=False):
    """
    This function loads data from the Polar mission; this function is not
    meant to be called directly; instead, see the wrappers:
        pyspedas.polar.mfe
        pyspedas.polar.efi
        pyspedas.polar.pwi
        pyspedas.polar.hydra
        pyspedas.polar.tide
        pyspedas.polar.timas
        pyspedas.polar.cammice
        pyspedas.polar.ceppad
        pyspedas.polar.uvi
        pyspedas.polar.vis
        pyspedas.polar.pixie
        pyspedas.polar.orbit
    """
    # file-name abbreviation used for each instrument's CDF files
    file_abbrev = {
        'mfe': 'mfe', 'efi': 'efi', 'pwi': 'pwi', 'hydra': 'hyd',
        'tide': 'tid', 'timas': 'tim', 'cammice': 'cam', 'ceppad': 'cep',
        'uvi': 'uvi', 'vis': 'vis', 'pixie': 'pix',
    }

    if instrument in file_abbrev:
        pathformat = (instrument + '/' + instrument + '_' + datatype
                      + '/%Y/po_' + datatype + '_' + file_abbrev[instrument]
                      + '_%Y%m%d_v??.cdf')
    elif instrument == 'spha':
        # spin-phase (orbit/attitude) data live under the 'orbit' directory
        pathformat = ('orbit/' + instrument + '_' + datatype + '/%Y/po_'
                      + datatype + '_' + instrument + '_%Y%m%d_v??.cdf')

    # find the full remote path names using the trange
    remote_names = dailynames(file_format=pathformat, trange=trange)

    out_files = []

    files = download(remote_file=remote_names,
                     remote_path=CONFIG['remote_data_dir'],
                     local_path=CONFIG['local_data_dir'],
                     no_download=no_update)
    if files is not None:
        out_files.extend(files)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files, suffix=suffix,
                         get_support_data=get_support_data,
                         varformat=varformat, varnames=varnames,
                         notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
def load(trange=['2013-11-5', '2013-11-6'],
         probe='a',
         instrument='mag',
         level='l2',
         datatype='8hz',
         coord='RTN',
         suffix='',
         get_support_data=False,
         varformat=None,
         downloadonly=False,
         notplot=False,
         no_update=False,
         time_clip=False):
    """
    This function loads data from the STEREO mission; this function is not
    meant to be called directly; instead, see the wrappers:
        pyspedas.stereo.mag
        pyspedas.stereo.plastic
    """
    out_files = []

    if not isinstance(probe, list):
        probe = [probe]

    # 32 Hz MAG data are stored in the burst ('B') files
    burst = 'B' if datatype == '32hz' else ''

    for prb in probe:
        if prb == 'a':
            direction = 'ahead'
        elif prb == 'b':
            direction = 'behind'

        if instrument == 'mag':
            pathformat = ('impact/level1/' + direction + '/mag/' + coord
                          + '/%Y/%m/ST' + prb.upper() + '_L1_MAG' + burst
                          + '_' + coord + '_%Y%m%d_V??.cdf')
        elif instrument == 'plastic':
            # PLASTIC files are served from the STEREO science center
            CONFIG['remote_data_dir'] = 'http://stereo-ssc.nascom.nasa.gov/data/ins_data/'
            if level == 'l2':
                pathformat = ('plastic/level2/Protons/Derived_from_1D_Maxwellian/'
                              + direction + '/' + datatype + '/%Y/ST'
                              + prb.upper() + '_L2_PLA_1DMax_' + datatype
                              + '_%Y%m%d_V??.cdf')

        # find the full remote path names using the trange
        remote_names = dailynames(file_format=pathformat, trange=trange)

        for remote_file in remote_names:
            files = download(remote_file=remote_file,
                             remote_path=CONFIG['remote_data_dir'],
                             local_path=CONFIG['local_data_dir'],
                             no_download=no_update)
            if files is not None:
                out_files.extend(files)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files, suffix=suffix, merge=True,
                         get_support_data=get_support_data,
                         varformat=varformat, notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
def _make_SECS_plots(data=None, dtime=None, contour_den=8, s_loc=False):
    """
    Make a contour plot of SECS current amplitudes on a North-hemisphere map.

    @Parameter: dtime input as a string
    @Parameter: s_loc input as a bool, which means the locations of the virtual stations.
    """
    dtype = 'SECS'

    # make sure the output directory for plots exists
    if not os.path.exists(CONFIG['plots_dir']):
        os.makedirs(CONFIG['plots_dir'])
    dtime_range = [dtime, dtime]
    pathformat_prefix = dtype + '/%Y/%m/'
    pathformat_unzipped = pathformat_prefix + '%d/' + dtype + '%Y%m%d_%H%M%S.dat'
    # resolve the local .dat file name(s) for the single requested time
    filename_unzipped = dailynames(file_format=pathformat_unzipped,
                                   trange=dtime_range,
                                   res=10)
    out_files_unzipped = [
        CONFIG['local_data_dir'] + rf_res for rf_res in filename_unzipped
    ]
    Data_Days_time = read_data_files(out_files=out_files_unzipped,
                                     dtype=dtype,
                                     out_type='df')

    # symmetric contour levels around zero, based on the largest |J|
    J_comp = Data_Days_time['J']
    Jc_max, Jc_min = J_comp.max(), J_comp.min()
    Jcm_abs = max(abs(Jc_max), abs(Jc_min))
    contour_density = np.linspace(-Jcm_abs, Jcm_abs, num=contour_den)

    tp = dtime
    # compact 'YYYYMMDD_HHMMSS' stamp built from the dtime string
    # NOTE(review): datetime_tp is currently unused
    datetime_tp = tp[0:4] + tp[5:7] + tp[8:10] + '_' + tp[11:13] + tp[
        14:16] + tp[17:19]

    lon = Data_Days_time['longitude']
    lat = Data_Days_time['latitude']
    J = Data_Days_time['J']
    lon = lon.to_numpy()
    lat = lat.to_numpy()
    J = J.to_numpy()

    # plot 1: contour plot
    # plot map ground (North hemisphere)
    fig = plt.figure(figsize=(8, 8))
    # Lambert conformal projection centered on North America
    m = Basemap(projection='lcc',
                resolution='c',
                width=8E6,
                height=8E6,
                lat_0=60,
                lon_0=-100)
    # draw coastlines, country boundaries, fill continents.
    m.drawcoastlines(linewidth=0.25)
    m.drawcountries(linewidth=0.25)
    m.fillcontinents(color='None', lake_color='None')
    # draw the edge of the map projection region (the projection limb)
    m.drawmapboundary(fill_color=None)
    m.drawlsmask()
    m.shadedrelief()
    # draw parallels and meridians.
    # label parallels on right and top
    # meridians on bottom and left
    parallels = np.arange(0., 81, 10.)
    m.drawparallels(parallels, labels=[False, True, True, False])
    meridians = np.arange(10., 351., 20.)
    m.drawmeridians(meridians, labels=[True, False, False, True])

    # day/night terminator for the requested time
    date_nightshade = datetime.strptime(dtime, '%Y-%m-%d/%H:%M:%S')
    delta = 0.25
    lons_dd, lats_dd, tau, dec = daynight_terminator(date_nightshade, delta,
                                                     m.lonmin, m.lonmax)
    xy = [lons_dd, lats_dd]
    xy = np.array(xy)
    xb, yb = xy[0], xy[1]
    m.plot(xb, yb, marker=None, color='b', latlon=True)

    # Plot the noon-midnight line.
    # NOTE(review): n_interval/ni_half/ni_otherhalf are currently unused
    n_interval = len(lons_dd)
    ni_half = int(np.floor(len(lons_dd) / 2))
    ni_otherhalf = n_interval - ni_half
    noon_midnight = noon_midnight_meridian(dtime, delta)
    m.plot(noon_midnight['lons_noon'],
           noon_midnight['lats_noon'],
           marker=None,
           color='deepskyblue',
           latlon=True)  # noon semi-circle
    m.plot(noon_midnight['lons_midnight'],
           noon_midnight['lats_midnight'],
           marker=None,
           color='k',
           latlon=True)  # midnight semi-circle

    draw_map(m)

    # filled contours of J, diverging colormap centered on zero
    norm_cb = CenteredNorm()
    ctrf = m.contourf(lon,
                      lat,
                      J,
                      contour_density,
                      latlon=True,
                      tri=True,
                      cmap=plt.get_cmap('seismic', 20),
                      norm=norm_cb)
    # optionally mark the virtual-station locations
    if s_loc:
        m.scatter(lon, lat, latlon=True, marker='*', c='black')

    cb = m.colorbar(matplotlib.cm.ScalarMappable(norm=norm_cb,
                                                 cmap='seismic'),
                    pad='15%')
    cb.set_label(r'$\mathit{J} \ (mA/m)$')
    ax_cb = cb.ax
    text = ax_cb.yaxis.label
    font_cb = matplotlib.font_manager.FontProperties(family='times new roman',
                                                     style='italic',
                                                     size=20)
    text.set_font_properties(font_cb)
    plt.title(label='SECS ' + tp, fontsize=20, color="black", pad=20)
    plt.tight_layout()
    plt.savefig(CONFIG['plots_dir'] + 'SECS' + '_'
                + date_nightshade.strftime('%Y%m%d%H%M%S') + '.jpeg')
    plt.show()
    print('SECS plots completed!')
    return
def mms_load_data_spdf(trange=['2015-10-16', '2015-10-17'], probe='1',
                       data_rate='srvy', level='l2', instrument='fgm',
                       datatype='', varformat=None, suffix='',
                       get_support_data=False, time_clip=False, no_update=False,
                       center_measurement=False, available=False, notplot=False,
                       latest_version=False, major_version=False,
                       min_version=None, cdf_version=None):
    """
    Load MMS data from SPDF; this function is not meant to be called
    directly; instead, use the instrument-specific wrappers.

    Returns the list of tplot variable names created.
    """
    tvars_created = []

    # normalize scalar arguments to lists so the nested loops work uniformly
    if not isinstance(probe, list):
        probe = [probe]
    if not isinstance(data_rate, list):
        data_rate = [data_rate]
    if not isinstance(level, list):
        level = [level]
    if not isinstance(datatype, list):
        datatype = [datatype]

    for prb in probe:
        for lvl in level:
            for drate in data_rate:
                for dtype in datatype:
                    # Recompute the file-name time format on every pass.
                    # This fixes two defects in the previous revision:
                    #   1. `data_rate == 'brst'` could never be True because
                    #      data_rate had already been wrapped in a list above.
                    #   2. time_format was set once before the loops, so a
                    #      branch below that overwrites it (fpi/feeps/hpca)
                    #      leaked its value into subsequent iterations.
                    if drate == 'brst':
                        time_format = '%Y%m%d??????'
                    else:
                        time_format = '%Y%m%d'

                    remote_path = 'mms' + prb + '/' + instrument + '/' + drate + '/' + lvl + '/'

                    if instrument == 'fgm':
                        pathformat = remote_path + '%Y/%m/mms' + prb + '_fgm_' + drate + '_' + lvl + '_' + time_format + '_v*.cdf'
                    elif instrument == 'aspoc':
                        pathformat = remote_path + '%Y/%m/mms' + prb + '_aspoc_' + drate + '_' + lvl + '_' + time_format + '_v*.cdf'
                    elif instrument == 'edi':
                        pathformat = remote_path + dtype + '/%Y/%m/mms' + prb + '_edi_' + drate + '_' + lvl + '_' + dtype + '_' + time_format + '_v*.cdf'
                    elif instrument == 'fpi':
                        # non-burst FPI files are stamped at 2-hour boundaries
                        if drate != 'brst':
                            time_format = '%Y%m%d??0000'
                        pathformat = remote_path + dtype + '/%Y/%m/mms' + prb + '_fpi_' + drate + '_' + lvl + '_' + dtype + '_' + time_format + '_v*.cdf'
                    elif instrument == 'epd-eis':
                        pathformat = remote_path + dtype + '/%Y/%m/mms' + prb + '_epd-eis_' + drate + '_' + lvl + '_' + dtype + '_' + time_format + '_v*.cdf'
                    elif instrument == 'feeps':
                        # non-burst FEEPS files are stamped at midnight
                        if drate != 'brst':
                            time_format = '%Y%m%d000000'
                        pathformat = remote_path + dtype + '/%Y/%m/mms' + prb + '_feeps_' + drate + '_' + lvl + '_' + dtype + '_' + time_format + '_v*.cdf'
                    elif instrument == 'hpca':
                        # HPCA files always carry a full time stamp
                        time_format = '%Y%m%d??????'
                        pathformat = remote_path + dtype + '/%Y/%m/mms' + prb + '_hpca_' + drate + '_' + lvl + '_' + dtype + '_' + time_format + '_v*.cdf'
                    elif instrument == 'mec':
                        pathformat = remote_path + dtype + '/%Y/%m/mms' + prb + '_mec_' + drate + '_' + lvl + '_' + dtype + '_' + time_format + '_v*.cdf'
                    elif instrument == 'scm':
                        pathformat = remote_path + dtype + '/%Y/%m/mms' + prb + '_scm_' + drate + '_' + lvl + '_' + dtype + '_' + time_format + '_v*.cdf'
                    elif instrument == 'dsp':
                        pathformat = remote_path + dtype + '/%Y/%m/mms' + prb + '_dsp_' + drate + '_' + lvl + '_' + dtype + '_' + time_format + '_v*.cdf'
                    elif instrument == 'edp':
                        pathformat = remote_path + dtype + '/%Y/%m/mms' + prb + '_edp_' + drate + '_' + lvl + '_' + dtype + '_' + time_format + '_v*.cdf'

                    # find the full remote path names using the trange
                    remote_names = dailynames(file_format=pathformat,
                                              trange=trange)

                    out_files = []

                    for remote_file in remote_names:
                        # NOTE(review): no_update is accepted but not
                        # forwarded to download() here — confirm intended.
                        files = download(remote_file=remote_file,
                                         remote_path=CONFIG['remote_data_dir'],
                                         local_path=CONFIG['local_data_dir'])
                        if files is not None:
                            for file in files:
                                out_files.append(file)

                    out_files = sorted(out_files)

                    # keep only the requested CDF version(s)
                    filtered_out_files = mms_file_filter(
                        out_files,
                        latest_version=latest_version,
                        major_version=major_version,
                        min_version=min_version,
                        version=cdf_version)

                    tvars = cdf_to_tplot(filtered_out_files,
                                         merge=True,
                                         varformat=varformat,
                                         get_support_data=get_support_data,
                                         suffix=suffix,
                                         center_measurement=center_measurement,
                                         notplot=notplot)

                    if tvars is not None:
                        tvars_created.extend(tvars)

    if time_clip:
        for new_var in tvars_created:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars_created
def load(trange=['2013-11-5', '2013-11-6'],
         instrument='mgf',
         datatype='k0',
         suffix='',
         get_support_data=False,
         varformat=None,
         varnames=[],
         downloadonly=False,
         notplot=False,
         no_update=False,
         time_clip=False):
    """
    This function loads data from the Geotail mission; this function is not
    meant to be called directly; instead, see the wrappers:
        pyspedas.geotail.mgf
        pyspedas.geotail.efd
        pyspedas.geotail.lep
        pyspedas.geotail.cpi
        pyspedas.geotail.epi
        pyspedas.geotail.pwi
    """
    if instrument == 'mgf':
        if datatype == 'k0':
            pathformat = 'mgf/mgf_k0/%Y/ge_' + datatype + '_mgf_%Y%m%d_v??.cdf'
        elif datatype in ('eda3sec', 'edb3sec'):
            pathformat = ('mgf/' + datatype + '_mgf/%Y/ge_' + datatype
                          + '_mgf_%Y%m%d_v??.cdf')
    elif instrument == 'lep':
        if datatype == 'k0':
            pathformat = 'lep/lep_k0/%Y/ge_' + datatype + '_lep_%Y%m%d_v??.cdf'
    elif instrument == 'epi':
        # EPI files live under the 'epic' directory
        pathformat = ('epic/' + instrument + '_' + datatype + '/%Y/ge_'
                      + datatype + '_' + instrument + '_%Y%m%d_v??.cdf')
    elif instrument in ('efd', 'cpi', 'pwi'):
        # these instruments share the same directory/file-name pattern
        pathformat = (instrument + '/' + instrument + '_' + datatype
                      + '/%Y/ge_' + datatype + '_' + instrument
                      + '_%Y%m%d_v??.cdf')

    # find the full remote path names using the trange
    remote_names = dailynames(file_format=pathformat, trange=trange)

    out_files = []

    files = download(remote_file=remote_names,
                     remote_path=CONFIG['remote_data_dir'],
                     local_path=CONFIG['local_data_dir'],
                     no_download=no_update)
    if files is not None:
        out_files.extend(files)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files, suffix=suffix,
                         get_support_data=get_support_data,
                         varformat=varformat, varnames=varnames,
                         notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
def load(trange=['2018-11-5', '2018-11-6'],
         probe='1',
         instrument='fgm',
         datatype='up',
         suffix='',
         get_support_data=False,
         varformat=None,
         varnames=[],
         downloadonly=False,
         notplot=False,
         no_update=False,
         time_clip=False):
    """
    Load data from the Cluster mission.

    This function is not meant to be called directly; instead, see the
    wrappers:
        pyspedas.cluster.fgm
        pyspedas.cluster.aspoc
        pyspedas.cluster.cis
        pyspedas.cluster.dwp
        pyspedas.cluster.edi
        pyspedas.cluster.efw
        pyspedas.cluster.peace
        pyspedas.cluster.rapid
        pyspedas.cluster.staff
        pyspedas.cluster.wbd
        pyspedas.cluster.whi

    Returns
    -------
    List of tplot variables created; or the list of local files when
    downloadonly=True; or a dict of the data when notplot=True.
    """
    if not isinstance(probe, list):
        probe = [probe]

    probe = [str(prb) for prb in probe]  # these will need to be strings from now on

    out_files = []
    res = 24 * 3600

    # note: can't use last_version with WBD data due to using wild cards
    # for the times (and not just in the version)
    last_version = instrument != 'wbd'

    # several instruments use an abbreviated name in the data paths
    short_names = {'aspoc': 'asp', 'peace': 'pea', 'rapid': 'rap', 'staff': 'sta'}

    for prb in probe:
        if instrument == 'fgm' and datatype == 'cp':
            pathformat = 'c' + prb + '/cp/%Y/c' + prb + '_cp_fgm_spin_%Y%m%d_v??.cdf'
        elif instrument == 'wbd':
            # WBD files are 10-minute files with the time in the name
            pathformat = 'c' + prb + '/' + instrument + '/%Y/%m/c' + prb + '_' + datatype + '_' + instrument + '_%Y%m%d%H%M_v??.cdf'
            res = 600.0
        elif instrument in ('fgm', 'aspoc', 'cis', 'dwp', 'edi', 'efw',
                            'peace', 'rapid', 'staff', 'whi'):
            # the remaining instruments share the same layout, modulo the
            # abbreviated instrument name used in some paths (previously
            # this was five copies of the identical branch)
            inst_dir = short_names.get(instrument, instrument)
            pathformat = 'c' + prb + '/' + datatype + '/' + inst_dir + '/%Y/c' + prb + '_' + datatype + '_' + inst_dir + '_%Y%m%d_v??.cdf'

        # find the full remote path names using the trange
        remote_names = dailynames(file_format=pathformat, trange=trange, res=res)

        files = download(remote_file=remote_names,
                         remote_path=CONFIG['remote_data_dir'],
                         local_path=CONFIG['local_data_dir'],
                         no_download=no_update,
                         last_version=last_version)
        if files is not None:
            for file in files:
                out_files.append(file)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files,
                         suffix=suffix,
                         get_support_data=get_support_data,
                         varformat=varformat,
                         varnames=varnames,
                         notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
def mms_load_data_spdf(trange=['2015-10-16', '2015-10-17'], probe='1',
                       data_rate='srvy', level='l2', instrument='fgm',
                       datatype='', varformat=None, suffix='',
                       get_support_data=False, time_clip=False, no_update=False,
                       center_measurement=False, available=False, notplot=False,
                       latest_version=False, major_version=False,
                       min_version=None, cdf_version=None, varnames=[]):
    """
    Load MMS data from NASA SPDF into pyTplot variables.

    This function is not meant to be called directly. Please see the
    individual load routines for documentation and use.

    Returns
    -------
    List of the tplot variables created.

    Notes
    -----
    Fix relative to the previous version: for burst data the 5-minute pad
    on the start time was applied by mutating `trange` inside the datatype
    loop, so each additional datatype shifted the start another 300 s
    earlier (and the final time_clip used the padded range). The pad is
    now computed into a local time range per iteration.
    """
    tvars_created = []

    if not isinstance(probe, list):
        probe = [probe]
    if not isinstance(data_rate, list):
        data_rate = [data_rate]
    if not isinstance(level, list):
        level = [level]
    if not isinstance(datatype, list):
        datatype = [datatype]

    for prb in probe:
        for lvl in level:
            for drate in data_rate:
                if drate == 'brst':
                    time_format = '%Y%m%d%H%M??'
                    file_res = 60.
                else:
                    time_format = '%Y%m%d'
                    file_res = 24 * 3600.

                for dtype in datatype:
                    remote_path = 'mms' + prb + '/' + instrument + '/' + drate + '/' + lvl + '/'

                    # some instruments use non-daily time stamps in the
                    # file names outside of burst mode
                    if instrument == 'fpi' and drate != 'brst':
                        time_format = '%Y%m%d??0000'
                    elif instrument == 'feeps' and drate != 'brst':
                        time_format = '%Y%m%d000000'
                    elif instrument == 'hpca':
                        time_format = '%Y%m%d??????'

                    if instrument in ('fgm', 'aspoc'):
                        # these instruments have no datatype component in the path
                        pathformat = remote_path + '%Y/%m/mms' + prb + '_' + instrument + '_' + drate + '_' + lvl + '_' + time_format + '_v*.cdf'
                    elif instrument in ('edi', 'fpi', 'epd-eis', 'feeps',
                                        'hpca', 'mec', 'scm', 'dsp', 'edp'):
                        pathformat = remote_path + dtype + '/%Y/%m/mms' + prb + '_' + instrument + '_' + drate + '_' + lvl + '_' + dtype + '_' + time_format + '_v*.cdf'

                    # pad the start of the time range by 5 minutes for burst
                    # data; done into a local copy so trange isn't mutated
                    if drate == 'brst':
                        if isinstance(trange[0], float):
                            start = trange[0] - 300.
                        else:
                            start = time_double(trange[0]) - 300.
                        dl_trange = [start, trange[1]]
                    else:
                        dl_trange = trange

                    # find the full remote path names using the trange
                    remote_names = dailynames(file_format=pathformat,
                                              trange=dl_trange, res=file_res)

                    out_files = []

                    files = download(remote_file=remote_names,
                                     remote_path=CONFIG['remote_data_dir'],
                                     local_path=CONFIG['local_data_dir'])
                    if files is not None:
                        for file in files:
                            out_files.append(file)

                    out_files = sorted(out_files)

                    filtered_out_files = mms_file_filter(
                        out_files,
                        latest_version=latest_version,
                        major_version=major_version,
                        min_version=min_version,
                        version=cdf_version)

                    tvars = cdf_to_tplot(filtered_out_files,
                                         varformat=varformat,
                                         varnames=varnames,
                                         get_support_data=get_support_data,
                                         suffix=suffix,
                                         center_measurement=center_measurement,
                                         notplot=notplot)

                    if tvars is not None:
                        tvars_created.extend(tvars)

    if time_clip:
        for new_var in tvars_created:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars_created
def load(trange=['2018-11-5', '2018-11-6'],
         probe='a',
         instrument='emfisis',
         level='l3',
         datatype='magnetometer',
         suffix='',
         cadence='4sec',  # for EMFISIS mag data
         coord='sm',  # for EMFISIS mag data
         wavetype='waveform',  # for EMFISIS waveform data
         rel='rel04',  # for ECT data
         get_support_data=False,
         varformat=None,
         varnames=[],
         downloadonly=False,
         notplot=False,
         no_update=False,
         time_clip=False):
    """
    Load Van Allen Probes (RBSP) data.

    This function is not meant to be called directly; instead, see the
    wrappers:
        pyspedas.rbsp.emfisis
        pyspedas.rbsp.rbspice
        pyspedas.rbsp.efw
        pyspedas.rbsp.mageis
        pyspedas.rbsp.hope
        pyspedas.rbsp.rept
        pyspedas.rbsp.rps
    """
    if not isinstance(probe, list):
        probe = [probe]

    out_files = []

    for prb in probe:
        # build the remote file-name template for this probe/instrument
        if instrument == 'emfisis':
            if datatype in ('density', 'housekeeping', 'wna-survey'):
                pathformat = 'rbsp' + prb + '/' + level + '/' + instrument + '/' + datatype + '/%Y/rbsp-' + prb + '_' + datatype + '_' + instrument + '-' + level + '_%Y%m%d_v*.cdf'
            elif datatype in ('wfr', 'hfr'):
                pathformat = 'rbsp' + prb + '/' + level + '/' + instrument + '/' + datatype + '/' + wavetype + '/%Y/rbsp-' + prb + '_' + datatype + '-' + wavetype + '_' + instrument + '-' + level + '_%Y%m%d*_v*.cdf'
            elif level == 'l2' and datatype == 'magnetometer':
                pathformat = 'rbsp' + prb + '/' + level + '/' + instrument + '/' + datatype + '/uvw/%Y/rbsp-' + prb + '_' + datatype + '_uvw_' + instrument + '-' + level + '_%Y%m%d*_v*.cdf'
            else:
                pathformat = 'rbsp' + prb + '/' + level + '/' + instrument + '/' + datatype + '/' + cadence + '/' + coord + '/%Y/rbsp-' + prb + '_' + datatype + '_' + cadence + '-' + coord + '_' + instrument + '-' + level + '_%Y%m%d_v*.cdf'
        elif instrument == 'rbspice':
            pathformat = 'rbsp' + prb + '/' + level + '/' + instrument + '/' + datatype + '/%Y/rbsp-' + prb + '-' + instrument + '_lev-' + str(level[-1]) + '?' + datatype + '_%Y%m%d_v*.cdf'
        elif instrument == 'efw':
            if level == 'l3':
                pathformat = 'rbsp' + prb + '/' + level + '/' + instrument + '/%Y/rbsp' + prb + '_' + instrument + '-' + level + '_%Y%m%d_v??.cdf'
            else:
                pathformat = 'rbsp' + prb + '/' + level + '/' + instrument + '/' + datatype + '/%Y/rbsp' + prb + '_' + instrument + '-' + level + '_' + datatype + '_%Y%m%d_v??.cdf'
        elif instrument == 'mageis':
            pathformat = 'rbsp' + prb + '/' + level + '/ect/' + instrument + '/sectors/' + rel + '/%Y/rbsp' + prb + '_' + rel + '_ect-mageis-' + level + '_%Y%m%d_v*.cdf'
        elif instrument == 'hope':
            if datatype == 'moments':
                pathformat = 'rbsp' + prb + '/' + level + '/ect/' + instrument + '/' + datatype + '/' + rel + '/%Y/rbsp' + prb + '_' + rel + '_ect-hope-mom-' + level + '_%Y%m%d_v*.cdf'
            elif datatype == 'pitchangle':
                pathformat = 'rbsp' + prb + '/' + level + '/ect/' + instrument + '/' + datatype + '/' + rel + '/%Y/rbsp' + prb + '_' + rel + '_ect-hope-pa-' + level + '_%Y%m%d_v*.cdf'
            elif datatype == 'spinaverage':
                pathformat = 'rbsp' + prb + '/' + level + '/ect/' + instrument + '/' + datatype + '/' + rel + '/%Y/rbsp' + prb + '_' + rel + '_ect-hope-sci-' + level + 'sa_%Y%m%d_v*.cdf'
        elif instrument == 'rept':
            pathformat = 'rbsp' + prb + '/' + level + '/ect/' + instrument + '/sectors/' + rel + '/%Y/rbsp' + prb + '_' + rel + '_ect-rept-sci-' + level + '_%Y%m%d_v*.cdf'
        elif instrument == 'rps':
            if datatype == 'rps-1min':
                pathformat = 'rbsp' + prb + '/' + level + '/rps/psbr-rps-1min/%Y/rbsp' + prb + '_' + level + '-1min_psbr-rps_%Y%m%d_v*.cdf'
            elif datatype == 'rps':
                pathformat = 'rbsp' + prb + '/' + level + '/rps/psbr-rps/%Y/rbsp' + prb + '_' + level + '_psbr-rps_%Y%m%d_v*.cdf'

        # find the full remote path names using the trange
        remote_names = dailynames(file_format=pathformat, trange=trange)

        files = download(remote_file=remote_names,
                         remote_path=CONFIG['remote_data_dir'],
                         local_path=CONFIG['local_data_dir'],
                         no_download=no_update)
        if files is not None:
            for file in files:
                out_files.append(file)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files,
                         suffix=suffix,
                         get_support_data=get_support_data,
                         varformat=varformat,
                         varnames=varnames,
                         notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
def load(trange=['1998-04-06', '1998-04-07'],
         instrument='mam',
         datatype='pp',
         suffix='',
         get_support_data=False,
         varformat=None,
         varnames=[],
         downloadonly=False,
         notplot=False,
         no_update=False,
         time_clip=False):
    """
    Load data from the Equator-S mission.

    This function is not meant to be called directly; instead, see the
    wrappers:
        pyspedas.equator_s.mam
        pyspedas.equator_s.edi
        pyspedas.equator_s.esa (3DA)
        pyspedas.equator_s.epi
        pyspedas.equator_s.ici
        pyspedas.equator_s.pcd
        pyspedas.equator_s.sfd

    Returns
    -------
    List of tplot variables created; or the list of local files when
    downloadonly=True; or a dict of the data when notplot=True.
    """
    # every Equator-S instrument shares the same directory layout and file
    # naming convention, so a single template covers all of them
    # (previously this was seven copies of the identical branch)
    if instrument in ('mam', 'edi', '3da', 'epi', 'ici', 'pcd', 'sfd'):
        pathformat = datatype + '/' + instrument + '/%Y/eq_' + datatype + '_' + instrument + '_%Y%m%d_v??.cdf'

    # find the full remote path names using the trange
    remote_names = dailynames(file_format=pathformat, trange=trange)

    out_files = []

    files = download(remote_file=remote_names,
                     remote_path=CONFIG['remote_data_dir'],
                     local_path=CONFIG['local_data_dir'],
                     no_download=no_update)
    if files is not None:
        for file in files:
            out_files.append(file)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files,
                         suffix=suffix,
                         get_support_data=get_support_data,
                         varformat=varformat,
                         varnames=varnames,
                         notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
def load(trange=['2009-01-01', '2009-01-02'],
         instrument='vhm',
         datatype='1min',
         suffix='',
         get_support_data=False,
         varformat=None,
         varnames=[],
         downloadonly=False,
         notplot=False,
         no_update=False,
         time_clip=False):
    """
    This function is not meant to be called directly; please see the
    instrument specific wrappers in __init__.py
    """
    def _swapped(prefix):
        # most Ulysses file names carry the two halves of the datatype
        # ('xxx_yyy') in the opposite order ('uy_yyy_xxx_...')
        parts = datatype.split('_')
        return prefix + datatype + '/%Y/uy_' + parts[1] + '_' + parts[0] + '_%Y%m%d_v??.cdf'

    if instrument == 'vhm':
        pathformat = 'mag_cdaweb/vhm_' + datatype + '/%Y/uy_' + datatype + '_vhm_%Y%m%d_v??.cdf'
    elif instrument == 'swoops':
        if datatype in ['bai_m0', 'bai_m1', 'bae_m0']:
            pathformat = _swapped('plasma/swoops_cdaweb/')
        else:
            pathformat = 'plasma/swoops_cdaweb/' + datatype + '/%Y/uy_' + datatype + '_%Y0101_v??.cdf'
    elif instrument == 'swics':
        pathformat = _swapped('plasma/swics_cdaweb/')
    elif instrument == 'urap':
        pathformat = _swapped('radio/urap_cdaweb/')
    elif instrument == 'epac':
        if datatype == 'epac_m1':
            pathformat = 'particle/epac_cdaweb/' + datatype + '/%Y/uy_m1_epa_%Y%m%d_v??.cdf'
    elif instrument == 'hiscale':
        pathformat = _swapped('particle/hiscale_cdaweb/')
    elif instrument == 'cospin':
        pathformat = 'particle/cospin_cdaweb/' + datatype + '/%Y/uy_m0_' + datatype + '_%Y%m%d_v??.cdf'
    elif instrument == 'grb':
        pathformat = _swapped('gamma/grb_cdaweb/')

    # find the full remote path names using the trange
    remote_names = dailynames(file_format=pathformat, trange=trange)

    out_files = []

    files = download(remote_file=remote_names,
                     remote_path=CONFIG['remote_data_dir'],
                     local_path=CONFIG['local_data_dir'],
                     no_download=no_update)
    if files is not None:
        for file in files:
            out_files.append(file)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files,
                         suffix=suffix,
                         get_support_data=get_support_data,
                         varformat=varformat,
                         varnames=varnames,
                         notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
def load(trange=['2013-11-5', '2013-11-6'],
         instrument='fgm',
         probe='c',
         level='l2',
         stations=None,  # ground mag data
         greenland=None,  # also for ground mag data
         suffix='',
         get_support_data=False,
         varformat=None,
         varnames=[],
         downloadonly=False,
         notplot=False,
         no_update=False,
         time_clip=False):
    """
    This function loads data from the THEMIS mission; this function is not
    meant to be called directly; instead, see the wrappers:
        pyspedas.themis.fgm
        pyspedas.themis.fit
        pyspedas.themis.efi
        pyspedas.themis.scm
        pyspedas.themis.fft
        pyspedas.themis.fbk
        pyspedas.themis.esa
        pyspedas.themis.sst
        pyspedas.themis.mom
        pyspedas.themis.gmom
        pyspedas.themis.gmag
        pyspedas.themis.state
    """
    if not isinstance(probe, list):
        probe = [probe]

    # instruments whose files follow the standard per-spacecraft layout
    standard_layout = ('fgm', 'fit', 'fbk', 'esa', 'sst', 'mom', 'gmom')

    # L1 sub-products for the instruments that split their L1 data across
    # multiple file series
    l1_products = {
        'efi': ['vaf', 'vap', 'vaw', 'vbf', 'vbp', 'vbw', 'eff', 'efw', 'efp'],
        'scm': ['scp', 'scf', 'scw'],
        'fft': ['fff_16', 'fff_32', 'fff_64',
                'ffp_16', 'ffp_32', 'ffp_64',
                'ffw_16', 'ffw_32', 'ffw_64'],
    }

    out_files = []

    for prb in probe:
        if instrument in standard_layout:
            pathformat = ('th' + prb + '/' + level + '/' + instrument
                          + '/%Y/th' + prb + '_' + level + '_' + instrument
                          + '_%Y%m%d_v??.cdf')
        elif instrument in ('efi', 'scm', 'fft'):
            if level == 'l2':
                pathformat = ('th' + prb + '/' + level + '/' + instrument
                              + '/%Y/th' + prb + '_' + level + '_' + instrument
                              + '_%Y%m%d_v??.cdf')
            elif level == 'l1':
                pathformat = [('th' + prb + '/' + level + '/' + product
                               + '/%Y/th' + prb + '_' + level + '_' + product
                               + '_%Y%m%d_v??.cdf')
                              for product in l1_products[instrument]]
        elif instrument == 'state':
            # state files carry no version suffix
            pathformat = ('th' + prb + '/' + level + '/' + instrument
                          + '/%Y/th' + prb + '_' + level + '_' + instrument
                          + '_%Y%m%d.cdf')
        elif instrument == 'gmag':
            if stations is None:
                logging.error('No stations specified')
                return
            pathformat = []
            for site, in_greenland in zip(stations, greenland):
                if in_greenland:
                    pathformat.append('thg/greenland_gmag/' + level + '/mag/'
                                      + site + '/%Y/thg_' + level + '_mag_'
                                      + site + '_%Y%m%d_v??.cdf')
                else:
                    pathformat.append('thg/' + level + '/mag/' + site
                                      + '/%Y/thg_' + level + '_mag_' + site
                                      + '_%Y%m%d_v??.cdf')

        if not isinstance(pathformat, list):
            pathformat = [pathformat]

        for file_format in pathformat:
            # find the full remote path names using the trange
            remote_names = dailynames(file_format=file_format, trange=trange)

            files = download(remote_file=remote_names,
                             remote_path=CONFIG['remote_data_dir'],
                             local_path=CONFIG['local_data_dir'],
                             no_download=no_update,
                             last_version=True)
            if files is not None:
                for file in files:
                    out_files.append(file)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files,
                         suffix=suffix,
                         merge=True,
                         get_support_data=get_support_data,
                         varformat=varformat,
                         varnames=varnames,
                         notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
def get_w(trange=None, create_tvar=False, newname=None):
    """
    Download the 6 Tsyganenko (TS05) model driving variables W1..W6 from:
    http://geo.phys.spbu.ru/~tsyganenko/TS05_data_and_stuff

    Parameters
    ----------
    trange : list
        Time range of interest (required).
    create_tvar : bool
        If True, store the result as a tplot variable and return its name.
    newname : str
        Name for the tplot variable created when create_tvar=True.

    Returns
    -------
    dict with keys 'times', 'w1'..'w6' (numpy arrays); or the tplot
    variable name when create_tvar=True; or None on error.

    Notes
    -----
    Fixes relative to the previous version: (1) `len()` on the result of
    argwhere(...).squeeze() raised TypeError when exactly one sample was
    in range (0-d array) — now wrapped in np.atleast_1d and checked via
    .size; (2) the per-file year column no longer clobbers the `years`
    list being iterated; (3) rows.to_numpy() is computed once per file.
    """
    if trange is None:
        print('trange keyword must be specified.')
        return

    years = dailynames(trange=trange, file_format='%Y')
    # NOTE(review): temp dir is never removed; consider cleaning it up
    tmpdir = mkdtemp()

    if newname is None:
        newname = 'Tsy_W_vars_' + '-'.join(years)

    ut_out = np.empty(0)
    # accumulators for W1..W6, in order
    w_out = [np.empty(0) for _ in range(6)]

    for year in years:
        file = download(
            remote_path=
            'http://geo.phys.spbu.ru/~tsyganenko/TS05_data_and_stuff/',
            remote_file=year + '_OMNI_5m_with_TS05_variables.???',
            local_path=tmpdir)

        if file[0][-3:] == 'zip':
            with zipfile.ZipFile(file[0], 'r') as zip_ref:
                zip_ref.extractall(tmpdir)

        rows = pd.read_csv(tmpdir + '/' + year +
                           '_OMNI_5m_with_TS05_variables.dat',
                           delim_whitespace=True,
                           header=None)
        data = rows.to_numpy()

        # the last 6 columns are W1..W6
        for i in range(6):
            w_out[i] = np.append(w_out[i], data[:, i - 6])

        # the first 4 columns are year, day-of-year, hour, minute
        # (local names here — the previous version reassigned 'years')
        yr_col = data[:, 0]
        doy_col = data[:, 1]
        hh_col = data[:, 2]
        mm_col = data[:, 3]
        time_strings = [
            str(int(yr)) + '-' + str(int(doy)).zfill(3) + ' ' +
            str(int(hh)).zfill(2) + ':' + str(int(mm)).zfill(2)
            for yr, doy, hh, mm in zip(yr_col, doy_col, hh_col, mm_col)
        ]
        ut_out = np.append(ut_out, np.array(time_double(time_strings)))

    # np.atleast_1d guards against the 0-d array that squeeze() produces
    # when exactly one sample matches
    in_range = np.atleast_1d(
        np.argwhere((ut_out >= time_double(trange[0]))
                    & (ut_out < time_double(trange[1]))).squeeze())

    if in_range.size == 0:
        print('No data found in the trange.')
        return

    if create_tvar:
        out = np.array([w[in_range] for w in w_out])
        store_data(newname, data={'x': ut_out[in_range], 'y': out.T})
        return newname

    return {
        'times': ut_out[in_range],
        'w1': w_out[0][in_range],
        'w2': w_out[1][in_range],
        'w3': w_out[2][in_range],
        'w4': w_out[3][in_range],
        'w5': w_out[4][in_range],
        'w6': w_out[5][in_range]
    }
def load(trange=['2013-11-5', '2013-11-6'],
         probe='15',
         instrument='fgm',
         datatype='1min',
         suffix='',
         downloadonly=False,
         no_update=False,
         time_clip=False):
    """
    Load data from the GOES mission.

    This function is not meant to be called directly; instead, see the
    wrappers:
        pyspedas.goes.fgm

    Returns
    -------
    List of tplot variables created; or the list of local files when
    downloadonly=True.

    Notes
    -----
    Fix relative to the previous version: the accumulated file list was
    re-initialized inside the probe loop, so multi-probe requests only
    returned the last probe's files; it is now initialized once.
    """
    if not isinstance(probe, list):
        probe = [probe]

    # averaged products (1min/5min) live under 'avg'; full resolution
    # under 'full' (previously selected by indexing a list with a bool)
    if datatype in ('1min', '5min'):
        goes_path_dir = 'avg'
    else:
        goes_path_dir = 'full'

    out_files = []

    for prb in probe:
        remote_path = goes_path_dir + '/%Y/%m/goes' + str(prb) + '/netcdf/'

        if instrument == 'fgm':
            if datatype == '512ms':  # full, unaveraged data
                pathformat = remote_path + 'g' + str(prb) + '_magneto_512ms_%Y%m%d_%Y%m%d.nc'
            elif datatype == '1min':  # 1 min averages
                pathformat = remote_path + 'g' + str(prb) + '_magneto_1m_%Y%m01_%Y%m??.nc'
            elif datatype == '5min':  # 5 min averages
                pathformat = remote_path + 'g' + str(prb) + '_magneto_5m_%Y%m01_%Y%m??.nc'

        # find the full remote path names using the trange
        remote_names = dailynames(file_format=pathformat, trange=trange)

        files = download(remote_file=remote_names,
                         remote_path=CONFIG['remote_data_dir'],
                         local_path=CONFIG['local_data_dir'],
                         no_download=no_update)
        if files is not None:
            for file in files:
                out_files.append(file)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = netcdf_to_tplot(out_files, suffix=suffix, merge=True,
                            time='time_tag')

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
def load(trange=['2018-11-5', '2018-11-6'],
         instrument='fields',
         datatype='mag_rtn',
         level='l2',
         suffix='',
         get_support_data=False,
         varformat=None,
         varnames=[],
         downloadonly=False,
         notplot=False,
         no_update=False,
         time_clip=False):
    """
    This function loads Parker Solar Probe data into tplot variables; this
    function is not meant to be called directly; instead, see the wrappers:
        psp.fields: FIELDS data
        psp.spc: Solar Probe Cup data
        psp.spe: SWEAP/SPAN-e data
        psp.spi: SWEAP/SPAN-i data
        psp.epihi: ISoIS/EPI-Hi data
        psp.epilo: ISoIS/EPI-Lo data
        psp.epi ISoIS/EPI (merged Hi-Lo) data
    """
    # daily files by default; FIELDS data come in 6-hour granules
    file_resolution = 24 * 3600.

    if instrument == 'fields':
        pathformat = instrument + '/' + level + '/' + datatype + '/%Y/psp_fld_' + level + '_' + datatype + '_%Y%m%d%H_v??.cdf'
        file_resolution = 6 * 3600.
    elif instrument == 'spc':
        pathformat = 'sweap/spc/' + level + '/' + datatype + '/%Y/psp_swp_spc_' + datatype + '_%Y%m%d_v??.cdf'
    elif instrument == 'spe':
        pathformat = 'sweap/spe/' + level + '/' + datatype + '/%Y/psp_swp_sp?_*_%Y%m%d_v??.cdf'
    elif instrument == 'spi':
        pathformat = 'sweap/spi/' + level + '/' + datatype + '/%Y/psp_swp_spi_*_%Y%m%d_v??.cdf'
    elif instrument == 'epihi':
        pathformat = 'isois/epihi/' + level + '/' + datatype + '/%Y/psp_isois-epihi_' + level + '*_%Y%m%d_v??.cdf'
    elif instrument == 'epilo':
        pathformat = 'isois/epilo/' + level + '/' + datatype + '/%Y/psp_isois-epilo_' + level + '*_%Y%m%d_v??.cdf'
    elif instrument == 'epi':
        pathformat = 'isois/merged/' + level + '/' + datatype + '/%Y/psp_isois_' + level + '-' + datatype + '_%Y%m%d_v??.cdf'

    # find the full remote path names using the trange
    remote_names = dailynames(file_format=pathformat, trange=trange,
                              res=file_resolution)

    out_files = []

    files = download(remote_file=remote_names,
                     remote_path=CONFIG['remote_data_dir'],
                     local_path=CONFIG['local_data_dir'],
                     no_download=no_update)
    if files is not None:
        out_files.extend(files)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files,
                         suffix=suffix,
                         get_support_data=get_support_data,
                         varformat=varformat,
                         varnames=varnames,
                         notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
def load(trange=['2004-11-5', '2004-11-6'],
         instrument='lena',
         datatype='k0',
         suffix='',
         get_support_data=False,
         varformat=None,
         downloadonly=False,
         notplot=False,
         no_update=False,
         time_clip=False):
    """
    Load IMAGE data.

    This function is not meant to be called directly; instead, see the
    wrappers:
        pyspedas.image.lena
        pyspedas.image.mena
        pyspedas.image.hena
        pyspedas.image.rpi
        pyspedas.image.euv
        pyspedas.image.fuv

    Returns
    -------
    List of tplot variables created; or the list of local files when
    downloadonly=True; or a dict of the data when notplot=True.
    """
    # lena/mena/hena/rpi/euv all share the same directory/file layout
    # (previously five copies of the identical branch)
    if instrument in ('lena', 'mena', 'hena', 'rpi', 'euv'):
        pathformat = instrument + '/' + instrument + '_' + datatype + '/%Y/im_' + datatype + '_' + instrument + '_%Y%m%d_v??.cdf'
    elif instrument == 'fuv':
        # FUV data are served under the 'wic' product name
        pathformat = instrument + '/wic_' + datatype + '/%Y/im_' + datatype + '_wic_%Y%m%d_v??.cdf'
    elif instrument == 'orbit':
        if datatype == 'def_or':
            pathformat = instrument + '/def_or/%Y/im_or_def_%Y%m%d_v??.cdf'
        elif datatype == 'pre_or':
            pathformat = instrument + '/pre_or/%Y/im_or_pre_%Y%m%d_v??.cdf'

    # find the full remote path names using the trange
    remote_names = dailynames(file_format=pathformat, trange=trange)

    out_files = []

    files = download(remote_file=remote_names,
                     remote_path=CONFIG['remote_data_dir'],
                     local_path=CONFIG['local_data_dir'],
                     no_download=no_update)
    if files is not None:
        for file in files:
            out_files.append(file)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files,
                         suffix=suffix,
                         merge=True,
                         get_support_data=get_support_data,
                         varformat=varformat,
                         notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
def _make_EICS_plots(dtime=None,
                     vplot_sized=False,
                     contour_den=8,
                     s_loc=False,
                     quiver_scale=30):
    """
    Generate and save two EICS plots for a single timestamp: a vector
    (quiver) map of the current components and a contour map of Jy, both
    drawn on a Lambert-conformal North-American Basemap.

    @Parameter: dtime input as a string
    @Parameter: s_loc input as a bool, which means the locations of the virtual stations.

    Parameters
    ----------
    dtime : str
        Timestamp in 'YYYY-MM-DD/hh:mm:ss' form (parsed with strptime below).
    vplot_sized : bool
        If False, plot unit vectors colored by a derived magnitude; if True,
        plot map-rotated vectors scaled by quiver_scale plus a reference key.
    contour_den : int
        Number of contour levels for the Jy contour plot.
    s_loc : bool
        If True, overlay the virtual-station locations on the contour plot.
    quiver_scale : int
        Quiver scale (passed to Basemap.quiver) used when vplot_sized is True.

    Side effects: saves two .jpeg images into CONFIG['plots_dir'] and calls
    plt.show() for each figure.  Returns None.
    """
    dtype = 'EICS'

    # make sure the output directory for the images exists
    if not os.path.exists(CONFIG['plots_dir']):
        os.makedirs(CONFIG['plots_dir'])

    # a degenerate one-point time range selects the single 10-second file
    dtime_range = [dtime, dtime]
    pathformat_prefix = dtype + '/%Y/%m/'
    pathformat_unzipped = pathformat_prefix + '%d/' + dtype + '%Y%m%d_%H%M%S.dat'
    filename_unzipped = dailynames(file_format=pathformat_unzipped,
                                   trange=dtime_range,
                                   res=10)
    out_files_unzipped = [CONFIG['local_data_dir'] + rf_res for rf_res in filename_unzipped]
    # presumably a DataFrame with longitude/latitude/Jx/Jy columns — the
    # column accesses below rely on that
    Data_Days_time = read_data_files(out_files=out_files_unzipped,
                                     dtype=dtype,
                                     out_type='df')

    # symmetric contour levels centered on zero, spanning the Jy extremes
    J_comp = Data_Days_time['Jy']
    Jc_max, Jc_min = J_comp.max(), J_comp.min()
    Jcm_abs = max(abs(Jc_max), abs(Jc_min))
    contour_density = np.linspace(-Jcm_abs, Jcm_abs, num=contour_den)

    tp = dtime
    # compact 'YYYYMMDD_hhmmss' tag; NOTE(review): currently unused below
    datetime_tp = tp[0:4] + tp[5:7] + tp[8:10] + '_' + tp[11:13] + tp[14:16] + tp[17:19]

    lon = Data_Days_time['longitude']
    lat = Data_Days_time['latitude']
    Jx = Data_Days_time['Jx']  # Note: positive is Northward
    Jy = Data_Days_time['Jy']  # Note: positive is Eastward

    # plot 1:
    # plot map ground (North hemisphere)
    fig1 = plt.figure(figsize=(8, 8))
    ax1 = plt.gca()
    m = Basemap(projection='lcc', resolution='c',
                width=8E6, height=8E6,
                lat_0=60, lon_0=-100)
    # draw coastlines, country boundaries, fill continents.
    m.drawcoastlines(linewidth=0.25)
    m.drawcountries(linewidth=0.25)
    m.fillcontinents(color='None', lake_color='None')
    # draw the edge of the map projection region (the projection limb)
    m.drawmapboundary(fill_color=None)
    # m.drawgreatcircle(-100,0,0,90)
    m.drawlsmask()
    # m.bluemarble()
    m.shadedrelief()
    # draw parallels and meridians.
    # label parallels on right and top
    # meridians on bottom and left
    parallels = np.arange(0., 81, 10.)
    # labels = [left,right,top,bottom]
    m.drawparallels(parallels, labels=[False, True, True, False])
    meridians = np.arange(10., 351., 20.)
    m.drawmeridians(meridians, labels=[True, False, False, True])

    # shade the night side of the map for this timestamp
    date_nightshade = datetime.strptime(dtime, '%Y-%m-%d/%H:%M:%S')
    m.nightshade(date=date_nightshade)
    draw_map(m)

    # plot vector field:
    lon = lon.to_numpy()
    lat = lat.to_numpy()
    Jx = Jx.to_numpy()  # Note: positive is Northward
    Jy = Jy.to_numpy()  # Note: positive is Eastward
    # normalize to unit vectors so all arrows are equal length
    Jx_uni = Jx / np.sqrt(Jx**2 + Jy**2)
    Jy_uni = Jy / np.sqrt(Jx**2 + Jy**2)
    # synthetic scalar used only to color the unit arrows
    n = -2
    color = np.sqrt(((Jx_uni - n) / 2)**2 + ((Jy_uni - n) / 2)**2)

    if vplot_sized == False:
        # equal-length arrows, colored by the derived magnitude
        qv = m.quiver(lon, lat, Jx_uni, Jy_uni, color,
                      headlength=7, latlon=True,
                      cmap='GnBu')  # autumn_r #, color=cm(norm(o)))#, cmap = 'jet')
        plt.colorbar()
    else:
        # rotate vectors into map-projection coordinates, keep true magnitudes
        Jy_rot, Jx_rot, x, y = m.rotate_vector(Jy, Jx, lon, lat, returnxy=True)
        qv = m.quiver(lon, lat, Jy_rot, Jx_rot,
                      headlength=7, latlon=True,
                      scale_units='dots',
                      scale=quiver_scale)  # , transform='lcc')
        # reference arrow key for scale (100 mA/m)
        qk = ax1.quiverkey(qv, 0.3, -0.1, 100, r'$100 \ mA/m$',
                           labelpos='E', coordinates='data')

    # figure
    plt.title(label='EICS ' + tp, fontsize=20, color="black", pad=20)
    plt.tight_layout()
    plt.savefig(CONFIG['plots_dir'] + 'EICS' + '_vector_' + date_nightshade.strftime('%Y%m%d%H%M%S') + '.jpeg')
    plt.show()

    # plot 2: contour plot
    # plot map ground (North hemisphere)
    fig2 = plt.figure(figsize=(8, 8))
    ax2 = plt.gca()
    m = Basemap(projection='lcc', resolution='c',
                width=8E6, height=8E6,
                lat_0=60, lon_0=-100)
    # draw coastlines, country boundaries, fill continents.
    m.drawcoastlines(linewidth=0.25)
    m.drawcountries(linewidth=0.25)
    m.fillcontinents(color='None', lake_color='None')
    # draw the edge of the map projection region (the projection limb)
    m.drawmapboundary(fill_color=None)
    m.drawlsmask()
    m.shadedrelief()
    # draw parallels and meridians.
    # label parallels on right and top
    # meridians on bottom and left
    parallels = np.arange(0., 81, 10.)
    m.drawparallels(parallels, labels=[False, True, True, False])
    meridians = np.arange(10., 351., 20.)
    m.drawmeridians(meridians, labels=[True, False, False, True])

    date_nightshade = datetime.strptime(dtime, '%Y-%m-%d/%H:%M:%S')
    # m.nightshade(date=date_nightshade, alpha = 0.0)
    # draw the day/night terminator manually instead of shading
    delta = 0.25
    lons_dd, lats_dd, tau, dec = daynight_terminator(date_nightshade, delta,
                                                     m.lonmin, m.lonmax)
    xy = [lons_dd, lats_dd]
    xy = np.array(xy)
    xb, yb = xy[0], xy[1]
    m.plot(xb, yb, marker=None, color='m', latlon=True)  # for dawn-dusk circle line

    # Plot the noon-midnight line.
    # NOTE(review): n_interval/ni_half/ni_otherhalf are computed but unused
    n_interval = len(lons_dd)
    ni_half = int(np.floor(len(lons_dd) / 2))
    ni_otherhalf = n_interval - ni_half

    noon_midnight = noon_midnight_meridian(dtime, delta)
    m.plot(noon_midnight['lons_noon'], noon_midnight['lats_noon'],
           marker=None, color='deepskyblue', latlon=True)  # noon semi-circle
    m.plot(noon_midnight['lons_midnight'], noon_midnight['lats_midnight'],
           marker=None, color='k', latlon=True)  # midnight semi-circle

    draw_map(m)

    # NOTE(review): Jy_log is computed but unused; the contour below uses Jy
    Jy_log = Jy / np.abs(Jy) * np.log10(np.abs(Jy))
    norm_cb = CenteredNorm()
    # norm_cb = NoNorm()
    # norm_cb = CenteredNorm(vmin=Jy.min(), vcenter=0, vmax=Jy.max())
    # use Jy for the contour map, not Jy_rot.
    # tri=True: scattered-point triangulated contouring
    ctrf = m.contourf(lon, lat, Jy, contour_density, latlon=True, tri=True,
                      cmap='jet_r', norm=norm_cb)
    ##ctrf = m.contourf(lon, lat, Jy, contour_density, latlon=True, tri=True, cmap='jet_r', norm=norm_cb)
    # -------------
    if s_loc:
        # mark the virtual-station locations
        m.scatter(lon, lat, latlon=True, marker='*', c='black')
    # -------------
    cb = m.colorbar(matplotlib.cm.ScalarMappable(norm=norm_cb, cmap='jet_r'),
                    pad='15%')
    cb.set_label(r'$\mathit{J}_y \ (mA/m)$')
    ax_cb = cb.ax
    text = ax_cb.yaxis.label
    font_cb = matplotlib.font_manager.FontProperties(family='times new roman',
                                                     style='italic', size=20)
    text.set_font_properties(font_cb)
    plt.title(label='EICS ' + tp, fontsize=20, color="black", pad=20)
    plt.tight_layout()
    plt.savefig(CONFIG['plots_dir'] + 'EICS' + '_contour_' + date_nightshade.strftime('%Y%m%d%H%M%S') + '.jpeg')
    plt.show()
    print('EICS plots completed!')
    return
def load(trange=['2017-03-27', '2017-03-28'],
         instrument='mgf',
         datatype='8sec',
         mode=None,
         site=None,
         level='l2',
         suffix='',
         get_support_data=False,
         varformat=None,
         varnames=[],
         downloadonly=False,
         notplot=False,
         no_update=False,
         uname=None,
         passwd=None,
         time_clip=False):
    """
    This function is not meant to be called directly; please see the instrument specific wrappers:
        pyspedas.erg.mgf()
        pyspedas.erg.hep()
        pyspedas.erg.orb()
        pyspedas.erg.lepe()
        pyspedas.erg.lepi()
        pyspedas.erg.mepe()
        pyspedas.erg.mepi()
        pyspedas.erg.pwe_ofa()
        pyspedas.erg.pwe_efd()
        pyspedas.erg.pwe_hfa()
        pyspedas.erg.xep()

    Returns
    -------
    List of tplot variable names created (or list of downloaded files when
    downloadonly=True, or a dict of data when notplot=True); returns None
    for an unsupported instrument.
    """
    prefix = 'erg_' + instrument + '_' + level + '_'
    file_res = 24 * 3600.

    if instrument == 'mgf':
        # MGF files use a dotted version suffix (v??.??) instead of v??_??
        pathformat = 'satellite/erg/' + instrument + '/' + level + '/' + datatype + '/%Y/%m/erg_' + instrument + '_' + level + '_' + datatype + '_%Y%m%d_v??.??.cdf'
    elif instrument == 'orb':
        pathformat = 'satellite/erg/' + instrument + '/' + level + '/opq/%Y/%m/erg_' + instrument + '_' + level + '_op_%Y%m%d_v??.cdf'
    elif instrument in ('hep', 'lepe', 'lepi', 'mepe', 'mepi', 'xep'):
        # these instruments share an identical directory layout and file
        # naming, previously duplicated across six branches
        pathformat = 'satellite/erg/' + instrument + '/' + level + '/' + datatype + '/%Y/%m/erg_' + instrument + '_' + level + '_' + datatype + '_%Y%m%d_v??_??.cdf'
    elif instrument in ('pwe_ofa', 'pwe_efd'):
        # PWE products live under satellite/erg/pwe/<subsystem>/
        subsystem = instrument.split('_')[1]
        pathformat = 'satellite/erg/pwe/' + subsystem + '/' + level + '/' + datatype + '/%Y/%m/erg_' + instrument + '_' + level + '_' + datatype + '_%Y%m%d_v??_??.cdf'
    elif instrument == 'pwe_hfa':
        # HFA additionally includes the mode in the path and file name
        pathformat = 'satellite/erg/pwe/hfa/' + level + '/' + datatype + '/' + mode + '/%Y/%m/erg_' + instrument + '_' + level + '_' + datatype + '_' + mode + '_%Y%m%d_v??_??.cdf'
    else:
        # previously fell through and raised a NameError on pathformat
        print('Unsupported instrument: ' + str(instrument))
        return

    # find the full remote path names using the trange
    remote_names = dailynames(file_format=pathformat, trange=trange, res=file_res)

    out_files = []

    files = download(remote_file=remote_names,
                     remote_path=CONFIG['remote_data_dir'],
                     local_path=CONFIG['local_data_dir'],
                     no_download=no_update,
                     last_version=True,
                     username=uname,
                     password=passwd)
    if files is not None:
        for file in files:
            out_files.append(file)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files, prefix=prefix, suffix=suffix,
                         get_support_data=get_support_data,
                         varformat=varformat, varnames=varnames,
                         notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
def load(trange=['2020-06-01', '2020-06-02'],
         instrument='mag',
         datatype='rtn-normal',
         mode=None,
         level='l2',
         suffix='',
         get_support_data=False,
         varformat=None,
         varnames=[],
         downloadonly=False,
         notplot=False,
         no_update=False,
         time_clip=False):
    """
    This function loads data from the Solar Orbiter mission; this function is not meant 
    to be called directly; instead, see the wrappers:
        pyspedas.solo.mag
        pyspedas.solo.epd
        pyspedas.solo.rpw
        pyspedas.solo.swa

    Returns
    -------
    List of tplot variable names created (or list of downloaded files when
    downloadonly=True, or a dict of data when notplot=True); returns None
    for an unsupported instrument/level combination.
    """
    # Defaults for L2, L3 data
    science_or_low_latency = 'science'
    date_format = '%Y%m%d'
    cdf_version = '??'
    res = 24*3600.

    if level == 'll02':
        science_or_low_latency = 'low_latency'
        date_format = '%Y%m%dt%H%M??-*'
        cdf_version = '???'
        res = 60.0

    if instrument == 'swa':
        # per level, only these SWA products come as daily files; everything
        # else is time-tagged to the minute
        if level == 'l2' or level == 'll02':
            daily_products = ('pas-eflux', 'pas-grnd-mom', 'pas-vdf')
        elif level == 'l1':
            daily_products = ('his-pha', 'his-sensorrates', 'pas-3d', 'pas-cal', 'pas-mom')
        else:
            # previously fell through and raised a NameError on pathformat
            print('Unsupported level for SWA: ' + str(level))
            return
        if datatype not in daily_products:
            date_format = '%Y%m%dt%H%M??-*'
            res = 60.0

    if instrument == 'epd':
        # EPD paths and file names additionally include the mode
        pathformat = instrument+'/'+science_or_low_latency+'/'+level+'/'+datatype+'/'+mode+'/%Y/solo_'+level+'_'+instrument+'-'+datatype+'-'+mode+'_'+date_format+'_v'+cdf_version+'.cdf'
    elif instrument in ('mag', 'rpw', 'swa'):
        # these instruments share the same remote layout; the string was
        # previously duplicated across several branches
        pathformat = instrument+'/'+science_or_low_latency+'/'+level+'/'+datatype+'/%Y/solo_'+level+'_'+instrument+'-'+datatype+'_'+date_format+'_v'+cdf_version+'.cdf'
    else:
        # previously fell through and raised a NameError on pathformat
        print('Unsupported instrument: ' + str(instrument))
        return

    # find the full remote path names using the trange
    remote_names = dailynames(file_format=pathformat, trange=trange, res=res)

    out_files = []

    files = download(remote_file=remote_names,
                     remote_path=CONFIG['remote_data_dir'],
                     local_path=CONFIG['local_data_dir'],
                     no_download=no_update)
    if files is not None:
        for file in files:
            out_files.append(file)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files, suffix=suffix,
                         get_support_data=get_support_data,
                         varformat=varformat, varnames=varnames,
                         notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
def load(trange=['2013-11-5', '2013-11-6'],
         instrument='fgm',
         datatype='h0',
         suffix='',
         get_support_data=False,
         varformat=None,
         varnames=[],
         downloadonly=False,
         notplot=False,
         no_update=False,
         time_clip=False):
    """
    This function loads data from the WIND mission; this function is not meant 
    to be called directly; instead, see the wrappers:
        pyspedas.wind.mfi
        pyspedas.wind.swe
        pyspedas.wind.sms
        pyspedas.wind.threedp
        pyspedas.wind.waves
        pyspedas.wind.orbit

    Returns
    -------
    List of tplot variable names created (or list of downloaded files when
    downloadonly=True, or a dict of data when notplot=True); returns None
    for an unsupported instrument.
    """
    if instrument == 'fgm':
        pathformat = 'mfi/mfi_' + datatype + '/%Y/wi_' + datatype + '_mfi_%Y%m%d_v??.cdf'
    elif instrument == 'swe':
        pathformat = 'swe/swe_' + datatype + '/%Y/wi_' + datatype + '_swe_%Y%m%d_v??.cdf'
    elif instrument == 'sms':
        pathformat = 'sms/' + datatype + '/sms_' + datatype + '/%Y/wi_' + datatype + '_sms_%Y%m%d_v??.cdf'
    elif instrument == 'waves':
        pathformat = 'waves/wav_' + datatype + '/%Y/wi_' + datatype + '_wav_%Y%m%d_v??.cdf'
    elif instrument == 'orbit':
        # file names reverse the datatype pieces, e.g. 'def_or' -> 'wi_or_def'
        dt_parts = datatype.split('_')
        pathformat = 'orbit/' + datatype + '/%Y/wi_' + dt_parts[1] + '_' + dt_parts[0] + '_%Y%m%d_v??.cdf'
    elif instrument == '3dp':
        # split once instead of repeated per-piece datatype.split('_') calls
        dt_parts = datatype.split('_')
        if datatype == '3dp_emfits_e0':
            pathformat = '3dp/' + datatype + '/%Y/wi_' + dt_parts[1] + '_' + dt_parts[2] + '_' + dt_parts[0] + '_%Y%m%d_v??.cdf'
        else:
            pathformat = '3dp/' + datatype + '/%Y/wi_' + dt_parts[1] + '_' + dt_parts[0] + '_%Y%m%d_v??.cdf'
    else:
        # previously fell through and raised a NameError on pathformat
        print('Unsupported instrument: ' + str(instrument))
        return

    # find the full remote path names using the trange
    remote_names = dailynames(file_format=pathformat, trange=trange)

    out_files = []

    files = download(remote_file=remote_names,
                     remote_path=CONFIG['remote_data_dir'],
                     local_path=CONFIG['local_data_dir'],
                     no_download=no_update,
                     last_version=True)
    if files is not None:
        for file in files:
            out_files.append(file)

    out_files = sorted(out_files)

    if downloadonly:
        return out_files

    tvars = cdf_to_tplot(out_files, suffix=suffix,
                         get_support_data=get_support_data,
                         varformat=varformat, varnames=varnames,
                         notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars
def load(trange=['2013-11-5', '2013-11-6'],
         datatype='1min',
         level='hro2',
         suffix='',
         get_support_data=False,
         get_ignore_data=False,
         varformat=None,
         varnames=[],
         downloadonly=False,
         notplot=False,
         no_update=False,
         time_clip=True):
    """
    This function loads OMNI (Combined 1AU IP Data; Magnetic and Solar Indices)
    data; this function is not meant to be called directly; instead, see the
    wrapper:
        pyspedas.omni.data
    """
    # Minute-cadence products are organized by level and cadence; hourly data
    # lives in a single fixed directory. Anything else is rejected up front.
    if 'min' in datatype:
        pathformat = level + '_' + datatype + '/%Y/omni_' + level + '_' + datatype + '_%Y%m01_v??.cdf'
    elif 'hour' in datatype:
        pathformat = 'hourly/%Y/omni2_h0_mrg1hr_%Y%m01_v??.cdf'
    else:
        raise TypeError("%r are invalid keyword arguments" % datatype)

    # expand the time range into the concrete remote file names
    remote_names = dailynames(file_format=pathformat, trange=trange)

    files = download(remote_file=remote_names,
                     remote_path=CONFIG['remote_data_dir'],
                     local_path=CONFIG['local_data_dir'],
                     no_download=no_update)

    out_files = sorted(files) if files is not None else []

    if downloadonly:
        return out_files

    with warnings.catch_warnings():
        # for some reason, OMNI CDFs throw ERFA warnings (likely while converting
        # times inside astropy); we're ignoring these here
        # see: https://github.com/astropy/astropy/issues/9603
        warnings.simplefilter('ignore', astropy.utils.exceptions.ErfaWarning)
        tvars = cdf_to_tplot(out_files, suffix=suffix,
                             get_support_data=get_support_data,
                             get_ignore_data=get_ignore_data,
                             varformat=varformat,
                             varnames=varnames,
                             notplot=notplot)

    if notplot:
        return tvars

    if time_clip:
        for new_var in tvars:
            tclip(new_var, trange[0], trange[1], suffix='')

    return tvars