Example #1
    def cache_omni_interval_wrapper(dt):

        #print("Cached OMNI called for {}".format(dt))

        if 'omni_interval' in cache:
            cached_oi = cache['omni_interval']
            need_new_oi = not _dt_within_range(dt, cached_oi)
        else:
            need_new_oi = True

        if need_new_oi:
            startdt = dt - datetime.timedelta(days=new_interval_days_before_dt)
            enddt = dt + datetime.timedelta(days=new_interval_days_after_dt)

            oi = omnireader.omni_interval(startdt,
                                          enddt,
                                          _ovation_prime_omni_cadence,
                                          silent=True)
            #Save to cache
            cache['omni_interval'] = oi
            # print("Created new solar wind interval: {}-{}".format(oi.startdt,
            #                                                         oi.enddt))
        else:
            #Load from cache
            oi = cache['omni_interval']

            # print("Using cached solar wind interval: {}-{}".format(oi.startdt,
            #                                                         oi.enddt))

        return func(dt, oi)
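
For context, this wrapper reads like the inner function of a caching decorator. Below is a minimal sketch of the enclosing scope it appears to assume; the window sizes, cadence value, and `_dt_within_range` logic are illustrative guesses, not the original source.

import datetime

from geospacepy import omnireader

_ovation_prime_omni_cadence = '5min'  # assumed cadence value


def cache_omni_interval(func):
    # Hypothetical enclosing decorator (a sketch, not the original).
    cache = {}
    new_interval_days_before_dt = 1.5  # assumed window sizes
    new_interval_days_after_dt = 1.5

    def _dt_within_range(dt, oi):
        # True if dt falls inside the cached interval's time span
        return oi.startdt <= dt <= oi.enddt

    def cache_omni_interval_wrapper(dt):
        # (same logic as the example above, condensed)
        if 'omni_interval' in cache and _dt_within_range(dt, cache['omni_interval']):
            oi = cache['omni_interval']
        else:
            oi = omnireader.omni_interval(
                dt - datetime.timedelta(days=new_interval_days_before_dt),
                dt + datetime.timedelta(days=new_interval_days_after_dt),
                _ovation_prime_omni_cadence, silent=True)
            cache['omni_interval'] = oi
        return func(dt, oi)

    return cache_omni_interval_wrapper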
Example #2
def get_f107_ap(dt, silent=False):
    """
    Get F10.7 for the day of interest (noon), for the previous day, and as an
    81-day centered average; also get the Ap index for the day of interest (noon).
    """
    jd = special_datetime.datetime2jd(dt)

    oi = omnireader.omni_interval(dt - datetime.timedelta(days=41),
                                  dt + datetime.timedelta(days=41), 'hourly')

    odt = oi['Epoch']
    ojd = special_datetime.datetimearr2jd(odt).flatten()
    f107_81 = oi['F10_INDEX']
    ap_81 = oi['AP_INDEX']
    f107a = np.nanmean(f107_81)
    today = np.logical_and(ojd > np.floor(jd), ojd < np.ceil(jd))
    yesterday = np.logical_and(ojd > np.floor(jd - 1), ojd < np.ceil(jd - 1))
    f107p = np.nanmean(f107_81[yesterday])  # Previous day f10.7
    f107 = np.nanmean(f107_81[today])
    ap = np.nanmean(ap_81[today])
    if not silent:
        print(dt.strftime('%Y%m%d %H:%M'))
        print('Yesterday F107 %.2f' % f107p)
        print('Today F107 %.2f' % f107)
        print('81-day avg F107: %.2f' % f107a)
    return f107, ap, f107p, f107a
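
A hypothetical call, assuming `datetime`, `numpy as np`, and geospacepy's `omnireader`/`special_datetime` are imported as the function expects; the date is arbitrary.

f107, ap, f107p, f107a = get_f107_ap(datetime.datetime(2006, 3, 14, 12))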
Example #3
def example_omni_interval(request):
    """
	A fixture that can have tests written around it,
	and each test will be executed for each parameter,
	that is, for each possible cadence
	"""
    cadence = request.param
    dt = datetime.datetime(2006, 3, 14)
    return omnireader.omni_interval(dt, dt + datetime.timedelta(days=1),
                                    cadence)
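
The `request.param` access implies a parametrized fixture; here is a sketch of the decoration and a consuming test it presumably pairs with. The cadence list and test body are assumptions.

import datetime

import pytest
from geospacepy import omnireader


@pytest.fixture(params=['hourly', '5min', '1min'])  # assumed cadence list
def example_omni_interval(request):
    cadence = request.param
    dt = datetime.datetime(2006, 3, 14)
    return omnireader.omni_interval(dt, dt + datetime.timedelta(days=1), cadence)


def test_interval_has_data(example_omni_interval):
    # runs once per cadence parameter
    assert len(example_omni_interval['Epoch']) > 0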
Example #4
def omni_interval_txtcdf_comparison(request):
    """
	A fixture that can have tests write around it,
	and each test will be executed for each parameter,
	that is, for each possible cadence
	"""
    cadence = request.param
    dt = datetime.datetime(2006, 3, 14)
    oiformats = {
        'cdf':
        omnireader.omni_interval(dt,
                                 dt + datetime.timedelta(days=1),
                                 cadence,
                                 cdf_or_txt='cdf'),
        'txt':
        omnireader.omni_interval(dt,
                                 dt + datetime.timedelta(days=1),
                                 cadence,
                                 cdf_or_txt='txt')
    }
    return oiformats
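
A hedged sketch of a test that might consume this fixture, comparing one variable across the two file formats; the variable name and tolerance are assumptions.

import numpy as np


def test_cdf_and_txt_agree(omni_interval_txtcdf_comparison):
    oi_cdf = omni_interval_txtcdf_comparison['cdf']
    oi_txt = omni_interval_txtcdf_comparison['txt']
    # The same OMNI variable should match between the CDF and text sources.
    np.testing.assert_allclose(oi_cdf['BZ_GSM'], oi_txt['BZ_GSM'], rtol=1e-5)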
Example #5
    def __init__(self, year, month, day, hour, minute, use_igrf, *args,
                 **kwargs):
        from geopack import t01
        # time since Jan. 1 1970 00:00 UT, as a datetime.timedelta
        dt = datetime.datetime(year, month, day, hour,
                               minute) - datetime.datetime(1970, 1, 1)
        # seconds from 1970/1/1 00:00 UT
        self.dt_seconds = dt.total_seconds()

        self.ps = geopack.recalc(self.dt_seconds)
        self.use_igrf = use_igrf

        from geospacepy import omnireader
        sTimeIMF = datetime.datetime(year, month, day, hour, minute)
        eTimeIMF = sTimeIMF + datetime.timedelta(minutes=1)
        omniInt = omnireader.omni_interval(sTimeIMF, eTimeIMF, '1min')
        t = omniInt['Epoch']  #datetime timestamps
        By = omniInt['BY_GSM']
        Bz = omniInt['BZ_GSM']
        Pdyn = omniInt['Pressure']
        SYM_H = omniInt['SYM_H']

        # parmod expects scalars; the one-minute interval above should yield
        # single-sample arrays, so take the first value of each input
        self.parmod = np.array([Pdyn[0], SYM_H[0], By[0], Bz[0],
                                0., 0., 0., 0., 0., 0.], dtype=float)

        super(T01, self).__init__(*args, **kwargs)
        #        parmod=np.zeros(10,dtype=float)
        #        t89.tsyganenko.init_t89(year,month,day,hour,minute,use_igrf,0,parmod)
        #        t89.tsyganenko.init_t89(int(year),int(month),int(day),int(hour),int(minute),int(use_igrf))
        bounds_error = kwargs.get('bounds_error', False)
        fill_value = kwargs.get('missing_value', np.nan)

        self.citation = 'Kamodo.T01 by Lutz Rastaetter (2020), Geopack/Tsyganenko by Sheng Tian (2019) and geospacepy-lite by Liam Kilcommons (2019)'

        self.x = np.linspace(-30., 10., 40)  # make sure to avoid (0,0,0)
        self.y = np.linspace(-10., 10., 20)
        self.z = np.linspace(-10., 10., 20)

        self.variables = dict(b_x=dict(units='nT', data=None),
                              b_y=dict(units='nT', data=None),
                              b_z=dict(units='nT', data=None),
                              bvec=dict(units='nT', data=None))

        for varname in self.variables:
            units = self.variables[varname]['units']
            self.register_variable(varname, units)
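
A hypothetical instantiation, assuming the Kamodo base class needs no additional required arguments; the date and time are arbitrary.

model = T01(2006, 3, 14, 0, 0, use_igrf=1)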
Example #6
    def __init__(self, year, month, day, hour, minute, use_igrf, *args,
                 **kwargs):
        from geopack import t89
        super(T89, self).__init__(*args, **kwargs)
        # time since Jan. 1 1970 00:00 UT as datetime.timedelta
        dt = datetime.datetime(year, month, day, hour,
                               minute) - datetime.datetime(1970, 1, 1)
        # seconds from 1970/1/1 00:00 UT
        self.dt_seconds = dt.total_seconds()

        self.ps = geopack.recalc(self.dt_seconds)
        self.use_igrf = use_igrf

        from geospacepy import omnireader
        sTimeIMF = datetime.datetime(year, month, day, hour)
        eTimeIMF = sTimeIMF + datetime.timedelta(hours=1)  # one hour at hourly cadence
        omniInt = omnireader.omni_interval(sTimeIMF, eTimeIMF, 'hourly')
        t = omniInt['Epoch']  #datetime timestamps
        By, Bz = omniInt['BY_GSM'], omniInt['BZ_GSM']

        kp = omniInt['KP']
        # T89 iopt parameter is Kp + 1, capped at 7 (iopt=7 covers Kp >= 6)
        self.iopt = min(int(kp[0]) + 1, 7)

        bounds_error = kwargs.get('bounds_error', False)
        fill_value = kwargs.get('missing_value', np.nan)

        self.citation = 'Kamodo.T89 by Lutz Rastaetter (2020), Geopack/Tsyganenko by Sheng Tian (2019) and geospacepy-lite by Liam Kilcommons (2019)'
        self.unit = 'nT'

        self.x = np.linspace(-30., 10., 20)
        self.y = np.linspace(-10., 10., 10)
        self.z = np.linspace(-10., 10., 10)

        self.variables = dict(b_x=dict(units='nT', data=None),
                              b_y=dict(units='nT', data=None),
                              b_z=dict(units='nT', data=None),
                              bvec=dict(units='nT', data=None))

        for varname in self.variables:
            units = self.variables[varname]['units']
            self.register_variable(varname, units)
Example #7
    def __init__(self, year, month, day, hour, minute, use_igrf, *args,
                 **kwargs):
        from geopack import t04
        # time since Jan. 1 1970 00:00 UT, as a datetime.timedelta
        dt = datetime.datetime(year, month, day, hour,
                               minute) - datetime.datetime(1970, 1, 1)
        # seconds from 1970/1/1 00:00 UT
        self.dt_seconds = dt.total_seconds()
        qin_denton_url = 'https://rbsp-ect.newmexicoconsortium.org/data_pub/QinDenton/%d/' % (
            year)
        qin_denton_file = 'QinDenton_%04d%02d%02d_1min.txt' % (year, month, day)
        # fetch file
        qin_denton_local_file = './data/QinDenton/%d/%s' % (year,
                                                            qin_denton_file)

        #        import requests
        #        response = requests.get(qin_denton_url + qin_denton_file)
        import pandas as pd
        # Placeholder: Qin-Denton files are whitespace-delimited text, not JSON,
        # so pd.read_json would fail here (pd.read_csv with delim_whitespace=True
        # would be closer); the frame is unused and the W-parameters are zeroed below.
        # qindenton_frame = pd.read_json(qin_denton_local_file)

        self.ps = geopack.recalc(self.dt_seconds)
        self.use_igrf = use_igrf

        from geospacepy import omnireader
        sTimeIMF = datetime.datetime(year, month, day, hour, minute)
        eTimeIMF = sTimeIMF + datetime.timedelta(minutes=1)
        omniInt = omnireader.omni_interval(sTimeIMF, eTimeIMF, '1min')
        t = omniInt['Epoch']  #datetime timestamps
        By = omniInt['BY_GSM']
        Bz = omniInt['BZ_GSM']
        Pdyn = omniInt['Pressure']
        SYM_H = omniInt['SYM_H']
        # need Qin-Denton parameters
        w1, w2, w3, w4, w5, w6 = np.zeros(6, dtype=float)

        #        import pandas as pd
        #        pd.read_json(qin_denton_path)
        # end Qin-Denton acquisition

        # parmod expects scalars; take the first (only) sample of each input
        self.parmod = np.array([Pdyn[0], SYM_H[0], By[0], Bz[0],
                                w1, w2, w3, w4, w5, w6], dtype=float)

        super(T04, self).__init__(*args, **kwargs)
        #        parmod=np.zeros(10,dtype=float)
        #        t89.tsyganenko.init_t89(year,month,day,hour,minute,use_igrf,0,parmod)
        #        t89.tsyganenko.init_t89(int(year),int(month),int(day),int(hour),int(minute),int(use_igrf))
        bounds_error = kwargs.get('bounds_error', False)
        fill_value = kwargs.get('missing_value', np.nan)

        self.units = 'nT'
        self.citation = 'Kamodo.T04 by Lutz Rastaetter (2020), Geopack/Tsyganenko by Sheng Tian (2019) and geospacepy-lite by Liam Kilcommons (2019)'

        self.x = np.linspace(-30., 10., 40)  # make sure to avoid (0,0,0)
        self.y = np.linspace(-10., 10., 20)
        self.z = np.linspace(-10., 10., 20)

        self.variables = dict(b_x=dict(units='nT', data=None),
                              b_y=dict(units='nT', data=None),
                              b_z=dict(units='nT', data=None),
                              bvec=dict(units='nT', data=None))

        for varname in self.variables:
            units = self.variables[varname]['units']
            self.register_variable(varname, units)
Example #8
def plot_comparison_data(all_data,outfile='ssm_comparison.png',outdir='/tmp',offset=9):
	from matplotlib.ticker import FuncFormatter,MaxNLocator
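	# Layout of all_data (inferred from the indexing below): column 0 is the
	# julian date, followed by three blocks of `offset` (=9) columns, one per
	# data source (CDF, then NOAA/MFR, then Madrigal). Within each block:
	# second-of-day, glat, glon, dB x/y/z, averaged dB x/y/z.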
		
	fig = pp.figure(figsize=(6,6))

	gs = mpl.gridspec.GridSpec(7,2)
	a1 = pp.subplot(gs[:2,0])
	a2 = pp.subplot(gs[:2,1])
	a3 = fig.add_subplot(gs[3,:])
	a4 = fig.add_subplot(gs[4,:])
	a5 = fig.add_subplot(gs[5,:])
	a6 = fig.add_subplot(gs[6,:])

	gs.update(hspace=.35,wspace=.35)
	
	axletter_x,axletter_y = -.13,1.0
	axletter_xh,axletter_yh = -.33,1.15
	axlabel_x,axlabel_y = 0.,1.0	
	
	textkwargs = {'fontweight':'bold'}

	jd_cdf = all_data[:,0]

	db_inds = np.array([4,5,6])
	dbav_inds = np.array([7,8,9])

	sod_diff_noaa =  all_data[:,1]-all_data[:,(1+offset)]
	sod_diff_mad =  all_data[:,1]-all_data[:,(1+2*offset)]

	glat_cdf,glon_cdf = all_data[:,2],all_data[:,3]
	glat_noaa,glon_noaa =  all_data[:,(2+offset)],all_data[:,(3+offset)]
	glat_mad,glon_mad =  all_data[:,(2+2*offset)],all_data[:,(3+2*offset)]
	
	glon_cdf[glon_cdf<0] += 360.
	glon_cdf[glon_cdf>360.] -= 360.
	 
	glon_noaa[glon_noaa<0] += 360. 
	glon_noaa[glon_noaa>360.] -= 360.
	
	glon_mad[glon_mad<0.] += 360. 
	glon_mad[glon_mad>360.] -= 360. 
	
	glon_diff_noaa = glon_cdf-glon_noaa
	glon_diff_mad = glon_cdf-glon_mad

	gc_diff_noaa = satplottools.greatCircleDist(np.column_stack((glat_cdf,glon_cdf)),
														np.column_stack((glat_noaa,glon_noaa)),
														lonorlt='lon')
	gc_diff_mad = satplottools.greatCircleDist(np.column_stack((glat_cdf,glon_cdf)),
														np.column_stack((glat_mad,glon_mad)),
														lonorlt='lon')

	magnitude = lambda x: np.sqrt(x[:,0]**2+x[:,1]**2+x[:,2]**2)
	
	db_cdf,db_noaa,db_mad = all_data[:,dbav_inds],all_data[:,(dbav_inds+offset)],all_data[:,(dbav_inds+2*offset)]
	db_diff_noaa =  db_cdf-db_noaa
	db_diff_mad = db_cdf-db_mad

	db_diff_noaa_mag = magnitude(db_diff_noaa)
	db_diff_mad_mag = magnitude(db_diff_mad)
	
	#for a in [a1,a2,a3,a4,a5,a6]:
	#	a.cla()

	sod_bins = np.arange(-2.,2.,.1)
	sns.distplot(sod_diff_noaa[np.isfinite(sod_diff_noaa)],kde=False,bins=sod_bins,color='g',label='CDF-MFR',ax=a1)
	sns.distplot(sod_diff_mad[np.isfinite(sod_diff_mad)],kde=False,bins=sod_bins,color='r',label='CDF-MAD',ax=a1)
	#a1.plot(jd_cdf,sod_diff_noaa,'go')
	#a1.plot(jd_cdf,sod_diff_mad,'ro')
	#a1.set_yscale('log')
	a1.set_ylabel('Equator Crossings')
	a1.legend()
	a1.set_xlabel('Seconds')
	a1.text(axletter_xh,axletter_yh, 'a)',transform=a1.transAxes,**textkwargs)

	R = 6371.2 + 850.  # Earth radius + 850 km orbit altitude [km]
	gc_bins = np.arange(0,.1,.01)*R/180.*np.pi
	sns.distplot(gc_diff_noaa[np.isfinite(gc_diff_noaa)]*R,kde=False,bins=gc_bins,color='g',label='CDF-MFR',ax=a2)
	sns.distplot(gc_diff_mad[np.isfinite(gc_diff_mad)]*R,kde=False,bins=gc_bins,color='r',label='CDF-MAD',ax=a2)
	#a2.plot(jd_cdf,glon_diff_noaa,'go')
	#a2.plot(jd_cdf,glon_diff_mad,'ro')
	#a2.set_yscale('log')
	a2.legend()
	a2.set_ylabel('Equator Crossings')
	a2.set_xlabel('Km')
	a2.text(axletter_xh,axletter_yh, 'b)',transform=a2.transAxes,**textkwargs)

	"""
	a3.plot(jd_cdf,glon_cdf,'bo')
	a3.plot(jd_cdf,glon_noaa,'go')
	a3.plot(jd_cdf,glon_mad,'ro')
	#a2.set_yscale('log')
	a3.set_ylabel('Degrees')
	a3.set_title('Longitude at Equator Crossing')

	a4.plot(jd_cdf,glat_cdf,'bo')
	a4.plot(jd_cdf,glat_noaa,'go')
	a4.plot(jd_cdf,glat_mad,'ro')
	#a2.set_yscale('log')
	a4.set_ylabel('Degrees')
	a4.set_title('Latitude at Equator Crossing')
	"""
	#alpha=.8
	a3.plot(jd_cdf,db_mad[:,0],'r^',label='MAD')
	a3.plot(jd_cdf,db_noaa[:,0],'g.',label='MFR')
	a3.plot(jd_cdf,db_cdf[:,0],'b.',label='CDF')
	a3.set_title('Magnetic Perturbation @ GEO Equator\n',fontweight='bold')
	a3.text(axletter_x,axletter_y, 'c)',
		transform=a3.transAxes,**textkwargs)
	a3.text(axlabel_x,axlabel_y, 'X (Down) [nT]',
		transform=a3.transAxes,**textkwargs)
	
	a4.plot(jd_cdf,db_mad[:,1],'r^',label='MAD')
	a4.plot(jd_cdf,db_noaa[:,1],'g.',label='MFR')
	a4.plot(jd_cdf,db_cdf[:,1],'b.',label='CDF')
	a4.text(axletter_x,axletter_y, 'd)',
		transform=a4.transAxes,**textkwargs)
	a4.text(axlabel_x,axlabel_y, 'Y (Along) [nT]',
		transform=a4.transAxes,**textkwargs)
	
	#a4.set_title('Y (Along-Track)')

	a5.plot(jd_cdf,db_mad[:,2],'r^',label='MAD')
	a5.plot(jd_cdf,db_noaa[:,2],'g.',label='MFR')
	a5.plot(jd_cdf,db_cdf[:,2],'b.',label='CDF')
	a5.text(axletter_x,axletter_y, 'e)',
		transform=a5.transAxes,**textkwargs)
	a5.text(axlabel_x,axlabel_y, 'Z (Across) [nT]',
		transform=a5.transAxes,**textkwargs)
	
	#a5.set_title('Z (Across-Track)')
	
	a3.legend(bbox_to_anchor=(0.,.8, 1., .102),ncol=3,loc=4)
	for a in [a3,a4,a5]:
		a.xaxis.set_ticklabels([])
		a.yaxis.set_major_locator(MaxNLocator(5))
		#a.set_ylim([-250.,250.])
		#a.set_xlim((jd_cdf[0],jd_cdf[-1]))
	#a5.set_xlabel('Time of Equator Crossing (from CDF data)')

	startdt,enddt = special_datetime.jd2datetime(jd_cdf[0]),special_datetime.jd2datetime(jd_cdf[-1])
	oi = omnireader.omni_interval(startdt,enddt,'hourly')
	dt_omni = oi['Epoch']
	jd_omni = special_datetime.datetimearr2jd(oi['Epoch'])
	dst = oi['DST']
	a6.plot(jd_omni,dst,'m.')
	a6.text(axletter_x,axletter_y, 'f)',
		transform=a6.transAxes,**textkwargs)
	a6.text(axlabel_x,axlabel_y, 'DST [nT]',
		transform=a6.transAxes,**textkwargs)
	#a6.set_title('Hourly OMNIWeb DST',fontweight='bold',ha='right')

	jd2str = lambda x,pos: special_datetime.jd2datetime(x).strftime('%m/%d\n%H:%M')
	a6.xaxis.set_major_formatter(FuncFormatter(jd2str))
	a6.set_xlabel('Date & Universal Time (2010)',fontweight='bold')

	fig.savefig(os.path.join(outdir,outfile),dpi=300.)
Example #9
def download_omni_text(input_datetime):

    t_start = input_datetime - datetime.timedelta(days=1)
    t_end = input_datetime + datetime.timedelta(days=1, minutes=10)

    t_start_day = input_datetime
    t_end_day = input_datetime + datetime.timedelta(minutes=1439)  # 23:59 of the day

    #--------------------------------------------------------#
    #	OMNI Data - includes solar wind, and geomag params   #
    #--------------------------------------------------------#

    #get OMNI data
    omniInt = omnireader.omni_interval(t_start,
                                       t_end,
                                       '5min',
                                       cdf_or_txt='txt')

    #print(omniInt.cdfs[0].vars) #prints all the variables available on omni

    epochs = omniInt['Epoch']  #time array for omni 5min data
    By, Bz = omniInt['BY_GSM'], omniInt['BZ_GSM']
    AE, SymH = omniInt['AE_INDEX'], omniInt['SYM_H']
    vsw, psw = omniInt['flow_speed'], omniInt['Pressure']
    borovsky_reader = omnireader.borovsky(omniInt)
    borovsky = borovsky_reader()

    #newell_reader = omnireader.newell(omniInt)
    #newell = newell_reader()

    def NewellCF_calc(v, bz, by):
        # Newell et al. (2007) coupling function:
        # dPhi_MP/dt = v^(4/3) * B_T^(2/3) * sin^(8/3)(theta_c / 2)
        # v expected in km/s, b's expected in nT
        bt = np.sqrt(by**2 + bz**2)
        bztemp = bz.copy()  # copy to avoid mutating the caller's Bz in place
        bztemp[bz == 0] = .001
        # Calculate clock angle (theta_c = t_c)
        tc = np.arctan2(by, bztemp)
        neg_tc = bt * np.cos(tc) * bz < 0
        tc[neg_tc] = tc[neg_tc] + np.pi
        sintc = np.abs(np.sin(tc / 2.))
        NCF = (v**1.33333) * (sintc**2.66667) * (bt**0.66667)
        return NCF

    newell = NewellCF_calc(vsw, Bz, By)

    proton_flux_10MeV = omniInt['PR-FLX_10']
    proton_flux_30MeV = omniInt['PR-FLX_30']
    proton_flux_60MeV = omniInt['PR-FLX_60']

    #calculate clock angle
    clock_angle = np.degrees(np.arctan2(By, Bz))
    clock_angle[clock_angle < 0] = clock_angle[clock_angle < 0] + 360.

    print('Got 5-minute data')

    omniInt_1hr = omnireader.omni_interval(t_start,
                                           t_end,
                                           'hourly',
                                           cdf_or_txt='txt')
    epochs_1hr = omniInt_1hr['Epoch']  #datetime timestamps
    F107, KP = omniInt_1hr['F10_INDEX'], omniInt_1hr['KP']

    print('Got hourly data')
    #--------------------------------------------------------#
    #	GOES X-ray data - Channel 1-8A, defines flare class  #
    #--------------------------------------------------------#

    results = Fido.search(a.Time(t_start, t_end), a.Instrument('XRS'))
    files = Fido.fetch(results)
    goes = TimeSeries(files, concatenate=True)

    goes_l = goes.data['xrsb']

    print('Got GOES data')
    #--------------------------------------------------------#
    #	Resample data to 1min to match GNSS CHAIN network    #
    #--------------------------------------------------------#

    #resample OMNI solar wind data to 1-minute cadence, truncated to the day of interest
    def to_1min(values, index):
        return (pd.Series(values, index=index).resample('1T').pad()
                .truncate(t_start_day, t_end_day))

    By_data = to_1min(By, epochs)
    Bz_data = to_1min(Bz, epochs)
    AE_data = to_1min(AE, epochs)
    SymH_data = to_1min(SymH, epochs)
    vsw_data = to_1min(vsw, epochs)
    psw_data = to_1min(psw, epochs)
    borovsky_data = to_1min(borovsky, epochs)
    newell_data = to_1min(newell, epochs)
    proton_10_data = to_1min(proton_flux_10MeV, epochs)
    proton_30_data = to_1min(proton_flux_30MeV, epochs)
    proton_60_data = to_1min(proton_flux_60MeV, epochs)
    clock_angle_data = to_1min(clock_angle, epochs)

    F107data = to_1min(F107, epochs_1hr)
    KPdata = to_1min(KP, epochs_1hr)

    #function to find data at previous time intervals
    def roll_back(data, minutes=1):
        # note: assumes `data` is aligned with the 5-minute `epochs` index above
        ts = t_start_day - datetime.timedelta(minutes=minutes)
        te = t_end_day - datetime.timedelta(minutes=minutes)
        data = pd.Series(data, index=epochs).resample('1T').pad()
        new_data = data.truncate(ts, te)
        rolled_data = pd.Series(np.array(new_data), index=By_data.index)
        return rolled_data

    #calculate rolled back timeseries - 15 and 30 minutes previous
    By_15 = roll_back(By, minutes=15)
    By_30 = roll_back(By, minutes=30)
    Bz_15 = roll_back(Bz, minutes=15)
    Bz_30 = roll_back(Bz, minutes=30)
    AE_15 = roll_back(AE, minutes=15)
    AE_30 = roll_back(AE, minutes=30)
    SymH_15 = roll_back(SymH, minutes=15)
    SymH_30 = roll_back(SymH, minutes=30)
    vsw_15 = roll_back(vsw, minutes=15)
    vsw_30 = roll_back(vsw, minutes=30)
    psw_15 = roll_back(psw, minutes=15)
    psw_30 = roll_back(psw, minutes=30)
    borovsky_15 = roll_back(borovsky, minutes=15)
    borovsky_30 = roll_back(borovsky, minutes=30)
    newell_15 = roll_back(newell, minutes=15)
    newell_30 = roll_back(newell, minutes=30)
    clock_angle_15 = roll_back(clock_angle, minutes=15)
    clock_angle_30 = roll_back(clock_angle, minutes=30)
    #resample GOES X-ray flux
    goes_data = goes_l.resample('1T').mean().truncate(t_start_day, t_end_day)

    #put all in a dataframe and save

    dataframe = pd.DataFrame()
    dataframe['Bz - 0min [nT]'] = Bz_data
    dataframe['Bz - 15min [nT]'] = Bz_15
    dataframe['Bz - 30min [nT]'] = Bz_30

    dataframe['By - 0min [nT]'] = By_data
    dataframe['By - 15min [nT]'] = By_15
    dataframe['By - 30min [nT]'] = By_30

    dataframe['Vsw - 0min [km/s]'] = vsw_data
    dataframe['Vsw - 15min [km/s]'] = vsw_15
    dataframe['Vsw - 30min [km/s]'] = vsw_30

    dataframe['Psw - 0min [nPa]'] = psw_data
    dataframe['Psw - 15min [nPa]'] = psw_15
    dataframe['Psw - 30min [nPa]'] = psw_30

    dataframe['AE - 0min [nT]'] = AE_data
    dataframe['AE - 15min [nT]'] = AE_15
    dataframe['AE - 30min [nT]'] = AE_30

    dataframe['SymH - 0min [nT]'] = SymH_data
    dataframe['SymH - 15min [nT]'] = SymH_15
    dataframe['SymH - 30min [nT]'] = SymH_30

    dataframe['Clock Angle - 0min [deg]'] = clock_angle_data
    dataframe['Clock Angle - 15min [deg]'] = clock_angle_15
    dataframe['Clock Angle - 30min [deg]'] = clock_angle_30

    dataframe['Newell CF - 0min [m/s^(4/3) T^(2/3)]'] = newell_data
    dataframe['Newell CF - 15min [m/s^(4/3) T^(2/3)]'] = newell_15
    dataframe['Newell CF - 30min [m/s^(4/3) T^(2/3)]'] = newell_30

    dataframe['Borovsky CF - 0min [nT km/s]'] = borovsky_data
    dataframe['Borovsky CF - 15min [nT km/s]'] = borovsky_15
    dataframe['Borovsky CF - 30min [nT km/s]'] = borovsky_30

    dataframe['Kp [dimensionless]'] = KPdata
    dataframe['F107 [sfu=10^-22 W/m^2/Hz]'] = F107data

    dataframe['Proton 10MeV'] = proton_10_data
    dataframe['Proton 30MeV'] = proton_30_data
    dataframe['Proton 60MeV'] = proton_60_data

    dataframe['GOES X-ray Wm^-2'] = goes_data
    dataframe_nan = dataframe.replace(9999.99,
                                      np.nan)  #replace 9999.99 with nans

    filepath = '/Users/ryanmcgranaghan/Documents/Conferences/ISSI_2018/ISSI_geospaceParticles/solar_data/'
    filename = filepath + 'solardata' + input_datetime.strftime(
        '%Y') + '_' + input_datetime.strftime('%j') + '.csv'
    print('output solardata file location = {}'.format(filename))
    dataframe_nan.to_csv(filename, index_label='Datetime')
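
A hypothetical invocation; the date is arbitrary, and the hard-coded output `filepath` above would need to exist.

if __name__ == '__main__':
    download_omni_text(datetime.datetime(2015, 3, 17))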