Example #1
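These functions are excerpted from a larger module, so the snippets below reference names defined elsewhere. A minimal sketch of the module-level imports they assume is shown here; ion_mq_stats, cnst, ERPA, AFM, plot_AFM, and plot_ERPA are project-local helpers that are not part of these examples.

import os
import sys
import calendar
import datetime

import numpy as np
from numpy.lib import recfunctions
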
def write_combined_file(
    target_file,
    output_dir='/Users/ptracy/Box Sync/00_postdoc_projects/Wind-STICS/zephyrus_transfer/output_dir/'
):

    STICS_data_product = 'DF'

    #Retrieve STICS data
    STICS_3D_data_format = np.dtype([('year', float), ('doy', float),
                                     ('sector', int), ('telescope', int),
                                     ('eoq', float), ('ion', 'S5'),
                                     (STICS_data_product, float),
                                     (STICS_data_product + '_error', float),
                                     ('delT', float)])
    STICS_data = np.loadtxt(target_file,
                            skiprows=4,
                            dtype=STICS_3D_data_format)
    #gather ion info
    m, q = ion_mq_stats(STICS_data['ion'][0])

    #Recover the counts from the stated Poisson error: DF_error = DF/sqrt(C),
    #so C = (DF/DF_error)**2 and the error on the recovered counts is sqrt(C)
    counts_value = np.floor(
        np.round((STICS_data['DF'] / STICS_data['DF_error'])**2))
    counts_error_value = np.sqrt(
        counts_value)  #equivalent to counts*(1/sqrt(counts)), avoids 0/0 -> NaN

    #fix divide by zero errors
    small_num = 1e-10
    zero_ind = np.where((STICS_data['DF'] < small_num)
                        & (STICS_data['DF'] > -1.0 * small_num))
    counts_value[zero_ind] = 0.0
    counts_error_value[zero_ind] = 0.0

    #Convert phase space density to differential flux using
    #PSD[s^3/km^6] = 1.076 * m[amu]^2 / (2 * E[keV]) * dJ[1/(cm^2*sr*s*keV)],
    #so dJ = DF / (1.076 * m^2 / (2 * (E/q) * q))
    dJ_value = STICS_data['DF'] / (
        1.076 * (m**2) / (2.0 * STICS_data['eoq'] * q))  #1/(cm^2*sr*s*keV)
    dJ_error_value = dJ_value * (1.0 / np.sqrt(counts_value))

    #fix divide by zero errors
    dJ_value[zero_ind] = 0.0
    dJ_error_value[zero_ind] = 0.0

    #append counts data to overall STICS_data
    #STICS_data=matplotlib.mlab.rec_append_fields(STICS_data, ('Counts', 'Counts_error','dJ', 'dJ_error'),
    #    (counts_value, counts_error_value, dJ_value, dJ_error_value))
    STICS_data = recfunctions.rec_append_fields(
        STICS_data, ('Counts', 'Counts_error', 'dJ', 'dJ_error'),
        (counts_value, counts_error_value, dJ_value,
         dJ_error_value))  #don't have matplotlib version on zephyrus (yet)

    temp = target_file.split('/')
    just_filename = temp[-1]  #remove directory info
    just_filename = just_filename.replace('wtdc', '')
    just_filename = just_filename.replace('DF', 'VDF')
    just_filename = just_filename.replace('_Lite', '')
    just_filename = just_filename.replace('_M-MOQ', '')
    just_filename = 'wstics_' + just_filename
    outfile1 = output_dir + just_filename

    header_string = ('year, doy, sector, telescope, eoq, ion, DF, DF_error,'
                     ' counts, counts_error, dJ, dJ_error, delT')
    #format_string='%7.2f %8.4f %2d %1d %8.4f %4s %8.2e %8.2e %8.2e %8.2e %8.2e %8.2e %8.2e'
    format_string = [
        '%7.2f', '%8.4f', '%2d', '%1d', '%8.4f', '%4s', '%8.2e', '%8.2e',
        '%8.2e', '%8.2e', '%8.2e', '%8.2e', '%8.2e'
    ]  #need to specify format in this way to use delimiter option in np.savetxt
    with open(outfile1, 'w') as file_handle:
        #np.savetxt(file_handle, STICS_data, header='Year, doyfrac, Telescope, Sec 0, Sec 1, Sec 2, Sec 3, '
        #    'Sec 4, Sec 5, Sec 6, Sec 7, Sec 8, Sec 9, Sec 10, Sec 11, Sec 12, Sec 13, Sec 14, Sec 15',
        #    fmt=('%d %f %d %e %e %e %e %e %e %e %e %e %e %e %e %e %e %e %e'))
        file_handle.write(
            '#  Wind / STICS 3D velocity distributions for ion ' +
            STICS_data['ion'][0] + '.\n')
        file_handle.write('# Created: ' + str(datetime.datetime.now()) + '\n')
        file_handle.write('# Modified to have C, dJ, and DF\n')
        np.savetxt(
            file_handle,
            STICS_data[[
                'year', 'doy', 'sector', 'telescope', 'eoq', 'ion', 'DF',
                'DF_error', 'Counts', 'Counts_error', 'dJ', 'dJ_error', 'delT'
            ]],  #fancy indexing of structured array
            header=header_string,
            fmt=format_string,
            delimiter='\t')

    #Remove old DF file after creation of the new one
    os.remove(target_file)  #remove file from directory after run

    print "Finished reprocessing wtdcLV2_Lite distribution file normally"  #Add this so I can search for it in the output files.
Example #2
def get_erpa_data_mag_input(STICS_data, mfi_data):
    '''
    Function to get the data required to create an energy-resolved pitch angle
    distribution from Wind/STICS data.  Differs from "get_erpa_data" in that it
    takes a year of mfi data as an input, so that the mfi data doesn't have to
    be loaded repeatedly with each call to get_erpa_data (speed improvement).

    Arguments:
        STICS_data: an ndarray of the STICS data for the current time period
        mfi_data: mag data for the year of the current STICS data
    '''
    ion_m, ion_q = ion_mq_stats(
        STICS_data['ion'][0])  #return mass [amu], charge [e]

    #determine how many unique time steps exist in current data set
    #(may need to change this later to be a subset of the input data?)

    leap_add = np.array([int(calendar.isleap(a)) for a in STICS_data['year']])
    #isleap only works on scalar years, need to use list comprehension
    yearfrac = STICS_data['year'] + (STICS_data['doy'] - 1.0) / (365.0 +
                                                                 leap_add)

    unique_times = np.unique(yearfrac)
    n_time_steps = unique_times.size

    #prepare total time range string: for use in plotting later
    delta_t = STICS_data['delT'][0]  #seconds
    start_yearfrac = yearfrac[0] - delta_t / (
        60.0 * 60.0 * 24.0 *
        (365 + calendar.isleap(np.floor(yearfrac[0]))))  #yearfrac
    stop_yearfrac = yearfrac[-1]  #yearfrac

    #Define STICS measurement parameters
    n_epq = 32  #number of E/q steps in data
    n_telescope = 3  #number of telescopes
    n_sector = 16  #number of sectors
    epq_table = np.unique(STICS_data['eoq'])  #keV/e

    #Define edges for pitch angle bins
    PA_resolution = 15  #deg
    PA_low_bin_edges = np.arange(0, 180, PA_resolution)  #deg

    total_ERPA = np.zeros(
        (len(PA_low_bin_edges),
         n_epq))  #sum contribution to ERPA over time (not properly weighted)
    viewtime_tot_arr = total_ERPA.copy(
    )  #preallocate view time array to same size as "total_ERPA", store total observation time
    #for each PA-E/q bin

    #Compute velocity direction/ solid angle for each telescope and sector
    #combination for use in the loop
    bin_v_dir_data_type = np.dtype([('vx', np.float64), ('vy', np.float64),
                                    ('vz', np.float64)])
    bin_v_dir_arr = np.zeros(
        (n_telescope, n_sector),
        bin_v_dir_data_type)  #this is a 2D array of unit vectors
    #pretty sweet that we can make it so easily in python (can reference both with component names and 2D indexing!)
    bin_SA_arr = np.zeros((n_telescope, n_sector))

    #Define central theta/phi angle for each STICS bin
    telescope_num_arr = np.array([0, 1, 2])  #store index number of telescopes
    theta_bin_width = 53.0 * np.pi / 180.0  #rad, bin width in polar direction
    theta_lower_bin_edges = np.array([90 - 79.5, 90 - 26.5, 90 + 26.5
                                      ]) * np.pi / 180.0  #rad, polar angle
    theta_upper_bin_edges = theta_lower_bin_edges.copy(
    ) + theta_bin_width  #rad
    theta_mid_bin = (theta_upper_bin_edges + theta_lower_bin_edges) / (2.0)

    #Azimuth direction bins
    #Define zero degrees in azimuth as the sunward facing sector center (sector 9)
    sector_num_arr = np.arange(0, 16, 1)  #store index number of sectors
    phi_bin_width = 22.5 * np.pi / 180.0  #rad, azimuthal sector width
    phi_mid_bin = np.arange(202.5, 202.5 - 360, -22.5) * np.pi / 180.0
    ind1 = np.where(phi_mid_bin < 0.0)
    phi_mid_bin[ind1] = phi_mid_bin[
        ind1] + 2.0 * np.pi  #set azimuth range to [0,360) deg

    #Loop over all bins and compute unit vector
    for i in xrange(len(theta_mid_bin)):  #loop over telescope
        for j in xrange(len(phi_mid_bin)):  #loop over sector
            #make sure sector numbers line up with indices!
            #mid bin angle correspond to look direction, we need to
            #take (-) of that to get observed velocity/flow direction
            bin_v_dir_arr[telescope_num_arr[i],
                          sector_num_arr[j]]['vx'] = -np.sin(
                              theta_mid_bin[i]) * np.cos(phi_mid_bin[j])
            bin_v_dir_arr[telescope_num_arr[i],
                          sector_num_arr[j]]['vy'] = -np.sin(
                              theta_mid_bin[i]) * np.sin(phi_mid_bin[j])
            bin_v_dir_arr[telescope_num_arr[i],
                          sector_num_arr[j]]['vz'] = -np.cos(theta_mid_bin[i])

            bin_SA_arr[telescope_num_arr[i],
                       sector_num_arr[j]] = phi_bin_width * (
                           np.cos(theta_lower_bin_edges[i]) -
                           np.cos(theta_upper_bin_edges[i])
                       )  #solid angle, steradians
        #End of loop over j
    #End of loop over i

    #loop over number of unique time steps
    for i in xrange(n_time_steps):
        current_stop_yearfrac = unique_times[
            i]  #this should be the stop time for the current time step

        #find indices of STICS data that are in current time step
        small_num = 1.0E-5
        STICS_time_ind = np.where(
            (yearfrac > current_stop_yearfrac - small_num)
            & (yearfrac < current_stop_yearfrac + small_num))[0]

        current_start_yearfrac = current_stop_yearfrac - STICS_data['delT'][
            STICS_time_ind[0]] / (
                60.0 * 60.0 * 24.0 *
                (365 + calendar.isleap(np.floor(yearfrac[0]))))

        #Load in MFIcurr data for the relevant year
        ##mag_data_dir= 'C:/Users/ptracy/Box Sync/00_postdoc_projects/Wind-STICS/wind_mfi/pickled/'
        #mag_data_dir= '/Users/ptracy/Box Sync/00_postdoc_projects/Wind-STICS/wind_mfi/pickled/' #mac compatible
        ##load in mag data takes a while
        #if int(current_stop_yearfrac) == int(current_start_yearfrac):
        #    with open(mag_data_dir+'wind_mfi_'+str(int(current_stop_yearfrac))+'.pkl') as fp:
        #        mfi_data=pickle.load(fp)
        #    print 'after load mag erpa'
        #find all indices within current time range
        mag_ind = np.where(
            (mfi_data['yearfrac'] > current_start_yearfrac) &
            (mfi_data['yearfrac'] < current_stop_yearfrac))  #returns tuple!

        #compute average component of B vector (GSE coords)
        ave_bx = np.mean(
            mfi_data['bx_gse (nT)'][mag_ind]
        )  #nT -> may need to check for fill values and NaNs here...
        ave_by = np.mean(mfi_data['by_gse (nT)'][mag_ind])  #nT
        ave_bz = np.mean(mfi_data['bz_gse (nT)'][mag_ind])  #nT

        ave_b_vec = np.array([ave_bx, ave_by, ave_bz])  #nT

        #Don't have this functionality yet...
        '''
        elif int(current_stop_yearfrac)-1 == int(current_start_yearfrac): #spanning a year
            with open(mag_data_dir+'wind_mfi_'+str(int(current_start_yearfrac))+'.pkl') as fp:
                mfi_data_1=pickle.load(fp)
                
            with open(mag_data_dir+'wind_mfi_'+str(int(current_stop_yearfrac))+'.pkl') as fp:
                mfi_data_2=pickle.load(fp)  
                
            #find indices form year 1 that are within time range
            mag_ind1=np.where(mfi_data_1['yearfrac'] > current_start_yearfrac)
            mag_ind2=np.where(mfi_data_2['yearfrac'] < current_stop_yearfrac)
            
            bx_combined_arr=np.concatenate( (mfi_data_1['bx_gse (nT)'][mag_ind1], mfi_data_2['bx_gse (nT)'][mag_ind2]), axis=0)
            by_combined_arr=np.concatenate( (mfi_data_1['by_gse (nT)'][mag_ind1], mfi_data_2['by_gse (nT)'][mag_ind2]), axis=0)
            bz_combined_arr=np.concatenate( (mfi_data_1['bz_gse (nT)'][mag_ind1], mfi_data_2['bz_gse (nT)'][mag_ind2]), axis=0)
            
            #compute average component of B vector (GSE coords)
            ave_bx=np.mean(bx_combined_arr) #nT -> may need to check for fill values and NaNs here...
            ave_by=np.mean(by_combined_arr) #nT
            ave_bz=np.mean(bz_combined_arr) #nT

            ave_b_vec=np.array([ave_bx, ave_by, ave_bz]) #nT
        
        else:
            raise NameError, 'strange yearfractions detected for mag data loading'
        '''
        #preallocate arrays that are overwritten each time step
        ERPA_array_SA_wtd = np.zeros(
            (len(PA_low_bin_edges),
             n_epq))  #keep track of SA weighted PSD in each PA-E/q bin
        SA_arr = np.zeros(
            (len(PA_low_bin_edges), n_epq
             ))  #keep track of total solid angle that observed each PA-Eq bin

        #Compute the pitch angle of each telescope and sector combo of Wind STICS for this time step
        #(and average b vector direction); need to loop over each telescope and sector combo of STICS,
        #but don't need to do it for every E/q step since look directions don't change between E/q steps.
        PA_for_telescope_sector = np.zeros(
            (n_telescope, n_sector
             ))  #preallocate to store PA value of each angular bin of STICS
        PA_bin_ind_for_telescope_sector = np.zeros(
            (n_telescope, n_sector
             ))  #preallocate to store PA bin index of each angular bin of STICS

        for tele_ind in xrange(
                n_telescope
        ):  #loop over telescope (tele_ind = telescope index)
            for sec_ind in xrange(
                    n_sector):  #loop over sector (sec_ind = sector index)
                v_unit_vec = np.array([
                    bin_v_dir_arr[tele_ind, sec_ind]['vx'],
                    bin_v_dir_arr[tele_ind, sec_ind]['vy'],
                    bin_v_dir_arr[tele_ind, sec_ind]['vz']
                ])  #call unit vec from array
                PA_for_telescope_sector[tele_ind, sec_ind] = np.arccos(
                    np.dot(ave_b_vec, v_unit_vec) /
                    (np.linalg.norm(ave_b_vec) *
                     np.linalg.norm(v_unit_vec))) * 180.0 / np.pi  #deg
                PA_bin_ind = np.where(
                    PA_low_bin_edges ==
                    np.floor(PA_for_telescope_sector[tele_ind, sec_ind] /
                             PA_resolution) * PA_resolution)
                PA_bin_ind_for_telescope_sector[tele_ind, sec_ind] = PA_bin_ind[
                    0]  #store pitch angle bin indices of each angular bin of STICS

                #record total solid angle in each PA- E/q bin
                SA_arr[PA_bin_ind, :] = SA_arr[PA_bin_ind, :] + bin_SA_arr[
                    tele_ind, sec_ind]
                #now we can use this PA_bin_ind for each E/q step in this angular bin

                #find all entries at current sector/telescope (for current time ind), this covers E/q steps
                epq_subind = np.where(
                    (STICS_data[STICS_time_ind]['telescope'] == tele_ind)
                    & (STICS_data[STICS_time_ind]['sector'] == sec_ind))[0]
                #take first element of returned tuple

                for kk in xrange(len(epq_subind)):
                    small_val = 0.01  #small number, smaller than % difference of adjacent E/q steps (for np.where search)
                    eoq_step_ind = np.where(
                        (STICS_data['eoq'][STICS_time_ind[epq_subind[kk]]] >
                         epq_table * (1.0 - small_val))
                        & (STICS_data['eoq'][STICS_time_ind[epq_subind[kk]]] <
                           epq_table * (1.0 + small_val)))[0]
                    #"[0]" at end of where statement extracts 1D indices from tuple
                    PSD_temp = (
                        ion_m**2 / (2.0 * epq_table[eoq_step_ind] * ion_q)
                    ) * STICS_data['dJ'][STICS_time_ind[epq_subind[
                        kk]]]  #units of (amu^2/keV) * (1/(cm^2*sec*sr*keV) )
                    PSD_temp = PSD_temp * ((1 / cnst.keV2eV) * (1 / cnst.e2C) *
                                           (cnst.amu2kg**2)) * (
                                               (1 / cnst.cm2m**2) *
                                               (1 / cnst.keV2eV) *
                                               (1 / cnst.e2C))  #s^3/m^6
                    PSD_temp = PSD_temp * (cnst.km2m**6)  #s^3/km^6
                    ERPA_array_SA_wtd[
                        PA_bin_ind, eoq_step_ind] = ERPA_array_SA_wtd[
                            PA_bin_ind, eoq_step_ind] + bin_SA_arr[
                                tele_ind, sec_ind] * PSD_temp  #sr* s^3/km^6
                #End of loop over kk
            #End of loop over sec_ind
        #End of loop over tele_ind
        #Back to loop over time.

        #normalize PSD by solid angle (accounts for the weighting by solid angle done previously)
        ERPA_array = ERPA_array_SA_wtd / SA_arr  #divide element by element

        #set NaN values to zero
        zero_SA_ind = np.where(SA_arr < 1.0E-10)
        if len(zero_SA_ind[0]) > 0:
            ERPA_array[zero_SA_ind] = 0.0  #set NaN values to zero (works out
            #to be same as not including them in weighted average)

        #Need to weight each scan time by the accumulation time
        total_ERPA = total_ERPA + ERPA_array * STICS_data['delT'][
            STICS_time_ind[0]]  # (s^3/km^6) * s
        #assume "delT" is same for all telescope/sector/epq bins in current time step

        nonzero_SA_ind = np.where(SA_arr > 0.0)
        if len(nonzero_SA_ind[0]) > 0:
            viewtime_tot_arr[nonzero_SA_ind] = viewtime_tot_arr[
                nonzero_SA_ind] + STICS_data['delT'][STICS_time_ind[0]]  #s

    #End of loop over time steps
    final_ERPA = total_ERPA / viewtime_tot_arr  #element by element division

    #ERPA bins that were never observed over the whole time period need to be separately identified in the array.
    #We will set them to -1.
    zero_viewtime_ind = np.where(
        viewtime_tot_arr < 1.0E-5
    )  #1.0E-5 is arbitrary low bound, just lower than single accum time
    if len(zero_viewtime_ind[0]) > 0:
        final_ERPA[
            zero_viewtime_ind] = -1.0  #should overwrite all remaining NaN values

    return final_ERPA, start_yearfrac, stop_yearfrac, delta_t  # (s^3/km^6), at the moment
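The pitch-angle assignment above reduces, per look direction, to the angle between the average B vector and the observed velocity unit vector, floored into 15-degree bins. A standalone sketch with illustrative (made-up) vectors:

import numpy as np

PA_resolution = 15  # deg
PA_low_bin_edges = np.arange(0, 180, PA_resolution)  # deg

ave_b_vec = np.array([3.0, -2.0, 1.0])   # illustrative average B, nT
v_unit_vec = np.array([0.0, 0.0, -1.0])  # illustrative flow direction

pa = np.arccos(np.dot(ave_b_vec, v_unit_vec) /
               (np.linalg.norm(ave_b_vec) *
                np.linalg.norm(v_unit_vec))) * 180.0 / np.pi  # ~105.5 deg
pa_bin_ind = np.where(PA_low_bin_edges ==
                      np.floor(pa / PA_resolution) * PA_resolution)
assert pa_bin_ind[0][0] == 7  # falls in the 105-120 deg bin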
Example #3
def get_afm_data_mag_input(STICS_data, mfi_data):
    '''
    Function to get the data required to create an all sky flux map
    from Wind/STICS data. Differs from "get_afm_data" in that it takes
    a year of mfi data as an input so that the mfi data doesn't have to be loaded 
    repeatedly with each call to get_afm_data (speed improvement)
    
    Arguments:
        STICS_data: an ndarray of the STICS data for the current time period
        mfi_data: mag data for the year of the current STICS data
    
    '''
    ion_m, ion_q = ion_mq_stats(
        STICS_data['ion'][0])  #return mass [amu], charge [e]

    #determine how many unique time steps exist in current data set
    #(may need to change this later to be a subset of the input data?)

    leap_add = np.array([int(calendar.isleap(a)) for a in STICS_data['year']])
    #isleap only works on scalar years, need to use list comprehension
    yearfrac = STICS_data['year'] + (STICS_data['doy'] - 1.0) / (365.0 +
                                                                 leap_add)

    unique_times = np.unique(yearfrac)
    n_time_steps = unique_times.size

    #prepare total time range string: for use in plotting later
    delta_t = STICS_data['delT'][0]  #seconds
    start_yearfrac = yearfrac[0] - delta_t / (
        60.0 * 60.0 * 24.0 *
        (365 + calendar.isleap(np.floor(yearfrac[0]))))  #yearfrac
    stop_yearfrac = yearfrac[-1]  #yearfrac

    #assemble all sky flux map array
    n_epq = 32  #number of E/q steps in data
    n_telescope = 3  #number of telescopes
    n_sector = 16  #number of sectors

    #loop through and put every observation in its appropriate position in afm array
    afm_array_wtd = np.zeros((n_telescope, n_sector),
                             dtype=np.float64)  #preallocate
    accum_time_array = afm_array_wtd.copy(
    )  #preallocate, NEED TO MAKE COPY HERE! OTHERWISE
    #THE NEW ARRAY IS SIMPLY A REFERENCE TO THE OLD!

    #Calculate the width, dE, to be used for each E/q bin
    epq_table = np.unique(STICS_data['eoq'])  #keV/e
    delta = 0.019  #(delta E/q) / (E/q) = 1.9% (Chotoo, 1998)
    epq_L = epq_table - delta * epq_table  #keV/e, left E/q bin edges
    epq_R = epq_table + delta * epq_table  #keV/e, right E/q bin edges

    #there are "gaps" between the E/q bins, so just extend the Reimann sum bins to
    #cover these gaps (a rough solution)
    epq_L_new = epq_L.copy()
    epq_R_new = epq_R.copy()

    epq_R_new[0:-1] = epq_L[
        1:]  #make right bin edges equal to left edge of next bin (except for last bin)

    #bin 26 is excluded from the data, so it will never have a value above zero.  The actual
    #ambient flux will not always be zero there, so we should try to integrate "over" this bin
    #w/o assuming the zero value: change the right bin edge of bin 25 so it encompasses bin 26 as well
    epq_R_new[25] = epq_L[27]  #account for bin 26

    #Assumed units for dJ/dE (abbreviated dJ in Jacob's code library) are
    # [dJ/dE] = 1/(cm^2 * sec * sr * keV)
    dE = ion_q * (epq_R_new - epq_L_new)  #keV

    #Find the average magnetic field direction for this time period as well-------------

    ##Load in MFIcurr data for the relevant year
    ##mag_data_dir= 'C:/Users/ptracy/Box Sync/00_postdoc_projects/Wind-STICS/wind_mfi/pickled/'
    #mag_data_dir= '/Users/ptracy/Box Sync/00_postdoc_projects/Wind-STICS/wind_mfi/pickled/'#Mac compatible
    #print 'before load afm mag'
    #if int(stop_yearfrac) == int(start_yearfrac):
    #    with open(mag_data_dir+'wind_mfi_'+str(int(stop_yearfrac))+'.pkl') as fp:
    #        mfi_data=pickle.load(fp)
    #    print 'after load afm mag'
    #    #find all indices within current time range
    mag_ind = np.where((mfi_data['yearfrac'] > start_yearfrac) &
                       (mfi_data['yearfrac'] < stop_yearfrac))  #returns tuple!

    #compute average component of B vector (GSE coords)
    ave_bx = np.mean(
        mfi_data['bx_gse (nT)']
        [mag_ind])  #nT -> may need to check for fill values and NaNs here...
    ave_by = np.mean(mfi_data['by_gse (nT)'][mag_ind])  #nT
    ave_bz = np.mean(mfi_data['bz_gse (nT)'][mag_ind])  #nT

    ave_b_vec = np.array([ave_bx, ave_by, ave_bz])  #nT
    #functionality for crossing a year is not available yet
    '''        
    elif int(stop_yearfrac)-1 == int(start_yearfrac): #spanning a year
        with open(mag_data_dir+'wind_mfi_'+str(int(start_yearfrac))+'.pkl') as fp:
            mfi_data_1=pickle.load(fp)
            
        with open(mag_data_dir+'wind_mfi_'+str(int(stop_yearfrac))+'.pkl') as fp:
            mfi_data_2=pickle.load(fp)  
            
        #find indices form year 1 that are within time range
        mag_ind1=np.where(mfi_data_1['yearfrac'] > start_yearfrac)
        mag_ind2=np.where(mfi_data_2['yearfrac'] < stop_yearfrac)
        
        bx_combined_arr=np.concatenate( (mfi_data_1['bx_gse (nT)'][mag_ind1], mfi_data_2['bx_gse (nT)'][mag_ind2]), axis=0)
        by_combined_arr=np.concatenate( (mfi_data_1['by_gse (nT)'][mag_ind1], mfi_data_2['by_gse (nT)'][mag_ind2]), axis=0)
        bz_combined_arr=np.concatenate( (mfi_data_1['bz_gse (nT)'][mag_ind1], mfi_data_2['bz_gse (nT)'][mag_ind2]), axis=0)
        
        #compute average component of B vector (GSE coords)
        ave_bx=np.mean(bx_combined_arr) #nT -> may need to check for fill values and NaNs here...
        ave_by=np.mean(by_combined_arr) #nT
        ave_bz=np.mean(bz_combined_arr) #nT

        ave_b_vec=np.array([ave_bx, ave_by, ave_bz]) #nT
    else:
        raise NameError, 'strange yearfractions detected for mag data loading'
    '''
    #---------------------------------

    for i in xrange(len(STICS_data)):  #one pass per data row (at most n_epq*n_telescope*n_sector*n_time_steps)

        #need to integrate numerically over the E/q dimension.  Accomplish with Riemann-sum
        #type integration so that each E/q step can contribute to the integral independently

        #Find the E/q step of the current dJ value
        small_val = 0.01  #small number, smaller than % difference of adjacent E/q steps (for np.where search)
        eoq_ind = np.where((STICS_data['eoq'][i] > epq_table *
                            (1.0 - small_val))
                           & (STICS_data['eoq'][i] < epq_table *
                              (1.0 + small_val)))

        #INTEGRATION OVER E DIMENSION
        integrated_flux_contribution = STICS_data['dJ'][i] * dE[
            eoq_ind]  #[1/(cm^2 *sr * sec)], dJ/dE * dE
        #note that we are not multiplying by solid angle here, the final value we are
        #integrating to find is the flux divided by solid angle. This ensures that
        #an isotropic distribution will show up as a flat field even when the
        #angular bins have different solid angles.

        #need E/q width of the different E/q bins, (delta E/q) / (E/q) = 1.9% (Chotoo, 1998)
        afm_array_wtd[STICS_data['telescope'][i],
                      STICS_data['sector'][i]] += (  #line continuation
                          integrated_flux_contribution * STICS_data['delT'][i]
                      )  #time weighted dJ value

        #keep track of total observation time in each angular direction
        accum_time_array[STICS_data['telescope'][i],
                         STICS_data['sector'][i]] += STICS_data['delT'][i]
        #-----------end of for loop over data file measurements-----------------

    afm_array = afm_array_wtd / accum_time_array  #divide time back out of weighted average

    return afm_array, start_yearfrac, stop_yearfrac, delta_t, ave_b_vec
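The energy integration above is a Riemann sum with per-bin widths dE = q * (right edge - left edge), after stretching each bin's right edge to the left edge of the next bin to close the inter-bin gaps. A toy sketch of the edge construction, using four fake E/q steps instead of 32 and skipping the bin-26 special case:

import numpy as np

epq_table = np.array([5.0, 6.0, 7.2, 8.6])  # illustrative E/q steps, keV/e
delta = 0.019                               # (delta E/q)/(E/q), Chotoo (1998)
epq_L = epq_table * (1.0 - delta)           # keV/e, left bin edges
epq_R = epq_table * (1.0 + delta)           # keV/e, right bin edges
epq_R_new = epq_R.copy()
epq_R_new[0:-1] = epq_L[1:]                 # close the gaps between bins
ion_q = 2.0                                 # illustrative charge state, e
dE = ion_q * (epq_R_new - epq_L)            # keV; last bin keeps native width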
Example #4
def batch_run(
        target_file,
        mag_data_dir='/Users/ptracy/Box Sync/00_postdoc_projects/Wind-STICS/wind_mfi/pickled/',
        output_dir='/Users/ptracy/Box Sync/00_postdoc_projects/Wind-STICS/zephyrus_transfer/output_dir/',
        create_ERPA_plot=True,
        create_AFM_plot=True):
    '''
    target_file - file produced by Jacob's processor, to be read in and
        used to create AFM, ERPA products
    mag_data_dir - directory where the mag data is located (defaults to a local location)
    output_dir - directory where the output AFM/ERPA files and plots are written
    create_ERPA_plot - toggle to create ERPA plot
    create_AFM_plot - toggle to create AFM plot
    '''
    #check that file exists
    if not (os.path.isfile(target_file)):
        print(target_file)
        sys.exit("No data file produced by wtdcLV2 processor")

    #Set the desired time resolution for the output files
    #input_time_res=3.0*60.0*60.0 #seconds
    input_time_res = 30.0 * 60.0  #seconds

    #create list of input data files
    file_list = [target_file]  #one element list

    #STICS_data_product='dJ' #require this for now

    #Retrieve STICS data
    #STICS_3D_data_format=np.dtype([('year',float), ('doy', float),
    #    ('sector', int), ('telescope', int), ('eoq', float), ('ion', 'S4' ), (STICS_data_product, float),
    #    (STICS_data_product+'_error', float), ('delT', float)])

    #change to new data format
    STICS_3D_data_format = np.dtype([('year', float), ('doy', float),
                                     ('sector', int), ('telescope', int),
                                     ('eoq', float), ('ion', 'S5'),
                                     ('DF', float), ('DF' + '_error', float),
                                     ('Counts', float),
                                     ('Counts_error', float), ('dJ', float),
                                     ('dJ_error', float), ('delT', float)])

    #Load in mag data for entire year up front (speed up code running)
    temp1 = file_list[0].split(
        '-')  #estimate start/stop year from file names in file_list
    temp2 = temp1[-1].split('.')
    start_year = int(temp2[0][0:4])
    temp1 = file_list[-1].split('-')
    temp2 = temp1[-1].split('.')
    stop_year = int(temp2[0][0:4])

    #load in mag data

    mfi_data = ERPA.load_mag_data(start_year, stop_year, mag_data_dir)

    j = 0
    STICS_data = np.loadtxt(file_list[j],
                            skiprows=4,
                            dtype=STICS_3D_data_format)

    #extract ion and time stamp from file name
    temp1 = file_list[j].split('/')  #split by folder to get just filename
    temp1 = temp1[-1]  #just take last element (filename)
    temp2 = temp1.split('_')
    ion_name = temp2[3]  #recover ion name
    temp3 = temp2[6]  #grab date portion
    date_string = temp3[0:8]

    ion_m, ion_q = ion_mq_stats(
        STICS_data['ion'][0])  #return mass [amu], charge [e]

    #separate into the time subranges contained in the file
    leap_add = np.array([int(calendar.isleap(a)) for a in STICS_data['year']])
    #isleap only works on scalar years, need to use list comprehension
    yearfrac = STICS_data['year'] + (STICS_data['doy'] - 1.0) / (365.0 +
                                                                 leap_add)

    unique_times = np.unique(yearfrac)
    n_time_steps = unique_times.size
    print 'date: ', date_string
    print '# time steps: ', n_time_steps

    #need to regroup time steps into desired time resolution...

    input_time_res_yrfrac = (
        input_time_res / (60.0 * 60.0 * 24.0 *
                          (365.0 + calendar.isleap(STICS_data['year'][0]))))
    #n_time_steps_at_res=( (unique_times[-1]-unique_times[0]) /input_time_res_yrfrac )
    #estimate number of time steps in time period at given resolution
    time_group_ind = np.zeros(n_time_steps, dtype=int)  #preallocate
    n = 0  #counter, initialize
    t0 = unique_times[0]  #initialize to first time in current time sequence
    #the way this is written, it behaves oddly if you try to pick a time resolution finer than what came
    #out of the STICS processor (it essentially doesn't do anything in that case).
    for k in xrange(n_time_steps):
        if (unique_times[k] < t0 + input_time_res_yrfrac
            ):  #check if we moved into next interval yet
            time_group_ind[k] = n
        else:  #moved into next interval
            time_group_ind[k] = n + 1
            n = n + 1  #increment index
            t0 = t0 + input_time_res_yrfrac  #increment time

    n_time_steps_at_res = len(np.unique(
        time_group_ind))  #number of time steps at specified resolution

    #Open a file for the current day to write all time steps to
    outfile1 = output_dir + 'wstics_afm_' + STICS_data['ion'][
        0] + '_' + date_string + '.dat'
    outfile2 = output_dir + 'wstics_erpa_' + STICS_data['ion'][
        0] + '_' + date_string + '.dat'
    with open(outfile1, 'a') as afm_handle, open(outfile2, 'a') as erpa_handle:

        for i in xrange(n_time_steps_at_res):
            unique_times_subset = unique_times[time_group_ind == i]
            small_val2 = 1.0E-10  #small number for yearfrac comparison

            #modify the time index method to group over multiple native time steps (native= time step of STICS
            # processor data file)
            #current_time_ind=np.where( ( (yearfrac+small_val2) > unique_times[i]) &
            #    ( (yearfrac - small_val2) < unique_times[i] )  )

            current_time_ind = np.where((
                (yearfrac + small_val2) > unique_times_subset[0]) & (
                    (yearfrac - small_val2) < unique_times_subset[-1]))

            #compute AFM
            afm_array, start_yearfrac, stop_yearfrac, delta_t, ave_b_vec = AFM.get_afm_data_mag_input(
                STICS_data[current_time_ind], mfi_data)

            #Generate plot and save
            if create_AFM_plot:
                plot_AFM.create_mollweide_plot(
                    afm_array,
                    start_yearfrac,
                    stop_yearfrac,
                    delta_t,
                    STICS_data['ion'][0],
                    ave_b_vec,
                    afm_plot_savedir=output_dir)  #generates a lot of plots!

            #compute ERPA
            erpa_array, start_yearfrac2, stop_yearfrac2, delta_t = ERPA.get_erpa_data_mag_input(
                STICS_data[current_time_ind], mfi_data)

            #Generate ERPA plot and save
            if create_ERPA_plot:
                plot_ERPA.create_polar_plot(
                    erpa_array,
                    start_yearfrac,
                    stop_yearfrac,
                    delta_t,
                    STICS_data['ion'][0],
                    erpa_plot_savedir=output_dir)  #generates a lot of plots!

            #write to AFM file
            time_array = np.array(
                np.transpose([
                    np.repeat(STICS_data['year'][current_time_ind[0][0]], 3),
                    np.repeat(STICS_data['doy'][current_time_ind[0][0]], 3),
                    STICS_data['telescope'][0:3]
                ]))
            temp = np.concatenate((time_array, afm_array), axis=1)
            if i == 0:  #only print header on first time through file
                afm_handle.write(
                    '# Wind / STICS Angular flux map (AFM) for ion ' +
                    ion_name + '.\n')
                afm_handle.write('# Created: ' + str(datetime.datetime.now()) +
                                 '\n')
                afm_handle.write('#\n')
                np.savetxt(
                    afm_handle,
                    temp,
                    header=
                    'Year, doyfrac, Telescope, Sec 0, Sec 1, Sec 2, Sec 3, '
                    'Sec 4, Sec 5, Sec 6, Sec 7, Sec 8, Sec 9, Sec 10, Sec 11, Sec 12, Sec 13, Sec 14, Sec 15',
                    fmt=
                    ('%d %f %d %e %e %e %e %e %e %e %e %e %e %e %e %e %e %e %e'
                     ))
            else:
                np.savetxt(
                    afm_handle,
                    temp,
                    fmt=
                    ('%d %f %d %e %e %e %e %e %e %e %e %e %e %e %e %e %e %e %e'
                     ))

            #write to ERPA file
            time_array = np.array(
                np.transpose([
                    np.repeat(STICS_data['year'][current_time_ind[0][0]], 32),
                    np.repeat(STICS_data['doy'][current_time_ind[0][0]], 32),
                    cnst.epq_table
                ]))
            temp = np.concatenate((time_array, np.transpose(erpa_array)),
                                  axis=1)
            if i == 0:  #only print header on first time through file
                erpa_handle.write(
                    '# Wind / STICS Energy-resolved pitch-angle (ERPA) distributions for ion '
                    + ion_name + '.\n')
                erpa_handle.write('# Created: ' +
                                  str(datetime.datetime.now()) + '\n')
                erpa_handle.write('#\n')
                np.savetxt(
                    erpa_handle,
                    temp,
                    header='Year, doyfrac, eoq, PA 0, PA 1, PA 2, PA 3, '
                    'PA 4, PA 5, PA 6, PA 7, PA 8, PA 9, PA 10, PA 11',
                    fmt=('%d %f %f %e %e %e %e %e %e %e %e %e %e %e %e'))
            else:
                np.savetxt(
                    erpa_handle,
                    temp,
                    fmt=('%d %f %f %e %e %e %e %e %e %e %e %e %e %e %e'))

    print "Finished AFM-ERPA Normally"  #Add this so I can search for it in the output files.