Example #1
 def test_center_brst_moments_data(self):
     data = mms_load_hpca(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst')
     centered = mms_load_hpca(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst', center_measurement=True, suffix='_centered')
     
     t, d = get_data('mms1_hpca_hplus_ion_bulk_velocity')
     c, d = get_data('mms1_hpca_hplus_ion_bulk_velocity_centered')
     self.assertTrue(np.round(c[0]-t[0], decimals=3) == 5.0)
Example #2
 def test_center_fast_electron_data(self):
     data = mms_load_fpi(trange=['2015-10-16/14:00', '2015-10-16/15:00'])
     centered = mms_load_fpi(trange=['2015-10-16/14:00', '2015-10-16/15:00'], center_measurement=True, suffix='_centered')
     
     t, d = get_data('mms1_des_bulkv_gse_fast')
     c, d = get_data('mms1_des_bulkv_gse_fast_centered')
     self.assertTrue(np.round(c[0]-t[0], decimals=3) == 2.25)
Example #3
def cotrans_set_coord(name, coord):
    '''
    This function sets the coordinate system of a tplot variable

    Parameters:
        name: str
            name of the tplot variable

        coord: str
            name of the coordinate system to assign (e.g., 'GSE', 'GSM')

    Notes:
        The coordinate system is stored in the variable's metadata at:
            metadata['data_att']['coord_sys']

        See cotrans_get_coord to return the coordinate system

    Returns:
        bool: True/False depending on if the operation was successful
    '''

    # check that the variable exists
    data = get_data(name)
    if data is None:
        return False

    metadata = get_data(name, metadata=True)

    if metadata.get('data_att') is None:
        metadata['data_att'] = {}

    # note: updating the metadata dict directly updates
    # the variable's metadata in memory, so there's
    # no need to update the variable with store_data
    metadata['data_att']['coord_sys'] = coord
    return True
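A minimal usage sketch (not from the original corpus) for the setter above; the import paths below are assumptions and may differ between pyspedas/pytplot versions.

from pytplot import store_data
from pyspedas.cotrans.cotrans_set_coord import cotrans_set_coord  # assumed import path

# create a small test variable, then tag it as GSE
store_data('demo_var', data={'x': [1.0, 2.0], 'y': [[1.0, 0.0, 0.0], [0.0, 1.0, 0.0]]})
print(cotrans_set_coord('demo_var', 'gse'))  # True when the variable exists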
Example #4
def tinterpol(names, interp_names=None, method=None, new_names=None,
              suffix=None):

    old_names = pyspedas.tnames(names)

    if len(old_names) < 1:
        print('tinterpol error: No pytplot names were provided.')
        return

    if interp_names is None:
        print('tinterpol error: No variables to interpolate to were provided.')
        return

    if suffix is None:
        suffix = '-itrp'

    if method is None:
        method = 'linear'

    if new_names is None:
        n_names = [s + suffix for s in old_names]
    elif new_names == '':
        n_names = old_names
    else:
        n_names = new_names

    if len(n_names) != len(old_names):
        n_names = [s + suffix for s in old_names]

    for i in range(len(old_names)):
        time, data = pytplot.get_data(old_names[i])
        new_time, data1 = pytplot.get_data(interp_names[i])
        data = numpy.asarray(data).squeeze()
        f2 = interp1d(time, data, kind=method)
        new_data = f2(new_time)
        pytplot.store_data(n_names[i], data={'x': new_time, 'y': new_data})
        print('tinterpol (' + method + ') was applied to: ' + n_names[i])
Example #5
 def test_avg_data(self):
     """Test avg_data."""
     avg_data('aaabbbccc', width=2)  # Test non-existent name
     avg_data('test', width=2)
     d = get_data('test-avg')
     self.assertTrue((d[1] == [4.0, 11.5, 10.5]).all())
     avg_data('test', width=2, overwrite=True)  # Test overwrite
     avg_data('test', dt=4.0, noremainder=False)  # Test dt option
     store_data('test',
                data={
                    'x': [1., 2., 3., 4., 5., 6.],
                    'y': [3., 5., 8., -4., 20., 1.]
                })
     avg_data('test', width=2, new_names='aabb')  # Test new_names
     d = get_data('aabb')
     # Test multiple names
     avg_data(['test', 'aabb'], new_names='aaabbb', width=2)
     dn = [[3., 5., 8.], [15., 20., 1.], [3., 5., 8.], [15., 20., 1.],
           [23., 15., 28.], [15., 20., 1.]]
     store_data('test1', data={'x': [1., 12., 13., 14., 15., 16.], 'y': dn})
     avg_data('test1', width=2)  # Test 3-d data
     avg_data('test1', new_names='test2', dt=2.)  # Test a reasonable dt
     avg_data('test1', dt=-1.)  # Test dt error
     avg_data('test1', dt=1.e8)  # Test dt error
     d2 = get_data('test2')
     self.assertTrue(len(d) > 0)
     self.assertTrue(d2[1][-1][0] == 19.0)
Example #6
    def _visdata(self):
        self._setcolors()
        datasets = []
        if isinstance(pytplot.data_quants[self.tvar_name].data, list):
            for oplot_name in pytplot.data_quants[self.tvar_name].data:
                datasets.append(pytplot.data_quants[oplot_name])
        else:
            datasets.append(pytplot.data_quants[self.tvar_name])

        cm_index = 0
        for dataset in datasets:
            #TODO: Add a check that lon and lat are only 1D
            _, x = pytplot.get_data(dataset.links['lon'])
            _, y = pytplot.get_data(dataset.links['lat'])
            for column_name in dataset.data.columns:
                values = dataset.data[column_name].tolist()
                colors = []
                colors.extend(
                    pytplot.tplot_utilities.get_heatmap_color(
                        color_map=self.colors[cm_index],
                        min_val=self.zmin,
                        max_val=self.zmax,
                        values=values,
                        zscale=self.zscale))
                circle_source = ColumnDataSource(
                    data=dict(x=x, y=y, value=values, colors=colors))
                self.fig.scatter(x='x',
                                 y='y',
                                 radius=1.0,
                                 fill_color='colors',
                                 fill_alpha=1,
                                 line_color=None,
                                 source=circle_source)
            cm_index += 1
Example #7
 def _visdata(self):    
     datasets = []
     if isinstance(pytplot.data_quants[self.tvar_name].data, list):
         for oplot_name in pytplot.data_quants[self.tvar_name].data:
             datasets.append(pytplot.data_quants[oplot_name])
     else:
         datasets.append(pytplot.data_quants[self.tvar_name])
     
     for dataset in datasets: 
         _, lat = pytplot.get_data(pytplot.data_quants[self.tvar_name].links['lat']) 
         lat = lat.transpose()[0]
         _, lon = pytplot.get_data(pytplot.data_quants[self.tvar_name].links['lon']) 
         lon = lon.transpose()[0]    
         for column_name in dataset.data.columns:
             values = dataset.data[column_name].tolist()
             colors = pytplot.tplot_utilities.get_heatmap_color(color_map=self.colormap, 
                                                                     min_val=self.zmin, 
                                                                     max_val=self.zmax, 
                                                                     values=values, 
                                                                     zscale=self.zscale)
             brushes = []
             for color in colors:
                 brushes.append(pg.mkBrush(color))
             self.curves.append(self.plotwindow.scatterPlot(lon.tolist(), lat.tolist(), 
                                                            pen=pg.mkPen(None), brush=brushes))
Example #8
 def test_center_brst_electron_data(self):
     data = mms_load_fpi(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst')
     centered = mms_load_fpi(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst', center_measurement=True, suffix='_centered')
     
     t, d = get_data('mms1_des_bulkv_gse_brst')
     c, d = get_data('mms1_des_bulkv_gse_brst_centered')
     self.assertTrue(np.round(c[0]-t[0], decimals=3) == 0.015)
Example #9
def tdotp(variable1, variable2, newname=None):
    """
        Routine to calculate the dot product of two tplot variables 
        containing arrays of vectors and storing the results in a 
        tplot variable
    """

    data1 = get_data(variable1, xarray=True)
    data2 = get_data(variable2, xarray=True)

    if data1 is None:
        print('Variable not found: ' + variable1)
        return

    if data2 is None:
        print('Variable not found: ' + variable2)
        return

    if newname is None:
        newname = variable1 + '_dot_' + variable2

    # calculate the dot product
    out = data1.dot(data2, dims='v_dim')

    # save the output
    saved = store_data(newname, data={'x': data1.time.values, 'y': out.values})

    return newname
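A short, hypothetical sketch of tdotp on two tiny vector variables; the values are made up and the result assumes the 2-D data dimension is named 'v_dim', as in the code above.

from pytplot import store_data, get_data

store_data('vec_a', data={'x': [1.0, 2.0], 'y': [[1.0, 0.0, 0.0], [0.0, 2.0, 0.0]]})
store_data('vec_b', data={'x': [1.0, 2.0], 'y': [[1.0, 1.0, 0.0], [0.0, 3.0, 0.0]]})

out = tdotp('vec_a', 'vec_b')      # default output name: 'vec_a_dot_vec_b'
print(get_data(out).y)             # expected: [1., 6.]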
Example #10
def solarwind_load(trange, level='hro2', min5=False):
    if min5:
        datatype = '5min'
    else:
        datatype = '1min'
    omni_vars = omni.data(trange=trange, level=level, datatype=datatype)
    bzgsm = get_data('BZ_GSM')
    dp = get_data('Pressure')
    return np.array([bzgsm.times, bzgsm.y, dp.y]).T
Example #11
def mms_fgm_remove_flags(probe, data_rate, level, instrument, suffix=''):
    """
    This function removes data flagged by the FGM 'flag' variable (flags > 0), 
    in order to only show science quality data by default.
    
    Parameters:
        probe : str or list of str
            probe or list of probes, valid values for MMS probes are ['1','2','3','4']. 

        data_rate : str or list of str
            instrument data rates for FGM include 'brst', 'fast', 'slow', 'srvy'. The
            default is 'srvy'.

        level : str
            indicates the level of data processing. The default if no level is specified is 'l2'.

        instrument : str
            instrument; probably 'fgm'
            
        suffix: str
            The tplot variable names will be given this suffix.  By default, 
            no suffix is added.

    """
    if not isinstance(probe, list): probe = [probe]
    if not isinstance(data_rate, list): data_rate = [data_rate]
    if not isinstance(level, list): level = [level]

    tplot_vars = set(tnames())

    for this_probe in probe:
        for this_dr in data_rate:
            for this_lvl in level:
                if this_lvl == 'ql':
                    flag_var = 'mms' + str(
                        this_probe
                    ) + '_' + instrument + '_' + this_dr + '_' + this_lvl + '_flag' + suffix
                else:
                    flag_var = 'mms' + str(
                        this_probe
                    ) + '_' + instrument + '_flag_' + this_dr + '_' + this_lvl + suffix
                flagged = get_data(flag_var)
                if flagged is None:
                    continue
                times, flags = flagged
                flagged_data = np.where(flags != 0.0)[0]

                for var_specifier in [
                        '_b_gse_', '_b_gsm_', '_b_dmpa_', '_b_bcs_'
                ]:
                    var_name = 'mms' + str(
                        this_probe
                    ) + '_' + instrument + var_specifier + this_dr + '_' + this_lvl + suffix
                    if var_name in tplot_vars:
                        times, var_data = get_data(var_name)
                        var_data[flagged_data] = np.nan
                        store_data(var_name, data={'x': times, 'y': var_data})
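A hedged usage sketch for the flag-removal routine above; the loader call mirrors Example #14 and the import path for mms_load_fgm is an assumption.

from pyspedas.mms import mms_load_fgm  # assumed import path

# load srvy/l2 FGM data for probe 1, then NaN out any flagged samples in place
mms_load_fgm(trange=['2015-10-16', '2015-10-17'], probe='1', data_rate='srvy', level='l2')
mms_fgm_remove_flags('1', 'srvy', 'l2', 'fgm')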
Example #12
    def test_all_cotrans(self):
        """Test all cotrans pairs.

        Apply transformation, then inverse transformation and compare.
        """
        cotrans()
        all_cotrans = ['gei', 'geo', 'j2000', 'gsm', 'mag', 'gse', 'sm']
        d = [[245.0, -102.0, 251.0], [775.0, 10.0, -10], [121.0, 545.0, -1.0],
             [304.65, -205.3, 856.1], [464.34, -561.55, -356.22]]
        dd1 = d[1]
        t = [1577112800, 1577308800, 1577598800, 1577608800, 1577998800]
        in_len = len(t)
        name1 = "name1"
        name2 = "name2"
        count = 0
        # Test non-existent system.
        cotrans(name_out=name1,
                time_in=t,
                data_in=d,
                coord_in="coord_in",
                coord_out="coord_out")
        # Test empty data.
        cotrans(name_out=name1,
                time_in=t,
                data_in=[],
                coord_in="gei",
                coord_out="geo")
        cotrans(time_in=t, data_in=d, coord_in="gse", coord_out="gsm")
        # Test all combinations.
        for coord_in in all_cotrans:
            for coord_out in all_cotrans:
                count += 1
                del_data()
                cotrans(name_out=name1,
                        time_in=t,
                        data_in=d,
                        coord_in=coord_in,
                        coord_out=coord_out)
                dout = get_data(name1)
                out_len1 = len(dout[0])
                self.assertTrue(out_len1 == in_len)
                # Now perform inverse transformation.
                cotrans(name_in=name1,
                        name_out=name2,
                        coord_in=coord_out,
                        coord_out=coord_in)
                dout2 = get_data(name2)
                out_len2 = len(dout2[0])
                dd2 = dout2[1][1]
                print(count, "--- in:", coord_in, "out:", coord_out)
                # print(dout[1][1])
                # print(dd2)
                self.assertTrue(out_len2 == in_len)
                self.assertTrue(abs(dd1[0] - dd2[0]) <= 1e-6)
                self.assertTrue(abs(dd1[1] - dd2[1]) <= 1e-6)
                self.assertTrue(abs(dd1[2] - dd2[2]) <= 1e-6)
Example #13
def tinterpol(names, interp_to, method=None, newname=None, suffix=None):

    if not isinstance(names, list):
        names = [names]
    if not isinstance(newname, list):
        newname = [newname]

    old_names = tnames(names)

    if len(old_names) < 1:
        print('tinterpol error: No pytplot names were provided.')
        return

    if suffix is None:
        suffix = '-itrp'

    if method is None:
        method = 'linear'

    if (newname is None) or (len(newname) == 1 and newname[0] is None):
        n_names = [s + suffix for s in old_names]
    elif len(newname) == 1 and newname[0] == '':
        n_names = old_names
    else:
        n_names = newname

    interp_to_data = get_data(interp_to)

    if interp_to_data is None:
        print('Error, tplot variable: ' + interp_to + ' not found.')
        return

    interp_to_times = interp_to_data[0]

    for name_idx, name in enumerate(old_names):
        xdata = get_data(name, xarray=True)
        xdata_interpolated = xdata.interp({'time': interp_to_times},
                                          method=method)

        if 'spec_bins' in xdata.coords:
            store_data(n_names[name_idx],
                       data={
                           'x': interp_to_times,
                           'y': xdata_interpolated.values,
                           'v': xdata_interpolated.coords['spec_bins'].values
                       })
        else:
            store_data(n_names[name_idx],
                       data={
                           'x': interp_to_times,
                           'y': xdata_interpolated.values
                       })

        print('tinterpol (' + method + ') was applied to: ' +
              n_names[name_idx])
Example #14
 def test_regression_multi_imports_spdf(self):
     data = mms_load_fgm(data_rate='brst',
                         trange=['2015-10-16/13:06', '2015-10-16/13:10'],
                         spdf=True)
     t1, d1 = get_data('mms1_fgm_b_gse_brst_l2')
     data = mms_load_fgm(data_rate='brst',
                         trange=['2015-10-16/13:06', '2015-10-16/13:10'],
                         spdf=True)
     t2, d2 = get_data('mms1_fgm_b_gse_brst_l2')
     self.assertTrue(t1.shape == t2.shape)
     self.assertTrue(d1.shape == d2.shape)
Example #15
def tnormalize(variable, newname=None, return_data=False):
    """
    Normalize all the vectors stored in a tplot variable

    Input
    ----------
        variable: str or np.ndarray
            tplot variable (or numpy array) containing the vectors to be normalized

    Parameters
    ----------
        newname: str
            name of the output variable; default: variable_normalized

        return_data: bool
            return the normalized vectors instead of creating a tplot variable
    
    Returns
    ----------

        name of the tplot variable created or normalized vectors if return_data
        is set

    """
    metadata_in = {}
    if isinstance(variable, str):
        data_in = get_data(variable)
        metadata_in = get_data(variable, metadata=True)
        data = data_in[1]
        times = data_in[0]
    else:
        data = np.atleast_2d(variable)
        times = np.zeros(data.shape[0])

    n = np.sqrt(np.nansum(data**2, axis=1))

    # to do element-wise division, the magnitude needs to be repeated for each component
    norm_reshaped = np.reshape(n, [len(times), 1])
    norm_mag = np.repeat(norm_reshaped, len(data[0, :]), axis=1)

    data_norm = data / norm_mag

    if return_data:
        return data_norm
    else:
        if newname is None:
            newname = variable + '_normalized'
        store_data(newname,
                   data={
                       'x': times,
                       'y': data_norm
                   },
                   attr_dict=metadata_in)
        return newname
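A small illustrative sketch of both calling modes of tnormalize; the array values are made up.

import numpy as np
from pytplot import store_data, get_data

# array input: returns the unit vectors directly
vecs = np.array([[3.0, 0.0, 4.0], [0.0, 2.0, 0.0]])
print(tnormalize(vecs, return_data=True))   # expected rows: [0.6, 0, 0.8] and [0, 1, 0]

# tplot-variable input: creates 'b_demo_normalized'
store_data('b_demo', data={'x': [1.0, 2.0], 'y': vecs})
print(get_data(tnormalize('b_demo')).y)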
Example #16
    def _visdata(self):
        self._setcolors()
        datasets = []
        if isinstance(pytplot.data_quants[self.tvar_name].data, list):
            for oplot_name in pytplot.data_quants[self.tvar_name].data:
                datasets.append(pytplot.data_quants[oplot_name])
        else:
            datasets.append(pytplot.data_quants[self.tvar_name])

        cm_index = 0
        for dataset in datasets:
            # TODO: Add a check that lon and lat are only 1D
            t_link_lon, x = pytplot.get_data(dataset.links['lon'])
            t_link_lat, y = pytplot.get_data(dataset.links['lat'])

            for column_name in dataset.data.columns:
                data = dataset.data[column_name].values

                # Need to trim down the data points to fit within the link
                t_tvar = dataset.data.index.values
                while t_tvar[-1] > t_link_lon[-1]:
                    t_tvar = np.delete(t_tvar, -1)
                    data = np.delete(data, -1)
                while t_tvar[0] < t_link_lon[0]:
                    t_tvar = np.delete(t_tvar, 0)
                    data = np.delete(data, 0)
                while t_tvar[-1] > t_link_lat[-1]:
                    t_tvar = np.delete(t_tvar, -1)
                    data = np.delete(data, -1)
                while t_tvar[0] < t_link_lat[0]:
                    t_tvar = np.delete(t_tvar, 0)
                    data = np.delete(data, 0)

                colors = []
                colors.extend(
                    pytplot.tplot_utilities.get_heatmap_color(
                        color_map=self.colors[cm_index % len(self.colors)],
                        min_val=self.zmin,
                        max_val=self.zmax,
                        values=data.tolist(),
                        zscale=self.zscale))

                circle_source = ColumnDataSource(
                    data=dict(x=x, y=y, value=data.tolist(), colors=colors))
                self.fig.scatter(x='x',
                                 y='y',
                                 radius=1.0,
                                 fill_color='colors',
                                 fill_alpha=1,
                                 line_color=None,
                                 source=circle_source)
            cm_index += 1
Example #17
 def test_tcopy(self):
     store_data('test', data={'x': [1, 2, 3], 'y': [5, 5, 5]})
     tcopy('test')
     tcopy('test', 'another-copy')
     t, d = get_data('test-copy')
     self.assertTrue(t.tolist() == [1, 2, 3])
     self.assertTrue(d.tolist() == [5, 5, 5])
     t, d = get_data('another-copy')
     self.assertTrue(t.tolist() == [1, 2, 3])
     self.assertTrue(d.tolist() == [5, 5, 5])
     # the following should gracefully error
     tcopy('doesnt exist', 'another-copy')
     tcopy(['another-copy', 'test'], 'another-copy')
Example #18
def DataLoad(trange=['2017-05-01', '2017-05-02/15:30:02'], data_rate='srvy', level='l2'):
    '''
    Loads all data needed for calculating magnetic field curvature from MMS FGM data.
    Uses pyspedas and pytplot.get_data for accessing the SDC API, file downloading, 
    data file version control, and CDF unpacking.

    Parameters:
    trange:     A list with two strings for the date range [tstart, tend]
                e.g. trange=['2017-05-01', '2017-05-02/15:30:02']

    data_rate:  The cadence of data which should be loaded.
                Options are 'srvy', 'brst'

    level:      The data level which will be loaded.  Use 'l2' unless you're sure otherwise.

    '''
    logging.info('Start DataLoad.')
    # load data files from SDC/local storage into tplot variables
    pyspedas.mms_load_mec(trange=trange, probe=['1', '2', '3', '4'], data_rate='srvy', level=level, time_clip=True)
    pyspedas.mms_load_fgm(trange=trange, probe=['1', '2', '3', '4'], data_rate=data_rate, level=level, time_clip=True)
    # extract data from tplot variables to numpy arrays.  NOTE: all done in GSM.
    postime1, pos1 = get_data('mms1_mec_r_gsm')
    postime2, pos2 = get_data('mms2_mec_r_gsm')
    postime3, pos3 = get_data('mms3_mec_r_gsm')
    postime4, pos4 = get_data('mms4_mec_r_gsm')
    magtime1, mag1 = get_data('mms1_fgm_b_gsm_'+data_rate+'_l2')
    magtime2, mag2 = get_data('mms2_fgm_b_gsm_'+data_rate+'_l2')
    magtime3, mag3 = get_data('mms3_fgm_b_gsm_'+data_rate+'_l2')
    magtime4, mag4 = get_data('mms4_fgm_b_gsm_'+data_rate+'_l2')
    # return all arrays
    logging.info('Returning from DataLoad.')
    return (postime1, pos1, magtime1, mag1, postime2, pos2, magtime2, mag2, postime3, pos3, magtime3, mag3, postime4, pos4, magtime4, mag4)
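A usage sketch for DataLoad using the docstring's own example time range; the shapes in the comment assume the usual 3-component MEC position and 4-component (Bx, By, Bz, |B|) FGM variables.

(postime1, pos1, magtime1, mag1,
 postime2, pos2, magtime2, mag2,
 postime3, pos3, magtime3, mag3,
 postime4, pos4, magtime4, mag4) = DataLoad(trange=['2017-05-01', '2017-05-02/15:30:02'],
                                            data_rate='srvy', level='l2')
print(pos1.shape, mag1.shape)   # roughly (n_times, 3) and (n_times, 4)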
Example #19
 def test_tcrossp(self):
     """ cross product tests"""
     cp = tcrossp([3, -3, 1], [4, 9, 2], return_data=True)
     self.assertTrue(cp.tolist() == [-15, -2, 39])
     cp = tcrossp([3, -3, 1], [4, 9, 2])
     cp = get_data(cp)
     self.assertTrue(cp.y[0, :].tolist() == [-15, -2, 39])
     store_data('var1', data={'x': [0], 'y': [[3, -3, 1]]})
     store_data('var2', data={'x': [0], 'y': [[4, 9, 2]]})
     cp = tcrossp('var1', 'var2', return_data=True)
     self.assertTrue(cp[0].tolist() == [-15, -2, 39])
     cp = tcrossp('var1', 'var2', newname='test_crossp')
     cp = get_data('test_crossp')
     self.assertTrue(cp.y[0, :].tolist() == [-15, -2, 39])
Example #20
def mms_split_fgm_data(probe, data_rate, level, instrument, suffix=''):
    """


    """

    probe = probe.lower()
    instrument = instrument.lower()
    data_rate = data_rate.lower()
    level = level.lower()

    if level.lower() == 'l2pre':
        data_rate_mod = data_rate + '_l2pre'
    else:
        data_rate_mod = data_rate

    coords = ['dmpa', 'gse', 'gsm', 'bcs']

    out_vars = []

    for coord in coords:
        if level in ['l2', 'l2pre']:
            tplot_name = 'mms' + probe + '_' + instrument + '_b_' + coord + '_' + data_rate + '_' + level + suffix
        else:
            tplot_name = 'mms' + probe + '_' + instrument + '_' + data_rate_mod + '_' + coord + suffix

        if not data_exists(tplot_name):
            continue

        fgm_data = get_data(tplot_name)

        if fgm_data is None:
            continue

        metadata = get_data(tplot_name, metadata=True)

        if suffix != '':
            tplot_name = tplot_name[0:-len(suffix)]

        store_data(tplot_name + '_bvec' + suffix, data={'x': fgm_data.times, 'y': fgm_data.y[:, :3]}, attr_dict=metadata)
        store_data(tplot_name + '_btot' + suffix, data={'x': fgm_data.times, 'y': fgm_data.y[:, 3]}, attr_dict=metadata)

        options(tplot_name + '_btot' + suffix, 'legend_names', 'Bmag')
        options(tplot_name + '_btot' + suffix, 'ytitle', 'MMS'+probe + ' FGM')

        out_vars.append(tplot_name + '_bvec' + suffix)
        out_vars.append(tplot_name + '_btot' + suffix)

    return out_vars
Example #21
def mms_hpca_calc_anodes(fov=[0, 360], probe='1', suffix=''):
    """
    This function will sum (or average, for flux) the HPCA data over the requested field-of-view (fov)
    Parameters:
        fov : list of int
            field of view, in degrees, from 0-360
        probe : str
            probe #, e.g., '4' for MMS4
        suffix: str
            suffix of the loaded data

    Returns:
        List of tplot variables created.
    """
    sum_anodes = [a+suffix for a in ['*_count_rate', '*_RF_corrected', '*_bkgd_corrected', '*_norm_counts']]
    avg_anodes = ['*_flux'+suffix]
    output_vars = []

    fov_str = '_elev_'+str(fov[0])+'-'+str(fov[1])

    for sum_anode in sum_anodes:
        vars_to_sum = tnames(sum_anode)

        for var in vars_to_sum:
            times, data, angles, energies = get_data(var)

            updated_spectra = mms_hpca_sum_fov(times, data, angles, energies, fov=fov)

            store_data(var+fov_str, data={'x': times, 'y': updated_spectra, 'v': energies})
            options(var+fov_str, 'spec', True)
            options(var+fov_str, 'ylog', True)
            options(var+fov_str, 'zlog', True)
            options(var+fov_str, 'Colormap', 'jet')
            output_vars.append(var+fov_str)

    for avg_anode in avg_anodes:
        vars_to_avg = tnames(avg_anode)

        for var in vars_to_avg:
            times, data, angles, energies = get_data(var)

            updated_spectra = mms_hpca_avg_fov(times, data, angles, energies, fov=fov)

            store_data(var+fov_str, data={'x': times, 'y': updated_spectra, 'v': energies})
            options(var+fov_str, 'spec', True)
            options(var+fov_str, 'ylog', True)
            options(var+fov_str, 'zlog', True)
            options(var+fov_str, 'Colormap', 'jet')
            output_vars.append(var+fov_str)

    return output_vars
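A hedged sketch of the anode summing/averaging above, run after an HPCA load; the loader call follows Example #1, and the 'ion' datatype keyword and import path are assumptions.

from pyspedas.mms import mms_load_hpca  # assumed import path

mms_load_hpca(trange=['2015-10-16/13:06', '2015-10-16/13:07'], data_rate='brst',
              datatype='ion')
new_vars = mms_hpca_calc_anodes(fov=[0, 360], probe='1')
print(new_vars)   # e.g. ['mms1_hpca_hplus_flux_elev_0-360', ...]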
Example #22
    def _visdata(self):
        datasets = []
        if isinstance(pytplot.data_quants[self.tvar_name].data, list):
            for oplot_name in pytplot.data_quants[self.tvar_name].data:
                datasets.append(pytplot.data_quants[oplot_name])
        else:
            datasets.append(pytplot.data_quants[self.tvar_name])

        cm_index = 0
        for dataset in datasets:
            t_link, lat = pytplot.get_data(dataset.links['lat'])
            lat = lat.transpose()[0]
            # Need to trim down the data points to fit within the link
            t_tvar = dataset.data.index.values
            data = dataset.data[0].values
            while t_tvar[-1] > t_link[-1]:
                t_tvar = np.delete(t_tvar, -1)
                data = np.delete(data, -1)
            while t_tvar[0] < t_link[0]:
                t_tvar = np.delete(t_tvar, 0)
                data = np.delete(data, 0)

            t_link, lon = pytplot.get_data(dataset.links['lon'])
            # Need to trim down the data points to fit within the link
            while t_tvar[-1] > t_link[-1]:
                t_tvar = np.delete(t_tvar, -1)
                data = np.delete(data, -1)
            while t_tvar[0] < t_link[0]:
                t_tvar = np.delete(t_tvar, 0)
                data = np.delete(data, 0)

            lon = lon.transpose()[0]
            for column_name in dataset.data.columns:
                values = data.tolist()
                colors = pytplot.tplot_utilities.get_heatmap_color(
                    color_map=self.colormap[cm_index % len(self.colormap)],
                    min_val=self.zmin,
                    max_val=self.zmax,
                    values=values,
                    zscale=self.zscale)
                brushes = []
                for color in colors:
                    brushes.append(pg.mkBrush(color))
                self.curves.append(
                    self.plotwindow.scatterPlot(lon.tolist(),
                                                lat.tolist(),
                                                pen=pg.mkPen(None),
                                                brush=brushes))
                cm_index += 1
Example #23
def mms_pgs_clean_support(times,
                          mag_name=None,
                          vel_name=None,
                          sc_pot_name=None):
    """
	Transform and/or interpolate support data to match the particle data

	Parameters
    ----------
		mag_name: str
			Tplot variable containing magnetic field data

		vel_name: str
			Tplot variable containing bulk velocity data

		sc_pot_name: str
			Tplot variable containing spacecraft potential data

	Returns
    ----------
		Tuple containing interpolated (magnetic field, velocity, spacecraft potential)
	"""

    out_mag = None
    out_vel = None
    out_scpot = None

    if mag_name is not None:
        mag_temp = mag_name + '_pgs_temp'
        tinterpol(mag_name, times, newname=mag_temp)
        interpolated_bfield = get_data(mag_temp)
        if interpolated_bfield is not None:
            out_mag = interpolated_bfield.y

    if vel_name is not None:
        vel_temp = vel_name + '_pgs_temp'
        tinterpol(vel_name, times, newname=vel_temp)
        interpolated_vel = get_data(vel_temp)
        if interpolated_vel is not None:
            out_vel = interpolated_vel.y

    if sc_pot_name is not None:
        scpot_temp = sc_pot_name + '_pgs_temp'
        tinterpol(sc_pot_name, times, newname=scpot_temp)
        interpolated_scpot = get_data(scpot_temp)
        if interpolated_scpot is not None:
            out_scpot = interpolated_scpot.y

    return (out_mag, out_vel, out_scpot)
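A hedged sketch of calling the support-data helper; the particle and support variable names below are typical MMS names assumed purely for illustration.

from pytplot import get_data

# times from a previously loaded particle distribution variable (name assumed)
dist_times = get_data('mms1_dis_dist_fast').times

mag, vel, scpot = mms_pgs_clean_support(
    dist_times,
    mag_name='mms1_fgm_b_gse_srvy_l2_bvec',    # assumed support variable names
    vel_name='mms1_dis_bulkv_gse_fast',
    sc_pot_name='mms1_edp_scpot_fast_l2')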
Example #24
def remove_duplicated_tframe(tvars=[]):

    tvars = tnames(tvars)

    if len(tvars) < 1:
        return

    for tvar in tvars:

        input_attr_dict = get_data(tvar, metadata=True)
        get_data_vars = get_data(tvar)

        unique_array, counts_array = np.unique(
            get_data_vars[0], return_counts=True)
        duplicate_time_indices = np.where(counts_array > 1)[0]
        if duplicate_time_indices.shape[0] > 0:  # duplication check
            delete_indices_array = np.array(
                [np.where(get_data_vars[0] == unique_array[index])[0][0]
                    for index in duplicate_time_indices])
            input_data_dictionary = {}
            input_data_dictionary['x'] = np.delete(
                get_data_vars[0], delete_indices_array, axis=0)
            input_data_dictionary['y'] = np.delete(
                get_data_vars[1], delete_indices_array, axis=0)

            # for v or v1, v2.. elements. not tested yet.
            if len(get_data_vars) >= 3:
                element_counts = len(get_data_vars)
                if element_counts == 3:
                    if get_data_vars[2].ndim >= 2:
                        input_data_dictionary['v'] = np.delete(
                            get_data_vars[2], delete_indices_array, axis=0)
                    elif get_data_vars[2].ndim == 1:
                        input_data_dictionary['v'] = get_data_vars[2]
                elif element_counts > 3:
                    for element_index in range(2, element_counts):
                        v_element_name = f'v{element_index-1}'
                        if get_data_vars[element_index].ndim >= 2:
                            input_data_dictionary[v_element_name] = np.delete(
                                get_data_vars[element_index], delete_indices_array,
                                axis=0)
                        elif get_data_vars[element_index].ndim == 1:
                            input_data_dictionary[v_element_name] = get_data_vars[element_index]

            store_data(tvar, data=input_data_dictionary,
                       attr_dict=input_attr_dict)

    return
Example #25
def cotrans_get_coord(name):
    '''
    This function returns the coordinate system of a tplot variable

    Parameters:
        name: str
            name of the tplot variable

    Notes:
        The coordinate system is stored in the variable's metadata at:
            metadata['data_att']['coord_sys']

        See cotrans_set_coord to update the coordinate system

    Returns:
        Coordinate system of the tplot variable 
        or 
        None if the coordinate system isn't set
    '''

    metadata = get_data(name, metadata=True)
    if metadata is None:
        return None

    if metadata.get('data_att'):
        if metadata['data_att'].get('coord_sys'):
            return metadata['data_att']['coord_sys']

    print('Coordinate system not found: ' + name)
    return None
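A short sketch pairing the getter with the setter from Example #3; 'demo_var' is the illustrative variable tagged in that earlier sketch.

coord = cotrans_get_coord('demo_var')
if coord is None:
    print('demo_var has no coordinate system set')
else:
    print('demo_var is in ' + coord)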
Example #26
def mms_feeps_correct_energies(probes, data_rate, level='l2', suffix=''):

    types = ['top', 'bottom']
    sensors = range(1, 13)
    units_types = ['intensity', 'count_rate', 'counts']

    for probe in probes:
        for sensor_type in types:
            for sensor in sensors:
                if sensor >= 6 and sensor <= 8:
                    species = 'ion'
                else:
                    species = 'electron'

                for units in units_types:
                    var_name = 'mms' + probe + '_epd_feeps_' + data_rate + '_' + level + '_' + species + '_' + sensor_type + '_' + units + '_sensorid_' + str(
                        sensor)

                    times, data = get_data(var_name + suffix)
                    energies = pytplot.data_quants[var_name +
                                                   suffix].spec_bins.values

                    energy_map = mms_feeps_energy_table(
                        probe, sensor_type[0:3], sensor)

                    store_data(var_name + suffix,
                               data={
                                   'x': times,
                                   'y': data,
                                   'v': energy_map
                               })
Example #27
def tdeflag(names, method=None, flag=None, new_names=None, suffix=None):

    old_names = pyspedas.tnames(names)

    if len(old_names) < 1:
        print('tdeflag error: No pytplot names were provided.')
        return

    if suffix is None:
        suffix = '-deflag'

    if flag is None:
        flag = float('nan')

    if new_names is None:
        n_names = [s + suffix for s in old_names]
    elif new_names == '':
        n_names = old_names
    else:
        n_names = new_names

    if len(n_names) != len(old_names):
        n_names = [s + suffix for s in old_names]

    for i in range(len(old_names)):
        time, data = pytplot.get_data(old_names[i])
        new_time = []
        new_data = []
        for j in range(len(time)):
            if not numpy.isnan(data[j]):
                new_time.append(time[j])
                new_data.append(data[j])
        pytplot.store_data(n_names[i], data={'x': new_time, 'y': new_data})
        print('tdeflag was applied to: ' + n_names[i])
Example #28
def tclip(names, ymin, ymax, flag=None, new_names=None, suffix=None):

    old_names = pyspedas.tnames(names)

    if len(old_names) < 1:
        print('tclip error: No pytplot names were provided.')
        return

    if suffix is None:
        suffix = '-clip'

    if flag is None:
        flag = float('nan')

    if new_names is None:
        n_names = [s + suffix for s in old_names]
    elif new_names == '':
        n_names = old_names
    else:
        n_names = new_names

    if len(n_names) != len(old_names):
        n_names = [s + suffix for s in old_names]

    for i in range(len(old_names)):
        alldata = pytplot.get_data(old_names[i])
        time = alldata[0]
        data = alldata[1]
        new_data = numpy.array(data)
        new_data[new_data <= ymin] = flag
        new_data[new_data >= ymax] = flag
        pytplot.store_data(n_names[i], data={'x': time, 'y': new_data})
        print('tclip was applied to: ' + n_names[i])
Example #29
    def test_dsl2gse(self):
        """Test themis.cotrans.dsl2gse."""
        del_data()
        # Try with missing variables. It should exit without problems.
        dsl2gse('tha_fgl_dsl', 'tha_spinras', 'tha_spindec', 'tha_fgl_gse')
        # Now load the needed variables.
        time_range = ['2017-03-23 00:00:00', '2017-03-23 23:59:59']
        pyspedas.themis.state(probe='a',
                              trange=time_range,
                              get_support_data=True,
                              varnames=['tha_spinras', 'tha_spindec'])
        pyspedas.themis.fgm(probe='a',
                            trange=time_range,
                            varnames=['tha_fgl_dsl'])

        dsl2gse('tha_fgl_dsl', 'tha_spinras', 'tha_spindec', 'tha_fgl_gse')

        t, d = get_data('tha_fgl_gse')
        # Now test the inverse.
        dsl2gse('tha_fgl_dsl',
                'tha_spinras',
                'tha_spindec',
                'tha_fgl_gse',
                isgsetodsl=1)

        self.assertTrue(abs(d[0].tolist()[0] - 15.905078404701147) <= 1e-6)
        self.assertTrue(abs(d[0].tolist()[1] - -13.962618931740064) <= 1e-6)
        self.assertTrue(abs(d[0].tolist()[2] - 16.392516225582813) <= 1e-6)

        self.assertTrue(abs(d[50000].tolist()[0] - 16.079111468932435) <= 1e-6)
        self.assertTrue(
            abs(d[50000].tolist()[1] - -18.858874541698583) <= 1e-6)
        self.assertTrue(abs(d[50000].tolist()[2] - 14.75796300561617) <= 1e-6)
Example #30
def subtract_median(names, new_names=None, suffix=None):

    old_names = pyspedas.tnames(names)

    if len(old_names) < 1:
        print('Subtract Median error: No pytplot names were provided.')
        return

    if suffix is None:
        suffix = '-m'

    if new_names is None:
        n_names = [s + suffix for s in old_names]
    elif new_names == '':
        n_names = old_names
    else:
        n_names = new_names

    if len(n_names) != len(old_names):
        n_names = [s + suffix for s in old_names]

    for i in range(len(old_names)):
        alldata = pytplot.get_data(old_names[i])
        time = alldata[0]
        data = alldata[1]
        new_data = data-numpy.median(data, axis=0)
        pytplot.store_data(n_names[i], data={'x': time, 'y': new_data})
        print('Subtract Median was applied to: ' + n_names[i])