Example #1
def test_parse_cdftt2000():
    x = "2004-03-01 12:24:22.351793238"
    parsed = cdfepoch.parse(x)
    assert parsed == [131415926535793232]

    assert cdfepoch.to_datetime(parsed) == [
        datetime(2004, 3, 1, 12, 25, 26, 535793)
    ]
Example #2
def test_parse_cdfepoch16():
    input_time = 53467976543.0 + 543218654100j
    x = cdfepoch.encode(input_time)
    assert x == "1694-05-01 07:42:23.543218654"
    add_precision = x + "000"
    parsed = cdfepoch.parse(add_precision)
    assert parsed[0] == approx(53467976543 + .543218654)

    assert cdfepoch.to_datetime(input_time) == datetime(1694, 5, 1, 7, 42, 23, 543219)
Example #3
def test_findepochrange_cdfepoch():
    start_time = "2013-12-01 12:24:22.000"
    end_time = "2014-12-01 12:24:22.000"
    x = cdfepoch.parse([start_time, end_time])
    time_array = np.arange(x[0], x[1], step=1000000)

    test_start = [2014, 8, 1, 8, 1, 54, 123]
    test_end = [2018, 1, 1, 1, 1, 1, 1]
    index = cdfepoch.findepochrange(time_array, starttime=test_start, endtime=test_end)
    # Check that the time at the first returned index is at or after test_start,
    # and the sample just before it is at or before test_start
    assert time_array[index[0]] >= cdfepoch.compute(test_start)
    assert time_array[index[0]-1] <= cdfepoch.compute(test_start)

    assert time_array[index[-1]] <= cdfepoch.compute(test_end)
Example #4
def test_encode_cdfepoch16():
    '''
    cdf_encode_epoch16(dcomplex(63300946758.000000, 176214648000.00000)) in IDL
    returns 04-Dec-2005 20:39:28.176.214.654.976

    However, I believe this IDL routine is bugged.  This website:
    https://www.epochconverter.com/seconds-days-since-y0

    shows a correct answer.
    '''
    x = cdfepoch.encode(np.complex128(63300946758.000000 + 176214648000.00000j))
    assert x == '2005-12-04 20:19:18.176214648'
    y = cdfepoch.encode(np.complex128([33300946758.000000 + 106014648000.00000j,
                                       61234543210.000000 + 000011148000.00000j]))
    assert y[0] == '1055-04-07 14:59:18.106014648'
    assert y[1] == '1940-06-12 03:20:10.000011148'
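The complex CDF_EPOCH16 values above pack whole seconds since year 0 into the real part and picoseconds into the imaginary part, so the docstring's claim can be sanity-checked without cdflib. A minimal sketch, assuming only that year 0 is a 366-day leap year in the proleptic Gregorian calendar:

from datetime import datetime, timedelta

# Real part of the CDF_EPOCH16 value: whole seconds since 0000-01-01.
seconds_since_year0 = 63300946758
# Python's datetime starts at year 1, so subtract the 366 days of year 0.
print(datetime(1, 1, 1) + timedelta(seconds=seconds_since_year0 - 366 * 86400))
# -> 2005-12-04 20:19:18, matching the encoded string rather than the IDL output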
Example #5
def test_compute_cdfepoch():
    '''
    Using random numbers for the compute tests
    '''
    random_time = []
    random_time.append(randint(0, 2018))  # Year
    random_time.append(randint(1, 12))  # Month
    random_time.append(randint(1, 28))  # Date
    random_time.append(randint(0, 23))  # Hour
    random_time.append(randint(0, 59))  # Minute
    random_time.append(randint(0, 59))  # Second
    random_time.append(randint(0, 999))  # Millisecond
    x = cdfepoch.breakdown(cdfepoch.compute(random_time))
    i = 0
    for t in x[0]:
        assert t == random_time[i], f'Time {random_time} was not equal to {x}'
        i += 1
Example #6
def test_breakdown_cdftt2000():
    x = cdfepoch.breakdown(123456789101112131)
    assert x[0][0] == 2003
    assert x[0][1] == 11
    assert x[0][2] == 30
    assert x[0][3] == 9
    assert x[0][4] == 33
    assert x[0][5] == 9
    assert x[0][6] == 101
    assert x[0][7] == 112
Example #7
def test_breakdown_cdfepoch16():
    x = cdfepoch.breakdown(np.complex128(63300946758.000000 + 176214648000.00000j))
    assert x[0][0] == 2005
    assert x[0][1] == 12
    assert x[0][2] == 4
    assert x[0][3] == 20
    assert x[0][4] == 19
    assert x[0][5] == 18
    assert x[0][6] == 176
    assert x[0][7] == 214
    assert x[0][8] == 648
    assert x[0][9] == 0
Example #8
def test_compute_cdfepoch16():
    random_time = []
    random_time.append(randint(0, 2018))  # Year
    random_time.append(randint(1, 12))  # Month
    random_time.append(randint(1, 28))  # Date
    random_time.append(randint(0, 23))  # Hour
    random_time.append(randint(0, 59))  # Minute
    random_time.append(randint(0, 59))  # Second
    random_time.append(randint(0, 999))  # Millisecond
    random_time.append(randint(0, 999))  # Microsecond
    random_time.append(randint(0, 999))  # Nanosecond
    random_time.append(randint(0, 999))  # Picosecond
    cdftime = cdfepoch.convert_to_astropy(cdfepoch.compute(random_time), format='cdf_epoch16')
    x = cdfepoch.breakdown(cdftime)
    i = 0
    for t in x[0]:
        assert t == random_time[i], 'Time {} was not equal to {}'.format(random_time, x)
        i += 1
        # Unfortunately, currently there is a pretty big loss of precision that comes with
        # the compute function.  Need to stop testing early.
        if i > 6:
            return
Example #9
def test_breakdown_cdfepoch():
    x = cdfepoch.breakdown([62285326000000.0, 62985326000000.0])
    # First in the array
    assert x[0][0] == 1973
    assert x[0][1] == 9
    assert x[0][2] == 28
    assert x[0][3] == 23
    assert x[0][4] == 26
    assert x[0][5] == 40
    assert x[0][6] == 0
    # Second in the array
    assert x[1][0] == 1995
    assert x[1][1] == 12
    assert x[1][2] == 4
    assert x[1][3] == 19
    assert x[1][4] == 53
    assert x[1][5] == 20
    assert x[1][6] == 0
Example #10
def test_findepochrange_cdftt2000():
    start_time = "2004-03-01 12:24:22.351793238"
    end_time = "2004-03-01 12:28:22.351793238"
    x = cdfepoch.parse([start_time, end_time])
    time_array = np.arange(x[0], x[1], step=1000000)

    test_start = [2004, 3, 1, 12, 25, 54, 123, 111, 98]
    test_end = [2004, 3, 1, 12, 26, 4, 123, 456, 789]
    index = cdfepoch.findepochrange(time_array, starttime=test_start, endtime=test_end)
    # Check that the time at the first returned index is at or after test_start,
    # and the sample just before it is at or before test_start
    assert time_array[index[0]] >= cdfepoch.compute(test_start)
    assert time_array[index[0]-1] <= cdfepoch.compute(test_start)

    assert time_array[index[-1]] <= cdfepoch.compute(test_end)
    assert time_array[index[-1]+1] >= cdfepoch.compute(test_end)
Example #11
def cdf_to_tplot(filenames, varformat=None, get_support_data=False,
                 prefix='', suffix='', plot=False, merge=False,
                 center_measurement=False, notplot=False):
    """
    This function will automatically create tplot variables from CDF files.

    .. note::
        Variables must have an attribute named "VAR_TYPE". If the attribute entry
        is "data" (or "support_data"), then the variable will be added as a tplot
        variable. Additionally, data variables should have attributes named
        "DEPEND_TIME" or "DEPEND_0" that describe which variable is the x axis.
        If the data is 2D, then an attribute "DEPEND_1" must describe which
        variable contains the secondary axis.

    Parameters:
        filenames : str/list of str
            The file names and full paths of CDF files.
        varformat : str
            The file variable formats to load into tplot.  Wildcard character
            "*" is accepted.  By default, all variables are loaded in.
        get_support_data: bool
            Data with an attribute "VAR_TYPE" with a value of "support_data"
            will be loaded into tplot.  By default, only loads in data with a
            "VAR_TYPE" attribute of "data".
        prefix: str
            The tplot variable names will be given this prefix.  By default,
            no prefix is added.
        suffix: str
            The tplot variable names will be given this suffix.  By default,
            no suffix is added.
        plot: bool
            The data is plotted immediately after being generated.  All tplot
            variables generated from this function will be on the same plot.
        merge: bool
            If True, then data from different cdf files will be merged into
            a single pytplot variable.
        center_measurement: bool
            If True, the CDF epoch variables are time-shifted to the middle
            of the accumulation interval by their DELTA_PLUS_VAR and
            DELTA_MINUS_VAR variable attributes
        notplot: bool
            If True, then data are returned in a hash table instead of
            being stored in tplot variables (useful for debugging, and
            access to multi-dimensional data products)

    Returns:
        List of tplot variables created (unless notplot keyword is used).
    """

    stored_variables = []
    epoch_cache = {}
    output_table = {}
    metadata = {}

    data_quants = {}
    if isinstance(filenames, str):
        filenames = [filenames]
    elif isinstance(filenames, list):
        filenames = filenames
    else:
        print("Invalid filenames input.")
        return stored_variables

    var_type = ['data']
    if varformat is None:
        varformat = ".*"
    if get_support_data:
        var_type.append('support_data')

    varformat = varformat.replace("*", ".*")
    var_regex = re.compile(varformat)

    for filename in filenames:
        cdf_file = cdflib.CDF(filename)
        cdf_info = cdf_file.cdf_info()
        all_cdf_variables = cdf_info['rVariables'] + cdf_info['zVariables']

        # Find the data variables
        for var in all_cdf_variables:
            if not re.match(var_regex, var):
                continue
            var_atts = cdf_file.varattsget(var)

            if 'VAR_TYPE' not in var_atts:
                continue

            if var_atts['VAR_TYPE'] in var_type:
                var_atts = cdf_file.varattsget(var)
                var_properties = cdf_file.varinq(var)
                if "DEPEND_TIME" in var_atts:
                    x_axis_var = var_atts["DEPEND_TIME"]
                elif "DEPEND_0" in var_atts:
                    x_axis_var = var_atts["DEPEND_0"]
                else:
                    if var_atts['VAR_TYPE'].lower() == 'data':
                        print("Cannot find x axis.")
                        print("No attribute named DEPEND_TIME or DEPEND_0 in \
                          variable " + var)
                    continue
                data_type_description \
                    = cdf_file.varinq(x_axis_var)['Data_Type_Description']

                # Find data name and if it is already in stored variables
                var_name = prefix + var + suffix

                if epoch_cache.get(filename+x_axis_var) is None:
                    delta_plus_var = 0.0
                    delta_minus_var = 0.0
                    delta_time = 0.0

                    xdata = cdf_file.varget(x_axis_var)
                    epoch_var_atts = cdf_file.varattsget(x_axis_var)

                    # check for DELTA_PLUS_VAR/DELTA_MINUS_VAR attributes
                    if center_measurement:
                        if 'DELTA_PLUS_VAR' in epoch_var_atts:
                            delta_plus_var = cdf_file.varget(epoch_var_atts['DELTA_PLUS_VAR'])
                            delta_plus_var_att = cdf_file.varattsget(epoch_var_atts['DELTA_PLUS_VAR'])

                            # check if a conversion to seconds is required
                            if 'SI_CONVERSION' in delta_plus_var_att:
                                si_conv = delta_plus_var_att['SI_CONVERSION']
                                delta_plus_var = delta_plus_var.astype(float)*float(si_conv.split('>')[0])
                            elif 'SI_CONV' in delta_plus_var_att:
                                si_conv = delta_plus_var_att['SI_CONV']
                                delta_plus_var = delta_plus_var.astype(float)*float(si_conv.split('>')[0])

                        if 'DELTA_MINUS_VAR' in epoch_var_atts:
                            delta_minus_var = cdf_file.varget(epoch_var_atts['DELTA_MINUS_VAR'])
                            delta_minus_var_att = cdf_file.varattsget(epoch_var_atts['DELTA_MINUS_VAR'])

                            # check if a conversion to seconds is required
                            if 'SI_CONVERSION' in delta_minus_var_att:
                                si_conv = delta_minus_var_att['SI_CONVERSION']
                                delta_minus_var = delta_minus_var.astype(float)*float(si_conv.split('>')[0])
                            elif 'SI_CONV' in delta_minus_var_att:
                                si_conv = delta_minus_var_att['SI_CONV']
                                delta_minus_var = delta_minus_var.astype(float)*float(si_conv.split('>')[0])

                        # sometimes these are specified as arrays
                        if isinstance(delta_plus_var, np.ndarray) and isinstance(delta_minus_var, np.ndarray):
                            delta_time = (delta_plus_var-delta_minus_var)/2.0
                        else: # and sometimes constants
                            if delta_plus_var != 0.0 or delta_minus_var != 0.0:
                                delta_time = (delta_plus_var-delta_minus_var)/2.0

                if epoch_cache.get(filename + x_axis_var) is None:
                    if ('CDF_TIME' in data_type_description) or \
                            ('CDF_EPOCH' in data_type_description):
                        xdata = cdfepoch.unixtime(xdata)
                        epoch_cache[filename+x_axis_var] = np.array(xdata)+delta_time
                else:
                    xdata = epoch_cache[filename + x_axis_var]

                try:
                    ydata = cdf_file.varget(var)
                except:
                    continue

                if ydata is None:
                    continue
                if "FILLVAL" in var_atts:
                    if (var_properties['Data_Type_Description'] ==
                            'CDF_FLOAT' or
                            var_properties['Data_Type_Description'] ==
                            'CDF_REAL4' or
                            var_properties['Data_Type_Description'] ==
                            'CDF_DOUBLE' or
                            var_properties['Data_Type_Description'] ==
                            'CDF_REAL8'):

                        if ydata[ydata == var_atts["FILLVAL"]].size != 0:
                            ydata[ydata == var_atts["FILLVAL"]] = np.nan

                tplot_data = {'x': xdata, 'y': ydata}

                depend_1 = None
                depend_2 = None
                depend_3 = None
                if "DEPEND_1" in var_atts:
                    if var_atts["DEPEND_1"] in all_cdf_variables:
                        depend_1 = np.array(cdf_file.varget(var_atts["DEPEND_1"]))
                if "DEPEND_2" in var_atts:
                    if var_atts["DEPEND_2"] in all_cdf_variables:
                        depend_2 = np.array(cdf_file.varget(var_atts["DEPEND_2"]))
                if "DEPEND_3" in var_atts:
                    if var_atts["DEPEND_3"] in all_cdf_variables:
                        depend_3 = np.array(cdf_file.varget(var_atts["DEPEND_3"]))

                nontime_varying_depends = []

                if depend_1 is not None and depend_2 is not None and depend_3 is not None:
                    tplot_data['v1'] = depend_1
                    tplot_data['v2'] = depend_2
                    tplot_data['v3'] = depend_3

                    if len(depend_1.shape) == 1:
                        nontime_varying_depends.append('v1')
                    if len(depend_2.shape) == 1:
                        nontime_varying_depends.append('v2')
                    if len(depend_3.shape) == 1:
                        nontime_varying_depends.append('v3')

                elif depend_1 is not None and depend_2 is not None:
                    tplot_data['v1'] = depend_1
                    tplot_data['v2'] = depend_2
                    if len(depend_1.shape) == 1:
                        nontime_varying_depends.append('v1')
                    if len(depend_2.shape) == 1:
                        nontime_varying_depends.append('v2')
                elif depend_1 is not None:
                    tplot_data['v'] = depend_1
                    if len(depend_1.shape) == 1:
                        nontime_varying_depends.append('v')
                elif depend_2 is not None:
                    tplot_data['v'] = depend_2
                    if len(depend_2.shape) == 1:
                        nontime_varying_depends.append('v')

                metadata[var_name] = {'display_type': var_atts.get("DISPLAY_TYPE", "time_series"),
                                        'scale_type': var_atts.get("SCALE_TYP", "linear")}

                if var_name not in output_table:
                    output_table[var_name] = tplot_data
                else:
                    var_data = output_table[var_name]
                    for output_var in var_data:
                        if output_var not in nontime_varying_depends:
                            var_data[output_var] = np.concatenate((var_data[output_var], tplot_data[output_var]))

    if notplot:
        return output_table

    for var_name in output_table.keys():
        to_merge = False
        if var_name in data_quants.keys() and merge:
            prev_data_quant = data_quants[var_name]
            to_merge = True

        try:
            store_data(var_name, data=output_table[var_name])
        except ValueError:
            continue

        if var_name not in stored_variables:
            stored_variables.append(var_name)

        if metadata.get(var_name) is not None:
            if metadata[var_name]['display_type'] == "spectrogram":
                options(var_name, 'spec', 1)
            if metadata[var_name]['scale_type'] == 'log':
                options(var_name, 'ylog', 1)

        if to_merge is True:
            cur_data_quant = data_quants[var_name]
            plot_options = copy.deepcopy(data_quants[var_name].attrs['plot_options'])
            data_quants[var_name] = xr.concat([prev_data_quant, cur_data_quant], dim='time')
            data_quants[var_name].attrs['plot_options'] = plot_options

    if notplot:
        return output_table

    if plot:
        tplot(stored_variables)

    return stored_variables
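A minimal usage sketch for the function above; the file name and prefix are hypothetical, and the behavior shown simply follows the docstring:

# Store every "data" variable from one CDF file as tplot variables.
names = cdf_to_tplot('my_mission_data.cdf', prefix='m1_', plot=False)
print(names)  # list of tplot variable names that were created

# Or skip tplot storage and get a dict of {'x': ..., 'y': ...} tables instead.
tables = cdf_to_tplot('my_mission_data.cdf', notplot=True)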
Example #12
def test_unixtime():
    x = cdfepoch.unixtime([500000000100, 123456789101112131])
    assert approx(x[0]) == 946728435.816
    assert x[1] == approx(1070184724.917112)
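A rough hand check of the first expected value (an assumption-laden sketch, not taken from the library): the input is a CDF_TT2000 time, i.e. nanoseconds from J2000 with leap seconds included, and at that date TT led UTC by 64.184 s (32.184 s plus 32 leap seconds).

# Unix time of 2000-01-01 12:00:00 UTC, stepped back by the TT-UTC offset
# to land on the J2000 epoch expressed in UTC (2000-01-01 11:58:55.816).
j2000_as_unix = 946728000 - 64.184
print(j2000_as_unix + 500000000100 * 1e-9)  # ~946728435.816, as asserted above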
Example #13
def test_encode_cdftt2000():
    x = cdfepoch.encode(186999622360321123)
    assert x == '2005-12-04 20:20:22.360321120'
    y = cdfepoch.encode([500000000100, 123456789101112131])
    assert y[0] == '2000-01-01 12:08:20.000000100'
    assert y[1] == '2003-11-30 09:33:09.101112128'
Example #14
def cdf_to_tplot(filenames,
                 varformat=None,
                 get_support_data=False,
                 get_ignore_data=False,
                 string_encoding='ascii',
                 prefix='',
                 suffix='',
                 plot=False,
                 merge=False,
                 center_measurement=False,
                 notplot=False,
                 varnames=[]):
    """
    This function will automatically create tplot variables from CDF files.  In general, the files should be
    ISTP compliant for this importer to work.  Each variable is read into a new tplot variable (a.k.a an xarray DataArray),
    and all associated file/variable metadata is read into the attrs dictionary.

    .. note::
        Variables must have an attribute named "VAR_TYPE". If the attribute entry
        is "data" (or "support_data"), then the variable will be added as a tplot
        variable. Additionally, data variables should have attributes named
        "DEPEND_TIME" or "DEPEND_0" that describe which variable is the x axis.
        If the data is 2D, then an attribute "DEPEND_1" must describe which
        variable contains the secondary axis.

    Parameters:
        filenames : str/list of str
            The file names and full paths of CDF files.
        varformat : str
            The file variable formats to load into tplot.  Wildcard character
            "*" is accepted.  By default, all variables are loaded in.
        get_support_data: bool
            Data with an attribute "VAR_TYPE" with a value of "support_data"
            will be loaded into tplot.  By default, only loads in data with a
            "VAR_TYPE" attribute of "data".
        prefix: str
            The tplot variable names will be given this prefix.  By default,
            no prefix is added.
        suffix: str
            The tplot variable names will be given this suffix.  By default,
            no suffix is added.
        plot: bool
            The data is plotted immediately after being generated.  All tplot
            variables generated from this function will be on the same plot.
        merge: bool
            If True, then data from different cdf files will be merged into
            a single pytplot variable.
        get_ignore_data: bool
            Data with an attribute "VAR_TYPE" with a value of "ignore_data"
            will be loaded into tplot.  By default, only loads in data with a
            "VAR_TYPE" attribute of "data".
        center_measurement: bool
            If True, the CDF epoch variables are time-shifted to the middle
            of the accumulation interval by their DELTA_PLUS_VAR and
            DELTA_MINUS_VAR variable attributes
        notplot: bool
            If True, then data are returned in a hash table instead of
            being stored in tplot variables (useful for debugging, and
            access to multi-dimensional data products)
        varnames: str or list of str
            Load these variables only. If [] or ['*'], then load everything.

    Returns:
        List of tplot variables created (unless notplot keyword is used).
    """

    stored_variables = []
    epoch_cache = {}
    output_table = {}
    metadata = {}

    if not isinstance(varnames, list):
        varnames = [varnames]

    if len(varnames) > 0:
        if '*' in varnames:
            varnames = []

    # pytplot.data_quants = {}
    if isinstance(filenames, str):
        filenames = [filenames]
    elif isinstance(filenames, list):
        filenames = filenames
    else:
        print("Invalid filenames input.")
        return stored_variables

    var_type = ['data']
    if varformat is None:
        varformat = ".*"
    if get_support_data:
        var_type.append('support_data')
    if get_ignore_data:
        var_type.append('ignore_data')

    varformat = varformat.replace("*", ".*")
    var_regex = re.compile(varformat)
    filenames.sort()
    for filename in filenames:
        cdf_file = cdflib.CDF(filename)
        cdf_file.string_encoding = string_encoding
        cdf_info = cdf_file.cdf_info()
        all_cdf_variables = cdf_info['rVariables'] + cdf_info['zVariables']
        # User defined variables.
        if len(varnames) > 0:
            load_cdf_variables = [
                value for value in varnames if value in all_cdf_variables
            ]
        else:
            load_cdf_variables = all_cdf_variables

        try:
            gatt = cdf_file.globalattsget()
        except:
            gatt = {}

        for var in load_cdf_variables:
            if not re.match(var_regex, var):
                continue
            var_atts = cdf_file.varattsget(var)

            if 'VAR_TYPE' in var_atts:
                this_var_type = var_atts['VAR_TYPE'].lower()
            elif 'PARAMETER_TYPE' in var_atts:
                this_var_type = var_atts['PARAMETER_TYPE'].lower()
            else:
                # 'VAR_TYPE' and 'PARAMETER_TYPE' not found in the variable attributes
                continue

            if this_var_type in var_type:
                var_atts = cdf_file.varattsget(var)
                var_properties = cdf_file.varinq(var)

                # Find data name and if it is already in stored variables
                if 'TPLOT_NAME' in var_atts:
                    var_name = prefix + var_atts['TPLOT_NAME'] + suffix
                else:
                    var_name = prefix + var + suffix

                if "DEPEND_TIME" in var_atts:
                    x_axis_var = var_atts["DEPEND_TIME"]
                elif "DEPEND_0" in var_atts:
                    x_axis_var = var_atts["DEPEND_0"]
                else:
                    # non-record varying variables (NRVs)
                    # added by egrimes, 13Jan2021
                    # here we assume if there isn't a DEPEND_TIME or DEPEND_0, there are no other depends
                    try:
                        ydata = cdf_file.varget(var)
                    except:
                        continue

                    if ydata is None:
                        continue

                    # since NRVs don't vary with time, they shouldn't vary across files
                    output_table[var_name] = {'y': ydata}

                    continue

                data_type_description \
                    = cdf_file.varinq(x_axis_var)['Data_Type_Description']

                if epoch_cache.get(filename + x_axis_var) is None:
                    delta_plus_var = 0.0
                    delta_minus_var = 0.0
                    delta_time = 0.0

                    # Skip variables with ValueErrors.
                    try:
                        xdata = cdf_file.varget(x_axis_var)
                        epoch_var_atts = cdf_file.varattsget(x_axis_var)
                    except ValueError:
                        continue

                    # check for DELTA_PLUS_VAR/DELTA_MINUS_VAR attributes
                    if center_measurement:
                        if 'DELTA_PLUS_VAR' in epoch_var_atts:
                            delta_plus_var = cdf_file.varget(
                                epoch_var_atts['DELTA_PLUS_VAR'])
                            delta_plus_var_att = cdf_file.varattsget(
                                epoch_var_atts['DELTA_PLUS_VAR'])

                            # check if a conversion to seconds is required
                            if 'SI_CONVERSION' in delta_plus_var_att:
                                si_conv = delta_plus_var_att['SI_CONVERSION']
                                delta_plus_var = delta_plus_var.astype(
                                    float) * float(si_conv.split('>')[0])
                            elif 'SI_CONV' in delta_plus_var_att:
                                si_conv = delta_plus_var_att['SI_CONV']
                                delta_plus_var = delta_plus_var.astype(
                                    float) * float(si_conv.split('>')[0])

                        if 'DELTA_MINUS_VAR' in epoch_var_atts:
                            delta_minus_var = cdf_file.varget(
                                epoch_var_atts['DELTA_MINUS_VAR'])
                            delta_minus_var_att = cdf_file.varattsget(
                                epoch_var_atts['DELTA_MINUS_VAR'])

                            # check if a conversion to seconds is required
                            if 'SI_CONVERSION' in delta_minus_var_att:
                                si_conv = delta_minus_var_att['SI_CONVERSION']
                                delta_minus_var = delta_minus_var.astype(
                                    float) * float(si_conv.split('>')[0])
                            elif 'SI_CONV' in delta_minus_var_att:
                                si_conv = delta_minus_var_att['SI_CONV']
                                delta_minus_var = delta_minus_var.astype(
                                    float) * float(si_conv.split('>')[0])

                        # sometimes these are specified as arrays
                        if isinstance(delta_plus_var,
                                      np.ndarray) and isinstance(
                                          delta_minus_var, np.ndarray):
                            delta_time = (delta_plus_var -
                                          delta_minus_var) / 2.0
                        else:  # and sometimes constants
                            if delta_plus_var != 0.0 or delta_minus_var != 0.0:
                                delta_time = (delta_plus_var -
                                              delta_minus_var) / 2.0

                if epoch_cache.get(filename + x_axis_var) is None:
                    if ('CDF_TIME' in data_type_description) or \
                            ('CDF_EPOCH' in data_type_description):
                        xdata = cdfepoch.unixtime(xdata)
                        epoch_cache[filename +
                                    x_axis_var] = np.array(xdata) + delta_time
                else:
                    xdata = epoch_cache[filename + x_axis_var]

                try:
                    ydata = cdf_file.varget(var)
                except:
                    continue

                if ydata is None:
                    continue
                if "FILLVAL" in var_atts:
                    if (var_properties['Data_Type_Description'] == 'CDF_FLOAT'
                            or var_properties['Data_Type_Description']
                            == 'CDF_REAL4'
                            or var_properties['Data_Type_Description']
                            == 'CDF_DOUBLE'
                            or var_properties['Data_Type_Description']
                            == 'CDF_REAL8'):

                        if ydata[ydata == var_atts["FILLVAL"]].size != 0:
                            ydata[ydata == var_atts["FILLVAL"]] = np.nan
                    elif var_properties[
                            'Data_Type_Description'][:7] == 'CDF_INT':
                        # NaN is only valid for floating point data
                        # but we still need to handle FILLVAL's for
                        # integer data, so we'll just set those to 0
                        ydata[ydata == var_atts["FILLVAL"]] = 0

                tplot_data = {'x': xdata, 'y': ydata}

                # Data may depend on other data in the CDF.
                depend_1 = None
                depend_2 = None
                depend_3 = None
                if "DEPEND_1" in var_atts:
                    if var_atts["DEPEND_1"] in all_cdf_variables:
                        depend_1 = np.array(
                            cdf_file.varget(var_atts["DEPEND_1"]))
                        # Ignore the depend types if they are strings
                        if depend_1.dtype.type is np.str_:
                            depend_1 = None
                if "DEPEND_2" in var_atts:
                    if var_atts["DEPEND_2"] in all_cdf_variables:
                        depend_2 = np.array(
                            cdf_file.varget(var_atts["DEPEND_2"]))
                        # Ignore the depend types if they are strings
                        if depend_2.dtype.type is np.str_:
                            depend_2 = None
                if "DEPEND_3" in var_atts:
                    if var_atts["DEPEND_3"] in all_cdf_variables:
                        depend_3 = np.array(
                            cdf_file.varget(var_atts["DEPEND_3"]))
                        # Ignore the depend types if they are strings
                        if depend_3.dtype.type is np.str_:
                            depend_3 = None

                nontime_varying_depends = []

                if depend_1 is not None and depend_2 is not None and depend_3 is not None:
                    tplot_data['v1'] = depend_1
                    tplot_data['v2'] = depend_2
                    tplot_data['v3'] = depend_3

                    if len(depend_1.shape) == 1:
                        nontime_varying_depends.append('v1')
                    if len(depend_2.shape) == 1:
                        nontime_varying_depends.append('v2')
                    if len(depend_3.shape) == 1:
                        nontime_varying_depends.append('v3')

                elif depend_1 is not None and depend_2 is not None:
                    tplot_data['v1'] = depend_1
                    tplot_data['v2'] = depend_2
                    if len(depend_1.shape) == 1:
                        nontime_varying_depends.append('v1')
                    if len(depend_2.shape) == 1:
                        nontime_varying_depends.append('v2')
                elif depend_1 is not None:
                    tplot_data['v'] = depend_1
                    if len(depend_1.shape) == 1:
                        nontime_varying_depends.append('v')
                elif depend_2 is not None:
                    tplot_data['v'] = depend_2
                    if len(depend_2.shape) == 1:
                        nontime_varying_depends.append('v')

                metadata[var_name] = {
                    'display_type': var_atts.get("DISPLAY_TYPE",
                                                 "time_series"),
                    'scale_type': var_atts.get("SCALE_TYP", "linear"),
                    'var_attrs': var_atts,
                    'file_name': filename,
                    'global_attrs': gatt
                }

                # Check if the variable already exists in the for loop output
                if var_name not in output_table:
                    output_table[var_name] = tplot_data
                else:
                    # If it does, loop though the existing variable's x,y,v,v2,v3,etc
                    var_data = output_table[var_name]
                    for output_var in var_data:
                        if output_var not in nontime_varying_depends:
                            if np.asarray(tplot_data[output_var]
                                          ).ndim == 0 and np.equal(
                                              tplot_data[output_var], None):
                                # If there is nothing in the new variable, then pass
                                pass
                            elif np.asarray(var_data[output_var]
                                            ).ndim == 0 and np.equal(
                                                var_data[output_var], None):
                                # If there is nothing in the old variable, then replace
                                var_data[output_var] = tplot_data[output_var]
                            else:  # If they both have something, then concatenate
                                var_data[output_var] = np.concatenate(
                                    (var_data[output_var],
                                     tplot_data[output_var]))

    if notplot:
        return output_table

    for var_name in output_table.keys():
        to_merge = False
        if var_name in pytplot.data_quants.keys() and merge:
            prev_data_quant = pytplot.data_quants[var_name]
            to_merge = True

        try:
            attr_dict = {}
            if metadata.get(var_name) is not None:
                attr_dict["CDF"] = {}
                attr_dict["CDF"]["VATT"] = metadata[var_name]['var_attrs']
                attr_dict["CDF"]["GATT"] = metadata[var_name]['global_attrs']
                attr_dict["CDF"]["FILENAME"] = metadata[var_name]['file_name']

                # extract the coordinate system, if available
                vatt_keys = list(attr_dict["CDF"]["VATT"].keys())
                vatt_lower = [k.lower() for k in vatt_keys]
                if 'coordinate_system' in vatt_lower:
                    attr_dict['data_att'] = {
                        'coord_sys':
                        attr_dict["CDF"]["VATT"][vatt_keys[vatt_lower.index(
                            'coordinate_system')]]
                    }
            store_data(var_name,
                       data=output_table[var_name],
                       attr_dict=attr_dict)
        except ValueError:
            continue

        if var_name not in stored_variables:
            stored_variables.append(var_name)

        if metadata.get(var_name) is not None:
            if metadata[var_name]['display_type'] == "spectrogram":
                options(var_name, 'spec', 1)
            if metadata[var_name]['scale_type'] == 'log':
                options(var_name, 'ylog', 1)
            if metadata[var_name].get('var_attrs') is not None:
                if metadata[var_name]['var_attrs'].get('LABLAXIS') is not None:
                    options(var_name, 'ytitle',
                            metadata[var_name]['var_attrs']['LABLAXIS'])
                if metadata[var_name]['var_attrs'].get('UNITS') is not None:
                    if metadata[var_name]['display_type'] == 'spectrogram':
                        options(
                            var_name, 'ztitle', '[' +
                            metadata[var_name]['var_attrs']['UNITS'] + ']')
                    else:
                        options(
                            var_name, 'ysubtitle', '[' +
                            metadata[var_name]['var_attrs']['UNITS'] + ']')

            # Gather up all options in the variable attribute section, toss them into options and see what sticks
            options(var_name, opt_dict=metadata[var_name]['var_attrs'])

        if to_merge is True:
            cur_data_quant = pytplot.data_quants[var_name]
            plot_options = copy.deepcopy(pytplot.data_quants[var_name].attrs)
            pytplot.data_quants[var_name] = xr.concat(
                [prev_data_quant, cur_data_quant], dim='time').sortby('time')
            pytplot.data_quants[var_name].attrs = plot_options

    if notplot:
        return output_table

    if plot:
        tplot(stored_variables)

    return stored_variables
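A hedged usage sketch for this newer variant (file and variable names below are hypothetical):

# Load only selected variables from several daily files; merge=True additionally
# combines them with any existing tplot variables of the same name.
stored = cdf_to_tplot(['day1.cdf', 'day2.cdf'],
                      varnames=['B_gse'],
                      merge=True,
                      center_measurement=True)

# notplot=True returns the raw {'x': ..., 'y': ...} tables instead of storing them.
raw = cdf_to_tplot('day1.cdf', notplot=True)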
Example #15
def test_parse_cdfepoch():
    x = cdfepoch.encode(62567898765432.0)
    assert x == "1982-09-12 11:52:45.432000000"
    stripped_time = x[:23]
    parsed = cdfepoch.parse(stripped_time)
    assert parsed[0] == approx(62567898765432.0)
Example #16
def test_encode_cdfepoch():
    x = cdfepoch.encode([62285326000000.0, 62985326000000.0])
    assert x[0] == '1973-09-28 23:26:40.000000000'
    assert x[1] == '1995-12-04 19:53:20.000000000'
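CDF_EPOCH values are milliseconds since year 0, so the first expected string can be reproduced with the same year-0 arithmetic used for the CDF_EPOCH16 check after Example #4 (a sanity check, not part of the test):

from datetime import datetime, timedelta

ms_since_year0 = 62285326000000.0
print(datetime(1, 1, 1) + timedelta(milliseconds=ms_since_year0) - timedelta(days=366))
# -> 1973-09-28 23:26:40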
Example #17
File: util.py  Project: dstansby/soloswapy
 def varget_time(self, variable):
     """
     Get a variable and return it as astropy Time.
     """
     var = self.varget(variable)
     return CDFAstropy.convert_to_astropy(var)
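For context, the same conversion can be done directly with cdflib; a short sketch with hypothetical file and variable names:

import cdflib
from cdflib.epochs_astropy import CDFAstropy

# Hypothetical file and variable names, shown without the wrapper class.
cdf = cdflib.CDF('solo_swa_file.cdf')
epoch = CDFAstropy.convert_to_astropy(cdf.varget('EPOCH'))  # astropy.time.Time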