Example #1
    def filter(self):

        from ptsa.filt import buttfilt

        # find the index of the axis called 'time'
        if self.time_axis < 0:

            time_index_array = np.where(np.array(self.time_series.dims) == 'time')[0]
            if len(time_index_array) > 0:
                # pick the first index that corresponds to the 'time' dimension
                self.time_axis = time_index_array[0]
            else:
                raise RuntimeError("Could not locate 'time' axis in your time series."
                                   " Make sure to either label the appropriate axis of your time series 'time'"
                                   " or specify the time axis explicitly as a non-negative integer.")

        filtered_array = buttfilt(self.time_series,
                                  self.freq_range, self.samplerate, self.filt_type,
                                  self.order, axis=self.time_axis)


        self.filtered_time_series = xray.DataArray(
            filtered_array,
            coords=[xray.DataArray(coord.copy()) for coord_name, coord in self.time_series.coords.items()]
        )


        return self.filtered_time_series

Example #2
    def write(self, filename, varname=None):
        """Write a :class:`Field` to a netcdf file

        :param filename: Basename of the file
        :param varname: Name of the field, to be appended to the filename"""
        filepath = str(path.local('%s%s.nc' % (filename, self.name)))
        if varname is None:
            varname = self.name
        # Derive name of 'depth' variable for NEMO convention
        vname_depth = 'depth%s' % self.name.lower()

        # Create DataArray objects for file I/O
        t, d, x, y = (self.time.size, self.depth.size, self.lon.size,
                      self.lat.size)
        nav_lon = xray.DataArray(self.lon + np.zeros((y, x), dtype=np.float32),
                                 coords=[('y', self.lat), ('x', self.lon)])
        nav_lat = xray.DataArray(self.lat.reshape(y, 1) +
                                 np.zeros(x, dtype=np.float32),
                                 coords=[('y', self.lat), ('x', self.lon)])
        vardata = xray.DataArray(self.data.reshape((t, d, y, x)),
                                 coords=[('time_counter', self.time),
                                         (vname_depth, self.depth),
                                         ('y', self.lat), ('x', self.lon)])
        # Create xray Dataset and output to netCDF format
        dset = xray.Dataset({varname: vardata},
                            coords={
                                'nav_lon': nav_lon,
                                'nav_lat': nav_lat
                            })
        dset.to_netcdf(filepath)
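A minimal, self-contained sketch of the same NEMO-style output layout, shown here without the Field class; the grid sizes, the variable name 'U', the depth name 'depthu' and the output filename are all hypothetical.

import numpy as np
import xray  # on newer installs: import xarray as xray

# hypothetical grid sizes and axes
t, d, y, x = 2, 3, 4, 5
time = np.arange(t, dtype=np.float64)
depth = np.linspace(0., 100., d)
lat = np.linspace(-1., 1., y)
lon = np.linspace(30., 34., x)

# 2-D longitude/latitude fields, following the NEMO convention used above
nav_lon = xray.DataArray(np.broadcast_to(lon, (y, x)).astype(np.float32),
                         coords=[('y', lat), ('x', lon)])
nav_lat = xray.DataArray(np.broadcast_to(lat.reshape(y, 1), (y, x)).astype(np.float32),
                         coords=[('y', lat), ('x', lon)])
vardata = xray.DataArray(np.zeros((t, d, y, x), dtype=np.float32),
                         coords=[('time_counter', time), ('depthu', depth),
                                 ('y', lat), ('x', lon)])

# Dataset with the 2-D nav_lon/nav_lat coordinates attached, written to netCDF
dset = xray.Dataset({'U': vardata}, coords={'nav_lon': nav_lon, 'nav_lat': nav_lat})
dset.to_netcdf('U_example.nc')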
Example #3
 def to_xray(self, ndarray):
     """Create xray object matching original one from spharm object."""
     # Re-expand collapsed non-lat/lon dims.
     arr_orig = self._u
     ax_lat_orig = arr_orig.get_axis_num(LAT_STR)
     ax_lon_orig = arr_orig.get_axis_num(LON_STR)
     ax_other_dims = set(range(arr_orig.ndim)) - {ax_lat_orig, ax_lon_orig}
     shape_other_dims = [
         axlen for n, axlen in enumerate(arr_orig.shape)
         if n in ax_other_dims
     ]
     arr_new = ndarray.reshape([ndarray.shape[0], ndarray.shape[1]] +
                               shape_other_dims)
     # Return to original axis order.
     arr_new = np.rollaxis(arr_new, 2, 0)
     if arr_orig.ndim == 4:
         arr_new = np.rollaxis(arr_new, -1, 1)
     # Return to original latitude orientation.
     # If the original array got flipped, flip it back.
     if self.flag_flip_lat(arr_orig, out_north_to_south=True):
         arr_new = np.swapaxes(
             arr_new.swapaxes(ax_lat_orig, 0)[::-1], ax_lat_orig, 0)
     # Reapply the mask and return to an xray object.
     return xray.DataArray(np.ma.array(arr_new, mask=self.mask),
                           dims=arr_orig.dims,
                           coords=arr_orig.coords)
Example #4
 def compute_mtf(self):
     """ Compute the MTF of a FocalPlaneArray. In this model, the FPA is assumed to be a rectangular array
         having pixels with rectangular aperture.
     :return:
     """
     # First set up a set of spatial frequencies up to a factor of 20 times the pixel nyquist
     nyquist_x = 1.0/(2.0*self.pitchx.data)  # cy/mm
     self.nyquist_x = Scalar('nyqx', nyquist_x, '1/' + self.pitchx.units)
     nyquist_y = 1.0/(2.0*self.pitchy.data)  # cy/mm
     self.nyquist_y = Scalar('nyqy', nyquist_y, '1/' + self.pitchy.units)
     # Generate a relative set of spatial frequencies, with variable spacing up to 20 times relative nyquist
     spf_rel = np.hstack((np.linspace(0., 1, 10), np.linspace(1., 2, 9), np.linspace(2., 3, 7),
                          np.linspace(3., 4, 7), np.linspace(4., 5, 7), np.linspace(5., 6, 5),
                          np.linspace(6., 7, 5), np.linspace(7., 8, 5), np.linspace(8., 9, 3),
                          np.linspace(9., 10, 3)))  # Will use sinc function up to argument of 10
     spf_rel = np.unique(spf_rel)   # Will be duplication at the zeroes
     spf_x = spf_rel * nyquist_x * 2.0
     spf_y = spf_rel * nyquist_y * 2.0
     spf = xd_identity(np.unique(np.hstack((spf_x, spf_y))), 'spf', attrs={'units': '1/mm'})  # Create spat freq axis
     self.spf = spf
     # Get back spf_rel, which could be different in x and y directions
     spf_rel_x = spf.data / (nyquist_x * 2.0)
     spf_rel_y = spf.data / (nyquist_y * 2.0)
     fldo = xd_identity([0.0, 90.0], 'fldo')
     self.mtf = xray.DataArray(np.sinc(np.vstack((spf_rel_x, spf_rel_y))).T,
                               [(spf), (fldo)],
                               name='mtf', attrs={'units': ''})
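For reference, a stripped-down numpy/xray sketch of the same rectangular-pixel MTF (a sinc in spatial frequency), using a hypothetical 5 micron pitch instead of the Scalar and xd_identity helpers.

import numpy as np
import xray  # on newer installs: import xarray as xray

pitch = 0.005                                # hypothetical pixel pitch in mm (5 micron)
nyquist = 1.0 / (2.0 * pitch)                # pixel Nyquist frequency in cy/mm
spf = np.linspace(0.0, 20.0 * nyquist, 200)  # spatial frequencies up to 20x Nyquist
mtf = np.sinc(spf * pitch)                   # sinc MTF of a rectangular aperture; first zero at 1/pitch
mtf_da = xray.DataArray(mtf, coords=[('spf', spf)], name='mtf',
                        attrs={'units': '', 'spf_units': '1/mm'})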
Example #5
def _read_one_signal(connection, signal):
    """ Read one signal through the connection """
    if 'oned' in signal:
        dimensions = {0: 'time'}
    elif 'twod' in signal:
        dimensions = {0: 'rho', 1: 'time'}
    else:
        raise NotImplementedError

    data = connection.get(signal)

    coords = {}
    units = {}
    for i in xrange(2):
        dim = r'dim_of({}, {})'.format(signal, i)
        unit = r'units_of({})'.format(dim)

        try:
            coords[dimensions[i]] = connection.get(dim)
            units[dimensions[i]] = connection.get(unit)
        except mds.MdsException:
            break

    # FIXME why do we need the transpose here?
    return xray.DataArray(data.T, coords, attrs={'units': units})
Example #6
def cyclic_dataarray(da, coord='lon'):
    """ Add a cyclic coordinate point to a DataArray along a specified
    named coordinate dimension.

    >>> from xray import DataArray
    >>> data = DataArray([[1, 2, 3], [4, 5, 6]],
    ...                      coords={'x': [1, 2], 'y': range(3)},
    ...                      dims=['x', 'y'])
    >>> cd = cyclic_dataarray(data, 'y')
    >>> print cd.data
    array([[1, 2, 3, 1],
           [4, 5, 6, 4]])
    """
    assert isinstance(da, xray.DataArray)

    lon_idx = da.dims.index(coord)
    cyclic_data, cyclic_coord = add_cyclic_point(da.values,
                                                 coord=da.coords[coord],
                                                 axis=lon_idx)

    # Copy and add the cyclic coordinate and data
    new_coords = dict(da.coords)
    new_coords[coord] = cyclic_coord
    new_values = cyclic_data

    new_da = xray.DataArray(new_values, dims=da.dims, coords=new_coords)

    # Copy the attributes for the re-constructed data and coords
    for att, val in da.attrs.items():
        new_da.attrs[att] = val
    for c in da.coords:
        for att in da.coords[c].attrs:
            new_da.coords[c].attrs[att] = da.coords[c].attrs[att]

    return new_da
Example #7
def season_mean(ds, calendar='standard'):
    """
    Calculate the seasonal mean from a xray Dataset of monthly means. Weight
    the means by the number of days in each month.

    Parameters
    ----------
    ds : xray.Dataset
        Dataset of monthly means, indexed by a monthly-frequency Pandas DatetimeIndex.
    calendar : {'standard', 'gregorian', 'proleptic_gregorian', 'julian'}
        netCDF calendar with leap years.

    Returns
    -------
    seasonal_mean : xray.Dataset
        Dataset representing the seasonal mean of `ds`.
    """
    # Make a DataArray with the number of days in each month, size = len(time)
    month_length = xray.DataArray(get_dpm(ds.time.to_index(),
                                          calendar=calendar),
                                  coords=[ds.time],
                                  name='month_length')
    # Calculate the weights by grouping by 'time.season'
    weights = (month_length.groupby('time.season') /
               month_length.groupby('time.season').sum())

    # Test that the sum of the weights for each season is 1.0
    np.testing.assert_allclose(
        weights.groupby('time.season').sum().values, np.ones(4))

    # Calculate the weighted average
    return (ds * weights).groupby('time.season').sum(dim='time')
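A usage sketch for season_mean, assuming a simple get_dpm stand-in built from pandas' days_in_month (the real helper presumably also handles non-standard netCDF calendars).

import numpy as np
import pandas as pd
import xray  # on newer installs: import xarray as xray

def get_dpm(time_index, calendar='standard'):
    # hypothetical stand-in: days per month from pandas, ignoring the calendar argument
    return time_index.days_in_month.values

times = pd.date_range('2000-01-01', periods=24, freq='M')
ds = xray.Dataset({'tas': (('time',), 280.0 + np.random.rand(24))},
                  coords={'time': times})
print(season_mean(ds))  # one weighted value of 'tas' per season (DJF, JJA, MAM, SON)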
Example #8
    def diff_zp1_to_z(self, array, zname='Z', zp1name='Zp1'):
        """Take the vertical difference of an array located at zp1 points, resulting
        in a new array at z points.

        Parameters
        ----------
        array : xray DataArray
            The array to difference. Must have the coordinate zp1.
        zname : str, optional
            The variable name for the z point
        zp1name : str, optional
            The variable name for the zp1 point

        Returns
        -------
        diff : xray DataArray
            A new array with vertical coordinate z.
        """
        a_up = array.isel(**{zp1name: slice(None, -1)})
        a_dn = array.isel(**{zp1name: slice(1, None)})
        a_diff = a_up.data - a_dn.data
        # dimensions and coords of new array
        coords, dims = self._get_coords_from_dims(array.dims,
                                                  replace={zp1name: zname})
        return xray.DataArray(a_diff,
                              coords,
                              dims,
                              name=_append_to_name(array, 'diff_zp1_to_z'))
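Outside the grid class, the same zp1-to-z differencing can be sketched directly with xray on a hypothetical 1-D profile, taking the cell-centre depths as the midpoints of the interface depths.

import numpy as np
import xray  # on newer installs: import xarray as xray

zp1 = np.array([0., -10., -30., -60.])   # hypothetical interface (zp1) depths
z = 0.5 * (zp1[:-1] + zp1[1:])           # cell-centre (z) depths
temp_zp1 = xray.DataArray(np.array([20., 18., 15., 11.]),
                          coords=[('Zp1', zp1)], name='temp')

a_up = temp_zp1.isel(Zp1=slice(None, -1))
a_dn = temp_zp1.isel(Zp1=slice(1, None))
temp_diff = xray.DataArray(a_up.data - a_dn.data, coords=[('Z', z)],
                           name='temp_diff_zp1_to_z')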
Example #9
def dict_toxray(data, ds=None, **kwargs):
    """
    Converts a dictionary with keys as variable names to an
    xray.Dataset object

    The dictionary keys should correspond with variable names in
    ncmetadata.yaml

    **kwargs are passed directly to xray.DataArray()
    """

    ds = {} if ds is None else ds

    for vv in list(data.keys()):

        if vv in ncmeta:
            attrs = ncmeta[vv]['attributes']
        else:
            print('Warning: variable %s not in ncmetadata.yaml. Dataset will have no attrs' % vv)
            attrs = {}

        da = xray.DataArray(data[vv], attrs=attrs, **kwargs)

        ds.update({vv: da})

    return xray.Dataset(ds)
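A usage sketch, with a tiny hypothetical ncmeta stand-in for the mapping that is normally loaded from ncmetadata.yaml.

import numpy as np
import xray  # on newer installs: import xarray as xray

# hypothetical stand-in for the ncmetadata.yaml contents
ncmeta = {'temperature': {'attributes': {'units': 'degC', 'long_name': 'water temperature'}}}

data = {'temperature': np.array([11.2, 11.5, 11.9]),
        'salinity': np.array([35.0, 35.1, 35.2])}  # not in ncmeta, so it gets no attrs
ds = dict_toxray(data, dims=['time'], coords={'time': np.arange(3)})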
Example #10
def annual_mean(ds, calendar='standard'):
    """
    Calculate the annual mean from a xray Dataset of monthly means. Weight
    the means by the number of days in each month.

    Parameters
    ----------
    ds : xray.Dataset
        Dataset of monthly means, indexed by a monthly-frequency Pandas DatetimeIndex.
    calendar : {'standard', 'gregorian', 'proleptic_gregorian', 'julian'}
        netCDF calendar with leap years.

    Returns
    -------
    annual_mean : xray.Dataset
        Dataset representing the annual mean of `ds`.
    """
    # Make a DataArray with the number of days in each month, size = len(time)
    month_length = xray.DataArray(get_dpm(ds.time.to_index(),
                                          calendar=calendar),
                                  coords=[ds.time],
                                  name='month_length')
    # Calculate each month's weight as its fraction of the total number of days
    weights = month_length / month_length.sum()

    # Calculate the weighted average
    return (ds * weights).sum(dim='time')
Example #11
def test_vrtdiv():
    path = ('/archive/Spencer.Hill/am2/am2clim_reyoi/gfdl.ncrc2-default-prod/'
            'pp/atmos_level/ts/monthly/1yr/atmos_level.198301-198312.')

    # Vertically defined, sigma levels.
    u_arr = xray.open_dataset(path + 'ucomp.nc').ucomp
    v_arr = xray.open_dataset(path + 'vcomp.nc').vcomp
    vort, divg = compute_vrtdiv(u_arr, v_arr)
    assert vort.shape == u_arr.shape
    assert divg.shape == u_arr.shape
    np.testing.assert_array_equal(u_arr.lat, vort.lat)
    np.testing.assert_array_equal(u_arr.lon, vort.lon)
    np.testing.assert_array_equal(u_arr.time, vort.time)
    np.testing.assert_array_equal(u_arr.pfull, vort.pfull)

    # Not vertically defined.
    u0 = u_arr[:, 0]
    v0 = v_arr[:, 0]
    vort0, divg0 = compute_vrtdiv(u0, v0)
    assert vort0.shape == u0.shape
    assert divg0.shape == u0.shape

    # Dummy case: zeros everywhere
    u_arr_zeros = xray.DataArray(np.zeros_like(u_arr.values),
                                 dims=u_arr.dims,
                                 coords=u_arr.coords)
    v_arr_zeros = u_arr_zeros.copy()
    vort_zeros, divg_zeros = compute_vrtdiv(u_arr_zeros, v_arr_zeros)
    assert not vort_zeros.any()
    assert not divg_zeros.any()
Example #12
    def diff_z_to_zp1(self, array):
        """Take the vertical difference of an array located at z points, resulting
        in a new array at zp1 points, but missing the upper and lower point.

        Parameters
        ----------
        array : xray DataArray
            The array to difference. Must have the coordinate z.

        Returns
        -------
        diff : xray DataArray
            A new array with vertical coordinate zp1.
        """
        a_up = array.isel(Z=slice(None, -1))
        a_dn = array.isel(Z=slice(1, None))
        a_diff = a_up.data - a_dn.data
        # dimensions and coords of new array
        coords, dims = self._get_coords_from_dims(array.dims,
                                                  replace={'Z': 'Zp1'})
        # trim vertical
        coords['Zp1'] = coords['Zp1'][1:-1]
        return xray.DataArray(a_diff,
                              coords,
                              dims,
                              name=_append_to_name(array, 'diff_z_to_zp1'))
Example #13
    def __init__(self, fpa, ad_bit_depth, digital_gain=None, digital_offset=(0.0, 'count'), noise=(0.0, 'count'),
                 sitf=None, exp_time_min=(10.0e-6, 's'), exp_time_max=(np.inf, 's'), attrs=None):
        """ Camera constructor. This class if for representation of a Camera, which incorporates
        a FocalPlaneArray and a converter stage which converts photoelectrons into digital levels (also called
        digital numbers - DN and in MORTICIA, the pint unit 'count' is used).

        :param fpa: The FocalPlaneArray incorporated into the Camera
        :param ad_bit_depth: Number of bits in the Analogue-to-Digital A/D converter. Must be provided in MORTICIA
            scalar format as e.g. [16, 'bit']
        :param digital_gain: The number of photoelectrons required to raise the output by 1 digital level (DN)
            Must be provided in MORTICIA scalar format as e.g. [2.2, 'e/count'].
        :param digital_offset: The digital level (DN) output of the camera for zero photoelectrons. This is not
            the "black level", which typically includes additional dark signal. Must be provided in MORTICIA
            scalar format as e.g. [10.0, 'count'].
        :param noise: An optional additional noise component to add to the signal. Must be provided as a
            MORTICIA scalar in either electrons or counts e.g. [2, 'e']. This is an RMS noise component,
            which is equivalent to a standard deviation.
        :param sitf: Signal transfer function (SiTF). As an alternative to providing the digital_gain and
            digital_offset, (particularly should the camera have essentially non-linear response) the SiTF
            can be provided as an xray.DataArray, with the input axis in units of electrons ('e') and
            the data in units of digital level ('count'). If digital_gain and digital_offset are provided,
            the SiTF is calculated and stored internally as an xray.DataArray.
        :param exp_time_min: Minimum exposure time of the Camera, in MORTICIA scalar format e.g. [10.0e-6, 's'].
        :param exp_time_max: Maximum exposure time of the Camera, in MORTICIA scalar format e.g. [np.inf, 's'].
        :param attrs: A user-defined dictionary of other information about this Camera object. Could include
            items such as 'model', 'manufacturer' etc. A 'title' and 'long_name' are recommended attributes.
        :return:
        """
        self.fpa = fpa
        self.ad_bit_depth = Scalar('bitdepth', *ad_bit_depth)
        if digital_gain is not None:
            self.digital_gain = Scalar('dgain', *digital_gain)
            if sitf is not None:
                warnings.warn('The digital_gain and the sitf of a Camera object should not both be provided.')
        self.digital_offset = Scalar('doffset', *digital_offset)  # Initialise
        if sitf is not None:  # Check and save the sitf
            xd_check_convert_units(sitf, 'phe', default_units('phe'))
            xd_check_convert_units(sitf, 'dn', default_units('dn'))
            self.sitf = sitf
        else:  # Create an SiTF from the digital gain and offset inputs
            if digital_gain is None:
                warnings.warn('Estimating Camera digital gain from bit depth and FPA well capacity')
                self.digital_gain = Scalar('dgain', self.fpa.wellcapacity.data / 2.0**self.ad_bit_depth.data, 'e/count')
            slope = 1.0 / self.digital_gain.data
            # Compute the upper point of the sitf, taking A/D limit and well saturation into account
            # Calculate digital numbers at the well capacity
            dn_at_well_capacity = self.fpa.wellcapacity.data * slope + self.digital_offset.data
            # Calculate the number of photelectrons at maximum DN
            phe_at_max_dn = (2.0**self.ad_bit_depth.data - self.digital_offset.data) / slope
            phe_max = np.minimum(phe_at_max_dn, self.fpa.wellcapacity.data) # Can't have more photoelectrons than well cap.
            dn_max = np.minimum(dn_at_well_capacity, 2.0**self.ad_bit_depth.data)  # Can't have more DN than 2^bits
            self.sitf = xray.DataArray([self.digital_offset.data, dn_max],
                                       [('phe', [0.0, phe_max], {'units': 'e', 'extrap_hi': 'sustain',
                                                                 'extrap_lo': np.nan})],
                                       name='dn', attrs={'units': 'count'})
        if noise[1] == 'e':  # Convert to dn count using the sitf
            self.noise = Scalar('dnoise', noise[0] * slope, 'count')
        else:
            self.noise = Scalar('dnoise', *noise)
        self.attrs = attrs  # user-defined info about this camera
Example #14
    def pad_zl_to_zp1(self, array, fill_value=0., zlname='Zl', zp1name='Zp1'):
        """Pad an array located at zl points such that it is located at
        zp1 points. An additional fill value is required for the bottom point.

        Parameters
        ----------
        array : xray DataArray
            The array to pad. Must have the coordinate zl.
        fill_value : number, optional
            The value to be used at the bottom point.
        zlname : str, optional
            The variable name for the zl point
        zp1name : str, optional
            The variable name for the zp1 point

        Returns
        -------
        padded : xray DataArray
            Padded array with vertical coordinate zp1.
        """
        coords, dims = self._get_coords_from_dims(array.dims)
        zdim = dims.index(zlname)
        # shape of the new array to concat at the bottom
        shape = list(array.shape)
        shape[zdim] = 1
        # replace Zl with the bottom level
        coords[zlname] = np.atleast_1d(self.ds[zp1name][-1].data)
        # an array filled with fill_value at the bottom
        # need different behavior for numpy vs dask
        if array.chunks:
            chunks = list(array.data.chunks)
            chunks[zdim] = (1, )
            zarr = fill_value * da.ones(
                shape, dtype=array.dtype, chunks=chunks)
            zeros = xray.DataArray(zarr, coords, dims).chunk()
        else:
            zarr = np.zeros(shape, array.dtype)
            zeros = xray.DataArray(zarr, coords, dims)
        newarray = xray.concat([array, zeros],
                               dim=zlname).rename({zlname: zp1name})
        if newarray.chunks:
            # this assumes that there was only one chunk in the vertical to begin with;
            # how could this be handled better?
            return newarray.chunk({zp1name: len(newarray[zp1name])})
        else:
            return newarray
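A stripped-down, numpy-only sketch of the same padding idea on a hypothetical 1-D profile, concatenating a constant bottom level and relabelling the vertical dimension (the dask branch is omitted).

import numpy as np
import xray  # on newer installs: import xarray as xray

zl = np.array([0., -10., -30.])          # hypothetical upper-interface (Zl) depths
zp1 = np.array([0., -10., -30., -60.])   # interface (Zp1) depths including the bottom
w = xray.DataArray(np.array([0.1, 0.2, 0.3]), coords=[('Zl', zl)], name='w')

# a single bottom level holding the fill value, then concatenate and relabel
bottom = xray.DataArray(np.zeros(1), coords=[('Zl', zp1[-1:])], name='w')
w_padded = xray.concat([w, bottom], dim='Zl').rename({'Zl': 'Zp1'})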
Example #15
    def construct_output_array(self, array, dims, coords):
        out_array = xray.DataArray(array, dims=dims, coords=coords)
        out_array.attrs['samplerate'] = self.time_series.attrs['samplerate']
        if self.resamplerate > 0.0:
            out_array.attrs['samplerate'] = self.resamplerate

        return out_array
Example #16
def running_mean(darray, window):
    """Calculate the running mean."""

    dframe = darray.to_pandas()
    dframe = pandas.rolling_mean(dframe, window, center=True)
    dframe = dframe.dropna()

    return xray.DataArray(dframe)
Example #17
 def test_unary(self):
     args = [0,
             np.zeros(2),
             xray.Variable(['x'], [0, 0]),
             xray.DataArray([0, 0], dims='x'),
             xray.Dataset({'y': ('x', [0, 0])})]
     for a in args:
         self.assertIdentical(a + 1, xu.cos(a))
Example #18
 def __init__(self, treat_csv, notes, shape=(38, 14)):
     self.shape = shape  # plate format
     self.treatments = pd.DataFrame.from_csv(treat_csv)
     # row = treatment;
     # columns = components;
     # each item has tuple (quantity, units, start, stop)
     self.map = xray.DataArray()
     self.metadata = dict(notes)
Example #19
 def bwd_diff1(arr, dim, is_coord=False):
     """Backward differencing of the array.  Not its full derivative."""
     if is_coord:
         arr_diff = arr[dim].diff(dim, n=1, label='upper')
         return xray.DataArray(np.diff(arr[dim]),
                               dims=[dim],
                               coords=[arr_diff[dim]])
     return arr.diff(dim, n=1, label='upper')
Example #20
 def create_time_array(self):
     """Create an xray.DataArray comprising the desired months."""
     all_months = pd.date_range(start=self.apply_year_offset(self.start_date),
                                end=self.apply_year_offset(self.end_date),
                                freq='M')
     time = xray.DataArray(all_months, dims=[TIME_STR])
     month_cond = self._construct_month_conditional(time, self.months)
     return time[month_cond]
Example #21
def coord_to_new_dataarray(arr, dim):
    """Create a DataArray comprising the coord for the specified dim.

    Useful, for example, when wanting to resample in time, because at least
    for xray 0.6.0 and prior, the `resample` method doesn't work when applied
    to coords.  The DataArray returned by this method lacks that limitation.
    """
    return xray.DataArray(arr[dim].values,
                          coords=[arr[dim].values],
                          dims=[dim])
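A quick usage sketch with hypothetical daily data; the returned coordinate DataArray can then be resampled or combined like any other array.

import numpy as np
import pandas as pd
import xray  # on newer installs: import xarray as xray

times = pd.date_range('2000-01-01', periods=10, freq='D')
arr = xray.DataArray(np.random.rand(10, 3),
                     coords=[('time', times), ('lon', [0., 120., 240.])])
time_arr = coord_to_new_dataarray(arr, 'time')  # 1-D DataArray holding the time values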
Example #22
 def test_invalid_dataarray_names_raise(self):
     te = (TypeError, 'string or None')
     ve = (ValueError, 'string must be length 1 or')
     data = np.random.random((2, 2))
     da = xray.DataArray(data)
     for name, e in zip([0, (4, 5), True, ''], [te, te, te, ve]):
         ds = Dataset({name: da})
         with self.assertRaisesRegexp(*e):
             with self.roundtrip(ds) as actual:
                 pass
Example #23
    def to_xray(self):
        import xray

        das = {}
        for varname, unit in fields:
            x, t, val = self.xtargs(varname)
            das[varname] = xray.DataArray(val,
                                          coords=(t, x),
                                          dims=('time', 'x'))
            
        return xray.Dataset(das)
Example #24
def _initialize_array(global_grid, result_array):
    "Fill in starting values for the energy array."
    max_energies = global_grid['GM'].max(dim='points', skipna=False)
    len_comps = result_array.dims['component']
    if max_energies.isnull().any():
        raise ValueError('Input energy surface contains one or more NaNs.')
    result_array['GM'] = xray.broadcast_arrays(max_energies, result_array['GM'])[0].copy()
    result_array['MU'] = xray.broadcast_arrays(max_energies, result_array['MU'])[0].copy()
    result_array['MU'].values[:] = np.nan
    result_array['NP'] = xray.broadcast_arrays(xray.DataArray(np.nan), result_array['NP'])[0].copy()
    # The initial simplex for each target point will be
    #     the fictitious hyperplane
    # This hyperplane sits above the system's energy surface
    # The reason for this is to guarantee our initial simplex contains
    #     the target point
    # Note: We're assuming that the max energy is in the first few, presumably
    # fictitious points instead of more rigorously checking with argmax.
    result_array['points'] = xray.broadcast_arrays(xray.DataArray(np.arange(len_comps),
                                                                  dims='vertex'),
                                                   result_array['points'])[0].copy()
Example #25
def test_tend_each_timestep():
    arr = load('ucomp')
    darr_dt = tend_each_timestep(arr)
    assert darr_dt.shape == arr.shape
    np.testing.assert_array_equal(arr.time.values, darr_dt.time.values)

    # Dummy case: zeros everywhere
    arr_zeros = xray.DataArray(np.zeros_like(arr.values),
                               dims=arr.dims,
                               coords=arr.coords)
    darr_dt = tend_each_timestep(arr_zeros)
    assert not darr_dt.any()
Example #26
 def test_binary(self):
     args = [0,
             np.zeros(2),
             xray.Variable(['x'], [0, 0]),
             xray.DataArray([0, 0], dims='x'),
             xray.Dataset({'y': ('x', [0, 0])})]
     for n, t1 in enumerate(args):
         for t2 in args[n:]:
             self.assertIdentical(t2 + 1, xu.maximum(t1, t2 + 1))
             self.assertIdentical(t2 + 1, xu.maximum(t2, t1 + 1))
             self.assertIdentical(t2 + 1, xu.maximum(t1 + 1, t2))
             self.assertIdentical(t2 + 1, xu.maximum(t2 + 1, t1))
Example #27
 def diff_xp1_to_x(self, array):
     """Difference DataArray ``array`` in the x direction.
     Assumes that ``array`` is located at the xp1 point."""
     left = array
     right = self.roll(array, -1, "Xp1")
     if array.chunks:
         right = right.chunk(array.chunks)
     diff = right.data - left.data
     coords, dims = self._get_coords_from_dims(array.dims, replace={"Xp1": "X"})
     return xray.DataArray(diff, coords, dims).rename(
         _append_to_name(array, "diff_xp1_to_x")
     )
Example #28
    def resampled(self,
                  resampled_rate,
                  window=None,
                  loop_axis=None,
                  num_mp_procs=0,
                  pad_to_pow2=False):
        '''Resample the time series to ``resampled_rate`` using scipy.signal.resample.

        :param resampled_rate: resample rate
        :param window: window argument passed through to scipy.signal.resample
        :param loop_axis: ignored for now - added for legacy reasons
        :param num_mp_procs: ignored for now - added for legacy reasons
        :param pad_to_pow2: ignored for now - added for legacy reasons
        :return: resampled time series
        '''

        from scipy.signal import resample
        samplerate = self.attrs['samplerate']

        time_axis = self['time']
        time_axis_length = np.squeeze(time_axis.shape)
        new_length = int(
            np.round(time_axis_length * resampled_rate / float(samplerate)))


        resampled_array, new_time_axis = resample(self.values,
                                                  new_length,
                                                  t=time_axis.values,
                                                  axis=self.time_axis_index,
                                                  window=window)

        # construct the coordinate axes of the resampled array
        coords = []
        for i, dim_name in enumerate(self.dims):
            if i != self.time_axis_index:
                coords.append(self.coords[dim_name].copy())
            else:
                coords.append((dim_name, new_time_axis))

        resampled_time_series = xray.DataArray(resampled_array, coords=coords)
        resampled_time_series.attrs['samplerate'] = resampled_rate

        return resampled_time_series
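The same scipy-based resampling can be sketched on a plain xray.DataArray; the 100 Hz sine series and the 50 Hz target rate below are hypothetical.

import numpy as np
import xray  # on newer installs: import xarray as xray
from scipy.signal import resample

samplerate, resampled_rate = 100.0, 50.0
t = np.arange(200) / samplerate
series = xray.DataArray(np.sin(2 * np.pi * 5 * t), coords=[('time', t)],
                        attrs={'samplerate': samplerate})

new_length = int(np.round(series['time'].size * resampled_rate / samplerate))
new_values, new_time = resample(series.values, new_length,
                                t=series['time'].values, axis=0)
resampled = xray.DataArray(new_values, coords=[('time', new_time)],
                           attrs={'samplerate': resampled_rate})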
Example #29
 def diff_yp1_to_y(self, array):
     """Difference DataArray ``array`` in the y direction.
     Assumes that ``array`` is located at the yp1 point."""
     left = array
     right = self.roll(array, -1, 'Yp1')
     if array.chunks:
         right = right.chunk(array.chunks)
     diff = right.data - left.data
     coords, dims = self._get_coords_from_dims(array.dims,
                                               replace={'Yp1': 'Y'})
     return xray.DataArray(diff, coords, dims).rename(
         _append_to_name(array, '_diff_yp1_to_y'))
Example #30
    def fwd_diff1(arr, dim, is_coord=False):
        """Forward differencing of the array.  Not its full derivative.

        A bug in xray version 0.6.1 and prior causes the `DataArray.diff`
        method to not work when applied to a coordinate array.  Therefore,
        a workaround is implemented here and used if the `is_coord` keyword
        argument is True.
        """
        if is_coord:
            arr_diff = arr[dim].diff(dim, n=1, label='lower')
            return xray.DataArray(np.diff(arr[dim]),
                                  dims=[dim],
                                  coords=[arr_diff[dim]])
        return arr.diff(dim, n=1, label='lower')
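A short usage sketch with hypothetical coordinate values, assuming fwd_diff1 is available as a plain function; it shows the workaround path for a coordinate next to the ordinary diff of the data.

import numpy as np
import xray  # on newer installs: import xarray as xray

x = np.array([0.0, 1.0, 2.5, 4.5])
arr = xray.DataArray(x ** 2, coords=[('x', x)], name='f')

df = fwd_diff1(arr, 'x')                 # forward difference of the data
dx = fwd_diff1(arr, 'x', is_coord=True)  # forward difference of the coordinate itself
dfdx = df / dx                           # a simple one-sided derivative estimate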