Beispiel #1
0
 def test_expanded_indexer(self):
     """Check expanded_indexer against direct numpy indexing."""
     x = np.random.randn(10, 11, 12, 13, 14)
     y = np.arange(5)
     getter = ReturnItem()
     cases = [
         getter[:],
         getter[...],
         getter[0, :, 10],
         getter[..., 10],
         getter[:5, ..., 0],
         getter[..., 0, :],
         getter[y],
         getter[y, y],
         getter[..., y, y],
         getter[..., 0, 1, 2, 3, 4],
     ]
     for key in cases:
         expanded = indexing.expanded_indexer(key, x.ndim)
         # the expanded key must select exactly what the original did
         assert_array_equal(x[key], x[expanded])
         assert_array_equal(self.set_to_zero(x, key),
                            self.set_to_zero(x, expanded))
     # more indexers than dimensions must raise
     with raises_regex(IndexError, 'too many indices'):
         indexing.expanded_indexer(getter[1, 2, 3], 2)
Beispiel #2
0
 def test_expanded_indexer(self):
     """Check expanded_indexer against direct numpy indexing."""
     x = np.random.randn(10, 11, 12, 13, 14)
     y = np.arange(5)
     item_getter = ReturnItem()
     indexers = [
         item_getter[:],
         item_getter[...],
         item_getter[0, :, 10],
         item_getter[..., 10],
         item_getter[:5, ..., 0],
         item_getter[..., 0, :],
         item_getter[y],
         item_getter[y, y],
         item_getter[..., y, y],
         item_getter[..., 0, 1, 2, 3, 4],
     ]
     for key in indexers:
         expanded = indexing.expanded_indexer(key, x.ndim)
         # the expanded key must select exactly what the original did
         self.assertArrayEqual(x[key], x[expanded])
         self.assertArrayEqual(self.set_to_zero(x, key),
                               self.set_to_zero(x, expanded))
     # more indexers than dimensions must raise
     with raises_regex(IndexError, 'too many indices'):
         indexing.expanded_indexer(item_getter[1, 2, 3], 2)
Beispiel #3
0
    def __getitem__(self, item):
        """Return the mean of the variable at consecutive positions.

        Averages ``ncvar`` over index pairs (k, k+1) along dimension
        ``self.ds`` — presumably a destaggering of a grid-staggered
        variable (TODO confirm against the enclosing class).
        """

        # take care of ellipsis and other strange indexes: expand to a
        # full per-dimension indexer list we can edit in place
        item = list(indexing.expanded_indexer(item, len(self.dimensions)))

        # Slice to change: a scalar index is promoted to a length-1 slice
        # so the arithmetic below is uniform
        was_scalar = False
        sl = item[self.ds]
        if np.isscalar(sl) and not isinstance(sl, slice):
            sl = slice(sl, sl + 1)
            was_scalar = True

        # Ok, get the indexes right.  NOTE(review): `or` treats an explicit
        # start/stop of 0 the same as None — fine for start, check stop.
        start = sl.start or 0
        stop = sl.stop or self._ds_shape
        if stop < 0:
            # negative stop counts from the end of the staggered dim
            stop += self._ds_shape - 1
        stop = np.clip(stop + 1, 0, self._ds_shape)
        itemr = copy.deepcopy(item)
        if was_scalar:
            # scalar request: pick the pair (start, start + 1)
            item[self.ds] = start
            itemr[self.ds] = start + 1
        else:
            # slice request: left and right neighbours, offset by one
            item[self.ds] = slice(start, stop - 1)
            itemr[self.ds] = slice(start + 1, stop)
        # index with tuples — a list would mean fancy (pointwise) indexing
        return 0.5 * (self.ncvar[tuple(item)] + self.ncvar[tuple(itemr)])
Beispiel #4
0
    def __getitem__(self, item):
        """Return the step-wise difference of the variable along dim 0.

        Reads ``self.accvn`` (presumably an accumulated field — confirm
        against the enclosing class) and returns
        ``out[t] = acc[t] - acc[t-1]``, scaled by ``self._factor``.
        The first record has no predecessor, so the first requested step
        becomes NaN in that case.
        """

        # take care of ellipsis and other strange indexes
        item = list(indexing.expanded_indexer(item, len(self.dimensions)))

        # time is always going to be first dim I hope
        sl = item[0]
        if np.isscalar(sl) and not isinstance(sl, slice):
            sl = slice(sl, sl+1)

        # Ok, get the indexes right
        start = sl.start or 0
        stop = sl.stop or self._nel
        if stop < 0:
            stop += self._nel-1
        # shift one step back so we can difference against the previous record
        start -= 1
        do_nan = False
        if start < 0:
            # no record before the first one: that output step becomes NaN
            do_nan = True
        itemr = copy.deepcopy(item)
        item[0] = slice(start, stop-1)
        itemr[0] = slice(start+1, stop)

        # done
        var = self.nc.variables[self.accvn]
        if do_nan:
            item[0] = slice(0, stop-1)
            out = var[itemr]
            out[1:, ...] -= var[item]
            # np.NaN was removed in NumPy 2.0; np.nan is the same object
            out[0, ...] = np.nan
        else:
            out = var[itemr]
            out -= var[item]
        return out * self._factor
Beispiel #5
0
    def __getitem__(self, item):
        """Return the step-wise difference of the variable along dim 0.

        Reads ``self.accvn`` (presumably an accumulated field — confirm
        against the enclosing class) and returns
        ``out[t] = acc[t] - acc[t-1]``, scaled by ``self._factor``.
        The first record has no predecessor, so the first requested step
        becomes NaN in that case.
        """

        # take care of ellipsis and other strange indexes
        item = list(indexing.expanded_indexer(item, len(self.dimensions)))

        # time is always going to be first dim I hope
        sl = item[0]
        if np.isscalar(sl) and not isinstance(sl, slice):
            sl = slice(sl, sl + 1)

        # Ok, get the indexes right
        start = sl.start or 0
        stop = sl.stop or self._nel
        if stop < 0:
            stop += self._nel - 1
        # shift one step back so we can difference against the previous record
        start -= 1
        do_nan = False
        if start < 0:
            # no record before the first one: that output step becomes NaN
            do_nan = True
        itemr = copy.deepcopy(item)
        item[0] = slice(start, stop - 1)
        itemr[0] = slice(start + 1, stop)

        # done
        var = self.nc.variables[self.accvn]
        if do_nan:
            item[0] = slice(0, stop - 1)
            out = var[itemr]
            out[1:, ...] -= var[item]
            # np.NaN was removed in NumPy 2.0; np.nan is the same object
            out[0, ...] = np.nan
        else:
            out = var[itemr]
            out -= var[item]
        return out * self._factor
Beispiel #6
0
    def __getitem__(self, item):
        """Return the mean of the variable at consecutive positions.

        Averages ``ncvar`` over index pairs (k, k+1) along dimension
        ``self.ds`` — presumably a destaggering of a grid-staggered
        variable (TODO confirm against the enclosing class).
        """

        # take care of ellipsis and other strange indexes
        item = list(indexing.expanded_indexer(item, len(self.dimensions)))

        # Slice to change
        was_scalar = False
        sl = item[self.ds]
        if np.isscalar(sl) and not isinstance(sl, slice):
            sl = slice(sl, sl+1)
            was_scalar = True

        # Ok, get the indexes right
        start = sl.start or 0
        stop = sl.stop or self._ds_shape
        if stop < 0:
            stop += self._ds_shape-1
        stop = np.clip(stop+1, 0, self._ds_shape)
        itemr = copy.deepcopy(item)
        if was_scalar:
            item[self.ds] = start
            itemr[self.ds] = start+1
        else:
            item[self.ds] = slice(start, stop-1)
            itemr[self.ds] = slice(start+1, stop)
        # index with tuples: indexing an array with a *list* means fancy
        # (pointwise) indexing in numpy, not an orthogonal selection
        return 0.5*(self.ncvar[tuple(item)] + self.ncvar[tuple(itemr)])
Beispiel #7
0
    def __getitem__(self, item):
        """Compute ``_ncl_slp`` (sea-level pressure, per the helper's name)
        for the requested selection, reading TK, P/PB, QVAPOR and PH/PHB
        from the underlying netCDF file."""

        # take care of ellipsis and other strange indexes
        item = list(indexing.expanded_indexer(item, len(self.dimensions)))
        # we need the empty dims for _ncl_slp() to work: promote scalar
        # indexes to length-1 slices and remember which axes to squeeze
        # back out of the result afterwards
        squeezax = []
        for i, c in enumerate(item):
            if np.isscalar(c) and not isinstance(c, slice):
                item[i] = slice(c, c+1)
                squeezax.append(i)
        # add a slice in the 4th dim (the full vertical column is needed
        # regardless of what the caller asked for)
        item.insert(self.ds, slice(0, self._ds_shape+1))
        item = tuple(item)

        # get data (ScaledVar presumably applies scale/offset on read —
        # TODO confirm against its definition)
        vars = self.nc.variables
        with ScaledVar(vars['TK']) as var:
            tk = var[item]
        with ScaledVar(vars['P']) as p, ScaledVar(vars['PB']) as pb:
            # total pressure = perturbation + base state
            p = p[item] + pb[item]
        with ScaledVar(vars['QVAPOR']) as var:
            q = var[item]
        with ScaledVar(vars['PH']) as ph, ScaledVar(vars['PHB']) as phb:
            # geopotential (perturbation + base) to height via g = 9.81
            z = (ph[item] + phb[item]) / 9.81
        return np.squeeze(_ncl_slp(z, tk, p, q), axis=tuple(squeezax))
Beispiel #8
0
 def test_expanded_indexer(self) -> None:
     """Check expanded_indexer against direct numpy indexing."""
     x = np.random.randn(10, 11, 12, 13, 14)
     y = np.arange(5)
     getter = ReturnItem()
     for key in (
             getter[:],
             getter[...],
             getter[0, :, 10],
             getter[..., 10],
             getter[:5, ..., 0],
             getter[..., 0, :],
             getter[y],
             getter[y, y],
             getter[..., y, y],
             getter[..., 0, 1, 2, 3, 4],
     ):
         expanded = indexing.expanded_indexer(key, x.ndim)
         # the expanded key must select exactly what the original did
         assert_array_equal(x[key], x[expanded])
         assert_array_equal(self.set_to_zero(x, key),
                            self.set_to_zero(x, expanded))
     # more indexers than dimensions must raise
     with pytest.raises(IndexError, match=r"too many indices"):
         indexing.expanded_indexer(getter[1, 2, 3], 2)
Beispiel #9
0
 def test_expanded_indexer(self):
     """Check expanded_indexer against direct numpy indexing."""
     x = np.random.randn(10, 11, 12, 13, 14)
     y = np.arange(5)
     I = ReturnItem()  # noqa: E741
     for i in [
         I[:],
         I[...],
         I[0, :, 10],
         I[..., 10],
         I[:5, ..., 0],
         I[..., 0, :],
         I[y],
         I[y, y],
         I[..., y, y],
         I[..., 0, 1, 2, 3, 4],
     ]:
         j = indexing.expanded_indexer(i, x.ndim)
         self.assertArrayEqual(x[i], x[j])
         self.assertArrayEqual(self.set_to_zero(x, i), self.set_to_zero(x, j))
     # assertRaisesRegexp was deprecated and removed in Python 3.12;
     # assertRaisesRegex is the supported spelling
     with self.assertRaisesRegex(IndexError, "too many indices"):
         indexing.expanded_indexer(I[1, 2, 3], 2)
Beispiel #10
0
    def __getitem__(self, item):
        """Return the step-wise difference of the variable along dim 0.

        Reads ``self.accvn`` through ``ScaledVar`` (presumably applying
        scale/offset — confirm against its definition) and returns
        ``out[t] = acc[t] - acc[t-1]``, scaled by ``self._factor``.
        The first record has no predecessor, so the first requested step
        becomes NaN in that case.  A scalar time index yields a result
        without the time dimension.
        """

        # take care of ellipsis and other strange indexes
        item = list(indexing.expanded_indexer(item, len(self.dimensions)))

        # time is always going to be first dim I hope
        sl = item[0]
        was_scalar = False
        if np.isscalar(sl) and not isinstance(sl, slice):
            was_scalar = True
            sl = slice(sl, sl+1)

        # Ok, get the indexes right
        start = sl.start or 0
        stop = sl.stop or self._nel
        if stop < 0:
            stop += self._nel-1
        # shift one step back so we can difference against the previous record
        start -= 1
        do_nan = False
        if start < 0:
            # no record before the first one: that output step becomes NaN
            do_nan = True
        itemr = copy.deepcopy(item)
        item[0] = slice(start, stop-1)
        itemr[0] = slice(start+1, stop)

        # done
        with ScaledVar(self.nc.variables[self.accvn]) as var:
            if do_nan:
                item[0] = slice(0, stop-1)
                out = var[itemr]
                try:
                    # in case we have a masked array
                    out.unshare_mask()
                except AttributeError:
                    # plain ndarrays have no mask to unshare; anything
                    # else should not be silently swallowed
                    pass
                out[1:, ...] -= var[item]
                # np.NaN was removed in NumPy 2.0; np.nan is the same object
                out[0, ...] = np.nan
            else:
                out = var[itemr]
                out -= var[item]
        if was_scalar:
            # drop the time dimension the caller did not ask for
            out = out[0, ...]
        return out * self._factor
Beispiel #11
0
    def __getitem__(self, item):
        """Compute ``_ncl_slp`` (sea-level pressure, per the helper's name)
        for the requested selection, reading TK, P/PB, QVAPOR and PH/PHB
        from the underlying netCDF file."""

        # take care of ellipsis and other strange indexes
        item = list(indexing.expanded_indexer(item, len(self.dimensions)))
        # we need the empty dims for _ncl_slp() to work: promote scalar
        # indexes to length-1 slices and remember which axes to squeeze
        # back out of the result afterwards
        squeezax = []
        for i, c in enumerate(item):
            if np.isscalar(c) and not isinstance(c, slice):
                item[i] = slice(c, c+1)
                squeezax.append(i)
        # add a slice in the 4th dim (the full vertical column is needed
        # regardless of what the caller asked for)
        item.insert(self.ds, slice(0, self._ds_shape+1))
        item = tuple(item)

        # get data (ncvars, not vars: don't shadow the builtin)
        ncvars = self.nc.variables
        tk = ncvars['TK'][item]
        # total pressure = perturbation + base state
        p = ncvars['P'][item] + ncvars['PB'][item]
        q = ncvars['QVAPOR'][item]
        # geopotential (perturbation + base) to height via g = 9.81
        z = (ncvars['PH'][item] + ncvars['PHB'][item]) / 9.81
        # np.squeeze's documented axis type is int or tuple of ints
        return np.squeeze(_ncl_slp(z, tk, p, q), axis=tuple(squeezax))
Beispiel #12
0
 def expand(self, key):
     """Parse key using xarray utils to ensure we have dimension names."""
     if is_dict_like(key):
         # already a mapping of dimension name -> label
         return key
     labels = expanded_indexer(key, self.data_array.ndim)
     return dict(zip(self.data_array.dims, labels))