Example #1
    @classmethod
    def from_array(
        cls,
        data: ArrayLike,
        *,
        name: str = "unnamed",
        label: str = "unlabeled",
        quantities: Optional[Tuple[Tuple[str, str, str], ...]] = None,
        time: Optional[Union[Axis, int, float]] = None,
    ) -> "Particle":
        if time is None:
            time = Axis.from_array(0.0, name="time", label="time")
        else:
            if not isinstance(time, Axis):
                time = Axis.from_array(time, name="time", label="time")

        if not isinstance(data, da.Array):
            data = da.asanyarray(data)

        if quantities is None:
            if data.dtype.fields:
                quantities = tuple(
                    (f, f"{f} label", "") for f in data.dtype.fields)
            else:
                quantities = (("quant1", "quant1 label", ""), )
                new_dtype = np.dtype([("quant1", data.dtype)])
                data = unstructured_to_structured(data[..., np.newaxis],
                                                  new_dtype)
        else:
            if not data.dtype.fields:
                new_dtype = np.dtype([(q[0], data.dtype) for q in quantities])
                data = unstructured_to_structured(data, new_dtype)

        return cls(data, quantities, time, name, label)
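A minimal usage sketch of this constructor (the `Particle` and `Axis` classes are the ones shown above; final validation happens inside `cls(...)`, which is assumed to accept these values):

import numpy as np

# Unstructured input: wrapped into a single quantity named "quant1",
# time defaults to an Axis holding 0.0.
prt = Particle.from_array(np.random.random(100), name="electrons")

# Structured input: one quantity per field; a scalar time is wrapped
# into an Axis via Axis.from_array.
arr = np.zeros(100, dtype=[("x", float), ("p_x", float)])
prt = Particle.from_array(arr, time=1.5)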
Example #2
    @classmethod
    def from_array(
        cls,
        data: ArrayLike,
        *,
        name: str = "unnamed",
        label: str = "unlabeled",
        unit: str = "",
        axes: Optional[Sequence[ArrayLike]] = None,
    ) -> "GridDataset":
        if not isinstance(data, da.Array):
            data = da.asanyarray(data)

        if axes is None:
            axes = ()
            time_steps = None
            for i, l in enumerate(data.shape):
                if i == 0:
                    time_steps = l
                    time = Axis.from_array(da.arange(time_steps),
                                           name="time",
                                           label="time")
                    axes += (time, )
                else:
                    axis_shape = (time_steps, 1)
                    axis = Axis.from_array(da.tile(da.arange(l), axis_shape),
                                           name=f"axis{i-1}")
                    axes += (axis, )

        else:
            # ensure that every element in axes is an axis
            if any(not isinstance(ax, Axis) for ax in axes):
                tmp = []

                for i, ax in enumerate(axes):
                    # default names/labels for generated axes; use separate
                    # variables to avoid shadowing the `name` and `label`
                    # arguments, which are passed to `cls(...)` below
                    ax_name = "time" if i == 0 else f"axis{i-1}"
                    ax_label = "time" if i == 0 else "unlabeled"

                    if not isinstance(ax, Axis):
                        ax = Axis.from_array(da.asanyarray(ax),
                                             name=ax_name,
                                             label=ax_label)

                    tmp.append(ax)

                axes = tuple(tmp)

        return cls(data, axes, name, label, unit)
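A hedged usage sketch: with `axes=None`, the first dimension of the data is treated as time and per-step index axes are generated for the remaining dimensions, as in the code above:

import dask.array as da

data = da.random.random((5, 16, 32))   # 5 time steps of a 16x32 grid
grid = GridDataset.from_array(data, name="density", unit="a.u.")
# axes[0] is a "time" axis of length 5; the remaining axes are index
# axes tiled per time step, named "axis0" and "axis1".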
Example #3
    @classmethod
    def from_array(
        cls,
        data: Any,
        *,
        name: str = "unnamed",
        label: str = "unlabeled",
        unit: str = "",
        axes: Optional[Sequence[Any]] = None,
        time: Optional[Union[Axis, int, float]] = None,
    ) -> "GridArray":
        """
        Recommended way of creating a `GridArray` from array-like object. The
        arguments are used to for initialization but their validity is only
        checked at the initialization and not during call of `.from_array`.

        Note:
            The method `.from_array` only requires one argument and the other
            arguments are keyword arguments that allow to change the default
            behaviour.

        Arguments:
            data: Dask array or any object that can be converted to a dask array using
                `dask.array.asanyarray`.
            name: Name of the `GridArray`. The nanme has to be a valid identifier.
            label: Label of the `GridArray`. Any string labeling the grid is supported.
            unit: Unit of the `GridArray`. Any string can be used here.
            axes: Axes representing each dimension of the grid. Each axis has to
                match the length of the dimension to ensure shape consistency, e.g.:

                - GridArray with shape `(10,)` requires 1 axis with shape `(10,)`
                - GridArray with shape `(10, 15)` requires 2 axis with shape
                `(10,)` and `(15,)`

                If `None` axes based on the index of the grid are created with
                default naming.
        """
        if not isinstance(data, da.Array):
            data = da.asanyarray(data)

        if axes is None:
            axes = ()
            for i, l in enumerate(data.shape):
                axes += (Axis.from_array(da.arange(l), name=f"axis{i}"), )
        else:
            # ensure that every element in axes is an axis
            if any(not isinstance(ax, Axis) for ax in axes):
                tmp = []
                for i, ax in enumerate(axes):
                    if not isinstance(ax, Axis):
                        ax = Axis.from_array(ax, name=f"axis{i}")
                    tmp.append(ax)

                axes = tuple(tmp)

        if not isinstance(time, Axis):
            time = Axis.from_array(time, name="time", label="time")

        return cls(data, axes, time, name, label, unit)
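Following the shape rule from the docstring, a minimal usage sketch (the `GridArray` and `Axis` classes are assumed to be the ones shown above):

import numpy as np

data = np.zeros((10, 15))
x = np.linspace(0.0, 1.0, 10)
y = np.linspace(-1.0, 1.0, 15)

# Explicit axes: one per dimension, with lengths 10 and 15 respectively.
grid = GridArray.from_array(data, name="field", axes=[x, y], time=0.5)

# Without axes, index axes named "axis0" and "axis1" are generated.
grid = GridArray.from_array(data, time=0.0)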
Example #4
    @classmethod
    def from_array(
        cls,
        data: ArrayLike,
        *,
        name: str = "unnamed",
        label: str = "unlabeled",
        unit: str = "",
    ) -> "Axis":
        data = data if isinstance(data, da.Array) else da.asanyarray(data)
        return cls(data, name, label, unit)
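The `Axis` constructor above only wraps its input in a dask array; a short sketch of how it might be called (names and units are illustrative):

import numpy as np

ax = Axis.from_array(np.linspace(0.0, 2.0, 11), name="x", label="x position", unit="m")

# Plain sequences are accepted as well and end up as dask arrays internally.
cells = Axis.from_array([0, 1, 2, 3], name="cell_index")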
Example #5
    @classmethod
    def from_array(
        cls,
        data: ArrayLike,
        *,
        name: str = "unnamed",
        label: str = "unlabeled",
        unit: str = "",
        time: Optional[Union[Axis, int, float]] = None,
    ) -> "Quantity":
        data = data if isinstance(data, da.Array) else da.asanyarray(data)

        if time is None:
            time = Axis.from_array(0.0, name="time", label="time")
        else:
            if not isinstance(time, Axis):
                time = Axis.from_array(time, name="time", label="time")

        return cls(data, time, name, label, unit)
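A short sketch of the time handling, assuming the `Quantity` and `Axis` classes shown above:

import numpy as np

# time omitted: a default time axis holding 0.0 is created.
q = Quantity.from_array(np.arange(10), name="p_x", unit="arb. u.")

# A scalar time is wrapped into an Axis via Axis.from_array.
q = Quantity.from_array(np.arange(10), name="p_x", time=1.5)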
Example #6
def expand_and_stack(data: Iterable) -> da.Array:
    # Convert every element into a dask array.
    arrays = [d if isinstance(d, da.Array) else da.asanyarray(d) for d in data]
    # Determine the largest shape and expand every array to it, so that
    # `da.stack` receives arrays of identical shape.
    max_shape = max(arr.shape for arr in arrays)
    arrays = [expand_arr(arr, max_shape) for arr in arrays]
    return da.stack(arrays)
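A hedged usage sketch; `expand_arr` is defined elsewhere in this codebase and is assumed here to pad each array up to the common largest shape:

stacked = expand_and_stack([[1, 2], [3, 4, 5], [0, 1, 2, 3]])
# All inputs are padded to shape (4,), so the result has shape (3, 4).
print(stacked.shape)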
Example #7
import dask.array as da
import xarray as xr
from dask.array.utils import assert_eq

def test_asanyarray():
    y = da.asanyarray(xr.DataArray([1, 2, 3.0]))
    assert isinstance(y, da.Array)
    assert_eq(y, y)
Example #8
    def reflectance_from_tbs(self, sun_zenith, tb_near_ir, tb_thermal,
                             **kwargs):
        """Derive reflectances from Tb's in the 3.x band.

        The reflectance calculated is unitless and should be between 0 and 1.

        Inputs:

          sun_zenith: Sun zenith angle for every pixel - in degrees

          tb_near_ir: The 3.7 (or 3.9 or equivalent) IR Tb's at every pixel
                      (Kelvin)

          tb_thermal: The 10.8 (or 11 or 12 or equivalent) IR Tb's at every
                      pixel (Kelvin)

          tb_ir_co2: The 13.4 micron channel (or similar - co2 absorption band)
                     brightness temperatures at every pixel. If None, no CO2
                     absorption correction will be applied.

        """
        # Check for dask arrays
        if hasattr(tb_near_ir, 'compute') or hasattr(tb_thermal, 'compute'):
            compute = False
        else:
            compute = True
        if hasattr(tb_near_ir, 'mask') or hasattr(tb_thermal, 'mask'):
            is_masked = True
        else:
            is_masked = False

        if np.isscalar(tb_near_ir):
            tb_nir = array([
                tb_near_ir,
            ])
        else:
            tb_nir = asanyarray(tb_near_ir)

        if np.isscalar(tb_thermal):
            tb_therm = array([
                tb_thermal,
            ])
        else:
            tb_therm = asanyarray(tb_thermal)

        if tb_therm.shape != tb_nir.shape:
            errmsg = 'Dimensions do not match! {0} and {1}'.format(
                str(tb_therm.shape), str(tb_nir.shape))
            raise ValueError(errmsg)

        tb_ir_co2 = kwargs.get('tb_ir_co2')
        lut = kwargs.get('lut', self.lut)

        if tb_ir_co2 is None:
            co2corr = False
            tbco2 = None
        else:
            co2corr = True
            if np.isscalar(tb_ir_co2):
                tbco2 = array([
                    tb_ir_co2,
                ])
            else:
                tbco2 = asanyarray(tb_ir_co2)

        if not self.rsr:
            raise NotImplementedError("Reflectance calculations without "
                                      "rsr not yet supported!")

        # Assume rsr is in microns!!!
        # FIXME!
        self._rad3x_t11 = self.tb2radiance(tb_therm, lut=lut)['radiance']
        thermal_emiss_one = self._rad3x_t11 * self.rsr_integral

        l_nir = self.tb2radiance(tb_nir, lut=lut)['radiance']
        self._rad3x = l_nir.copy()
        l_nir *= self.rsr_integral

        if thermal_emiss_one.ravel().shape[0] < 10:
            LOG.info('thermal_emiss_one = %s', str(thermal_emiss_one))
        if l_nir.ravel().shape[0] < 10:
            LOG.info('l_nir = %s', str(l_nir))

        LOG.debug("Apply sun-zenith angle clipping between 0 and %5.2f",
                  self.masking_limit)
        sunz = sun_zenith.clip(0, self.sunz_threshold)
        mu0 = np.cos(np.deg2rad(sunz))

        # mu0 = np.where(np.less(mu0, 0.1), 0.1, mu0)
        self._solar_radiance = self.solar_flux * mu0 / np.pi

        # CO2 correction to the 3.9 radiance, only if tbs of a co2 band around
        # 13.4 micron is provided:
        if co2corr:
            self.derive_rad39_corr(tb_therm, tbco2)
            LOG.info("CO2 correction applied...")
        else:
            self._rad3x_correction = 1.0

        corrected_thermal_emiss_one = thermal_emiss_one * self._rad3x_correction
        nomin = l_nir - corrected_thermal_emiss_one
        denom = self._solar_radiance - corrected_thermal_emiss_one
        data = nomin / denom
        mask = denom < EPSILON

        if self.masking_limit is not None:
            sunzmask = (sun_zenith < 0.0) | (sun_zenith > self.masking_limit)
            logical_or(sunzmask, mask, out=mask)
        logical_or(mask, np.isnan(tb_nir), out=mask)

        self._r3x = where(mask, np.nan, data)

        # Reflectances should be between 0 and 1, but values above 1 are
        # perfectly possible and okay! (Multiply by 100 to get reflectances
        # in percent.)
        if hasattr(self._r3x, 'compute') and compute:
            res = self._r3x.compute()
        else:
            res = self._r3x
        if is_masked:
            res = np.ma.masked_invalid(res)
        return res
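For reference, this method is typically reached through pyspectral's Calculator class (a sketch; the platform, instrument, and band names are illustrative, and sunz, tb3, tb4 stand for arrays of matching shape):

from pyspectral.near_infrared_reflectance import Calculator

refl37 = Calculator('EOS-Aqua', 'modis', '20')
r39 = refl37.reflectance_from_tbs(sunz, tb3, tb4)
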
import numpy as np
import dask.array as da


def num_divisible(a, b, c):
    """Count the integers in the closed interval [a, b] that are divisible by c."""
    # Find the smallest multiple of c that is >= a.
    r = a % c
    if r == 0:
        start = a
    else:
        start = a + (c - r)

    # No multiple of c lies in [a, b].
    if start > b:
        return 0
    else:
        return 1 + (b - start) // c


# Vectorize the scalar function so it can be applied element-wise to arrays.
num_divisible_vect = np.vectorize(num_divisible)

# Each row is an (a, b, c) triple; chunk the rows two at a time.
x = da.asanyarray([(1, 100, 10), (16789, 445267839, 7), (34, 10**18, 3000),
                   (3, 7, 9)])
x = x.rechunk(chunks=(2, -1))

# map_blocks unpacks the columns of each block and drops axis 1,
# leaving one count per row.
y = x.map_blocks(lambda block: num_divisible_vect(*block.T),
                 chunks=(-1, ),
                 drop_axis=1,
                 dtype='i8')
print(y.compute())
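As a quick sanity check (a hypothetical snippet, not part of the original script), the first row can be verified against a brute-force count:

# Multiples of 10 in [1, 100] are 10, 20, ..., 100 -> 10 values.
assert num_divisible(1, 100, 10) == sum(1 for n in range(1, 101) if n % 10 == 0) == 10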