Example 1
    def interpolate_angles(self, angles, resolution):
        """Interpolate the angles."""
        # FIXME: interpolate in cartesian coordinates if the lons or lats are
        # problematic
        from geotiepoints.multilinear import MultilinearInterpolator

        geocoding = self.root.find('.//Tile_Geocoding')
        rows = int(
            geocoding.find('Size[@resolution="' + str(resolution) +
                           '"]/NROWS').text)
        cols = int(
            geocoding.find('Size[@resolution="' + str(resolution) +
                           '"]/NCOLS').text)

        smin = [0, 0]
        smax = np.array(angles.shape) - 1
        orders = angles.shape
        minterp = MultilinearInterpolator(smin, smax, orders)
        minterp.set_values(da.atleast_2d(angles.ravel()))

        x = da.arange(rows, dtype=angles.dtype,
                      chunks=CHUNK_SIZE) / (rows - 1) * (angles.shape[0] - 1)
        y = da.arange(cols, dtype=angles.dtype,
                      chunks=CHUNK_SIZE) / (cols - 1) * (angles.shape[1] - 1)
        xcoord, ycoord = da.meshgrid(x, y)
        return da.map_blocks(self._do_interp,
                             minterp,
                             xcoord,
                             ycoord,
                             dtype=angles.dtype,
                             chunks=xcoord.chunks)
Example 2
    def interpolate_angles(self, angles, resolution):
        # FIXME: interpolate in cartesian coordinates if the lons or lats are
        # problematic
        from geotiepoints.multilinear import MultilinearInterpolator

        geocoding = self.root.find('.//Tile_Geocoding')
        rows = int(geocoding.find('Size[@resolution="' + str(resolution) + '"]/NROWS').text)
        cols = int(geocoding.find('Size[@resolution="' + str(resolution) + '"]/NCOLS').text)

        smin = [0, 0]
        smax = np.array(angles.shape) - 1
        orders = angles.shape
        minterp = MultilinearInterpolator(smin, smax, orders)
        minterp.set_values(da.atleast_2d(angles.ravel()))

        def _do_interp(minterp, xcoord, ycoord):
            interp_points2 = np.vstack((xcoord.ravel(),
                                        ycoord.ravel()))
            res = minterp(interp_points2)
            return res.reshape(xcoord.shape)

        x = da.arange(rows, dtype=angles.dtype, chunks=CHUNK_SIZE) / (rows-1) * (angles.shape[0] - 1)
        y = da.arange(cols, dtype=angles.dtype, chunks=CHUNK_SIZE) / (cols-1) * (angles.shape[1] - 1)
        xcoord, ycoord = da.meshgrid(x, y)
        return da.map_blocks(_do_interp, minterp, xcoord, ycoord, dtype=angles.dtype,
                             chunks=xcoord.chunks)
Example 3
def zeroing(a_zeroing, Ag_old, Cg, Ah_old):
    """
    Zeroing: correct Ag_old, Ah_old
    Deprecated: copied to incl_h5ckc to remove the dependence on wafo, but modified there
    :param a_zeroing: dask.dataframe with columns 'Ax','Ay','Az'
    :param Ag_old, Cg: numpy.arrays, rotation matrix and shift for accelerometer
    :param Ah_old: numpy.array 3x3, rotation matrix for magnetometer
    :return: (Ag, Ah): numpy.arrays (3x3, 3x3), corrected rotation matrices
    """

    if not len(a_zeroing):
        print(f'zeroing(): no data {a_zeroing}, returning same coef')
        return Ag_old, Ah_old

    mean_countsG0 = da.atleast_2d(da.from_delayed(
        a_zeroing.loc[:, ('Ax', 'Ay', 'Az')].mean(
            ).values.to_delayed()[0], shape=(3,), dtype=np.float64, name='mean_G0'))  #
    Gxyz0old = fG(mean_countsG0, Ag_old, Cg)  # .compute()

    # Gxyz0old = delayed(fG, pure=True)(a.loc[:, ('Ax','Ay','Az')].mean().values, Ag_old, Cg).compute().compute()

    # Gxyz0old = [[np.NaN, np.NaN, np.NaN]] if np.nansum(isnan(iCalibr0V))>0 else \
    # fG(np.column_stack((np.nanmean(a['Ax'][slice(*iCalibr0V[0,:])]),
    # np.nanmean(a['Ay'][slice(*iCalibr0V[0,:])]),
    # np.nanmean(a['Az'][slice(*iCalibr0V[0,:])]) )), Ag_old, Cg)

    # fPitch = lambda Gxyz: -da.arctan2(Gxyz[0,:], da.sqrt(da.sum(da.square(Gxyz[1:,:]), 0)))
    # fRoll = lambda Gxyz: da.arctan2(Gxyz[1,:], Gxyz[2,:]) #da.arctan2(Gxyz[1,:], da.sqrt(da.sum(da.square(Gxyz[(0,2),:]), 0)))
    old1pitch = -fPitch(Gxyz0old).compute()
    old1roll = -fRoll(Gxyz0old).compute()
    print(
        f'zeroing pitch = {np.rad2deg(old1pitch[0])}, roll = {np.rad2deg(old1roll[0])} degrees ({old1pitch[0]}, {old1roll[0]} radians)')

    # 'coefficient'
    def rotate(A, old1pitch, old1roll):
        return np.transpose(np.dot(
            np.dot([[np.cos(old1pitch), 0, -np.sin(old1pitch)],
                    [0, 1, 0],
                    [np.sin(old1pitch), 0, np.cos(old1pitch)]],

                   [[1, 0, 0],
                    [0, np.cos(old1roll), np.sin(old1roll)],
                    [0, -np.sin(old1roll), np.cos(old1roll)]]),

            np.transpose(A)))

    Ag = rotate(Ag_old, old1pitch, old1roll)
    Ah = rotate(Ah_old, old1pitch, old1roll)

    # # test: should be close to zero:
    # Gxyz0 = fG(mean_countsG0, Ag, Cg)
    # #? Gxyz0mean = np.transpose([np.nanmean(Gxyz0, 1)])

    return Ag, Ah
Example 4
    def get_reflectance(self, sun_zenith, sat_zenith, azidiff, bandname, redband=None):
        """Get the reflectance from the three sun-sat angles"""
        # Get wavelength in nm for band:
        if isinstance(bandname, float):
            LOG.warning('A wavelength is provided instead of band name - ' +
                        'disregard the relative spectral responses and assume ' +
                        'it is the effective wavelength: %f (micro meter)', bandname)
            wvl = bandname * 1000.0
        else:
            wvl = self.get_effective_wavelength(bandname)
            if wvl is None:
                LOG.error("Can't get effective wavelength for band %s on platform %s and sensor %s",
                          str(bandname), self.platform_name, self.sensor)
                return None
            else:
                wvl = wvl * 1000.0

        rayl, wvl_coord, azid_coord, satz_sec_coord, sunz_sec_coord = \
            self.get_reflectance_lut()

        # force dask arrays
        compute = False
        if HAVE_DASK and not isinstance(sun_zenith, Array):
            compute = True
            sun_zenith = from_array(sun_zenith, chunks=sun_zenith.shape)
            sat_zenith = from_array(sat_zenith, chunks=sat_zenith.shape)
            azidiff = from_array(azidiff, chunks=azidiff.shape)
            if redband is not None:
                redband = from_array(redband, chunks=redband.shape)

        clip_angle = rad2deg(arccos(1. / sunz_sec_coord.max()))
        sun_zenith = clip(sun_zenith, 0, clip_angle)
        sunzsec = 1. / cos(deg2rad(sun_zenith))
        clip_angle = rad2deg(arccos(1. / satz_sec_coord.max()))
        sat_zenith = clip(sat_zenith, 0, clip_angle)
        satzsec = 1. / cos(deg2rad(sat_zenith))
        shape = sun_zenith.shape

        if not(wvl_coord.min() < wvl < wvl_coord.max()):
            LOG.warning(
                "Effective wavelength for band %s outside 400-800 nm range!",
                str(bandname))
            LOG.info(
                "Set the rayleigh/aerosol reflectance contribution to zero!")
            if HAVE_DASK:
                chunks = sun_zenith.chunks if redband is None \
                    else redband.chunks
                res = zeros(shape, chunks=chunks)
                return res.compute() if compute else res
            else:
                return zeros(shape)

        idx = np.searchsorted(wvl_coord, wvl)
        wvl1 = wvl_coord[idx - 1]
        wvl2 = wvl_coord[idx]

        fac = (wvl2 - wvl) / (wvl2 - wvl1)
        raylwvl = fac * rayl[idx - 1, :, :, :] + (1 - fac) * rayl[idx, :, :, :]
        tic = time.time()

        smin = [sunz_sec_coord[0], azid_coord[0], satz_sec_coord[0]]
        smax = [sunz_sec_coord[-1], azid_coord[-1], satz_sec_coord[-1]]
        orders = [
            len(sunz_sec_coord), len(azid_coord), len(satz_sec_coord)]
        f_3d_grid = atleast_2d(raylwvl.ravel())

        if HAVE_DASK and isinstance(smin[0], Array):
            # compute all of these at the same time before passing to the interpolator
            # otherwise they are computed separately
            smin, smax, orders, f_3d_grid = da.compute(smin, smax, orders, f_3d_grid)
        minterp = MultilinearInterpolator(smin, smax, orders)
        minterp.set_values(f_3d_grid)

        def _do_interp(minterp, sunzsec, azidiff, satzsec):
            interp_points2 = np.vstack((sunzsec.ravel(),
                                        180 - azidiff.ravel(),
                                        satzsec.ravel()))
            res = minterp(interp_points2)
            return res.reshape(sunzsec.shape)

        if HAVE_DASK:
            ipn = map_blocks(_do_interp, minterp, sunzsec, azidiff,
                             satzsec, dtype=raylwvl.dtype,
                             chunks=azidiff.chunks)
        else:
            ipn = _do_interp(minterp, sunzsec, azidiff, satzsec)

        LOG.debug("Time - Interpolation: {0:f}".format(time.time() - tic))

        ipn *= 100
        res = ipn
        if redband is not None:
            res = where(redband < 20., res,
                        (1 - (redband - 20) / 80) * res)

        res = clip(res, 0, 100)
        if compute:
            res = res.compute()
        return res
Example 5
def smooth(xds,
           dv='IMAGE',
           kernel='gaussian',
           size=[1., 1., 30.],
           current=None,
           scale=1.0,
           name='BEAM'):
    """                                                                                                                                                                                                     
    Smooth data along the spatial plane of the image cube.
    
    Computes a correcting beam to produce defined size when kernel=gaussian and current is defined.  Otherwise the size
    or existing beam is used directly.

    Parameters
    ----------
    xds : xarray.core.dataset.Dataset
        input Image Dataset
    dv : str
        name of data_var in xds to smooth. Default is 'IMAGE'
    kernel : str
        Type of kernel to use:'boxcar', 'gaussian' or the name of a data var in this xds.  Default is 'gaussian'.
    size : list of floats
        for gaussian kernel, list of three values corresponding to major and minor axes (in arcseconds) and position angle (in degrees).
        for boxcar kernel, list of two valuess corresponding to l,m bin width.  Default is [1., 1., 30.] (for a gaussian)
    current : list of floats
        same structure as size, a list of three values corresponding to major and minor axes (in arcseconds) and position
        angle (in degrees) of the current beam applied to the image.  Default is None
    scale : float
        gain factor after convolution. Default is unity gain (1.0)
    name : str
        dataset variable name for kernel, overwrites if already present
        
    Returns                                                                                                                                                                                                 
    -------                                                                                                                                                                                                 
    xarray.core.dataset.Dataset                                                                                                                                                                             
        output Image
    """
    import xarray
    import dask.array as da
    import numpy as np
    import cngi._utils._beams as chb

    # compute kernel beam
    size_corr = None
    if kernel in xds.data_vars:
        beam = xds[kernel] / xds[kernel].sum(axis=[0, 1])
    elif kernel == 'gaussian':
        beam, parms_tar = chb.synthesizedbeam(size[0], size[1], size[2],
                                              len(xds.l), len(xds.m),
                                              xds.incr[:2])
        beam = xarray.DataArray(da.from_array(beam /
                                              np.sum(beam, axis=(0, 1))),
                                dims=['l', 'm'],
                                name=name)  # normalized to unity
        cf_tar = ((4 * np.pi**2) /
                  (4 * parms_tar[0] * parms_tar[2] -
                   parms_tar[1]**2)) * parms_tar  # equation 12
        size_corr = size
    else:  # boxcar
        incr = np.abs(xds.incr[:2]) * 180 / np.pi * 60 * 60
        xx, yy = np.mgrid[:int(np.round(size[0] / incr[0])
                               ), :int(np.round(size[1] / incr[1]))]
        box = np.array((xx.ravel() - np.max(xx) // 2,
                        yy.ravel() - np.max(yy) // 2)) + np.array(
                            [len(xds.l) // 2, len(xds.m) // 2])[:, None]
        beam = np.zeros((len(xds.l), len(xds.m)))
        beam[box[0], box[1]] = 1.0
        beam = xarray.DataArray(da.from_array(beam /
                                              np.sum(beam, axis=(0, 1))),
                                dims=['l', 'm'],
                                name=name)  # normalized to unity

    # compute the correcting beam if necessary
    # this is done analytically using the parameters of the current beam, not the actual data
    # see equations 19 - 26 here:
    # https://casa.nrao.edu/casadocs-devel/stable/memo-series/casa-memos/casa_memo10_restoringbeam.pdf/view
    if (kernel == 'gaussian') and (current is not None):
        parms_curr = chb.synthesizedbeam(current[0], current[1], current[2],
                                         len(xds.l), len(xds.m),
                                         xds.incr[:2])[1]
        cf_curr = ((4 * np.pi**2) /
                   (4 * parms_curr[0] * parms_curr[2] -
                    parms_curr[1]**2)) * parms_curr  # equation 12
        cf_corr = (cf_tar - cf_curr)  # equation 19
        c_corr = ((4 * np.pi**2) / (4 * cf_corr[0] * cf_corr[2] -
                                    cf_corr[1]**2)) * cf_corr  # equation 12
        # equations 21 - 23
        d1 = np.sqrt(8 * np.log(2) /
                     ((c_corr[0] + c_corr[2]) -
                      np.sqrt(c_corr[0]**2 - 2 * c_corr[0] * c_corr[2] +
                              c_corr[2]**2 + c_corr[1]**2)))
        d2 = np.sqrt(8 * np.log(2) /
                     ((c_corr[0] + c_corr[2]) +
                      np.sqrt(c_corr[0]**2 - 2 * c_corr[0] * c_corr[2] +
                              c_corr[2]**2 + c_corr[1]**2)))
        theta = 0.5 * np.arctan2(-c_corr[1], c_corr[2] - c_corr[0])

        # make a beam out of the correcting size
        incr_arcsec = np.abs(xds.incr[:2]) * 180 / np.pi * 60 * 60
        size_corr = [
            d1 * incr_arcsec[0], d2 * incr_arcsec[1], theta * 180 / np.pi
        ]
        scale_corr = (4 * np.log(2) / (np.pi * d1 * d2)) * (
            size[0] * size[1] / (current[0] * current[1]))  # equation 20
        beam = scale_corr * chb.synthesizedbeam(size_corr[0], size_corr[1],
                                                size_corr[2], len(xds.l),
                                                len(xds.m), xds.incr[:2])[0]
        beam = xarray.DataArray(
            da.from_array(beam),
            dims=[xds[dv].dims[dd] for dd in range(beam.ndim)],
            name=name)

    # scale and FFT the kernel beam
    da_beam = da.atleast_2d(beam.data)
    if da_beam.ndim == 2:
        da_beam = da_beam[:, :, None, None, None]
    if da_beam.ndim < 5:
        da_beam = da_beam[:, :, None, :, :]
    ft_beam = da.fft.fft2((da_beam * scale), axes=[0, 1])

    # FFT the image, multiply by the kernel beam FFT, then inverse FFT it back
    ft_image = da.fft.fft2(xds[dv].data, axes=[0, 1])
    ft_smooth = ft_image * ft_beam
    ift_smooth = da.fft.fftshift(da.fft.ifft2(ft_smooth, axes=[0, 1]),
                                 axes=[0, 1])

    # store the smooth image and kernel beam back in the xds
    xda_smooth = xarray.DataArray(da.absolute(ift_smooth),
                                  dims=xds[dv].dims,
                                  coords=xds[dv].coords)
    new_xds = xds.assign({dv: xda_smooth, name: beam * scale})
    if size_corr is not None:
        new_xds = new_xds.assign_attrs({name + '_params': tuple(size_corr)})
    return new_xds
Example 6
    def get_reflectance(self, sun_zenith, sat_zenith, azidiff, bandname, redband=None):
        """Get the reflectance from the three sun-sat angles"""
        # Get wavelength in nm for band:
        if isinstance(bandname, float):
            LOG.warning('A wavelength is provided instead of band name - ' +
                        'disregard the relative spectral responses and assume ' +
                        'it is the effective wavelength: %f (micro meter)', bandname)
            wvl = bandname * 1000.0
        else:
            wvl = self.get_effective_wavelength(bandname)
            wvl = wvl * 1000.0

        rayl, wvl_coord, azid_coord, satz_sec_coord, sunz_sec_coord = self.get_reflectance_lut()

        # force dask arrays
        compute = False
        if HAVE_DASK and not isinstance(sun_zenith, Array):
            compute = True
            sun_zenith = from_array(sun_zenith, chunks=sun_zenith.shape)
            sat_zenith = from_array(sat_zenith, chunks=sat_zenith.shape)
            azidiff = from_array(azidiff, chunks=azidiff.shape)
            if redband is not None:
                redband = from_array(redband, chunks=redband.shape)

        clip_angle = rad2deg(arccos(1. / sunz_sec_coord.max()))
        sun_zenith = clip(sun_zenith, 0, clip_angle)
        sunzsec = 1. / cos(deg2rad(sun_zenith))
        clip_angle = rad2deg(arccos(1. / satz_sec_coord.max()))
        sat_zenith = clip(sat_zenith, 0, clip_angle)
        satzsec = 1. / cos(deg2rad(sat_zenith))
        shape = sun_zenith.shape

        if not(wvl_coord.min() < wvl < wvl_coord.max()):
            LOG.warning(
                "Effective wavelength for band %s outside 400-800 nm range!",
                str(bandname))
            LOG.info(
                "Set the rayleigh/aerosol reflectance contribution to zero!")
            if HAVE_DASK:
                chunks = sun_zenith.chunks if redband is None else redband.chunks
                res = zeros(shape, chunks=chunks)
                return res.compute() if compute else res
            else:
                return zeros(shape)

        idx = np.searchsorted(wvl_coord, wvl)
        wvl1 = wvl_coord[idx - 1]
        wvl2 = wvl_coord[idx]

        fac = (wvl2 - wvl) / (wvl2 - wvl1)
        raylwvl = fac * rayl[idx - 1, :, :, :] + (1 - fac) * rayl[idx, :, :, :]
        tic = time.time()

        smin = [sunz_sec_coord[0], azid_coord[0], satz_sec_coord[0]]
        smax = [sunz_sec_coord[-1], azid_coord[-1], satz_sec_coord[-1]]
        orders = [
            len(sunz_sec_coord), len(azid_coord), len(satz_sec_coord)]
        f_3d_grid = atleast_2d(raylwvl.ravel())

        if HAVE_DASK and isinstance(smin[0], Array):
            # compute all of these at the same time before passing to the interpolator
            # otherwise they are computed separately
            smin, smax, orders, f_3d_grid = da.compute(smin, smax, orders, f_3d_grid)
        minterp = MultilinearInterpolator(smin, smax, orders)
        minterp.set_values(f_3d_grid)

        if HAVE_DASK:
            ipn = map_blocks(self._do_interp, minterp, sunzsec, azidiff,
                             satzsec, dtype=raylwvl.dtype, chunks=azidiff.chunks)
        else:
            ipn = self._do_interp(minterp, sunzsec, azidiff, satzsec)

        LOG.debug("Time - Interpolation: {0:f}".format(time.time() - tic))

        ipn *= 100
        res = ipn
        if redband is not None:
            res = where(redband < 20., res,
                        (1 - (redband - 20) / 80) * res)

        res = clip(res, 0, 100)
        if compute:
            res = res.compute()
        return res