Example #1
    def resample_to_area(self, target_area_def, resample_method=None):
        """
        Resample existing scene to the provided area definition

        """
        if resample_method not in ['nn', 'gaussian']:
            raise Exception('Resample method {} not known'.format(resample_method))

        attributes_list_to_pass = ['bands', 'timestamp']
        resampled_scene = GenericScene()
        resampled_scene.area_def = target_area_def
        copy_attributes(self, resampled_scene, attributes_list_to_pass)

        try:
            self.area_def = geometry.SwathDefinition(lons=self.longitudes, lats=self.latitudes)
        except Exception:
            self.get_area_def()

        if resample_method == 'nn':
            neighbours = 1
        else:
            neighbours = 8

        valid_input_index, valid_output_index, index_array, distance_array = \
                kd_tree.get_neighbour_info(self.area_def, resampled_scene.area_def,
                                           resampled_scene.area_def.pixel_size_x * 2.5, neighbours=neighbours, nprocs=1)

        bands_number = len(resampled_scene.bands)

        for i, band in enumerate(resampled_scene.bands.values()):

            print "Resampling band {0:d}/{1:d}".format(i+1, bands_number)
            swath_data = deepcopy(band.data)

            if resample_method == 'nn':
                band.data = kd_tree.get_sample_from_neighbour_info('nn', resampled_scene.area_def.shape,
                                                                   swath_data,
                                                                   valid_input_index,
                                                                   valid_output_index,
                                                                   index_array)

            elif resample_method == 'gaussian':

                radius_of_influence = resampled_scene.area_def.pixel_size_x * 2.5
                sigma = pr.utils.fwhm2sigma(radius_of_influence * 1.5)
                gauss = lambda r: numpy.exp(-r ** 2 / float(sigma) ** 2)

                band.data = kd_tree.get_sample_from_neighbour_info('custom', resampled_scene.area_def.shape,
                                                                    swath_data,
                                                                    valid_input_index,
                                                                    valid_output_index,
                                                                    index_array,
                                                                    distance_array=distance_array,
                                                                    weight_funcs=gauss,
                                                                    fill_value=0,
                                                                    with_uncert=False)

            else:
                raise Exception('Resampling method not known')
        return resampled_scene
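
The method above follows pyresample's two-step kd-tree workflow: compute the neighbour information once with kd_tree.get_neighbour_info, then apply it per band with kd_tree.get_sample_from_neighbour_info ('nn' for nearest neighbour, 'custom' plus a Gaussian weight function for Gaussian resampling). Below is a minimal, self-contained sketch of that workflow; the EASE-grid area definition and the synthetic swath are illustrative assumptions, not values taken from the project above.

# Minimal sketch of the two-step kd-tree workflow used above (the EASE area
# definition and the synthetic swath are for illustration only).
import numpy as np
from pyresample import geometry, kd_tree

area_def = geometry.AreaDefinition(
    'ease_sh', 'Antarctic EASE grid', 'ease_sh',
    {'proj': 'laea', 'lat_0': '-90', 'lon_0': '0', 'a': '6371228.0', 'units': 'm'},
    425, 425, (-5326849.0625, -5326849.0625, 5326849.0625, 5326849.0625))

lons = np.fromfunction(lambda y, x: 3 + 0.1 * x, (500, 100))
lats = np.fromfunction(lambda y, x: -60 - 0.02 * y, (500, 100))
data = np.fromfunction(lambda y, x: y * x, (500, 100))
swath_def = geometry.SwathDefinition(lons=lons, lats=lats)

radius = area_def.pixel_size_x * 2.5

# Neighbour lookup is done once and reused for every band.
valid_input_index, valid_output_index, index_array, distance_array = \
    kd_tree.get_neighbour_info(swath_def, area_def, radius, neighbours=8)

# Nearest neighbour: only the first (closest) neighbour column is needed.
nn_result = kd_tree.get_sample_from_neighbour_info(
    'nn', area_def.shape, data.ravel(),
    valid_input_index, valid_output_index, index_array[:, 0])

# Gaussian weighting goes through the 'custom' resample type.
sigma = radius / 2.0
gauss = lambda r: np.exp(-r ** 2 / (2.0 * sigma ** 2))
gauss_result = kd_tree.get_sample_from_neighbour_info(
    'custom', area_def.shape, data.ravel(),
    valid_input_index, valid_output_index, index_array,
    distance_array=distance_array, weight_funcs=gauss, fill_value=0)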
Example #2
    def test_custom_multi_from_sample(self):
        def wf1(dist):
            return 1 - dist / 100000.0

        def wf2(dist):
            return 1

        def wf3(dist):
            return numpy.cos(dist) ** 2

        data = numpy.fromfunction(lambda y, x: (y + x) * 10 ** -6, (5000, 100))
        lons = numpy.fromfunction(
            lambda y, x: 3 + (10.0 / 100) * x, (5000, 100))
        lats = numpy.fromfunction(
            lambda y, x: 75 - (50.0 / 5000) * y, (5000, 100))
        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
        data_multi = numpy.column_stack((data.ravel(), data.ravel(),
                                         data.ravel()))

        if (sys.version_info < (2, 6) or
                (sys.version_info >= (3, 0) and sys.version_info < (3, 4))):
            valid_input_index, valid_output_index, index_array, distance_array = \
                kd_tree.get_neighbour_info(swath_def,
                                           self.area_def,
                                           50000, segments=1)
        else:
            with warnings.catch_warnings(record=True) as w:
                valid_input_index, valid_output_index, index_array, distance_array = \
                    kd_tree.get_neighbour_info(swath_def,
                                               self.area_def,
                                               50000, segments=1)
                self.assertFalse(
                    len(w) != 1, 'Failed to create neighbour radius warning')
                self.assertFalse(('Possible more' not in str(
                    w[0].message)), 'Failed to create correct neighbour radius warning')

        res = kd_tree.get_sample_from_neighbour_info('custom', (800, 800),
                                                     data_multi,
                                                     valid_input_index, valid_output_index,
                                                     index_array, distance_array,
                                                     weight_funcs=[wf1, wf2, wf3])

        cross_sum = res.sum()

        expected = 1461.842980746
        self.assertAlmostEqual(cross_sum, expected,
                               msg='Swath multi channel custom resampling from neighbour info failed 1')
        res = kd_tree.get_sample_from_neighbour_info('custom', (800, 800),
                                                     data_multi,
                                                     valid_input_index, valid_output_index,
                                                     index_array, distance_array,
                                                     weight_funcs=[wf1, wf2, wf3])

        # Look for error where input data has been manipulated
        cross_sum = res.sum()
        expected = 1461.842980746
        self.assertAlmostEqual(cross_sum, expected,
                               msg='Swath multi channel custom resampling from neighbour info failed 2')
Example #4
    def test_custom_multi_from_sample(self):
        def wf1(dist):
            return 1 - dist / 100000.0

        def wf2(dist):
            return 1

        def wf3(dist):
            return np.cos(dist)**2

        data = np.fromfunction(lambda y, x: (y + x) * 10**-6, (5000, 100))
        lons = np.fromfunction(lambda y, x: 3 + (10.0 / 100) * x, (5000, 100))
        lats = np.fromfunction(lambda y, x: 75 - (50.0 / 5000) * y,
                               (5000, 100))
        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
        data_multi = np.column_stack(
            (data.ravel(), data.ravel(), data.ravel()))

        with catch_warnings(UserWarning) as w:
            valid_input_index, valid_output_index, index_array, distance_array = \
                kd_tree.get_neighbour_info(swath_def,
                                           self.area_def,
                                           50000, segments=1)
            self.assertFalse(len(w) != 1)
            self.assertFalse(('Possible more' not in str(w[0].message)))

        res = kd_tree.get_sample_from_neighbour_info(
            'custom', (800, 800),
            data_multi,
            valid_input_index,
            valid_output_index,
            index_array,
            distance_array,
            weight_funcs=[wf1, wf2, wf3])

        cross_sum = res.sum()

        expected = 1461.8428378742638
        self.assertAlmostEqual(cross_sum, expected)
        res = kd_tree.get_sample_from_neighbour_info(
            'custom', (800, 800),
            data_multi,
            valid_input_index,
            valid_output_index,
            index_array,
            distance_array,
            weight_funcs=[wf1, wf2, wf3])

        # Look for error where input data has been manipulated
        cross_sum = res.sum()
        expected = 1461.8428378742638
        self.assertAlmostEqual(cross_sum, expected)
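
The test above passes a list of weight functions, one per channel of the stacked array, which is how pyresample expects weight_funcs for multi-channel 'custom' resampling (a single callable is used for single-channel data). The hedged sketch below repeats that call pattern on synthetic data and additionally requests uncertainty output with with_uncert=True, which, per pyresample's documentation, makes weighted resampling return the result together with standard-deviation and neighbour-count arrays; all geometries and numbers are assumptions for illustration.

# Sketch: multi-channel 'custom' resampling with one weight function per
# channel; with_uncert=True adds stddev/count output (behaviour as documented
# by pyresample).  Geometries and values are illustrative assumptions.
import numpy as np
from pyresample import geometry, kd_tree

lons = np.fromfunction(lambda y, x: 3 + 0.1 * x, (1000, 100))
lats = np.fromfunction(lambda y, x: 75 - 0.02 * y, (1000, 100))
swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
area_def = geometry.AreaDefinition(
    'ease_nh', 'Arctic EASE grid', 'ease_nh',
    {'proj': 'laea', 'lat_0': '90', 'lon_0': '0', 'a': '6371228.0', 'units': 'm'},
    425, 425, (-5326849.0625, -5326849.0625, 5326849.0625, 5326849.0625))

data = np.fromfunction(lambda y, x: (y + x) * 1e-6, (1000, 100))
data_multi = np.column_stack((data.ravel(), data.ravel(), data.ravel()))

valid_input_index, valid_output_index, index_array, distance_array = \
    kd_tree.get_neighbour_info(swath_def, area_def, 50000, neighbours=8)

wf1 = lambda dist: 1 - dist / 100000.0   # one weight function ...
wf2 = lambda dist: 1                     # ... per channel ...
wf3 = lambda dist: np.cos(dist) ** 2     # ... of data_multi

result, stddev, count = kd_tree.get_sample_from_neighbour_info(
    'custom', area_def.shape, data_multi,
    valid_input_index, valid_output_index, index_array,
    distance_array=distance_array,
    weight_funcs=[wf1, wf2, wf3],
    fill_value=0,
    with_uncert=True)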
Example #6
    def project_array(self, data):
        """Project an array *data* along the given Projector object.
        """

        if self.mode == "nearest":
            if not 'valid_index' in self._cache:
                self._cache['valid_index'] = self._file_cache['valid_index']
                self._cache['valid_output_index'] = \
                    self._file_cache['valid_output_index']
                self._cache['index_array'] = self._file_cache['index_array']

            valid_index, valid_output_index, index_array = \
                (self._cache['valid_index'],
                 self._cache['valid_output_index'],
                 self._cache['index_array'])

            res = kd_tree.get_sample_from_neighbour_info('nn',
                                                         self.out_area.shape,
                                                         data,
                                                         valid_index,
                                                         valid_output_index,
                                                         index_array,
                                                         fill_value=None)

        elif self.mode == "quick":
            if not 'row_idx' in self._cache:
                self._cache['row_idx'] = self._file_cache['row_idx']
                self._cache['col_idx'] = self._file_cache['col_idx']
            row_idx, col_idx = self._cache['row_idx'], self._cache['col_idx']
            img = image.ImageContainer(data, self.in_area, fill_value=None)
            res = np.ma.array(img.get_array_from_linesample(row_idx, col_idx),
                              dtype=data.dtype)

        return res
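
The projector above only reads precomputed neighbour indices out of a file cache before calling get_sample_from_neighbour_info. A hedged sketch of how such a cache might be written and reloaded with plain NumPy follows; the file name, swath and area definition are assumptions for illustration, not the project's actual cache format.

# Sketch only: persist the neighbour-info arrays once, reload them later and
# feed them straight into get_sample_from_neighbour_info.  File name and
# geometries are hypothetical.
import numpy as np
from pyresample import geometry, kd_tree

lons = np.fromfunction(lambda y, x: 3 + x, (50, 10))
lats = np.fromfunction(lambda y, x: 75 - y, (50, 10))
swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
area_def = geometry.AreaDefinition(
    'ease_nh', 'Arctic EASE grid', 'ease_nh',
    {'proj': 'laea', 'lat_0': '90', 'lon_0': '0', 'a': '6371228.0', 'units': 'm'},
    425, 425, (-5326849.0625, -5326849.0625, 5326849.0625, 5326849.0625))

valid_index, valid_output_index, index_array, _ = kd_tree.get_neighbour_info(
    swath_def, area_def, 50000, neighbours=1)

# Write the cache once ...
np.savez('nn_cache.npz',
         valid_index=valid_index,
         valid_output_index=valid_output_index,
         index_array=index_array)

# ... then reuse it for any number of arrays on the same geometries.
cache = np.load('nn_cache.npz')
data = np.fromfunction(lambda y, x: y * x, (50, 10))
res = kd_tree.get_sample_from_neighbour_info(
    'nn', area_def.shape, data.ravel(),
    cache['valid_index'], cache['valid_output_index'], cache['index_array'],
    fill_value=None)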
Example #7
 def test_masked_multi_from_sample(self):
     data = numpy.ones((50, 10))
     data[:, 5:] = 2
     mask1 = numpy.ones((50, 10))
     mask1[:, :5] = 0
     mask2 = numpy.ones((50, 10))
     mask2[:, 5:] = 0
     mask3 = numpy.ones((50, 10))
     mask3[:25, :] = 0
     data_multi = numpy.column_stack(
         (data.ravel(), data.ravel(), data.ravel()))
     mask_multi = numpy.column_stack(
         (mask1.ravel(), mask2.ravel(), mask3.ravel()))
     masked_data = numpy.ma.array(data_multi, mask=mask_multi)
     lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
     lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
     swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
     valid_input_index, valid_output_index, index_array, distance_array = \
         kd_tree.get_neighbour_info(swath_def,
                                    self.area_def,
                                    50000, neighbours=1, segments=1)
     res = kd_tree.get_sample_from_neighbour_info('nn', (800, 800),
                                                  masked_data,
                                                  valid_input_index,
                                                  valid_output_index, index_array,
                                                  fill_value=None)
     expected_fill_mask = numpy.fromfile(os.path.join(os.path.dirname(__file__),
                                                      'test_files',
                                                      'mask_test_full_fill_multi.dat'),
                                         sep=' ').reshape((800, 800, 3))
     fill_mask = res.mask
     self.assertTrue(numpy.array_equal(fill_mask, expected_fill_mask),
                     msg='Failed to create fill mask on masked data')
Example #8
    def resample_to_area(self):
        gridded_scene = GriddedSatScene()
        attributes_list_to_pass = ['bands', 'area_def', 'area_name']
        self.get_area_def()
        copy_attributes(self, gridded_scene, attributes_list_to_pass)

        try:
            self.swath_area_def = geometry.SwathDefinition(lons=self.longitudes, lats=self.latitudes)
        except Exception:
            self.scene.get_area_def()
            self.swath_area_def = self.scene.area_def

        valid_input_index, valid_output_index, index_array, distance_array = \
                kd_tree.get_neighbour_info(self.swath_area_def, self.area_def,
                                            self.area_def.pixel_size_x * 2.5, neighbours=1)
        bands_number = len(self.bands)

        for i, band in enumerate(self.bands.values()):
            print "Resampling band {0:d}/{1:d}".format(i+1, bands_number)
            swath_data = band.data.copy()
            band.data = kd_tree.get_sample_from_neighbour_info('nn', self.area_def.shape,
                                                                swath_data,
                                                                valid_input_index,
                                                                valid_output_index,
                                                                index_array)
        gridded_scene.gridded = True
        return gridded_scene
Example #10
    def test_custom_multi_from_sample(self):
        def wf1(dist):
            return 1 - dist / 100000.0

        def wf2(dist):
            return 1

        def wf3(dist):
            return np.cos(dist) ** 2

        data = np.fromfunction(lambda y, x: (y + x) * 10 ** -6, (5000, 100))
        lons = np.fromfunction(
            lambda y, x: 3 + (10.0 / 100) * x, (5000, 100))
        lats = np.fromfunction(
            lambda y, x: 75 - (50.0 / 5000) * y, (5000, 100))
        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
        data_multi = np.column_stack((data.ravel(), data.ravel(),
                                      data.ravel()))

        with catch_warnings(UserWarning) as w:
            valid_input_index, valid_output_index, index_array, distance_array = \
                kd_tree.get_neighbour_info(swath_def,
                                           self.area_def,
                                           50000, segments=1)
            self.assertFalse(len(w) != 1)
            self.assertFalse(('Possible more' not in str(w[0].message)))

        res = kd_tree.get_sample_from_neighbour_info('custom', (800, 800),
                                                     data_multi,
                                                     valid_input_index, valid_output_index,
                                                     index_array, distance_array,
                                                     weight_funcs=[wf1, wf2, wf3])

        cross_sum = res.sum()

        expected = 1461.8428378742638
        self.assertAlmostEqual(cross_sum, expected)
        res = kd_tree.get_sample_from_neighbour_info('custom', (800, 800),
                                                     data_multi,
                                                     valid_input_index, valid_output_index,
                                                     index_array, distance_array,
                                                     weight_funcs=[wf1, wf2, wf3])

        # Look for error where input data has been manipulated
        cross_sum = res.sum()
        expected = 1461.8428378742638
        self.assertAlmostEqual(cross_sum, expected)
Example #11
    def compute(self, data, weight_funcs=None, fill_value=None, with_uncert=False, **kwargs):

        del kwargs

        return get_sample_from_neighbour_info('nn',
                                              self.target_geo_def.shape,
                                              data,
                                              self.cache["valid_input_index"],
                                              self.cache["valid_output_index"],
                                              self.cache["index_array"],
                                              distance_array=self.cache["distance_array"],
                                              weight_funcs=weight_funcs,
                                              fill_value=fill_value,
                                              with_uncert=with_uncert)
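
The compute() method above only consumes a cache that was filled elsewhere. A small hedged sketch of populating such a cache from get_neighbour_info and passing it back in (synthetic geometries; the surrounding resampler class is not shown):

# Hypothetical cache population for a compute()-style wrapper: the dictionary
# keys mirror the ones read above.  Geometries are illustrative only.
import numpy as np
from pyresample import geometry
from pyresample.kd_tree import get_neighbour_info, get_sample_from_neighbour_info

source_geo_def = geometry.SwathDefinition(
    lons=np.fromfunction(lambda y, x: 3 + x, (50, 10)),
    lats=np.fromfunction(lambda y, x: 75 - y, (50, 10)))
target_geo_def = geometry.SwathDefinition(
    lons=np.fromfunction(lambda y, x: 3 + 0.5 * x, (20, 20)),
    lats=np.fromfunction(lambda y, x: 70 - 0.5 * y, (20, 20)))

# Fill the cache once.
valid_input_index, valid_output_index, index_array, distance_array = \
    get_neighbour_info(source_geo_def, target_geo_def, 50000, neighbours=1)
cache = {"valid_input_index": valid_input_index,
         "valid_output_index": valid_output_index,
         "index_array": index_array,
         "distance_array": distance_array}

# Any number of arrays on the same source geometry can now be resampled.
data = np.fromfunction(lambda y, x: y * x, (50, 10)).ravel()
result = get_sample_from_neighbour_info(
    'nn', target_geo_def.shape, data,
    cache["valid_input_index"], cache["valid_output_index"],
    cache["index_array"], distance_array=cache["distance_array"],
    fill_value=None)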
Example #12
 def test_nearest_from_sample(self):
     data = numpy.fromfunction(lambda y, x: y * x, (50, 10))
     lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
     lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
     swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
     valid_input_index, valid_output_index, index_array, distance_array = kd_tree.get_neighbour_info(
         swath_def, self.area_def, 50000, neighbours=1, segments=1
     )
     res = kd_tree.get_sample_from_neighbour_info(
         "nn", (800, 800), data.ravel(), valid_input_index, valid_output_index, index_array
     )
     cross_sum = res.sum()
     expected = 15874591.0
     self.assertEqual(cross_sum, expected, msg="Swath resampling from neighbour info nearest failed")
Example #13
 def test_nearest_from_sample(self):
     data = numpy.fromfunction(lambda y, x: y * x, (50, 10))
     lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
     lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
     swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
     valid_input_index, valid_output_index, index_array, distance_array = \
         kd_tree.get_neighbour_info(swath_def,
                                    self.area_def,
                                    50000, neighbours=1, segments=1)
     res = kd_tree.get_sample_from_neighbour_info('nn', (800, 800), data.ravel(),
                                                  valid_input_index, valid_output_index,
                                                  index_array)
     cross_sum = res.sum()
     expected = 15874591.0
     self.assertEqual(cross_sum, expected)
Example #14
File: projector.py  Project: pytroll/mpop
    def _project_array_nearest(self, data):
        """Project array *data* using nearest neighbour resampling"""
        if 'valid_index' not in self._cache:
            self._cache['valid_index'] = self._file_cache['valid_index']
            self._cache['valid_output_index'] = \
                self._file_cache['valid_output_index']
            self._cache['index_array'] = self._file_cache['index_array']

        valid_index, valid_output_index, index_array = \
            (self._cache['valid_index'],
             self._cache['valid_output_index'],
             self._cache['index_array'])

        res = kd_tree.get_sample_from_neighbour_info('nn',
                                                     self.out_area.shape,
                                                     data,
                                                     valid_index,
                                                     valid_output_index,
                                                     index_array,
                                                     fill_value=None)
        return res
Example #16
    def test_nearest_from_sample_np_dtypes(self):
        lons = np.fromfunction(lambda y, x: 3 + x, (50, 10))
        lats = np.fromfunction(lambda y, x: 75 - y, (50, 10))
        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
        valid_input_index, valid_output_index, index_array, distance_array = \
            kd_tree.get_neighbour_info(swath_def,
                                       self.area_def,
                                       50000, neighbours=1, segments=1)

        for dtype in [np.uint16, np.float32]:
            with self.subTest(dtype):
                data = np.fromfunction(lambda y, x: y * x, (50, 10)).astype(dtype)
                fill_value = dtype(0.0)
                res = \
                    kd_tree.get_sample_from_neighbour_info('nn', (800, 800),
                                                           data.ravel(),
                                                           valid_input_index,
                                                           valid_output_index,
                                                           index_array,
                                                           fill_value=fill_value)
                cross_sum = res.sum()
                expected = 15874591.0
                self.assertEqual(cross_sum, expected)
Example #17
def match_lonlat(source,
                 target,
                 radius_of_influence=0.7 * RESOLUTION * 1000.0,
                 n_neighbours=1):
    """
    Produce a masked array of the same shape as the arrays in *target*, with
    indices of nearest neighbours in *source*. *source* and *target* should be
    tuples (lon, lat) of the source and target swaths, respectively.

    Note::

        * Fastest matching is obtained when *target* has lower resolution than
        *source*.

        * *source* should have 2-dimensional lon and lat arrays.

    """
    from pyresample.geometry import SwathDefinition
    from pyresample.kd_tree import get_neighbour_info
    from pyresample.kd_tree import get_sample_from_neighbour_info

    lon, lat = source
    mask_out_lat = np.logical_or(lat < -90, lat > 90)
    mask_out_lon = np.logical_or(lon > 180, lon < -180)
    mask_out = np.logical_or(mask_out_lat, mask_out_lon)
    lat = np.ma.masked_array(lat, mask=mask_out)
    lon = np.ma.masked_array(lon, mask=mask_out)
    # lat = np.around(lat, decimals=4)
    # lon = np.around(lon, decimals=4)
    source_def = SwathDefinition(*(lon, lat))
    target_def = SwathDefinition(*target)
    logger.debug("Matching %d nearest neighbours", n_neighbours)
    valid_in, valid_out, indices, distances = get_neighbour_info(
        source_def, target_def, radius_of_influence, neighbours=n_neighbours)
    # import pdb; pdb.set_trace()
    # Use pyresample code to find column and row numbers for each pixel.
    # This works also with no-data in imager lat/lon.
    cols_matrix, rows_matrix = np.meshgrid(np.array(range(0, lat.shape[1])),
                                           np.array(range(0, lat.shape[0])))
    if n_neighbours == 1:
        first_indices = indices
    else:
        first_indices = indices[:, 0]

    cols = get_sample_from_neighbour_info('nn', target_def.shape, cols_matrix,
                                          valid_in, valid_out, first_indices)
    rows = get_sample_from_neighbour_info('nn', target_def.shape, rows_matrix,
                                          valid_in, valid_out, first_indices)
    if n_neighbours > 1:
        rows_0 = rows.copy()
        cols_0 = cols.copy()
        rows = NODATA + np.zeros((len(rows_0), n_neighbours))
        cols = NODATA + np.zeros((len(cols_0), n_neighbours))
        rows[:, 0] = rows_0
        cols[:, 0] = cols_0
        for i in range(1, n_neighbours):
            cols[:,
                 i] = get_sample_from_neighbour_info('nn', target_def.shape,
                                                     cols_matrix, valid_in,
                                                     valid_out, indices[:, i])
            rows[:,
                 i] = get_sample_from_neighbour_info('nn', target_def.shape,
                                                     rows_matrix, valid_in,
                                                     valid_out, indices[:, i])
            test = (distances[:, 0] - distances[:, i])
            if sum(~np.isnan(test)) > 0 and np.max(test[~np.isnan(test)]) > 0:
                raise ValueError(
                    'We count on the first neighbour being the closest')

    rows = np.array(rows)
    cols = np.array(cols)
    # import pdb; pdb.set_trace()
    """ Code used during debugging, leaving it here for now
    if indices.dtype in ['uint32']:
        # With pykdtree installed get_neighbour_info returns indices
        # as type uint32
        # This does not combine well with a nodata value of -9.
        indices = np.array(indices, dtype=np.int64)
    """
    # Make sure all indices are valid
    # import pdb; pdb.set_trace()
    rows[rows >= source_def.shape[0]] = NODATA
    cols[cols >= source_def.shape[1]] = NODATA
    mask = np.logical_or(distances > radius_of_influence,
                         indices >= len(valid_in))
    distances[distances > radius_of_influence] = -9
    # import pdb; ipdb.set_trace()
    rows[mask] = NODATA
    cols[mask] = NODATA
    # import pdb; pdb.set_trace()
    return MatchMapper(rows, cols, mask), distances
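
match_lonlat above resamples matrices of column and row numbers instead of data values, so every target pixel ends up holding the location of its nearest source pixel. A minimal hedged sketch of that index-remapping idea with synthetic swaths follows; the NODATA handling and the MatchMapper wrapper from the function above are deliberately omitted.

# Sketch of the index-remapping trick: resample row/column index matrices with
# 'nn' so each target pixel records its nearest source pixel.  Swaths are
# synthetic assumptions; fill/NODATA handling is omitted.
import numpy as np
from pyresample import geometry, kd_tree

src_lons = np.fromfunction(lambda y, x: 3 + 0.1 * x, (200, 100))
src_lats = np.fromfunction(lambda y, x: 75 - 0.1 * y, (200, 100))
tgt_lons = np.fromfunction(lambda y, x: 3 + 0.2 * x, (100, 50))
tgt_lats = np.fromfunction(lambda y, x: 75 - 0.2 * y, (100, 50))
source_def = geometry.SwathDefinition(lons=src_lons, lats=src_lats)
target_def = geometry.SwathDefinition(lons=tgt_lons, lats=tgt_lats)

valid_in, valid_out, indices, distances = kd_tree.get_neighbour_info(
    source_def, target_def, 20000, neighbours=1)

cols_matrix, rows_matrix = np.meshgrid(np.arange(src_lons.shape[1]),
                                       np.arange(src_lons.shape[0]))
cols = kd_tree.get_sample_from_neighbour_info(
    'nn', target_def.shape, cols_matrix.ravel(), valid_in, valid_out, indices)
rows = kd_tree.get_sample_from_neighbour_info(
    'nn', target_def.shape, rows_matrix.ravel(), valid_in, valid_out, indices)

# rows[i, j] / cols[i, j] now index the nearest source pixel of target (i, j).
rows = rows.astype(np.int64)
cols = cols.astype(np.int64)
field = np.fromfunction(lambda y, x: y + x, src_lons.shape)
remapped = field[rows, cols]   # source field sampled onto the target swath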
Example #18
def regridAllImages(workDir, ops_dir, vars, proj_geom_def, grid_info, ul_x, ul_y, data_geom_def, layer_names, output_filenames, overwrite_psf='yes', scale_data_types=None, scale_data_ranges=None):
	"""
	Uses pyresample to regrid all of the images. Each image that is created is written to the PSF. If one or 
	more images cannot be created, they will be skipped, but the other images will be created if possible.

	  Parameters:
	    workDir: 
	    vars: 
	    proj_geom_def:
	    grid_info:
	    ul_x:
	    ul_y:
	    data_geom_def:
	    layer_names: The names of the layers (in the vars dictionary) to convert into geoTIFF format.
	    output_filenames: A dictionary, mapping (layer_name -> output_filename).
		overwrite_psf: 'yes' or 'no'. If not specified, defaults to 'yes'.
		scale_data_types: A dictionary mapping (layer_name -> data_type).
		scale_data_ranges: A dictionary mapping (layer_name -> tuple(min_valid_value_in_layer, max_valid_value_in_layer))
	
	  Returns: 
	    True if any images were written to the PSF, False if not.
	"""

	if not (overwrite_psf == 'yes' or overwrite_psf == 'no'):
		log.exception("Invalid value specified for overwrite_psf: '" + str(overwrite_psf) + "'. Must be 'yes' or 'no'.")

	# compute the information needed to re-project the data based on the input and output geometry definitions.
	log.info("Calculating re-gridding based on lat/lon information.")
	resampleRadius = float(readPCF(workDir, "resampleRadius"))
	valid_input_index, valid_output_index, index_array, distance_array = kd_tree.get_neighbour_info(data_geom_def, proj_geom_def, resampleRadius, neighbours=1, reduce_data=False)

	# Actually reproject the images, using the information computed above.
	# If one image fails to be re-projected, this script will return failure, but will still attempt to convert the others if possible.
	gtCitationGeoKey = readPCF(workDir, "GTCitationGeoKey")
	geogCitationGeoKey = readPCF(workDir, "GeogCitationGeoKey")
	last_failure_status = os.EX_OK
	for layer in layer_names:
		if not layer in vars:
			log.warning("The layer '" + layer + "' was not found in the NC4 file. Skipping.")
			continue
		output_filename = output_filenames[layer]
		original_data = vars[layer]["data"]

		fill_value = processFillValues(vars, layer, original_data)
		if numpy.sum(original_data == fill_value) == (original_data.shape[0] * original_data.shape[1]):
			log.info("The input layer '" + layer + "' is all fill values. Skipping.")
			continue

		log.info("Regridding layer: '" + layer + "'")
		resampled_data = kd_tree.get_sample_from_neighbour_info('nn', proj_geom_def.shape, original_data, valid_input_index, valid_output_index, index_array, fill_value=fill_value)

		if numpy.sum(resampled_data == fill_value) == (resampled_data.shape[0] * resampled_data.shape[1]):
			log.warning("Output file: '" + output_filename + "' was not produced. The result of re-sampling was all fill values. The input data probably missed the grid.")
			continue
		
		# If requested, do rescaling of the data.
		if scale_data_types is not None:
			if scale_data_ranges is not None:
				resampled_data, fill_value = scaleData(resampled_data, fill_value, scale_data_types[layer], min_in=scale_data_ranges[layer][0], max_in=scale_data_ranges[layer][1])
			else:
				resampled_data, fill_value = scaleData(resampled_data, fill_value, scale_data_types[layer])

		log.info("Creating geoTIFF file: '" + output_filename + "'.")
		createGeoTiff(output_filename, resampled_data, grid_info['proj4_str'], [grid_info['pixel_size_x'], grid_info['pixel_size_y']], [ul_x, ul_y])

		# Edit the GeoTIFF keys.
		editStatus = editGeoTiffKeys(output_filename, workDir, ops_dir, gtCitationGeoKey=gtCitationGeoKey, geogCitationGeoKey=geogCitationGeoKey)
		if editStatus != os.EX_OK:
			last_failure_status = editStatus
		else:
			writePSF(workDir, output_filename, overwrite=overwrite_psf, close=False)
			overwrite_psf = 'no'

	if last_failure_status != os.EX_OK:
		log.exception("There was an error creating one or more of the geoTIFF output products. Exiting with failure.")
		sys.exit(last_failure_status)

	return (overwrite_psf == 'no')
Example #19
def load_drop_sonde_data(path, results=None):

    data = []
    files = Path(path).glob("faam-dropsonde*.nc")

    for f in files:
        ds_data = xr.load_dataset(f)

        valid = (-90 <= ds_data["lat"].data) * (90 >= ds_data["lat"].data)
        ds_data = ds_data.loc[{"time": valid}]

        if results:
            lons = results["longitude"].data
            lats = results["latitude"].data
            retrieval_swath = geometry.SwathDefinition(lons=lons, lats=lats)
            lons = ds_data["lon"].data
            lats = ds_data["lat"].data
            ds_swath = geometry.SwathDefinition(lons=lons, lats=lats)
            ni = kd_tree.get_neighbour_info(retrieval_swath,
                                            ds_swath,
                                            radius_of_influence=100e3,
                                            neighbours=1)
            (valid_input_index, valid_output_index, index_array,
             distance_array) = ni

            n = ds_data.time.size
            n_levels = results.z.size

            t_r = np.zeros(n)
            t_a = np.zeros(n)
            h2o_r = np.zeros(n)
            h2o_a = np.zeros(n)

            t_z = np.zeros((n, n_levels))
            t_a_z = np.zeros((n, n_levels))
            h2o_z = np.zeros((n, n_levels))
            h2o_a_z = np.zeros((n, n_levels))
            z = np.zeros((n, n_levels))
            d = np.zeros((n))

            lats_r = kd_tree.get_sample_from_neighbour_info(
                "nn",
                (n, ),
                results["latitude"].data,
                valid_input_index,
                valid_output_index,
                index_array,
                fill_value=np.nan,
            )
            lons_r = kd_tree.get_sample_from_neighbour_info(
                "nn",
                (n, ),
                results["longitude"].data,
                valid_input_index,
                valid_output_index,
                index_array,
                fill_value=np.nan,
            )

            d = kd_tree.get_sample_from_neighbour_info(
                "nn",
                (n, ),
                results["d"].data,
                valid_input_index,
                valid_output_index,
                index_array,
                fill_value=np.nan,
            )

            for i in range(n_levels):
                # t_z[:, i] = kd_tree.get_sample_from_neighbour_info(
                #    "nn",
                #    (n,),
                #    results["temperature"][:, i].data,
                #    valid_input_index,
                #    valid_output_index,
                #    index_array,
                #    fill_value=np.nan)
                # t_a_z[:, i] = kd_tree.get_sample_from_neighbour_info(
                #    "nn",
                #    (n,),
                #    results["temperature_a_priori"][:, i].data,
                #    valid_input_index,
                #    valid_output_index,
                #    index_array,
                #    fill_value=np.nan)
                h2o_z[:, i] = kd_tree.get_sample_from_neighbour_info(
                    "nn",
                    (n, ),
                    results["H2O"][:, i].data,
                    valid_input_index,
                    valid_output_index,
                    index_array,
                    fill_value=np.nan,
                )
                h2o_a_z[:, i] = kd_tree.get_sample_from_neighbour_info(
                    "nn",
                    (n, ),
                    results["H2O_a_priori"][:, i].data,
                    valid_input_index,
                    valid_output_index,
                    index_array,
                    fill_value=np.nan,
                )
                z[:, i] = kd_tree.get_sample_from_neighbour_info(
                    "nn",
                    (n, ),
                    results["altitude"][:, i].data,
                    valid_input_index,
                    valid_output_index,
                    index_array,
                    fill_value=np.nan,
                )

            for i in range(n):
                if np.isnan(ds_data["alt"][i]):
                    t_r[i] = np.nan
                    t_a[i] = np.nan
                    h2o_r[i] = np.nan
                    h2o_a[i] = np.nan
                    continue

                t_r[i] = np.interp(ds_data["alt"][i], z[i, :], t_z[i, :])
                t_a[i] = np.interp(ds_data["alt"][i], z[i, :], t_a_z[i, :])
                h2o_r[i] = np.interp(ds_data["alt"][i], z[i, :], h2o_z[i, :])
                h2o_a[i] = np.interp(ds_data["alt"][i], z[i, :], h2o_a_z[i, :])

            ds_data["t_retrieved"] = (("time", ), t_r)
            ds_data["t_a_priori"] = (("time", ), t_a)
            ds_data["h2o_retrieved"] = (("time", ), h2o_r)
            ds_data["h2o_a_priori"] = (("time", ), h2o_a)
            ds_data["lons_r"] = (("time"), lons_r)
            ds_data["lats_r"] = (("time"), lats_r)
            ds_data["d"] = (("time"), d)
        data.append(ds_data)
    return data
Example #20
    def project_array(self, data):
        """Project an array *data* along the given Projector object.
        """

        if self.mode == "nearest":
            if not 'valid_index' in self._cache:
                self._cache['valid_index'] = self._file_cache['valid_index']
                self._cache['valid_output_index'] = \
                    self._file_cache['valid_output_index']
                self._cache['index_array'] = self._file_cache['index_array']

            valid_index, valid_output_index, index_array = \
                (self._cache['valid_index'],
                 self._cache['valid_output_index'],
                 self._cache['index_array'])

            res = kd_tree.get_sample_from_neighbour_info('nn',
                                                         self.out_area.shape,
                                                         data,
                                                         valid_index,
                                                         valid_output_index,
                                                         index_array,
                                                         fill_value=None)

        elif self.mode == "quick":
            if not 'row_idx' in self._cache:
                self._cache['row_idx'] = self._file_cache['row_idx']
                self._cache['col_idx'] = self._file_cache['col_idx']
            row_idx, col_idx = self._cache['row_idx'], self._cache['col_idx']
            img = image.ImageContainer(data, self.in_area, fill_value=None)
            res = np.ma.array(img.get_array_from_linesample(row_idx, col_idx),
                              dtype=data.dtype)

        elif self.mode == "ewa":
            from pyresample.ewa import fornav
            # TODO: should be user configurable?
            rows_per_scan = None

            if 'ewa_cols' not in self._cache:
                self._cache['ewa_cols'] = self._file_cache['ewa_cols']
                self._cache['ewa_rows'] = self._file_cache['ewa_rows']
            num_valid_points, res = fornav(self._cache['ewa_cols'],
                                           self._cache['ewa_rows'],
                                           self.out_area, data,
                                           rows_per_scan=rows_per_scan)

        elif self.mode == "bilinear":

            if 'bilinear_t' not in self._cache:
                self._cache['bilinear_t'] = self._file_cache['bilinear_t']
                self._cache['bilinear_s'] = self._file_cache['bilinear_s']
                self._cache['input_idxs'] = self._file_cache['input_idxs']
                self._cache['idx_arr'] = self._file_cache['idx_arr']

            res = get_sample_from_bil_info(data.ravel(),
                                           self._cache['bilinear_t'],
                                           self._cache['bilinear_s'],
                                           self._cache['input_idxs'],
                                           self._cache['idx_arr'],
                                           output_shape=self.out_area.shape)
            res = np.ma.masked_invalid(res)

        return res
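
The 'bilinear' branch above applies coefficients that were precomputed elsewhere. The sketch below shows, under the assumption that the cached arrays come from pyresample's legacy functional bilinear interface (get_bil_info / get_sample_from_bil_info), how those four arrays could be produced and applied; the geometries are illustrative, and newer pyresample releases wrap this in resampler classes instead.

# Hedged sketch: precompute bilinear parameters (the cached 'bilinear_t',
# 'bilinear_s', 'input_idxs', 'idx_arr' entries above) and apply them.
# Geometries are illustrative assumptions.
import numpy as np
from pyresample import geometry
from pyresample.bilinear import get_bil_info, get_sample_from_bil_info

lons = np.fromfunction(lambda y, x: 3 + 0.1 * x, (500, 100))
lats = np.fromfunction(lambda y, x: 75 - 0.05 * y, (500, 100))
swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
area_def = geometry.AreaDefinition(
    'ease_nh', 'Arctic EASE grid', 'ease_nh',
    {'proj': 'laea', 'lat_0': '90', 'lon_0': '0', 'a': '6371228.0', 'units': 'm'},
    425, 425, (-5326849.0625, -5326849.0625, 5326849.0625, 5326849.0625))

# Precompute once; these correspond to the cached entries read above.
bilinear_t, bilinear_s, input_idxs, idx_arr = get_bil_info(
    swath_def, area_def, radius=50e3)

# Apply to any number of arrays on the same swath.
data = np.fromfunction(lambda y, x: y * x, (500, 100))
res = get_sample_from_bil_info(data.ravel(), bilinear_t, bilinear_s,
                               input_idxs, idx_arr,
                               output_shape=area_def.shape)
res = np.ma.masked_invalid(res)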
Example #21
        if 'ALEXI_USDA' in e:  # USDA Contains proj info in metadata
            if 'ET' in e:
                metaName = 'L3_ET_ALEXI Metadata'
            else:
                metaName = 'L4_ESI_ALEXI Metadata'
            gt = f['{}/Geotransform'.format(metaName)].value
            proj = f['{}/OGC_Well_Known_Text'.format(metaName)].value.decode(
                'UTF-8')
            sdGEO = ecoSD
        else:
            try:
                # Perform kdtree resampling (swath 2 grid conversion)
                sdGEO = kdt.get_sample_from_neighbour_info('nn',
                                                           areaDef.shape,
                                                           ecoSD,
                                                           index,
                                                           outdex,
                                                           indexArr,
                                                           fill_value=fv)
                ps = np.min([areaDef.pixel_size_x, areaDef.pixel_size_y])
                gt = [
                    areaDef.area_extent[0], ps, 0, areaDef.area_extent[3], 0,
                    -ps
                ]
            except ValueError:
                continue

        # Apply Scale Factor and Add Offset
        sdGEO = sdGEO * sf + add_off

        # Set fill value
Example #22
def resample_to_grid(input_data, src_lon, src_lat, target_lon, target_lat,
                     methods='nn', weight_funcs=None,
                     min_neighbours=1, search_rad=18000, neighbours=8,
                     fill_values=None):
    """
    Resample data from a dictionary of numpy arrays to the given grid using
    pyresample.
    Searches for the neighbours and then resamples the data
    to the target grid if at least
    min_neighbours neighbours are found.

    Parameters
    ----------
    input_data : dict of numpy.arrays
    src_lon : numpy.array
        longitudes of the input data
    src_lat : numpy.array
        latitudes of the input data
    target_lon : numpy.array
        longitudes of the output data
    target_lat : numpy.array
        latitudes of the output data
    methods : string or dict, optional
        method of spatial averaging. this is given to pyresample
        and can be
        'nn' : nearest neighbour
        'custom' : custom weight function has to be supplied in weight_funcs
        see pyresample documentation for more details
        can also be a dictionary with a method for each array in input data dict
    weight_funcs : function or dict of functions, optional
        if method is 'custom' a function like func(distance) has to be given
        can also be a dictionary with a function for each array in input data dict
    min_neighbours: int, optional
        if given then only points with at least this number of neighbours will be
        resampled
        Default : 1
    search_rad : float, optional
        search radius in meters of neighbour search
        Default : 18000
    neighbours : int, optional
        maximum number of neighbours to search for around each target grid point
        Default : 8
    fill_values : number or dict, optional
        if given the output array will be filled with this value if no valid
        resampled value could be computed, if not a masked array will be returned
        can also be a dict with a fill value for each variable
    Returns
    -------
    data : dict of numpy.arrays
        resampled data on given grid
    Raises
    ------
    ValueError :
        if empty dataset is resampled
    """
    output_data = {}

    output_shape = target_lat.shape
    if target_lon.ndim == 2:
        target_lat = target_lat.ravel()
        target_lon = target_lon.ravel()

    input_swath = geometry.SwathDefinition(src_lon, src_lat)
    output_swath = geometry.SwathDefinition(target_lon, target_lat)

    (valid_input_index,
     valid_output_index,
     index_array,
     distance_array) = kd_tree.get_neighbour_info(input_swath,
                                                  output_swath,
                                                  search_rad,
                                                  neighbours=neighbours)

    # throw away points with less than min_neighbours neighbours
    # find points with valid neighbours
    # get number of found neighbours for each grid point/row
    if neighbours > 1:
        nr_neighbours = np.isfinite(distance_array).sum(1)
        neigh_condition = nr_neighbours >= min_neighbours
        mask = np.invert(neigh_condition)
        enough_neighbours = np.nonzero(neigh_condition)[0]
    if neighbours == 1:
        nr_neighbours = np.isfinite(distance_array)
        neigh_condition = nr_neighbours >= min_neighbours
        mask = np.invert(neigh_condition)
        enough_neighbours = np.nonzero(neigh_condition)[0]
        distance_array = np.reshape(
            distance_array, (distance_array.shape[0], 1))
        index_array = np.reshape(index_array, (index_array.shape[0], 1))

    if enough_neighbours.size == 0:
        raise ValueError(
            "No points with at least %d neighbours found" % min_neighbours)

    # remove neighbourhood info of input grid points that have no neighbours to not have to
    # resample to whole output grid for small input grid file
    distance_array = distance_array[enough_neighbours, :]
    index_array = index_array[enough_neighbours, :]
    valid_output_index = valid_output_index[enough_neighbours]

    for param in input_data:

        data = input_data[param]

        if type(methods) == dict:
            method = methods[param]
        else:
            method = methods

        if method != 'nn':
            if type(weight_funcs) == dict:
                weight_func = weight_funcs[param]
            else:
                weight_func = weight_funcs
        else:
            weight_func = None

        if type(fill_values) == dict:
            fill_value = fill_values[param]
        else:
            fill_value = fill_values

        # construct arrays in output grid form
        if fill_value is not None:
            output_array = np.zeros(
                output_swath.shape, dtype=np.float64) + fill_value
        else:
            output_array = np.zeros(output_swath.shape, dtype=np.float64)
            output_array = np.ma.array(output_array, mask=mask)

        neigh_slice = slice(None, None, None)
        # check if method is nn, if so only use first row of index_array and
        # distance_array
        if method == 'nn':
            neigh_slice = (slice(None, None, None), 0)

        output_array[enough_neighbours] = kd_tree.get_sample_from_neighbour_info(
            method,
            enough_neighbours.shape,
            data,
            valid_input_index,
            valid_output_index,
            index_array[neigh_slice],
            distance_array[neigh_slice],
            weight_funcs=weight_func,
            fill_value=fill_value)

        output_data[param] = output_array.reshape(output_shape)

    return output_data
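
A hedged usage sketch of resample_to_grid as defined above, with synthetic scattered observations and a small regular target grid; all names and values below are illustrative assumptions.

# Illustrative call of resample_to_grid() from above: scattered source points,
# a 2-D regular target grid, nearest neighbour for one variable and a custom
# linear distance weighting for the other.  Values are made up.
import numpy as np

rng = np.random.default_rng(42)
src_lon = rng.uniform(9.0, 11.0, 2000)
src_lat = rng.uniform(45.0, 47.0, 2000)
input_data = {'sm': rng.uniform(0.0, 0.5, 2000),
              'temp': rng.uniform(270.0, 300.0, 2000)}

target_lon, target_lat = np.meshgrid(np.linspace(9.0, 11.0, 50),
                                     np.linspace(45.0, 47.0, 40))

gridded = resample_to_grid(input_data, src_lon, src_lat,
                           target_lon, target_lat,
                           methods={'sm': 'nn', 'temp': 'custom'},
                           weight_funcs={'sm': None,
                                         'temp': lambda d: 1.0 - d / 36000.0},
                           min_neighbours=1, search_rad=18000,
                           fill_values=-9999.0)

print(gridded['sm'].shape)   # (40, 50): same shape as the target grid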
Example #23
def match_lonlat(source,
                 target,
                 radius_of_influence=0.7 * RESOLUTION * 1000.0,
                 n_neighbours=1):
    """
    Produce a masked array of the same shape as the arrays in *target*, with
    indices of nearest neighbours in *source*. *source* and *target* should be
    tuples (lon, lat) of the source and target swaths, respectively.
    
    Note::
    
        * Fastest matching is obtained when *target* has lower resolution than
        *source*.
        
        * *source* should have 2-dimensional lon and lat arrays.
    
    """
    from pyresample.geometry import SwathDefinition
    from pyresample.kd_tree import get_neighbour_info
    from pyresample.kd_tree import get_sample_from_neighbour_info

    lon, lat = source
    mask_out_lat = np.logical_or(lat < -90, lat > 90)
    mask_out_lon = np.logical_or(lon > 180, lon < -180)
    mask_out = np.logical_or(mask_out_lat, mask_out_lon)
    lat = np.ma.masked_array(lat, mask=mask_out)
    lon = np.ma.masked_array(lon, mask=mask_out)
    #lat = np.around(lat, decimals=4)
    #lon = np.around(lon, decimals=4)
    source_def = SwathDefinition(*(lon, lat))
    target_def = SwathDefinition(*target)
    logger.debug("Matching %d nearest neighbours" % n_neighbours)
    valid_in, valid_out, indices, distances = get_neighbour_info(
        source_def, target_def, radius_of_influence, neighbours=n_neighbours)
    # Use pyresample code to find column and row numbers for each pixel.
    # This works also with no-data in imager lat/lon.
    cols_matrix, rows_matrix = np.meshgrid(np.array(xrange(0, lat.shape[1])),
                                           np.array(xrange(0, lat.shape[0])))
    cols = get_sample_from_neighbour_info('nn', target_def.shape, cols_matrix,
                                          valid_in, valid_out, indices)
    rows = get_sample_from_neighbour_info('nn', target_def.shape, rows_matrix,
                                          valid_in, valid_out, indices)
    rows = np.array(rows)
    cols = np.array(cols)
    """ Code used during debugging, leaving it here for now
    #Hopfully not needed anymore as indices is not used directly
    if indices.dtype in ['uint32']:
        #With pykdtree installed get_neighbour_info returns indices
        # as type uint32
        #This does not combine well with a nodata value of -9.
        indices = np.array(indices,dtype=np.int64)
    #get_expected_output even for nodata in lat/lon!
    #print "indices", indices
    if 1==1:
        print distances, indices
        print max(indices)
        print min(indices)
        print len(valid_in)
        print len(valid_in[valid_in])
        # But why is +1 item needed??
        from_one_to_many = np.array(xrange(0,len(valid_in)+1))
        print from_one_to_many
        valid_in_new = np.append(valid_in,np.array([True]), axis=0)
        print valid_in_new
        use_these = indices[valid_out]
        print use_these
        new_numbers = from_one_to_many[valid_in_new]
        print new_numbers
        indices[valid_out] = new_numbers[use_these]
    #print "indices", indices
    shape = list(target_def.shape)
    shape.append(n_neighbours)
    indices.shape = shape
    distances.shape = shape
    rows = indices // source_def.shape[1]
    cols = indices % source_def.shape[1]
    print "c", cols, "r", rows
    print rows.shape, cols.shape
    """

    # Make sure all indices are valid
    #import ipdb; ipdb.set_trace()
    rows[rows >= source_def.shape[0]] = NODATA
    cols[cols >= source_def.shape[1]] = NODATA
    mask = distances > radius_of_influence
    return MatchMapper(rows, cols, mask)
Example #24
def match_lonlat(source, target,
                 radius_of_influence=0.7*RESOLUTION*1000.0,
                 n_neighbours=1):
    """
    Produce a masked array of the same shape as the arrays in *target*, with
    indices of nearest neighbours in *source*. *source* and *target* should be
    tuples (lon, lat) of the source and target swaths, respectively.
    
    Note::
    
        * Fastest matching is obtained when *target* has lower resolution than
        *source*.
        
        * *source* should have 2-dimensional lon and lat arrays.
    
    """
    from pyresample.geometry import SwathDefinition
    from pyresample.kd_tree import get_neighbour_info
    from pyresample.kd_tree import get_sample_from_neighbour_info

    lon, lat = source
    mask_out_lat = np.logical_or(lat<-90, lat>90)
    mask_out_lon = np.logical_or(lon>180, lon<-180)
    mask_out = np.logical_or(mask_out_lat, mask_out_lon)
    lat = np.ma.masked_array(lat, mask=mask_out)
    lon = np.ma.masked_array(lon, mask=mask_out)
    #lat = np.around(lat, decimals=4)
    #lon = np.around(lon, decimals=4)
    source_def = SwathDefinition(*(lon,lat))
    target_def = SwathDefinition(*target)
    logger.debug("Matching %d nearest neighbours", n_neighbours)
    valid_in, valid_out, indices, distances = get_neighbour_info(
        source_def, target_def, radius_of_influence, neighbours=n_neighbours)
    #import pdb; pdb.set_trace()
    # Use pyresample code to find column and row numbers for each pixel.
    # This works also with no-data in imager lat/lon.
    cols_matrix, rows_matrix = np.meshgrid(np.array(range(0,lat.shape[1])),
                                           np.array(range(0,lat.shape[0])))
    if n_neighbours == 1:
        first_indices = indices
    else:
        first_indices = indices[:,0]

    cols = get_sample_from_neighbour_info('nn', target_def.shape,
                                          cols_matrix,
                                          valid_in,
                                          valid_out,
                                          first_indices)
    rows = get_sample_from_neighbour_info('nn', target_def.shape,
                                          rows_matrix,
                                          valid_in, valid_out,
                                          first_indices)
    if n_neighbours > 1:
        rows_0 = rows.copy()
        cols_0 = cols.copy()
        rows = NODATA + np.zeros((len(rows_0) , n_neighbours))
        cols = NODATA + np.zeros((len(cols_0) , n_neighbours))
        rows[:,0] = rows_0
        cols[:,0] = cols_0
        for i in range(1, n_neighbours):
            cols[:,i] = get_sample_from_neighbour_info('nn', target_def.shape,
                                                       cols_matrix,
                                                       valid_in, valid_out,
                                                       indices[:,i])
            rows[:,i] = get_sample_from_neighbour_info('nn', target_def.shape,
                                                       rows_matrix,
                                                       valid_in, valid_out,
                                                       indices[:,i]) 
            test = distances[:, 0] - distances[:, i]
            if np.sum(~np.isnan(test)) > 0 and np.max(test[~np.isnan(test)]) > 0:
                raise ValueError(
                    'We count on the first neighbour being the closest')
            

    rows = np.array(rows)
    cols = np.array(cols)
    """ Code used during debugging, leaving it here for now
    if indices.dtype in ['uint32']:
        #With pykdtree installed get_neighbour_info returns indices
        # as type uint32
        #This does not combine well with a nodata value of -9.
        indices = np.array(indices,dtype=np.int64)
    """
    # Make sure all indices are valid
    rows[rows >= source_def.shape[0]] = NODATA
    cols[cols >= source_def.shape[1]] = NODATA
    mask = np.logical_or(distances > radius_of_influence,
                         indices >= len(valid_in))
    distances[distances > radius_of_influence] = -9
    rows[mask] = NODATA
    cols[mask] = NODATA
    return MatchMapper(rows, cols, mask), distances
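A minimal usage sketch for match_lonlat on small synthetic swaths. It assumes the function (and the NODATA constant it relies on) can be imported from the module that defines it; the module name below is a placeholder.

import numpy as np
# from some_match_module import match_lonlat   # placeholder import path

# two small synthetic 2-D swaths, passed as (lon, lat) tuples as the docstring asks
src_lon, src_lat = np.meshgrid(np.linspace(10.0, 12.0, 50),
                               np.linspace(55.0, 57.0, 40))
trg_lon, trg_lat = np.meshgrid(np.linspace(10.5, 11.5, 20),
                               np.linspace(55.5, 56.5, 15))

mapper, distances = match_lonlat((src_lon, src_lat),
                                 (trg_lon, trg_lat),
                                 radius_of_influence=5000.0,
                                 n_neighbours=1)
# mapper wraps source row/col indices per target pixel (NODATA where unmatched);
# distances holds the corresponding match distances in metres.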
Example #25
0
    def add_detection_stats_on_fib_lattice(self, my_obj):
        #Start with the area and get lat and lon to calculate the stats:
        if len(my_obj.longitude) == 0:
            print "Skipping file, no matches!"
            return
        lats = self.flattice.lats[:]
        max_distance=self.flattice.radius_km*1000*2.5
        area_def = SwathDefinition(*(self.flattice.lons,
                                     self.flattice.lats))
        target_def = SwathDefinition(*(my_obj.longitude, 
                                       my_obj.latitude)) 
        valid_in, valid_out, indices, distances = get_neighbour_info(
            area_def, target_def, radius_of_influence=max_distance, 
            epsilon=100, neighbours=1)
        cols = get_sample_from_neighbour_info('nn', target_def.shape,
                                              np.array(xrange(0,len(lats))),
                                              valid_in, valid_out,
                                              indices)
        cols = cols[valid_out]
        detected_clouds = my_obj.detected_clouds[valid_out]
        detected_clear = my_obj.detected_clear[valid_out]
        detected_height_low = my_obj.detected_height_low[valid_out]
        detected_height_high = my_obj.detected_height_high[valid_out]
        detected_height = my_obj.detected_height[valid_out]
        detected_height_both = my_obj.detected_height_both[valid_out]
        false_clouds = my_obj.false_clouds[valid_out]
        undetected_clouds = my_obj.undetected_clouds[valid_out]
        new_detected_clouds = my_obj.new_detected_clouds[valid_out]
        new_false_clouds = my_obj.new_false_clouds[valid_out]
        lapse_rate = my_obj.lapse_rate[valid_out]  
        t11ts_offset = my_obj.t11ts_offset[valid_out] 
        t11t12_offset = my_obj.t11t12_offset[valid_out] 
        t37t12_offset = my_obj.t37t12_offset[valid_out] 
        height_bias_low = my_obj.height_bias_low[valid_out]
        height_bias = my_obj.height_bias[valid_out]
        height_mae_diff = my_obj.height_mae_diff[valid_out]
        temperature_bias_low = my_obj.temperature_bias_low[valid_out]
        temperature_bias_low_t11 = my_obj.temperature_bias_low_t11[valid_out]
        lapse_bias_low = my_obj.lapse_bias_low[valid_out]
        height_bias_high = my_obj.height_bias_high[valid_out]
        lapse_bias_high = my_obj.lapse_bias_high[valid_out]
        is_clear = np.logical_or(detected_clear,false_clouds)
        # Group pixels by their nearest lattice cell to speed up the accumulation below.
        cols[distances > max_distance] = -9  # don't use pixels matched too far away!
        import time
        tic = time.time()
        arr = np.unique(cols)
        for d in arr[arr>0]:
            use = cols==d
            ind = np.where(use)[0]
            #if ind.any():
            self.flattice.N_false_clouds[d] += np.sum(false_clouds[ind])
            self.flattice.N_detected_clouds[d] += np.sum(detected_clouds[ind])
            self.flattice.N_detected_clear[d] += np.sum(detected_clear[ind])
            self.flattice.N_undetected_clouds[d] += np.sum(undetected_clouds[ind])
            self.flattice.N_new_false_clouds[d] += np.sum(new_false_clouds[ind])
            self.flattice.N_new_detected_clouds[d] += np.sum(new_detected_clouds[ind])
            self.flattice.N_detected_height_low[d] += np.sum(detected_height_low[ind])
            self.flattice.N_detected_height_high[d] += np.sum(detected_height_high[ind])
            self.flattice.N_detected_height[d] += np.sum(detected_height[ind])
            self.flattice.N_detected_height_both[d] += np.sum(detected_height_both[ind])
            self.flattice.Sum_ctth_bias_low[d] += np.sum(height_bias_low[ind])
            self.flattice.Sum_ctth_mae_low[d] += np.sum(np.abs(height_bias_low[ind]))
            self.flattice.Sum_ctth_mae[d] += np.sum(np.abs(height_bias[ind]))
            self.flattice.Sum_ctth_mae_diff[d] += np.sum(height_mae_diff[ind])
            self.flattice.Sum_lapse_bias_low[d] += np.sum(lapse_bias_low[ind])
            self.flattice.Sum_ctth_bias_high[d] += np.sum(height_bias_high[ind])
            self.flattice.Sum_ctth_mae_high[d] += np.sum(np.abs(height_bias_high[ind]))
            self.flattice.Sum_lapse_bias_high[d] += np.sum(lapse_bias_high[ind])
            self.flattice.Sum_ctth_bias_temperature_low[d] += np.sum(temperature_bias_low[ind])
            self.flattice.Sum_ctth_bias_temperature_low_t11[d] += np.sum(temperature_bias_low_t11[ind])
            self.flattice.Min_lapse_rate[d] = np.min([self.flattice.Min_lapse_rate[d],
                                                      np.min(lapse_rate[ind])])  
            if np.sum(is_clear[ind])>0:
                self.flattice.Min_t11ts_offset[d] = np.min([self.flattice.Min_t11ts_offset[d],
                                                            np.percentile(t11ts_offset[ind][is_clear[ind]], 5)])
                self.flattice.Max_t11t12_offset[d] = np.max([self.flattice.Max_t11t12_offset[d],
                                                             np.percentile(t11t12_offset[ind][is_clear[ind]], 95)])
                self.flattice.Max_t37t12_offset[d] = np.max([self.flattice.Max_t37t12_offset[d],
                                                             np.percentile(t37t12_offset[ind][is_clear[ind]], 95)])
            for cc_type in range(8):
                self.flattice.Sum_height_bias_type[cc_type][d] += np.sum(my_obj.height_bias_type[cc_type][ind])
                self.flattice.N_detected_height_type[cc_type][d] += np.sum(my_obj.detected_height_type[cc_type][ind])

        print "mapping took %1.4f seconds"%(time.time()-tic)   
Example #26
0
def resample_to_grid_only_valid_return(input_data,
                                       src_lon,
                                       src_lat,
                                       target_lon,
                                       target_lat,
                                       methods='nn',
                                       weight_funcs=None,
                                       min_neighbours=1,
                                       search_rad=18000,
                                       neighbours=8,
                                       fill_values=None):
    """
    resamples data from dictionary of numpy arrays using pyresample
    to given grid.
    Searches for the neighbours and then resamples the data
    to the grid given in togrid if at least
    min_neighbours neighbours are found

    Parameters
    ----------
    input_data : dict of numpy.arrays
    src_lon : numpy.array
        longitudes of the input data
    src_lat : numpy.array
        latitudes of the input data
    target_lon : numpy.array
        longitudes of the output data
    target_lat : numpy.array
        latitudes of the output data
    methods : string or dict, optional
        method of spatial averaging; this is passed to pyresample and can be
        'nn' : nearest neighbour
        'custom' : a custom weight function has to be supplied in weight_funcs
        see the pyresample documentation for more details;
        can also be a dictionary with a method for each array in the input data dict
    weight_funcs : function or dict of functions, optional
        if method is 'custom' a function like func(distance) has to be given
        can also be a dictionary with a function for each array in input data dict
    min_neighbours: int, optional
        if given then only points with at least this number of neighbours will be
        resampled
        Default : 1
    search_rad : float, optional
        search radius in meters of neighbour search
        Default : 18000
    neighbours : int, optional
        maximum number of neighbours to look for for each input grid point
        Default : 8
    fill_values : number or dict, optional
        if given, the output array will be filled with this value where no
        valid resampled value could be computed; otherwise a masked array is
        returned. Can also be a dict with a fill value for each variable.

    Returns
    -------
    data : dict of numpy.arrays
        resampled data on the part of the target grid over which data was found
    mask : numpy.ndarray
        boolean mask into the (flattened) target grid; True where no value
        could be resampled (fewer than min_neighbours neighbours found)

    Raises
    ------
    ValueError :
        if empty dataset is resampled
    """
    output_data = {}

    if target_lon.ndim == 2:
        target_lat = target_lat.ravel()
        target_lon = target_lon.ravel()

    input_swath = geometry.SwathDefinition(src_lon, src_lat)
    output_swath = geometry.SwathDefinition(target_lon, target_lat)

    (valid_input_index, valid_output_index, index_array,
     distance_array) = kd_tree.get_neighbour_info(input_swath,
                                                  output_swath,
                                                  search_rad,
                                                  neighbours=neighbours)

    # throw away points with less than min_neighbours neighbours
    # find points with valid neighbours
    # get number of found neighbours for each grid point/row
    if neighbours > 1:
        nr_neighbours = np.isfinite(distance_array).sum(1)
        neigh_condition = nr_neighbours >= min_neighbours
        mask = np.invert(neigh_condition)
        enough_neighbours = np.nonzero(neigh_condition)[0]
    if neighbours == 1:
        nr_neighbours = np.isfinite(distance_array)
        neigh_condition = nr_neighbours >= min_neighbours
        mask = np.invert(neigh_condition)
        enough_neighbours = np.nonzero(neigh_condition)[0]
        distance_array = np.reshape(distance_array,
                                    (distance_array.shape[0], 1))
        index_array = np.reshape(index_array, (index_array.shape[0], 1))

    if enough_neighbours.size == 0:
        raise ValueError("No points with at least %d neighbours found" %
                         min_neighbours)

    # remove neighbourhood info of input grid points that have no neighbours to not have to
    # resample to whole output grid for small input grid file
    distance_array = distance_array[enough_neighbours, :]
    index_array = index_array[enough_neighbours, :]
    valid_output_index = valid_output_index[enough_neighbours]

    for param in input_data:

        data = input_data[param]

        if isinstance(methods, dict):
            method = methods[param]
        else:
            method = methods

        if method != 'nn':
            if isinstance(weight_funcs, dict):
                weight_func = weight_funcs[param]
            else:
                weight_func = weight_funcs
        else:
            weight_func = None

        neigh_slice = slice(None, None, None)
        # check if method is nn, if so only use first row of index_array and
        # distance_array
        if method == 'nn':
            neigh_slice = (slice(None, None, None), 0)

        if isinstance(fill_values, dict):
            fill_value = fill_values[param]
        else:
            fill_value = fill_values

        output_array = kd_tree.get_sample_from_neighbour_info(
            method,
            enough_neighbours.shape,
            data,
            valid_input_index,
            valid_output_index,
            index_array[neigh_slice],
            distance_array[neigh_slice],
            weight_funcs=weight_func,
            fill_value=fill_value)

        output_data[param] = output_array

    return output_data, mask
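A minimal usage sketch for resample_to_grid_only_valid_return, assuming only numpy and the function defined above; the variable names and grid spacing are invented for illustration:

import numpy as np

# a small irregular source swath and a regular 0.25-degree target grid
src_lon = np.random.uniform(-10.0, 10.0, 500)
src_lat = np.random.uniform(40.0, 50.0, 500)
trg_lon, trg_lat = np.meshgrid(np.arange(-10.0, 10.0, 0.25),
                               np.arange(40.0, 50.0, 0.25))

input_data = {'sm': np.random.rand(500)}    # one variable to resample

data, mask = resample_to_grid_only_valid_return(input_data,
                                                src_lon, src_lat,
                                                trg_lon, trg_lat,
                                                methods='nn',
                                                search_rad=30000)
# data['sm'] holds values for the target cells that had enough neighbours;
# mask is True for target cells where nothing could be resampled.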
Example #27
0
                                   bandtype, (
                                       'lat',
                                       'lon',
                                   ),
                                   fill_value=fillvalue,
                                   zlib=True,
                                   complevel=6)

    # Create a numpy *masked/unmasked* array to hold the values that will later be saved as gridd_var in the netCDF file.
    # netCDF variables cannot be addressed with e.g. gridd_var[land_mask] = fillvalue;
    #   that would result in "Index cannot be multidimensional"!
    # Resample the swath_var from the neighbour info ... was gridd_var[:] =
    gridd_array = kd_tree.get_sample_from_neighbour_info('nn',
                                                         area.shape,
                                                         swath_var,
                                                         valid_input_index,
                                                         valid_output_index,
                                                         index_array,
                                                         fill_value=fillvalue)

    #print bandname, type(gridd_array)
    # case1/case2 elaboration for chl in Black Sea bs:
    if bandname.lower() == 'chl' and area_id == 'bs':
        mask = misc.imread(bs_case)  #mask for case1/case2 !!
        mask = mask[:, :, 1]  #use only GREEN byte ...
        c1vals = (210, 136, 187, 217, 152, 255)
        c2vals = (70, 0, 175)
        #cXpxls are bool arrays, True if pixel is case X, X=1,2
    c1pxls = np.in1d(mask, c1vals)  # numpy.isin is available only in NumPy >= 1.13
    c1pxls = c1pxls.reshape(mask.shape)