Code Example #1
    def test_custom_multi_from_sample(self):
        def wf1(dist):
            return 1 - dist / 100000.0

        def wf2(dist):
            return 1

        def wf3(dist):
            return numpy.cos(dist) ** 2

        data = numpy.fromfunction(lambda y, x: (y + x) * 10 ** -6, (5000, 100))
        lons = numpy.fromfunction(
            lambda y, x: 3 + (10.0 / 100) * x, (5000, 100))
        lats = numpy.fromfunction(
            lambda y, x: 75 - (50.0 / 5000) * y, (5000, 100))
        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
        data_multi = numpy.column_stack((data.ravel(), data.ravel(),
                                         data.ravel()))

        if (sys.version_info < (2, 6) or
                (sys.version_info >= (3, 0) and sys.version_info < (3, 4))):
            valid_input_index, valid_output_index, index_array, distance_array = \
                kd_tree.get_neighbour_info(swath_def,
                                           self.area_def,
                                           50000, segments=1)
        else:
            with warnings.catch_warnings(record=True) as w:
                valid_input_index, valid_output_index, index_array, distance_array = \
                    kd_tree.get_neighbour_info(swath_def,
                                               self.area_def,
                                               50000, segments=1)
                self.assertFalse(
                    len(w) != 1, 'Failed to create neighbour radius warning')
                self.assertFalse(('Possible more' not in str(
                    w[0].message)), 'Failed to create correct neighbour radius warning')

        res = kd_tree.get_sample_from_neighbour_info('custom', (800, 800),
                                                     data_multi,
                                                     valid_input_index, valid_output_index,
                                                     index_array, distance_array,
                                                     weight_funcs=[wf1, wf2, wf3])

        cross_sum = res.sum()

        expected = 1461.842980746
        self.assertAlmostEqual(cross_sum, expected,
                               msg='Swath multi channel custom resampling from neighbour info failed 1')
        res = kd_tree.get_sample_from_neighbour_info('custom', (800, 800),
                                                     data_multi,
                                                     valid_input_index, valid_output_index,
                                                     index_array, distance_array,
                                                     weight_funcs=[wf1, wf2, wf3])

        # Look for error where input data has been manipulated
        cross_sum = res.sum()
        expected = 1461.842980746
        self.assertAlmostEqual(cross_sum, expected,
                               msg='Swath multi channel custom resampling from neighbour info failed 2')
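
All of the kd_tree examples on this page share the same two-step pattern: compute the neighbour lookup once with get_neighbour_info, then apply it to any number of data arrays with get_sample_from_neighbour_info. Below is a minimal, self-contained sketch of that pattern; the 50x10 swath and the small 4x4 stereographic target area are made up for illustration (the target area parameters are copied from Code Example #12 further down).

import numpy as np
from pyresample import geometry, kd_tree

# Source swath: a 50 x 10 grid of longitudes/latitudes (illustrative values).
lons = np.fromfunction(lambda y, x: 3 + x, (50, 10))
lats = np.fromfunction(lambda y, x: 75 - y, (50, 10))
swath_def = geometry.SwathDefinition(lons=lons, lats=lats)

# Small target area in polar stereographic projection.
area_def = geometry.AreaDefinition(
    'areaD', 'Europe (3km, HRV, VTC)', 'areaD',
    {'a': '6378144.0', 'b': '6356759.0', 'lat_0': '50.00',
     'lat_ts': '50.00', 'lon_0': '8.00', 'proj': 'stere'},
    4, 4,
    [-1370912.72, -909968.64, 1029087.28, 1490031.36])

# Step 1: neighbour lookup (done once per source/target geometry pair).
valid_input_index, valid_output_index, index_array, distance_array = \
    kd_tree.get_neighbour_info(swath_def, area_def, 50000, neighbours=1)

# Step 2: apply the lookup to data defined on the swath; fill_value=None
# returns a masked array where no neighbour was found within 50 km.
data = np.fromfunction(lambda y, x: y * x, (50, 10))
result = kd_tree.get_sample_from_neighbour_info(
    'nn', area_def.shape, data.ravel(),
    valid_input_index, valid_output_index, index_array,
    fill_value=None)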
Code Example #3
 def test_match_integers(self):
     from pyresample.geometry import SwathDefinition
     from pyresample.kd_tree import get_neighbour_info
     lon = np.array([0, 10, 25])
     source_def = SwathDefinition(*(lon, lon))
     target_def = SwathDefinition(*(lon, lon))
     valid_in, valid_out, indices_int, distances = get_neighbour_info(source_def, target_def, 1000, neighbours=1)
     lon = np.array([0, 10, 25]).astype(np.float64)
     source_def = SwathDefinition(*(lon, lon))
     target_def = SwathDefinition(*(lon, lon))
     valid_in, valid_out, indices_float, distances = get_neighbour_info(source_def, target_def, 1000, neighbours=1)
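
The excerpt above stops after computing neighbour indices from integer and float coordinate arrays; presumably the point of the test is that both runs give identical indices. A hedged sketch of such a check (not part of the excerpt):

     np.testing.assert_array_equal(indices_int, indices_float)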
Code Example #4
File: scene.py  Project: metno/satistjenesten
    def resample_to_area(self, target_area_def, resample_method=None):
        """
        Resample existing scene to the provided area definition

        """
        if resample_method not in ['nn', 'gaussian']:
            raise Exception('Resample method {} not known'.format(resample_method))

        attributes_list_to_pass = ['bands', 'timestamp']
        resampled_scene = GenericScene()
        resampled_scene.area_def = target_area_def
        copy_attributes(self, resampled_scene, attributes_list_to_pass)

        try:
            self.area_def = geometry.SwathDefinition(lons=self.longitudes, lats=self.latitudes)
        except:
            self.get_area_def()

        if resample_method == 'nn':
            neighbours = 1
        else:
            neighbours = 8

        valid_input_index, valid_output_index, index_array, distance_array = \
                kd_tree.get_neighbour_info(self.area_def, resampled_scene.area_def,
                                           resampled_scene.area_def.pixel_size_x * 2.5,
                                           neighbours=neighbours, nprocs=1)

        bands_number = len(resampled_scene.bands)

        for i, band in enumerate(resampled_scene.bands.values()):

            print "Resampling band {0:d}/{1:d}".format(i+1, bands_number)
            swath_data = deepcopy(band.data)

            if resample_method == 'nn':
                band.data = kd_tree.get_sample_from_neighbour_info('nn', resampled_scene.area_def.shape,
                                                                   swath_data,
                                                                   valid_input_index,
                                                                   valid_output_index,
                                                                   index_array)

            elif resample_method == 'gaussian':

                radius_of_influence = resampled_scene.area_def.pixel_size_x * 2.5
                sigma = pr.utils.fwhm2sigma(radius_of_influence * 1.5)
                gauss = lambda r: numpy.exp(-r ** 2 / float(sigma) ** 2)

                band.data = kd_tree.get_sample_from_neighbour_info('custom', resampled_scene.area_def.shape,
                                                                    swath_data,
                                                                    valid_input_index,
                                                                    valid_output_index,
                                                                    index_array,
                                                                    distance_array=distance_array,
                                                                    weight_funcs=gauss,
                                                                    fill_value=0,
                                                                    with_uncert=False)

            else:
                raise Exception('Resampling method not known')
        return resampled_scene
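
In the 'custom' branch above, the weight function receives the neighbour distances in metres. The following is a small, self-contained sketch of the same Gaussian weighting idea with sigma written out explicitly instead of going through pyresample's fwhm2sigma helper; the radius value and names are illustrative only.

import numpy as np

radius_of_influence = 25000.0      # metres, illustrative
fwhm = 1.5 * radius_of_influence   # chosen full width at half maximum
# With a weight of exp(-d**2 / sigma**2), the weight drops to 0.5 at d = fwhm / 2.
sigma = fwhm / (2.0 * np.sqrt(np.log(2.0)))

def gauss(dist):
    """Gaussian weight over neighbour distance, as in the example above."""
    return np.exp(-dist ** 2 / sigma ** 2)

# This function would be passed as weight_funcs=gauss to
# kd_tree.get_sample_from_neighbour_info(..., distance_array=..., weight_funcs=gauss).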
Code Example #5
File: test_kd_tree.py  Project: cpaulik/pyresample
 def test_masked_multi_from_sample(self):
     data = numpy.ones((50, 10))
     data[:, 5:] = 2
     mask1 = numpy.ones((50, 10))
     mask1[:, :5] = 0
     mask2 = numpy.ones((50, 10))
     mask2[:, 5:] = 0
     mask3 = numpy.ones((50, 10))
     mask3[:25, :] = 0
     data_multi = numpy.column_stack(
         (data.ravel(), data.ravel(), data.ravel()))
     mask_multi = numpy.column_stack(
         (mask1.ravel(), mask2.ravel(), mask3.ravel()))
     masked_data = numpy.ma.array(data_multi, mask=mask_multi)
     lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
     lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
     swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
     valid_input_index, valid_output_index, index_array, distance_array = \
         kd_tree.get_neighbour_info(swath_def,
                                    self.area_def,
                                    50000, neighbours=1, segments=1)
     res = kd_tree.get_sample_from_neighbour_info('nn', (800, 800),
                                                  masked_data,
                                                  valid_input_index,
                                                  valid_output_index, index_array,
                                                  fill_value=None)
     expected_fill_mask = numpy.fromfile(os.path.join(os.path.dirname(__file__),
                                                      'test_files',
                                                      'mask_test_full_fill_multi.dat'),
                                         sep=' ').reshape((800, 800, 3))
     fill_mask = res.mask
     self.assertTrue(numpy.array_equal(fill_mask, expected_fill_mask),
                     msg='Failed to create fill mask on masked data')
Code Example #6
File: data.py  Project: metno/satistjenesten
    def resample_to_area(self):
        gridded_scene = GriddedSatScene()
        attributes_list_to_pass = ['bands', 'area_def', 'area_name']
        self.get_area_def()
        copy_attributes(self, gridded_scene, attributes_list_to_pass)

        try:
            self.swath_area_def = geometry.SwathDefinition(lons=self.longitudes, lats=self.latitudes)
        except:
            self.scene.get_area_def()
            self.swath_area_def = self.scene.area_def

        valid_input_index, valid_output_index, index_array, distance_array = \
                kd_tree.get_neighbour_info(self.swath_area_def, self.area_def,
                                           self.area_def.pixel_size_x * 2.5, neighbours=1)
        bands_number = len(self.bands)

        for i, band in enumerate(self.bands.values()):
            print("Resampling band {0:d}/{1:d}".format(i + 1, bands_number))
            swath_data = band.data.copy()
            band.data = kd_tree.get_sample_from_neighbour_info('nn', self.area_def.shape,
                                                                swath_data,
                                                                valid_input_index,
                                                                valid_output_index,
                                                                index_array)
        gridded_scene.gridded = True
        return gridded_scene
Code Example #8
File: __init__.py  Project: pytroll/pyresample
def generate_nearest_neighbour_linesample_arrays(source_area_def,
                                                 target_area_def,
                                                 radius_of_influence,
                                                 nprocs=1):
    """Generate linesample arrays for nearest neighbour grid resampling

    Parameters
    -----------
    source_area_def : object
        Source area definition as geometry definition object
    target_area_def : object
        Target area definition as geometry definition object
    radius_of_influence : float
        Cut off distance in meters
    nprocs : int, optional
        Number of processor cores to be used

    Returns
    -------
    (row_indices, col_indices) : tuple of numpy arrays
    """

    from pyresample.kd_tree import get_neighbour_info
    valid_input_index, valid_output_index, index_array, distance_array = \
        get_neighbour_info(source_area_def,
                           target_area_def,
                           radius_of_influence,
                           neighbours=1,
                           nprocs=nprocs)
    # Enumerate rows and cols
    rows = np.fromfunction(lambda i, j: i, source_area_def.shape,
                           dtype=np.int32).ravel()
    cols = np.fromfunction(lambda i, j: j, source_area_def.shape,
                           dtype=np.int32).ravel()

    # Reduce to match resampling data set
    rows_valid = rows[valid_input_index]
    cols_valid = cols[valid_input_index]

    # Get result using array indexing
    number_of_valid_points = valid_input_index.sum()
    index_mask = (index_array == number_of_valid_points)
    index_array[index_mask] = 0
    row_sample = rows_valid[index_array]
    col_sample = cols_valid[index_array]
    row_sample[index_mask] = -1
    col_sample[index_mask] = -1

    # Reshape to correct shape
    row_indices = row_sample.reshape(target_area_def.shape)
    col_indices = col_sample.reshape(target_area_def.shape)

    row_indices = _downcast_index_array(row_indices,
                                        source_area_def.shape[0])
    col_indices = _downcast_index_array(col_indices,
                                        source_area_def.shape[1])

    return row_indices, col_indices
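
A hedged usage sketch for the function above: the returned row/column arrays can be applied to any source grid with plain NumPy fancy indexing. Target pixels that had no neighbour within the cut-off radius were flagged with -1 above, so they wrap to the last row/column and should be masked separately. source_area_def and target_area_def are assumed to be existing AreaDefinition objects.

import numpy as np

row_indices, col_indices = generate_nearest_neighbour_linesample_arrays(
    source_area_def, target_area_def, 50000)

source_data = np.random.rand(*source_area_def.shape)
resampled = source_data[row_indices, col_indices]

# Mask target pixels that were flagged with -1 (no neighbour within 50 km).
resampled = np.ma.masked_array(resampled, mask=(row_indices == -1))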
Code Example #9
File: __init__.py  Project: mindyls/pyresample
def generate_nearest_neighbour_linesample_arrays(source_area_def,
                                                 target_area_def,
                                                 radius_of_influence,
                                                 nprocs=1):
    """Generate linesample arrays for nearest neighbour grid resampling

    Parameters
    -----------
    source_area_def : object
        Source area definition as geometry definition object
    target_area_def : object
        Target area definition as geometry definition object
    radius_of_influence : float
        Cut off distance in meters
    nprocs : int, optional
        Number of processor cores to be used

    Returns
    -------
    (row_indices, col_indices) : tuple of numpy arrays
    """

    from pyresample.kd_tree import get_neighbour_info
    valid_input_index, valid_output_index, index_array, distance_array = \
        get_neighbour_info(source_area_def,
                           target_area_def,
                           radius_of_influence,
                           neighbours=1,
                           nprocs=nprocs)
    # Enumerate rows and cols
    rows = np.fromfunction(lambda i, j: i,
                           source_area_def.shape,
                           dtype=np.int32).ravel()
    cols = np.fromfunction(lambda i, j: j,
                           source_area_def.shape,
                           dtype=np.int32).ravel()

    # Reduce to match resampling data set
    rows_valid = rows[valid_input_index]
    cols_valid = cols[valid_input_index]

    # Get result using array indexing
    number_of_valid_points = valid_input_index.sum()
    index_mask = (index_array == number_of_valid_points)
    index_array[index_mask] = 0
    row_sample = rows_valid[index_array]
    col_sample = cols_valid[index_array]
    row_sample[index_mask] = -1
    col_sample[index_mask] = -1

    # Reshape to correct shape
    row_indices = row_sample.reshape(target_area_def.shape)
    col_indices = col_sample.reshape(target_area_def.shape)

    row_indices = _downcast_index_array(row_indices, source_area_def.shape[0])
    col_indices = _downcast_index_array(col_indices, source_area_def.shape[1])

    return row_indices, col_indices
Code Example #10
 def test_dtype(self):
     lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
     lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
     grid_def = geometry.GridDefinition(lons, lats)
     lons = numpy.asarray(lons, dtype='f4')
     lats = numpy.asarray(lats, dtype='f4')
     swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
     valid_input_index, valid_output_index, index_array, distance_array = \
         kd_tree.get_neighbour_info(swath_def,
                                    grid_def,
                                    50000, neighbours=1, segments=1)
Code Example #12
    def setUpClass(cls):
        cls.pts_irregular = (np.array([[-1., 1.], ]),
                             np.array([[1., 2.], ]),
                             np.array([[-2., -1.], ]),
                             np.array([[2., -4.], ]))
        cls.pts_vert_parallel = (np.array([[-1., 1.], ]),
                                 np.array([[1., 2.], ]),
                                 np.array([[-1., -1.], ]),
                                 np.array([[1., -2.], ]))
        cls.pts_both_parallel = (np.array([[-1., 1.], ]),
                                 np.array([[1., 1.], ]),
                                 np.array([[-1., -1.], ]),
                                 np.array([[1., -1.], ]))

        # Area definition with four pixels
        target_def = geometry.AreaDefinition('areaD',
                                             'Europe (3km, HRV, VTC)',
                                             'areaD',
                                             {'a': '6378144.0',
                                              'b': '6356759.0',
                                              'lat_0': '50.00',
                                              'lat_ts': '50.00',
                                              'lon_0': '8.00',
                                              'proj': 'stere'},
                                             4, 4,
                                             [-1370912.72,
                                              -909968.64000000001,
                                              1029087.28,
                                              1490031.3600000001])

        # Input data around the target pixel at 0.63388324, 55.08234642,
        in_shape = (100, 100)
        cls.data1 = np.ones((in_shape[0], in_shape[1]))
        cls.data2 = 2. * cls.data1
        cls.data3 = cls.data1 + 9.5
        lons, lats = np.meshgrid(np.linspace(-25., 40., num=in_shape[0]),
                                 np.linspace(45., 75., num=in_shape[1]))
        cls.swath_def = geometry.SwathDefinition(lons=lons, lats=lats)

        radius = 50e3
        cls.neighbours = 32
        input_idxs, output_idxs, idx_ref, dists = \
            kd_tree.get_neighbour_info(cls.swath_def, target_def,
                                       radius, neighbours=cls.neighbours,
                                       nprocs=1)
        input_size = input_idxs.sum()
        index_mask = (idx_ref == input_size)
        idx_ref = np.where(index_mask, 0, idx_ref)

        cls.input_idxs = input_idxs
        cls.target_def = target_def
        cls.idx_ref = idx_ref
Code Example #13
File: test_kd_tree.py  Project: ifenty/pyresample
    def test_custom_multi_from_sample(self):
        def wf1(dist):
            return 1 - dist / 100000.0

        def wf2(dist):
            return 1

        def wf3(dist):
            return np.cos(dist)**2

        data = np.fromfunction(lambda y, x: (y + x) * 10**-6, (5000, 100))
        lons = np.fromfunction(lambda y, x: 3 + (10.0 / 100) * x, (5000, 100))
        lats = np.fromfunction(lambda y, x: 75 - (50.0 / 5000) * y,
                               (5000, 100))
        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
        data_multi = np.column_stack(
            (data.ravel(), data.ravel(), data.ravel()))

        with catch_warnings(UserWarning) as w:
            valid_input_index, valid_output_index, index_array, distance_array = \
                kd_tree.get_neighbour_info(swath_def,
                                           self.area_def,
                                           50000, segments=1)
            self.assertFalse(len(w) != 1)
            self.assertFalse(('Possible more' not in str(w[0].message)))

        res = kd_tree.get_sample_from_neighbour_info(
            'custom', (800, 800),
            data_multi,
            valid_input_index,
            valid_output_index,
            index_array,
            distance_array,
            weight_funcs=[wf1, wf2, wf3])

        cross_sum = res.sum()

        expected = 1461.8428378742638
        self.assertAlmostEqual(cross_sum, expected)
        res = kd_tree.get_sample_from_neighbour_info(
            'custom', (800, 800),
            data_multi,
            valid_input_index,
            valid_output_index,
            index_array,
            distance_array,
            weight_funcs=[wf1, wf2, wf3])

        # Look for error where input data has been manipulated
        cross_sum = res.sum()
        expected = 1461.8428378742638
        self.assertAlmostEqual(cross_sum, expected)
Code Example #14
 def test_nearest_from_sample(self):
     data = numpy.fromfunction(lambda y, x: y * x, (50, 10))
     lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
     lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
     swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
     valid_input_index, valid_output_index, index_array, distance_array = kd_tree.get_neighbour_info(
         swath_def, self.area_def, 50000, neighbours=1, segments=1
     )
     res = kd_tree.get_sample_from_neighbour_info(
         "nn", (800, 800), data.ravel(), valid_input_index, valid_output_index, index_array
     )
     cross_sum = res.sum()
     expected = 15874591.0
     self.assertEqual(cross_sum, expected, msg="Swath resampling from neighbour info nearest failed")
Code Example #15
File: test_kd_tree.py  Project: sebastic/pyresample
 def test_nearest_from_sample(self):
     data = numpy.fromfunction(lambda y, x: y * x, (50, 10))
     lons = numpy.fromfunction(lambda y, x: 3 + x, (50, 10))
     lats = numpy.fromfunction(lambda y, x: 75 - y, (50, 10))
     swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
     valid_input_index, valid_output_index, index_array, distance_array = \
         kd_tree.get_neighbour_info(swath_def,
                                    self.area_def,
                                    50000, neighbours=1, segments=1)
     res = kd_tree.get_sample_from_neighbour_info('nn', (800, 800), data.ravel(),
                                                  valid_input_index, valid_output_index,
                                                  index_array)
     cross_sum = res.sum()
     expected = 15874591.0
     self.assertEqual(cross_sum, expected)
Code Example #16
File: projector.py  Project: junjie2008v/mpop
def calc_nearest_params(in_area, out_area, radius, nprocs=1):
    """Calculate projection parameters for nearest neighbour
    interpolation"""
    valid_index, valid_output_index, index_array, distance_array = \
        kd_tree.get_neighbour_info(in_area,
                                   out_area,
                                   radius,
                                   neighbours=1,
                                   nprocs=nprocs)
    del distance_array
    cache = {}
    cache['valid_index'] = valid_index
    cache['valid_output_index'] = valid_output_index
    cache['index_array'] = index_array

    return cache
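
A hedged sketch of how the returned cache would typically be consumed (in_area, out_area and swath_data are assumptions; kd_tree is imported as in the snippet above):

cache = calc_nearest_params(in_area, out_area, radius=10000)
result = kd_tree.get_sample_from_neighbour_info(
    'nn', out_area.shape, swath_data.ravel(),
    cache['valid_index'],
    cache['valid_output_index'],
    cache['index_array'],
    fill_value=None)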
Code Example #18
File: test_kd_tree.py  Project: pytroll/pyresample
    def test_custom_multi_from_sample(self):
        def wf1(dist):
            return 1 - dist / 100000.0

        def wf2(dist):
            return 1

        def wf3(dist):
            return np.cos(dist) ** 2

        data = np.fromfunction(lambda y, x: (y + x) * 10 ** -6, (5000, 100))
        lons = np.fromfunction(
            lambda y, x: 3 + (10.0 / 100) * x, (5000, 100))
        lats = np.fromfunction(
            lambda y, x: 75 - (50.0 / 5000) * y, (5000, 100))
        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
        data_multi = np.column_stack((data.ravel(), data.ravel(),
                                      data.ravel()))

        with catch_warnings(UserWarning) as w:
            valid_input_index, valid_output_index, index_array, distance_array = \
                kd_tree.get_neighbour_info(swath_def,
                                           self.area_def,
                                           50000, segments=1)
            self.assertFalse(len(w) != 1)
            self.assertFalse(('Possible more' not in str(w[0].message)))

        res = kd_tree.get_sample_from_neighbour_info('custom', (800, 800),
                                                     data_multi,
                                                     valid_input_index, valid_output_index,
                                                     index_array, distance_array,
                                                     weight_funcs=[wf1, wf2, wf3])

        cross_sum = res.sum()

        expected = 1461.8428378742638
        self.assertAlmostEqual(cross_sum, expected)
        res = kd_tree.get_sample_from_neighbour_info('custom', (800, 800),
                                                     data_multi,
                                                     valid_input_index, valid_output_index,
                                                     index_array, distance_array,
                                                     weight_funcs=[wf1, wf2, wf3])

        # Look for error where input data has been manipulated
        cross_sum = res.sum()
        expected = 1461.8428378742638
        self.assertAlmostEqual(cross_sum, expected)
Code Example #19
File: resample.py  Project: weizushuai/satpy
    def precompute(
            self, mask=None, radius_of_influence=10000, epsilon=0, reduce_data=True, nprocs=1, segments=None,
            cache_dir=False, **kwargs):
        """Create a KDTree structure and store it for later use.

        Note: The `mask` keyword should be provided if geolocation may be valid where data points are invalid.
        This defaults to the `mask` attribute of the `data` numpy masked array passed to the `resample` method.
        """

        del kwargs

        source_geo_def = mask_source_lonlats(self.source_geo_def, mask)

        kd_hash = self.get_hash(source_geo_def=source_geo_def,
                                radius_of_influence=radius_of_influence,
                                epsilon=epsilon)

        filename = self._create_cache_filename(cache_dir, kd_hash)
        self._read_params_from_cache(cache_dir, kd_hash, filename)

        if self.cache is not None:
            LOG.debug("Loaded kd-tree parameters")
            return self.cache
        else:
            LOG.debug("Computing kd-tree parameters")

        valid_input_index, valid_output_index, index_array, distance_array = \
            get_neighbour_info(source_geo_def,
                               self.target_geo_def,
                               radius_of_influence,
                               neighbours=1,
                               epsilon=epsilon,
                               reduce_data=reduce_data,
                               nprocs=nprocs,
                               segments=segments)

        # it's important here not to modify the existing cache dictionary.
        self.cache = {"valid_input_index": valid_input_index,
                      "valid_output_index": valid_output_index,
                      "index_array": index_array,
                      "distance_array": distance_array,
                      "source_geo_def": source_geo_def,
                      }

        self._update_caches(kd_hash, cache_dir, filename)

        return self.cache
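
For context, a hedged sketch (not satpy's actual compute step) of how the cached neighbour info returned by precompute could be applied to a data array; resampler, data and np are assumptions, and get_sample_from_neighbour_info is assumed to be imported from pyresample.kd_tree as in the module above:

cache = resampler.precompute(radius_of_influence=10000)
result = get_sample_from_neighbour_info(
    'nn', resampler.target_geo_def.shape, data.ravel(),
    cache['valid_input_index'],
    cache['valid_output_index'],
    cache['index_array'],
    fill_value=np.nan)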
Code Example #20
 def setup(self):
     from pyresample import geometry, kd_tree, bilinear
     input_def = geometry.SwathDefinition(lons=self.input_grid.lon.values,
                                          lats=self.input_grid.lat.values)
     output_def = geometry.SwathDefinition(lons=self.output_grid.lon.values,
                                           lats=self.output_grid.lat.values)
     if not self.method or self.method == 'nearest':
         # Set default neighbours used in stencil to 1. Normal default is
         # 8, which won't work if the input and output grids are similar in
         # size and resolution.
         self._params.setdefault('neighbours', 1)
         self._args = kd_tree.get_neighbour_info(input_def, output_def,
                                                 50000, **self._params)
         self._regridder = kd_tree.get_sample_from_neighbour_info
     else:
         raise NotImplementedError(
             'Only nearest-neighbor regridding is '
             'currently supported for pyresample backend')
Code Example #21
File: satnav.py  Project: jflemer-ndp/GeoIPS
    def _calc_lines_samples(self, sector):
        # Allocate the full disk area definition
        fldk_ad = SwathDefinition(
            np.ma.masked_less_equal(self._lons_fd, -999.0),
            np.ma.masked_less_equal(self._lats_fd, -999.0))
        ad = sector.area_definition

        # Determine the nominal spatial resolution at nadir
        shape = self._lons_fd.shape
        # Resolution in meters
        latres = np.abs(self._lats_fd[shape[0] // 2, shape[1] // 2] -
                        self._lats_fd[shape[0] // 2 + 1,
                                      shape[1] // 2]) * 111.1 * 1000
        lonres = np.abs(self._lons_fd[shape[0] // 2, shape[1] // 2] -
                        self._lons_fd[shape[0] // 2,
                                      shape[1] // 2 + 1]) * 111.1 * 1000
        # Use larger of the two values times 10 as ROI for interpolation
        # Would be nice to use something more dynamic to save CPU time here
        # Kind of stuck as long as we use pyresample
        roi = 10 * max(latres, lonres)

        # Do the first step of the NN interpolation
        valid_input_index, valid_output_index, index_array, distance_array = \
            get_neighbour_info(fldk_ad, ad, radius_of_influence=roi, neighbours=1, nprocs=nproc)
        if not valid_input_index.any():
            raise SatNavError('{} sector does not intersect data.'.format(
                sector.name))

        # Determine which lines and samples intersect our domain.
        good_lines, good_samples = np.where(valid_input_index.reshape(shape))
        # When get_neighbour_info does not find a good value for a specific location it
        # fills index_array with the maximum index + 1.  So, just throw away all of the
        # out of range indexes.
        index_mask = (index_array == len(good_lines))
        lines = np.empty(ad.size, dtype=np.int64)
        lines[index_mask] = -999
        lines[~index_mask] = good_lines[index_array[~index_mask]]
        samples = np.empty(ad.size, dtype=np.int64)
        samples[index_mask] = -999
        samples[~index_mask] = good_samples[index_array[~index_mask]]

        return lines, samples
Code Example #22
    def test_nearest_from_sample_np_dtypes(self):
        lons = np.fromfunction(lambda y, x: 3 + x, (50, 10))
        lats = np.fromfunction(lambda y, x: 75 - y, (50, 10))
        swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
        valid_input_index, valid_output_index, index_array, distance_array = \
            kd_tree.get_neighbour_info(swath_def,
                                       self.area_def,
                                       50000, neighbours=1, segments=1)

        for dtype in [np.uint16, np.float32]:
            with self.subTest(dtype):
                data = np.fromfunction(lambda y, x: y * x, (50, 10)).astype(dtype)
                fill_value = dtype(0.0)
                res = \
                    kd_tree.get_sample_from_neighbour_info('nn', (800, 800),
                                                           data.ravel(),
                                                           valid_input_index,
                                                           valid_output_index,
                                                           index_array,
                                                           fill_value=fill_value)
                cross_sum = res.sum()
                expected = 15874591.0
                self.assertEqual(cross_sum, expected)
Code Example #23
File: in_situ.py  Project: simonpf/joint_flight
def load_drop_sonde_data(path, results=None):

    data = []
    files = Path(path).glob("faam-dropsonde*.nc")

    for f in files:
        ds_data = xr.load_dataset(f)

        valid = (-90 <= ds_data["lat"].data) * (90 >= ds_data["lat"].data)
        ds_data = ds_data.loc[{"time": valid}]

        if results:
            lons = results["longitude"].data
            lats = results["latitude"].data
            retrieval_swath = geometry.SwathDefinition(lons=lons, lats=lats)
            lons = ds_data["lon"].data
            lats = ds_data["lat"].data
            ds_swath = geometry.SwathDefinition(lons=lons, lats=lats)
            ni = kd_tree.get_neighbour_info(retrieval_swath,
                                            ds_swath,
                                            radius_of_influence=100e3,
                                            neighbours=1)
            (valid_input_index, valid_output_index, index_array,
             distance_array) = ni

            n = ds_data.time.size
            n_levels = results.z.size

            t_r = np.zeros(n)
            t_a = np.zeros(n)
            h2o_r = np.zeros(n)
            h2o_a = np.zeros(n)

            t_z = np.zeros((n, n_levels))
            t_a_z = np.zeros((n, n_levels))
            h2o_z = np.zeros((n, n_levels))
            h2o_a_z = np.zeros((n, n_levels))
            z = np.zeros((n, n_levels))
            d = np.zeros((n))

            lats_r = kd_tree.get_sample_from_neighbour_info(
                "nn",
                (n, ),
                results["latitude"].data,
                valid_input_index,
                valid_output_index,
                index_array,
                fill_value=np.nan,
            )
            lons_r = kd_tree.get_sample_from_neighbour_info(
                "nn",
                (n, ),
                results["longitude"].data,
                valid_input_index,
                valid_output_index,
                index_array,
                fill_value=np.nan,
            )

            d = kd_tree.get_sample_from_neighbour_info(
                "nn",
                (n, ),
                results["d"].data,
                valid_input_index,
                valid_output_index,
                index_array,
                fill_value=np.nan,
            )

            for i in range(n_levels):
                # t_z[:, i] = kd_tree.get_sample_from_neighbour_info(
                #    "nn",
                #    (n,),
                #    results["temperature"][:, i].data,
                #    valid_input_index,
                #    valid_output_index,
                #    index_array,
                #    fill_value=np.nan)
                # t_a_z[:, i] = kd_tree.get_sample_from_neighbour_info(
                #    "nn",
                #    (n,),
                #    results["temperature_a_priori"][:, i].data,
                #    valid_input_index,
                #    valid_output_index,
                #    index_array,
                #    fill_value=np.nan)
                h2o_z[:, i] = kd_tree.get_sample_from_neighbour_info(
                    "nn",
                    (n, ),
                    results["H2O"][:, i].data,
                    valid_input_index,
                    valid_output_index,
                    index_array,
                    fill_value=np.nan,
                )
                h2o_a_z[:, i] = kd_tree.get_sample_from_neighbour_info(
                    "nn",
                    (n, ),
                    results["H2O_a_priori"][:, i].data,
                    valid_input_index,
                    valid_output_index,
                    index_array,
                    fill_value=np.nan,
                )
                z[:, i] = kd_tree.get_sample_from_neighbour_info(
                    "nn",
                    (n, ),
                    results["altitude"][:, i].data,
                    valid_input_index,
                    valid_output_index,
                    index_array,
                    fill_value=np.nan,
                )

            for i in range(n):
                if np.isnan(ds_data["alt"][i]):
                    t_r[i] = np.nan
                    t_a[i] = np.nan
                    h2o_r[i] = np.nan
                    h2o_a[i] = np.nan
                    continue

                t_r[i] = np.interp(ds_data["alt"][i], z[i, :], t_z[i, :])
                t_a[i] = np.interp(ds_data["alt"][i], z[i, :], t_a_z[i, :])
                h2o_r[i] = np.interp(ds_data["alt"][i], z[i, :], h2o_z[i, :])
                h2o_a[i] = np.interp(ds_data["alt"][i], z[i, :], h2o_a_z[i, :])

            ds_data["t_retrieved"] = (("time", ), t_r)
            ds_data["t_a_priori"] = (("time", ), t_a)
            ds_data["h2o_retrieved"] = (("time", ), h2o_r)
            ds_data["h2o_a_priori"] = (("time", ), h2o_a)
            ds_data["lons_r"] = (("time"), lons_r)
            ds_data["lats_r"] = (("time"), lats_r)
            ds_data["d"] = (("time"), d)
        data.append(ds_data)
    return data
Code Example #24
File: resample.py  Project: wpreimes/repurpose
def resample_to_grid_only_valid_return(input_data,
                                       src_lon,
                                       src_lat,
                                       target_lon,
                                       target_lat,
                                       methods='nn',
                                       weight_funcs=None,
                                       min_neighbours=1,
                                       search_rad=18000,
                                       neighbours=8,
                                       fill_values=None):
    """
    Resample data from a dictionary of numpy arrays to the given grid using
    pyresample.

    Searches for the neighbours and then resamples the data to the target
    grid if at least min_neighbours neighbours are found.

    Parameters
    ----------
    input_data : dict of numpy.arrays
    src_lon : numpy.array
        longitudes of the input data
    src_lat : numpy.array
        latitudes of the input data
    target_lon : numpy.array
        longitudes of the output data
    target_lat : numpy.array
        latitudes of the output data
    methods : string or dict, optional
        method of spatial averaging. this is given to pyresample
        and can be
        'nn' : nearest neighbour
        'custom' : custom weight function has to be supplied in weight_funcs
        see pyresample documentation for more details
        can also be a dictionary with a method for each array in input data dict
    weight_funcs : function or dict of functions, optional
        if method is 'custom' a function like func(distance) has to be given
        can also be a dictionary with a function for each array in input data dict
    min_neighbours: int, optional
        if given then only points with at least this number of neighbours will be
        resampled
        Default : 1
    search_rad : float, optional
        search radius in meters of neighbour search
        Default : 18000
    neighbours : int, optional
        maximum number of neighbours to look for for each input grid point
        Default : 8
    fill_values : number or dict, optional
        if given the output array will be filled with this value if no valid
        resampled value could be computed, if not a masked array will be returned
        can also be a dict with a fill value for each variable
    Returns
    -------
    data : dict of numpy.arrays
        resampled data on part of the target grid over which data was found
    mask: numpy.ndarray
        boolean mask into target grid that specifies where data was resampled

    Raises
    ------
    ValueError :
        if empty dataset is resampled
    """
    output_data = {}

    if target_lon.ndim == 2:
        target_lat = target_lat.ravel()
        target_lon = target_lon.ravel()

    input_swath = geometry.SwathDefinition(src_lon, src_lat)
    output_swath = geometry.SwathDefinition(target_lon, target_lat)

    (valid_input_index, valid_output_index, index_array,
     distance_array) = kd_tree.get_neighbour_info(input_swath,
                                                  output_swath,
                                                  search_rad,
                                                  neighbours=neighbours)

    # throw away points with less than min_neighbours neighbours
    # find points with valid neighbours
    # get number of found neighbours for each grid point/row
    if neighbours > 1:
        nr_neighbours = np.isfinite(distance_array).sum(1)
        neigh_condition = nr_neighbours >= min_neighbours
        mask = np.invert(neigh_condition)
        enough_neighbours = np.nonzero(neigh_condition)[0]
    if neighbours == 1:
        nr_neighbours = np.isfinite(distance_array)
        neigh_condition = nr_neighbours >= min_neighbours
        mask = np.invert(neigh_condition)
        enough_neighbours = np.nonzero(neigh_condition)[0]
        distance_array = np.reshape(distance_array,
                                    (distance_array.shape[0], 1))
        index_array = np.reshape(index_array, (index_array.shape[0], 1))

    if enough_neighbours.size == 0:
        raise ValueError("No points with at least %d neighbours found" %
                         min_neighbours)

    # remove neighbourhood info of input grid points that have no neighbours to not have to
    # resample to whole output grid for small input grid file
    distance_array = distance_array[enough_neighbours, :]
    index_array = index_array[enough_neighbours, :]
    valid_output_index = valid_output_index[enough_neighbours]

    for param in input_data:

        data = input_data[param]

        if type(methods) == dict:
            method = methods[param]
        else:
            method = methods

        if method != 'nn':
            if type(weight_funcs) == dict:
                weight_func = weight_funcs[param]
            else:
                weight_func = weight_funcs
        else:
            weight_func = None

        neigh_slice = slice(None, None, None)
        # check if method is nn, if so only use first row of index_array and
        # distance_array
        if method == 'nn':
            neigh_slice = (slice(None, None, None), 0)

        if type(fill_values) == dict:
            fill_value = fill_values[param]
        else:
            fill_value = fill_values

        output_array = kd_tree.get_sample_from_neighbour_info(
            method,
            enough_neighbours.shape,
            data,
            valid_input_index,
            valid_output_index,
            index_array[neigh_slice],
            distance_array[neigh_slice],
            weight_funcs=weight_func,
            fill_value=fill_value)

        output_data[param] = output_array

    return output_data, mask
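
An illustrative call of the function above (all inputs are made-up): a single variable on scattered source points is resampled onto a regular 0.25-degree grid; the returned dict holds values only for target points where enough neighbours were found, and the returned mask flags the target grid points that received no data.

import numpy as np

# Hypothetical source points along a line at ~50.05 deg N.
src_lon = np.arange(9.5, 10.51, 0.1)
src_lat = np.full_like(src_lon, 50.05)
input_data = {'var': np.arange(src_lon.size, dtype=float)}

# Regular 0.25-degree target grid around the source points.
target_lon, target_lat = np.meshgrid(np.arange(9.0, 11.0, 0.25),
                                     np.arange(49.0, 51.0, 0.25))

data, mask = resample_to_grid_only_valid_return(
    input_data, src_lon, src_lat, target_lon, target_lat,
    methods='nn', search_rad=18000)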
Code Example #25
class Test(unittest.TestCase):

    pts_irregular = (np.array([
        [-1., 1.],
    ]), np.array([
        [1., 2.],
    ]), np.array([
        [-2., -1.],
    ]), np.array([
        [2., -4.],
    ]))
    pts_vert_parallel = (np.array([
        [-1., 1.],
    ]), np.array([
        [1., 2.],
    ]), np.array([
        [-1., -1.],
    ]), np.array([
        [1., -2.],
    ]))
    pts_both_parallel = (np.array([
        [-1., 1.],
    ]), np.array([
        [1., 1.],
    ]), np.array([
        [-1., -1.],
    ]), np.array([
        [1., -1.],
    ]))

    # Area definition with four pixels
    target_def = geometry.AreaDefinition(
        'areaD', 'Europe (3km, HRV, VTC)', 'areaD', {
            'a': '6378144.0',
            'b': '6356759.0',
            'lat_0': '50.00',
            'lat_ts': '50.00',
            'lon_0': '8.00',
            'proj': 'stere'
        }, 4, 4,
        [-1370912.72, -909968.64000000001, 1029087.28, 1490031.3600000001])

    # Input data around the target pixel at 0.63388324, 55.08234642,
    in_shape = (100, 100)
    data1 = np.ones((in_shape[0], in_shape[1]))
    data2 = 2. * data1
    lons, lats = np.meshgrid(np.linspace(-5., 5., num=in_shape[0]),
                             np.linspace(50., 60., num=in_shape[1]))
    swath_def = geometry.SwathDefinition(lons=lons, lats=lats)

    radius = 50e3
    neighbours = 32
    input_idxs, output_idxs, idx_ref, dists = \
        kd_tree.get_neighbour_info(swath_def, target_def,
                                   radius, neighbours=neighbours,
                                   nprocs=1)
    input_size = input_idxs.sum()
    index_mask = (idx_ref == input_size)
    idx_ref = np.where(index_mask, 0, idx_ref)

    def test_calc_abc(self):
        # No np.nan inputs
        pt_1, pt_2, pt_3, pt_4 = self.pts_irregular
        res = bil._calc_abc(pt_1, pt_2, pt_3, pt_4, 0.0, 0.0)
        self.assertFalse(np.isnan(res[0]))
        self.assertFalse(np.isnan(res[1]))
        self.assertFalse(np.isnan(res[2]))
        # np.nan input -> np.nan output
        res = bil._calc_abc(np.array([[np.nan, np.nan]]), pt_2, pt_3, pt_4,
                            0.0, 0.0)
        self.assertTrue(np.isnan(res[0]))
        self.assertTrue(np.isnan(res[1]))
        self.assertTrue(np.isnan(res[2]))

    def test_get_ts_irregular(self):
        res = bil._get_ts_irregular(self.pts_irregular[0],
                                    self.pts_irregular[1],
                                    self.pts_irregular[2],
                                    self.pts_irregular[3], 0., 0.)
        self.assertEqual(res[0], 0.375)
        self.assertEqual(res[1], 0.5)
        res = bil._get_ts_irregular(self.pts_vert_parallel[0],
                                    self.pts_vert_parallel[1],
                                    self.pts_vert_parallel[2],
                                    self.pts_vert_parallel[3], 0., 0.)
        self.assertTrue(np.isnan(res[0]))
        self.assertTrue(np.isnan(res[1]))

    def test_get_ts_uprights_parallel(self):
        res = bil._get_ts_uprights_parallel(self.pts_vert_parallel[0],
                                            self.pts_vert_parallel[1],
                                            self.pts_vert_parallel[2],
                                            self.pts_vert_parallel[3], 0., 0.)
        self.assertEqual(res[0], 0.5)
        self.assertEqual(res[1], 0.5)

    def test_get_ts_parallellogram(self):
        res = bil._get_ts_parallellogram(self.pts_both_parallel[0],
                                         self.pts_both_parallel[1],
                                         self.pts_both_parallel[2], 0., 0.)
        self.assertEqual(res[0], 0.5)
        self.assertEqual(res[1], 0.5)

    def test_get_ts(self):
        out_x = np.array([[0.]])
        out_y = np.array([[0.]])
        res = bil._get_ts(self.pts_irregular[0], self.pts_irregular[1],
                          self.pts_irregular[2], self.pts_irregular[3], out_x,
                          out_y)
        self.assertEqual(res[0], 0.375)
        self.assertEqual(res[1], 0.5)
        res = bil._get_ts(self.pts_both_parallel[0], self.pts_both_parallel[1],
                          self.pts_both_parallel[2], self.pts_both_parallel[3],
                          out_x, out_y)
        self.assertEqual(res[0], 0.5)
        self.assertEqual(res[1], 0.5)
        res = bil._get_ts(self.pts_vert_parallel[0], self.pts_vert_parallel[1],
                          self.pts_vert_parallel[2], self.pts_vert_parallel[3],
                          out_x, out_y)
        self.assertEqual(res[0], 0.5)
        self.assertEqual(res[1], 0.5)

    def test_solve_quadratic(self):
        res = bil._solve_quadratic(1, 0, 0)
        self.assertEqual(res[0], 0.0)
        res = bil._solve_quadratic(1, 2, 1)
        self.assertTrue(np.isnan(res[0]))
        res = bil._solve_quadratic(1, 2, 1, min_val=-2.)
        self.assertEqual(res[0], -1.0)
        # Test that small adjustments work
        pt_1, pt_2, pt_3, pt_4 = self.pts_vert_parallel
        pt_1 = self.pts_vert_parallel[0].copy()
        pt_1[0][0] += 1e-7
        res = bil._calc_abc(pt_1, pt_2, pt_3, pt_4, 0.0, 0.0)
        res = bil._solve_quadratic(res[0], res[1], res[2])
        self.assertAlmostEqual(res[0], 0.5, 5)
        res = bil._calc_abc(pt_1, pt_3, pt_2, pt_4, 0.0, 0.0)
        res = bil._solve_quadratic(res[0], res[1], res[2])
        self.assertAlmostEqual(res[0], 0.5, 5)

    def test_get_output_xy(self):
        proj = Proj(self.target_def.proj4_string)
        out_x, out_y = bil._get_output_xy(self.target_def, proj)
        self.assertTrue(out_x.all())
        self.assertTrue(out_y.all())

    def test_get_input_xy(self):
        proj = Proj(self.target_def.proj4_string)
        in_x, in_y = bil._get_input_xy(self.swath_def, proj,
                                       self.input_idxs, self.idx_ref)
        self.assertTrue(in_x.all())
        self.assertTrue(in_y.all())

    def test_get_bounding_corners(self):
        proj = Proj(self.target_def.proj4_string)
        out_x, out_y = bil._get_output_xy(self.target_def, proj)
        in_x, in_y = bil._get_input_xy(self.swath_def, proj, self.input_idxs,
                                       self.idx_ref)
        res = bil._get_bounding_corners(in_x, in_y, out_x, out_y,
                                        self.neighbours, self.idx_ref)
        for i in range(len(res) - 1):
            pt_ = res[i]
            for j in range(2):
                # Only the sixth output location has four valid corners
                self.assertTrue(np.isfinite(pt_[5, j]))

    def test_get_bil_info(self):
        t__, s__, input_idxs, idx_arr = bil.get_bil_info(
            self.swath_def, self.target_def)
        # Only 6th index should have valid values
        for i in range(len(t__)):
            if i == 5:
                self.assertAlmostEqual(t__[i], 0.684850870155, 5)
                self.assertAlmostEqual(s__[i], 0.775433912393, 5)
            else:
                self.assertTrue(np.isnan(t__[i]))
                self.assertTrue(np.isnan(s__[i]))

    def test_get_sample_from_bil_info(self):
        t__, s__, input_idxs, idx_arr = bil.get_bil_info(
            self.swath_def, self.target_def)
        # Sample from data1
        res = bil.get_sample_from_bil_info(self.data1.ravel(), t__, s__,
                                           input_idxs, idx_arr)
        self.assertEqual(res[5], 1.)
        # Sample from data2
        res = bil.get_sample_from_bil_info(self.data2.ravel(), t__, s__,
                                           input_idxs, idx_arr)
        self.assertEqual(res[5], 2.)
        # Reshaping
        res = bil.get_sample_from_bil_info(self.data2.ravel(),
                                           t__,
                                           s__,
                                           input_idxs,
                                           idx_arr,
                                           output_shape=self.target_def.shape)
        res = res.shape
        self.assertEqual(res[0], self.target_def.shape[0])
        self.assertEqual(res[1], self.target_def.shape[1])

    def test_resample_bilinear(self):
        # Single array
        res = bil.resample_bilinear(self.data1, self.swath_def,
                                    self.target_def)
        self.assertEqual(res.shape, self.target_def.shape)
        # There should be only one pixel with value 1, all others are 0
        self.assertEqual(res.sum(), 1)

        # Single array with masked output
        res = bil.resample_bilinear(self.data1,
                                    self.swath_def,
                                    self.target_def,
                                    fill_value=None)
        self.assertTrue(hasattr(res, 'mask'))
        # There should be only one valid pixel
        self.assertEqual(self.target_def.size - res.mask.sum(), 1)

        # Two stacked arrays
        data = np.dstack((self.data1, self.data2))
        res = bil.resample_bilinear(data, self.swath_def, self.target_def)
        shp = res.shape
        self.assertEqual(shp[0:2], self.target_def.shape)
        self.assertEqual(shp[-1], 2)
Code example #26
0
File: projector.py Project: tparker-usgs/mpop
    def __init__(self, in_area, out_area,
                 in_latlons=None, mode=None,
                 radius=10000, nprocs=1):

        if (mode is not None and
                mode not in ["quick", "nearest", "ewa", "bilinear"]):
            raise ValueError("Projector mode must be one of 'nearest', "
                             "'quick', 'ewa', 'bilinear'")

        self.area_file = get_area_file()

        self.in_area = None
        self.out_area = None
        self._cache = None
        self._filename = None
        self.mode = "quick"
        self.radius = radius
        self.conf = ConfigParser.ConfigParser()
        self.conf.read(os.path.join(CONFIG_PATH, "mpop.cfg"))

        # TODO:
        # - Rework so that in_area and out_area can be lonlats.
        # - Add a recompute flag ?

        # Setting up the input area
        try:
            self.in_area = get_area_def(in_area)
            in_id = in_area
        except (utils.AreaNotFound, AttributeError):
            try:
                in_id = in_area.area_id
                self.in_area = in_area
            except AttributeError:
                try:
                    # TODO: Note that latlons are in order (lons, lats)
                    self.in_area = geometry.SwathDefinition(lons=in_latlons[0],
                                                            lats=in_latlons[1])
                    in_id = in_area
                except TypeError:
                    raise utils.AreaNotFound("Input area " +
                                             str(in_area) +
                                             " must be defined in " +
                                             self.area_file +
                                             ", be an area object"
                                             " or longitudes/latitudes must be "
                                             "provided.")

        # Setting up the output area
        try:
            self.out_area = get_area_def(out_area)
            out_id = out_area
        except (utils.AreaNotFound, AttributeError):
            try:
                out_id = out_area.area_id
                self.out_area = out_area
            except AttributeError:
                raise utils.AreaNotFound("Output area " +
                                         str(out_area) +
                                         " must be defined in " +
                                         self.area_file + " or "
                                         "be an area object.")

        # if self.in_area == self.out_area:
        #    return

        # choosing the right mode if necessary
        if mode is None:
            try:
                dicts = in_area.proj_dict, out_area.proj_dict
                del dicts
                self.mode = "quick"
            except AttributeError:
                self.mode = "nearest"
        else:
            self.mode = mode

        filename = (in_id + "2" + out_id + "_" +
                    str(_get_area_hash(self.in_area)) + "to" +
                    str(_get_area_hash(self.out_area)) + "_" +
                    self.mode + ".npz")

        projections_directory = "/var/tmp"
        try:
            projections_directory = self.conf.get("projector",
                                                  "projections_directory")
        except ConfigParser.NoSectionError:
            pass

        self._filename = os.path.join(projections_directory, filename)

        try:
            self._cache = {}
            self._file_cache = np.load(self._filename)
        except:
            logger.info("Computing projection from %s to %s...",
                        in_id, out_id)

            if self.mode == "nearest":
                valid_index, valid_output_index, index_array, distance_array = \
                    kd_tree.get_neighbour_info(self.in_area,
                                               self.out_area,
                                               self.radius,
                                               neighbours=1,
                                               nprocs=nprocs)
                del distance_array
                self._cache = {}
                self._cache['valid_index'] = valid_index
                self._cache['valid_output_index'] = valid_output_index
                self._cache['index_array'] = index_array

            elif self.mode == "quick":
                ridx, cidx = \
                    utils.generate_quick_linesample_arrays(self.in_area,
                                                           self.out_area)
                self._cache = {}
                self._cache['row_idx'] = ridx
                self._cache['col_idx'] = cidx

            elif self.mode == "ewa":
                from pyresample.ewa import ll2cr
                swath_points_in_grid, cols, rows = ll2cr(self.in_area,
                                                         self.out_area)
                self._cache = {}
                # self._cache['ewa_swath_points_in_grid'] = \
                #     swath_points_in_grid
                self._cache['ewa_cols'] = cols
                self._cache['ewa_rows'] = rows

            elif self.mode == "bilinear":

                bilinear_t, bilinear_s, input_idxs, idx_arr = \
                    get_bil_info(self.in_area, self.out_area,
                                 self.radius, neighbours=32,
                                 nprocs=nprocs, masked=False)

                self._cache = {}
                self._cache['bilinear_s'] = bilinear_s
                self._cache['bilinear_t'] = bilinear_t
                self._cache['input_idxs'] = input_idxs
                self._cache['idx_arr'] = idx_arr
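
The constructor above only fills self._cache with precomputed resampling information; the actual remapping happens later. As a hedged sketch (not mpop code; cache, data and out_area are placeholder names), this is how a nearest-neighbour cache with the keys stored above is typically consumed with pyresample, so the kd-tree search never has to be repeated per dataset:

from pyresample import kd_tree

def apply_nearest_cache(cache, data, out_area, fill_value=0):
    # Reuse the stored kd-tree indices to remap `data` onto `out_area`
    # without recomputing the neighbour search.
    return kd_tree.get_sample_from_neighbour_info(
        'nn', out_area.shape, data,
        cache['valid_index'],
        cache['valid_output_index'],
        cache['index_array'],
        fill_value=fill_value)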
Code example #27
0
                llLon, llLat, urLon, urLat = np.min(lon), np.min(lat), np.max(
                    lon), np.max(lat)
                areaExtent = (llLon, llLat, urLon, urLat)
                projDict = {'proj': proj, 'datum': 'WGS84', 'units': 'degree'}
                areaDef = geom.AreaDefinition(epsg, pName, proj, projDict,
                                              cols, rows, areaExtent)
                ps = np.min([areaDef.pixel_size_x,
                             areaDef.pixel_size_y])  # Square pixels

            cols = int(round((areaExtent[2] - areaExtent[0]) /
                             ps))  # Calculate the output cols
            rows = int(round((areaExtent[3] - areaExtent[1]) /
                             ps))  # Calculate the output rows
            areaDef = geom.AreaDefinition(epsg, pName, proj, projDict, cols,
                                          rows, areaExtent)
            index, outdex, indexArr, distArr = kdt.get_neighbour_info(
                swathDef, areaDef, 210, neighbours=1)
        else:
            print('ECO1BGEO File not found for {}'.format(e))
            continue
# ------------------LOOP THROUGH SDS CONVERT SWATH2GRID AND APPLY GEOREFERENCING----------------- #
    for s in ecoSDS:
        ecoSD = f[s].value  # Create array and read dimensions

        # Read SDS Attributes if available
        try:
            fv = int(f[s].attrs['_FillValue'])
        except KeyError:
            fv = None
        except ValueError:
            if f[s].attrs['_FillValue'] == b'n/a':
                fv = None
Code example #28
0
File: resample.py Project: IsabellaP/pytesmo
def resample_to_grid(input_data, src_lon, src_lat, target_lon, target_lat,
                     methods='nn', weight_funcs=None,
                     min_neighbours=1, search_rad=18000, neighbours=8,
                     fill_values=None):
    """
    resamples data from dictionary of numpy arrays using pyresample
    to given grid.
    Searches for the neighbours and then resamples the data
    to the grid given in togrid if at least
    min_neighbours neighbours are found

    Parameters
    ----------
    input_data : dict of numpy.arrays
    src_lon : numpy.array
        longitudes of the input data
    src_lat : numpy.array
        src_latitudes of the input data
    target_lon : numpy.array
        longitudes of the output data
    target_src_lat : numpy.array
        src_latitudes of the output data
    methods : string or dict, optional
        method of spatial averaging. this is given to pyresample
        and can be
        'nn' : nearest neighbour
        'custom' : custom weight function has to be supplied in weight_funcs
        see pyresample documentation for more details
        can also be a dictionary with a method for each array in input data dict
    weight_funcs : function or dict of functions, optional
        if method is 'custom' a function like func(distance) has to be given
        can also be a dictionary with a function for each array in input data dict
    min_neighbours: int, optional
        if given then only points with at least this number of neighbours will be
        resampled
        Default : 1
    search_rad : float, optional
        search radius in meters of neighbour search
        Default : 18000
    neighbours : int, optional
        maximum number of neighbours to look for for each input grid point
        Default : 8
    fill_values : number or dict, optional
        if given the output array will be filled with this value if no valid
        resampled value could be computed, if not a masked array will be returned
        can also be a dict with a fill value for each variable
    Returns
    -------
    data : dict of numpy.arrays
        resampled data on given grid
    Raises
    ------
    ValueError :
        if empty dataset is resampled
    """
    output_data = {}

    output_shape = target_lat.shape
    if target_lon.ndim == 2:
        target_lat = target_lat.ravel()
        target_lon = target_lon.ravel()

    input_swath = geometry.SwathDefinition(src_lon, src_lat)
    output_swath = geometry.SwathDefinition(target_lon, target_lat)

    (valid_input_index,
     valid_output_index,
     index_array,
     distance_array) = kd_tree.get_neighbour_info(input_swath,
                                                  output_swath,
                                                  search_rad,
                                                  neighbours=neighbours)

    # throw away points with less than min_neighbours neighbours
    # find points with valid neighbours
    # get number of found neighbours for each grid point/row
    if neighbours > 1:
        nr_neighbours = np.isfinite(distance_array).sum(1)
        neigh_condition = nr_neighbours >= min_neighbours
        mask = np.invert(neigh_condition)
        enough_neighbours = np.nonzero(neigh_condition)[0]
    if neighbours == 1:
        nr_neighbours = np.isfinite(distance_array)
        neigh_condition = nr_neighbours >= min_neighbours
        mask = np.invert(neigh_condition)
        enough_neighbours = np.nonzero(neigh_condition)[0]
        distance_array = np.reshape(
            distance_array, (distance_array.shape[0], 1))
        index_array = np.reshape(index_array, (index_array.shape[0], 1))

    if enough_neighbours.size == 0:
        raise ValueError(
            "No points with at least %d neighbours found" % min_neighbours)

    # remove neighbourhood info of input grid points that have no neighbours to not have to
    # resample to whole output grid for small input grid file
    distance_array = distance_array[enough_neighbours, :]
    index_array = index_array[enough_neighbours, :]
    valid_output_index = valid_output_index[enough_neighbours]

    for param in input_data:

        data = input_data[param]

        if isinstance(methods, dict):
            method = methods[param]
        else:
            method = methods

        if method != 'nn':
            if isinstance(weight_funcs, dict):
                weight_func = weight_funcs[param]
            else:
                weight_func = weight_funcs
        else:
            weight_func = None

        if isinstance(fill_values, dict):
            fill_value = fill_values[param]
        else:
            fill_value = fill_values

        # construct arrays in output grid form
        if fill_value is not None:
            output_array = np.zeros(
                output_swath.shape, dtype=np.float64) + fill_value
        else:
            output_array = np.zeros(output_swath.shape, dtype=np.float64)
            output_array = np.ma.array(output_array, mask=mask)

        neigh_slice = slice(None, None, None)
        # check if method is nn, if so only use first row of index_array and
        # distance_array
        if method == 'nn':
            neigh_slice = (slice(None, None, None), 0)

        output_array[enough_neighbours] = kd_tree.get_sample_from_neighbour_info(
            method,
            enough_neighbours.shape,
            data,
            valid_input_index,
            valid_output_index,
            index_array[neigh_slice],
            distance_array[neigh_slice],
            weight_funcs=weight_func,
            fill_value=fill_value)

        output_data[param] = output_array.reshape(output_shape)

    return output_data
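
A hedged usage sketch for the function above: resample two scattered fields onto a small regular lon/lat grid. The point count, values and grid spacing are made up purely for illustration; only the call itself follows the definition above.

import numpy as np

src_lon = np.random.uniform(0., 10., 500)
src_lat = np.random.uniform(50., 55., 500)
fields = {'sm': np.random.rand(500), 'flag': np.random.rand(500)}

target_lon, target_lat = np.meshgrid(np.arange(0., 10., 0.25),
                                     np.arange(50., 55., 0.25))

gridded = resample_to_grid(fields, src_lon, src_lat, target_lon, target_lat,
                           methods='nn', search_rad=25000)
# each resampled variable now has the shape of the target grid
assert gridded['sm'].shape == target_lat.shape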
Code example #29
0
def get_bil_info(source_geo_def, target_area_def, radius=50e3, neighbours=32,
                 nprocs=1, masked=False, reduce_data=True, segments=None,
                 epsilon=0):
    """Calculate information needed for bilinear resampling.

    Parameters
    ----------
    source_geo_def : object
        Geometry definition of source data
    target_area_def : object
        Geometry definition of target area
    radius : float, optional
        Cut-off distance in meters
    neighbours : int, optional
        Number of neighbours to consider for each grid point when
        searching the closest corner points
    nprocs : int, optional
        Number of processor cores to be used for getting neighbour info
    masked : bool, optional
        If true, return masked arrays, else return np.nan values for
        invalid points (default)
    reduce_data : bool, optional
        Perform initial coarse reduction of source dataset in order
        to reduce execution time
    segments : int or None
        Number of segments to use when resampling.
        If set to None an estimate will be calculated
    epsilon : float, optional
        Allowed uncertainty in meters. Increasing uncertainty
        reduces execution time

    Returns
    -------
    t__ : numpy array
        Vertical fractional distances from corner to the new points
    s__ : numpy array
        Horizontal fractional distances from corner to the new points
    input_idxs : numpy array
        Valid indices in the input data
    idx_arr : numpy array
        Mapping array from valid source points to target points
    """

    # Check source_geo_def
    # if isinstance(source_geo_def, tuple):
    #     from pyresample.geometry import SwathDefinition
    #     lons, lats = _mask_coordinates(source_geo_def[0], source_geo_def[1])
    #     source_geo_def = SwathDefinition(lons, lats)

    # Calculate neighbour information
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        (input_idxs, output_idxs, idx_ref, dists) = \
            kd_tree.get_neighbour_info(source_geo_def, target_area_def,
                                       radius, neighbours=neighbours,
                                       nprocs=nprocs, reduce_data=reduce_data,
                                       segments=segments, epsilon=epsilon)

    del output_idxs, dists

    # Reduce index reference
    input_size = input_idxs.sum()
    index_mask = (idx_ref == input_size)
    idx_ref = np.where(index_mask, 0, idx_ref)

    # Get output projection as pyproj object
    proj = Proj(target_area_def.proj4_string)

    # Get output x/y coordinates
    out_x, out_y = _get_output_xy(target_area_def, proj)

    # Get input x/y coordinates
    in_x, in_y = _get_input_xy(source_geo_def, proj, input_idxs, idx_ref)

    # Get the four closest corner points around each output location
    pt_1, pt_2, pt_3, pt_4, idx_ref = \
        _get_bounding_corners(in_x, in_y, out_x, out_y, neighbours, idx_ref)

    # Calculate vertical and horizontal fractional distances t and s
    t__, s__ = _get_ts(pt_1, pt_2, pt_3, pt_4, out_x, out_y)

    # Mask NaN values
    if masked:
        mask = np.isnan(t__) | np.isnan(s__)
        t__ = np.ma.masked_where(mask, t__)
        s__ = np.ma.masked_where(mask, s__)

    return t__, s__, input_idxs, idx_ref
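
A minimal usage sketch, assuming source_def is a SwathDefinition, target_def an AreaDefinition and data a swath-shaped array (all placeholders here); get_sample_from_bil_info is the companion function from the same module:

t__, s__, input_idxs, idx_arr = get_bil_info(source_def, target_def,
                                             radius=50e3, neighbours=32)
result = get_sample_from_bil_info(data.ravel(), t__, s__, input_idxs, idx_arr,
                                  output_shape=target_def.shape)

Computing t__, s__, input_idxs and idx_arr once and reusing them for every channel on the same swath/target pair is the point of splitting the interface this way.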
Code example #30
0
def regridAllImages(workDir, ops_dir, vars, proj_geom_def, grid_info, ul_x, ul_y, data_geom_def, layer_names, output_filenames, overwrite_psf='yes', scale_data_types=None, scale_data_ranges=None):
	"""
	Uses pyresample to regrid all of the images. Each image that is created is written to the PSF. If one or 
	more images cannot be created, they will be skipped, but the other images will be created if possible.

	  Parameters:
	    workDir: The working directory, used for reading PCF values and writing the PSF.
	    ops_dir: The operations directory, passed through to editGeoTiffKeys.
	    vars: Dictionary of layers (variables) read from the input NC4 file.
	    proj_geom_def: Geometry definition of the output (projected) grid.
	    grid_info: Dictionary of grid metadata (proj4 string and pixel sizes).
	    ul_x: X coordinate of the upper-left corner of the output grid.
	    ul_y: Y coordinate of the upper-left corner of the output grid.
	    data_geom_def: Geometry definition of the input swath data.
	    layer_names: The names of the layers (in the vars dictionary) to convert into geoTIFF format.
	    output_filenames: A dictionary, mapping (layer_name -> output_filename).
	    overwrite_psf: Whether to overwrite the PSF, 'yes' or 'no'. If not specified, defaults to 'yes'.
	    scale_data_types: A dictionary mapping (layer_name -> data_type).
	    scale_data_ranges: A dictionary mapping (layer_name -> tuple(min_valid_value_in_layer, max_valid_value_in_layer))
	
	  Returns: 
	    True if any images were written to the PSF, False if not.
	"""

	if not (overwrite_psf == 'yes' or overwrite_psf == 'no'):
		log.exception("Invalid value specified for overwrite_psf: '" + str(overwrite_psf) + "'. Must be 'yes' or 'no'.")

	# compute the information needed to re-project the data based on the input and output geometry definitions.
	log.info("Calculating re-gridding based on lat/lon information.")
	resampleRadius = float(readPCF(workDir, "resampleRadius"))
	valid_input_index, valid_output_index, index_array, distance_array = kd_tree.get_neighbour_info(data_geom_def, proj_geom_def, resampleRadius, neighbours=1, reduce_data=False)

	# Actually reproject the images, using the information computed above.
	# If one image fails to be re-projected, this script will return failure, but will still attempt to convert the others if possible.
	gtCitationGeoKey = readPCF(workDir, "GTCitationGeoKey")
	geogCitationGeoKey = readPCF(workDir, "GeogCitationGeoKey")
	last_failure_status = os.EX_OK
	for layer in layer_names:
		if not layer in vars:
			log.warning("The layer '" + layer + "' was not found in the NC4 file. Skipping.")
			continue
		output_filename = output_filenames[layer]
		original_data = vars[layer]["data"]

		fill_value = processFillValues(vars, layer, original_data)
		if numpy.sum(original_data == fill_value) == (original_data.shape[0] * original_data.shape[1]):
			log.info("The input layer '" + layer + "' is all fill values. Skipping.")
			continue

		log.info("Regridding layer: '" + layer + "'")
		resampled_data = kd_tree.get_sample_from_neighbour_info('nn', proj_geom_def.shape, original_data, valid_input_index, valid_output_index, index_array, fill_value=fill_value)

		if numpy.sum(resampled_data == fill_value) == (resampled_data.shape[0] * resampled_data.shape[1]):
			log.warning("Output file: '" + output_filename + "' was not produced. The result of re-sampling was all fill values. The input data probably missed the grid.")
			continue
		
		# If requested, do rescaling of the data.
		if scale_data_types is not None:
			if scale_data_ranges is not None:
				resampled_data, fill_value = scaleData(resampled_data, fill_value, scale_data_types[layer], min_in=scale_data_ranges[layer][0], max_in=scale_data_ranges[layer][1])
			else:
				resampled_data, fill_value = scaleData(resampled_data, fill_value, scale_data_types[layer])

		log.info("Creating geoTIFF file: '" + output_filename + "'.")
		createGeoTiff(output_filename, resampled_data, grid_info['proj4_str'], [grid_info['pixel_size_x'], grid_info['pixel_size_y']], [ul_x, ul_y])

		# Edit the GeoTIFF keys.
		editStatus = editGeoTiffKeys(output_filename, workDir, ops_dir, gtCitationGeoKey=gtCitationGeoKey, geogCitationGeoKey=geogCitationGeoKey)
		if editStatus != os.EX_OK:
			last_failure_status = editStatus
		else:
			writePSF(workDir, output_filename, overwrite=overwrite_psf, close=False)
			overwrite_psf = 'no'

	if last_failure_status != os.EX_OK:
		log.exception("There was an error creating one or more of the geoTIFF output products. Exiting with failure.")
		sys.exit(last_failure_status)

	return (overwrite_psf == 'no')
Code example #31
0
    def add_detection_stats_on_fib_lattice(self, my_obj):
        #Start with the area and get lat and lon to calculate the stats:
        if  len(my_obj.longitude) == 0:
            print "Skipping file, no matches !"
            return
        lats = self.flattice.lats[:]
        max_distance=self.flattice.radius_km*1000*2.5
        area_def = SwathDefinition(*(self.flattice.lons,
                                     self.flattice.lats))
        target_def = SwathDefinition(*(my_obj.longitude, 
                                       my_obj.latitude)) 
        valid_in, valid_out, indices, distances = get_neighbour_info(
            area_def, target_def, radius_of_influence=max_distance, 
            epsilon=100, neighbours=1)
        cols = get_sample_from_neighbour_info('nn', target_def.shape,
                                              np.array(xrange(0,len(lats))),
                                              valid_in, valid_out,
                                              indices)
        cols = cols[valid_out]
        detected_clouds = my_obj.detected_clouds[valid_out]
        detected_clear = my_obj.detected_clear[valid_out]
        detected_height_low = my_obj.detected_height_low[valid_out]
        detected_height_high = my_obj.detected_height_high[valid_out]
        detected_height = my_obj.detected_height[valid_out]
        detected_height_both = my_obj.detected_height_both[valid_out]
        false_clouds = my_obj.false_clouds[valid_out]
        undetected_clouds = my_obj.undetected_clouds[valid_out]
        new_detected_clouds = my_obj.new_detected_clouds[valid_out]
        new_false_clouds = my_obj.new_false_clouds[valid_out]
        lapse_rate = my_obj.lapse_rate[valid_out]  
        t11ts_offset = my_obj.t11ts_offset[valid_out] 
        t11t12_offset = my_obj.t11t12_offset[valid_out] 
        t37t12_offset = my_obj.t37t12_offset[valid_out] 
        height_bias_low = my_obj.height_bias_low[valid_out]
        height_bias = my_obj.height_bias[valid_out]
        height_mae_diff = my_obj.height_mae_diff[valid_out]
        temperature_bias_low = my_obj.temperature_bias_low[valid_out]
        temperature_bias_low_t11 = my_obj.temperature_bias_low_t11[valid_out]
        lapse_bias_low = my_obj.lapse_bias_low[valid_out]
        height_bias_high = my_obj.height_bias_high[valid_out]
        lapse_bias_high = my_obj.lapse_bias_high[valid_out]
        is_clear = np.logical_or(detected_clear,false_clouds)
        #lets make things faster, I'm tired of waiting!
        cols[distances > max_distance] = -9  # don't use pixels matched too far away!
        import time        
        tic = time.time()      
        arr, counts = np.unique(cols, return_index=False, return_counts=True)        
        for d in arr[arr>0]:
            use = cols==d
            ind = np.where(use)[0]
            #if ind.any():
            self.flattice.N_false_clouds[d] += np.sum(false_clouds[ind])
            self.flattice.N_detected_clouds[d] += np.sum(detected_clouds[ind])
            self.flattice.N_detected_clear[d] += np.sum(detected_clear[ind])
            self.flattice.N_undetected_clouds[d] += np.sum(undetected_clouds[ind])
            self.flattice.N_new_false_clouds[d] += np.sum(new_false_clouds[ind])
            self.flattice.N_new_detected_clouds[d] += np.sum(new_detected_clouds[ind])
            self.flattice.N_detected_height_low[d] += np.sum(detected_height_low[ind])
            self.flattice.N_detected_height_high[d] += np.sum(detected_height_high[ind])
            self.flattice.N_detected_height[d] += np.sum(detected_height[ind])
            self.flattice.N_detected_height_both[d] += np.sum(detected_height_both[ind])
            self.flattice.Sum_ctth_bias_low[d] += np.sum(height_bias_low[ind])
            self.flattice.Sum_ctth_mae_low[d] += np.sum(np.abs(height_bias_low[ind]))
            self.flattice.Sum_ctth_mae[d] += np.sum(np.abs(height_bias[ind]))
            self.flattice.Sum_ctth_mae_diff[d] += np.sum(height_mae_diff[ind])
            self.flattice.Sum_lapse_bias_low[d] += np.sum(lapse_bias_low[ind])
            self.flattice.Sum_ctth_bias_high[d] += np.sum(height_bias_high[ind])
            self.flattice.Sum_ctth_mae_high[d] += np.sum(np.abs(height_bias_high[ind]))
            self.flattice.Sum_lapse_bias_high[d] += np.sum(lapse_bias_high[ind])
            self.flattice.Sum_ctth_bias_temperature_low[d] += np.sum(temperature_bias_low[ind])
            self.flattice.Sum_ctth_bias_temperature_low_t11[d] += np.sum(temperature_bias_low_t11[ind])
            self.flattice.Min_lapse_rate[d] = np.min([self.flattice.Min_lapse_rate[d],
                                                      np.min(lapse_rate[ind])])  
            if np.sum(is_clear[ind])>0:
                self.flattice.Min_t11ts_offset[d] = np.min([self.flattice.Min_t11ts_offset[d],
                                                            np.percentile(t11ts_offset[ind][is_clear[ind]], 5)])
                self.flattice.Max_t11t12_offset[d] = np.max([self.flattice.Max_t11t12_offset[d],
                                                             np.percentile(t11t12_offset[ind][is_clear[ind]], 95)])
                self.flattice.Max_t37t12_offset[d] = np.max([self.flattice.Max_t37t12_offset[d],
                                                             np.percentile(t37t12_offset[ind][is_clear[ind]], 95)])
            for cc_type in range(8):
                self.flattice.Sum_height_bias_type[cc_type][d] += np.sum(my_obj.height_bias_type[cc_type][ind])
                self.flattice.N_detected_height_type[cc_type][d] += np.sum(my_obj.detected_height_type[cc_type][ind])

        print "mapping took %1.4f seconds"%(time.time()-tic)   
Code example #32
0
fmb.distribution_statement = "See CMEMS Data License"
fmb.naming_authority = "CMEMS"
fmb.cmems_production_unit = "OC-CNR-ROMA-IT"
fmb.institution = "CNR-GOS"
fmb.source = 'surface observation'
fmb.timeliness = timeliness
if timeliness == "NT":
    fmb.product_version = 'v02'
else:
    fmb.product_version = 'v02QL'

# Extract the neighbour info once and reuse it many times, to optimize computation time:
# http://pyresample.readthedocs.io/en/latest/swath.html#resampling-from-neighbour-info
# Still unclear why kd_tree.get_sample_from_neighbour_info does not accept an nprocs=n_cores argument ...
valid_input_index, valid_output_index, index_array, distance_array =\
    kd_tree.get_neighbour_info(swath, area, radius, neighbours=1, nprocs=n_cores)

if args.verbose:
    print "... Computing BRDF coefficients"

# call the bdrf correction
# and create the new RRS bands

#wind speed components, ws0 and ws1 and
#viewing geometries:
#OAA,OZA,SAA,SZA
#O obs, S solar
#A Azimuth, Z Zenith
#A Angle
ws0 = np.zeros(h * w, np.float32)
ws1 = np.zeros(h * w, np.float32)
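
A hedged sketch of the compute-once, reuse-many pattern referred to in the comment above, relying on the kd_tree and numpy imports already used in this script: once valid_input_index, valid_output_index and index_array are known, each band only needs get_sample_from_neighbour_info. Here bands is a hypothetical dict of swath-shaped float arrays, not a variable from this script.

remapped = {}
for name, band in bands.items():
    # remap every band with the same precomputed neighbour indices
    remapped[name] = kd_tree.get_sample_from_neighbour_info(
        'nn', area.shape, band,
        valid_input_index, valid_output_index, index_array,
        fill_value=np.nan)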
Code example #33
0
def match_lonlat(source,
                 target,
                 radius_of_influence=0.7 * RESOLUTION * 1000.0,
                 n_neighbours=1):
    """
    Produce a masked array of the same shape as the arrays in *target*, with
    indices of nearest neighbours in *source*. *source* and *target* should be
    tuples (lon, lat) of the source and target swaths, respectively.

    Note::

        * Fastest matching is obtained when *target* has lower resolution than
        *source*.

        * *source* should have 2-dimensional lon and lat arrays.

    """
    from pyresample.geometry import SwathDefinition
    from pyresample.kd_tree import get_neighbour_info
    from pyresample.kd_tree import get_sample_from_neighbour_info

    lon, lat = source
    mask_out_lat = np.logical_or(lat < -90, lat > 90)
    mask_out_lon = np.logical_or(lon > 180, lon < -180)
    mask_out = np.logical_or(mask_out_lat, mask_out_lon)
    lat = np.ma.masked_array(lat, mask=mask_out)
    lon = np.ma.masked_array(lon, mask=mask_out)
    # lat = np.around(lat, decimals=4)
    # lon = np.around(lon, decimals=4)
    source_def = SwathDefinition(*(lon, lat))
    target_def = SwathDefinition(*target)
    logger.debug("Matching %d nearest neighbours", n_neighbours)
    valid_in, valid_out, indices, distances = get_neighbour_info(
        source_def, target_def, radius_of_influence, neighbours=n_neighbours)
    # import pdb; pdb.set_trace()
    # Use pyresample code to find column and row numbers for each pixel.
    # This works also with no-data in imager lat/lon.
    cols_matrix, rows_matrix = np.meshgrid(np.array(range(0, lat.shape[1])),
                                           np.array(range(0, lat.shape[0])))
    if n_neighbours == 1:
        first_indices = indices
    else:
        first_indices = indices[:, 0]

    cols = get_sample_from_neighbour_info('nn', target_def.shape, cols_matrix,
                                          valid_in, valid_out, first_indices)
    rows = get_sample_from_neighbour_info('nn', target_def.shape, rows_matrix,
                                          valid_in, valid_out, first_indices)
    if n_neighbours > 1:
        rows_0 = rows.copy()
        cols_0 = cols.copy()
        rows = NODATA + np.zeros((len(rows_0), n_neighbours))
        cols = NODATA + np.zeros((len(cols_0), n_neighbours))
        rows[:, 0] = rows_0
        cols[:, 0] = cols_0
        for i in range(1, n_neighbours):
            cols[:,
                 i] = get_sample_from_neighbour_info('nn', target_def.shape,
                                                     cols_matrix, valid_in,
                                                     valid_out, indices[:, i])
            rows[:,
                 i] = get_sample_from_neighbour_info('nn', target_def.shape,
                                                     rows_matrix, valid_in,
                                                     valid_out, indices[:, i])
            test = (distances[:, 0] - distances[:, i])
            if sum(~np.isnan(test)) > 0 and np.max(test[~np.isnan(test)]) > 0:
                raise ValueError(
                    'We count on the first neighbour being the closest')

    rows = np.array(rows)
    cols = np.array(cols)
    # import pdb; pdb.set_trace()
    """ Code used during debugging, leaving it here for now
    if indices.dtype in ['uint32']:
        # With pykdtree installed get_neighbour_info returns indices
        # as type uint32
        # This does not combine well with a nodata value of -9.
        indices = np.array(indices, dtype=np.int64)
    """
    # Make sure all indices are valid
    # import pdb; pdb.set_trace()
    rows[rows >= source_def.shape[0]] = NODATA
    cols[cols >= source_def.shape[1]] = NODATA
    mask = np.logical_or(distances > radius_of_influence,
                         indices >= len(valid_in))
    distances[distances > radius_of_influence] = -9
    # import pdb; ipdb.set_trace()
    rows[mask] = NODATA
    cols[mask] = NODATA
    # import pdb; pdb.set_trace()
    return MatchMapper(rows, cols, mask), distances
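
A hedged usage sketch for match_lonlat; the arrays are illustrative only, and the module-level names used inside the function (RESOLUTION, NODATA, MatchMapper, logger) are assumed to be defined as in the original project.

imager_lon, imager_lat = np.meshgrid(np.linspace(10., 20., 200),
                                     np.linspace(55., 65., 300))
track_lon = np.linspace(12., 18., 50)
track_lat = np.linspace(57., 63., 50)

mapper, distances = match_lonlat((imager_lon, imager_lat),
                                 (track_lon, track_lat))
# mapper holds, for every track point, the row/column of the nearest imager pixel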
Code example #34
0
File: match.py Project: rexjoe/atrain_match
def match_lonlat(source,
                 target,
                 radius_of_influence=0.7 * RESOLUTION * 1000.0,
                 n_neighbours=1):
    """
    Produce a masked array of the same shape as the arrays in *target*, with
    indices of nearest neighbours in *source*. *source* and *target* should be
    tuples (lon, lat) of the source and target swaths, respectively.
    
    Note::
    
        * Fastest matching is obtained when *target* has lower resolution than
        *source*.
        
        * *source* should have 2-dimensional lon and lat arrays.
    
    """
    from pyresample.geometry import SwathDefinition
    from pyresample.kd_tree import get_neighbour_info
    from pyresample.kd_tree import get_sample_from_neighbour_info

    lon, lat = source
    mask_out_lat = np.logical_or(lat < -90, lat > 90)
    mask_out_lon = np.logical_or(lon > 180, lon < -180)
    mask_out = np.logical_or(mask_out_lat, mask_out_lon)
    lat = np.ma.masked_array(lat, mask=mask_out)
    lon = np.ma.masked_array(lon, mask=mask_out)
    #lat = np.around(lat, decimals=4)
    #lon = np.around(lon, decimals=4)
    source_def = SwathDefinition(*(lon, lat))
    target_def = SwathDefinition(*target)
    logger.debug("Matching %d nearest neighbours" % n_neighbours)
    valid_in, valid_out, indices, distances = get_neighbour_info(
        source_def, target_def, radius_of_influence, neighbours=n_neighbours)
    # Use pyresample code to find column and row numbers for each pixel
    # This works also with no-data in imager lat/lon.
    cols_matrix, rows_matrix = np.meshgrid(np.array(xrange(0, lat.shape[1])),
                                           np.array(xrange(0, lat.shape[0])))
    cols = get_sample_from_neighbour_info('nn', target_def.shape, cols_matrix,
                                          valid_in, valid_out, indices)
    rows = get_sample_from_neighbour_info('nn', target_def.shape, rows_matrix,
                                          valid_in, valid_out, indices)
    rows = np.array(rows)
    cols = np.array(cols)
    """ Code used during debugging, leaving it here for now
    #Hopefully not needed anymore as indices is not used directly
    if indices.dtype in ['uint32']:
        #With pykdtree installed get_neighbour_info returns indices
        # as type uint32
        #This does not combine well with a nodata value of -9.
        indices = np.array(indices,dtype=np.int64)
    #get_expected_output even for nodata in lat/lon!
    #print "indices", indices
    if 1==1:
        print distances, indices
        print max(indices)
        print min(indices)
        print len(valid_in)
        print len(valid_in[valid_in])
        # But why is +1 item needed??
        from_one_to_many = np.array(xrange(0,len(valid_in)+1))
        print from_one_to_many
        valid_in_new = np.append(valid_in,np.array([True]), axis=0)
        print valid_in_new
        use_these = indices[valid_out]
        print use_these
        new_numbers = from_one_to_many[valid_in_new]
        print new_numbers
        indices[valid_out] = new_numbers[use_these]
    #print "indices", indices
    shape = list(target_def.shape)
    shape.append(n_neighbours)
    indices.shape = shape
    distances.shape = shape
    rows = indices // source_def.shape[1]
    cols = indices % source_def.shape[1]
    print "c", cols, "r", rows
    print rows.shape, cols.shape
    """

    # Make sure all indices are valid
    #import ipdb; ipdb.set_trace()
    rows[rows >= source_def.shape[0]] = NODATA
    cols[cols >= source_def.shape[1]] = NODATA
    mask = distances > radius_of_influence
    return MatchMapper(rows, cols, mask)
Code example #35
0
    def setUp(self):
        """Do some setup for common things."""
        import dask.array as da
        from xarray import DataArray
        from pyresample import geometry, kd_tree

        self.pts_irregular = (np.array([
            [-1., 1.],
        ]), np.array([
            [1., 2.],
        ]), np.array([
            [-2., -1.],
        ]), np.array([
            [2., -4.],
        ]))
        self.pts_vert_parallel = (np.array([
            [-1., 1.],
        ]), np.array([
            [1., 2.],
        ]), np.array([
            [-1., -1.],
        ]), np.array([
            [1., -2.],
        ]))
        self.pts_both_parallel = (np.array([
            [-1., 1.],
        ]), np.array([
            [1., 1.],
        ]), np.array([
            [-1., -1.],
        ]), np.array([
            [1., -1.],
        ]))

        # Area definition with four pixels
        self.target_def = geometry.AreaDefinition(
            'areaD', 'Europe (3km, HRV, VTC)', 'areaD', {
                'a': '6378144.0',
                'b': '6356759.0',
                'lat_0': '50.00',
                'lat_ts': '50.00',
                'lon_0': '8.00',
                'proj': 'stere'
            }, 4, 4,
            [-1370912.72, -909968.64000000001, 1029087.28, 1490031.3600000001])

        # Input data around the target pixel at 0.63388324, 55.08234642,
        in_shape = (100, 100)
        self.data1 = DataArray(da.ones((in_shape[0], in_shape[1])),
                               dims=('y', 'x'))
        self.data2 = 2. * self.data1
        self.data3 = self.data1 + 9.5
        lons, lats = np.meshgrid(np.linspace(-25., 40., num=in_shape[0]),
                                 np.linspace(45., 75., num=in_shape[1]))
        self.source_def = geometry.SwathDefinition(lons=lons, lats=lats)

        self.radius = 50e3
        self.neighbours = 32
        valid_input_index, output_idxs, index_array, dists = \
            kd_tree.get_neighbour_info(self.source_def, self.target_def,
                                       self.radius, neighbours=self.neighbours,
                                       nprocs=1)
        input_size = valid_input_index.sum()
        index_mask = (index_array == input_size)
        index_array = np.where(index_mask, 0, index_array)

        self.valid_input_index = valid_input_index
        self.index_array = index_array

        shp = self.source_def.shape
        self.cols, self.lines = np.meshgrid(np.arange(shp[1]),
                                            np.arange(shp[0]))
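
Here, as in get_bil_info earlier, index values equal to the number of valid input points are pyresample's marker for "no neighbour found within the radius"; zeroing them keeps the later fancy indexing in bounds. A tiny hedged illustration of that masking with made-up numbers:

import numpy as np

valid_input_index = np.array([True, True, False, True])  # 3 valid input points
index_array = np.array([0, 2, 3, 1])                      # 3 marks "no neighbour"
input_size = valid_input_index.sum()                      # -> 3
index_mask = (index_array == input_size)                  # [False, False, True, False]
index_array = np.where(index_mask, 0, index_array)        # [0, 2, 0, 1]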
Code example #36
0
File: resample.py Project: douyoujun/satpy
    def precompute(self, mask=None, radius_of_influence=10000, epsilon=0, reduce_data=True, nprocs=1, segments=None,
                   cache_dir=False, **kwargs):
        """Create a KDTree structure and store it for later use.

        Note: The `mask` keyword should be provided if geolocation may be valid where data points are invalid.
        This defaults to the `mask` attribute of the `data` numpy masked array passed to the `resample` method.
        """

        del kwargs

        source_geo_def = self.source_geo_def
        # the data may have additional masked pixels
        # let's compare them to see if we can use the same area
        # assume lons and lats mask are the same
        if np.any(mask):
            # copy the source area and use it for the rest of the calculations
            LOG.debug("Copying source area to mask invalid dataset points")
            source_geo_def = deepcopy(self.source_geo_def)
            lons, lats = source_geo_def.get_lonlats()
            if np.ndim(mask) == 3:
                # FIXME: we should treat 3d arrays (composites) layer by layer!
                mask = np.sum(mask, axis=2)
                # FIXME: pyresample doesn't seem to like this
                #lons = np.tile(lons, (1, 1, mask.shape[2]))
                #lats = np.tile(lats, (1, 1, mask.shape[2]))

            # use the same data, but make a new mask (i.e. don't affect the original masked array)
            # the ma.array function combines the underlying mask with the new one (OR)
            source_geo_def.lons = np.ma.array(lons, mask=mask)
            source_geo_def.lats = np.ma.array(lats, mask=mask)

        kd_hash = self.get_hash(source_geo_def=source_geo_def,
                                radius_of_influence=radius_of_influence,
                                epsilon=epsilon)
        if isinstance(cache_dir, (str, six.text_type)):
            filename = os.path.join(cache_dir, hashlib.sha1(kd_hash).hexdigest() + ".npz")
        else:
            filename = os.path.join('.', hashlib.sha1(kd_hash.encode("utf-8")).hexdigest() + ".npz")

        try:
            self.cache = self.caches[kd_hash]
            # trick to keep most used caches away from deletion
            del self.caches[kd_hash]
            self.caches[kd_hash] = self.cache

            if cache_dir:
                self.dump(filename)
            return self.cache
        except KeyError:
            if os.path.exists(filename):
                LOG.debug("Loading kd-tree parameters")
                self.cache = dict(np.load(filename))
                self.caches[kd_hash] = self.cache
                while len(self.caches) > CACHE_SIZE:
                    self.caches.popitem(False)
                if cache_dir:
                    self.dump(filename)
                return self.cache
            else:
                LOG.debug("Computing kd-tree parameters")

        valid_input_index, valid_output_index, index_array, distance_array = \
            get_neighbour_info(source_geo_def,
                               self.target_geo_def,
                               radius_of_influence,
                               neighbours=1,
                               epsilon=epsilon,
                               reduce_data=reduce_data,
                               nprocs=nprocs,
                               segments=segments)

        # it's important here not to modify the existing cache dictionary.
        self.cache = {"valid_input_index": valid_input_index,
                      "valid_output_index": valid_output_index,
                      "index_array": index_array,
                      "distance_array": distance_array,
                      "source_geo_def": source_geo_def,
                      }

        self.caches[kd_hash] = self.cache
        while len(self.caches) > CACHE_SIZE:
            self.caches.popitem(False)

        if cache_dir:
            self.dump(filename)
        return self.cache
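
A minimal sketch, with made-up names and parameters, of the hash-keyed on-disk caching used above: derive the file name from a hash of the resampling parameters, load the .npz if it exists, otherwise compute the neighbour info and persist it with numpy.

import hashlib
import os
import numpy as np

def kd_cache_filename(cache_dir, kd_hash):
    # one .npz file per unique combination of resampling parameters
    return os.path.join(cache_dir,
                        hashlib.sha1(kd_hash.encode("utf-8")).hexdigest() + ".npz")

fname = kd_cache_filename("/var/tmp", "source_area->target_area:10000:0.0")
if os.path.exists(fname):
    cache = dict(np.load(fname))
else:
    # run get_neighbour_info here and store its outputs, for example:
    # np.savez(fname, valid_input_index=vii, valid_output_index=voi,
    #          index_array=ia, distance_array=da)
    cache = {}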
Code example #37
0
File: projector.py Project: 3Geo/mpop
    def __init__(self, in_area, out_area,
                 in_latlons=None, mode=None,
                 radius=10000):

        # TODO:
        # - Rework so that in_area and out_area can be lonlats.
        # - Add a recompute flag ?

        # Setting up the input area
        try:
            self.in_area = get_area_def(in_area)
            in_id = in_area
        except (utils.AreaNotFound, AttributeError):
            if isinstance(in_area, geometry.AreaDefinition):
                self.in_area = in_area
                in_id = in_area.area_id
            elif isinstance(in_area, geometry.SwathDefinition):
                self.in_area = in_area
                in_id = in_area.area_id
            elif in_latlons is not None:
                self.in_area = geometry.SwathDefinition(lons=in_latlons[0],
                                                        lats=in_latlons[1])
                in_id = in_area
            else:
                raise utils.AreaNotFound("Input area " +
                                         str(in_area) +
                                         " must be defined in " +
                                         AREA_FILE + ", be an area object"
                                         " or longitudes/latitudes must be "
                                         "provided.")


        # Setting up the output area
        try:
            self.out_area = get_area_def(out_area)
            out_id = out_area
        except (utils.AreaNotFound, AttributeError):
            if isinstance(out_area, (geometry.AreaDefinition,
                                     geometry.SwathDefinition)):
                self.out_area = out_area
                out_id = out_area.area_id
            else:
                raise utils.AreaNotFound("Output area " +
                                         str(out_area) +
                                         " must be defined in " +
                                         AREA_FILE + " or "
                                         "be an area object.")

        if self.in_area == self.out_area:
            return

        # choosing the right mode if necessary
        if mode is None:
            if (isinstance(in_area, geometry.AreaDefinition) and
                isinstance(out_area, geometry.AreaDefinition)):
                self.mode = "quick"
            else:
                self.mode = "nearest"
        else:
            self.mode = mode



        filename = (in_id + "2" + out_id + "_" + self.mode + ".npz")

        projections_directory = "/var/tmp"
        try:
            projections_directory = CONF.get("projector",
                                             "projections_directory")
        except ConfigParser.NoSectionError:
            pass
        
        self._filename = os.path.join(projections_directory, filename)

        if not os.path.exists(self._filename):
            LOG.info("Computing projection from %s to %s..."
                     %(in_id, out_id))


            if self.mode == "nearest":
                valid_index, valid_output_index, index_array, distance_array = \
                             kd_tree.get_neighbour_info(self.in_area,
                                                        self.out_area,
                                                        radius,
                                                        neighbours=1)
                del distance_array
                self._cache = {}
                self._cache['valid_index'] = valid_index
                self._cache['valid_output_index'] = valid_output_index
                self._cache['index_array'] = index_array

            elif self.mode == "quick":
                ridx, cidx = \
                      utils.generate_quick_linesample_arrays(self.in_area,
                                                             self.out_area)
                                                    
                self._cache = {}
                self._cache['row_idx'] = ridx
                self._cache['col_idx'] = cidx

            else:
                raise ValueError("Unrecognised mode " + str(self.mode) + ".") 
            
        else:
            self._cache = {}
            self._file_cache = np.load(self._filename)
Code example #38
0
    def __init__(self,
                 in_area,
                 out_area,
                 in_latlons=None,
                 mode=None,
                 radius=10000,
                 nprocs=1):

        if (mode is not None and mode not in ["quick", "nearest"]):
            raise ValueError("Projector mode must be 'nearest' or 'quick'")

        self.area_file = get_area_file()

        self.in_area = None
        self.out_area = None
        self._cache = None
        self._filename = None
        self.mode = "quick"
        self.radius = radius
        self.conf = ConfigParser.ConfigParser()
        self.conf.read(os.path.join(CONFIG_PATH, "mpop.cfg"))

        # TODO:
        # - Rework so that in_area and out_area can be lonlats.
        # - Add a recompute flag ?

        # Setting up the input area
        try:
            self.in_area = get_area_def(in_area)
            in_id = in_area
        except (utils.AreaNotFound, AttributeError):
            try:
                in_id = in_area.area_id
                self.in_area = in_area
            except AttributeError:
                try:
                    self.in_area = geometry.SwathDefinition(lons=in_latlons[0],
                                                            lats=in_latlons[1])
                    in_id = in_area
                except TypeError:
                    raise utils.AreaNotFound(
                        "Input area " + str(in_area) + " must be defined in " +
                        self.area_file + ", be an area object"
                        " or longitudes/latitudes must be "
                        "provided.")

        # Setting up the output area
        try:
            self.out_area = get_area_def(out_area)
            out_id = out_area
        except (utils.AreaNotFound, AttributeError):
            try:
                out_id = out_area.area_id
                self.out_area = out_area
            except AttributeError:
                raise utils.AreaNotFound("Output area " + str(out_area) +
                                         " must be defined in " +
                                         self.area_file + " or "
                                         "be an area object.")

        #if self.in_area == self.out_area:
        #    return

        # choosing the right mode if necessary
        if mode is None:
            try:
                dicts = in_area.proj_dict, out_area.proj_dict
                del dicts
                self.mode = "quick"
            except AttributeError:
                self.mode = "nearest"
        else:
            self.mode = mode

        filename = (in_id + "2" + out_id + "_" +
                    str(_get_area_hash(self.in_area)) + "to" +
                    str(_get_area_hash(self.out_area)) + "_" + self.mode +
                    ".npz")

        projections_directory = "/var/tmp"
        try:
            projections_directory = self.conf.get("projector",
                                                  "projections_directory")
        except ConfigParser.NoSectionError:
            pass

        self._filename = os.path.join(projections_directory, filename)

        try:
            self._cache = {}
            self._file_cache = np.load(self._filename)
        except:
            logger.info("Computing projection from %s to %s...", in_id, out_id)

            if self.mode == "nearest":
                valid_index, valid_output_index, index_array, distance_array = \
                    kd_tree.get_neighbour_info(self.in_area,
                                               self.out_area,
                                               self.radius,
                                               neighbours=1,
                                               nprocs=nprocs)
                del distance_array
                self._cache = {}
                self._cache['valid_index'] = valid_index
                self._cache['valid_output_index'] = valid_output_index
                self._cache['index_array'] = index_array

            elif self.mode == "quick":
                ridx, cidx = \
                    utils.generate_quick_linesample_arrays(self.in_area,
                                                           self.out_area)
                self._cache = {}
                self._cache['row_idx'] = ridx
                self._cache['col_idx'] = cidx