Example #1
    def test_min_time(self):
        input = Pyramid({0: self.tiled_raster_rdd})

        cube = GeopysparkDataCube(pyramid=input,
                                  metadata=self.collection_metadata)
        env = EvalEnv()
        min_time = cube.reduce_dimension(reducer=reducer('min'),
                                         dimension='t',
                                         env=env)
        max_time = cube.reduce_dimension(reducer=reducer('max'),
                                         dimension='t',
                                         env=env)

        stitched = min_time.pyramid.levels[0].stitch()
        print(stitched)

        self.assertEqual(2.0, stitched.cells[0][0][0])

        for p in self.points[1:3]:
            result = min_time.timeseries(p.x, p.y, srs="EPSG:3857")
            print(result)
            print(cube.timeseries(p.x, p.y, srs="EPSG:3857"))
            max_result = max_time.timeseries(p.x, p.y, srs="EPSG:3857")
            self.assertEqual(1.0, result['NoDate'])
            self.assertEqual(2.0, max_result['NoDate'])
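The reducer(...) helper used in these tests is not shown in this listing. A minimal, hypothetical sketch of such a helper, assuming it merely wraps the named openEO process into a one-node process graph (the real test utility may differ), could look like:

    def reducer(operation: str) -> dict:
        # Hypothetical helper: wrap a single openEO process name (e.g. "min", "max")
        # into a one-node process graph that reduce_dimension() can evaluate.
        return {
            operation: {
                "process_id": operation,
                "arguments": {"data": {"from_parameter": "data"}},
                "result": True,
            }
        }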
Example #2
    def test_apply_spatiotemporal(self):
        import openeo_udf.functions

        input = Pyramid({0: self.tiled_raster_rdd})

        imagecollection = GeotrellisTimeSeriesImageCollection(
            input, InMemoryServiceRegistry(), {
                "bands": [{
                    "band_id": "2",
                    "name": "blue",
                    "wavelength_nm": 496.6,
                    "res_m": 10,
                    "scale": 0.0001,
                    "offset": 0,
                    "type": "int16",
                    "unit": "1"
                }]
            })
        import os
        udf_dir = os.path.dirname(openeo_udf.functions.__file__)
        file_name = os.path.join(udf_dir, "datacube_reduce_time_sum.py")
        with open(file_name, "r") as f:
            udf_code = f.read()

        result = imagecollection.apply_tiles_spatiotemporal(udf_code)
        stitched = result.pyramid.levels[0].to_spatial_layer().stitch()
        print(stitched)
        self.assertEqual(2, stitched.cells[0][0][0])
        self.assertEqual(6, stitched.cells[0][0][5])
        self.assertEqual(4, stitched.cells[0][5][6])
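Going by its name, datacube_reduce_time_sum.py presumably sums the cube over the time dimension. A plain-numpy illustration of that reduction, with made-up values and independent of the openeo_udf machinery:

    import numpy as np

    # Hypothetical (time, y, x) stack of two dates; summing along the time axis
    # collapses it to a single (y, x) slice, which is what the assertions inspect.
    stack = np.stack([np.full((4, 4), 1.0), np.full((4, 4), 2.0)])
    summed = stack.sum(axis=0)
    print(summed[0, 0])  # 3.0 for these made-up values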
Example #3
    def test_reproject_spatial(self):
        input = Pyramid({0: self.tiled_raster_rdd})

        imagecollection = GeopysparkDataCube(pyramid=input,
                                             metadata=self.collection_metadata)

        ref_path = str(self.temp_folder / "reproj_ref.tiff")
        imagecollection.reduce('max',
                               dimension="t").save_result(ref_path,
                                                          format="GTIFF")

        resampled = imagecollection.resample_spatial(resolution=0,
                                                     projection="EPSG:3395",
                                                     method="max")
        metadata = resampled.pyramid.levels[0].layer_metadata
        print(metadata)
        self.assertTrue("proj=merc" in metadata.crs)
        path = str(self.temp_folder / "reprojected.tiff")
        res = resampled.reduce('max', dimension="t")
        res.save_result(path, format="GTIFF")

        with rasterio.open(ref_path) as ref_ds:
            with rasterio.open(path) as ds:
                print(ds.profile)
                # this reprojection does not change the shape, so we can compare
                assert ds.read().shape == ref_ds.read().shape

                assert ds.crs.to_epsg() == 3395
Example #4
    def test_rename_dimension(self):
        imagecollection = GeopysparkDataCube(pyramid=Pyramid(
            {0: self.tiled_raster_rdd}),
                                             metadata=self.collection_metadata)

        dim_renamed = imagecollection.rename_dimension('t', 'myNewTimeDim')

        dim_renamed.metadata.assert_valid_dimension('myNewTimeDim')
Example #5
    def test_mask_raster_replacement_int(self):
        def createMask(tile):
            tile.cells[0][0][0] = 0.0
            return tile

        input = Pyramid({0: self.tiled_raster_rdd})
        mask_layer = self.tiled_raster_rdd.map_tiles(createMask)
        mask = Pyramid({0: mask_layer})

        cube = GeopysparkDataCube(pyramid=input,
                                  metadata=self.collection_metadata)
        mask_cube = GeopysparkDataCube(pyramid=mask)
        stitched = cube.mask(mask=mask_cube, replacement=10).reduce(
            'max', dimension="t").pyramid.levels[0].stitch()
        print(stitched)
        assert stitched.cells[0][0][0] == 2.0
        assert stitched.cells[0][0][1] == 10.0
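A plain-numpy sketch of the mask semantics this test relies on (values where the mask is non-zero are replaced, zero-mask cells keep the original data; shapes and values below are illustrative only):

    import numpy as np

    data = np.full((4, 4), 2.0)        # stand-in for the max-over-time cube
    mask = np.ones((4, 4))             # non-zero everywhere ...
    mask[0, 0] = 0.0                   # ... except the cell createMask() zeroed
    masked = np.where(mask != 0, 10.0, data)  # replacement=10 where the mask is set
    print(masked[0, 0], masked[0, 1])  # 2.0 10.0, matching the assertions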
Example #6
    def test_aggregate_temporal(self):
        input = Pyramid({0: self.tiled_raster_rdd})

        imagecollection = GeotrellisTimeSeriesImageCollection(
            input, InMemoryServiceRegistry())
        stitched = imagecollection.aggregate_temporal(
            ["2017-01-01", "2018-01-01"], ["2017-01-03"],
            "max").pyramid.levels[0].to_spatial_layer().stitch()
        print(stitched)
Example #7
    def test_reduce_nontemporal(self):
        input = Pyramid({0: self.tiled_raster_rdd})

        imagecollection = GeotrellisTimeSeriesImageCollection(
            input, InMemoryServiceRegistry())
        with self.assertRaises(AttributeError) as context:
            imagecollection.reduce("max",
                                   "spectral").pyramid.levels[0].stitch()
        print(context.exception)
Example #8
    def test_reduce_nontemporal(self):
        input = Pyramid({0: self.tiled_raster_rdd})

        imagecollection = GeopysparkDataCube(pyramid=input,
                                             metadata=self.collection_metadata)
        with self.assertRaises(FeatureUnsupportedException) as context:
            imagecollection.reduce(
                "max", dimension="gender").pyramid.levels[0].stitch()
        print(context.exception)
Example #9
    def test_aggregate_max_time(self):
        input = Pyramid({0: self.tiled_raster_rdd})
        imagecollection = GeopysparkDataCube(pyramid=input,
                                             metadata=self.collection_metadata)

        layer = imagecollection.reduce('max', dimension='t').pyramid.levels[0]
        stitched = layer.stitch()
        assert CellType.FLOAT32.value == layer.layer_metadata.cell_type
        print(stitched)
        self.assertEqual(2.0, stitched.cells[0][0][0])
Example #10
    def _test_aggregate_temporal(self, interval_list):
        input = Pyramid({0: self.tiled_raster_rdd})
        imagecollection = GeopysparkDataCube(pyramid=input,
                                             metadata=self.collection_metadata)
        stitched = (imagecollection.aggregate_temporal(
            interval_list, ["2017-01-03"], "min",
            dimension="t").pyramid.levels[0].to_spatial_layer().stitch())
        print(stitched)
        expected_min = np.min([self.tile2.cells, self.tile.cells], axis=0)
        assert_array_almost_equal(stitched.cells[0, 0:5, 0:5], expected_min)
Example #11
    def test_aggregate_max_time(self):

        input = Pyramid({0: self.tiled_raster_rdd})

        imagecollection = GeotrellisTimeSeriesImageCollection(
            input, InMemoryServiceRegistry())

        stitched = imagecollection.reduce(
            'max', 'temporal').pyramid.levels[0].stitch()
        print(stitched)
        self.assertEqual(2.0, stitched.cells[0][0][0])
Example #12
    def test_mask_raster(self):
        input = Pyramid({0: self.tiled_raster_rdd})

        def createMask(tile):
            tile.cells[0][0][0] = 0.0
            return tile

        mask_layer = self.tiled_raster_rdd.map_tiles(createMask)
        mask = Pyramid({0: mask_layer})

        imagecollection = GeotrellisTimeSeriesImageCollection(
            input, InMemoryServiceRegistry())
        stitched = imagecollection.mask(
            rastermask=GeotrellisTimeSeriesImageCollection(
                mask, InMemoryServiceRegistry()),
            replacement=10.0).reduce('max',
                                     'temporal').pyramid.levels[0].stitch()
        print(stitched)
        self.assertEqual(2.0, stitched.cells[0][0][0])
        self.assertEqual(10.0, stitched.cells[0][0][1])
Example #13
    def test_apply_kernel_int(self):
        kernel = np.array([[0, 1, 0], [1, 1, 1], [0, 1, 0]])

        input = Pyramid({0: self.tiled_raster_rdd})
        img = GeopysparkDataCube(pyramid=input,
                                 metadata=self.collection_metadata)
        stitched = img.apply_kernel(kernel).reduce(
            'max', dimension="t").pyramid.levels[0].stitch()

        assert stitched.cells[0][0][0] == 6.0
        assert stitched.cells[0][0][1] == 8.0
        assert stitched.cells[0][1][1] == 10.0
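The expected values follow from convolving with the cross-shaped kernel using a zero boundary: a corner pixel sums 3 in-window cells, an edge pixel 4, and an interior pixel 5. A scipy sketch that reproduces the numbers, using an all-2 tile as a stand-in for the max-over-time data:

    import numpy as np
    from scipy.ndimage import convolve

    kernel = np.array([[0, 1, 0], [1, 1, 1], [0, 1, 0]])
    tile = np.full((8, 8), 2.0)             # stand-in tile of 2s
    out = convolve(tile, kernel, mode="constant", cval=0.0)
    print(out[0, 0], out[0, 1], out[1, 1])  # 6.0 8.0 10.0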
Example #14
    def test_apply_kernel(self):
        kernel = np.array([[0.0, 1.0, 0.0], [1.0, 1.0, 1.0], [0.0, 1.0, 0.0]])

        input = Pyramid({0: self.tiled_raster_rdd})
        imagecollection = GeotrellisTimeSeriesImageCollection(
            input, InMemoryServiceRegistry())
        stitched = imagecollection.apply_kernel(kernel, 2.0).reduce(
            'max', 'temporal').pyramid.levels[0].stitch()

        self.assertEqual(12.0, stitched.cells[0][0][0])
        self.assertEqual(16.0, stitched.cells[0][0][1])
        self.assertEqual(20.0, stitched.cells[0][1][1])
Example #15
    def test_apply_dimension_spatiotemporal(self):

        input = Pyramid({0: self.tiled_raster_rdd})

        imagecollection = GeopysparkDataCube(
            pyramid=input,
            metadata=GeopysparkCubeMetadata({
                "cube:dimensions": {
                    # TODO: also specify other dimensions?
                    "bands": {
                        "type": "bands",
                        "values": ["2"]
                    }
                },
                "summaries": {
                    "eo:bands": [{
                        "name": "2",
                        "common_name": "blue",
                        "wavelength_nm": 496.6,
                        "res_m": 10,
                        "scale": 0.0001,
                        "offset": 0,
                        "type": "int16",
                        "unit": "1"
                    }]
                }
            }))

        udf_code = """
def rct_savitzky_golay(udf_data:UdfData):
    from scipy.signal import savgol_filter

    print(udf_data.get_datacube_list())
    return udf_data
"""

        result = imagecollection.apply_tiles_spatiotemporal(udf_code)
        local_tiles = result.pyramid.levels[0].to_numpy_rdd().collect()
        print(local_tiles)
        self.assertEqual(len(TestMultipleDates.layer), len(local_tiles))
        ref_dict = {
            e[0]: e[1]
            for e in imagecollection.pyramid.levels[0].convert_data_type(
                CellType.FLOAT64).to_numpy_rdd().collect()
        }
        result_dict = {e[0]: e[1] for e in local_tiles}
        for k, v in ref_dict.items():
            tile = result_dict[k]
            assert_array_almost_equal(np.squeeze(v.cells),
                                      np.squeeze(tile.cells),
                                      decimal=2)
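The UDF above only prints the incoming datacube list. A sketch of actually applying the Savitzky-Golay filter along the time axis with scipy, on a plain numpy array rather than the UdfData wrapper (shapes and window settings are illustrative):

    import numpy as np
    from scipy.signal import savgol_filter

    stack = np.random.rand(7, 1, 8, 8)  # hypothetical (time, band, y, x) stack
    smoothed = savgol_filter(stack, window_length=5, polyorder=2, axis=0)
    print(smoothed.shape)               # (7, 1, 8, 8): same shape, smoothed along time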
Example #16
    def test_aggregate_temporal_median(self):
        input = Pyramid({0: self.tiled_raster_rdd})
        imagecollection = GeopysparkDataCube(pyramid=input,
                                             metadata=self.collection_metadata)
        stitched = (imagecollection.aggregate_temporal(
            ["2015-01-01", "2018-01-01"], ["2017-01-03"],
            self._median_reducer(),
            dimension="t").pyramid.levels[0].to_spatial_layer().stitch())
        print(stitched)
        expected_median = np.median(
            [self.tile.cells, self.tile2.cells, self.tile.cells], axis=0)
        # TODO nodata handling??
        assert_array_almost_equal(stitched.cells[0, 1:2, 1:2],
                                  expected_median[1:2, 1:2])
Example #17
    def test_reduce(self):
        input = Pyramid({0: self.tiled_raster_rdd})

        cube = GeopysparkDataCube(pyramid=input,
                                  metadata=self.collection_metadata)
        env = EvalEnv()

        stitched = cube.reduce_dimension(dimension="t",
                                         reducer=reducer("max"),
                                         env=env).pyramid.levels[0].stitch()
        print(stitched)
        self.assertEqual(2.0, stitched.cells[0][0][0])
        self.assertEqual(2.0, stitched.cells[0][0][1])

        stitched = cube.reduce_dimension(dimension="t",
                                         reducer=reducer("min"),
                                         env=env).pyramid.levels[0].stitch()
        print(stitched)
        self.assertEqual(2.0, stitched.cells[0][0][0])
        self.assertEqual(1.0, stitched.cells[0][0][1])

        stitched = cube.reduce_dimension(dimension="t",
                                         reducer=reducer("sum"),
                                         env=env).pyramid.levels[0].stitch()
        print(stitched)
        self.assertEqual(2.0, stitched.cells[0][0][0])
        self.assertEqual(4.0, stitched.cells[0][0][1])

        stitched = cube.reduce_dimension(dimension="t",
                                         reducer=reducer("mean"),
                                         env=env).pyramid.levels[0].stitch()
        print(stitched)
        self.assertEqual(2.0, stitched.cells[0][0][0])
        self.assertAlmostEqual(1.3333333, stitched.cells[0][0][1])

        stitched = cube.reduce_dimension(reducer=reducer("variance"),
                                         dimension="t",
                                         env=env).pyramid.levels[0].stitch()
        print(stitched)
        self.assertEqual(0.0, stitched.cells[0][0][0])
        self.assertAlmostEqual(0.2222222, stitched.cells[0][0][1])

        stitched = cube.reduce_dimension(reducer=reducer("sd"),
                                         dimension="t",
                                         env=env).pyramid.levels[0].stitch()
        print(stitched)
        self.assertEqual(0.0, stitched.cells[0][0][0])
        self.assertAlmostEqual(0.4714045, stitched.cells[0][0][1])
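The expected statistics are consistent with a per-pixel time series of [2, 1, 1] at cell [0][0][1] and population (ddof=0) variance; a quick numpy check:

    import numpy as np

    series = np.array([2.0, 1.0, 1.0])  # inferred time series at cell [0][0][1]
    print(series.sum())                 # 4.0
    print(series.mean())                # 1.3333...
    print(series.var())                 # 0.2222... (population variance)
    print(series.std())                 # 0.4714...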
Example #18
    def test_resample_spatial(self):
        input = Pyramid({0: self.tiled_raster_rdd})

        imagecollection = GeopysparkDataCube(pyramid=input,
                                             metadata=self.collection_metadata)

        resampled = imagecollection.resample_spatial(resolution=0.05)

        path = str(self.temp_folder / "resampled.tiff")
        res = resampled.reduce('max', dimension="t")
        res.save_result(path, format="GTIFF")

        import rasterio
        with rasterio.open(path) as ds:
            print(ds.profile)
            self.assertAlmostEqual(0.05, ds.res[0], 3)
Example #19
    def test_resample_spatial(self):
        input = Pyramid({0: self.tiled_raster_rdd})

        imagecollection = GeotrellisTimeSeriesImageCollection(
            input, InMemoryServiceRegistry())

        resampled = imagecollection.resample_spatial(resolution=0.05)

        path = str(self.temp_folder / "resampled.tiff")
        resampled.reduce('max',
                         'temporal').download(path,
                                              format="GTIFF",
                                              parameters={'tiled': True})

        import rasterio
        with rasterio.open(path) as ds:
            print(ds.profile)
            self.assertAlmostEqual(0.05, ds.res[0], 3)
Example #20
    def test_reduce_all_data(self):
        input = Pyramid({
            0:
            self._single_pixel_layer({
                datetime.datetime.strptime("2016-04-24T04:00:00Z", '%Y-%m-%dT%H:%M:%SZ'):
                1.0,
                datetime.datetime.strptime("2017-04-24T04:00:00Z", '%Y-%m-%dT%H:%M:%SZ'):
                5.0
            })
        })

        cube = GeopysparkDataCube(pyramid=input,
                                  metadata=self.collection_metadata)
        env = EvalEnv()
        stitched = cube.reduce_dimension(reducer=reducer("min"),
                                         dimension="t",
                                         env=env).pyramid.levels[0].stitch()
        self.assertEqual(1.0, stitched.cells[0][0][0])

        stitched = cube.reduce_dimension(reducer=reducer("max"),
                                         dimension="t",
                                         env=env).pyramid.levels[0].stitch()
        self.assertEqual(5.0, stitched.cells[0][0][0])

        stitched = cube.reduce_dimension(reducer=reducer("sum"),
                                         dimension="t",
                                         env=env).pyramid.levels[0].stitch()
        self.assertEqual(6.0, stitched.cells[0][0][0])

        stitched = cube.reduce_dimension(reducer=reducer("mean"),
                                         dimension="t",
                                         env=env).pyramid.levels[0].stitch()
        self.assertAlmostEqual(3.0, stitched.cells[0][0][0], delta=0.001)

        stitched = cube.reduce_dimension(reducer=reducer("variance"),
                                         dimension="t",
                                         env=env).pyramid.levels[0].stitch()
        self.assertAlmostEqual(4.0, stitched.cells[0][0][0], delta=0.001)

        stitched = cube.reduce_dimension(reducer=reducer("sd"),
                                         dimension="t",
                                         env=env).pyramid.levels[0].stitch()
        self.assertAlmostEqual(2.0, stitched.cells[0][0][0], delta=0.001)
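Here the single pixel holds the two observations 1.0 and 5.0, and the expected statistics again follow the population definitions; a numpy check:

    import numpy as np

    series = np.array([1.0, 5.0])       # the two dates' values for the single pixel
    print(series.sum(), series.mean())  # 6.0 3.0
    print(series.var(), series.std())   # 4.0 2.0 (population variance / sd)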
Example #21
    def test_apply_dimension_spatiotemporal(self):

        input = Pyramid({0: self.tiled_raster_rdd})

        imagecollection = GeotrellisTimeSeriesImageCollection(
            input, InMemoryServiceRegistry(), {
                "bands": [{
                    "band_id": "2",
                    "name": "blue",
                    "wavelength_nm": 496.6,
                    "res_m": 10,
                    "scale": 0.0001,
                    "offset": 0,
                    "type": "int16",
                    "unit": "1"
                }]
            })

        udf_code = """
def rct_savitzky_golay(udf_data:UdfData):
    from scipy.signal import savgol_filter

    print(udf_data.get_datacube_list())
    return udf_data
"""

        result = imagecollection.apply_tiles_spatiotemporal(udf_code)
        local_tiles = result.pyramid.levels[0].to_numpy_rdd().collect()
        print(local_tiles)
        self.assertEqual(len(TestMultipleDates.layer), len(local_tiles))
        ref_dict = {
            e[0]: e[1]
            for e in imagecollection.pyramid.levels[0].convert_data_type(
                CellType.FLOAT64).to_numpy_rdd().collect()
        }
        result_dict = {e[0]: e[1] for e in local_tiles}
        for k, v in ref_dict.items():
            tile = result_dict[k]
            assert_array_almost_equal(np.squeeze(v.cells),
                                      np.squeeze(tile.cells),
                                      decimal=2)
Example #22
    def test_min_time(self):
        input = Pyramid({0: self.tiled_raster_rdd})

        imagecollection = GeotrellisTimeSeriesImageCollection(
            input, InMemoryServiceRegistry())
        min_time = imagecollection.reduce('min', 'temporal')
        max_time = imagecollection.reduce('max', 'temporal')

        stitched = min_time.pyramid.levels[0].stitch()
        print(stitched)

        self.assertEqual(2.0, stitched.cells[0][0][0])

        for p in self.points[1:3]:
            result = min_time.timeseries(p.x, p.y, srs="EPSG:3857")
            print(result)
            print(imagecollection.timeseries(p.x, p.y, srs="EPSG:3857"))
            max_result = max_time.timeseries(p.x, p.y, srs="EPSG:3857")
            self.assertEqual(1.0, result['NoDate'])
            self.assertEqual(2.0, max_result['NoDate'])
Example #23
    def test_reduce_some_nodata(self):
        no_data = -1.0

        input = Pyramid({
            0:
            self._single_pixel_layer(
                {
                    datetime.datetime.strptime("2016-04-24T04:00:00Z", '%Y-%m-%dT%H:%M:%SZ'):
                    no_data,
                    datetime.datetime.strptime("2017-04-24T04:00:00Z", '%Y-%m-%dT%H:%M:%SZ'):
                    5.0
                }, no_data)
        })

        imagecollection = GeopysparkDataCube(pyramid=input,
                                             metadata=self.collection_metadata)

        stitched = imagecollection.reduce(
            "min", dimension="t").pyramid.levels[0].stitch()
        #print(stitched)
        self.assertEqual(5.0, stitched.cells[0][0][0])

        stitched = imagecollection.reduce(
            "max", dimension="t").pyramid.levels[0].stitch()
        self.assertEqual(5.0, stitched.cells[0][0][0])

        stitched = imagecollection.reduce(
            "sum", dimension="t").pyramid.levels[0].stitch()
        self.assertEqual(5.0, stitched.cells[0][0][0])

        stitched = imagecollection.reduce(
            "mean", dimension="t").pyramid.levels[0].stitch()
        self.assertAlmostEqual(5.0, stitched.cells[0][0][0], delta=0.001)

        stitched = imagecollection.reduce(
            "variance", dimension="t").pyramid.levels[0].stitch()
        self.assertAlmostEqual(0.0, stitched.cells[0][0][0], delta=0.001)

        stitched = imagecollection.reduce(
            "sd", dimension="t").pyramid.levels[0].stitch()
        self.assertAlmostEqual(0.0, stitched.cells[0][0][0], delta=0.001)
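The assertions imply that the -1.0 sample is treated as nodata and excluded before reducing, so every statistic collapses to the single valid observation 5.0. A masked-array sketch of that behaviour:

    import numpy as np

    no_data = -1.0
    values = np.ma.masked_equal(np.array([no_data, 5.0]), no_data)
    print(values.min(), values.max(), values.sum())   # 5.0 5.0 5.0
    print(values.mean(), values.var(), values.std())  # 5.0 0.0 0.0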
Example #24
    def test_reduce(self):
        input = Pyramid({0: self.tiled_raster_rdd})

        imagecollection = GeotrellisTimeSeriesImageCollection(
            input, InMemoryServiceRegistry())

        stitched = imagecollection.reduce(
            "max", "temporal").pyramid.levels[0].stitch()
        print(stitched)
        self.assertEqual(2.0, stitched.cells[0][0][0])
        self.assertEqual(2.0, stitched.cells[0][0][1])

        stitched = imagecollection.reduce(
            "min", "temporal").pyramid.levels[0].stitch()
        print(stitched)
        self.assertEqual(2.0, stitched.cells[0][0][0])
        self.assertEqual(1.0, stitched.cells[0][0][1])

        stitched = imagecollection.reduce(
            "sum", "temporal").pyramid.levels[0].stitch()
        print(stitched)
        self.assertEqual(2.0, stitched.cells[0][0][0])
        self.assertEqual(4.0, stitched.cells[0][0][1])

        stitched = imagecollection.reduce(
            "mean", "temporal").pyramid.levels[0].stitch()
        print(stitched)
        self.assertEqual(2.0, stitched.cells[0][0][0])
        self.assertAlmostEqual(1.3333333, stitched.cells[0][0][1])

        stitched = imagecollection.reduce(
            "variance", "temporal").pyramid.levels[0].stitch()
        print(stitched)
        self.assertEqual(0.0, stitched.cells[0][0][0])
        self.assertAlmostEqual(0.2222222, stitched.cells[0][0][1])

        stitched = imagecollection.reduce(
            "sd", "temporal").pyramid.levels[0].stitch()
        print(stitched)
        self.assertEqual(0.0, stitched.cells[0][0][0])
        self.assertAlmostEqual(0.4714045, stitched.cells[0][0][1])
Example #25
    def test_reduce_some_nodata(self):
        no_data = -1.0

        input = Pyramid({
            0:
            self._single_pixel_layer(
                {
                    datetime.datetime.strptime("2016-04-24T04:00:00Z", '%Y-%m-%dT%H:%M:%SZ'):
                    no_data,
                    datetime.datetime.strptime("2017-04-24T04:00:00Z", '%Y-%m-%dT%H:%M:%SZ'):
                    5.0
                }, no_data)
        })

        imagecollection = GeotrellisTimeSeriesImageCollection(
            input, InMemoryServiceRegistry())

        stitched = imagecollection.reduce(
            "min", "temporal").pyramid.levels[0].stitch()
        #print(stitched)
        self.assertEqual(5.0, stitched.cells[0][0][0])

        stitched = imagecollection.reduce(
            "max", "temporal").pyramid.levels[0].stitch()
        self.assertEqual(5.0, stitched.cells[0][0][0])

        stitched = imagecollection.reduce(
            "sum", "temporal").pyramid.levels[0].stitch()
        self.assertEqual(5.0, stitched.cells[0][0][0])

        stitched = imagecollection.reduce(
            "mean", "temporal").pyramid.levels[0].stitch()
        self.assertAlmostEqual(5.0, stitched.cells[0][0][0], delta=0.001)

        stitched = imagecollection.reduce(
            "variance", "temporal").pyramid.levels[0].stitch()
        self.assertAlmostEqual(0.0, stitched.cells[0][0][0], delta=0.001)

        stitched = imagecollection.reduce(
            "sd", "temporal").pyramid.levels[0].stitch()
        self.assertAlmostEqual(0.0, stitched.cells[0][0][0], delta=0.001)
Example #26
    def test_reproject_spatial(self):
        input = Pyramid({0: self.tiled_raster_rdd})

        imagecollection = GeotrellisTimeSeriesImageCollection(
            input, InMemoryServiceRegistry())

        resampled = imagecollection.resample_spatial(resolution=0,
                                                     projection="EPSG:3857",
                                                     method="max")
        metadata = resampled.pyramid.levels[0].layer_metadata
        print(metadata)
        self.assertTrue("proj=merc" in metadata.crs)
        path = str(self.temp_folder / "reprojected.tiff")
        resampled.reduce('max',
                         'temporal').download(path,
                                              format="GTIFF",
                                              parameters={'tiled': True})

        import rasterio
        with rasterio.open(path) as ds:
            print(ds.profile)
Example #27
def test_apply_spatiotemporal(udf_code):
    udf_code = textwrap.dedent(udf_code)

    input = Pyramid({0: TestMultipleDates.tiled_raster_rdd})

    imagecollection = GeopysparkDataCube(
        pyramid=input,
        metadata=GeopysparkCubeMetadata({
            "cube:dimensions": {
                # TODO: also specify other dimensions?
                "bands": {
                    "type": "bands",
                    "values": ["2"]
                }
            },
            "summaries": {
                "eo:bands": [{
                    "name": "2",
                    "common_name": "blue",
                    "wavelength_nm": 496.6,
                    "res_m": 10,
                    "scale": 0.0001,
                    "offset": 0,
                    "type": "int16",
                    "unit": "1"
                }]
            }
        }))

    result = imagecollection.apply_tiles_spatiotemporal(udf_code)
    stitched = result.pyramid.levels[0].to_spatial_layer().stitch()
    print(stitched)

    assert stitched.cells[0][0][0] == 2
    assert stitched.cells[0][0][5] == 6
    assert stitched.cells[0][5][6] == 4