Code example #1
    def test_eval_multiple_outputs(self):

        lat = clinspace(45, 66, 30, name="lat")
        lon = clinspace(-80, 70, 40, name="lon")
        kernel = [[1, 2, 1]]
        coords = Coordinates([lat, lon])
        multi = Array(source=np.random.random(coords.shape + (2, )),
                      coordinates=coords,
                      outputs=["a", "b"])
        node = Convolution(source=multi,
                           kernel=kernel,
                           kernel_dims=["lat", "lon"])
        o1 = node.eval(Coordinates([lat, lon]))

        kernel = [[[1, 2]]]
        coords = Coordinates([lat, lon])
        multi = Array(source=np.random.random(coords.shape + (2, )),
                      coordinates=coords,
                      outputs=["a", "b"])
        node1 = Convolution(source=multi,
                            kernel=kernel,
                            kernel_dims=["lat", "lon", "output"],
                            force_eval=True)
        node2 = Convolution(source=multi,
                            kernel=kernel[0],
                            kernel_dims=["lat", "lon"],
                            force_eval=True)
        o1 = node1.eval(Coordinates([lat, lon]))
        o2 = node2.eval(Coordinates([lat, lon]))

        assert np.any(o2.data != o1.data)
Code example #2
File: egi.py  Project: RabindraNP/podpac
    def coordinates(self):
        if self.data is None:
            _log.warning("No coordinates found in EGI source")
            return Coordinates([], dims=[])

        return Coordinates.from_xarray(self.data.coords,
                                       crs=self.data.attrs["crs"])
Code example #3
    def test_eval(self):
        lat = clinspace(45, 66, 30, name="lat")
        lon = clinspace(-80, 70, 40, name="lon")
        time = crange("2017-09-01", "2017-10-31", "1,D", name="time")

        kernel1d = [1, 2, 1]
        kernel2d = [[1, 2, 1]]
        kernel3d = [[[1, 2, 1]]]

        node1d = Convolution(source=Arange(), kernel=kernel1d, kernel_dims=["time"])
        node2d = Convolution(source=Arange(), kernel=kernel2d, kernel_dims=["lat", "lon"])
        node3d = Convolution(source=Arange(), kernel=kernel3d, kernel_dims=["lon", "lat", "time"])

        o = node1d.eval(Coordinates([time]))
        o = node2d.eval(Coordinates([lat, lon]))
        o = node3d.eval(Coordinates([lat, lon, time]))

        with pytest.raises(
            ValueError, match="Kernel dims must contain all of the dimensions in source but not all of "
        ):
            node2d.eval(Coordinates([lat, lon, time]))

        with pytest.raises(
            ValueError, match="Kernel dims must contain all of the dimensions in source but not all of "
        ):
            node2d.eval(Coordinates([lat, time]))
Code example #4
    def test_extra_coord_dims(self):
        lat = clinspace(-0.25, 1.25, 7, name="lat")
        lon = clinspace(-0.125, 1.125, 11, name="lon")
        time = ["2012-05-19", "2016-01-31", "2018-06-20"]
        coords = Coordinates([lat, lon, time], dims=["lat", "lon", "time"])

        source = Array(source=np.random.random(coords.drop("time").shape), coordinates=coords.drop("time"))
        node = Convolution(source=source, kernel=[[-1, 2, -1]], kernel_dims=["lat", "lon"], force_eval=True)
        o = node.eval(coords)
        assert np.all([d in ["lat", "lon"] for d in o.dims])
Code example #5
    def test_coords_order(self):
        lat = clinspace(-0.25, 1.25, 7, name="lat")
        lon = clinspace(-0.125, 1.125, 11, name="lon")
        coords = Coordinates([lat, lon])

        lat = clinspace(0, 1, 5, name="lat")
        lon = clinspace(0, 1, 9, name="lon")
        coords1 = Coordinates([lat, lon])
        coords2 = Coordinates([lon, lat])

        source = Array(source=np.random.random(coords.shape), coordinates=coords)
        node = Convolution(source=source, kernel=[[-1, 2, -1]], kernel_dims=["lat", "lon"], force_eval=True)
        o1 = node.eval(coords1)
        o2 = node.eval(coords2)
        assert np.all(o2.data == o1.data.T)
Code example #6
def get_terrain_tiles(which):
    # create coordinates to get tiles
    node = TerrainTiles(tile_format='geotiff', zoom=5)
    coords = 0
    if (which == 'both'):
        coords = Coordinates([clinspace(75, -60, 1000), clinspace(-155, -35, 1000)], dims=['lat', 'lon'])
    elif(which == 'north'):
        coords = Coordinates([clinspace(75, 10, 1000), clinspace(-155, -50, 1000)], dims=['lat', 'lon'])
    elif(which == 'south'):
        coords = Coordinates([clinspace(15, -60, 1000), clinspace(-85, -45, 1000)], dims=['lat', 'lon'])
    # evaluate node
    ev = node.eval(coords)
    data = np.asarray(ev.data)

    return data
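
A hypothetical call for context (not part of the source excerpt): each branch above requests a 1000 x 1000 lat/lon grid, so the returned array has shape (1000, 1000).

north_elevation = get_terrain_tiles('north')  # hypothetical usage, not in the source
print(north_elevation.shape)                  # (1000, 1000): one elevation value per requested point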
Code example #7
    def test_debuggable_source(self):
        with podpac.settings:
            podpac.settings["DEBUG"] = False
            lat = clinspace(45, 66, 30, name="lat")
            lon = clinspace(-80, 70, 40, name="lon")
            coords = Coordinates([lat, lon])

            # normal version
            a = Arange()
            node = Convolution(source=a, kernel=[[1, 2, 1]], kernel_dims=["lat", "lon"])
            node.eval(coords)

            assert node.source is a

            # debuggable
            podpac.settings["DEBUG"] = True

            a = Arange()
            node = Convolution(source=a, kernel=[[1, 2, 1]], kernel_dims=["lat", "lon"])
            node.eval(coords)

            assert node.source is not a
            assert node._requested_coordinates == coords
            assert node.source._requested_coordinates is not None
            assert node.source._requested_coordinates != coords
            assert a._requested_coordinates is None
Code example #8
File: generic.py  Project: creare-com/podpac
    def algorithm(self, inputs, coordinates):
        cs = [Coordinates.from_xarray(x) for x in inputs.values()]
        if any(c != cs[0] for c in cs):
            raise NodeException("Cannot combine inputs with different coordinates")

        data = np.stack([inputs[key] for key in self.inputs], axis=-1)
        return self.create_output_array(cs[0], data=data)
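
A minimal sketch (mine, not from the source) of what the np.stack call in algorithm() above produces: inputs with identical coordinates are stacked along a new trailing axis, one slice per input key.

import numpy as np

a = np.zeros((30, 40))               # two hypothetical single-output rasters on the same (lat, lon) grid
b = np.ones((30, 40))
stacked = np.stack([a, b], axis=-1)  # same stacking used in algorithm()
assert stacked.shape == (30, 40, 2)  # the trailing axis indexes the combined outputs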
Code example #9
    def test_extra_kernel_dims(self):
        lat = clinspace(45, 66, 8, name="lat")
        lon = clinspace(-80, 70, 16, name="lon")
        coords = Coordinates([lat, lon])

        node = Convolution(source=Arange(), kernel=[[[1, 2, 1]]], kernel_dims=["time", "lat", "lon"])
        o = node.eval(coords)
Code example #10
    def test_eval_with_output_argument(self):
        lat = clinspace(45, 66, 30, name="lat")
        lon = clinspace(-80, 70, 40, name="lon")
        coords = Coordinates([lat, lon])

        node = Convolution(source=Arange(), kernel=[[1, 2, 1]], kernel_dims=["lat", "lon"])

        a = node.create_output_array(coords)
        o = node.eval(coords, output=a)
        assert_array_equal(a, o)
Code example #11
    def test_eval_nan(self):
        lat = clinspace(45, 66, 30, name="lat")
        lon = clinspace(-80, 70, 40, name="lon")
        coords = Coordinates([lat, lon])

        data = np.ones(coords.shape)
        data[10, 10] = np.nan
        source = Array(source=data, coordinates=coords)
        node = Convolution(source=source, kernel=[[1, 2, 1]], kernel_dims=["lat", "lon"])

        o = node.eval(coords[8:12, 7:13])
Code example #12
    def test_terrain_tiles(self):
        c = Coordinates([clinspace(40, 43, 1000), clinspace(-76, -72, 1000)], dims=["lat", "lon"])
        c2 = Coordinates(
            [clinspace(40, 43, 1000), clinspace(-76, -72, 1000), ["2018-01-01", "2018-01-02"]],
            dims=["lat", "lon", "time"],
        )

        node = TerrainTiles(tile_format="geotiff", zoom=8)
        output = node.eval(c)
        assert np.any(np.isfinite(output))

        output = node.eval(c2)
        assert np.any(np.isfinite(output))

        node = TerrainTiles(tile_format="geotiff", zoom=8, cache_ctrl=["ram", "disk"])
        output = node.eval(c)
        assert np.any(np.isfinite(output))

        # tile urls
        print(np.array(get_tile_urls("geotiff", 1)))
        print(np.array(get_tile_urls("geotiff", 9, coordinates=c)))
Code example #13
    def test_eval(self):
        lat = clinspace(45, 66, 30, name="lat")
        lon = clinspace(-80, 70, 40, name="lon")
        time = crange("2017-09-01", "2017-10-31", "1,D", name="time")

        kernel1d = [1, 2, 1]
        kernel2d = [[1, 2, 1]]
        kernel3d = [[[1, 2, 1]]]

        node1d = Convolution(source=Arange(),
                             kernel=kernel1d,
                             kernel_dims=["time"])
        node2d = Convolution(source=Arange(),
                             kernel=kernel2d,
                             kernel_dims=["lat", "lon"])
        node3d = Convolution(source=Arange(),
                             kernel=kernel3d,
                             kernel_dims=["lon", "lat", "time"])

        o = node1d.eval(Coordinates([time]))
        o = node2d.eval(Coordinates([lat, lon]))
        o = node3d.eval(Coordinates([lat, lon, time]))
Code example #14
    def test_missing_source_dims(self):
        """ When the kernel has more dimensions than the source, sum out the kernel for the missing dim"""
        lat = clinspace(-0.25, 1.25, 7, name="lat")
        lon = clinspace(-0.125, 1.125, 11, name="lon")
        time = ["2012-05-19", "2016-01-31", "2018-06-20"]
        coords = Coordinates([lat, lon, time], dims=["lat", "lon", "time"])
        coords2 = Coordinates([lat[[1, 2, 4]], lon, time],
                              dims=["lat", "lon", "time"])

        source = Array(source=np.random.random(coords.drop("time").shape),
                       coordinates=coords.drop("time"))
        node = Convolution(source=source,
                           kernel=[[[-1], [2], [-1]]],
                           kernel_dims=["lat", "lon", "time"],
                           force_eval=True)
        o = node.eval(coords[:, 1:-1, :])
        expected = source.source[:, 1:-1] * 2 - source.source[:, 2:] - source.source[:, :-2]
        assert np.abs(o.data - expected).max() < 1e-14

        # Check when request has an ArrayCoordinates1d
        node = Convolution(source=source,
                           kernel_type="mean,3",
                           kernel_dims=["lat", "lon", "time"],
                           force_eval=True)
        o = node.eval(coords2[:, 1:-1])
        expected = (
            source.source[[1, 2, 4], 1:-1] + source.source[[0, 1, 2], 1:-1] +
            source.source[[2, 4, 6], 1:-1] + source.source[[1, 2, 4], :-2] +
            source.source[[0, 1, 2], :-2] + source.source[[2, 4, 6], :-2] +
            source.source[[1, 2, 4], 2:] + source.source[[0, 1, 2], 2:] +
            source.source[[2, 4, 6], 2:]) / 9
        assert np.abs(o.data - expected).max() < 1e-14

        # Check to make sure array coordinates for a single coordinate is ok...
        o = node.eval(coords2[0, 1:-1])
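
A small illustration (my sketch, not from the source) of the "sum out the kernel" behavior the docstring above describes: the 3-D kernel declared over ["lat", "lon", "time"] collapses to a 2-D lat/lon kernel by summing over the "time" axis that the source lacks, which is why the expected values match a [[-1, 2, -1]] convolution.

import numpy as np

kernel3d = np.array([[[-1], [2], [-1]]])  # kernel_dims ["lat", "lon", "time"], shape (1, 3, 1)
kernel2d = kernel3d.sum(axis=2)           # "time" is missing from the source, so it is summed out
print(kernel2d)                           # [[-1  2 -1]] -- the effective lat/lon kernel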
Code example #15
File: intake_catalog.py  Project: creare-com/podpac
    def get_coordinates(self):
        """Get coordinates from catalog definition or input dims"""

        # look for dims in catalog
        if self.dims is None:
            if "dims" in self.dataset.metadata:
                self.dims = self.dataset.metadata["dims"]
            else:
                raise ValueError(
                    "No coordinates dims defined in catalog or input")

        # look for crs in catalog
        if self.crs is None:
            if "crs" in self.dataset.metadata:
                self.crs = self.dataset.metadata["crs"]

        source_data = self.source_data
        c_data = []

        # identifiers are columns when container is a dataframe
        if self.dataset.container == "dataframe":
            for dim in self.dims:
                c_data.append(source_data[self.dims[dim]].values)

            return Coordinates(c_data, dims=list(self.dims.keys()))

        ## TODO: this needs to be tested
        elif self.dataset.container == "ndarray":
            for dim in self.dims:
                c_data.append(source_data[self.dims[dim]])

            return Coordinates(c_data, dims=list(self.dims.keys()))

        else:
            raise ValueError(
                "podpac does not currently support dataset container {}".
                format(self.dataset.container))
Code example #16
    def test_partial_source_convolution(self):
        lat = clinspace(-0.25, 1.25, 7, name="lat")
        lon = clinspace(-0.125, 1.125, 11, name="lon")
        time = ["2012-05-19", "2016-01-31", "2018-06-20"]
        coords = Coordinates([lat, lon, time], dims=["lat", "lon", "time"])

        source = Array(source=np.random.random(coords.shape),
                       coordinates=coords)
        node = Convolution(source=source,
                           kernel=[[-1, 2, -1]],
                           kernel_dims=["lat", "lon"],
                           force_eval=True)
        o = node.eval(coords[:, 1:-1, :])
        expected = source.source[:, 1:-1] * 2 - source.source[:, 2:] - source.source[:, :-2]

        assert np.abs(o.data - expected).max() < 1e-14
Code example #17
    def open(cls, *args, **kwargs):
        """
        Open an :class:`podpac.UnitsDataArray` from a file or file-like object containing a single data variable.

        This is a wrapper around :func:`xarray.open_dataarray`.
        The inputs to this function are passed directly to :func:`xarray.open_dataarray`.
        See http://xarray.pydata.org/en/stable/generated/xarray.open_dataarray.html#xarray.open_dataarray.

        The DataArray passed back from :func:`xarray.open_dataarray` is used to create a units data array via :meth:`create`.

        Returns
        -------
        :class:`podpac.UnitsDataArray`
        """
        da = xr.open_dataarray(*args, **kwargs)
        coords = Coordinates.from_xarray(da)

        # pass in kwargs to constructor
        uda_kwargs = {"attrs": da.attrs}
        if "output" in da.dims:
            uda_kwargs.update({"outputs": da.coords["output"]})
        return cls.create(coords, data=da.data, **uda_kwargs)
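
A hedged usage sketch (the file name is an assumption, not from the source): reopen a previously saved single-variable NetCDF file as a podpac.UnitsDataArray.

from podpac import UnitsDataArray

uda = UnitsDataArray.open("saved_output.nc")  # hypothetical file written earlier, e.g. with to_netcdf()
print(uda.dims, uda.attrs.get("crs"))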
Code example #18
File: smap_egi.py  Project: RabindraNP/podpac
    def read_file(self, filelike):
        """Interpret individual SMAP file from  EGI zip archive.

        Parameters
        ----------
        filelike : filelike
            Reference to file inside EGI zip archive

        Returns
        -------
        podpac.UnitsDataArray

        Raises
        ------
        ValueError
        """
        ds = h5py.File(filelike, "r")

        # handle data
        data = ds[self._data_key][()]

        if self.check_quality_flags and self.quality_flag_key:
            flag = ds[self.quality_flag_key][()]
            flag = flag > 0
            data[flag] = np.nan  # mask out samples that fail the quality check

        data = np.array([data])  # add extra dimension for time slice

        # handle time
        if "SPL3" in self.product:
            # TODO: make this py2.7 compatible
            # take the midpoint between the range identified in the file
            t_start = np.datetime64(
                ds["Metadata/Extent"].attrs["rangeBeginningDateTime"].replace(
                    b"Z", b""))
            t_end = np.datetime64(
                ds["Metadata/Extent"].attrs["rangeEndingDateTime"].replace(
                    b"Z", b""))
            time = np.array([t_start + (t_end - t_start) / 2])
            time = time.astype("datetime64[D]")

        elif "SPL4" in self.product:
            time_unit = ds["time"].attrs["units"].decode()
            time = xr.coding.times.decode_cf_datetime(ds["time"][()][0],
                                                      units=time_unit)
            time = time.astype("datetime64[h]")

        # handle spatial coordinates
        if "SPL3" in self.product:

            # take nan mean along each axis
            lons = ds[self.lon_key][()]
            lats = ds[self.lat_key][()]
            lons[lons == self.nan_vals[0]] = np.nan
            lats[lats == self.nan_vals[0]] = np.nan

            # short-circuit if all lat/lon are nan
            if np.all(np.isnan(lats)) and np.all(np.isnan(lons)):
                return None

            # make podpac coordinates
            lon = np.nanmean(lons, axis=0)
            lat = np.nanmean(lats, axis=1)
            c = Coordinates([time, lat, lon], dims=["time", "lat", "lon"])

        elif "SPL4" in self.product:
            # lat/lon coordinates in EPSG:6933 (https://epsg.io/6933)
            lon = ds["x"][()]
            lat = ds["y"][()]

            # short-circuit if all lat/lon are nan
            if np.all(np.isnan(lat)) and np.all(np.isnan(lon)):
                return None

            c = Coordinates([time, lat, lon],
                            dims=["time", "lat", "lon"],
                            crs="epsg:6933")

        # make units data array with coordinates and data
        return UnitsDataArray.create(c, data=data)
Code example #19
File: smap_egi.py  Project: RabindraNP/podpac
            data.lon.data[:] = lon.data
            data.lat.data[:] = lat.data

        return all_data.combine_first(data)


if __name__ == "__main__":
    import logging
    import getpass
    from podpac import Coordinates, clinspace

    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)

    username = input("Username:")
    password = getpass.getpass("Password:")

    # NOTE: the lon/lat extents below were masked out in the source listing;
    # the ranges used here are placeholders, not the original values.
    c = Coordinates(
        [
            clinspace(-180, 180, 10),  # placeholder lon range
            clinspace(-90, 90, 10),  # placeholder lat range
            clinspace("2015-07-06", "2015-07-08", 10),
        ],
        dims=["lon", "lat", "time"],
    )

    node = SMAP(product="SPL3SMP_AM", username=username, password=password)
    output = node.eval(c)
    print(output)
Code example #20
File: terraintiles.py  Project: RabindraNP/podpac
        (x, y) int tile coordinates
    """

    tiles = 2**zoom
    diameter = 2 * np.pi
    x = int(tiles * (xm + np.pi) / diameter)
    y = int(tiles * (np.pi - ym) / diameter)

    return x, y


if __name__ == "__main__":
    from podpac import Coordinates, clinspace

    c = Coordinates([clinspace(40, 43, 1000),
                     clinspace(-76, -72, 1000)],
                    dims=["lat", "lon"])
    c2 = Coordinates([
        clinspace(40, 43, 1000),
        clinspace(-76, -72, 1000), ["2018-01-01", "2018-01-02"]
    ],
                     dims=["lat", "lon", "time"])

    print("TerrainTiles")
    node = TerrainTiles(tile_format="geotiff", zoom=8)
    output = node.eval(c)
    print(output)

    output = node.eval(c2)
    print(output)
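
A worked check of the tile-index math above (my own arithmetic, not from the source): the helper maps web-mercator coordinates in [-pi, pi] to integer tile indices, so at zoom 5 there are 2**5 = 32 tiles per axis and the mercator origin lands in tile (16, 16).

import numpy as np

zoom, xm, ym = 5, 0.0, 0.0
tiles = 2 ** zoom
diameter = 2 * np.pi
print(int(tiles * (xm + np.pi) / diameter), int(tiles * (np.pi - ym) / diameter))  # 16 16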
Code example #21
def to_geotiff(fp, data, geotransform=None, crs=None, **kwargs):
    """Export a UnitsDataArray to a Geotiff

    Parameters
    ----------
    fp:  str, file object or pathlib.Path object
        A filename or URL, a file object opened in binary ('rb') mode, or a Path object. If not supplied, the results will
        be written to a memfile object
    data: UnitsDataArray, xr.DataArray, np.ndarray
        The data to be saved. If there is more than 1 band, this should be the last dimension of the array.
        If given a np.ndarray, ensure that the 'lat' dimension is aligned with the rows of the data, with an appropriate
        geotransform.
    geotransform: tuple, optional
        The geotransform that describes the input data. If not given, will look for data.attrs['geotransform']
    crs: str, optional
        The coordinate reference system for the data
    kwargs: **dict
        Additional keyword arguments that overwrite defaults used in the `rasterio.open` function. This function
        populates the following defaults:
                drive="GTiff"
                height=data.shape[0]
                width=data.shape[1]
                count=data.shape[2]
                dtype=data.dtype
                mode="w"

    Returns
    -------
    list, MemoryFile
        If fp is given, returns a list of the return values from writing each band.
        If fp is None, returns the MemoryFile object containing the GeoTIFF.
    """

    # This only works for data that essentially has lat/lon only
    dims = list(data.coords.keys())
    if "lat" not in dims or "lon" not in dims:
        raise NotImplementedError("Cannot export GeoTIFF for dataset with lat/lon coordinates.")
    if "time" in dims and len(data.coords["time"]) > 1:
        raise NotImplemented("Cannot export GeoTIFF for dataset with multiple times,")
    if "alt" in dims and len(data.coords["alt"]) > 1:
        raise NotImplemented("Cannot export GeoTIFF for dataset with multiple altitudes.")

    # TODO: add proper checks, etc. to make sure we handle edge cases and throw errors when we cannot support
    #       i.e. do work to remove this warning.
    _logger.warning("GeoTIFF export assumes data is in a uniform, non-rotated coordinate system.")

    # Get the crs and geotransform that describes the coordinates
    if crs is None:
        crs = data.attrs.get("crs")
    if crs is None:
        raise ValueError(
            "The `crs` of the data needs to be provided to save as GeoTIFF. If supplying a UnitsDataArray, created "
            " through a PODPAC Node, the crs should be automatically populated. If not, please file an issue."
        )
    if geotransform is None:
        geotransform = data.attrs.get("geotransform")
        # Geotransform should ALWAYS be defined as (lon_origin, lon_dj, lon_di, lat_origin, lat_dj, lat_di)
        # if isinstance(data, xr.DataArray) and data.dims.index('lat') > data.dims.index('lon'):
        # geotransform = geotransform[3:] + geotransform[:3]

    if geotransform is None:
        try:
            geotransform = Coordinates.from_xarray(data).geotransform
        except (TypeError, AttributeError):
            raise ValueError(
                "The `geotransform` of the data needs to be provided to save as GeoTIFF. If the geotransform attribute "
                "wasn't automatically populated as part of the dataset, it means that the data is in a non-uniform "
                "coordinate system. This can sometimes happen when the data is transformed to a different CRS than the "
                "native CRS, which can cause the coordinates to seems non-uniform due to floating point precision. "
            )

    # Make all types into a numpy array
    if isinstance(data, xr.DataArray):
        data = data.data

    # Get the data
    dtype = kwargs.get("dtype", np.float32)
    data = data.astype(dtype).squeeze()

    if len(data.shape) == 2:
        data = data[:, :, None]

    geotransform = affine.Affine.from_gdal(*geotransform)

    # Update the kwargs that rasterio will use. Anything added by the user will take priority.
    kwargs2 = dict(
        driver="GTiff",
        height=data.shape[0],
        width=data.shape[1],
        count=data.shape[2],
        dtype=data.dtype,
        crs=crs,
        transform=geotransform,
    )
    kwargs2.update(kwargs)

    # Write the file
    if fp is None:
        # Write to memory file
        r = rasterio.io.MemoryFile()
        with r.open(**kwargs2) as dst:
            for i in range(data.shape[2]):
                dst.write(data[..., i], i + 1)
    else:
        r = []
        kwargs2["mode"] = "w"
        with rasterio.open(fp, **kwargs2) as dst:
            for i in range(data.shape[2]):
                r.append(dst.write(data[..., i], i + 1))

    return r
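
A hedged usage sketch (node choice and output file name are assumptions, not from the source). It assumes the evaluated output carries a 'crs' attribute and uniform lat/lon coordinates, which PODPAC outputs normally provide, so to_geotiff can recover the geotransform.

from podpac import Coordinates, clinspace
from podpac.algorithm import Arange  # assumed import path for the utility node used in the tests above

coords = Coordinates([clinspace(40, 43, 100), clinspace(-76, -72, 100)], dims=["lat", "lon"])
output = Arange().eval(coords)
to_geotiff("arange_example.tif", output)  # hypothetical file name; writes a single band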
Code example #22
class TestReprojection(object):
    source_coords = Coordinates(
        [clinspace(0, 8, 9, "lat"),
         clinspace(0, 8, 9, "lon")])
    coarse_coords = Coordinates(
        [clinspace(0, 8, 3, "lat"),
         clinspace(0, 8, 3, "lon")])
    source = Array(source=np.arange(81).reshape(9, 9),
                   coordinates=source_coords,
                   interpolation="nearest")
    source_coarse = Array(source=[[0, 4, 8], [36, 40, 44], [72, 76, 80]],
                          coordinates=coarse_coords,
                          interpolation="bilinear")
    source_coarse2 = Array(
        source=[[0, 4, 8], [36, 40, 44], [72, 76, 80]],
        coordinates=coarse_coords.transform("EPSG:3857"),
        interpolation="bilinear",
    )

    def test_reprojection_Coordinates(self):
        reproject = Reproject(source=self.source,
                              interpolation="bilinear",
                              coordinates=self.coarse_coords)
        o1 = reproject.eval(self.source_coords)
        o2 = self.source_coarse.eval(self.source_coords)

        assert_array_equal(o1.data, o2.data)

        node = podpac.Node.from_json(reproject.json)
        o3 = node.eval(self.source_coords)
        assert_array_equal(o1.data, o3.data)

    def test_reprojection_source_coords(self):
        reproject = Reproject(source=self.source,
                              interpolation="bilinear",
                              coordinates=self.source_coarse)
        o1 = reproject.eval(self.coarse_coords)
        o2 = self.source_coarse.eval(self.coarse_coords)

        assert_array_equal(o1.data, o2.data)

        node = podpac.Node.from_json(reproject.json)
        o3 = node.eval(self.coarse_coords)
        assert_array_equal(o1.data, o3.data)

    def test_reprojection_source_dict(self):
        reproject = Reproject(source=self.source,
                              interpolation="bilinear",
                              coordinates=self.coarse_coords.definition)
        o1 = reproject.eval(self.coarse_coords)
        o2 = self.source_coarse.eval(self.coarse_coords)

        assert_array_equal(o1.data, o2.data)

        node = podpac.Node.from_json(reproject.json)
        o3 = node.eval(self.coarse_coords)
        assert_array_equal(o1.data, o3.data)

    def test_reprojection_source_str(self):
        reproject = Reproject(source=self.source,
                              interpolation="bilinear",
                              coordinates=self.coarse_coords.json)
        o1 = reproject.eval(self.coarse_coords)
        o2 = self.source_coarse.eval(self.coarse_coords)

        assert_array_equal(o1.data, o2.data)

        node = podpac.Node.from_json(reproject.json)
        o3 = node.eval(self.coarse_coords)
        assert_array_equal(o1.data, o3.data)

    def test_reprojection_Coordinates_crs(self):
        # same eval and source but different reproject
        reproject = Reproject(
            source=self.source,
            interpolation={
                "method": "bilinear",
                "params": {
                    "fill_value": "extrapolate"
                }
            },
            coordinates=self.coarse_coords.transform("EPSG:3857"),
        )
        o1 = reproject.eval(self.source_coords)
        # We have to use a second source here because the reprojected source
        # gets interpreted as having its source coordinates in EPSG:3857
        # and when being subsampled, there's a warping effect...
        o2 = self.source_coarse2.eval(self.source_coords)
        assert_almost_equal(o1.data, o2.data, decimal=13)

        node = podpac.Node.from_json(reproject.json)
        o3 = node.eval(self.source_coords)
        assert_array_equal(o1.data, o3.data)

        # same eval and reproject but different source
        o1 = reproject.eval(self.source_coords.transform("EPSG:3857"))
        o2 = self.source_coarse2.eval(
            self.source_coords.transform("EPSG:3857"))
        assert_almost_equal(o1.data, o2.data, decimal=13)

        # same source and reproject but different eval
        reproject = Reproject(source=self.source,
                              interpolation="bilinear",
                              coordinates=self.coarse_coords)
        o1 = reproject.eval(self.source_coords.transform("EPSG:3857"))
        o2 = self.source_coarse.eval(self.source_coords.transform("EPSG:3857"))
        assert_almost_equal(o1.data, o2.data, decimal=13)
Code example #23
File: ex1.py  Project: mikelytaev/wave-propagation
settings['DEFAULT_CACHE'] = ['disk']

# create terrain tiles node
node = TerrainTiles(tile_format='geotiff', zoom=11)

#lat = 60.5
#lon = 30
lat, lon = 53.548254, 157.328588
dir = 135
x_grid = np.linspace(0, 200000, 10000)
#coords = geodesic_problem(lat, lon, dir, x_grid)
coords, x_grid = inv_geodesic_problem(60.112502, 29.636637, 60.699130, 31.288706, 1000)
lats = [c[0] for c in coords]
lons = [c[1] for c in coords]

# create coordinates to get tiles
#c = Coordinates([clinspace(10, 11, 2), clinspace(10, 11, 2)], dims=['lat', 'lon'])
c = Coordinates([lats, lons], dims=['lat', 'lon'])

# evaluate node
o = node.eval(c)

eval = np.array([o.data[i, i] for i in range(0, len(x_grid))])

plt.plot(x_grid, eval)
plt.show()

angles = np.arctan((eval[1::] - eval[:-1:]) / (x_grid[1] - x_grid[0])) * 180 / cm.pi
plt.plot(x_grid[:-1:], angles)
plt.show()
Code example #24
import matplotlib.pyplot as plt
import numpy as np
import json
import csv
import pandas as pd
import time
from copy import deepcopy
from podpac.managers import aws

N = 2000
POINTS_NUM = 10
GRID = int(N / POINTS_NUM)
# getting data from the terrain tiles dataset with podpac
terrain_node = TerrainTiles(tile_format='geotiff', zoom=7)
# Europe's coordinates
coords = Coordinates(
    [clinspace(71, 35, N), clinspace(-9, 30, N)], dims=['lat', 'lon'])

# node = podpac.managers.aws.Lambda(source=terrain_node)

o = terrain_node.eval(coords)
data_from_tt = np.asarray(o.data)
# original data -> plotting
initial_data = deepcopy(data_from_tt)
# copied data -> calculations
_, h = initial_data.shape
# initial data -> (2000, 2000)
# grid_tiles -> (40000, 10, 10)
grid_tiles = initial_data.reshape(h // POINTS_NUM, POINTS_NUM, -1,
                                  POINTS_NUM).swapaxes(1, 2).reshape(
                                      -1, POINTS_NUM, POINTS_NUM)
# calculating mean height value in every region
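
The excerpt ends right before the step its last comment announces; a minimal sketch of that step, given the shapes stated in the comments (my assumption, not from the source):

# grid_tiles has shape (40000, 10, 10): averaging each 10x10 tile gives one mean height per region
mean_heights = grid_tiles.mean(axis=(1, 2)).reshape(GRID, GRID)  # -> (200, 200)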
Code example #25
import podpac
from podpac.core.coordinates.stacked_coordinates import StackedCoordinates
from podpac.core.coordinates.array_coordinates1d import ArrayCoordinates1d
from podpac.core.coordinates.affine_coordinates import AffineCoordinates
from podpac.core.coordinates.uniform_coordinates1d import UniformCoordinates1d

# origin [10, 20], pixel size [3, 2], north up
GEOTRANSFORM_NORTHUP = (10.0, 2.0, 0.0, 20.0, 0.0, -3.0)

# origin [10, 20], step [2, 3], rotated 20 degrees
GEOTRANSFORM_ROTATED = (10.0, 1.879, -1.026, 20.0, 0.684, 2.819)

from podpac import Coordinates

UNIFORM = Coordinates.from_geotransform(geotransform=GEOTRANSFORM_NORTHUP,
                                        shape=(3, 4))


class TestAffineCoordinatesCreation(object):
    def test_init(self):
        c = AffineCoordinates(geotransform=GEOTRANSFORM_NORTHUP, shape=(3, 4))

        assert c.geotransform == GEOTRANSFORM_NORTHUP
        assert c.shape == (3, 4)
        assert c.is_affine
        assert c.dims == ("lat", "lon")
        assert c.udims == ("lat", "lon")
        assert len(set(c.xdims)) == 2
        assert c.name == "lat_lon"
        repr(c)
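
For reference, a short sketch (mine, not from the source) of how a GDAL-style geotransform such as GEOTRANSFORM_NORTHUP maps pixel indices to coordinates; AffineCoordinates and Coordinates.from_geotransform encapsulate this mapping.

def pixel_center(geotransform, i, j):
    """Map (row i, col j) to the (lat, lon) pixel center, GDAL convention (illustration only)."""
    x0, dx_col, dx_row, y0, dy_col, dy_row = geotransform
    lon = x0 + (j + 0.5) * dx_col + (i + 0.5) * dx_row
    lat = y0 + (j + 0.5) * dy_col + (i + 0.5) * dy_row
    return lat, lon

# For GEOTRANSFORM_NORTHUP = (10.0, 2.0, 0.0, 20.0, 0.0, -3.0), pixel (0, 0) is centered at
# lat 18.5, lon 11.0, matching "origin [10, 20], pixel size [3, 2], north up".
print(pixel_center(GEOTRANSFORM_NORTHUP, 0, 0))  # (18.5, 11.0)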