Example #1
import os

import numpy as np
from netCDF4 import Dataset
from pygeogrids.grids import BasicGrid


def GLDAS025Grids(only_land=False):
    """
    Create global 0.25 DEG GLDAS grids (origin in the bottom left)

    Parameters
    ----------
    only_land : bool, optional (default: False)
        Use the land mask to reduce the global GLDAS 0.25 DEG grid to land
        points only.

    Returns
    -------
    grid : pygeogrids.CellGrid
        Either a land grid or a global grid
    """

    resolution = 0.25
    glob_lons = np.arange(-180 + resolution / 2, 180 + resolution / 2,
                          resolution)
    glob_lats = np.arange(-90 + resolution / 2, 90 + resolution / 2,
                          resolution)
    lon, lat = np.meshgrid(glob_lons, glob_lats)
    glob_grid = BasicGrid(lon.flatten(),
                          lat.flatten()).to_cell_grid(cellsize=5.0)

    if only_land:
        ds = Dataset(
            os.path.join(
                os.path.abspath(os.path.dirname(__file__)),
                "GLDASp4_landmask_025d.nc4",
            ))
        land_lats = ds.variables["lat"][:]
        # GLDAS_mask is 1 over land and 0 over water, so True marks the
        # points to be masked out (non-land)
        land_mask = ds.variables["GLDAS_mask"][:].flatten().filled() == 0.0
        dlat = glob_lats.size - land_lats.size

        # the land mask file covers fewer latitudes than the global grid;
        # pad the missing southern rows (origin is bottom left) as masked
        land_mask = np.concatenate((np.ones(dlat * glob_lons.size), land_mask))
        land_points = np.ma.masked_array(glob_grid.get_grid_points()[0],
                                         land_mask)

        land_grid = glob_grid.subgrid_from_gpis(
            land_points[~land_points.mask].filled())
        return land_grid
    else:
        return glob_grid
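
A minimal usage sketch for the function above; find_nearest_gpi and gpi2cell are standard pygeogrids methods, and the coordinates are arbitrary illustration values:

# build the global grid and the land-only subgrid
glob_grid = GLDAS025Grids(only_land=False)
land_grid = GLDAS025Grids(only_land=True)

# nearest grid point (index and distance) for an arbitrary lon/lat,
# and the 5 x 5 degree cell it belongs to
gpi, distance = land_grid.find_nearest_gpi(16.37, 48.21)
cell = land_grid.gpi2cell(gpi)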
Example #2
    def __init__(
        self,
        filename_pattern,
        parameter="mrsos",
        cellsize=5.0,
        only_land=False,
        bbox=None,
    ):
        self.parameter = parameter
        self.cellsize = cellsize
        self.only_land = only_land

        # open dataset with xarray, using dask backend for parallel access;
        # current xarray requires combine="nested" when concat_dim is given
        ds = xr.open_mfdataset(
            str(filename_pattern),
            parallel=True,
            combine="nested",
            concat_dim="time",
        )
        self.dataset = ds.stack(dimensions={"latlon": ("lat", "lon")})
        # lons are between 0 and 360; they have to be remapped to (-180, 180)
        self._lons = np.array(self.dataset.lon.values)
        self._lons[self._lons > 180] -= 360

        # setup grid (self.lon and self.lat are defined elsewhere in the class)
        global_grid = BasicGrid(self.lon, self.lat)

        # land mask
        self.landmask = ~np.isnan(self.dataset[parameter].isel(time=0).values)
        self.land_gpis = global_grid.get_grid_points()[0][self.landmask]
        if self.only_land:
            grid = global_grid.subgrid_from_gpis(self.land_gpis)
        else:
            grid = global_grid

        # bounding box
        if bbox is not None:
            # bbox = [lonmin, latmin, lonmax, latmax]
            self.lonmin, self.latmin, self.lonmax, self.latmax = bbox
            self.bbox_gpis = grid.get_bbox_grid_points(
                lonmin=self.lonmin,
                latmin=self.latmin,
                lonmax=self.lonmax,
                latmax=self.latmax,
            )
            grid = grid.subgrid_from_gpis(self.bbox_gpis)

        self.grid = grid.to_cell_grid(cellsize=self.cellsize)

        print(f"Number of active gpis: {len(self.grid.activegpis)}")
        print(f"Number of grid cells: {len(self.grid.get_cells())}")

        # create metadata dictionary from dataset attributes
        self.metadata = copy(self.dataset.attrs)
        array_metadata = copy(self.dataset[self.parameter].attrs)
        # merging history metadata (the only common keyword)
        self.metadata["history"] = (
            "Dataset: "
            + self.metadata["history"]
            + "; DataArray: "
            + array_metadata["history"]
        )
        del array_metadata["history"]
        self.metadata.update(array_metadata)
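
Only the constructor is shown above, so the following sketch hedges on the surrounding module: the class name CMIPImageStack and the file pattern are hypothetical placeholders for whatever class this __init__ belongs to.

# hypothetical class name and paths, for illustration only
stack = CMIPImageStack(
    "/data/cmip6/mrsos/mrsos_day_*.nc",  # glob pattern for xr.open_mfdataset
    parameter="mrsos",
    cellsize=5.0,
    only_land=True,
    bbox=[-11.0, 35.0, 40.0, 71.0],  # lonmin, latmin, lonmax, latmax
)
print(stack.metadata["history"])  # merged Dataset/DataArray history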
Example #3
    def __init__(
        self,
        fname,
        parameters,
        loading_func,
        cellsize=5.0,
        only_land=False,
        bbox=None,
    ):
        # input validation
        if isinstance(parameters, str):
            parameters = [parameters]
        self.parameters = parameters
        if isinstance(loading_func, str):
            if loading_func not in loading_func_dict:  # pragma: no cover
                raise ValueError(
                    f"No loading function with the name '{loading_func}'"
                    " exists.")
            loading_func = loading_func_dict[loading_func]
        self.loading_func = loading_func
        self.cellsize = cellsize
        self.only_land = only_land

        # load dataset
        self.dataset = loading_func(str(fname), self.parameters)

        # Img2Ts prefers flattened data
        self.dataset = self.dataset.stack(
            dimensions={"latlon": ("lat", "lon")})
        # lons are between 0 and 360; they have to be remapped to (-180, 180)
        self._lons = np.array(self.dataset.lon.values)
        self._lons[self._lons > 180] -= 360

        # setup grid (self.lon and self.lat are defined elsewhere in the class)
        global_grid = BasicGrid(self.lon, self.lat)

        # land mask
        if self.only_land:
            if "landmask" in self.dataset:
                self.landmask = self.dataset.landmask.values
            else:  # pragma: no cover
                raise ValueError("No landmask available!")
            self.land_gpis = global_grid.get_grid_points()[0][self.landmask]
            grid = global_grid.subgrid_from_gpis(self.land_gpis)
        else:
            grid = global_grid

        # bounding box
        if bbox is not None:
            # bbox = [lonmin, latmin, lonmax, latmax]
            self.lonmin, self.latmin, self.lonmax, self.latmax = bbox
            self.bbox_gpis = grid.get_bbox_grid_points(
                lonmin=self.lonmin,
                latmin=self.latmin,
                lonmax=self.lonmax,
                latmax=self.latmax,
            )
            grid = grid.subgrid_from_gpis(self.bbox_gpis)

        self.grid = grid.to_cell_grid(cellsize=self.cellsize)

        print(f"Number of active gpis: {len(self.grid.activegpis)}")
        print(f"Number of grid cells: {len(self.grid.get_cells())}")

        # create metadata dictionary from dataset attributes
        # this copies the dataset metadata directly and adds the metadata of
        # the individual variables, prefixing their keys with <param_name>_
        self.metadata = copy(self.dataset.attrs)
        array_metadata = {}
        for p in self.parameters:
            md = copy(self.dataset[p].attrs)
            for key in md:
                array_metadata["_".join([p, key])] = md[key]
        self.metadata.update(array_metadata)
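
As above, only the __init__ is shown, so this usage sketch is hedged: the class name StackImageReader and the loader function are assumptions, with the loader simply opening a netCDF file with xarray and subsetting the requested variables to match the loading_func(fname, parameters) call seen in the constructor.

import xarray as xr

# hypothetical loader matching the expected loading_func signature
def open_netcdf(fname, parameters):
    return xr.open_dataset(fname)[parameters]

# hypothetical class name for the constructor above
reader = StackImageReader(
    "soil_moisture.nc",
    parameters=["sm_surface", "sm_rootzone"],
    loading_func=open_netcdf,
    cellsize=5.0,
)
# variable attributes end up prefixed, e.g. "sm_surface_units"
print(sorted(reader.metadata))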