def _read_dfs2_header(self): if not os.path.isfile(self._filename): raise Exception(f"file {self._filename} does not exist!") self._dfs = DfsFileFactory.Dfs2FileOpen(self._filename) self._dx = self._dfs.SpatialAxis.Dx self._dy = self._dfs.SpatialAxis.Dy self._nx = self._dfs.SpatialAxis.XCount self._ny = self._dfs.SpatialAxis.YCount self._read_header()
def _read_dfs2_header(self): if not os.path.isfile(self._filename): raise Exception(f"file {self._filename} does not exist!") self._dfs = DfsFileFactory.Dfs2FileOpen(self._filename) self._dx = self._dfs.SpatialAxis.Dx self._dy = self._dfs.SpatialAxis.Dy self._nx = self._dfs.SpatialAxis.XCount self._ny = self._dfs.SpatialAxis.YCount if self._dfs.FileInfo.TimeAxis.TimeAxisType == 4: self._is_equidistant = False self._read_header()
def find_nearest_element(
    self,
    lon,
    lat,
):
    """Locate the grid cell closest to a geographic point.

    Parameters
    ----------
    lon: float
        longitude of the point
    lat: float
        latitude of the point

    Returns
    -------
    (int,int):
        (y-index, x-index) of the nearest cell
    """
    self._dfs = DfsFileFactory.Dfs2FileOpen(self._filename)
    proj = self._dfs.FileInfo.Projection
    axis = self._dfs.SpatialAxis

    cartography = Cartography(
        proj.WKTString,
        proj.Longitude,
        proj.Latitude,
        proj.Orientation,
    )

    # Geo2Xy uses C# out-parameters; the Python binding returns them
    # as part of a tuple instead.
    _, x_proj, y_proj = cartography.Geo2Xy(lon, lat, 0.0, 0.0)

    # Convert projected coordinates to (row, col); rows are counted from
    # the top, hence the YCount - ... - 1 flip.
    col = int(x_proj / axis.Dx + 0.5)
    row = axis.YCount - int(y_proj / axis.Dy + 0.5) - 1

    # Clamp to the valid grid extent.
    col = min(max(0, col), axis.XCount - 1)
    row = min(max(0, row), axis.YCount - 1)

    return row, col
def reproject(
    self,
    filename,
    projectionstring,
    dx,
    dy,
    longitude_origin=None,
    latitude_origin=None,
    nx=None,
    ny=None,
    orientation=0.0,
    interpolate=True,
):
    """
    Reproject and write results to a new dfs2 file

    Parameters
    ----------
    filename: str
        location to write the reprojected dfs2 file
    projectionstring: str
        WKT string of new projection
    dx: float
        length of each grid in the x direction (projection units)
    dy: float
        length of each grid in the y direction (projection units)
    latitude_origin: float, optional
        latitude at origin of new grid, default same as original
    longitude_origin: float, optional
        longitude at origin of new grid, default same as original
    nx: int, optional
        n grid points in x direction, default same as original
    ny: int, optional
        n grid points in y direction, default same as original
    orientation: float, optional
        rotated grid, default 0.0
    interpolate: bool, optional
        interpolate to new grid, default true

    Examples
    --------
    >>> dfs = Dfs2("input.dfs")
    >>> dfs.reproject("out.dfs2", projectionstring="UTM-33",
    ...               dx=200.0, dy=200.0,
    ...               longitude_origin=12.0, latitude_origin=55.0,
    ...               nx=285, ny=612)
    """
    # Any geometry not supplied explicitly falls back to the source grid.
    nx = self.shape[2] if nx is None else nx
    ny = self.shape[1] if ny is None else ny
    latitude_origin = self.latitude if latitude_origin is None else latitude_origin
    longitude_origin = self.longitude if longitude_origin is None else longitude_origin

    source_file = DfsFileFactory.Dfs2FileOpen(self._filename)
    reprojector = Dfs2Reprojector(source_file, filename)
    reprojector.Interpolate = interpolate
    # SetTarget signature: (projection, lon0, lat0, orientation,
    #                       nx, x0, dx, ny, y0, dy) — origins offset 0.0.
    reprojector.SetTarget(
        projectionstring,
        longitude_origin,
        latitude_origin,
        orientation,
        nx,
        0.0,
        dx,
        ny,
        0.0,
        dy,
    )
    reprojector.Process()
def _open(self):
    """Open the dfs2 file and keep the handle on both internal aliases."""
    dfs = DfsFileFactory.Dfs2FileOpen(self._filename)
    self._dfs = dfs
    self._source = dfs
def read(self, items=None, time_steps=None):
    """
    Read data from a dfs2 file

    Parameters
    ---------
    items: list[int] or list[str], optional
        Read only selected items, by number (0-based), or by name
    time_steps: int or list[int], optional
        Read only selected time_steps

    Returns
    -------
    Dataset
        A dataset with data dimensions [t,y,x]
    """
    dfs = DfsFileFactory.Dfs2FileOpen(self._filename)
    self._dfs = dfs
    self._source = dfs

    nt = dfs.FileInfo.TimeAxis.NumberOfTimeSteps
    items, item_numbers, time_steps = get_valid_items_and_timesteps(
        self, items, time_steps
    )

    # Grid dimensions come from the file's spatial axis.
    axis = dfs.SpatialAxis
    n_rows = axis.YCount
    n_cols = axis.XCount

    if nt == 0:
        raise ValueError("Static files (with no dynamic items) are not supported.")
    for t in time_steps:
        if t > (nt - 1):
            raise ValueError(f"Trying to read timestep {t}: max timestep is {nt-1}")

    delete_value = dfs.FileInfo.DeleteValueFloat

    self._n_items = len(item_numbers)
    shape = (len(time_steps), n_rows, n_cols)
    data_list = [np.ndarray(shape=shape, dtype=float) for _ in range(self._n_items)]

    t_seconds = np.zeros(len(time_steps), dtype=float)

    for i, it in enumerate(time_steps):
        for item in range(self._n_items):
            # ReadItemTimeStep is 1-based, item_numbers are 0-based.
            itemdata = dfs.ReadItemTimeStep(item_numbers[item] + 1, it)
            values = to_numpy(itemdata.Data).reshape(n_rows, n_cols)
            # Flip so row 0 is the northernmost row — TODO confirm convention.
            values = np.flipud(values)
            values[values == delete_value] = np.nan
            data_list[item][i, :, :] = values
        t_seconds[i] = itemdata.Time

    start_time = from_dotnet_datetime(dfs.FileInfo.TimeAxis.StartDateTime)
    time = [start_time + timedelta(seconds=sec) for sec in t_seconds]

    items = get_item_info(dfs, item_numbers)

    dfs.Close()

    return Dataset(data_list, time, items)
def _read_dfs2_header(self):
    """Read grid spacing from the file and hand off to ``_read_header``."""
    dfs = DfsFileFactory.Dfs2FileOpen(self._filename)
    axis = dfs.SpatialAxis
    self._dx = axis.Dx
    self._dy = axis.Dy
    self._read_header(dfs)
def read(self, filename, item_numbers=None, item_names=None, time_steps=None):
    """
    Read data from a dfs2 file

    Parameters
    ---------
    filename: str
        dfs2 filename
    item_numbers: list[int], optional
        Read only selected items, by number (0-based)
    item_names: list[str], optional
        Read only selected items, by name, takes precedence over item_numbers
    time_steps: list[int], optional
        Read only selected time_steps

    Returns
    -------
    Dataset
        A dataset with data dimensions [t,y,x]

    Raises
    ------
    ValueError
        If the file is static (no dynamic items) or a requested timestep
        is out of range.
    """
    # NOTE: item numbers are 0-based here, but ReadItemTimeStep below is
    # 1-based, hence the +1 when reading.

    # Open the dfs file for reading
    dfs = DfsFileFactory.Dfs2FileOpen(filename)
    self._dfs = dfs

    if item_names is not None:
        item_numbers = find_item(dfs, item_names)

    if item_numbers is None:
        n_items = safe_length(dfs.ItemInfo)
        item_numbers = list(range(n_items))

    nt = dfs.FileInfo.TimeAxis.NumberOfTimeSteps
    if time_steps is None:
        time_steps = list(range(nt))

    # Determine the size of the grid
    axis = dfs.SpatialAxis
    yNum = axis.YCount
    xNum = axis.XCount

    if nt == 0:
        # BUG FIX: the original raised the Warning class as an exception and
        # had unreachable code (nt = 1) after the raise; raise ValueError
        # instead, consistent with the sibling read() implementation.
        raise ValueError(
            "Static files (with no dynamic items) are not supported.")

    for t in time_steps:
        if t > (nt - 1):
            raise ValueError(
                f"Trying to read timestep {t}: max timestep is {nt-1}")

    deleteValue = dfs.FileInfo.DeleteValueFloat

    n_items = len(item_numbers)
    data_list = []
    for item in range(n_items):
        # Initialize an empty data block per item; every cell is written below.
        data = np.ndarray(shape=(len(time_steps), yNum, xNum), dtype=float)
        data_list.append(data)

    t_seconds = np.zeros(len(time_steps), dtype=float)

    for i in range(len(time_steps)):
        it = time_steps[i]
        for item in range(n_items):
            itemdata = dfs.ReadItemTimeStep(item_numbers[item] + 1, it)
            d = to_numpy(itemdata.Data)
            d = d.reshape(yNum, xNum)
            # Flip so row 0 is the northernmost row — TODO confirm convention.
            d = np.flipud(d)
            d[d == deleteValue] = np.nan
            data_list[item][i, :, :] = d
        t_seconds[i] = itemdata.Time

    start_time = from_dotnet_datetime(dfs.FileInfo.TimeAxis.StartDateTime)
    time = [start_time + timedelta(seconds=tsec) for tsec in t_seconds]

    items = get_item_info(dfs, item_numbers)

    dfs.Close()

    return Dataset(data_list, time, items)