Пример #1
0
    def read(self, items=None, time_steps=None):
        """
        Read data from a dfs0 file.

        Parameters
        ----------
        items: list[int] or list[str], optional
            Read only selected items, by number (0-based), or by name
        time_steps: int or list[int], optional
            Read only selected time_steps

        Returns
        -------
        Dataset
            A dataset with data dimensions [t]

        Raises
        ------
        FileNotFoundError
            if the file does not exist
        """

        if not os.path.exists(self._filename):
            raise FileNotFoundError(f"File {self._filename} not found.")

        # Open once to cache the header info used by the validator below
        dfs = DfsFileFactory.DfsGenericOpen(self._filename)
        self._source = dfs
        self._n_items = safe_length(dfs.ItemInfo)
        self._n_timesteps = dfs.FileInfo.TimeAxis.NumberOfTimeSteps

        items, item_numbers, _ = get_valid_items_and_timesteps(self, items, time_steps)

        dfs.Close()

        ds = self.__read(self._filename)
        ds = ds[item_numbers]
        # BUG FIX: compare against None explicitly — a plain truthiness test
        # silently skipped the subsetting for time_steps == 0 (first step)
        # or an empty list.
        if time_steps is not None:
            ds = ds.isel(time_steps, axis=0)

        return ds
Пример #2
0
    def grid_coordinates(self, dfs3file):
        """Return basic grid information for a dfs3 file.

        Parameters
        ----------
        dfs3file
            a full path and filename to the dfs3 file

        Returns
        -------
        tuple
            (X0, Y0, dx, dy, nx, ny, nz, nt) where (X0, Y0) are the bottom
            left coordinates, (dx, dy) the grid spacing, (nx, ny, nz) the
            number of grid elements in each direction and nt the number of
            time steps.
        """
        dfs_file = DfsFileFactory.DfsGenericOpen(dfs3file)

        # Grid geometry is stored on the spatial axis of the first item
        spatial_axis = dfs_file.ItemInfo[0].SpatialAxis
        origin_x, origin_y = spatial_axis.X0, spatial_axis.Y0
        spacing_x, spacing_y = spatial_axis.Dx, spatial_axis.Dy
        count_x = spatial_axis.XCount
        count_y = spatial_axis.YCount
        count_z = spatial_axis.ZCount
        n_timesteps = dfs_file.FileInfo.TimeAxis.NumberOfTimeSteps

        dfs_file.Close()

        return (origin_x, origin_y, spacing_x, spacing_y,
                count_x, count_y, count_z, n_timesteps)
Пример #3
0
def diff(infilename_a: str, infilename_b: str, outfilename: str) -> None:
    """Calculate difference between two dfs files (a-b)

    Parameters
    ----------
    infilename_a: str
        full path to the first input file
    infilename_b: str
        full path to the second input file
    outfilename: str
        full path to the output file
    """

    # Start from a copy of file a so the output header/structure is identical
    copyfile(infilename_a, outfilename)

    dfs_i_a = DfsFileFactory.DfsGenericOpen(infilename_a)
    dfs_i_b = DfsFileFactory.DfsGenericOpen(infilename_b)
    dfs_o = DfsFileFactory.DfsGenericOpenEdit(outfilename)

    deletevalue = dfs_i_a.FileInfo.DeleteValueFloat

    n_time_steps = dfs_i_a.FileInfo.TimeAxis.NumberOfTimeSteps
    n_items = safe_length(dfs_i_a.ItemInfo)
    # TODO Add checks to verify identical structure of file a and b

    for timestep in trange(n_time_steps):
        for item in range(n_items):

            # ReadItemTimeStep uses 1-based item numbers
            itemdata_a = dfs_i_a.ReadItemTimeStep(item + 1, timestep)
            d_a = to_numpy(itemdata_a.Data)
            d_a[d_a == deletevalue] = np.nan

            itemdata_b = dfs_i_b.ReadItemTimeStep(item + 1, timestep)
            d_b = to_numpy(itemdata_b.Data)
            d_b[d_b == deletevalue] = np.nan
            time = itemdata_a.Time

            outdata = d_a - d_b

            # BUG FIX: map NaN back to the delete value before writing, so the
            # output is a valid dfs file instead of containing raw NaNs
            # (same pattern as dfstodfs0)
            outdata[np.isnan(outdata)] = deletevalue

            darray = to_dotnet_float_array(outdata)

            dfs_o.WriteItemTimeStep(item + 1, timestep, time, darray)

    dfs_i_a.Close()
    dfs_i_b.Close()
    dfs_o.Close()
Пример #4
0
    def read(self, items=None, time_steps=None):
        """
        Read data from a dfs0 file.

        Parameters
        ----------
        items: list[int] or list[str], optional
            Read only selected items, by number (0-based), or by name
        time_steps: str, int or list[int], optional
            Read only selected time_steps

        Returns
        -------
        Dataset
            A dataset with data dimensions [t]

        Raises
        ------
        FileNotFoundError
            if the file does not exist
        """

        if not os.path.exists(self._filename):
            raise FileNotFoundError(f"File {self._filename} not found.")

        dfs = DfsFileFactory.DfsGenericOpen(self._filename)
        self._source = dfs

        self._n_items = safe_length(dfs.ItemInfo)
        item_numbers = _valid_item_numbers(dfs.ItemInfo, items)

        self._n_timesteps = dfs.FileInfo.TimeAxis.NumberOfTimeSteps

        # A string time_steps selection ("start,end") is only meaningful for a
        # non-equidistant calendar axis; defer it until after the bulk read.
        if self._timeaxistype == TimeAxisType.NonEquidistantCalendar and isinstance(
            time_steps, str
        ):
            sel_time_step_str = time_steps
            time_steps = range(self._n_timesteps)
        else:
            sel_time_step_str = None

        dfs.Close()

        ds = self.__read(self._filename)
        ds = ds[item_numbers]
        # BUG FIX: compare against None explicitly — a plain truthiness test
        # silently skipped the subsetting for time_steps == 0 (first step)
        # or an empty list.
        if time_steps is not None:
            ds = ds.isel(time_steps, axis=0)

        if sel_time_step_str:
            # "start,end" (either side may be empty = open-ended) or a single
            # timestamp (treated as start == end)
            parts = sel_time_step_str.split(",")
            if len(parts) == 1:
                parts.append(parts[0])  # end=start

            if parts[0] == "":
                sel = slice(parts[1])  # stop only
            elif parts[1] == "":
                sel = slice(parts[0], None)  # start only
            else:
                sel = slice(parts[0], parts[1])
            ds = ds[sel]

        return ds
Пример #5
0
def _clone(infilename, outfilename):
    """Clone a dfs file

    Parameters
    ----------
    infilename : str
        input filename
    outfilename : str
        output filename

    Returns
    -------
    DfsFile
        MIKE generic dfs file object
    """
    dfs_source = DfsFileFactory.DfsGenericOpen(infilename)
    file_info = dfs_source.FileInfo

    dfs_builder = DfsBuilder.Create(
        file_info.FileTitle, file_info.ApplicationTitle,
        file_info.ApplicationVersion)

    # Header: copy everything relevant from the source file
    dfs_builder.SetDataType(file_info.DataType)
    dfs_builder.SetGeographicalProjection(file_info.Projection)
    dfs_builder.SetTemporalAxis(file_info.TimeAxis)
    dfs_builder.SetItemStatisticsType(file_info.StatsType)
    dfs_builder.DeleteValueByte = file_info.DeleteValueByte
    dfs_builder.DeleteValueDouble = file_info.DeleteValueDouble
    dfs_builder.DeleteValueFloat = file_info.DeleteValueFloat
    dfs_builder.DeleteValueInt = file_info.DeleteValueInt
    dfs_builder.DeleteValueUnsignedInt = file_info.DeleteValueUnsignedInt

    # Custom blocks (if any) and all dynamic items are copied verbatim
    for custom_block in file_info.CustomBlocks:
        dfs_builder.AddCustomBlock(custom_block)
    for item_info in dfs_source.ItemInfo:
        dfs_builder.AddDynamicItem(item_info)

    dfs_builder.CreateFile(outfilename)

    # Static items: ReadStaticItemNext returns None when exhausted
    for static_item in iter(dfs_source.ReadStaticItemNext, None):
        dfs_builder.AddStaticItem(static_item)

    cloned_file = dfs_builder.GetFile()

    dfs_source.Close()

    return cloned_file
Пример #6
0
def sum(infilename_a, infilename_b, outfilename):
    """Sum two dfs files (a+b)

    Parameters
    ----------
    infilename_a: str
        full path to the first input file
    infilename_b: str
        full path to the second input file
    outfilename: str
        full path to the output file
    """
    # Start from a copy of file a so the output header/structure is identical
    copyfile(infilename_a, outfilename)

    dfs_i_a = DfsFileFactory.DfsGenericOpen(infilename_a)
    dfs_i_b = DfsFileFactory.DfsGenericOpen(infilename_b)
    dfs_o = DfsFileFactory.DfsGenericOpenEdit(outfilename)

    deletevalue = dfs_i_a.FileInfo.DeleteValueFloat

    n_time_steps = dfs_i_a.FileInfo.TimeAxis.NumberOfTimeSteps
    n_items = safe_length(dfs_i_a.ItemInfo)
    # TODO Add checks to verify identical structure of file a and b

    for timestep in range(n_time_steps):
        for item in range(n_items):

            # ReadItemTimeStep uses 1-based item numbers
            itemdata_a = dfs_i_a.ReadItemTimeStep(item + 1, timestep)
            d_a = to_numpy(itemdata_a.Data)
            # BUG FIX: mask delete values before the arithmetic — previously
            # they were summed as ordinary numbers, producing garbage output
            # (the sibling `diff` function already masked them)
            d_a[d_a == deletevalue] = np.nan

            itemdata_b = dfs_i_b.ReadItemTimeStep(item + 1, timestep)
            d_b = to_numpy(itemdata_b.Data)
            d_b[d_b == deletevalue] = np.nan
            time = itemdata_a.Time

            outdata = d_a + d_b

            # Map NaN back to the delete value so the output stays a valid
            # dfs file (same pattern as dfstodfs0)
            outdata[np.isnan(outdata)] = deletevalue

            darray = to_dotnet_float_array(outdata)

            dfs_o.WriteItemTimeStep(item + 1, timestep, time, darray)

    dfs_i_a.Close()
    dfs_i_b.Close()
    dfs_o.Close()
Пример #7
0
    def __read(self, filename):
        """Read all data, times and item info from the dfs0 file.

        Returns
        -------
        tuple
            (data, t, items): data is a (nt, n_items) matrix with delete
            values replaced by NaN, t a list of timestamp strings, items a
            list of ItemInfo objects.

        Raises
        ------
        FileNotFoundError
            if the file does not exist
        """
        if not os.path.exists(filename):
            # BUG FIX: previously `raise Warning("... %s", filename)` — raised
            # a Warning class and never formatted the %-style arguments
            raise FileNotFoundError(f"File {filename} does not exist.")

        dfs = DfsFileFactory.DfsGenericOpen(filename)
        self._dfs = dfs

        n_items = safe_length(dfs.ItemInfo)
        nt = dfs.FileInfo.TimeAxis.NumberOfTimeSteps

        # Collect item metadata (name, EUM type, EUM unit) for each item
        items = []
        for i in range(n_items):
            eumItem = dfs.ItemInfo[i].Quantity.Item
            eumUnit = dfs.ItemInfo[i].Quantity.Unit
            name = dfs.ItemInfo[i].Name
            itemtype = EUMType(eumItem)
            unit = EUMUnit(eumUnit)
            item = ItemInfo(name, itemtype, unit)
            items.append(item)

        # BULK READ THE DFS0
        dfsdata = Dfs0Util.ReadDfs0DataDouble(dfs)

        t = []
        starttime = dfs.FileInfo.TimeAxis.StartDateTime

        # Column 0 holds each step's offset in seconds from the start time
        for it in range(dfs.FileInfo.TimeAxis.NumberOfTimeSteps):
            t.append(
                starttime.AddSeconds(
                    dfsdata[it, 0]).ToString("yyyy-MM-dd HH:mm:ss"))

        # Copies the System Array to a numpy matrix
        # First column is the time (the rest is the data)
        data = np.fromiter(dfsdata, np.float64).reshape(nt, n_items + 1)[:, 1:]

        mask = np.isclose(data, dfs.FileInfo.DeleteValueFloat, atol=1e-36)
        data[mask] = np.nan
        # mask = np.isclose(
        #    data, dfs.FileInfo.DeleteValueDouble, atol=1e-34
        # )  # TODO needs to be verified
        # data[mask] = np.nan

        dfs.Close()

        return data, t, items
Пример #8
0
    def __read(self, filename):
        """Read all data, times and item info from the dfs0 file.

        Returns
        -------
        tuple
            (data, time, items): data is a (nt, n_items) matrix with delete
            values replaced by NaN, time a list of datetimes, items a list
            of ItemInfo objects.

        Raises
        ------
        FileNotFoundError
            if the file does not exist
        """
        if not os.path.exists(filename):
            # BUG FIX: previously `raise Warning("... %s", filename)` — raised
            # a Warning class and never formatted the %-style arguments
            raise FileNotFoundError(f"File {filename} does not exist.")

        dfs = DfsFileFactory.DfsGenericOpen(filename)
        self._dfs = dfs

        n_items = safe_length(dfs.ItemInfo)
        nt = dfs.FileInfo.TimeAxis.NumberOfTimeSteps

        # Collect item metadata (name, EUM type, EUM unit) for each item
        items = []
        for i in range(n_items):
            eumItem = dfs.ItemInfo[i].Quantity.Item
            eumUnit = dfs.ItemInfo[i].Quantity.Unit
            name = dfs.ItemInfo[i].Name
            itemtype = EUMType(eumItem)
            unit = EUMUnit(eumUnit)
            item = ItemInfo(name, itemtype, unit)
            items.append(item)

        # BULK READ THE DFS0
        dfsdata = Dfs0Util.ReadDfs0DataDouble(dfs)
        # First column is the time (the rest is the data)
        t_seconds = [dfsdata[i, 0] for i in range(nt)]

        start_time = from_dotnet_datetime(dfs.FileInfo.TimeAxis.StartDateTime)
        time = [start_time + timedelta(seconds=tsec) for tsec in t_seconds]

        # TODO use to_numpy ?
        data = np.fromiter(dfsdata, np.float64).reshape(nt, n_items + 1)[:, 1:]

        mask = np.isclose(data, dfs.FileInfo.DeleteValueFloat, atol=1e-36)
        data[mask] = np.nan
        # mask = np.isclose(
        #    data, dfs.FileInfo.DeleteValueDouble, atol=1e-34
        # )  # TODO needs to be verified
        # data[mask] = np.nan

        dfs.Close()

        return data, time, items
Пример #9
0
    def __read(self, filename):
        """
        Read data from a dfs0 file.

        Returns
        -------
        tuple
            (data, time, items)

        Raises
        ------
        FileNotFoundError
            if the file does not exist
        """
        if not os.path.exists(filename):
            # BUG FIX: the f-string had no placeholder, so the message never
            # included the missing filename
            raise FileNotFoundError(f"File {filename} not found.")

        self._dfs = DfsFileFactory.DfsGenericOpen(filename)
        self._n_items = safe_length(self._dfs.ItemInfo)
        self._n_timesteps = self._dfs.FileInfo.TimeAxis.NumberOfTimeSteps
        self._time_column_index = 0  # First column is time (the rest is data)

        raw_data = Dfs0Util.ReadDfs0DataDouble(self._dfs)  # Bulk read the data

        data = self.__to_numpy_with_nans(raw_data)
        time = list(self.__get_time(raw_data))
        items = list(self.__get_items())

        self._dfs.Close()

        return data, time, items
Пример #10
0
    def __read(self, filename):
        """
        Read all data from a dfs0 file.

        Returns
        -------
        Dataset
            dataset built from the file's columns, times and item info
        """
        self._time_column_index = 0  # First column is time (the rest is data)

        self._dfs = DfsFileFactory.DfsGenericOpen(filename)
        raw_data = Dfs0Util.ReadDfs0DataDouble(self._dfs)  # Bulk read the data

        matrix = self.__to_numpy_with_nans(raw_data)

        # Split the matrix into one 1-d array per column
        # (whether the time column is still present here depends on
        # __to_numpy_with_nans — TODO confirm)
        data = [matrix[:, i] for i in range(matrix.shape[1])]

        time = list(self.__get_time(raw_data))
        items = list(self.__get_items())

        self._dfs.Close()

        return Dataset(data, time, items)
Пример #11
0
    def __read(self, filename):
        """Read all data, times and item names from the dfs0 file.

        Returns
        -------
        tuple
            (data, t, names): data is a (nt, n_items) matrix with delete
            values replaced by NaN, t a list of timestamp strings, names a
            list of item names.

        Raises
        ------
        FileNotFoundError
            if the file does not exist
        """
        if not os.path.exists(filename):
            # BUG FIX: previously `raise Warning("... %s", filename)` — raised
            # a Warning class and never formatted the %-style arguments
            raise FileNotFoundError(f"File {filename} does not exist.")

        dfs = DfsFileFactory.DfsGenericOpen(filename)

        n_items = safe_length(dfs.ItemInfo)
        nt = dfs.FileInfo.TimeAxis.NumberOfTimeSteps

        names = []
        for i in range(n_items):
            names.append(dfs.ItemInfo[i].Name)

        # BULK READ THE DFS0
        dfsdata = Dfs0Util.ReadDfs0DataDouble(dfs)

        t = []
        starttime = dfs.FileInfo.TimeAxis.StartDateTime

        # Column 0 holds each step's offset in seconds from the start time
        for it in range(dfs.FileInfo.TimeAxis.NumberOfTimeSteps):
            t.append(
                starttime.AddSeconds(
                    dfsdata[it, 0]).ToString("yyyy-MM-dd HH:mm:ss"))

        # Copies the System Array to a numpy matrix
        # First column is the time (the rest is the data)
        data = np.fromiter(dfsdata, np.float64).reshape(nt, n_items + 1)[:, 1:]

        # NOTE(review): the two hard-coded constants look like baked-in
        # single/double precision delete values — confirm against the MIKE SDK
        data[data == -1.0000000180025095e-35] = np.nan
        data[data == -1.0000000031710769e-30] = np.nan
        data[data == dfs.FileInfo.DeleteValueFloat] = np.nan
        data[data == dfs.FileInfo.DeleteValueDouble] = np.nan

        dfs.Close()

        return data, t, names
Пример #12
0
    def _read_header(self):
        """Open the file, cache its header info (delete value, items,
        time-axis type and start time) on the instance, then close it.

        Raises
        ------
        FileNotFoundError
            if the file does not exist
        """
        if not os.path.exists(self._filename):
            raise FileNotFoundError(self._filename)

        dfs = DfsFileFactory.DfsGenericOpen(self._filename)
        self._deletevalue = dfs.FileInfo.DeleteValueFloat

        # Item metadata
        self._n_items = safe_length(dfs.ItemInfo)
        self._items = _get_item_info(dfs.ItemInfo, list(range(self._n_items)))

        # Time axis: calendar axes carry a real start time; a relative axis
        # gets an arbitrary fixed epoch instead
        self._timeaxistype = TimeAxisType(dfs.FileInfo.TimeAxis.TimeAxisType)
        calendar_axes = (
            TimeAxisType.EquidistantCalendar,
            TimeAxisType.NonEquidistantCalendar,
        )
        if self._timeaxistype in calendar_axes:
            self._start_time = from_dotnet_datetime(dfs.FileInfo.TimeAxis.StartDateTime)
        else:
            self._start_time = datetime(1970, 1, 1)

        dfs.Close()
Пример #13
0
def _clone(infilename: str,
           outfilename: str,
           start_time=None,
           items=None) -> DfsFile:
    """Clone a dfs file

    Parameters
    ----------
    infilename : str
        input filename
    outfilename : str
        output filename
    start_time : datetime, optional
        new start time for the new file, default
    items : list(int), optional
        clone only these items, default: all items

    Returns
    -------
    DfsFile
        MIKE generic dfs file object
    """
    dfs_source = DfsFileFactory.DfsGenericOpen(infilename)
    file_info = dfs_source.FileInfo

    dfs_builder = DfsBuilder.Create(
        file_info.FileTitle, file_info.ApplicationTitle,
        file_info.ApplicationVersion)

    # Header: copy everything relevant from the source file
    dfs_builder.SetDataType(file_info.DataType)
    dfs_builder.SetGeographicalProjection(file_info.Projection)

    # Time axis, optionally re-anchored at a caller-supplied start time
    time_axis = file_info.TimeAxis
    if start_time is not None:
        time_axis.set_StartDateTime(to_dotnet_datetime(start_time))
    dfs_builder.SetTemporalAxis(time_axis)

    dfs_builder.SetItemStatisticsType(file_info.StatsType)
    dfs_builder.DeleteValueByte = file_info.DeleteValueByte
    dfs_builder.DeleteValueDouble = file_info.DeleteValueDouble
    dfs_builder.DeleteValueFloat = file_info.DeleteValueFloat
    dfs_builder.DeleteValueInt = file_info.DeleteValueInt
    dfs_builder.DeleteValueUnsignedInt = file_info.DeleteValueUnsignedInt

    # Custom blocks (if any)
    for custom_block in file_info.CustomBlocks:
        dfs_builder.AddCustomBlock(custom_block)

    # Dynamic items, restricted to the requested subset (all by default)
    for item_number in _valid_item_numbers(dfs_source.ItemInfo, items):
        dfs_builder.AddDynamicItem(dfs_source.ItemInfo[item_number])

    dfs_builder.CreateFile(outfilename)

    # Static items: ReadStaticItemNext returns None when exhausted
    for static_item in iter(dfs_source.ReadStaticItemNext, None):
        dfs_builder.AddStaticItem(static_item)

    cloned_file = dfs_builder.GetFile()

    dfs_source.Close()

    return cloned_file
Пример #14
0
    def read(self, filename, item_numbers=None, item_names=None, time_steps=None):
        """
        Parameters
        ---------
        filename: str
            dfs2 filename
        item_numbers: list[int], optional
            Read only selected items, by number (0-based)
        item_names: list[str], optional
            Read only selected items, by name, takes precedence over item_numbers
        time_steps: list[int], optional
            Read only selected time_steps

        Returns
        -------
        Dataset
            A dataset with data dimensions [t,y,x]

        Raises
        ------
        ValueError
            if the file has no dynamic items, or a requested time step is
            out of range
        """

        # NOTE. Item numbers are base 0 (everything else in the dfs is base 0)

        # Open the dfs file for reading
        dfs = DfsFileFactory.DfsGenericOpen(filename)

        if item_names is not None:
            item_numbers = find_item(dfs, item_names)

        if item_numbers is None:
            n_items = safe_length(dfs.ItemInfo)
            item_numbers = list(range(n_items))

        nt = dfs.FileInfo.TimeAxis.NumberOfTimeSteps

        if time_steps is None:
            time_steps = list(range(nt))

        # Determine the size of the grid
        axis = dfs.ItemInfo[0].SpatialAxis
        yNum = axis.YCount
        xNum = axis.XCount

        if nt == 0:
            # BUG FIX: previously `raise Warning(...)` (wrong exception type)
            # followed by unreachable `nt = 1`
            raise ValueError("Static files (with no dynamic items) are not supported.")

        for t in time_steps:
            if t > (nt - 1):
                raise ValueError(f"Trying to read timestep {t}: max timestep is {nt-1}")

        deleteValue = dfs.FileInfo.DeleteValueFloat

        n_items = len(item_numbers)

        # One (nt, y, x) block per requested item
        data_list = [
            np.ndarray(shape=(len(time_steps), yNum, xNum), dtype=float)
            for _ in range(n_items)
        ]

        t_seconds = np.zeros(len(time_steps), dtype=float)

        for i, it in enumerate(time_steps):
            for item in range(n_items):

                # ReadItemTimeStep uses 1-based item numbers
                itemdata = dfs.ReadItemTimeStep(item_numbers[item] + 1, it)

                d = to_numpy(itemdata.Data)

                d = d.reshape(yNum, xNum)
                d = np.flipud(d)  # flip row order vertically
                d[d == deleteValue] = np.nan
                data_list[item][i, :, :] = d

            # time offset (seconds) taken from the last item read in this step
            t_seconds[i] = itemdata.Time

        start_time = from_dotnet_datetime(dfs.FileInfo.TimeAxis.StartDateTime)
        time = [start_time + timedelta(seconds=tsec) for tsec in t_seconds]

        items = get_item_info(dfs, item_numbers)

        dfs.Close()
        return Dataset(data_list, time, items)
Пример #15
0
    def read_slice(
        self,
        dfs3file,
        lower_left_xy,
        upper_right_xy,
        items=None,
        layers=None,
        conservative=True,
    ):
        """Read data from a dfs3 file within the given bounding box.

        Parameters
        ----------
        dfs3file
            a full path and filename to the dfs3 file
        lower_left_xy
            list or array of size two with the X and the Y coordinate
            of the lower-left corner (same projection as the dfs3)
        upper_right_xy
            list or array of size two with the X and the Y coordinate
            of the upper-right corner (same projection as the dfs3)
        items
            list of indices (base 0) to read from
        layers
            list of layers to read
        conservative
            Default is True. Only include the grids within the given bounds
            (don't return those grids on the border)

        Returns
            1) the data contained in a dfs3 file in a list of numpy matrices
            2) time index
            3) name of the items

        Raises
        ------
        IndexError
            if the requested bounds fall outside the grid

        NOTE
            Returns data ( y, x, z, nt)

            1) If coordinates is selected, then only return data at those coordinates
            2) coordinates specified overules layers.
            3) layer counts from the bottom
        """

        data = self.read(dfs3file, items=items, layers=layers)

        dfs = DfsFileFactory.DfsGenericOpen(dfs3file)

        # Determine the size of the grid
        axis = dfs.ItemInfo[0].SpatialAxis
        dx = axis.Dx
        dy = axis.Dy
        x0 = axis.X0
        y0 = axis.Y0
        yNum = axis.YCount
        xNum = axis.XCount

        # NOTE(review): uses (yNum + 1) — confirm whether this should be yNum
        top_left_y = y0 + (yNum + 1) * dy

        dfs.Close()

        # Convert world coordinates to (fractional) grid indices; rows are
        # counted from the top, hence the top_left_y reference
        lower_left_x_index = (lower_left_xy[0] - x0) / dx
        lower_left_y_index = (top_left_y - lower_left_xy[1]) / dy

        upper_right_x_index = (upper_right_xy[0] - x0) / dx
        upper_right_y_index = (top_left_y - upper_right_xy[1]) / dy

        if conservative:
            # Round inwards: keep only cells fully inside the bounds
            lower_left_x_index = int(np.ceil(lower_left_x_index))
            upper_right_x_index = int(np.floor(upper_right_x_index))
            lower_left_y_index = int(np.floor(lower_left_y_index))
            upper_right_y_index = int(np.ceil(upper_right_y_index))
        else:
            # Round outwards: include border cells
            lower_left_x_index = int(np.floor(lower_left_x_index))
            upper_right_x_index = int(np.ceil(upper_right_x_index))
            lower_left_y_index = int(np.ceil(lower_left_y_index))
            upper_right_y_index = int(np.floor(upper_right_y_index))

        # Bounds checks. BUG FIX: the original had unreachable fallback
        # assignments after each raise; they have been removed.
        if lower_left_x_index < 0:
            raise IndexError("lower_left_x_index < 0.")

        if upper_right_y_index < 0:
            raise IndexError("upper_right_y_index < 0.")

        if lower_left_y_index > yNum - 1:
            raise IndexError("lower_left_y_index > yNum - 1")

        if upper_right_x_index > xNum - 1:
            raise IndexError("upper_right_x_index > xNum - 1")

        # Slice every item's matrix (rows = y from the top, cols = x)
        for i in range(len(data[0])):
            data[0][i] = data[0][i][
                upper_right_y_index:lower_left_y_index,
                lower_left_x_index:upper_right_x_index, :, :, ]

        return data
Пример #16
0
    def read(self, items=None, layers=None, coordinates=None, time_steps=None):
        """Read data from a dfs3 file.

        Usage:
            [data,time,name] = read( filename, items, layers=None, coordinates=None)

        items
            list of indices (base 0) to read from. If None then all the items.
        layers
            list of layer indices (base 0) to read
        coordinates
            list of list (x,y,layer) integers ( 0,0 at Bottom Left of Grid !! )
            example coordinates = [[2,5,1], [11,41,2]]

        Returns
            1) the data contained in a dfs3 file in a list of numpy matrices
            2) time index
            3) name of the items

        NOTE
            Returns Dataset with data (t, z, y, x)

            1) If coordinates is selected, then only return data at those coordinates
            2) coordinates specified overules layers.
            3) layer counts from the bottom
        """

        # Open the dfs file for reading
        dfs = DfsFileFactory.DfsGenericOpen(self._filename)

        # Validate/normalize the item and time-step selections
        item_numbers = _valid_item_numbers(dfs.ItemInfo, items)
        n_items = len(item_numbers)

        time_steps = _valid_timesteps(dfs.FileInfo, time_steps)
        nt = len(time_steps)

        # Determine the size of the grid
        axis = dfs.ItemInfo[0].SpatialAxis
        zNum = axis.ZCount
        yNum = axis.YCount
        xNum = axis.XCount
        deleteValue = dfs.FileInfo.DeleteValueFloat

        # Pre-allocate one output block per item; the shape depends on whether
        # the caller asked for the full grid, a layer subset, or point
        # coordinates
        data_list = []

        if coordinates is None:
            # if nt is 0, then the dfs is 'static' and must be handled differently
            if nt != 0:
                for item in range(n_items):
                    if layers is None:
                        # Initialize an empty data block
                        data = np.ndarray(shape=(nt, zNum, yNum, xNum),
                                          dtype=float)
                        data_list.append(data)
                    else:
                        data = np.ndarray(shape=(nt, len(layers), yNum, xNum),
                                          dtype=float)
                        data_list.append(data)

            else:
                raise ValueError(
                    "Static dfs3 files (with no time steps) are not supported."
                )

        else:
            # Point extraction: one column per requested (x, y, layer) triple
            ncoordinates = len(coordinates)
            for item in range(n_items):
                # Initialize an empty data block
                data = np.ndarray(shape=(nt, ncoordinates), dtype=float)
                data_list.append(data)

        t_seconds = np.zeros(nt, dtype=float)
        # NOTE(review): startTime is unused — start_time (converted below) is
        # what is actually used
        startTime = dfs.FileInfo.TimeAxis.StartDateTime

        if coordinates is None:
            # Full-grid (or layer-subset) read, one item at a time per step
            for it_number, it in enumerate(time_steps):
                for item in range(n_items):
                    # ReadItemTimeStep uses 1-based item numbers
                    itemdata = dfs.ReadItemTimeStep(item_numbers[item] + 1, it)

                    src = itemdata.Data
                    d = to_numpy(src)

                    # DO a direct copy instead of eleement by elment
                    d = d.reshape(zNum, yNum,
                                  xNum)  # .swapaxes(0, 2).swapaxes(0, 1)
                    d = np.flipud(d)
                    d[d == deleteValue] = np.nan
                    if layers is None:
                        data_list[item][it_number, :, :, :] = d
                    else:
                        # Copy only the requested layers, in the order given
                        for l in range(len(layers)):
                            data_list[item][it_number,
                                            l, :, :] = d[layers[l], :, :]

                # time offset (seconds) taken from the last item read
                t_seconds[it_number] = itemdata.Time
        else:
            # Point extraction: precompute flat indices once, then gather
            # those positions from each item/time step
            indices = [
                self.__calculate_index(xNum, yNum, zNum, x, y, z)
                for x, y, z in coordinates
            ]
            for it in range(nt):
                for item in range(n_items):
                    itemdata = dfs.ReadItemTimeStep(item_numbers[item] + 1, it)
                    d = np.array([itemdata.Data[i] for i in indices])
                    d[d == deleteValue] = np.nan
                    data_list[item][it, :] = d

                t_seconds[it] = itemdata.Time

        # Build the absolute time axis from the start time plus offsets
        start_time = from_dotnet_datetime(dfs.FileInfo.TimeAxis.StartDateTime)
        time = [start_time + timedelta(seconds=tsec) for tsec in t_seconds]

        items = _get_item_info(dfs.ItemInfo, item_numbers)

        dfs.Close()

        return Dataset(data_list, time, items)
Пример #17
0
def dfstodfs0(dfsfile, dfs0file, func=np.nanmean):
    """Take average (or other statistics) over a dfs file and output dfs0

    Usage:
        dfstodfs0(dfsfile, dfs0file)
        dfstodfs0(dfsfile, dfs0file, func=np.nanmean)

    Raises
    ------
    ValueError
        if the input file has no time steps
    IOError
        if the output file cannot be created
    """

    # Read dfs
    dfs_in = DfsFileFactory.DfsGenericOpen(dfsfile)
    fileInfo = dfs_in.FileInfo

    # Basic info from input file
    n_time_steps = fileInfo.TimeAxis.NumberOfTimeSteps
    if n_time_steps == 0:
        # BUG FIX: previously raised a Warning instance, which is the wrong
        # exception type for an unsupported input
        raise ValueError(
            "Static dfs files (with no time steps) are not supported.")

    # Create an empty dfs0 file object
    factory = DfsFactory()
    builder = DfsBuilder.Create(fileInfo.FileTitle, fileInfo.ApplicationTitle,
                                fileInfo.ApplicationVersion)

    # Set up the header
    builder.SetDataType(fileInfo.DataType)
    builder.SetGeographicalProjection(factory.CreateProjectionUndefined())
    builder.SetTemporalAxis(fileInfo.TimeAxis)
    builder.DeleteValueByte = fileInfo.DeleteValueByte
    builder.DeleteValueDouble = fileInfo.DeleteValueDouble
    builder.DeleteValueFloat = fileInfo.DeleteValueFloat
    builder.DeleteValueInt = fileInfo.DeleteValueInt
    builder.DeleteValueUnsignedInt = fileInfo.DeleteValueUnsignedInt

    # dynamic items: one 0-d (dfs0) item per input item
    n_items = safe_length(dfs_in.ItemInfo)
    for item in range(n_items):
        ii = dfs_in.ItemInfo[item]
        itemj = builder.CreateDynamicItemBuilder()
        itemj.Set(ii.Name, ii.Quantity, DfsSimpleType.Float)
        itemj.SetValueType(DataValueType.Instantaneous)
        itemj.SetAxis(factory.CreateAxisEqD0())
        # itemj.SetReferenceCoordinates(0, 0, 0)
        builder.AddDynamicItem(itemj.GetDynamicItemInfo())

    try:
        builder.CreateFile(dfs0file)
    except IOError as err:
        # BUG FIX: previously printed and fell through to GetFile(), which
        # would then fail with a less helpful error; fail loudly instead
        raise IOError(f"Cannot create dfs0 file: {dfs0file}") from err

    dfs_out = builder.GetFile()

    # read-write data
    deleteValue = fileInfo.DeleteValueFloat
    for it in range(n_time_steps):
        for item in range(n_items):
            itemdata = dfs_in.ReadItemTimeStep(item + 1, it)

            d = to_numpy(itemdata.Data)
            d[d == deleteValue] = np.nan

            # Aggregate spatially, then map NaN back to the delete value
            d0 = func(d)
            d = np.zeros(1)
            d[0] = d0
            d[np.isnan(d)] = deleteValue

            darray = to_dotnet_float_array(d)
            dfs_out.WriteItemTimeStepNext(itemdata.Time, darray)

    dfs_in.Close()
    dfs_out.Close()
Пример #18
0
def dfs2todfs1(dfs2file, dfs1file, axis=1, func=np.nanmean):
    """Aggregate a dfs2 file over one spatial axis and write a dfs1 file.

    Parameters
    ----------
    dfs2file : str
        full path to the input dfs2 file
    dfs1file : str
        full path to the output dfs1 file
    axis : int, optional
        spatial axis to aggregate over, 1=y, 2=x, by default 1
    func : function, optional
        aggregation function applied along the chosen axis,
        by default np.nanmean

    Raises
    ------
    ValueError
        if axis is not 1 or 2, if the input file is static
        (no time steps), or if the input file is compressed
    """

    if axis not in [1, 2]:
        raise ValueError("Axis must be 1=y or 2=x")

    # Open input dfs2
    dfs_in = DfsFileFactory.DfsGenericOpen(dfs2file)
    fileInfo = dfs_in.FileInfo

    # Basic info from input file
    ax = dfs_in.ItemInfo[0].SpatialAxis
    n_time_steps = fileInfo.TimeAxis.NumberOfTimeSteps
    if n_time_steps == 0:
        # ValueError instead of raising the Warning class: this is an
        # unsupported-input error, not something callers should suppress
        # (consistent with the dfs1 reader in this module).
        raise ValueError(
            "Static dfs2 files (with no time steps) are not supported.")

    # Create an empty dfs1 file object
    factory = DfsFactory()
    builder = Dfs1Builder.Create(fileInfo.FileTitle, fileInfo.ApplicationTitle,
                                 fileInfo.ApplicationVersion)

    # Copy header information from the source file
    builder.SetDataType(fileInfo.DataType)
    builder.SetGeographicalProjection(fileInfo.Projection)
    builder.SetTemporalAxis(fileInfo.TimeAxis)
    builder.DeleteValueByte = fileInfo.DeleteValueByte
    builder.DeleteValueDouble = fileInfo.DeleteValueDouble
    builder.DeleteValueFloat = fileInfo.DeleteValueFloat
    builder.DeleteValueInt = fileInfo.DeleteValueInt
    builder.DeleteValueUnsignedInt = fileInfo.DeleteValueUnsignedInt

    # The surviving axis: aggregating over y (axis=1) keeps x, and vice versa
    if axis == 1:
        builder.SetSpatialAxis(
            factory.CreateAxisEqD1(ax.AxisUnit, ax.XCount, ax.X0, ax.Dx))
    else:
        builder.SetSpatialAxis(
            factory.CreateAxisEqD1(ax.AxisUnit, ax.YCount, ax.Y0, ax.Dy))

    # assume no compression keys
    if fileInfo.IsFileCompressed:
        raise ValueError("Compressed files not supported")

    # custom blocks
    # cb = fileInfo.CustomBlocks #[0]
    # for j in range(safe_length(cb)):
    #    builder.AddCustomBlocks(cb[j])

    # static items are copied through unchanged
    while True:
        static_item = dfs_in.ReadStaticItemNext()
        if static_item is None:
            break
        builder.AddStaticItem(static_item)

    # dynamic items keep name/quantity; values written as float, instantaneous
    n_items = safe_length(dfs_in.ItemInfo)
    for item in range(n_items):
        ii = dfs_in.ItemInfo[item]
        builder.AddDynamicItem(ii.Name, ii.Quantity, DfsSimpleType.Float,
                               DataValueType.Instantaneous)

    try:
        builder.CreateFile(dfs1file)
    except IOError:
        print("cannot create dfs1 file: ", dfs1file)

    dfs_out = builder.GetFile()

    # read, aggregate and write, one item per time step
    deleteValue = fileInfo.DeleteValueFloat
    for it in range(n_time_steps):
        for item in range(n_items):
            itemdata = dfs_in.ReadItemTimeStep(item + 1, it)

            d = to_numpy(itemdata.Data)
            # mask delete values so they do not contaminate the aggregation
            d[d == deleteValue] = np.nan
            d2 = d.reshape(ax.YCount, ax.XCount)
            d2 = np.flipud(d2)

            # numpy axis 0 is y, axis 1 is x -> shift the 1-based argument
            d1 = func(d2, axis=axis - 1)
            d1[np.isnan(d1)] = deleteValue

            darray = to_dotnet_float_array(d1)
            dfs_out.WriteItemTimeStepNext(itemdata.Time, darray)

    dfs_in.Close()
    dfs_out.Close()
Пример #19
0
    def read(self, filename, item_numbers=None):
        """Read a dfs1 file.

        Parameters
        ----------
        filename : str
            full path to the dfs1 file
        item_numbers : list[int], optional
            indices (base 0) of the items to read; by default all items

        Returns
        -------
        Dataset
            one (nt, x) array per item, the time index, and the item names

        Raises
        ------
        ValueError
            if the file is static (contains no time steps)
        """

        # NOTE. Item numbers are base 0 (everything else in the dfs is base 0)

        # Open the dfs file for reading
        dfs = DfsFileFactory.DfsGenericOpen(filename)

        if item_numbers is None:
            n_items = safe_length(dfs.ItemInfo)
            item_numbers = list(range(n_items))

        # Grid size is taken from the first item's spatial axis
        axis = dfs.ItemInfo[0].SpatialAxis

        xNum = axis.XCount
        nt = dfs.FileInfo.TimeAxis.NumberOfTimeSteps
        if nt == 0:
            # ValueError instead of raising the Warning class; the
            # unreachable "nt = 1" fallback after the raise was removed.
            raise ValueError(
                "Static dfs1 files (with no time steps) are not supported.")
        deleteValue = dfs.FileInfo.DeleteValueFloat

        n_items = len(item_numbers)
        data_list = []

        for item in range(n_items):
            # Pre-allocate one (nt, x) block per item
            data = np.ndarray(shape=(nt, xNum), dtype=float)
            data_list.append(data)

        t = []
        startTime = dfs.FileInfo.TimeAxis.StartDateTime
        for it in range(dfs.FileInfo.TimeAxis.NumberOfTimeSteps):
            for item in range(n_items):

                # ReadItemTimeStep is 1-based, hence the +1
                itemdata = dfs.ReadItemTimeStep(item_numbers[item] + 1, it)

                src = itemdata.Data
                d = to_numpy(src)

                # delete values -> NaN
                d[d == deleteValue] = np.nan
                data_list[item][it, :] = d

            t.append(
                startTime.AddSeconds(
                    itemdata.Time).ToString("yyyy-MM-dd HH:mm:ss"))

        time = [datetime.strptime(x, "%Y-%m-%d %H:%M:%S") for x in t]
        names = []
        for item in range(n_items):
            # BUG FIX: look up the name of the *selected* item, not the
            # item at the same positional index in the file.
            name = dfs.ItemInfo[item_numbers[item]].Name
            names.append(name)

        dfs.Close()
        return Dataset(data_list, time, names)
Пример #20
0
    def read(self, filename, item_numbers=None, item_names=None):
        """Read data from the dfs1 file.

        Parameters
        ----------
        filename : str
            full path to the dfs1 file
        item_numbers : list[int], optional
            read only the item_numbers in the array specified (0 base)
        item_names : list[str], optional
            read only the items in the array specified
            (takes precedence over item_numbers)

        Returns
        -------
        Dataset
            Dataset(data, time, items) where data has shape (nt, x)

        Raises
        ------
        ValueError
            if the file is static (contains no time steps)
        """

        # NOTE. Item numbers are base 0 (everything else in the dfs is base 0)

        # Open the dfs file for reading
        dfs = DfsFileFactory.DfsGenericOpen(filename)

        if item_names is not None:
            item_numbers = find_item(dfs, item_names)

        if item_numbers is None:
            n_items = safe_length(dfs.ItemInfo)
            item_numbers = list(range(n_items))

        # Grid size is taken from the first item's spatial axis
        axis = dfs.ItemInfo[0].SpatialAxis

        xNum = axis.XCount
        nt = dfs.FileInfo.TimeAxis.NumberOfTimeSteps
        if nt == 0:
            # ValueError instead of raising the Warning class; the
            # unreachable "nt = 1" fallback after the raise was removed.
            raise ValueError(
                "Static dfs1 files (with no time steps) are not supported.")
        deleteValue = dfs.FileInfo.DeleteValueFloat

        n_items = len(item_numbers)
        data_list = []

        for item in range(n_items):
            # Pre-allocate one (nt, x) block per item
            data = np.ndarray(shape=(nt, xNum), dtype=float)
            data_list.append(data)

        t_seconds = np.zeros(nt, dtype=float)

        for it in range(dfs.FileInfo.TimeAxis.NumberOfTimeSteps):
            for item in range(n_items):

                # ReadItemTimeStep is 1-based, hence the +1
                itemdata = dfs.ReadItemTimeStep(item_numbers[item] + 1, it)

                src = itemdata.Data
                d = to_numpy(src)

                # delete values -> NaN
                d[d == deleteValue] = np.nan
                data_list[item][it, :] = d

            t_seconds[it] = itemdata.Time

        # Convert the relative item times (seconds) to absolute datetimes
        start_time = from_dotnet_datetime(dfs.FileInfo.TimeAxis.StartDateTime)
        time = [start_time + timedelta(seconds=tsec) for tsec in t_seconds]

        items = get_item_info(dfs, item_numbers)

        dfs.Close()
        return Dataset(data_list, time, items)
Пример #21
0
def concat(infilenames, outfilename):
    """Concatenate files along the time axis.

    If files are overlapping, the last one will be used.

    Parameters
    ----------
    infilenames : list of str
        filenames to concatenate
    outfilename : str
        output filename

    Raises
    ------
    Exception
        if a gap is detected between consecutive files' time axes
        (the partially written output file is removed first)

    Notes
    ------
    The list of input files has to be sorted, i.e. in chronological order.
    """

    # The first file only supplies the item count (and the clone template)
    dfs_i_a = DfsFileFactory.DfsGenericOpen(infilenames[0])
    n_items = safe_length(dfs_i_a.ItemInfo)
    dfs_i_a.Close()  # resource fix: this handle was never closed before

    dfs_o = _clone(infilenames[0], outfilename)

    for i, infilename in enumerate(infilenames):

        dfs_i = DfsFileFactory.DfsGenericOpen(infilename)
        n_time_steps = dfs_i.FileInfo.TimeAxis.NumberOfTimeSteps
        dt = dfs_i.FileInfo.TimeAxis.TimeStep
        start_time = from_dotnet_datetime(dfs_i.FileInfo.TimeAxis.StartDateTime)

        # A gap larger than one time step between files is not supported
        if i > 0 and start_time > current_time + timedelta(seconds=dt):
            dfs_i.Close()
            dfs_o.Close()
            os.remove(outfilename)
            raise Exception("Gap in time axis detected - not supported")

        current_time = start_time

        # Peek at the next file's start time so overlapping steps in this
        # file can be skipped (last file wins on overlap)
        if i < (len(infilenames) - 1):
            dfs_n = DfsFileFactory.DfsGenericOpen(infilenames[i + 1])
            nf = dfs_n.FileInfo.TimeAxis.StartDateTime
            next_start_time = datetime(
                nf.Year, nf.Month, nf.Day, nf.Hour, nf.Minute, nf.Second
            )
            dfs_n.Close()  # resource fix: was left open for every input file

        for timestep in range(n_time_steps):

            current_time = start_time + timedelta(seconds=timestep * dt)
            if i < (len(infilenames) - 1):
                if current_time >= next_start_time:
                    break

            for item in range(n_items):

                itemdata = dfs_i.ReadItemTimeStep(item + 1, timestep)
                d = to_numpy(itemdata.Data)

                darray = to_dotnet_float_array(d)

                # time argument 0: presumably the output time axis is
                # equidistant so the write position is implicit — TODO confirm
                dfs_o.WriteItemTimeStepNext(0, darray)

        dfs_i.Close()  # resource fix: close each input handle when done

    dfs_o.Close()
Пример #22
0
    def read(self, dfs3file, item_numbers=None, layers=None, coordinates=None):
        """Read data from a dfs3 file.

        Parameters
        ----------
        dfs3file : str
            full path to the dfs3 file
        item_numbers : list[int], optional
            indices (base 0) to read from. If None then all the items.
        layers : list[int], optional
            layer indices (base 0) to read; layers count from the bottom
        coordinates : list[list[int]], optional
            list of (x, y, layer) integers (0,0 at bottom left of grid),
            e.g. coordinates = [[2,5,1], [11,41,2]];
            takes precedence over `layers`

        Returns
        -------
        Dataset
            data (shape (y, x, z, nt) per item, or (ncoordinates, nt) when
            coordinates are given), the time index and the item names

        Raises
        ------
        ValueError
            if the file is static (contains no time steps)
        """

        # Open the dfs file for reading
        dfs = DfsFileFactory.DfsGenericOpen(dfs3file)

        # Determine the size of the grid
        axis = dfs.ItemInfo[0].SpatialAxis
        zNum = axis.ZCount
        yNum = axis.YCount
        xNum = axis.XCount
        nt = dfs.FileInfo.TimeAxis.NumberOfTimeSteps

        deleteValue = dfs.FileInfo.DeleteValueFloat

        if item_numbers is None:
            n_items = safe_length(dfs.ItemInfo)
            item_numbers = list(range(n_items))

        n_items = len(item_numbers)
        data_list = []

        if coordinates is None:
            # if nt is 0, then the dfs is 'static' and not supported
            if nt != 0:
                for item in range(n_items):
                    if layers is None:
                        # Initialize an empty data block
                        data = np.ndarray(shape=(yNum, xNum, zNum, nt),
                                          dtype=float)  # .fill(deleteValue)
                        data_list.append(data)
                    else:
                        data = np.ndarray(shape=(yNum, xNum, len(layers), nt),
                                          dtype=float)  # .fill(deleteValue)
                        data_list.append(data)

            else:
                # ValueError instead of raising the Warning class; the dead
                # quit() call after the raise was removed.
                raise ValueError(
                    "Static dfs3 files (with no time steps) are not supported."
                )
        else:
            ncoordinates = len(coordinates)
            for item in range(n_items):
                # One (ncoordinates, nt) block per item
                data = np.ndarray(shape=(ncoordinates, nt), dtype=float)
                data_list.append(data)

        t = []
        startTime = dfs.FileInfo.TimeAxis.StartDateTime

        if coordinates is None:
            for it in range(nt):
                for item in range(n_items):
                    itemdata = dfs.ReadItemTimeStep(item_numbers[item] + 1, it)

                    src = itemdata.Data
                    d = to_numpy(src)

                    # Reorder the flat (z, y, x) buffer into (y, x, z)
                    d = d.reshape(zNum, yNum, xNum).swapaxes(0,
                                                             2).swapaxes(0, 1)
                    d = np.flipud(d)
                    d[d == deleteValue] = np.nan
                    if layers is None:
                        data_list[item][:, :, :, it] = d
                    else:
                        for l in range(len(layers)):
                            data_list[item][:, :, l, it] = d[:, :, layers[l]]

                t.append(
                    startTime.AddSeconds(
                        itemdata.Time).ToString("yyyy-MM-dd HH:mm:ss"))
        else:
            # Flat indices into the raw item buffer for each (x, y, z) point
            indices = [
                self.__calculate_index(xNum, yNum, zNum, x, y, z)
                for x, y, z in coordinates
            ]
            for it in range(nt):
                for item in range(n_items):
                    itemdata = dfs.ReadItemTimeStep(item_numbers[item] + 1, it)
                    d = np.array([itemdata.Data[i] for i in indices])
                    d[d == deleteValue] = np.nan
                    data_list[item][:, it] = d

                t.append(
                    startTime.AddSeconds(
                        itemdata.Time).ToString("yyyy-MM-dd HH:mm:ss"))

        # time = pd.DatetimeIndex(t)
        time = [datetime.strptime(x, "%Y-%m-%d %H:%M:%S") for x in t]
        names = []
        for item in range(n_items):
            name = dfs.ItemInfo[item_numbers[item]].Name
            names.append(name)

        dfs.Close()

        return Dataset(data_list, time, names)
Пример #23
0
def dfs2todfs1(dfs2file, dfs1file, ax=0, func=np.nanmean):
    """Take the average (or other statistic) over one axis of a dfs2
    file and write the result to a dfs1 file.

    Parameters
    ----------
    dfs2file : str
        full path to the input dfs2 file
    dfs1file : str
        full path to the output dfs1 file
    ax : int, optional
        numpy axis to aggregate over: 0=y (keep x, default), else keep y
    func : function, optional
        aggregation function, by default np.nanmean

    Raises
    ------
    ValueError
        if the input file is static (no time steps) or compressed
    """

    # Read dfs2
    dfs_in = DfsFileFactory.DfsGenericOpen(dfs2file)
    fileInfo = dfs_in.FileInfo

    # Basic info from input file
    axis = dfs_in.ItemInfo[0].SpatialAxis
    n_time_steps = fileInfo.TimeAxis.NumberOfTimeSteps
    if n_time_steps == 0:
        # ValueError instead of raising the Warning class: this is an
        # unsupported-input error, not something callers should suppress.
        raise ValueError(
            "Static dfs2 files (with no time steps) are not supported.")

    # Create an empty dfs1 file object
    factory = DfsFactory()
    builder = Dfs1Builder.Create(fileInfo.FileTitle, fileInfo.ApplicationTitle,
                                 fileInfo.ApplicationVersion)

    # Copy header information from the source file
    builder.SetDataType(fileInfo.DataType)
    builder.SetGeographicalProjection(fileInfo.Projection)
    builder.SetTemporalAxis(fileInfo.TimeAxis)
    builder.DeleteValueByte = fileInfo.DeleteValueByte
    builder.DeleteValueDouble = fileInfo.DeleteValueDouble
    builder.DeleteValueFloat = fileInfo.DeleteValueFloat
    builder.DeleteValueInt = fileInfo.DeleteValueInt
    builder.DeleteValueUnsignedInt = fileInfo.DeleteValueUnsignedInt

    # use x-axis (default) else y-axis
    if ax == 0:
        builder.SetSpatialAxis(
            factory.CreateAxisEqD1(axis.AxisUnit, axis.XCount, axis.X0,
                                   axis.Dx))
    else:
        builder.SetSpatialAxis(
            factory.CreateAxisEqD1(axis.AxisUnit, axis.YCount, axis.Y0,
                                   axis.Dy))

    # assume no compression keys
    if fileInfo.IsFileCompressed:
        raise ValueError("Compressed files not supported")

    # custom blocks
    # cb = fileInfo.CustomBlocks #[0]
    # for j in range(safe_length(cb)):
    #    builder.AddCustomBlocks(cb[j])

    # static items are copied through unchanged
    while True:
        static_item = dfs_in.ReadStaticItemNext()
        if static_item is None:
            break
        builder.AddStaticItem(static_item)

    # dynamic items keep name/quantity; values written as float, instantaneous
    n_items = safe_length(dfs_in.ItemInfo)
    for item in range(n_items):
        ii = dfs_in.ItemInfo[item]
        builder.AddDynamicItem(ii.Name, ii.Quantity, DfsSimpleType.Float,
                               DataValueType.Instantaneous)

    try:
        builder.CreateFile(dfs1file)
    except IOError:
        print("cannot create dfs1 file: ", dfs1file)

    dfs_out = builder.GetFile()

    # read, aggregate and write, one item per time step
    deleteValue = fileInfo.DeleteValueFloat
    for it in range(n_time_steps):
        for item in range(n_items):
            itemdata = dfs_in.ReadItemTimeStep(item + 1, it)

            d = to_numpy(itemdata.Data)
            # mask delete values so they do not contaminate the aggregation
            d[d == deleteValue] = np.nan
            d2 = d.reshape(axis.YCount, axis.XCount)
            d2 = np.flipud(d2)

            d1 = func(d2, axis=ax)
            d1[np.isnan(d1)] = deleteValue

            # use the shared conversion helper instead of constructing the
            # .NET array by hand (consistent with the other writers here)
            darray = to_dotnet_float_array(d1)
            dfs_out.WriteItemTimeStepNext(itemdata.Time, darray)

    dfs_in.Close()
    dfs_out.Close()
Пример #24
0
    def read(self, filename, item_numbers=None, item_names=None):
        """Read data from the dfs2 file.

        Parameters
        ----------
        filename : str
            full path to the dfs2 file
        item_numbers : list[int], optional
            read only the item_numbers in the array specified (0 base)
        item_names : list[str], optional
            read only the items in the array specified
            (takes precedence over item_numbers)

        Returns
        -------
        Dataset
            Dataset(data, time, items) where data has shape (nt, y, x)

        Raises
        ------
        ValueError
            if the file is static (contains no time steps)
        """

        # NOTE. Item numbers are base 0 (everything else in the dfs is base 0)

        # Open the dfs file for reading
        dfs = DfsFileFactory.DfsGenericOpen(filename)

        if item_names is not None:
            item_numbers = find_item(dfs, item_names)

        if item_numbers is None:
            n_items = safe_length(dfs.ItemInfo)
            item_numbers = list(range(n_items))

        # Grid size is taken from the first item's spatial axis
        axis = dfs.ItemInfo[0].SpatialAxis
        yNum = axis.YCount
        xNum = axis.XCount
        nt = dfs.FileInfo.TimeAxis.NumberOfTimeSteps
        if nt == 0:
            # ValueError instead of raising the Warning class; the
            # unreachable "nt = 1" fallback after the raise was removed.
            raise ValueError(
                "Static files (with no dynamic items) are not supported.")
        deleteValue = dfs.FileInfo.DeleteValueFloat

        n_items = len(item_numbers)
        data_list = []

        for item in range(n_items):
            # Pre-allocate one (nt, y, x) block per item
            data = np.ndarray(shape=(nt, yNum, xNum), dtype=float)
            data_list.append(data)

        t = []
        startTime = dfs.FileInfo.TimeAxis.StartDateTime
        for it in range(dfs.FileInfo.TimeAxis.NumberOfTimeSteps):
            for item in range(n_items):

                # ReadItemTimeStep is 1-based, hence the +1
                itemdata = dfs.ReadItemTimeStep(item_numbers[item] + 1, it)

                src = itemdata.Data
                d = to_numpy(src)

                # Flat buffer -> (y, x) grid, flipped so row 0 is the top
                d = d.reshape(yNum, xNum)
                d = np.flipud(d)
                d[d == deleteValue] = np.nan
                data_list[item][it, :, :] = d

            t.append(
                startTime.AddSeconds(
                    itemdata.Time).ToString("yyyy-MM-dd HH:mm:ss"))

        time = [datetime.strptime(x, "%Y-%m-%d %H:%M:%S") for x in t]

        items = get_item_info(dfs, item_numbers)

        dfs.Close()
        return Dataset(data_list, time, items)
Пример #25
0
    def read(self, items=None, time_steps=None):
        """
        Read data from a dfs1 file

        Parameters
        ---------
        items: list[int] or list[str], optional
            Read only selected items, by number (0-based), or by name
        time_steps: int or list[int], optional
            Read only selected time_steps

        Returns
        -------
        Dataset
            A dataset with data dimensions [t,x]

        Raises
        ------
        ValueError
            if the file is static (contains no time steps)
        """

        # NOTE. Item numbers are base 0 (everything else in the dfs is base 0)

        # Open the dfs file for reading
        dfs = DfsFileFactory.DfsGenericOpen(self._filename)
        self._dfs = dfs
        self._source = dfs

        items, item_numbers, time_steps = get_valid_items_and_timesteps(
            self, items, time_steps)

        # Grid size is taken from the first item's spatial axis
        axis = dfs.ItemInfo[0].SpatialAxis

        xNum = axis.XCount
        nt = dfs.FileInfo.TimeAxis.NumberOfTimeSteps
        if nt == 0:
            raise ValueError(
                "Static dfs1 files (with no time steps) are not supported.")

        deleteValue = dfs.FileInfo.DeleteValueFloat

        n_items = len(item_numbers)
        data_list = []

        for item in range(n_items):
            # Pre-allocate one (len(time_steps), x) block per item: the
            # output only holds the *selected* time steps
            data = np.ndarray(shape=(len(time_steps), xNum), dtype=float)
            data_list.append(data)

        t_seconds = np.zeros(len(time_steps), dtype=float)

        for i in range(len(time_steps)):
            it = time_steps[i]
            for item in range(n_items):

                itemdata = dfs.ReadItemTimeStep(item_numbers[item] + 1, it)

                src = itemdata.Data
                d = to_numpy(src)

                d[d == deleteValue] = np.nan
                # BUG FIX: index the output arrays with the output position
                # i, not the file timestep it; with a subset of time steps
                # the old code wrote to wrong rows or out of bounds.
                data_list[item][i, :] = d

            t_seconds[i] = itemdata.Time

        # Convert the relative item times (seconds) to absolute datetimes
        start_time = from_dotnet_datetime(dfs.FileInfo.TimeAxis.StartDateTime)
        time = [start_time + timedelta(seconds=tsec) for tsec in t_seconds]

        items = get_item_info(dfs, item_numbers)

        dfs.Close()
        return Dataset(data_list, time, items)