Example #1
    def write(self, filename, data):

        """write overwrites an existing dfs0 file.

        Parameters
        ----------
        filename: str
            Full path and filename to dfs0 to be modified.
        data: list[np.array]
            data to overwrite
        """

        if not os.path.exists(filename):
            raise FileNotFoundError(f"File does not exist: {filename}")

        try:
            dfs = DfsFileFactory.DfsGenericOpenEdit(filename)
        except IOError:
            print("cannot open", filename)
            raise

        delete_value = dfs.FileInfo.DeleteValueFloat

        n_items = len(dfs.ItemInfo)
        n_time_steps = dfs.FileInfo.TimeAxis.NumberOfTimeSteps

        # Makes sure the data to write to the file matches the dfs0 file
        if n_time_steps != data[0].shape[0]:
            raise ValueError(
                f"Inconsistent data size. nt (row count) must be size {n_time_steps}"
            )

        if n_items != len(data):
            raise ValueError(f"Number of items must be size {n_items}")

        for i in range(n_items):
            d = data[i]

            d[np.isnan(d)] = delete_value

        # Get the date times in seconds (from start)
        dfsdata = Dfs0Util.ReadDfs0DataDouble(dfs)
        t_seconds = [dfsdata[i, 0] for i in range(n_time_steps)]

        dfs.Reset()

        data1 = np.stack(data, axis=1)
        Dfs0Util.WriteDfs0DataDouble(dfs, t_seconds, to_dotnet_array(data1))

        dfs.Close()
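
A minimal usage sketch for the overwrite pattern in Example #1, assuming the method is exposed on mikeio's Dfs0 class (the import path, file name, array sizes and values below are made up):

    import numpy as np
    from mikeio import Dfs0  # import path assumed; adjust to the project layout

    nt = 744  # must equal the number of time steps already stored in the file
    new_levels = np.random.random(nt)            # replacement values for item 1
    new_discharge = np.random.random(nt) * 10.0  # replacement values for item 2

    # Overwrite both items in place; shapes must match the existing file.
    Dfs0().write("existing_data.dfs0", [new_levels, new_discharge])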
Example #2
    def __read(self, filename):
        """Read data from the dfs0 file
        """
        if not os.path.exists(filename):
            raise FileNotFoundError(f"File does not exist: {filename}")

        dfs = DfsFileFactory.DfsGenericOpen(filename)
        self._dfs = dfs

        n_items = safe_length(dfs.ItemInfo)
        nt = dfs.FileInfo.TimeAxis.NumberOfTimeSteps

        items = []
        for i in range(n_items):
            eumItem = dfs.ItemInfo[i].Quantity.Item
            eumUnit = dfs.ItemInfo[i].Quantity.Unit
            name = dfs.ItemInfo[i].Name
            itemtype = EUMType(eumItem)
            unit = EUMUnit(eumUnit)
            item = ItemInfo(name, itemtype, unit)
            items.append(item)

        # BULK READ THE DFS0
        dfsdata = Dfs0Util.ReadDfs0DataDouble(dfs)

        t = []
        starttime = dfs.FileInfo.TimeAxis.StartDateTime

        # Build timestamp strings: start time plus the per-step offset in seconds
        for it in range(dfs.FileInfo.TimeAxis.NumberOfTimeSteps):
            t.append(
                starttime.AddSeconds(
                    dfsdata[it, 0]).ToString("yyyy-MM-dd HH:mm:ss"))

        # Copy the System Array to a numpy matrix
        # First column is the time (the rest is the data)
        data = np.fromiter(dfsdata, np.float64).reshape(nt, n_items + 1)[:, 1:]

        mask = np.isclose(data, dfs.FileInfo.DeleteValueFloat, atol=1e-36)
        data[mask] = np.nan
        # mask = np.isclose(
        #    data, dfs.FileInfo.DeleteValueDouble, atol=1e-34
        # )  # TODO needs to be verified
        # data[mask] = np.nan

        dfs.Close()

        return data, t, items
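
The private reader above returns a 2D value matrix, a list of timestamp strings, and a list of ItemInfo objects. A sketch of how a caller might assemble that triple into a pandas DataFrame; the pandas conversion is an illustration, not part of the project code:

    import pandas as pd

    # `data`, `t`, `items` as returned by the __read method above
    # (data: 2D array, t: "yyyy-MM-dd HH:mm:ss" strings, items: ItemInfo objects).
    def to_dataframe(data, t, items):
        return pd.DataFrame(
            data,                                    # shape (nt, n_items), NaN where deleted
            index=pd.to_datetime(t),                 # parse the timestamp strings
            columns=[item.name for item in items],   # one column per dfs0 item
        )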
Example #3
File: dfs0.py  Project: famiuer/mikeio
    def write(self, filename, data):
        """
        Overwrite data in an existing dfs0 file.

        Parameters
        ----------
        filename: str
            Full path and filename to dfs0 to be modified.
        data: list[np.array]
            New data to write.
        """
        dfs, n_items, n_time_steps = self._validate_and_open_dfs(
            filename, data)

        # Get time in seconds from start
        existing_data = Dfs0Util.ReadDfs0DataDouble(dfs)
        time = [existing_data[i, 0] for i in range(n_time_steps)]

        # Overwrite with new data
        dfs.Reset()
        new_data = np.nan_to_num(data, nan=dfs.FileInfo.DeleteValueFloat)
        new_data_dotnet = to_dotnet_array(np.stack(new_data, axis=1))
        Dfs0Util.WriteDfs0DataDouble(dfs, time, new_data_dotnet)
        dfs.Close()
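
Example #3 moves the checks from Example #1 into a `_validate_and_open_dfs` helper that is not shown on this page. A hypothetical sketch of such a helper, reconstructed from the validation in Examples #1 and #10 (the body below is an assumption, not the project's actual code; `os` and `DfsFileFactory` are assumed to be imported in the surrounding module as in the other examples):

    def _validate_and_open_dfs(self, filename, data):
        # Hypothetical helper: open for editing and verify that `data`
        # matches the file's item count and number of time steps.
        if not os.path.exists(filename):
            raise FileNotFoundError(f"File does not exist: {filename}")

        dfs = DfsFileFactory.DfsGenericOpenEdit(filename)

        n_items = len(dfs.ItemInfo)
        n_time_steps = dfs.FileInfo.TimeAxis.NumberOfTimeSteps

        if n_time_steps != data[0].shape[0]:
            raise ValueError(f"Inconsistent data size. nt (row count) must be size {n_time_steps}")
        if n_items != len(data):
            raise ValueError(f"Number of items must be size {n_items}")

        return dfs, n_items, n_time_steps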
Example #4
    def __read(self, filename):
        """Read data from the dfs0 file
        """
        if not os.path.exists(filename):
            raise FileNotFoundError(f"File does not exist: {filename}")

        dfs = DfsFileFactory.DfsGenericOpen(filename)
        self._dfs = dfs

        n_items = safe_length(dfs.ItemInfo)
        nt = dfs.FileInfo.TimeAxis.NumberOfTimeSteps

        items = []
        for i in range(n_items):
            eumItem = dfs.ItemInfo[i].Quantity.Item
            eumUnit = dfs.ItemInfo[i].Quantity.Unit
            name = dfs.ItemInfo[i].Name
            itemtype = EUMType(eumItem)
            unit = EUMUnit(eumUnit)
            item = ItemInfo(name, itemtype, unit)
            items.append(item)

        # BULK READ THE DFS0
        dfsdata = Dfs0Util.ReadDfs0DataDouble(dfs)
        # First column is the time (the rest is the data)
        t_seconds = [dfsdata[i, 0] for i in range(nt)]

        start_time = from_dotnet_datetime(dfs.FileInfo.TimeAxis.StartDateTime)
        time = [start_time + timedelta(seconds=tsec) for tsec in t_seconds]

        # TODO use to_numpy ?
        data = np.fromiter(dfsdata, np.float64).reshape(nt, n_items + 1)[:, 1:]

        mask = np.isclose(data, dfs.FileInfo.DeleteValueFloat, atol=1e-36)
        data[mask] = np.nan
        # mask = np.isclose(
        #    data, dfs.FileInfo.DeleteValueDouble, atol=1e-34
        # )  # TODO needs to be verified
        # data[mask] = np.nan

        dfs.Close()

        return data, time, items
Example #5
    def __read(self, filename):
        """
        Read all data from a dfs0 file.
        """
        self._time_column_index = 0  # First column is time (the rest is data)

        self._dfs = DfsFileFactory.DfsGenericOpen(filename)
        raw_data = Dfs0Util.ReadDfs0DataDouble(self._dfs)  # Bulk read the data

        matrix = self.__to_numpy_with_nans(raw_data)

        data = []
        for i in range(matrix.shape[1]):
            data.append(matrix[:, i])

        time = list(self.__get_time(raw_data))
        items = list(self.__get_items())

        self._dfs.Close()

        return Dataset(data, time, items)
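
Example #5 relies on private helpers (`__to_numpy_with_nans`, `__get_time`, `__get_items`) that are not listed here. A plausible sketch of the conversion helper, reconstructed from the bulk-read pattern in Examples #2 and #4; the body is an assumption:

    def __to_numpy_with_nans(self, raw_data):
        # Hypothetical: flatten the .NET 2D array, drop the time column and
        # map the float delete value to NaN (mirrors Examples #2 and #4).
        nt = self._dfs.FileInfo.TimeAxis.NumberOfTimeSteps
        n_items = safe_length(self._dfs.ItemInfo)

        matrix = np.fromiter(raw_data, np.float64).reshape(nt, n_items + 1)
        matrix = matrix[:, self._time_column_index + 1:]  # keep only the item columns

        mask = np.isclose(matrix, self._dfs.FileInfo.DeleteValueFloat, atol=1e-36)
        matrix[mask] = np.nan
        return matrix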
Example #6
File: dfs0.py  Project: lifuyun023/mikeio
    def __read(self, filename):
        """
        Read data from a dfs0 file.
        """
        if not os.path.exists(filename):
            raise FileNotFoundError(f"File {filename} not found.")

        self._dfs = DfsFileFactory.DfsGenericOpen(filename)
        self._n_items = safe_length(self._dfs.ItemInfo)
        self._n_timesteps = self._dfs.FileInfo.TimeAxis.NumberOfTimeSteps
        self._time_column_index = 0  # First column is time (the rest is data)

        raw_data = Dfs0Util.ReadDfs0DataDouble(self._dfs)  # Bulk read the data

        data = self.__to_numpy_with_nans(raw_data)
        time = list(self.__get_time(raw_data))
        items = list(self.__get_items())

        self._dfs.Close()

        return data, time, items
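
For completeness, plausible sketches of the two remaining private helpers used in Examples #5 and #6; both bodies are assumptions reconstructed from Examples #2 and #4 and rely on the attributes set in Example #6 (`self._n_timesteps`, `self._n_items`, `self._time_column_index`):

    def __get_time(self, raw_data):
        # Hypothetical: start time plus the per-step offsets (seconds) in the time column.
        start_time = from_dotnet_datetime(self._dfs.FileInfo.TimeAxis.StartDateTime)
        for it in range(self._n_timesteps):
            yield start_time + timedelta(seconds=raw_data[it, self._time_column_index])

    def __get_items(self):
        # Hypothetical: wrap each dynamic item's name, EUM type and unit in an ItemInfo.
        for i in range(self._n_items):
            quantity = self._dfs.ItemInfo[i].Quantity
            yield ItemInfo(self._dfs.ItemInfo[i].Name, EUMType(quantity.Item), EUMUnit(quantity.Unit))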
Example #7
    def __read(self, filename):
        """Read data from the dfs0 file
        """
        if not os.path.exists(filename):
            raise FileNotFoundError(f"File does not exist: {filename}")

        dfs = DfsFileFactory.DfsGenericOpen(filename)

        n_items = safe_length(dfs.ItemInfo)
        nt = dfs.FileInfo.TimeAxis.NumberOfTimeSteps

        names = []
        for i in range(n_items):
            names.append(dfs.ItemInfo[i].Name)

        # BULK READ THE DFS0
        dfsdata = Dfs0Util.ReadDfs0DataDouble(dfs)

        t = []
        starttime = dfs.FileInfo.TimeAxis.StartDateTime

        # Build timestamp strings: start time plus the per-step offset in seconds
        for it in range(dfs.FileInfo.TimeAxis.NumberOfTimeSteps):
            t.append(
                starttime.AddSeconds(
                    dfsdata[it, 0]).ToString("yyyy-MM-dd HH:mm:ss"))

        # Copy the System Array to a numpy matrix
        # First column is the time (the rest is the data)
        data = np.fromiter(dfsdata, np.float64).reshape(nt, n_items + 1)[:, 1:]

        data[data == -1.0000000180025095e-35] = np.nan
        data[data == -1.0000000031710769e-30] = np.nan
        data[data == dfs.FileInfo.DeleteValueFloat] = np.nan
        data[data == dfs.FileInfo.DeleteValueDouble] = np.nan

        dfs.Close()

        return data, t, names
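
Example #7 tests for delete values with exact equality against hard-coded constants, whereas Examples #2 and #4 use np.isclose with a tight tolerance. The hard-coded -1.0000000180025095e-35 is simply the single-precision delete value -1e-35 after promotion to double, which is why exact comparison against the double literal -1e-35 would miss it; a quick illustration:

    import numpy as np

    stored = float(np.float32(-1e-35))   # the float delete value as it appears in the double array
    print(stored)                        # -1.0000000180025095e-35 (the constant used in Example #7)
    print(stored == -1e-35)              # False: exact equality against the double literal fails
    print(np.isclose(stored, -1e-35, atol=1e-36))  # True: the tolerance-based check matches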
Example #8
    def create(
        self,
        filename,
        data,
        start_time=None,
        timeseries_unit=TimeStep.SECOND,
        dt=1.0,
        datetimes=None,
        items=None,
        title=None,
        data_value_type=None,
    ):
        """Create a dfs0 file.

        Parameters
        ----------
        filename: str
            Full path and filename to dfs0 to be created.
        data: list[np.array]
            values
        start_time: datetime.datetime, optional
            start date of type datetime.
        timeseries_unit: TimeStep, optional
            Time step unit, default TimeStep.SECOND
        dt: float, optional
            The time step. With a timeseries_unit of minutes, dt of 5.5
            means 5 minutes and 30 seconds. Default 1.0.
        datetimes: list[datetime], optional
            List of datetimes for a non-equidistant time axis.
        items: list[ItemInfo], optional
            List of ItemInfo corresponding to the variable types (e.g. Water Level).
        title: str, optional
            title, default "dfs0 file"
        data_value_type: list[DataValueType], optional
            DataValueType, default DataValueType.Instantaneous

        """
        if title is None:
            title = "dfs0 file"

        n_items = len(data)
        n_time_steps = np.shape(data[0])[0]

        if start_time is None:
            start_time = datetime.now()

        if items is None:
            items = [ItemInfo(f"temItem {i+1}") for i in range(n_items)]

        if len(items) != n_items:
            raise ValueError(
                "items must have the same number of elements as data columns"
            )

        if datetimes is None:
            equidistant = True

            if not isinstance(start_time, datetime):
                raise TypeError("start_time must be of type datetime")

            dt = float(dt)
            datetimes = np.array(
                [
                    start_time + timedelta(seconds=(step * dt))
                    for step in np.arange(n_time_steps)
                ]
            )

        else:
            start_time = datetimes[0]
            equidistant = False

        # if not isinstance(timeseries_unit, int):
        #    raise Warning("timeseries_unit must be an integer. See dfsutil options for help ")

        system_start_time = to_dotnet_datetime(start_time)

        factory = DfsFactory()
        builder = DfsBuilder.Create(title, "DFS", 0)
        builder.SetDataType(1)
        builder.SetGeographicalProjection(factory.CreateProjectionUndefined())

        if equidistant:
            builder.SetTemporalAxis(
                factory.CreateTemporalEqCalendarAxis(
                    timeseries_unit, system_start_time, 0, dt
                )
            )
        else:
            builder.SetTemporalAxis(
                factory.CreateTemporalNonEqCalendarAxis(
                    timeseries_unit, system_start_time
                )
            )

        builder.SetItemStatisticsType(StatType.RegularStat)

        for i in range(n_items):

            item = builder.CreateDynamicItemBuilder()

            item.Set(
                items[i].name,
                eumQuantity.Create(items[i].type, items[i].unit),
                DfsSimpleType.Float,
            )

            if data_value_type is not None:
                item.SetValueType(data_value_type[i])
            else:
                item.SetValueType(DataValueType.Instantaneous)

            item.SetAxis(factory.CreateAxisEqD0())
            builder.AddDynamicItem(item.GetDynamicItemInfo())

        try:
            builder.CreateFile(filename)

        except IOError:
            print("cannot create dfs0 file: ", filename)
            raise

        dfs = builder.GetFile()
        delete_value = dfs.FileInfo.DeleteValueFloat

        for i in range(n_items):
            d = data[i]
            d[np.isnan(d)] = delete_value

        data1 = np.stack(data, axis=1)
        t_seconds = [(t - datetimes[0]).total_seconds() for t in datetimes]
        Dfs0Util.WriteDfs0DataDouble(dfs, t_seconds, to_dotnet_array(data1))

        dfs.Close()
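
A minimal usage sketch for the create method above, assuming it is exposed on mikeio's Dfs0 class and that ItemInfo can be imported from mikeio.eum (both import paths are assumptions); file name and values are made up:

    import numpy as np
    from datetime import datetime
    from mikeio import Dfs0            # assumed import path
    from mikeio.eum import ItemInfo    # assumed import path

    nt = 100
    data = [np.random.random(nt), np.random.random(nt)]

    Dfs0().create(
        filename="new_timeseries.dfs0",
        data=data,
        start_time=datetime(2020, 1, 1),
        dt=3600.0,                     # one step per hour (timeseries_unit defaults to seconds)
        items=[ItemInfo("Water Level"), ItemInfo("Discharge")],
        title="example dfs0",
    )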
Example #9
    def write(
        self,
        filename,
        data,
        start_time=None,
        timeseries_unit=TimeStepUnit.SECOND,
        dt=None,
        datetimes=None,
        items=None,
        title="",
        data_value_type=None,
        dtype=None,
    ):
        """
        Create a dfs0 file.

        Parameters
        ----------
        filename: str
            Full path and filename to dfs0 to be created.
        data: list[np.array] or Dataset
            values
        start_time: datetime.datetime, optional
            start date of type datetime.
        timeseries_unit: TimeStepUnit, optional
            Time step unit, default TimeStepUnit.SECOND
        dt: float, optional
            The time step. With a timeseries_unit of minutes, dt of 5.5
            means 5 minutes and 30 seconds. Default 1.0.
        datetimes: list[datetime], optional
            List of datetimes for a non-equidistant time axis.
        items: list[ItemInfo], optional
            List of ItemInfo corresponding to the variable types (e.g. Water Level).
        title: str, optional
            title, default blank
        data_value_type: list[DataValueType], optional
            DataValueType, default DataValueType.Instantaneous
        dtype : np.dtype, optional
            default np.float32

        """
        self._filename = filename
        self._title = title
        self._timeseries_unit = timeseries_unit
        self._dtype = dtype
        self._data_value_type = data_value_type

        if isinstance(data, Dataset):
            self._items = data.items

            if data.is_equidistant:
                self._start_time = data.time[0]
                self._dt = (data.time[1] - data.time[0]).total_seconds()
            else:
                datetimes = data.time
            data = data.data

        if dt:
            self._dt = dt

        if self._dt is None:
            self._dt = 1.0

        if start_time:
            self._start_time = start_time

        self._n_items = len(data)
        self._n_timesteps = np.shape(data[0])[0]

        if items:
            self._items = items

        if self._items is None:
            warnings.warn("No items info supplied. Using Item 1, 2, 3,...")
            self._items = [ItemInfo(f"Item {i + 1}") for i in range(self._n_items)]

        if len(self._items) != self._n_items:
            raise ValueError("Number of items must match the number of data columns.")

        if datetimes is not None:
            self._start_time = datetimes[0]
            self._is_equidistant = False
        else:
            self._is_equidistant = True
            if self._start_time is None:
                self._start_time = datetime.now()
                warnings.warn(
                    f"No start time supplied. Using current time: {self._start_time} as start time."
                )

            self._dt = float(self._dt)
            datetimes = np.array(
                [
                    self._start_time + timedelta(seconds=(step * self._dt))
                    for step in np.arange(self._n_timesteps)
                ]
            )

        dfs = self._setup_header()

        delete_value = dfs.FileInfo.DeleteValueFloat

        data = np.array(data).astype(np.float64)
        data[np.isnan(data)] = delete_value
        data_to_write = to_dotnet_array(data.T)
        t_seconds = [(t - datetimes[0]).total_seconds() for t in datetimes]
        Dfs0Util.WriteDfs0DataDouble(dfs, t_seconds, data_to_write)

        dfs.Close()
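
Example #9 also accepts a mikeio Dataset, in which case items, start time and time step are taken from the Dataset itself. A hedged round-trip sketch, assuming the read method returns a Dataset as in Example #5 and using the import path assumed in the previous sketch:

    from mikeio import Dfs0   # assumed import path, as above

    dfs0 = Dfs0()
    ds = dfs0.read("existing_data.dfs0")   # assumed to return a Dataset, as in Example #5

    # Scale the first item and write everything to a new equidistant dfs0 file;
    # items, start time and dt are picked up from the Dataset.
    ds.data[0] = ds.data[0] * 0.5
    dfs0.write("scaled_copy.dfs0", ds)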
Example #10
    def write(self, filename, data):
        """write overwrites an existing dfs0 file.

        Parameters
        ----------
        filename: str
            Full path and filename to dfs0 to be modified.
        data: list[np.array]
            data to overwrite
        """

        if not os.path.exists(filename):
            raise FileNotFoundError(f"File does not exist: {filename}")

        try:
            dfs = DfsFileFactory.DfsGenericOpenEdit(filename)
        except IOError:
            print("cannot open", filename)
            raise

        delete_value = dfs.FileInfo.DeleteValueFloat

        n_items = len(dfs.ItemInfo)
        n_time_steps = dfs.FileInfo.TimeAxis.NumberOfTimeSteps

        # Makes sure the data to write to the file matches the dfs0 file
        if n_time_steps != data[0].shape[0]:
            raise ValueError(
                f"Inconsistent data size. nt (row count) must be size {n_time_steps}"
            )

        if n_items != len(data):
            raise ValueError(f"Number of items must be size {n_items}")

        for i in range(n_items):
            d = data[i]

            d[np.isnan(d)] = delete_value

        # Get the date times in seconds (from start)
        dfsdata = Dfs0Util.ReadDfs0DataDouble(dfs)

        t = []
        # starttime = dfs.FileInfo.TimeAxis.StartDateTime

        for it in range(dfs.FileInfo.TimeAxis.NumberOfTimeSteps):
            t.append(dfsdata[it, 0])

        dfs.Reset()

        # COPY OVER THE DATA
        for it in range(n_time_steps):

            for ii in range(n_items):

                d = Array[System.Single](np.array(data[ii][it:it + 1]))

                # dt = (t[it] - t[0]).total_seconds()
                dt = t[it]
                dfs.WriteItemTimeStepNext(dt, d)

        dfs.Close()