Example #1
    def _write(self, filename, data, start_time, dt, datetimes, items,
               coordinate, title):

        if isinstance(data, Dataset) and not data.is_equidistant:
            datetimes = data.time

        self._write_handle_common_arguments(title, data, items, coordinate,
                                            start_time, dt)

        shape = np.shape(data[0])
        if self._ndim == 1:
            self._nx = shape[1]
        elif self._ndim == 2:
            self._ny = shape[1]
            self._nx = shape[2]

        self._factory = DfsFactory()
        self._set_spatial_axis()

        if self._ndim == 1:
            if not all(np.shape(d)[1] == self._nx for d in data):
                raise DataDimensionMismatch()

        if self._ndim == 2:
            if not all(np.shape(d)[1] == self._ny for d in data):
                raise DataDimensionMismatch()

            if not all(np.shape(d)[2] == self._nx for d in data):
                raise DataDimensionMismatch()
        if datetimes is not None:
            self._is_equidistant = False
            start_time = datetimes[0]
            self._start_time = start_time

        dfs = self._setup_header(filename)

        deletevalue = dfs.FileInfo.DeleteValueFloat  # -1.0000000031710769e-30

        for i in trange(self._n_timesteps, disable=not self.show_progress):
            for item in range(self._n_items):

                d = self._data[item][i]
                d = d.copy()  # to avoid modifying the input
                d[np.isnan(d)] = deletevalue

                if self._ndim == 1:
                    darray = to_dotnet_float_array(d)

                if self._ndim == 2:
                    d = d.reshape(self.shape[1:])
                    d = np.flipud(d)
                    darray = to_dotnet_float_array(d.reshape(d.size, 1)[:, 0])
                if self._is_equidistant:
                    dfs.WriteItemTimeStepNext(0, darray)
                else:
                    t = datetimes[i]
                    relt = (t - start_time).total_seconds()
                    dfs.WriteItemTimeStepNext(relt, darray)

        dfs.Close()
Example #2
def setupDfs0():
  global shePath
  global dfs
  global dfsDataX
  global dfsDataY
  global nX
  global nY
  global nZ
  import clr
  global simStart
  now = MShePy.wm.currentTime()
  clr.AddReference("DHI.Mike.Install, Version=1.0.0.0, Culture=neutral, PublicKeyToken=c513450b5d0bf0bf") # "fully qualified" name required!
  from DHI.Mike.Install import MikeImport, MikeProducts
  MikeImport.SetupLatest()
  clr.AddReference("DHI.Generic.MikeZero.DFS")
  clr.AddReference("DHI.Generic.MikeZero.EUM")
  clr.AddReference("System")
  import System
  from System import Array
  from DHI.Generic.MikeZero import eumUnit, eumItem, eumQuantity
  from DHI.Generic.MikeZero.DFS import DfsFactory, DfsBuilder, DfsSimpleType, DataValueType
  shePath = MShePy.wm.getSheFilePath()
  sheDir = os.path.dirname(shePath)
  filename = os.path.join(sheDir, 'BndFluxes.dfs2')
  builder = DfsBuilder.Create(filename, "MSHE SZ boundary fluxes output per layer", 0)
  builder.SetDataType(1)
  factory = DfsFactory()
  builder.SetGeographicalProjection(factory.CreateProjectionGeoOrigin("NON-UTM", 0, 0, 0))
  simStart = now
  nowSys = System.DateTime(now.year, now.month, now.day, now.hour, now.minute, now.second)
  # note: time unit given here has to be used in WriteItemTimeStepNext
  axis = factory.CreateTemporalNonEqCalendarAxis(eumUnit.eumUsec, nowSys)
  builder.SetTemporalAxis(axis)
  builder.DeleteValueFloat = -1e-30
  (startTime, endTime, values) = MShePy.wm.getValues(MShePy.paramTypes.SZ_X_FLO) # just for the geometry
  (nX, nY, nZ) = values.shape()
  (x0, y0) = MShePy.wm.gridCellToCoord(0, 0)
  (x1, y1) = MShePy.wm.gridCellToCoord(1, 1)
  dfsDataX = Array.CreateInstance(System.Single, nX * nY)
  dfsDataY = Array.CreateInstance(System.Single, nX * nY)
  for x in range(nX):
    for y in range(nY):
      if(not MShePy.wm.gridIsInModel(x, y)):
        dfsDataX[x + y * nX] = builder.DeleteValueFloat
        dfsDataY[x + y * nX] = builder.DeleteValueFloat
  dx = x1 - x0  # cell size, dx == dy
  axis = factory.CreateAxisEqD2(eumUnit.eumUmeter, nX, x0 - dx / 2, dx, nY, y0 - dx / 2, dx)
  itemBuilder = builder.CreateDynamicItemBuilder()
  itemBuilder.SetValueType(DataValueType.MeanStepBackward)
  itemBuilder.SetAxis(axis)
  for iz in range(nZ):
    for xy in ['x', 'y']:
      itemBuilder.Set('Boundary inflow layer {0}, {1}-direction'.format(iz + 1, xy), eumQuantity.Create(eumItem.eumIDischarge, eumUnit.eumUm3PerSec), DfsSimpleType.Float)
      builder.AddDynamicItem(itemBuilder.GetDynamicItemInfo()) 
  builder.CreateFile(filename)
  dfs = builder.GetFile()
Example #3
    def _write(
        self,
        filename,
        data,
        start_time,
        dt,
        items,
        coordinate,
        title,
    ):
        self._write_handle_common_arguments(title, data, items, coordinate,
                                            start_time, dt)

        shape = np.shape(data[0])
        if self._ndim == 1:
            self._nx = shape[1]
        elif self._ndim == 2:
            self._ny = shape[1]
            self._nx = shape[2]

        self._factory = DfsFactory()
        self._set_spatial_axis()

        if self._ndim == 1:
            if not all(np.shape(d)[1] == self._nx for d in data):
                raise DataDimensionMismatch()

        if self._ndim == 2:
            if not all(np.shape(d)[1] == self._ny for d in data):
                raise DataDimensionMismatch()

            if not all(np.shape(d)[2] == self._nx for d in data):
                raise DataDimensionMismatch()

        dfs = self._setup_header(filename)

        deletevalue = dfs.FileInfo.DeleteValueFloat  # -1.0000000031710769e-30

        for i in range(self._n_timesteps):
            for item in range(self._n_items):

                d = self._data[item][i]
                d[np.isnan(d)] = deletevalue

                if self._ndim == 1:
                    darray = to_dotnet_float_array(d)

                if self._ndim == 2:
                    d = d.reshape(self.shape[1:])
                    d = np.flipud(d)
                    darray = to_dotnet_float_array(d.reshape(d.size, 1)[:, 0])

                dfs.WriteItemTimeStepNext(0, darray)

        dfs.Close()
Example #4
    def _setup_header(self):
        factory = DfsFactory()
        builder = DfsBuilder.Create(self._title, "DFS", 0)
        builder.SetDataType(1)
        builder.SetGeographicalProjection(factory.CreateProjectionUndefined())

        system_start_time = to_dotnet_datetime(self._start_time)

        if self._is_equidistant:
            temporal_axis = factory.CreateTemporalEqCalendarAxis(
                self._timeseries_unit, system_start_time, 0, self._dt
            )
        else:
            temporal_axis = factory.CreateTemporalNonEqCalendarAxis(
                self._timeseries_unit, system_start_time
            )

        builder.SetTemporalAxis(temporal_axis)
        builder.SetItemStatisticsType(StatType.RegularStat)

        dtype_dfs = self._to_dfs_datatype(self._dtype)

        for i in range(self._n_items):
            item = self._items[i]
            newitem = builder.CreateDynamicItemBuilder()
            quantity = eumQuantity.Create(item.type, item.unit)
            newitem.Set(
                item.name, quantity, dtype_dfs,
            )

            if self._data_value_type is not None:
                newitem.SetValueType(self._data_value_type[i])
            else:
                newitem.SetValueType(DataValueType.Instantaneous)

            newitem.SetAxis(factory.CreateAxisEqD0())
            builder.AddDynamicItem(newitem.GetDynamicItemInfo())

        try:
            builder.CreateFile(self._filename)
        except IOError:
            raise IOError(f"Cannot create dfs0 file: {self._filename}")

        return builder.GetFile()
Example #5
    def create(
        self,
        meshfilename,
        filename,
        data,
        start_time=None,
        dt=1,
        timeseries_unit=TimeStep.SECOND,
        items=None,
        title=None,
    ):
        """Create a dfsu file

        Parameters
        -----------
        meshfilename: str,
            full path to a mesh or dfsu file
        filename: str
            full path to the new dfsu file
        data: list[np.array]
            list of arrays, one for each item. Array dimension: time, number of mesh elements
        start_time: datetime, optional
            start datetime, default is datetime.now()
        dt: float
            The time step. Therefore dt of 5.5 with timeseries_unit of TimeStep.MINUTE
            means 5 mins and 30 seconds. Default 1
        timeseries_unit: TimeStep, optional
             default TimeStep.SECOND
        items: list[ItemInfo], optional
            Name, item, unit, default is undefined
        title: str
            title of the dfsu file. Default is blank.
        """

        n_items = len(data)
        n_time_steps = np.shape(data[0])[0]

        if start_time is None:
            start_time = datetime.now()

        if items is None:
            items = [ItemInfo(f"temItem {i+1}") for i in range(n_items)]

        if title is None:
            title = ""

        system_start_time = to_dotnet_datetime(start_time)

        # Default filetype;
        filetype = DfsuFileType.Dfsu2D

        _, ext = os.path.splitext(meshfilename)

        if ext == ".mesh":

            source = MeshFile.ReadMesh(meshfilename)
            projstr = source.ProjectionString

        elif ext == ".dfsu":

            source = DfsuFile.Open(meshfilename)
            projstr = source.Projection.WKTString
            filetype = source.DfsuFileType

        xn = source.X
        yn = source.Y

        # zn have to be Single precision??
        zn = to_dotnet_float_array(np.array(list(source.Z)))

        nodecodes = source.Code
        elementtable = source.ElementTable

        builder = DfsuBuilder.Create(filetype)

        builder.SetNodes(xn, yn, zn, nodecodes)
        builder.SetElements(elementtable)
        builder.SetNodeIds(source.NodeIds)
        builder.SetElementIds(source.ElementIds)

        factory = DfsFactory()
        proj = factory.CreateProjection(projstr)
        builder.SetProjection(proj)
        builder.SetTimeInfo(system_start_time, dt)
        builder.SetZUnit(eumUnit.eumUmeter)

        if filetype != DfsuFileType.Dfsu2D:
            builder.SetNumberOfSigmaLayers(source.NumberOfSigmaLayers)

        for item in items:
            if item.name != "Z coordinate":
                builder.AddDynamicItem(
                    item.name, eumQuantity.Create(item.type, item.unit))

        try:
            dfs = builder.CreateFile(filename)
        except IOError:
            print("cannot create dfsu file: ", filename)

        deletevalue = dfs.DeleteValueFloat

        try:
            # Add data for all item-timesteps, copying from source
            for i in range(n_time_steps):
                for item in range(n_items):
                    d = data[item][i, :]
                    d[np.isnan(d)] = deletevalue
                    darray = to_dotnet_float_array(d)
                    dfs.WriteItemTimeStepNext(0, darray)
            dfs.Close()

        except Exception as e:
            print(e)
            dfs.Close()
            os.remove(filename)
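
A minimal usage sketch for the create() method above (not part of the snippet). The import paths (mikeio.Dfsu, mikeio.eum.ItemInfo), the no-argument construction of Dfsu, and the file names are assumptions based on the mikeio 0.x layout these snippets come from.

import numpy as np
from datetime import datetime
from mikeio import Dfsu          # assumed import path
from mikeio.eum import ItemInfo  # assumed import path

n_steps = 10
n_elements = 884                 # must equal the number of elements in the mesh

# one array per item, shaped (time, elements)
data = [np.random.random((n_steps, n_elements)).astype(np.float32)]

dfsu = Dfsu()                    # assumed: class can be constructed without a file
dfsu.create(
    meshfilename="area.mesh",            # placeholder mesh path
    filename="area_waterlevel.dfsu",
    data=data,
    start_time=datetime(2020, 1, 1),
    dt=3600,                             # seconds, with the default TimeStep.SECOND
    items=[ItemInfo("Water Level")],
)
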
Example #6
class _Dfs123:

    _filename = None
    _projstr = None
    _start_time = None
    _end_time = None
    _is_equidistant = True
    _items = None
    _builder = None
    _factory = None
    _deletevalue = None
    _override_coordinates = False
    _timeseries_unit = TimeStepUnit.SECOND
    _dt = None

    show_progress = False

    def __init__(self, filename=None):
        self._filename = filename

    def read(self, items=None, time_steps=None):
        """
        Read data from a dfs file

        Parameters
        ---------
        items: list[int] or list[str], optional
            Read only selected items, by number (0-based), or by name
        time_steps: str, int or list[int], optional
            Read only selected time_steps

        Returns
        -------
        Dataset
        """
        self._open()

        item_numbers = _valid_item_numbers(self._dfs.ItemInfo, items)
        n_items = len(item_numbers)

        time_steps = _valid_timesteps(self._dfs.FileInfo, time_steps)
        nt = len(time_steps)

        if self._ndim == 1:
            shape = (nt, self._nx)
        elif self._ndim == 2:
            shape = (nt, self._ny, self._nx)
        else:
            shape = (nt, self._nz, self._ny, self._nx)

        data_list = [np.ndarray(shape=shape) for item in range(n_items)]

        t_seconds = np.zeros(len(time_steps))

        for i, it in enumerate(tqdm(time_steps,
                                    disable=not self.show_progress)):
            for item in range(n_items):

                itemdata = self._dfs.ReadItemTimeStep(item_numbers[item] + 1,
                                                      it)

                src = itemdata.Data
                d = to_numpy(src)

                d[d == self.deletevalue] = np.nan

                if self._ndim == 2:
                    d = d.reshape(self._ny, self._nx)
                    d = np.flipud(d)

                data_list[item][i] = d

            t_seconds[i] = itemdata.Time

        time = [self.start_time + timedelta(seconds=t) for t in t_seconds]

        items = _get_item_info(self._dfs.ItemInfo, item_numbers)

        self._dfs.Close()
        return Dataset(data_list, time, items)

    def _read_header(self):
        dfs = self._dfs
        self._n_items = len(dfs.ItemInfo)
        self._items = _get_item_info(dfs.ItemInfo, list(range(self._n_items)))
        self._start_time = from_dotnet_datetime(
            dfs.FileInfo.TimeAxis.StartDateTime)
        if hasattr(dfs.FileInfo.TimeAxis, "TimeStep"):
            self._timestep_in_seconds = (dfs.FileInfo.TimeAxis.TimeStep
                                         )  # TODO handle other timeunits
            # TODO to get the EndTime
        self._n_timesteps = dfs.FileInfo.TimeAxis.NumberOfTimeSteps
        self._projstr = dfs.FileInfo.Projection.WKTString
        self._longitude = dfs.FileInfo.Projection.Longitude
        self._latitude = dfs.FileInfo.Projection.Latitude
        self._orientation = dfs.FileInfo.Projection.Orientation
        self._deletevalue = dfs.FileInfo.DeleteValueFloat

        dfs.Close()

    def _write(self, filename, data, start_time, dt, datetimes, items,
               coordinate, title):

        if isinstance(data, Dataset) and not data.is_equidistant:
            datetimes = data.time

        self._write_handle_common_arguments(title, data, items, coordinate,
                                            start_time, dt)

        shape = np.shape(data[0])
        if self._ndim == 1:
            self._nx = shape[1]
        elif self._ndim == 2:
            self._ny = shape[1]
            self._nx = shape[2]

        self._factory = DfsFactory()
        self._set_spatial_axis()

        if self._ndim == 1:
            if not all(np.shape(d)[1] == self._nx for d in data):
                raise DataDimensionMismatch()

        if self._ndim == 2:
            if not all(np.shape(d)[1] == self._ny for d in data):
                raise DataDimensionMismatch()

            if not all(np.shape(d)[2] == self._nx for d in data):
                raise DataDimensionMismatch()
        if datetimes is not None:
            self._is_equidistant = False
            start_time = datetimes[0]
            self._start_time = start_time

        dfs = self._setup_header(filename)

        deletevalue = dfs.FileInfo.DeleteValueFloat  # -1.0000000031710769e-30

        for i in trange(self._n_timesteps, disable=not self.show_progress):
            for item in range(self._n_items):

                d = self._data[item][i]
                d = d.copy()  # to avoid modifying the input
                d[np.isnan(d)] = deletevalue

                if self._ndim == 1:
                    darray = to_dotnet_float_array(d)

                if self._ndim == 2:
                    d = d.reshape(self.shape[1:])
                    d = np.flipud(d)
                    darray = to_dotnet_float_array(d.reshape(d.size, 1)[:, 0])
                if self._is_equidistant:
                    dfs.WriteItemTimeStepNext(0, darray)
                else:
                    t = datetimes[i]
                    relt = (t - start_time).total_seconds()
                    dfs.WriteItemTimeStepNext(relt, darray)

        dfs.Close()

    def _write_handle_common_arguments(self, title, data, items, coordinate,
                                       start_time, dt):

        if title is None:
            title = ""
        self._title = title

        self._n_timesteps = np.shape(data[0])[0]
        self._n_items = len(data)

        if coordinate is None:
            if self._projstr is not None:
                self._coordinate = [
                    self._projstr,
                    self._longitude,
                    self._latitude,
                    self._orientation,
                ]
            else:
                warnings.warn("No coordinate system provided")
                self._coordinate = ["LONG/LAT", 0, 0, 0]
        else:
            self._override_coordinates = True
            self._coordinate = coordinate

        if isinstance(data, Dataset):
            self._items = data.items
            self._start_time = data.time[0]
            if dt is None and len(data.time) > 1:
                self._dt = (data.time[1] - data.time[0]).total_seconds()
            self._data = data.data
        else:
            self._data = data

        if start_time is None:
            if self._start_time is None:
                self._start_time = datetime.now()
                warnings.warn(
                    f"No start time supplied. Using current time: {self._start_time} as start time."
                )
            else:
                self._start_time = self._start_time
        else:
            self._start_time = start_time

        if dt:
            self._dt = dt

        if self._dt is None:
            self._dt = 1
            warnings.warn("No timestep supplied. Using 1s.")

        if items:
            self._items = items

        if self._items is None:
            self._items = [
                ItemInfo(f"Item {i+1}") for i in range(self._n_items)
            ]

        self._timeseries_unit = TimeStepUnit.SECOND

    def _setup_header(self, filename):

        system_start_time = to_dotnet_datetime(self._start_time)

        self._builder.SetDataType(0)

        if self._coordinate[0] == "LONG/LAT":
            proj = self._factory.CreateProjectionGeoOrigin(*self._coordinate)
        else:
            if self._override_coordinates:
                proj = self._factory.CreateProjectionProjOrigin(
                    *self._coordinate)
            else:
                proj = self._factory.CreateProjectionGeoOrigin(
                    *self._coordinate)

        self._builder.SetGeographicalProjection(proj)

        if self._is_equidistant:
            self._builder.SetTemporalAxis(
                self._factory.CreateTemporalEqCalendarAxis(
                    self._timeseries_unit, system_start_time, 0, self._dt))
        else:
            self._builder.SetTemporalAxis(
                self._factory.CreateTemporalNonEqCalendarAxis(
                    self._timeseries_unit, system_start_time))

        for item in self._items:
            self._builder.AddDynamicItem(
                item.name,
                eumQuantity.Create(item.type, item.unit),
                DfsSimpleType.Float,
                item.data_value_type,
            )

        try:
            self._builder.CreateFile(filename)
        except IOError:
            # TODO does this make sense?
            print("cannot create dfs file: ", filename)

        return self._builder.GetFile()

    def _open(self):
        raise NotImplementedError("Should be implemented by subclass")

    def _set_spatial_axis(self):
        raise NotImplementedError("Should be implemented by subclass")

    @property
    def deletevalue(self):
        "File delete value"
        return self._deletevalue

    @property
    def n_items(self):
        "Number of items"
        return self._n_items

    @property
    def items(self):
        "List of items"
        return self._items

    @property
    def start_time(self):
        """File start time"""
        return self._start_time

    @property
    def end_time(self):
        """File end time
        """
        if self._end_time is None:
            self._end_time = self.read([0]).time[-1].to_pydatetime()

        return self._end_time

    @property
    def n_timesteps(self):
        """Number of time steps"""
        return self._n_timesteps

    @property
    def timestep(self):
        """Time step size in seconds"""
        return self._timestep_in_seconds

    @property
    def projection_string(self):
        return self._projstr

    @property
    def longitude(self):
        """Origin longitude"""
        return self._longitude

    @property
    def latitude(self):
        """Origin latitude"""
        return self._latitude

    @property
    def orientation(self):
        """North to Y orientation"""
        return self._orientation
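
A minimal read sketch against the read() signature above, assuming a concrete subclass of _Dfs123 (here Dfs2, which would implement _open() and _set_spatial_axis()) is importable as mikeio.Dfs2; the file and item names are placeholders.

from mikeio import Dfs2   # assumed concrete subclass of _Dfs123

dfs = Dfs2("input.dfs2")                    # placeholder file
ds = dfs.read(items=["Temperature"],        # select items by name or 0-based index
              time_steps=[0, 1, 2])

print(ds.items)            # list of ItemInfo
print(ds.time[0])          # first timestamp
print(ds.data[0].shape)    # (nt, ny, nx) for a 2D file
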
Example #7
def dfs2todfs1(dfs2file, dfs1file, axis=1, func=np.nanmean):
    """Aggregate file over an axis
    
    Parameters
    ----------
    dfs2file : str
        input file
    dfs1file : str
        output file
    axis : int, optional
        spatial axis to aggregate over, 1=y, 2=x default 1
    func : function, optional
        aggregation function, by default np.nanmean
    """

    if axis not in [1, 2]:
        raise ValueError("Axis must be 1=y or 2=x")

    # Read dfs2
    dfs_in = DfsFileFactory.DfsGenericOpen(dfs2file)
    fileInfo = dfs_in.FileInfo

    # Basic info from input file
    ax = dfs_in.ItemInfo[0].SpatialAxis
    n_time_steps = fileInfo.TimeAxis.NumberOfTimeSteps
    if n_time_steps == 0:
        raise Warning(
            "Static dfs2 files (with no time steps) are not supported.")

    # Create an empty dfs1 file object
    factory = DfsFactory()
    builder = Dfs1Builder.Create(fileInfo.FileTitle, fileInfo.ApplicationTitle,
                                 fileInfo.ApplicationVersion)

    # Set up the header
    builder.SetDataType(fileInfo.DataType)
    builder.SetGeographicalProjection(fileInfo.Projection)
    builder.SetTemporalAxis(fileInfo.TimeAxis)
    builder.DeleteValueByte = fileInfo.DeleteValueByte
    builder.DeleteValueDouble = fileInfo.DeleteValueDouble
    builder.DeleteValueFloat = fileInfo.DeleteValueFloat
    builder.DeleteValueInt = fileInfo.DeleteValueInt
    builder.DeleteValueUnsignedInt = fileInfo.DeleteValueUnsignedInt

    if axis == 1:
        builder.SetSpatialAxis(
            factory.CreateAxisEqD1(ax.AxisUnit, ax.XCount, ax.X0, ax.Dx))
    else:
        builder.SetSpatialAxis(
            factory.CreateAxisEqD1(ax.AxisUnit, ax.YCount, ax.Y0, ax.Dy))

    # assume no compression keys
    if fileInfo.IsFileCompressed:
        raise Warning("Compressed files not supported")

    # custom blocks
    # cb = fileInfo.CustomBlocks #[0]
    # for j in range(safe_length(cb)):
    #    builder.AddCustomBlocks(cb[j])

    # static items
    while True:
        static_item = dfs_in.ReadStaticItemNext()
        if static_item is None:
            break
        builder.AddStaticItem(static_item)

    # dynamic items
    n_items = safe_length(dfs_in.ItemInfo)
    for item in range(n_items):
        ii = dfs_in.ItemInfo[item]
        builder.AddDynamicItem(ii.Name, ii.Quantity, DfsSimpleType.Float,
                               DataValueType.Instantaneous)

    try:
        builder.CreateFile(dfs1file)
    except IOError:
        print("cannot create dfs1 file: ", dfs1file)

    dfs_out = builder.GetFile()

    # read-write data
    deleteValue = fileInfo.DeleteValueFloat
    for it in range(n_time_steps):
        for item in range(n_items):
            itemdata = dfs_in.ReadItemTimeStep(item + 1, it)

            d = to_numpy(itemdata.Data)
            d[d == deleteValue] = np.nan
            d2 = d.reshape(ax.YCount, ax.XCount)
            d2 = np.flipud(d2)

            d1 = func(d2, axis=axis - 1)
            d1[np.isnan(d1)] = deleteValue

            darray = to_dotnet_float_array(d1)
            dfs_out.WriteItemTimeStepNext(itemdata.Time, darray)

    dfs_in.Close()
    dfs_out.Close()
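
A short usage sketch for dfs2todfs1() as defined above; the file names are placeholders.

import numpy as np

# collapse the y-axis (axis=1, the default) with the default np.nanmean
dfs2todfs1("waterlevel.dfs2", "waterlevel_mean_y.dfs1")

# collapse the x-axis with a different aggregation function
dfs2todfs1("waterlevel.dfs2", "waterlevel_max_x.dfs1", axis=2, func=np.nanmax)
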
Example #8
    def write(
        self,
        filename,
        data,
        start_time=None,
        dt=1,
        items=None,
        dx=1.0,
        dy=1.0,
        dz=1.0,
        x0=0,
        y0=0,
        coordinate=None,
        timeseries_unit=TimeStepUnit.SECOND,
        title=None,
    ):
        """
        Write a dfs3 file

        Parameters
        ----------

        filename: str
            Location to write the dfs3 file
        data: list[np.array]
            list of matrices, one for each item. Matrix dimension: time, z, y, x
        start_time: datetime, optional
            start date of type datetime.
        timeseries_unit: Timestep, optional
            TimeStep default TimeStep.SECOND
        dt: float, optional
            The time step. Therefore dt of 5.5 with timeseries_unit of TimeStep.MINUTE
            means 5 mins and 30 seconds. Default 1
        items: list[ItemInfo], optional
            List of ItemInfo corresponding to a variable types (ie. Water Level).
        coordinate:
            ['UTM-33', 12.4387, 55.2257, 327]  for UTM, Long, Lat, North to Y orientation. Note: long, lat in decimal degrees
        x0: float, optional
            Lower right position
        y0: float, optional
            Lower right position
        dx: float, optional
            length of each grid in the x direction (projection units)
        dy: float, optional
            length of each grid in the y direction (projection units)
        dz: float, optional
            length of each grid in the z direction (projection units)

        title: str, optional
            title of the dfs3 file. Default is blank.
        """

        if title is None:
            title = "dfs3 file"

        n_time_steps = np.shape(data[0])[0]
        number_z = np.shape(data[0])[1]
        number_y = np.shape(data[0])[2]
        number_x = np.shape(data[0])[3]

        n_items = len(data)

        system_start_time = to_dotnet_datetime(start_time)

        # Create an empty dfs3 file object
        factory = DfsFactory()
        builder = Dfs3Builder.Create(title, "mikeio", 0)

        # Set up the header
        builder.SetDataType(1)
        builder.SetGeographicalProjection(
            factory.CreateProjectionGeoOrigin(*coordinate))
        builder.SetTemporalAxis(
            factory.CreateTemporalEqCalendarAxis(timeseries_unit,
                                                 system_start_time, 0, dt))
        builder.SetSpatialAxis(
            factory.CreateAxisEqD3(
                eumUnit.eumUmeter,
                number_x,
                x0,
                dx,
                number_y,
                y0,
                dy,
                number_z,
                0,
                dz,
            ))

        for i in range(n_items):
            builder.AddDynamicItem(
                items[i].name,
                eumQuantity.Create(items[i].type, items[i].unit),
                DfsSimpleType.Float,
                DataValueType.Instantaneous,
            )

        try:
            builder.CreateFile(filename)
        except IOError:
            print("cannot create dfs3 file: ", filename)

        dfs = builder.GetFile()
        deletevalue = dfs.FileInfo.DeleteValueFloat  # -1.0000000031710769e-30

        for i in range(n_time_steps):
            for item in range(n_items):
                d = data[item][i]
                d[np.isnan(d)] = deletevalue
                d = np.flipud(d)
                darray = to_dotnet_float_array(d.reshape(d.size, 1)[:, 0])

                dfs.WriteItemTimeStepNext(0, darray)

        dfs.Close()
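
A minimal usage sketch for the write() method above. The import paths (mikeio.Dfs3, mikeio.eum.ItemInfo) and the file name are assumptions; note that this implementation requires start_time, items and coordinate to be supplied.

import numpy as np
from datetime import datetime
from mikeio import Dfs3          # assumed import path
from mikeio.eum import ItemInfo  # assumed import path

nt, nz, ny, nx = 5, 3, 10, 20
data = [np.random.random((nt, nz, ny, nx)).astype(np.float32)]   # (time, z, y, x)

dfs = Dfs3()
dfs.write(
    filename="salinity.dfs3",
    data=data,
    start_time=datetime(2020, 1, 1),
    dt=3600,                                      # hourly steps (unit is seconds)
    items=[ItemInfo("Salinity")],
    coordinate=["UTM-33", 12.4387, 55.2257, 0],   # projection, origin lon/lat, orientation
    dx=100.0, dy=100.0, dz=1.0,
)
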
Example #9
    def create(self,
               filename,
               data,
               start_time=None,
               dt=1,
               length_x=1,
               x0=0,
               coordinate=None,
               timeseries_unit=TimeStep.SECOND,
               variable_type=None,
               unit=None,
               names=None,
               title=None):
        """
        Creates a dfs1 file

        filename:
            Location to write the dfs1 file
        data:
            list of matrices, one for each item. Matrix dimension: time, x
        start_time:
            start date of type datetime.
        timeseries_unit:
            TimeStep default TimeStep.SECOND
        dt:
            The time step (double based on the timeseries_unit). Therefore dt of 5.5 with timeseries_unit of minutes
            means 5 mins and 30 seconds.
        variable_type:
            Array integers corresponding to a variable types (ie. Water Level). Use dfsutil type_list
            to figure out the integer corresponding to the variable.
        unit:
            Array integers corresponding to the unit corresponding to the variable types The unit (meters, seconds),
            use dfsutil unit_list to figure out the corresponding unit for the variable.
        coordinate:
            ['UTM-33', 12.4387, 55.2257, 327]  for UTM, Long, Lat, North to Y orientation. Note: long, lat in decimal degrees
            OR
            [TODO: Support not Local Coordinates ...]
        x0:
            Lower right position
        length_x:
            length of each grid in the x direction (meters)
        names:
            array of names (ie. array of strings). (can be blank)
        title:
            title of the dfs1 file (can be blank)

        """

        if title is None:
            title = ""

        n_time_steps = np.shape(data[0])[0]
        number_x = np.shape(data[0])[1]
        n_items = len(data)

        if start_time is None:
            start_time = datetime.now()

        if coordinate is None:
            coordinate = ['LONG/LAT', 0, 0, 0]

        if names is None:
            names = [f"Item {i+1}" for i in range(n_items)]

        if variable_type is None:
            variable_type = [999] * n_items

        if unit is None:
            unit = [0] * n_items

        if not all(np.shape(d)[0] == n_time_steps for d in data):
            raise Warning(
                "ERROR data matrices in the time dimension do not all match in the data list. "
                "Data is list of matices [t, x]")
        if not all(np.shape(d)[1] == number_x for d in data):
            raise Warning(
                "ERROR data matrices in the X dimension do not all match in the data list. "
                "Data is list of matices [t, x]")

        if len(names) != n_items:
            raise Warning(
                "names must be an array of strings with the same number as matrices in data list"
            )

        if len(variable_type) != n_items or not all(
                isinstance(item, int) and 0 <= item < 1e15
                for item in variable_type):
            raise Warning(
                "type if specified must be an array of integers (enuType) with the same number of "
                "elements as data columns")

        if len(unit) != n_items or not all(
                isinstance(item, int) and 0 <= item < 1e15 for item in unit):
            raise Warning(
                "unit if specified must be an array of integers (enuType) with the same number of "
                "elements as data columns")

        if not type(start_time) is datetime:
            raise Warning("start_time must be of type datetime ")

        #if not isinstance(timeseries_unit, int):
        #    raise Warning("timeseries_unit must be an integer. timeseries_unit: second=1400, minute=1401, hour=1402, "
        #                  "day=1403, month=1405, year= 1404See dfsutil options for help ")

        system_start_time = System.DateTime(start_time.year, start_time.month,
                                            start_time.day, start_time.hour,
                                            start_time.minute,
                                            start_time.second)

        # Create an empty dfs1 file object
        factory = DfsFactory()
        builder = Dfs1Builder.Create(title, 'mikeio', 0)

        # Set up the header
        builder.SetDataType(0)
        builder.SetGeographicalProjection(
            factory.CreateProjectionGeoOrigin(coordinate[0], coordinate[1],
                                              coordinate[2], coordinate[3]))
        builder.SetTemporalAxis(
            factory.CreateTemporalEqCalendarAxis(timeseries_unit,
                                                 system_start_time, 0, dt))
        builder.SetSpatialAxis(
            factory.CreateAxisEqD1(eumUnit.eumUmeter, number_x, x0, length_x))

        for i in range(n_items):
            builder.AddDynamicItem(
                names[i], eumQuantity.Create(variable_type[i], unit[i]),
                DfsSimpleType.Float, DataValueType.Instantaneous)

        try:
            builder.CreateFile(filename)
        except IOError:
            print('cannot create dfs1 file: ', filename)

        dfs = builder.GetFile()
        deletevalue = dfs.FileInfo.DeleteValueFloat  #-1.0000000031710769e-30

        for i in range(n_time_steps):
            for item in range(n_items):
                d = data[item][i, :]
                d[np.isnan(d)] = deletevalue
                darray = Array[System.Single](np.array(
                    d.reshape(d.size, 1)[:, 0]))
                dfs.WriteItemTimeStepNext(0, darray)

        dfs.Close()
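
A minimal usage sketch for the create() method above, leaving variable_type and unit at their undefined defaults; the mikeio.Dfs1 import path and the file name are assumptions.

import numpy as np
from datetime import datetime
from mikeio import Dfs1   # assumed import path

nt, nx = 100, 20
data = [np.random.random((nt, nx)).astype(np.float32)]   # one (time, x) matrix per item

dfs = Dfs1()
dfs.create(
    filename="profile.dfs1",
    data=data,
    start_time=datetime(2020, 1, 1),
    dt=60,                     # seconds per step with the default TimeStep.SECOND
    length_x=50.0,             # grid spacing in meters
    names=["Water Level"],     # variable_type/unit default to undefined (999 / 0)
)
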
Example #10
def dfstodfs0(dfsfile, dfs0file, func=np.nanmean):
    """ Function: take average (or other statistics) over dfs and output dfs0

    Usage:
        dfstodfs0(dfsfile, dfs0file)
        dfstodfs0(dfsfile, dfs0file, func=np.nanmean)
    """

    # Read dfs
    dfs_in = DfsFileFactory.DfsGenericOpen(dfsfile)
    fileInfo = dfs_in.FileInfo

    # Basic info from input file
    n_time_steps = fileInfo.TimeAxis.NumberOfTimeSteps
    if n_time_steps == 0:
        raise Warning(
            "Static dfs files (with no time steps) are not supported.")

    # Create an empty dfs0 file object
    factory = DfsFactory()
    builder = DfsBuilder.Create(fileInfo.FileTitle, fileInfo.ApplicationTitle,
                                fileInfo.ApplicationVersion)

    # Set up the header
    builder.SetDataType(fileInfo.DataType)
    builder.SetGeographicalProjection(factory.CreateProjectionUndefined())
    builder.SetTemporalAxis(fileInfo.TimeAxis)
    builder.DeleteValueByte = fileInfo.DeleteValueByte
    builder.DeleteValueDouble = fileInfo.DeleteValueDouble
    builder.DeleteValueFloat = fileInfo.DeleteValueFloat
    builder.DeleteValueInt = fileInfo.DeleteValueInt
    builder.DeleteValueUnsignedInt = fileInfo.DeleteValueUnsignedInt

    # dynamic items
    n_items = safe_length(dfs_in.ItemInfo)
    for item in range(n_items):
        ii = dfs_in.ItemInfo[item]
        itemj = builder.CreateDynamicItemBuilder()
        itemj.Set(ii.Name, ii.Quantity, DfsSimpleType.Float)
        itemj.SetValueType(DataValueType.Instantaneous)
        itemj.SetAxis(factory.CreateAxisEqD0())
        # itemj.SetReferenceCoordinates(0, 0, 0)
        builder.AddDynamicItem(itemj.GetDynamicItemInfo())

    try:
        builder.CreateFile(dfs0file)
    except IOError:
        print("cannot create dfs0 file: ", dfs0file)

    dfs_out = builder.GetFile()

    # read-write data
    deleteValue = fileInfo.DeleteValueFloat
    for it in range(n_time_steps):
        for item in range(n_items):
            itemdata = dfs_in.ReadItemTimeStep(item + 1, it)

            d = to_numpy(itemdata.Data)
            d[d == deleteValue] = np.nan

            d0 = func(d)
            d = np.zeros(1)
            d[0] = d0
            d[np.isnan(d)] = deleteValue

            darray = to_dotnet_float_array(d)
            dfs_out.WriteItemTimeStepNext(itemdata.Time, darray)

    dfs_in.Close()
    dfs_out.Close()
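
A short usage sketch for dfstodfs0() as defined above; the file names are placeholders.

import numpy as np

# spatial mean of every item at every time step
dfstodfs0("area.dfs2", "area_mean.dfs0")

# spatial maximum instead of the default np.nanmean
dfstodfs0("area.dfs2", "area_max.dfs0", func=np.nanmax)
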
Example #11
    def create(self,
               filename,
               data,
               start_time=None,
               timeseries_unit=TimeStep.SECOND,
               dt=1,
               datetimes=None,
               variable_type=None,
               unit=None,
               names=None,
               title=None,
               data_value_type=None):
        """create creates a dfs0 file.

        filename:
            Full path and filename to dfs0 to be created.
        data:
            a numpy matrix
        start_time:
            start date of type datetime.
        timeseries_unit:
            Timestep default Timestep.SECOND
        dt:
            the time step. Therefore dt of 5.5 with timeseries_unit of minutes
            means 5 mins and 30 seconds. default to 1
        variable_type:
            Array integers corresponding to a variable types (ie. Water Level). Use dfsutil type_list
            to figure out the integer corresponding to the variable.
        unit:
            Array integers corresponding to the unit corresponding to the variable types The unit (meters, seconds),
            use dfsutil unit_list to figure out the corresponding unit for the variable.
        names:
            array of names (ie. array of strings)
        title:
            title (string)
        data_value_type:
            DataValueType default DataValueType.INSTANTANEOUS

        """
        if title is None:
            title = "dfs0 file"

        n_items = len(data)
        n_time_steps = np.shape(data[0])[0]

        if start_time is None:
            start_time = datetime.now()

        if names is None:
            names = [f"Item {i+1}" for i in range(n_items)]

        if variable_type is None:
            variable_type = [999] * n_items

        if unit is None:
            unit = [0] * n_items

        if names is not None and len(names) != n_items:
            raise Warning(
                "names must be an array of strings with the same number of elements as data columns"
            )

        if len(variable_type) != n_items:
            raise Warning(
                "type if specified must be an array of integers (eumType) with the same number of "
                "elements as data columns")

        if len(unit) != n_items:
            raise Warning(
                "unit if specified must be an array of integers (eumType) with the same number of "
                "elements as data columns")

        if datetimes is None:
            equidistant = True

            if not type(start_time) is datetime:
                raise Warning("start_time must be of type datetime ")
        else:
            start_time = datetimes[0]
            equidistant = False

        #if not isinstance(timeseries_unit, int):
        #    raise Warning("timeseries_unit must be an integer. See dfsutil options for help ")

        system_start_time = System.DateTime(start_time.year, start_time.month,
                                            start_time.day, start_time.hour,
                                            start_time.minute,
                                            start_time.second)

        factory = DfsFactory()
        builder = DfsBuilder.Create(title, 'DFS', 0)
        builder.SetDataType(1)
        builder.SetGeographicalProjection(factory.CreateProjectionUndefined())

        if equidistant:
            builder.SetTemporalAxis(
                factory.CreateTemporalEqCalendarAxis(timeseries_unit,
                                                     system_start_time, 0, dt))
        else:
            builder.SetTemporalAxis(
                factory.CreateTemporalNonEqCalendarAxis(
                    timeseries_unit, system_start_time))

        builder.SetItemStatisticsType(StatType.RegularStat)

        for i in range(n_items):

            item = builder.CreateDynamicItemBuilder()
            if variable_type is not None:
                item.Set(names[i], eumQuantity.Create(variable_type[i],
                                                      unit[i]),
                         DfsSimpleType.Float)
            else:
                item.Set(str(i),
                         eumQuantity.Create(eumItem.eumIItemUndefined, 0),
                         DfsSimpleType.Float)

            if data_value_type is not None:
                item.SetValueType(data_value_type[i])
            else:
                item.SetValueType(DataValueType.Instantaneous)

            item.SetAxis(factory.CreateAxisEqD0())
            builder.AddDynamicItem(item.GetDynamicItemInfo())

        try:
            builder.CreateFile(filename)

        except IOError:
            print('cannot create dfs0 file: ', filename)

        dfs = builder.GetFile()
        delete_value = dfs.FileInfo.DeleteValueFloat

        for i in range(n_items):
            d = data[i]

            d[np.isnan(d)] = delete_value

        # COPY OVER THE DATA
        for it in range(n_time_steps):
            for ii in range(n_items):

                d = Array[System.Single](np.array(data[ii][it:it + 1]))
                if equidistant:
                    dfs.WriteItemTimeStepNext(it, d)
                else:
                    dt = (datetimes[it] - datetimes[0]).total_seconds()
                    dfs.WriteItemTimeStepNext(dt, d)

        dfs.Close()
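
A minimal usage sketch for the create() method above, exercising the non-equidistant branch by passing datetimes; the mikeio.Dfs0 import path and the file name are assumptions.

import numpy as np
from datetime import datetime
from mikeio import Dfs0   # assumed import path

# irregularly spaced timestamps trigger the non-equidistant temporal axis
datetimes = [datetime(2020, 1, 1, h) for h in (0, 1, 3, 6)]
data = [np.array([0.0, 0.5, np.nan, 1.5])]   # NaN is replaced by the file delete value

dfs = Dfs0()
dfs.create(
    filename="gauge.dfs0",
    data=data,
    datetimes=datetimes,    # start_time is taken from datetimes[0] in this branch
    names=["Water Level"],
)
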
Example #12
    def create(
        self,
        filename,
        data,
        start_time=None,
        dt=1,
        items=None,
        length_x=1,
        x0=0,
        coordinate=None,
        timeseries_unit=TimeStep.SECOND,
        title=None,
    ):
        """
        Create a dfs1 file

        Parameters
        ----------
        filename: str
            Location to write the dfs1 file
        data: list[np.array]
            list of matrices, one for each item. Matrix dimension: time, x
        start_time: datetime, optional
            start datetime
        timeseries_unit: Timestep, optional
            TimeStep unit default TimeStep.SECOND
        dt: float
            The time step (double based on the timeseries_unit). Therefore dt of 5.5 with timeseries_unit of minutes
            means 5 mins and 30 seconds.
        items: list[ItemInfo], optional
            List of ItemInfo corresponding to a variable types (ie. Water Level).
        coordinate:
            ['UTM-33', 12.4387, 55.2257, 327]  for UTM, Long, Lat, North to Y orientation. Note: long, lat in decimal degrees
            OR
            [TODO: Support not Local Coordinates ...]
        x0:
            Lower right position
        length_x:
            length of each grid in the x direction (meters)
        title:
            title of the dfs1 file (can be blank)

        """

        if title is None:
            title = ""

        n_time_steps = np.shape(data[0])[0]
        number_x = np.shape(data[0])[1]
        n_items = len(data)

        if start_time is None:
            start_time = datetime.now()

        if coordinate is None:
            coordinate = ["LONG/LAT", 0, 0, 0]

        if items is None:
            items = [ItemInfo(f"temItem {i+1}") for i in range(n_items)]

        if not all(np.shape(d)[0] == n_time_steps for d in data):
            raise Warning(
                "ERROR data matrices in the time dimension do not all match in the data list. "
                "Data is list of matices [t, x]")
        if not all(np.shape(d)[1] == number_x for d in data):
            raise Warning(
                "ERROR data matrices in the X dimension do not all match in the data list. "
                "Data is list of matices [t, x]")

        if len(items) != n_items:
            raise Warning(
                "names must be an array of strings with the same number as matrices in data list"
            )

        if not type(start_time) is datetime:
            raise Warning("start_time must be of type datetime ")

        system_start_time = System.DateTime(
            start_time.year,
            start_time.month,
            start_time.day,
            start_time.hour,
            start_time.minute,
            start_time.second,
        )

        # Create an empty dfs1 file object
        factory = DfsFactory()
        builder = Dfs1Builder.Create(title, "mikeio", 0)

        # Set up the header
        builder.SetDataType(0)
        builder.SetGeographicalProjection(
            factory.CreateProjectionGeoOrigin(coordinate[0], coordinate[1],
                                              coordinate[2], coordinate[3]))
        builder.SetTemporalAxis(
            factory.CreateTemporalEqCalendarAxis(timeseries_unit,
                                                 system_start_time, 0, dt))
        builder.SetSpatialAxis(
            factory.CreateAxisEqD1(eumUnit.eumUmeter, number_x, x0, length_x))

        for i in range(n_items):
            builder.AddDynamicItem(
                items[i].name,
                eumQuantity.Create(items[i].type, items[i].unit),
                DfsSimpleType.Float,
                DataValueType.Instantaneous,
            )

        try:
            builder.CreateFile(filename)
        except IOError:
            print("cannot create dfs2 file: ", filename)

        dfs = builder.GetFile()
        deletevalue = dfs.FileInfo.DeleteValueFloat  # -1.0000000031710769e-30

        for i in range(n_time_steps):
            for item in range(n_items):
                d = data[item][i, :]
                d[np.isnan(d)] = deletevalue
                darray = Array[System.Single](np.array(
                    d.reshape(d.size, 1)[:, 0]))
                dfs.WriteItemTimeStepNext(0, darray)

        dfs.Close()
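
A minimal usage sketch for the items-based create() above; the mikeio.Dfs1 and mikeio.eum.ItemInfo import paths and the file name are assumptions.

import numpy as np
from datetime import datetime
from mikeio import Dfs1          # assumed import path
from mikeio.eum import ItemInfo  # assumed import path

nt, nx = 48, 30
data = [np.zeros((nt, nx), dtype=np.float32)]

dfs = Dfs1()
dfs.create(
    filename="channel.dfs1",
    data=data,
    start_time=datetime(2020, 1, 1),
    dt=1800,                          # half-hourly steps (unit is seconds)
    length_x=25.0,
    items=[ItemInfo("Water Level")],  # EUM type/unit left at their defaults
)
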
Example #13
    def create(
        self,
        filename,
        data,
        start_time=None,
        timeseries_unit=TimeStep.SECOND,
        dt=1.0,
        datetimes=None,
        items=None,
        title=None,
        data_value_type=None,
    ):
        """Create a dfs0 file.

        Parameters
        ----------
        filename: str
            Full path and filename to dfs0 to be created.
        data: list[np.array]
            values
        start_time: datetime.datetime, optional
            start date of type datetime.
        timeseries_unit: TimeStep, optional
            TimeStep unit, default TimeStep.SECOND
        dt: float, optional
            the time step. Therefore dt of 5.5 with timeseries_unit of minutes
            means 5 mins and 30 seconds. default to 1.0
        items: list[ItemInfo], optional
            List of ItemInfo corresponding to a variable types (ie. Water Level).
        title: str, optional
            title
        data_value_type: list[DataValueType], optional
            DataValueType default DataValueType.INSTANTANEOUS

        """
        if title is None:
            title = "dfs0 file"

        n_items = len(data)
        n_time_steps = np.shape(data[0])[0]

        if start_time is None:
            start_time = datetime.now()

        if items is None:
            items = [ItemInfo(f"temItem {i+1}") for i in range(n_items)]

        if len(items) != n_items:
            raise Warning(
                "names must be an array of strings with the same number of elements as data columns"
            )

        if datetimes is None:
            equidistant = True

            if not type(start_time) is datetime:
                raise Warning("start_time must be of type datetime ")

            dt = float(dt)
            datetimes = np.array(
                [
                    start_time + timedelta(seconds=(step * dt))
                    for step in np.arange(n_time_steps)
                ]
            )

        else:
            start_time = datetimes[0]
            equidistant = False

        # if not isinstance(timeseries_unit, int):
        #    raise Warning("timeseries_unit must be an integer. See dfsutil options for help ")

        system_start_time = to_dotnet_datetime(start_time)

        factory = DfsFactory()
        builder = DfsBuilder.Create(title, "DFS", 0)
        builder.SetDataType(1)
        builder.SetGeographicalProjection(factory.CreateProjectionUndefined())

        if equidistant:
            builder.SetTemporalAxis(
                factory.CreateTemporalEqCalendarAxis(
                    timeseries_unit, system_start_time, 0, dt
                )
            )
        else:
            builder.SetTemporalAxis(
                factory.CreateTemporalNonEqCalendarAxis(
                    timeseries_unit, system_start_time
                )
            )

        builder.SetItemStatisticsType(StatType.RegularStat)

        for i in range(n_items):

            item = builder.CreateDynamicItemBuilder()

            item.Set(
                items[i].name,
                eumQuantity.Create(items[i].type, items[i].unit),
                DfsSimpleType.Float,
            )

            if data_value_type is not None:
                item.SetValueType(data_value_type[i])
            else:
                item.SetValueType(DataValueType.Instantaneous)

            item.SetAxis(factory.CreateAxisEqD0())
            builder.AddDynamicItem(item.GetDynamicItemInfo())

        try:
            builder.CreateFile(filename)

        except IOError:
            print("cannot create dfso file: ", filename)

        dfs = builder.GetFile()
        delete_value = dfs.FileInfo.DeleteValueFloat

        for i in range(n_items):
            d = data[i]
            d[np.isnan(d)] = delete_value

        data1 = np.stack(data, axis=1)
        t_seconds = [(t - datetimes[0]).total_seconds() for t in datetimes]
        Dfs0Util.WriteDfs0DataDouble(dfs, t_seconds, to_dotnet_array(data1))

        dfs.Close()
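
A minimal usage sketch for the items-based create() above with an equidistant axis; the mikeio.Dfs0 and mikeio.eum.ItemInfo import paths and the file name are assumptions.

import numpy as np
from datetime import datetime
from mikeio import Dfs0          # assumed import path
from mikeio.eum import ItemInfo  # assumed import path

nt = 24
data = [np.sin(np.linspace(0, 2 * np.pi, nt)),   # one 1-D array per item
        np.random.random(nt)]

dfs = Dfs0()
dfs.create(
    filename="two_items.dfs0",
    data=data,
    start_time=datetime(2020, 1, 1),
    dt=3600.0,                                   # equidistant hourly axis (unit is seconds)
    items=[ItemInfo("Level"), ItemInfo("Noise")],
)
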
Example #14
    def create_equidistant_calendar(self,
                                    dfs3file,
                                    data,
                                    start_time,
                                    timeseries_unit,
                                    dt,
                                    variable_type,
                                    unit,
                                    coordinate,
                                    x0,
                                    y0,
                                    length_x,
                                    length_y,
                                    names,
                                    title=None):
        """
        Creates a dfs3 file

        dfs3file:
            Location to write the dfs3 file
        data:
            list of matrices, one for each item. Matrix dimension: y, x, z, time
        start_time:
            start date of type datetime.
        timeseries_unit:
            second=1400, minute=1401, hour=1402, day=1403, month=1405, year= 1404
        dt:
            The time step (double based on the timeseries_unit). Therefore dt of 5.5 with timeseries_unit of minutes
            means 5 mins and 30 seconds.
        variable_type:
            Array integers corresponding to a variable types (ie. Water Level). Use dfsutil type_list
            to figure out the integer corresponding to the variable.
        unit:
            Array integers corresponding to the unit corresponding to the variable types The unit (meters, seconds),
            use dfsutil unit_list to figure out the corresponding unit for the variable.
        coordinate:
            ['UTM-33', 12.4387, 55.2257, 327]  for UTM, Long, Lat, North to Y orientation. Note: long, lat in decimal degrees
            OR
            [TODO: Support not Local Coordinates ...]
        x0:
            Lower right position
        y0:
            Lower right position
        length_x:
            length of each grid in the x direction (meters)
        length_y:
            length of each grid in the y direction (meters)
        names:
            array of names (ie. array of strings).
        title:
            title of the dfs3 file (can be blank)

        """

        if title is None:
            title = "dfs0 file"

        number_y = np.shape(data[0])[0]
        number_x = np.shape(data[0])[1]
        number_z = np.shape(data[0])[2]
        n_time_steps = np.shape(data[0])[3]
        n_items = len(data)

        if not all(np.shape(d)[0] == number_y for d in data):
            raise Warning(
                "ERROR data matrices in the Y dimension do not all match in the data list. "
                "Data is list of matrices [y, x, z, time]")
        if not all(np.shape(d)[1] == number_x for d in data):
            raise Warning(
                "ERROR data matrices in the X dimension do not all match in the data list. "
                "Data is list of matrices [y, x, z, time]")
        if not all(np.shape(d)[2] == number_z for d in data):
            raise Warning(
                "ERROR data matrices in the Z dimension do not all match in the data list. "
                "Data is list of matrices [y, x, z, time]")
        if not all(np.shape(d)[3] == n_time_steps for d in data):
            raise Warning(
                "ERROR data matrices in the time dimension do not all match in the data list. "
                "Data is list of matrices [y, x, z, time]")

        if len(names) != n_items:
            raise Warning(
                "names must be an array of strings with the same number as matrices in data list"
            )

        if len(variable_type) != n_items or not all(
                isinstance(item, int) and 0 <= item < 1e15
                for item in variable_type):
            raise Warning(
                "type if specified must be an array of integers (enuType) with the same number of "
                "elements as data columns")

        if len(unit) != n_items or not all(
                isinstance(item, int) and 0 <= item < 1e15 for item in unit):
            raise Warning(
                "unit if specified must be an array of integers (enuType) with the same number of "
                "elements as data columns")

        if not type(start_time) is datetime.datetime:
            raise Warning("start_time must be of type datetime ")

        if not isinstance(timeseries_unit, int):
            raise Warning(
                "timeseries_unit must be an integer. timeseries_unit: second=1400, minute=1401, hour=1402, "
                "day=1403, month=1405, year= 1404See dfsutil options for help "
            )

        system_start_time = System.DateTime(start_time.year, start_time.month,
                                            start_time.day, start_time.hour,
                                            start_time.minute,
                                            start_time.second)

        # Create an empty dfs3 file object
        factory = DfsFactory()
        builder = Dfs3Builder.Create(title, 'pydhi', 0)

        # Set up the header
        builder.SetDataType(1)
        builder.SetGeographicalProjection(
            factory.CreateProjectionGeoOrigin(coordinate[0], coordinate[1],
                                              coordinate[2], coordinate[3]))
        builder.SetTemporalAxis(
            factory.CreateTemporalEqCalendarAxis(timeseries_unit,
                                                 system_start_time, 0, dt))
        builder.SetSpatialAxis(
            factory.CreateAxisEqD3(eumUnit.eumUmeter, number_x, x0, length_x,
                                   number_y, y0, length_y, number_z, 0, 1))

        deletevalue = builder.DeleteValueFloat

        for i in range(n_items):
            builder.AddDynamicItem(
                names[i], eumQuantity.Create(variable_type[i], unit[i]),
                DfsSimpleType.Float, DataValueType.Instantaneous)

        try:
            builder.CreateFile(dfs3file)
        except IOError:
            print('cannot create dfs3 file: ', dfs3file)

        dfs = builder.GetFile()

        for i in range(n_time_steps):
            for item in range(n_items):
                #d = data[item][:, :, :, i]
                #d.reshape(number_z, number_y, number_x).swapaxes(0, 2).swapaxes(0, 1)
                #d = np.flipud(d)
                #d[np.isnan(d)] = deletevalue
                #darray = Array[System.Single](np.array(d.reshape(d.size, 1)[:, 0]))
                #dfs.WriteItemTimeStepNext(0, darray)

                # TESTED AND WORKS if data is already in the y, x, z, t format
                d = data[item][:, :, :, i]
                d = d.swapaxes(0, 1)
                d = d.swapaxes(0, 2)
                d = np.fliplr(d)
                d[np.isnan(d)] = deletevalue
                darray = Array[System.Single](np.array(
                    d.reshape(d.size, 1)[:, 0]))
                dfs.WriteItemTimeStepNext(0, darray)

        dfs.Close()
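The axis reordering in the write loop above is easy to get wrong, so here is a stand-alone numpy sketch (array sizes invented) that reproduces the same swapaxes/fliplr/reshape sequence and can be checked without any MIKE dependencies:

# Stand-alone check of the [y, x, z] -> DFS3 storage-order reordering used above.
import numpy as np

ny, nx, nz = 3, 4, 2
d = np.arange(ny * nx * nz, dtype=np.float32).reshape(ny, nx, nz)  # one time step, [y, x, z]

d2 = d.swapaxes(0, 1)                 # [x, y, z]
d2 = d2.swapaxes(0, 2)                # [z, y, x]
d2 = np.fliplr(d2)                    # flip the y axis
flat = d2.reshape(d2.size, 1)[:, 0]   # flatten: x varies fastest, then y, then z

# the first stored value is the last y row of layer z=0
assert flat[0] == d[-1, 0, 0]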
Exemple #15
0
    def Export(self):
        self.factory = DfsFactory()
        self.builder = self.CreateDfsBuilder()
        self.DefineDynamicDataItems()
        self.WriteDataItems()
Exemple #16
0
class ExtractorDfs0(Extractor):
    """Class which extracts data to dfs0 file format"""
    def Export(self):
        self.factory = DfsFactory()
        self.builder = self.CreateDfsBuilder()
        self.DefineDynamicDataItems()
        self.WriteDataItems()

    def CreateDfsBuilder(self):
        resultData = self.resultData
        factory = self.factory

        builder = DfsBuilder.Create("ResultDataExtractor-script", "MIKE SDK",
                                    100)

        # Set up file header
        builder.SetDataType(1)
        builder.SetGeographicalProjection(factory.CreateProjectionUndefined())
        builder.SetTemporalAxis(
            factory.CreateTemporalNonEqCalendarAxis(eumUnit.eumUsec,
                                                    resultData.StartTime))
        builder.SetItemStatisticsType(StatType.NoStat)

        return builder

    def DefineDynamicDataItems(self):
        outputData = self.outputData
        resultData = self.resultData
        builder = self.builder

        for dataEntry in outputData:
            dataItem = dataEntry.dataItem
            elementIndex = dataEntry.elementIndex

            quantity = dataItem.Quantity
            itemTypeGroup = dataItem.ItemTypeGroup
            numberWithinGroup = dataItem.NumberWithinGroup

            reaches = list(resultData.Reaches)
            nodes = list(resultData.Nodes)
            catchments = list(resultData.Catchments)

            if itemTypeGroup == ItemTypeGroup.ReachItem:
                reach = reaches[numberWithinGroup]
                gridPointIndex = dataItem.IndexList[elementIndex]
                gridPoints = list(reach.GridPoints)
                chainage = gridPoints[gridPointIndex].Chainage
                itemName = "reach:%s:%s:%.3f" % (quantity.Id, reach.Name,
                                                 chainage)

            elif itemTypeGroup == ItemTypeGroup.NodeItem:
                node = nodes[numberWithinGroup]
                itemName = "node:%s:%s" % (quantity.Id, node.Id)

            elif itemTypeGroup == ItemTypeGroup.CatchmentItem:
                catchment = catchments[numberWithinGroup]
                itemName = "catchment:%s:%s" % (quantity.Id, catchment.Id)

            else:
                itemName = "%s:%s:%s" % (itemTypeGroup, quantityId,
                                         dataItem.Id)

            item = builder.CreateDynamicItemBuilder()
            item.Set(itemName, dataItem.Quantity.EumQuantity,
                     DfsSimpleType.Float)
            item.SetValueType(DataValueType.Instantaneous)
            item.SetAxis(self.factory.CreateAxisEqD0())
            builder.AddDynamicItem(item.GetDynamicItemInfo())

    def WriteDataItems(self):
        outputData = self.outputData
        resultData = self.resultData
        builder = self.builder

        # Create file
        builder.CreateFile(self.outFileName)
        dfsfile = builder.GetFile()
        times = list(resultData.TimesList)

        # Write data to file
        val = Array.CreateInstance(System.Single, 1)
        for timeStepIndex in range(resultData.NumberOfTimeSteps):
            if (timeStepIndex % self.timeStepSkippingNumber != 0):
                continue

            time = times[timeStepIndex].Subtract(
                resultData.StartTime).TotalSeconds
            for dataEntry in outputData:
                dataItem = dataEntry.dataItem
                elementIndex = dataEntry.elementIndex

                val[0] = dataItem.GetValue(timeStepIndex, elementIndex)
                dfsfile.WriteItemTimeStepNext(time, val)

        dfsfile.Close()
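For reference, DefineDynamicDataItems encodes the source of each dfs0 item in a colon-separated name; a small illustration of the three patterns, with invented values:

# Illustration of the item-name patterns built above (all values are made up).
quantity_id, reach_name, chainage = "WaterLevel", "RiverA", 1234.5
node_id, catchment_id = "Node42", "Catch7"

print("reach:%s:%s:%.3f" % (quantity_id, reach_name, chainage))  # reach:WaterLevel:RiverA:1234.500
print("node:%s:%s" % (quantity_id, node_id))                     # node:WaterLevel:Node42
print("catchment:%s:%s" % (quantity_id, catchment_id))           # catchment:WaterLevel:Catch7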
Exemple #17
0
    def create(
        self,
        meshfilename,
        filename,
        data,
        start_time=None,
        dt=1,
        timeseries_unit=TimeStep.SECOND,
        variable_type=None,
        unit=None,
        names=None,
        title=None,
    ):

        n_items = len(data)
        n_time_steps = np.shape(data[0])[0]

        if start_time is None:
            start_time = datetime.now()

        if names is None:
            names = [f"Item {i+1}" for i in range(n_items)]

        if variable_type is None:
            variable_type = [999] * n_items

        if unit is None:
            unit = [0] * n_items

        if title is None:
            title = ""

        system_start_time = System.DateTime(
            start_time.year,
            start_time.month,
            start_time.day,
            start_time.hour,
            start_time.minute,
            start_time.second,
        )

        mesh = MeshFile.ReadMesh(meshfilename)

        # TODO support all types of Dfsu
        builder = DfsuBuilder.Create(DfsuFileType.Dfsu2D)

        # Setup header and geometry, copy from source file

        # zn has to be single precision
        zn = Array[System.Single](list(mesh.Z))
        builder.SetNodes(mesh.X, mesh.Y, zn, mesh.Code)

        builder.SetElements(mesh.ElementTable)
        factory = DfsFactory()
        proj = factory.CreateProjection(mesh.ProjectionString)
        builder.SetProjection(proj)
        builder.SetTimeInfo(system_start_time, dt)
        builder.SetZUnit(eumUnit.eumUmeter)

        for i in range(n_items):
            builder.AddDynamicItem(
                names[i], eumQuantity.Create(variable_type[i], unit[i])
            )

        try:
            dfs = builder.CreateFile(filename)
        except IOError:
            print("cannot create dfsu file: ", filename)

        deletevalue = dfs.DeleteValueFloat

        # Add data for all item-timesteps, copying from source
        for i in range(n_time_steps):
            for item in range(n_items):
                d = data[item][i, :]
                d[np.isnan(d)] = deletevalue
                darray = Array[System.Single](np.array(d.reshape(d.size, 1)[:, 0]))
                dfs.WriteItemTimeStepNext(0, darray)

        dfs.Close()
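A minimal usage sketch for the dfsu creator above. The owner class name (here Dfsu), the file paths, the element count and the eum codes are all assumptions for illustration, not taken from the example:

# Hypothetical call; class name, paths, element count and eum codes are assumptions.
import numpy as np
from datetime import datetime

n_elements = 884                                   # must match the mesh file
data = [np.random.random((10, n_elements)).astype(np.float32)]   # one item, 10 time steps

writer = Dfsu()                                    # assumed owner class of create()
writer.create(
    meshfilename="area.mesh",
    filename="output.dfsu",
    data=data,
    start_time=datetime(2020, 1, 1),
    dt=3600,
    names=["Surface elevation"],
    variable_type=[100000],                        # eum item code (assumed; default 999 = undefined)
    unit=[1000],                                   # eum unit code (assumed; e.g. meter)
)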
Exemple #18
0
    def write(
        self,
        filename,
        data,
        start_time=None,
        dt=1,
        datetimes=None,
        items=None,
        dx=None,
        dy=None,
        x0=0,
        y0=0,
        coordinate=None,
        title=None,
    ):
        """
        Create a dfs2 file

        Parameters
        ----------

        filename: str
            Location to write the dfs2 file
        data: list[np.array] or Dataset
            list of matrices, one for each item. Matrix dimension: time, y, x
        start_time: datetime, optional
            start date of type datetime.
        dt: float, optional
            The time step in seconds.
        datetimes: list[datetime], optional
            datetimes, creates a non-equidistant calendar axis
        items: list[ItemInfo], optional
            List of ItemInfo corresponding to a variable types (ie. Water Level).
        x0: float, optional
            origin of the grid in the x direction (projection units)
        y0: float, optional
            origin of the grid in the y direction (projection units)
        dx: float, optional
            length of each grid cell in the x direction (projection units)
        dy: float, optional
            length of each grid cell in the y direction (projection units)
        coordinate:
            ['UTM-33', 12.4387, 55.2257, 327]  for UTM, Long, Lat, North to Y orientation. Note: long, lat in decimal degrees
        title: str, optional
            title of the dfs2 file. Default is blank.
        """

        self._write_handle_common_arguments(
            title, data, items, coordinate, start_time, dt
        )

        number_y = np.shape(data[0])[1]
        number_x = np.shape(data[0])[2]

        if dx is None:
            if self._dx is not None:
                dx = self._dx
            else:
                dx = 1

        if dy is None:
            if self._dy is not None:
                dy = self._dy
            else:
                dy = 1

        if not all(np.shape(d)[0] == self._n_time_steps for d in data):
            raise ValueError(
                "ERROR data matrices in the time dimension do not all match in the data list. "
                "Data is list of matrices [t,y,x]"
            )
        if not all(np.shape(d)[1] == number_y for d in data):
            raise ValueError(
                "ERROR data matrices in the Y dimension do not all match in the data list. "
                "Data is list of matrices [t,y,x]"
            )
        if not all(np.shape(d)[2] == number_x for d in data):
            raise ValueError(
                "ERROR data matrices in the X dimension do not all match in the data list. "
                "Data is list of matrices [t,y,x]"
            )

        if datetimes is None:
            self._is_equidistant = True
        else:
            self._is_equidistant = False
            start_time = datetimes[0]
            self._start_time = start_time

        factory = DfsFactory()
        builder = Dfs2Builder.Create(title, "mikeio", 0)

        self._builder = builder
        self._factory = factory

        builder.SetSpatialAxis(
            factory.CreateAxisEqD2(
                eumUnit.eumUmeter, number_x, x0, dx, number_y, y0, dy
            )
        )

        dfs = self._setup_header(filename)

        deletevalue = dfs.FileInfo.DeleteValueFloat  # -1.0000000031710769e-30

        for i in range(self._n_time_steps):
            for item in range(self._n_items):
                d = self._data[item][i, :, :]
                d[np.isnan(d)] = deletevalue
                d = d.reshape(number_y, number_x)
                d = np.flipud(d)
                darray = to_dotnet_float_array(d.reshape(d.size, 1)[:, 0])

                if self._is_equidistant:
                    dfs.WriteItemTimeStepNext(0, darray)
                else:
                    t = datetimes[i]
                    relt = (t - self._start_time).total_seconds()
                    dfs.WriteItemTimeStepNext(relt, darray)

        dfs.Close()
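A minimal usage sketch of the write method above, assuming it lives on a Dfs2-style class; the class name, the ItemInfo import path and the coordinate values are assumptions:

# Hypothetical call of the dfs2 write() above; names and paths are assumptions.
import numpy as np
from datetime import datetime
from mikeio.eum import ItemInfo      # assumed import path for ItemInfo

nt, ny, nx = 10, 100, 200
data = [np.random.random((nt, ny, nx)).astype(np.float32)]   # one item, [t, y, x]

dfs = Dfs2()                         # assumed owner class of write()
dfs.write(
    filename="example.dfs2",
    data=data,
    start_time=datetime(2020, 1, 1),
    dt=3600,
    dx=100,
    dy=100,
    items=[ItemInfo("Water Level")],
    coordinate=["UTM-33", 12.4387, 55.2257, 0],
)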
Exemple #19
0
    def create(
        self,
        meshfilename,
        filename,
        data,
        start_time=None,
        dt=1,
        timeseries_unit=TimeStep.SECOND,
        items=None,
        title=None,
    ):
        """Create a dfsu file

        Parameters
        -----------
        meshfilename: str,
            full path to a valid mesh file
        filename: str
            full path to the new dfsu file
        data: list[np.array]
            list of matrices, one for each item. Matrix dimension: time, number of elements
        start_time: datetime, optional
            start datetime, default is datetime.now()
        dt: float
            The time step, in units of timeseries_unit; e.g. dt of 5.5 with timeseries_unit of
            TimeStep.MINUTE means 5 minutes and 30 seconds. Default 1
        timeseries_unit: TimeStep, optional
            default TimeStep.SECOND
        items: list[ItemInfo], optional
            list of ItemInfo (name, type, unit); default is undefined items
        title: str
            title of the dfsu file. Default is blank.
        """

        n_items = len(data)
        n_time_steps = np.shape(data[0])[0]

        if start_time is None:
            start_time = datetime.now()

        if items is None:
            items = [ItemInfo(f"temItem {i+1}") for i in range(n_items)]

        if title is None:
            title = ""

        system_start_time = System.DateTime(
            start_time.year,
            start_time.month,
            start_time.day,
            start_time.hour,
            start_time.minute,
            start_time.second,
        )

        mesh = MeshFile.ReadMesh(meshfilename)

        # TODO support all types of Dfsu
        builder = DfsuBuilder.Create(DfsuFileType.Dfsu2D)

        # Setup header and geometry, copy from source file

        # zn has to be single precision
        zn = Array[System.Single](list(mesh.Z))
        builder.SetNodes(mesh.X, mesh.Y, zn, mesh.Code)

        builder.SetElements(mesh.ElementTable)
        factory = DfsFactory()
        proj = factory.CreateProjection(mesh.ProjectionString)
        builder.SetProjection(proj)
        builder.SetTimeInfo(system_start_time, dt)
        builder.SetZUnit(eumUnit.eumUmeter)

        for item in items:
            builder.AddDynamicItem(item.name,
                                   eumQuantity.Create(item.type, item.unit))

        try:
            dfs = builder.CreateFile(filename)
        except IOError:
            print("cannot create dfsu file: ", filename)

        deletevalue = dfs.DeleteValueFloat

        # Add data for all item-timesteps, copying from source
        for i in range(n_time_steps):
            for item in range(n_items):
                d = data[item][i, :]
                d[np.isnan(d)] = deletevalue
                darray = Array[System.Single](np.array(
                    d.reshape(d.size, 1)[:, 0]))
                dfs.WriteItemTimeStepNext(0, darray)

        dfs.Close()
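This variant takes ItemInfo objects instead of raw eum integer codes; a sketch under the same assumptions as before (the Dfsu class name, the file paths and the mikeio import path are not taken from the example):

# Hypothetical call with ItemInfo items; all names and paths are assumptions.
import numpy as np
from datetime import datetime
from mikeio.eum import ItemInfo, EUMType, EUMUnit   # assumed import path

data = [np.random.random((10, 884)).astype(np.float32)]   # one item: (time, elements)

writer = Dfsu()                                    # assumed owner class of create()
writer.create(
    meshfilename="area.mesh",
    filename="output.dfsu",
    data=data,
    start_time=datetime(2020, 1, 1),
    dt=3600,
    items=[ItemInfo("Surface elevation", EUMType.Water_Level, EUMUnit.meter)],
)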
Exemple #20
0
    def create(
        self,
        filename,
        data,
        start_time=None,
        dt=1,
        datetimes=None,
        items=None,
        length_x=1,
        length_y=1,
        x0=0,
        y0=0,
        coordinate=None,
        timeseries_unit=TimeStep.SECOND,
        title=None,
    ):
        """
        Create a dfs2 file

        Parameters
        ----------

        filename: str
            Location to write the dfs2 file
        data: list[np.array]
            list of matrices, one for each item. Matrix dimension: time, y, x
        start_time: datetime, optional
            start date of type datetime.
        timeseries_unit: Timestep, optional
            TimeStep default TimeStep.SECOND
        dt: float, optional
            The time step, in units of timeseries_unit; e.g. dt of 5.5 with timeseries_unit of
            TimeStep.MINUTE means 5 minutes and 30 seconds. Default 1
        datetimes: list[datetime], optional
            datetimes, creates a non-equidistant calendar axis
        items: list[ItemInfo], optional
            List of ItemInfo corresponding to a variable types (ie. Water Level).
        coordinate:
            ['UTM-33', 12.4387, 55.2257, 327]  for UTM, Long, Lat, North to Y orientation. Note: long, lat in decimal degrees
        x0: float, optional
            origin of the grid in the x direction (projection units)
        y0: float, optional
            origin of the grid in the y direction (projection units)
        length_x: float, optional
            length of each grid cell in the x direction (projection units)
        length_y: float, optional
            length of each grid cell in the y direction (projection units)
        
        title: str, optional
            title of the dfs2 file. Default is blank.
        """

        if title is None:
            title = ""

        n_time_steps = np.shape(data[0])[0]
        number_y = np.shape(data[0])[1]
        number_x = np.shape(data[0])[2]

        n_items = len(data)

        if start_time is None:
            start_time = datetime.now()

        if coordinate is None:
            coordinate = ["LONG/LAT", 0, 0, 0]

        if items is None:
            items = [ItemInfo(f"temItem {i+1}") for i in range(n_items)]

        if not all(np.shape(d)[0] == n_time_steps for d in data):
            raise Warning(
                "ERROR data matrices in the time dimension do not all match in the data list. "
                "Data is list of matices [t,y,x]")
        if not all(np.shape(d)[1] == number_y for d in data):
            raise Warning(
                "ERROR data matrices in the Y dimension do not all match in the data list. "
                "Data is list of matices [t,y,x]")
        if not all(np.shape(d)[2] == number_x for d in data):
            raise Warning(
                "ERROR data matrices in the X dimension do not all match in the data list. "
                "Data is list of matices [t,y,x,]")

        if len(items) != n_items:
            raise Warning(
                "number of items must correspond to the number of arrays in data list"
            )

        if datetimes is None:
            equidistant = True

            if not isinstance(start_time, datetime):
                raise Warning("start_time must be of type datetime")
        else:
            equidistant = False
            start_time = datetimes[0]

        # if not isinstance(timeseries_unit, int):
        #    raise Warning("timeseries_unit must be an integer. timeseries_unit: second=1400, minute=1401, hour=1402, "
        #                  "day=1403, month=1405, year= 1404See dfsutil options for help ")

        system_start_time = System.DateTime(
            start_time.year,
            start_time.month,
            start_time.day,
            start_time.hour,
            start_time.minute,
            start_time.second,
        )

        # Create an empty dfs2 file object
        factory = DfsFactory()
        builder = Dfs2Builder.Create(title, "mikeio", 0)

        # Set up the header
        builder.SetDataType(0)

        if coordinate[0] == "LONG/LAT":
            builder.SetGeographicalProjection(
                factory.CreateProjectionGeoOrigin(coordinate[0], coordinate[1],
                                                  coordinate[2],
                                                  coordinate[3]))
        else:
            builder.SetGeographicalProjection(
                factory.CreateProjectionProjOrigin(coordinate[0],
                                                   coordinate[1],
                                                   coordinate[2],
                                                   coordinate[3]))

        if equidistant:
            builder.SetTemporalAxis(
                factory.CreateTemporalEqCalendarAxis(timeseries_unit,
                                                     system_start_time, 0, dt))
        else:
            builder.SetTemporalAxis(
                factory.CreateTemporalNonEqCalendarAxis(
                    eumUnit.eumUsec, system_start_time))

        builder.SetSpatialAxis(
            factory.CreateAxisEqD2(eumUnit.eumUmeter, number_x, x0, length_x,
                                   number_y, y0, length_y))

        for i in range(n_items):
            builder.AddDynamicItem(
                items[i].name,
                eumQuantity.Create(items[i].type, items[i].unit),
                DfsSimpleType.Float,
                DataValueType.Instantaneous,
            )

        try:
            builder.CreateFile(filename)
        except IOError:
            print("cannot create dfs2 file: ", filename)

        dfs = builder.GetFile()
        deletevalue = dfs.FileInfo.DeleteValueFloat  # -1.0000000031710769e-30

        for i in range(n_time_steps):
            for item in range(n_items):
                d = data[item][i, :, :]
                d[np.isnan(d)] = deletevalue
                d = d.reshape(number_y, number_x)
                d = np.flipud(d)
                darray = Array[System.Single](np.array(
                    d.reshape(d.size, 1)[:, 0]))

                if equidistant:
                    dfs.WriteItemTimeStepNext(0, darray)
                else:
                    t = datetimes[i]
                    relt = (t - start_time).total_seconds()
                    dfs.WriteItemTimeStepNext(relt, darray)

        dfs.Close()
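Passing datetimes switches the file to a non-equidistant calendar axis, with start_time taken from the first entry; a minimal sketch of that mode (the owner class name, file name and dates are invented):

# Hypothetical non-equidistant call; class name, paths and dates are assumptions.
import numpy as np
from datetime import datetime

nt, ny, nx = 3, 10, 10
data = [np.random.random((nt, ny, nx)).astype(np.float32)]
datetimes = [datetime(2020, 1, 1), datetime(2020, 1, 2), datetime(2020, 1, 5)]  # irregular spacing

writer = Dfs2()                      # assumed owner class of create()
writer.create(
    filename="irregular.dfs2",
    data=data,
    datetimes=datetimes,             # start_time is taken from datetimes[0]
    length_x=100,
    length_y=100,
)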
Exemple #21
0
def dfs2todfs1(dfs2file, dfs1file, ax=0, func=np.nanmean):
    """ Function: take average (or other statistics) over axis in dfs2 and output to dfs1

    Usage:
        dfs2todfs1(dfs2file, dfs1file)
        dfs2todfs1(dfs2file, dfs1file, axis)
        dfs2todfs1(dfs2file, dfs1file, axis, func=np.nanmean)
    """

    # Read dfs2
    dfs_in = DfsFileFactory.DfsGenericOpen(dfs2file)
    fileInfo = dfs_in.FileInfo

    # Basic info from input file
    axis = dfs_in.ItemInfo[0].SpatialAxis
    n_time_steps = fileInfo.TimeAxis.NumberOfTimeSteps
    if n_time_steps == 0:
        raise Warning(
            "Static dfs2 files (with no time steps) are not supported.")

    # Create an empty dfs1 file object
    factory = DfsFactory()
    builder = Dfs1Builder.Create(fileInfo.FileTitle, fileInfo.ApplicationTitle,
                                 fileInfo.ApplicationVersion)

    # Set up the header
    builder.SetDataType(fileInfo.DataType)
    builder.SetGeographicalProjection(fileInfo.Projection)
    builder.SetTemporalAxis(fileInfo.TimeAxis)
    builder.DeleteValueByte = fileInfo.DeleteValueByte
    builder.DeleteValueDouble = fileInfo.DeleteValueDouble
    builder.DeleteValueFloat = fileInfo.DeleteValueFloat
    builder.DeleteValueInt = fileInfo.DeleteValueInt
    builder.DeleteValueUnsignedInt = fileInfo.DeleteValueUnsignedInt

    # use x-axis (default) else y-axis
    if ax == 0:
        builder.SetSpatialAxis(
            factory.CreateAxisEqD1(axis.AxisUnit, axis.XCount, axis.X0,
                                   axis.Dx))
    else:
        builder.SetSpatialAxis(
            factory.CreateAxisEqD1(axis.AxisUnit, axis.YCount, axis.Y0,
                                   axis.Dy))

    # assume no compression keys
    if fileInfo.IsFileCompressed:
        raise Warning("Compressed files not supported")

    # custom blocks
    # cb = fileInfo.CustomBlocks #[0]
    # for j in range(safe_length(cb)):
    #    builder.AddCustomBlocks(cb[j])

    # static items
    while True:
        static_item = dfs_in.ReadStaticItemNext()
        if static_item is None:
            break
        builder.AddStaticItem(static_item)

    # dynamic items
    n_items = safe_length(dfs_in.ItemInfo)
    for item in range(n_items):
        ii = dfs_in.ItemInfo[item]
        builder.AddDynamicItem(ii.Name, ii.Quantity, DfsSimpleType.Float,
                               DataValueType.Instantaneous)

    try:
        builder.CreateFile(dfs1file)
    except IOError:
        print("cannot create dfs1 file: ", dfs1file)

    dfs_out = builder.GetFile()

    # read-write data
    deleteValue = fileInfo.DeleteValueFloat
    for it in range(n_time_steps):
        for item in range(n_items):
            itemdata = dfs_in.ReadItemTimeStep(item + 1, it)

            d = to_numpy(itemdata.Data)
            d[d == deleteValue] = np.nan
            d2 = d.reshape(axis.YCount, axis.XCount)
            d2 = np.flipud(d2)

            d1 = func(d2, axis=ax)
            d1[np.isnan(d1)] = deleteValue

            darray = Array[System.Single](d1)
            dfs_out.WriteItemTimeStepNext(itemdata.Time, darray)

    dfs_in.Close()
    dfs_out.Close()
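Concrete calls of dfs2todfs1, with file names invented for illustration: ax=0 keeps the x-axis and reduces over y, ax=1 keeps the y-axis and reduces over x, and any numpy reduction that accepts an axis argument can be passed as func.

# Example calls; the dfs2/dfs1 file names are made up.
import numpy as np

dfs2todfs1("rain.dfs2", "rain_mean.dfs1")                         # keep x, average over y
dfs2todfs1("rain.dfs2", "rain_max.dfs1", ax=1, func=np.nanmax)    # keep y, max over x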
Exemple #22
0
class _Dfs123:

    _filename = None
    _projstr = None
    _start_time = None
    _is_equidistant = True
    _items = None
    _builder = None
    _factory = None
    _deletevalue = None
    _override_coordinates = False
    _timeseries_unit = TimeStepUnit.SECOND
    _dt = None

    def __init__(self, filename=None):
        self._filename = filename

    def read(self, items=None, time_steps=None):
        """
        Read data from a dfs file
        
        Parameters
        ----------
        items: list[int] or list[str], optional
            Read only selected items, by number (0-based), or by name
        time_steps: int or list[int], optional
            Read only selected time_steps

        Returns
        -------
        Dataset
        """
        self._open()

        items, item_numbers, time_steps = self._get_valid_items_and_timesteps(
            items, time_steps)

        for t in time_steps:
            if t > (self.n_timesteps - 1):
                raise IndexError(f"Timestep {t} is > {self.n_timesteps-1}")

        n_items = len(item_numbers)
        nt = len(time_steps)

        if self._ndim == 1:
            shape = (nt, self._nx)
        elif self._ndim == 2:
            shape = (nt, self._ny, self._nx)
        else:
            shape = (nt, self._nz, self._ny, self._nx)

        data_list = [np.ndarray(shape=shape) for item in range(n_items)]

        t_seconds = np.zeros(len(time_steps))

        for i, it in enumerate(time_steps):
            for item in range(n_items):

                itemdata = self._dfs.ReadItemTimeStep(item_numbers[item] + 1,
                                                      it)

                src = itemdata.Data
                d = to_numpy(src)

                d[d == self.deletevalue] = np.nan

                if self._ndim == 2:
                    d = d.reshape(self._ny, self._nx)
                    d = np.flipud(d)

                data_list[item][i] = d

            t_seconds[i] = itemdata.Time

        time = [self.start_time + timedelta(seconds=t) for t in t_seconds]

        items = self._get_item_info(item_numbers)

        self._dfs.Close()
        return Dataset(data_list, time, items)

    def _read_header(self):
        dfs = self._dfs
        self._n_items = len(dfs.ItemInfo)
        self._items = self._get_item_info(list(range(self._n_items)))
        self._start_time = from_dotnet_datetime(
            dfs.FileInfo.TimeAxis.StartDateTime)
        if hasattr(dfs.FileInfo.TimeAxis, "TimeStep"):
            self._timestep_in_seconds = (dfs.FileInfo.TimeAxis.TimeStep
                                         )  # TODO handle other timeunits
        self._n_timesteps = dfs.FileInfo.TimeAxis.NumberOfTimeSteps
        self._projstr = dfs.FileInfo.Projection.WKTString
        self._longitude = dfs.FileInfo.Projection.Longitude
        self._latitude = dfs.FileInfo.Projection.Latitude
        self._orientation = dfs.FileInfo.Projection.Orientation
        self._deletevalue = dfs.FileInfo.DeleteValueFloat

        dfs.Close()

    def _write(
        self,
        filename,
        data,
        start_time,
        dt,
        items,
        coordinate,
        title,
    ):
        self._write_handle_common_arguments(title, data, items, coordinate,
                                            start_time, dt)

        shape = np.shape(data[0])
        if self._ndim == 1:
            self._nx = shape[1]
        elif self._ndim == 2:
            self._ny = shape[1]
            self._nx = shape[2]

        self._factory = DfsFactory()
        self._set_spatial_axis()

        if self._ndim == 1:
            if not all(np.shape(d)[1] == self._nx for d in data):
                raise DataDimensionMismatch()

        if self._ndim == 2:
            if not all(np.shape(d)[1] == self._ny for d in data):
                raise DataDimensionMismatch()

            if not all(np.shape(d)[2] == self._nx for d in data):
                raise DataDimensionMismatch()

        dfs = self._setup_header(filename)

        deletevalue = dfs.FileInfo.DeleteValueFloat  # -1.0000000031710769e-30

        for i in range(self._n_timesteps):
            for item in range(self._n_items):

                d = self._data[item][i]
                d = d.copy()  # avoid modifying the caller's data
                d[np.isnan(d)] = deletevalue

                if self._ndim == 1:
                    darray = to_dotnet_float_array(d)

                if self._ndim == 2:
                    d = d.reshape(self.shape[1:])
                    d = np.flipud(d)
                    darray = to_dotnet_float_array(d.reshape(d.size, 1)[:, 0])

                dfs.WriteItemTimeStepNext(0, darray)

        dfs.Close()

    def _write_handle_common_arguments(self, title, data, items, coordinate,
                                       start_time, dt):

        if title is None:
            self._title = ""

        self._n_timesteps = np.shape(data[0])[0]
        self._n_items = len(data)

        if coordinate is None:
            if self._projstr is not None:
                self._coordinate = [
                    self._projstr,
                    self._longitude,
                    self._latitude,
                    self._orientation,
                ]
            else:
                warnings.warn("No coordinate system provided")
                self._coordinate = ["LONG/LAT", 0, 0, 0]
        else:
            self._override_coordinates = True
            self._coordinate = coordinate

        if isinstance(data, Dataset):
            self._items = data.items
            self._start_time = data.time[0]
            if dt is None and len(data.time) > 1:
                self._dt = (data.time[1] - data.time[0]).total_seconds()
            self._data = data.data
        else:
            self._data = data

        if start_time is None:
            if self._start_time is None:
                self._start_time = datetime.now()
                warnings.warn(
                    f"No start time supplied. Using current time: {self._start_time} as start time."
                )
            else:
                self._start_time = self._start_time
        else:
            self._start_time = start_time

        if dt:
            self._dt = dt

        if self._dt is None:
            self._dt = 1
            warnings.warn("No timestep supplied. Using 1s.")

        if items:
            self._items = items

        if self._items is None:
            self._items = [
                ItemInfo(f"Item {i+1}") for i in range(self._n_items)
            ]

        self._timeseries_unit = TimeStepUnit.SECOND

    def _setup_header(self, filename):

        system_start_time = to_dotnet_datetime(self._start_time)

        self._builder.SetDataType(0)

        if self._coordinate[0] == "LONG/LAT":
            proj = self._factory.CreateProjectionGeoOrigin(*self._coordinate)
        else:
            if self._override_coordinates:
                proj = self._factory.CreateProjectionProjOrigin(
                    *self._coordinate)
            else:
                proj = self._factory.CreateProjectionGeoOrigin(
                    *self._coordinate)

        self._builder.SetGeographicalProjection(proj)

        if self._is_equidistant:
            self._builder.SetTemporalAxis(
                self._factory.CreateTemporalEqCalendarAxis(
                    self._timeseries_unit, system_start_time, 0, self._dt))
        else:
            self._builder.SetTemporalAxis(
                self._factory.CreateTemporalNonEqCalendarAxis(
                    self._timeseries_unit, system_start_time))

        for item in self._items:
            self._builder.AddDynamicItem(
                item.name,
                eumQuantity.Create(item.type, item.unit),
                DfsSimpleType.Float,
                DataValueType.Instantaneous,
            )

        try:
            self._builder.CreateFile(filename)
        except IOError:
            # TODO does this make sense?
            print("cannot create dfs file: ", filename)

        return self._builder.GetFile()

    def _get_valid_items_and_timesteps(self, items, time_steps):

        if isinstance(items, int) or isinstance(items, str):
            items = [items]

        if items is not None and isinstance(items[0], str):
            items = self._find_item(items)

        if items is None:
            item_numbers = list(range(self.n_items))
        else:
            item_numbers = items

        if time_steps is None:
            time_steps = list(range(self.n_timesteps))

        if isinstance(time_steps, int):
            time_steps = [time_steps]

        if isinstance(time_steps, str):
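            # e.g. "2018-1-1,2018-3-1": split into (start, stop) date strings and
            # converted into a slice of time step indices further below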
            parts = time_steps.split(",")
            if parts[0] == "":
                time_steps = slice(parts[1])  # stop only
            elif parts[1] == "":
                time_steps = slice(parts[0], None)  # start only
            else:
                time_steps = slice(parts[0], parts[1])

        if isinstance(time_steps, slice):
            freq = pd.tseries.offsets.DateOffset(seconds=self.timestep)
            time = pd.date_range(self.start_time,
                                 periods=self.n_timesteps,
                                 freq=freq)
            s = time.slice_indexer(time_steps.start, time_steps.stop)
            time_steps = list(range(s.start, s.stop))

        items = self._get_item_info(item_numbers)

        return items, item_numbers, time_steps

    def _open(self):
        raise NotImplementedError("Should be implemented by subclass")

    def _set_spatial_axis(self):
        raise NotImplementedError("Should be implemented by subclass")

    def _find_item(self, item_names):
        """Utility function to find item numbers

        Parameters
        ----------
        item_names : list[str]
            Names of items to be found

        Returns
        -------
        list[int]
            item numbers (0-based)

        Raises
        ------
        KeyError
            In case item is not found in the dfs file
        """
        names = [x.Name for x in self._dfs.ItemInfo]
        item_lookup = {name: i for i, name in enumerate(names)}
        try:
            item_numbers = [item_lookup[x] for x in item_names]
        except KeyError:
            raise KeyError(
                f"Selected item name not found. Valid names are {names}")

        return item_numbers

    def _get_item_info(self, item_numbers):
        """Read DFS ItemInfo

        Parameters
        ----------
        item_numbers : list[int]
            item numbers (0-based) to read info for

        Returns
        -------
        list[ItemInfo]
        """
        items = []
        for item in item_numbers:
            name = self._dfs.ItemInfo[item].Name
            eumItem = self._dfs.ItemInfo[item].Quantity.Item
            eumUnit = self._dfs.ItemInfo[item].Quantity.Unit
            itemtype = EUMType(eumItem)
            unit = EUMUnit(eumUnit)
            item = ItemInfo(name, itemtype, unit)
            items.append(item)
        return items

    @property
    def deletevalue(self):
        "File delete value"
        return self._deletevalue

    @property
    def n_items(self):
        "Number of items"
        return self._n_items

    @property
    def items(self):
        "List of items"
        return self._items

    @property
    def start_time(self):
        """File start time
        """
        return self._start_time

    @property
    def n_timesteps(self):
        """Number of time steps
        """
        return self._n_timesteps

    @property
    def timestep(self):
        """Time step size in seconds
        """
        return self._timestep_in_seconds

    @property
    def projection_string(self):
        return self._projstr

    @property
    def longitude(self):
        """Origin longitude
        """
        return self._longitude

    @property
    def latitude(self):
        """Origin latitude
        """
        return self._latitude

    @property
    def orientation(self):
        """North to Y orientation
        """
        return self._orientation
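_Dfs123 itself is abstract, so read() is used through a concrete subclass; a minimal sketch, assuming a Dfs2-style subclass and an existing file (both are assumptions, not part of the example):

# Hypothetical usage of read(); the Dfs2 subclass and file name are assumptions.
dfs = Dfs2("test.dfs2")

ds = dfs.read()                                 # all items, all time steps -> Dataset
ds = dfs.read(items=["Water Level"])            # select items by name
ds = dfs.read(time_steps=[0, 1, 2])             # select time steps by index
ds = dfs.read(time_steps="2020-1-1,2020-2-1")   # date-range string (start,stop)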
Exemple #23
0
    def write(
        self,
        filename,
        data,
        start_time=None,
        dt=1,
        items=None,
        dx=1,
        x0=0,
        coordinate=None,
        title=None,
    ):
        """
        Write a dfs1 file

        Parameters
        ----------
        filename: str
            Location to write the dfs1 file
        data: list[np.array]
            list of matrices, one for each item. Matrix dimension: time, x
        start_time: datetime, optional
            start datetime
        dt: float
            The time step in seconds. 
        items: list[ItemInfo], optional
            List of ItemInfo corresponding to a variable types (ie. Water Level).
        coordinate:
            ['UTM-33', 12.4387, 55.2257, 327]  for UTM, Long, Lat, North to Y orientation. Note: long, lat in decimal degrees
        x0:
            origin of the grid in the x direction (projection units)
        dx:
            length of each grid cell in the x direction (meters)
        title: str, optional
            title of the dfs file (can be blank)

        """

        self._write_handle_common_arguments(title, data, items, coordinate,
                                            start_time, dt)

        number_x = np.shape(data[0])[1]

        if dx is None:
            if self._dx is not None:
                dx = self._dx
            else:
                dx = 1

        if not all(np.shape(d)[1] == number_x for d in data):
            raise ValueError(
                "ERROR data matrices in the X dimension do not all match in the data list. "
                "Data is list of matices [t, x]")

        factory = DfsFactory()
        builder = Dfs1Builder.Create(title, "mikeio", 0)

        self._builder = builder
        self._factory = factory

        builder.SetSpatialAxis(
            factory.CreateAxisEqD1(eumUnit.eumUmeter, number_x, x0, dx))

        dfs = self._setup_header(filename)

        deletevalue = dfs.FileInfo.DeleteValueFloat  # -1.0000000031710769e-30

        for i in range(self._n_time_steps):
            for item in range(self._n_items):
                d = data[item][i, :]
                d[np.isnan(d)] = deletevalue

                darray = to_dotnet_float_array(d)
                dfs.WriteItemTimeStepNext(0, darray)

        dfs.Close()
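A minimal usage sketch of the dfs1 write method above; the owner class name, the ItemInfo import path and the file name are assumptions:

# Hypothetical call of the dfs1 write() above; names and paths are assumptions.
import numpy as np
from datetime import datetime
from mikeio.eum import ItemInfo      # assumed import path for ItemInfo

nt, nx = 24, 50
data = [np.random.random((nt, nx)).astype(np.float32)]   # one item, [t, x]

dfs = Dfs1()                         # assumed owner class of write()
dfs.write(
    filename="example.dfs1",
    data=data,
    start_time=datetime(2020, 1, 1),
    dt=3600,
    dx=10,
    items=[ItemInfo("Water Level")],
)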