def _setup_header(self):
    factory = DfsFactory()
    builder = DfsBuilder.Create(self._title, "DFS", 0)
    builder.SetDataType(1)
    builder.SetGeographicalProjection(factory.CreateProjectionUndefined())

    system_start_time = to_dotnet_datetime(self._start_time)

    if self._is_equidistant:
        temporal_axis = factory.CreateTemporalEqCalendarAxis(
            self._timeseries_unit, system_start_time, 0, self._dt
        )
    else:
        temporal_axis = factory.CreateTemporalNonEqCalendarAxis(
            self._timeseries_unit, system_start_time
        )

    builder.SetTemporalAxis(temporal_axis)
    builder.SetItemStatisticsType(StatType.RegularStat)

    dtype_dfs = self._to_dfs_datatype(self._dtype)

    for i in range(self._n_items):
        item = self._items[i]
        newitem = builder.CreateDynamicItemBuilder()
        quantity = eumQuantity.Create(item.type, item.unit)
        newitem.Set(
            item.name,
            quantity,
            dtype_dfs,
        )

        if self._data_value_type is not None:
            newitem.SetValueType(self._data_value_type[i])
        else:
            newitem.SetValueType(DataValueType.Instantaneous)

        newitem.SetAxis(factory.CreateAxisEqD0())
        builder.AddDynamicItem(newitem.GetDynamicItemInfo())

    try:
        builder.CreateFile(self._filename)
    except IOError:
        raise IOError(f"Cannot create dfs0 file: {self._filename}")

    return builder.GetFile()
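# A hedged sketch of the _to_dfs_datatype helper referenced in _setup_header above.
# The real implementation is not shown in this section; this assumes it only needs to
# distinguish single and double precision, and that DfsSimpleType and numpy (np) are
# imported at module level like the other names used here.
def _to_dfs_datatype(self, dtype):
    if dtype is None or np.dtype(dtype) == np.float32:
        return DfsSimpleType.Float
    if np.dtype(dtype) == np.float64:
        return DfsSimpleType.Double
    raise TypeError(f"Unsupported dtype for dfs0: {dtype}")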
def create(
    self,
    filename,
    data,
    start_time=None,
    timeseries_unit=TimeStep.SECOND,
    dt=1.0,
    datetimes=None,
    items=None,
    title=None,
    data_value_type=None,
):
    """Create a dfs0 file.

    Parameters
    ----------
    filename: str
        Full path and filename of the dfs0 file to be created.
    data: list[np.array]
        values
    start_time: datetime.datetime, optional
        start date, default datetime.now()
    timeseries_unit: TimeStep, optional
        time step unit, default TimeStep.SECOND
    dt: float, optional
        the time step. A dt of 5.5 with timeseries_unit of minutes
        means 5 minutes and 30 seconds. Default 1.0
    datetimes: list[datetime.datetime], optional
        explicit time stamps; if given, a non-equidistant time axis is used
    items: list[ItemInfo], optional
        List of ItemInfo describing the variables (e.g. Water Level).
    title: str, optional
        title
    data_value_type: list[DataValueType], optional
        DataValueType, default DataValueType.Instantaneous
    """
    if title is None:
        title = "dfs0 file"

    n_items = len(data)
    n_time_steps = np.shape(data[0])[0]

    if start_time is None:
        start_time = datetime.now()

    if items is None:
        items = [ItemInfo(f"Item {i + 1}") for i in range(n_items)]

    if len(items) != n_items:
        raise ValueError(
            "items must contain the same number of elements as data columns"
        )

    if datetimes is None:
        equidistant = True

        if not isinstance(start_time, datetime):
            raise ValueError("start_time must be of type datetime")

        dt = float(dt)
        datetimes = np.array(
            [
                start_time + timedelta(seconds=(step * dt))
                for step in np.arange(n_time_steps)
            ]
        )
    else:
        start_time = datetimes[0]
        equidistant = False

    # if not isinstance(timeseries_unit, int):
    #     raise ValueError("timeseries_unit must be an integer. See dfsutil options for help")

    system_start_time = to_dotnet_datetime(start_time)

    factory = DfsFactory()
    builder = DfsBuilder.Create(title, "DFS", 0)
    builder.SetDataType(1)
    builder.SetGeographicalProjection(factory.CreateProjectionUndefined())

    if equidistant:
        builder.SetTemporalAxis(
            factory.CreateTemporalEqCalendarAxis(
                timeseries_unit, system_start_time, 0, dt
            )
        )
    else:
        builder.SetTemporalAxis(
            factory.CreateTemporalNonEqCalendarAxis(
                timeseries_unit, system_start_time
            )
        )

    builder.SetItemStatisticsType(StatType.RegularStat)

    for i in range(n_items):
        item = builder.CreateDynamicItemBuilder()
        item.Set(
            items[i].name,
            eumQuantity.Create(items[i].type, items[i].unit),
            DfsSimpleType.Float,
        )

        if data_value_type is not None:
            item.SetValueType(data_value_type[i])
        else:
            item.SetValueType(DataValueType.Instantaneous)

        item.SetAxis(factory.CreateAxisEqD0())
        builder.AddDynamicItem(item.GetDynamicItemInfo())

    try:
        builder.CreateFile(filename)
    except IOError:
        raise IOError(f"Cannot create dfs0 file: {filename}")

    dfs = builder.GetFile()

    # Replace NaN with the dfs delete value and write all items in one call
    delete_value = dfs.FileInfo.DeleteValueFloat

    for i in range(n_items):
        d = data[i]
        d[np.isnan(d)] = delete_value

    data1 = np.stack(data, axis=1)
    t_seconds = [(t - datetimes[0]).total_seconds() for t in datetimes]
    Dfs0Util.WriteDfs0DataDouble(dfs, t_seconds, to_dotnet_array(data1))

    dfs.Close()
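# Usage sketch for the create() method above. Assumes it is exposed on a dfs0 writer
# class (called Dfs0 here purely for illustration) and that ItemInfo falls back to an
# undefined type/unit when only a name is given; the output path is a placeholder.
import numpy as np
from datetime import datetime

d1 = np.random.random(24)
d2 = np.random.random(24)

dfs = Dfs0()
dfs.create(
    filename="example_equidistant.dfs0",   # illustrative output path
    data=[d1, d2],
    start_time=datetime(2021, 1, 1),
    dt=3600.0,                             # hourly steps expressed in seconds
    items=[ItemInfo("Level"), ItemInfo("Discharge")],
    title="example",
)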
class ExtractorDfs0(Extractor):
    """Class which extracts data to the dfs0 file format"""

    def Export(self):
        self.factory = DfsFactory()
        self.builder = self.CreateDfsBuilder()
        self.DefineDynamicDataItems()
        self.WriteDataItems()

    def CreateDfsBuilder(self):
        resultData = self.resultData
        factory = self.factory

        builder = DfsBuilder.Create("ResultDataExtractor-script", "MIKE SDK", 100)

        # Set up file header
        builder.SetDataType(1)
        builder.SetGeographicalProjection(factory.CreateProjectionUndefined())
        builder.SetTemporalAxis(
            factory.CreateTemporalNonEqCalendarAxis(eumUnit.eumUsec, resultData.StartTime)
        )
        builder.SetItemStatisticsType(StatType.NoStat)

        return builder

    def DefineDynamicDataItems(self):
        outputData = self.outputData
        resultData = self.resultData
        builder = self.builder

        for dataEntry in outputData:
            dataItem = dataEntry.dataItem
            elementIndex = dataEntry.elementIndex

            quantity = dataItem.Quantity
            itemTypeGroup = dataItem.ItemTypeGroup
            numberWithinGroup = dataItem.NumberWithinGroup

            reaches = list(resultData.Reaches)
            nodes = list(resultData.Nodes)
            catchments = list(resultData.Catchments)

            if itemTypeGroup == ItemTypeGroup.ReachItem:
                reach = reaches[numberWithinGroup]
                gridPointIndex = dataItem.IndexList[elementIndex]
                gridPoints = list(reach.GridPoints)
                chainage = gridPoints[gridPointIndex].Chainage
                itemName = "reach:%s:%s:%.3f" % (quantity.Id, reach.Name, chainage)
            elif itemTypeGroup == ItemTypeGroup.NodeItem:
                node = nodes[numberWithinGroup]
                itemName = "node:%s:%s" % (quantity.Id, node.Id)
            elif itemTypeGroup == ItemTypeGroup.CatchmentItem:
                catchment = catchments[numberWithinGroup]
                itemName = "catchment:%s:%s" % (quantity.Id, catchment.Id)
            else:
                itemName = "%s:%s:%s" % (itemTypeGroup, quantity.Id, dataItem.Id)

            item = builder.CreateDynamicItemBuilder()
            item.Set(itemName, dataItem.Quantity.EumQuantity, DfsSimpleType.Float)
            item.SetValueType(DataValueType.Instantaneous)
            item.SetAxis(self.factory.CreateAxisEqD0())
            builder.AddDynamicItem(item.GetDynamicItemInfo())

    def WriteDataItems(self):
        outputData = self.outputData
        resultData = self.resultData
        builder = self.builder

        # Create file
        builder.CreateFile(self.outFileName)
        dfsfile = builder.GetFile()

        times = list(resultData.TimesList)

        # Write data to file
        val = Array.CreateInstance(System.Single, 1)
        for timeStepIndex in range(resultData.NumberOfTimeSteps):
            if timeStepIndex % self.timeStepSkippingNumber != 0:
                continue

            time = times[timeStepIndex].Subtract(resultData.StartTime).TotalSeconds

            for dataEntry in outputData:
                dataItem = dataEntry.dataItem
                elementIndex = dataEntry.elementIndex
                val[0] = dataItem.GetValue(timeStepIndex, elementIndex)
                dfsfile.WriteItemTimeStepNext(time, val)

        dfsfile.Close()
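# The modulo filter in WriteDataItems above thins the output in time. A small,
# self-contained illustration of which time-step indices survive for a given
# timeStepSkippingNumber (the value 5 is chosen purely for illustration):
time_step_skipping_number = 5
kept = [i for i in range(23) if i % time_step_skipping_number == 0]
print(kept)  # [0, 5, 10, 15, 20]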
def dfstodfs0(dfsfile, dfs0file, func=np.nanmean):
    """Take the average (or another statistic) over a dfs file and output a dfs0 file

    Usage:
        dfstodfs0(dfsfile, dfs0file)
        dfstodfs0(dfsfile, dfs0file, func=np.nanmean)
    """

    # Read dfs
    dfs_in = DfsFileFactory.DfsGenericOpen(dfsfile)
    fileInfo = dfs_in.FileInfo

    # Basic info from input file
    n_time_steps = fileInfo.TimeAxis.NumberOfTimeSteps
    if n_time_steps == 0:
        raise ValueError("Static dfs files (with no time steps) are not supported.")

    # Create an empty dfs0 file object
    factory = DfsFactory()
    builder = DfsBuilder.Create(
        fileInfo.FileTitle, fileInfo.ApplicationTitle, fileInfo.ApplicationVersion
    )

    # Set up the header
    builder.SetDataType(fileInfo.DataType)
    builder.SetGeographicalProjection(factory.CreateProjectionUndefined())
    builder.SetTemporalAxis(fileInfo.TimeAxis)
    builder.DeleteValueByte = fileInfo.DeleteValueByte
    builder.DeleteValueDouble = fileInfo.DeleteValueDouble
    builder.DeleteValueFloat = fileInfo.DeleteValueFloat
    builder.DeleteValueInt = fileInfo.DeleteValueInt
    builder.DeleteValueUnsignedInt = fileInfo.DeleteValueUnsignedInt

    # Dynamic items
    n_items = safe_length(dfs_in.ItemInfo)
    for item in range(n_items):
        ii = dfs_in.ItemInfo[item]
        itemj = builder.CreateDynamicItemBuilder()
        itemj.Set(ii.Name, ii.Quantity, DfsSimpleType.Float)
        itemj.SetValueType(DataValueType.Instantaneous)
        itemj.SetAxis(factory.CreateAxisEqD0())
        # itemj.SetReferenceCoordinates(0, 0, 0)
        builder.AddDynamicItem(itemj.GetDynamicItemInfo())

    try:
        builder.CreateFile(dfs0file)
    except IOError:
        raise IOError(f"Cannot create dfs0 file: {dfs0file}")

    dfs_out = builder.GetFile()

    # Read each time step from the input file, aggregate over all elements with func,
    # and write a single value per item to the output dfs0
    deleteValue = fileInfo.DeleteValueFloat
    for it in range(n_time_steps):
        for item in range(n_items):
            itemdata = dfs_in.ReadItemTimeStep(item + 1, it)

            d = to_numpy(itemdata.Data)
            d[d == deleteValue] = np.nan
            d0 = func(d)

            d = np.zeros(1)
            d[0] = d0
            d[np.isnan(d)] = deleteValue

            darray = to_dotnet_float_array(d)

            dfs_out.WriteItemTimeStepNext(itemdata.Time, darray)

    dfs_in.Close()
    dfs_out.Close()
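# Usage sketch for dfstodfs0 above; file names are placeholders. Because the input is
# opened with DfsGenericOpen, any dfs file with a time axis should work.
import numpy as np

dfstodfs0("area.dfsu", "area_mean.dfs0")                 # spatial mean per time step
dfstodfs0("area.dfsu", "area_max.dfs0", func=np.nanmax)  # spatial maximum per time step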
def create(
    self,
    filename,
    data,
    start_time=None,
    timeseries_unit=TimeStep.SECOND,
    dt=1,
    datetimes=None,
    variable_type=None,
    unit=None,
    names=None,
    title=None,
    data_value_type=None,
):
    """Create a dfs0 file.

    filename: Full path and filename of the dfs0 file to be created.
    data: a numpy matrix
    start_time: start date of type datetime.
    timeseries_unit: TimeStep, default TimeStep.SECOND
    dt: the time step. A dt of 5.5 with timeseries_unit of minutes
        means 5 minutes and 30 seconds. Default 1
    variable_type: Array of integers corresponding to the variable types
        (e.g. Water Level). Use dfsutil type_list to find the integer
        corresponding to the variable.
    unit: Array of integers corresponding to the units of the variable types
        (meters, seconds, ...). Use dfsutil unit_list to find the unit
        corresponding to the variable.
    names: array of names (i.e. array of strings)
    title: title (string)
    data_value_type: DataValueType, default DataValueType.Instantaneous
    """
    if title is None:
        title = "dfs0 file"

    n_items = len(data)
    n_time_steps = np.shape(data[0])[0]

    if start_time is None:
        start_time = datetime.now()

    if names is None:
        names = [f"Item {i + 1}" for i in range(n_items)]

    if variable_type is None:
        variable_type = [999] * n_items

    if unit is None:
        unit = [0] * n_items

    if len(names) != n_items:
        raise ValueError(
            "names must be an array of strings with the same number of elements as data columns"
        )

    if len(variable_type) != n_items:
        raise ValueError(
            "variable_type, if specified, must be an array of integers (eumType) with "
            "the same number of elements as data columns"
        )

    if len(unit) != n_items:
        raise ValueError(
            "unit, if specified, must be an array of integers (eumUnit) with "
            "the same number of elements as data columns"
        )

    if datetimes is None:
        equidistant = True

        if not isinstance(start_time, datetime):
            raise ValueError("start_time must be of type datetime")
    else:
        start_time = datetimes[0]
        equidistant = False

    # if not isinstance(timeseries_unit, int):
    #     raise ValueError("timeseries_unit must be an integer. See dfsutil options for help")

    system_start_time = System.DateTime(
        start_time.year,
        start_time.month,
        start_time.day,
        start_time.hour,
        start_time.minute,
        start_time.second,
    )

    factory = DfsFactory()
    builder = DfsBuilder.Create(title, "DFS", 0)
    builder.SetDataType(1)
    builder.SetGeographicalProjection(factory.CreateProjectionUndefined())

    if equidistant:
        builder.SetTemporalAxis(
            factory.CreateTemporalEqCalendarAxis(timeseries_unit, system_start_time, 0, dt)
        )
    else:
        builder.SetTemporalAxis(
            factory.CreateTemporalNonEqCalendarAxis(timeseries_unit, system_start_time)
        )

    builder.SetItemStatisticsType(StatType.RegularStat)

    for i in range(n_items):
        item = builder.CreateDynamicItemBuilder()
        if variable_type is not None:
            item.Set(
                names[i],
                eumQuantity.Create(variable_type[i], unit[i]),
                DfsSimpleType.Float,
            )
        else:
            item.Set(
                str(i),
                eumQuantity.Create(eumItem.eumIItemUndefined, 0),
                DfsSimpleType.Float,
            )

        if data_value_type is not None:
            item.SetValueType(data_value_type[i])
        else:
            item.SetValueType(DataValueType.Instantaneous)

        item.SetAxis(factory.CreateAxisEqD0())
        builder.AddDynamicItem(item.GetDynamicItemInfo())

    try:
        builder.CreateFile(filename)
    except IOError:
        raise IOError(f"Cannot create dfs0 file: {filename}")

    dfs = builder.GetFile()

    delete_value = dfs.FileInfo.DeleteValueFloat

    for i in range(n_items):
        d = data[i]
        d[np.isnan(d)] = delete_value

    # Copy over the data, one item value per time step
    for it in range(n_time_steps):
        for ii in range(n_items):
            d = Array[System.Single](np.array(data[ii][it : it + 1]))
            if equidistant:
                dfs.WriteItemTimeStepNext(it, d)
            else:
                t_sec = (datetimes[it] - datetimes[0]).total_seconds()
                dfs.WriteItemTimeStepNext(t_sec, d)

    dfs.Close()
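# Usage sketch for the legacy create() signature above, exercising the non-equidistant
# branch. Assumes the method is exposed on a dfs0 writer class (called Dfs0 here for
# illustration); 999/0 are the function's own defaults for an undefined eum type/unit,
# and the output path is a placeholder.
import numpy as np
from datetime import datetime, timedelta

d1 = np.random.random(100)
d2 = np.random.random(100)
times = [datetime(2020, 1, 1) + timedelta(minutes=10 * i ** 1.1) for i in range(100)]

dfs = Dfs0()
dfs.create(
    filename="legacy_nonequidistant.dfs0",
    data=[d1, d2],
    datetimes=times,
    variable_type=[999, 999],
    unit=[0, 0],
    names=["Item 1", "Item 2"],
    title="legacy example",
)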