def dfs2todfs1(dfs2file, dfs1file, axis=1, func=np.nanmean):
    """Aggregate a dfs2 file over one spatial axis and write the result to dfs1.

    Parameters
    ----------
    dfs2file : str
        input file
    dfs1file : str
        output file
    axis : int, optional
        spatial axis to aggregate over, 1=y, 2=x, default 1
    func : function, optional
        aggregation function applied along the chosen axis,
        by default np.nanmean

    Raises
    ------
    ValueError
        if axis is not 1 or 2
    Warning
        if the input file has no time steps or is compressed
        (NOTE(review): raising the Warning *class* as an exception is
        unusual — kept for backward compatibility with existing callers)
    """
    if axis not in [1, 2]:
        raise ValueError("Axis must be 1=y or 2=x")

    # Open the input dfs2 via the generic reader
    dfs_in = DfsFileFactory.DfsGenericOpen(dfs2file)
    fileInfo = dfs_in.FileInfo

    # Basic info from input file; spatial axis is taken from the first item
    ax = dfs_in.ItemInfo[0].SpatialAxis
    n_time_steps = fileInfo.TimeAxis.NumberOfTimeSteps
    if n_time_steps == 0:
        raise Warning(
            "Static dfs2 files (with no time steps) are not supported.")

    # Create an empty dfs1 file object
    factory = DfsFactory()
    builder = Dfs1Builder.Create(fileInfo.FileTitle, fileInfo.ApplicationTitle,
                                 fileInfo.ApplicationVersion)

    # Copy the header of the input file (projection, time axis, delete values)
    builder.SetDataType(fileInfo.DataType)
    builder.SetGeographicalProjection(fileInfo.Projection)
    builder.SetTemporalAxis(fileInfo.TimeAxis)
    builder.DeleteValueByte = fileInfo.DeleteValueByte
    builder.DeleteValueDouble = fileInfo.DeleteValueDouble
    builder.DeleteValueFloat = fileInfo.DeleteValueFloat
    builder.DeleteValueInt = fileInfo.DeleteValueInt
    builder.DeleteValueUnsignedInt = fileInfo.DeleteValueUnsignedInt

    # The surviving axis is the one NOT aggregated over:
    # aggregating over y (axis=1) leaves the x-axis, and vice versa
    if axis == 1:
        builder.SetSpatialAxis(
            factory.CreateAxisEqD1(ax.AxisUnit, ax.XCount, ax.X0, ax.Dx))
    else:
        builder.SetSpatialAxis(
            factory.CreateAxisEqD1(ax.AxisUnit, ax.YCount, ax.Y0, ax.Dy))

    # assume no compression keys
    if fileInfo.IsFileCompressed:
        raise Warning("Compressed files not supported")

    # custom blocks
    # cb = fileInfo.CustomBlocks #[0]
    # for j in range(safe_length(cb)):
    #    builder.AddCustomBlocks(cb[j])

    # copy all static items to the output file
    while True:
        static_item = dfs_in.ReadStaticItemNext()
        if static_item is None:
            break
        builder.AddStaticItem(static_item)

    # dynamic items: same name/quantity as input, stored as float
    n_items = safe_length(dfs_in.ItemInfo)
    for item in range(n_items):
        ii = dfs_in.ItemInfo[item]
        builder.AddDynamicItem(ii.Name, ii.Quantity, DfsSimpleType.Float,
                               DataValueType.Instantaneous)

    try:
        builder.CreateFile(dfs1file)
    except IOError:
        # NOTE(review): only prints; builder.GetFile() below will then fail —
        # consider re-raising. Kept to preserve existing behavior.
        print("cannot create dfs1 file: ", dfs1file)

    dfs_out = builder.GetFile()

    # read-write data, replacing delete values with NaN for the aggregation
    deleteValue = fileInfo.DeleteValueFloat
    for it in range(n_time_steps):
        for item in range(n_items):
            itemdata = dfs_in.ReadItemTimeStep(item + 1, it)
            d = to_numpy(itemdata.Data)
            d[d == deleteValue] = np.nan
            d2 = d.reshape(ax.YCount, ax.XCount)
            # NOTE(review): flipud reorients rows; for axis=2 the aggregated
            # result is ordered along the flipped y-axis — confirm orientation
            # matches the output axis (Y0, Dy)
            d2 = np.flipud(d2)
            # numpy axis 0 aggregates over y, axis 1 over x
            d1 = func(d2, axis=axis - 1)
            d1[np.isnan(d1)] = deleteValue
            darray = to_dotnet_float_array(d1)
            dfs_out.WriteItemTimeStepNext(itemdata.Time, darray)

    dfs_in.Close()
    dfs_out.Close()
def create(self, filename, data, start_time=None, dt=1, length_x=1, x0=0,
           coordinate=None, timeseries_unit=TimeStep.SECOND, variable_type=None,
           unit=None, names=None, title=None):
    """
    Creates a dfs1 file

    filename:
        Location to write the dfs1 file
    data:
        list of matrices, one for each item. Matrix dimension: t, x
    start_time:
        start date of type datetime, defaults to datetime.now()
    timeseries_unit:
        TimeStep default TimeStep.SECOND
    dt:
        The time step (double based on the timeseries_unit). Therefore dt of 5.5 with
        timeseries_unit of minutes means 5 mins and 30 seconds.
    variable_type:
        Array integers corresponding to a variable types (ie. Water Level). Use dfsutil
        type_list to figure out the integer corresponding to the variable.
    unit:
        Array integers corresponding to the unit corresponding to the variable types
        The unit (meters, seconds), use dfsutil unit_list to figure out the corresponding
        unit for the variable.
    coordinate:
        ['UTM-33', 12.4387, 55.2257, 327] for UTM, Long, Lat, North to Y orientation.
        Note: long, lat in decimal degrees OR [TODO: Support not Local Coordinates ...]
    x0:
        origin of the x-axis (original doc said "Lower right position" — TODO confirm)
    length_x:
        length of each grid in the x direction (meters)
    names:
        array of names (ie. array of strings). (can be blank)
    title:
        title of the dfs1 file (can be blank)
    """
    if title is None:
        title = ""

    # data is a list of [t, x] matrices, one per item
    n_time_steps = np.shape(data[0])[0]
    number_x = np.shape(data[0])[1]
    n_items = len(data)

    if start_time is None:
        start_time = datetime.now()
    if coordinate is None:
        coordinate = ['LONG/LAT', 0, 0, 0]
    if names is None:
        names = [f"Item {i+1}" for i in range(n_items)]
    if variable_type is None:
        variable_type = [999] * n_items  # 999 = undefined quantity
    if unit is None:
        unit = [0] * n_items

    # validate that all items share the same [t, x] shape
    if not all(np.shape(d)[0] == n_time_steps for d in data):
        raise Warning(
            "ERROR data matrices in the time dimension do not all match in the data list. "
            "Data is list of matices [t, x]")
    if not all(np.shape(d)[1] == number_x for d in data):
        raise Warning(
            "ERROR data matrices in the X dimension do not all match in the data list. "
            "Data is list of matices [t, x]")
    if len(names) != n_items:
        raise Warning(
            "names must be an array of strings with the same number as matrices in data list"
        )
    if len(variable_type) != n_items or not all(
            isinstance(item, int) and 0 <= item < 1e15
            for item in variable_type):
        raise Warning(
            "type if specified must be an array of integers (enuType) with the same number of "
            "elements as data columns")
    if len(unit) != n_items or not all(
            isinstance(item, int) and 0 <= item < 1e15 for item in unit):
        raise Warning(
            "unit if specified must be an array of integers (enuType) with the same number of "
            "elements as data columns")
    # isinstance (rather than exact type check) also accepts datetime
    # subclasses such as pandas.Timestamp
    if not isinstance(start_time, datetime):
        raise Warning("start_time must be of type datetime ")

    #if not isinstance(timeseries_unit, int):
    #    raise Warning("timeseries_unit must be an integer. timeseries_unit: second=1400, minute=1401, hour=1402, "
    #                  "day=1403, month=1405, year= 1404See dfsutil options for help ")

    system_start_time = System.DateTime(start_time.year, start_time.month,
                                        start_time.day, start_time.hour,
                                        start_time.minute, start_time.second)

    # Create an empty dfs1 file object
    factory = DfsFactory()
    builder = Dfs1Builder.Create(title, 'mikeio', 0)

    # Set up the header
    builder.SetDataType(0)
    builder.SetGeographicalProjection(
        factory.CreateProjectionGeoOrigin(coordinate[0], coordinate[1],
                                          coordinate[2], coordinate[3]))
    builder.SetTemporalAxis(
        factory.CreateTemporalEqCalendarAxis(timeseries_unit,
                                             system_start_time, 0, dt))
    builder.SetSpatialAxis(
        factory.CreateAxisEqD1(eumUnit.eumUmeter, number_x, x0, length_x))

    for i in range(n_items):
        builder.AddDynamicItem(names[i],
                               eumQuantity.Create(variable_type[i], unit[i]),
                               DfsSimpleType.Float,
                               DataValueType.Instantaneous)

    try:
        builder.CreateFile(filename)
    except IOError:
        # fixed: message previously said "dfs2 file" although this creates dfs1
        print('cannot create dfs1 file: ', filename)

    dfs = builder.GetFile()
    deletevalue = dfs.FileInfo.DeleteValueFloat  #-1.0000000031710769e-30

    for i in range(n_time_steps):
        for item in range(n_items):
            # copy so the NaN -> deletevalue substitution does not
            # mutate the caller's input array
            d = data[item][i, :].copy()
            d[np.isnan(d)] = deletevalue
            darray = Array[System.Single](np.array(
                d.reshape(d.size, 1)[:, 0]))
            dfs.WriteItemTimeStepNext(0, darray)

    dfs.Close()
def dfs2todfs1(dfs2file, dfs1file, ax=0, func=np.nanmean):
    """Take average (or other statistics) over an axis in dfs2 and output to dfs1.

    Usage:
        dfs2todfs1(dfs2file, dfs1file)
        dfs2todfs1(dfs2file, dfs1file, axis)
        dfs2todfs1(dfs2file, dfs1file, axis, func=np.nanmean)

    Parameters
    ----------
    dfs2file : str
        input file
    dfs1file : str
        output file
    ax : int, optional
        numpy axis to aggregate over (0 aggregates over y, keeping the
        x-axis; otherwise the y-axis is kept), default 0
    func : function, optional
        aggregation function, by default np.nanmean
    """
    # Open the input dfs2 via the generic reader
    dfs_in = DfsFileFactory.DfsGenericOpen(dfs2file)
    fileInfo = dfs_in.FileInfo

    # Basic info from input file; spatial axis is taken from the first item
    axis = dfs_in.ItemInfo[0].SpatialAxis
    n_time_steps = fileInfo.TimeAxis.NumberOfTimeSteps
    if n_time_steps == 0:
        raise Warning(
            "Static dfs2 files (with no time steps) are not supported.")

    # Create an empty dfs1 file object
    factory = DfsFactory()
    builder = Dfs1Builder.Create(fileInfo.FileTitle, fileInfo.ApplicationTitle,
                                 fileInfo.ApplicationVersion)

    # Copy the header of the input file (projection, time axis, delete values)
    builder.SetDataType(fileInfo.DataType)
    builder.SetGeographicalProjection(fileInfo.Projection)
    builder.SetTemporalAxis(fileInfo.TimeAxis)
    builder.DeleteValueByte = fileInfo.DeleteValueByte
    builder.DeleteValueDouble = fileInfo.DeleteValueDouble
    builder.DeleteValueFloat = fileInfo.DeleteValueFloat
    builder.DeleteValueInt = fileInfo.DeleteValueInt
    builder.DeleteValueUnsignedInt = fileInfo.DeleteValueUnsignedInt

    # use x-axis (default) else y-axis
    if ax == 0:
        builder.SetSpatialAxis(
            factory.CreateAxisEqD1(axis.AxisUnit, axis.XCount, axis.X0,
                                   axis.Dx))
    else:
        builder.SetSpatialAxis(
            factory.CreateAxisEqD1(axis.AxisUnit, axis.YCount, axis.Y0,
                                   axis.Dy))

    # assume no compression keys
    if fileInfo.IsFileCompressed:
        raise Warning("Compressed files not supported")

    # custom blocks
    # cb = fileInfo.CustomBlocks #[0]
    # for j in range(safe_length(cb)):
    #    builder.AddCustomBlocks(cb[j])

    # copy all static items to the output file
    while True:
        static_item = dfs_in.ReadStaticItemNext()
        if static_item is None:
            break
        builder.AddStaticItem(static_item)

    # dynamic items: same name/quantity as input, stored as float
    n_items = safe_length(dfs_in.ItemInfo)
    for item in range(n_items):
        ii = dfs_in.ItemInfo[item]
        builder.AddDynamicItem(ii.Name, ii.Quantity, DfsSimpleType.Float,
                               DataValueType.Instantaneous)

    try:
        builder.CreateFile(dfs1file)
    except IOError:
        print("cannot create dfs1 file: ", dfs1file)

    dfs_out = builder.GetFile()

    # read-write data, replacing delete values with NaN for the aggregation
    deleteValue = fileInfo.DeleteValueFloat
    for it in range(n_time_steps):
        for item in range(n_items):
            itemdata = dfs_in.ReadItemTimeStep(item + 1, it)
            d = to_numpy(itemdata.Data)
            d[d == deleteValue] = np.nan
            d2 = d.reshape(axis.YCount, axis.XCount)
            d2 = np.flipud(d2)
            d1 = func(d2, axis=ax)
            d1[np.isnan(d1)] = deleteValue
            # use the shared converter (consistent with the other writers in
            # this file) instead of elementwise Array[System.Single](d1)
            darray = to_dotnet_float_array(d1)
            dfs_out.WriteItemTimeStepNext(itemdata.Time, darray)

    dfs_in.Close()
    dfs_out.Close()
def create(
    self,
    filename,
    data,
    start_time=None,
    dt=1,
    items=None,
    length_x=1,
    x0=0,
    coordinate=None,
    timeseries_unit=TimeStep.SECOND,
    title=None,
):
    """
    Create a dfs1 file

    Parameters
    ----------
    filename: str
        Location to write the dfs1 file
    data: list[np.array]
        list of matrices, one for each item. Matrix dimension: t, x
    start_time: datetime, optional
        start datetime, defaults to datetime.now()
    timeseries_unit: Timestep, optional
        TimeStep unit default TimeStep.SECOND
    dt: float
        The time step (double based on the timeseries_unit). Therefore dt of 5.5 with
        timeseries_unit of minutes means 5 mins and 30 seconds.
    items: list[ItemInfo], optional
        List of ItemInfo corresponding to a variable types (ie. Water Level).
    coordinate:
        ['UTM-33', 12.4387, 55.2257, 327] for UTM, Long, Lat, North to Y orientation.
        Note: long, lat in decimal degrees OR [TODO: Support not Local Coordinates ...]
    x0:
        origin of the x-axis (original doc said "Lower right position" — TODO confirm)
    length_x:
        length of each grid in the x direction (meters)
    title:
        title of the dfs1 file (can be blank)
    """
    if title is None:
        title = ""

    # data is a list of [t, x] matrices, one per item
    n_time_steps = np.shape(data[0])[0]
    number_x = np.shape(data[0])[1]
    n_items = len(data)

    if start_time is None:
        start_time = datetime.now()
    if coordinate is None:
        coordinate = ["LONG/LAT", 0, 0, 0]
    if items is None:
        # NOTE(review): "temItem" looks like a typo for "Item" (cf. the other
        # create variant) — kept, since changing it alters default item names
        items = [ItemInfo(f"temItem {i+1}") for i in range(n_items)]

    # validate that all items share the same [t, x] shape
    if not all(np.shape(d)[0] == n_time_steps for d in data):
        raise Warning(
            "ERROR data matrices in the time dimension do not all match in the data list. "
            "Data is list of matices [t, x]")
    if not all(np.shape(d)[1] == number_x for d in data):
        raise Warning(
            "ERROR data matrices in the X dimension do not all match in the data list. "
            "Data is list of matices [t, x]")
    if len(items) != n_items:
        raise Warning(
            "names must be an array of strings with the same number as matrices in data list"
        )
    # isinstance (rather than exact type check) also accepts datetime
    # subclasses such as pandas.Timestamp
    if not isinstance(start_time, datetime):
        raise Warning("start_time must be of type datetime ")

    system_start_time = System.DateTime(
        start_time.year,
        start_time.month,
        start_time.day,
        start_time.hour,
        start_time.minute,
        start_time.second,
    )

    # Create an empty dfs1 file object
    factory = DfsFactory()
    builder = Dfs1Builder.Create(title, "mikeio", 0)

    # Set up the header
    builder.SetDataType(0)
    builder.SetGeographicalProjection(
        factory.CreateProjectionGeoOrigin(coordinate[0], coordinate[1],
                                          coordinate[2], coordinate[3]))
    builder.SetTemporalAxis(
        factory.CreateTemporalEqCalendarAxis(timeseries_unit,
                                             system_start_time, 0, dt))
    builder.SetSpatialAxis(
        factory.CreateAxisEqD1(eumUnit.eumUmeter, number_x, x0, length_x))

    for i in range(n_items):
        builder.AddDynamicItem(
            items[i].name,
            eumQuantity.Create(items[i].type, items[i].unit),
            DfsSimpleType.Float,
            DataValueType.Instantaneous,
        )

    try:
        builder.CreateFile(filename)
    except IOError:
        # fixed: message previously said "dfs2 file" although this creates dfs1
        print("cannot create dfs1 file: ", filename)

    dfs = builder.GetFile()
    deletevalue = dfs.FileInfo.DeleteValueFloat  # -1.0000000031710769e-30

    for i in range(n_time_steps):
        for item in range(n_items):
            # copy so the NaN -> deletevalue substitution does not
            # mutate the caller's input array
            d = data[item][i, :].copy()
            d[np.isnan(d)] = deletevalue
            darray = Array[System.Single](np.array(
                d.reshape(d.size, 1)[:, 0]))
            dfs.WriteItemTimeStepNext(0, darray)

    dfs.Close()
def write(
    self,
    filename,
    data,
    start_time=None,
    dt=1,
    items=None,
    dx=1,
    x0=0,
    coordinate=None,
    title=None,
):
    """
    Write a dfs1 file

    Parameters
    ----------
    filename: str
        Location to write the dfs1 file
    data: list[np.array]
        list of matrices, one for each item. Matrix dimension: t, x
    start_time: datetime, optional
        start datetime
    dt: float
        The time step in seconds.
    items: list[ItemInfo], optional
        List of ItemInfo corresponding to a variable types (ie. Water Level).
    coordinate:
        ['UTM-33', 12.4387, 55.2257, 327] for UTM, Long, Lat, North to Y orientation.
        Note: long, lat in decimal degrees
    x0:
        origin of the x-axis (original doc said "Lower right position" — TODO confirm)
    dx:
        length of each grid in the x direction (meters); if explicitly passed
        as None, falls back to self._dx and finally to 1
    title: str, optional
        title of the dfs file (can be blank)
    """
    # validates/normalizes shared arguments and stores them on self
    # (sets self._n_time_steps / self._n_items used below)
    self._write_handle_common_arguments(title, data, items, coordinate,
                                        start_time, dt)

    number_x = np.shape(data[0])[1]

    if dx is None:
        if self._dx is not None:
            dx = self._dx
        else:
            dx = 1

    if not all(np.shape(d)[1] == number_x for d in data):
        raise ValueError(
            "ERROR data matrices in the X dimension do not all match in the data list. "
            "Data is list of matices [t, x]")

    factory = DfsFactory()
    # NOTE(review): local `title` may still be None here (the default) —
    # confirm Dfs1Builder.Create accepts None, or whether the normalized
    # value stored by _write_handle_common_arguments should be used instead
    builder = Dfs1Builder.Create(title, "mikeio", 0)
    self._builder = builder
    self._factory = factory

    builder.SetSpatialAxis(
        factory.CreateAxisEqD1(eumUnit.eumUmeter, number_x, x0, dx))

    dfs = self._setup_header(filename)
    deletevalue = dfs.FileInfo.DeleteValueFloat  # -1.0000000031710769e-30

    for i in range(self._n_time_steps):
        for item in range(self._n_items):
            # copy so the NaN -> deletevalue substitution does not
            # mutate the caller's input array
            d = data[item][i, :].copy()
            d[np.isnan(d)] = deletevalue
            darray = to_dotnet_float_array(d)
            dfs.WriteItemTimeStepNext(0, darray)

    dfs.Close()