def write(self, filename, data):
    """Write data to a pre-created dfsu file.

    Parameters
    ----------
    filename: str
        full path and filename to existing dfsu file
    data: list[np.array]
        list of matrices. len(data) must equal the number of items in
        the dfsu. Each matrix must be of dimension (time, elements).
    """
    # Open the dfs file for writing
    dfs = DfsFileFactory.DfsGenericOpenEdit(filename)

    n_time_steps = dfs.FileInfo.TimeAxis.NumberOfTimeSteps
    n_items = safe_length(dfs.ItemInfo)
    deletevalue = dfs.FileInfo.DeleteValueFloat

    # Fail fast on shape mismatch instead of writing a corrupt/partial file
    if len(data) != n_items:
        dfs.Close()
        raise ValueError(f"Number of items in data must equal {n_items}")
    if data[0].shape[0] != n_time_steps:
        dfs.Close()
        raise ValueError(f"Number of timesteps in data must equal {n_time_steps}")

    for i in range(n_time_steps):
        for item in range(n_items):
            # copy the row: masking NaNs on the slice (a view) would
            # silently mutate the caller's arrays
            d = data[item][i, :].copy()
            d[np.isnan(d)] = deletevalue
            darray = Array[System.Single](np.array(d.reshape(d.size, 1)[:, 0]))
            # 0: time offset is taken from the file's own time axis
            dfs.WriteItemTimeStepNext(0, darray)

    dfs.Close()
def scale(infilename, outfilename, offset=0.0, factor=1.0):
    """Apply scaling to any dfs file

    Parameters
    ----------
    infilename: str
        full path to the input file
    outfilename: str
        full path to the output file
    offset: float, optional
        value to add to all items, default 0.0
    factor: float, optional
        value to multiply to all items, default 1.0
    """
    copyfile(infilename, outfilename)
    dfs = DfsFileFactory.DfsGenericOpenEdit(outfilename)

    n_time_steps = dfs.FileInfo.TimeAxis.NumberOfTimeSteps
    n_items = safe_length(dfs.ItemInfo)
    deletevalue = dfs.FileInfo.DeleteValueFloat

    for timestep in range(n_time_steps):
        for item in range(n_items):
            itemdata = dfs.ReadItemTimeStep(item + 1, timestep)
            d = to_numpy(itemdata.Data)
            time = itemdata.Time

            # mask delete values so they are not scaled along with real data
            d[d == deletevalue] = np.nan
            outdata = d * factor + offset
            # restore delete values before writing back
            outdata[np.isnan(outdata)] = deletevalue

            darray = to_dotnet_float_array(outdata)
            dfs.WriteItemTimeStep(item + 1, timestep, time, darray)

    # close the file so the edits are flushed
    dfs.Close()
def scale(infilename, outfilename, offset=0.0, factor=1.0, item_numbers=None, item_names=None):
    """Apply scaling to any dfs file

    Parameters
    ----------
    infilename: str
        full path to the input file
    outfilename: str
        full path to the output file
    offset: float, optional
        value to add to all items, default 0.0
    factor: float, optional
        value to multiply to all items, default 1.0
    item_numbers: list[int], optional
        Process only selected items, by number (0-based)
    item_names: list[str], optional
        Process only selected items, by name, takes precedence over item_numbers
    """
    copyfile(infilename, outfilename)
    dfs = DfsFileFactory.DfsGenericOpenEdit(outfilename)

    # Resolve the item selection: names take precedence, default is all items
    if item_names is not None:
        item_numbers = find_item(dfs, item_names)
    if item_numbers is None:
        item_numbers = list(range(safe_length(dfs.ItemInfo)))

    n_steps = dfs.FileInfo.TimeAxis.NumberOfTimeSteps
    delete_val = dfs.FileInfo.DeleteValueFloat

    for step in range(n_steps):
        for item_number in item_numbers:
            itemdata = dfs.ReadItemTimeStep(item_number + 1, step)
            t = itemdata.Time
            values = to_numpy(itemdata.Data)

            # exclude delete values from the scaling, then restore them
            values[values == delete_val] = np.nan
            result = values * factor + offset
            result[np.isnan(result)] = delete_val

            dfs.WriteItemTimeStep(item_number + 1, step, t, to_dotnet_float_array(result))

    dfs.Close()
def write(self, filename, data):
    """Write data to an existing dfs0 file.

    Parameters
    ----------
    filename: str
        file path to existing dfs0 file
    data: np.array
        data matrix of shape (n_timesteps, n_items); a 1D array is
        treated as a single item

    Raises
    ------
    Warning
        if the file does not exist
    ValueError
        if the shape of data does not match the file
    """
    if not path.exists(filename):
        raise Warning(f"filename - File does not Exist {filename}")

    try:
        dfs = DfsFileFactory.DfsGenericOpenEdit(filename)
    except IOError:
        # re-raise: continuing without an open file would fail later
        # with a confusing NameError on `dfs`
        print('cannot open', filename)
        raise

    delete_value = dfs.FileInfo.DeleteValueFloat

    n_items = len(dfs.ItemInfo)
    nt = dfs.FileInfo.TimeAxis.NumberOfTimeSteps

    if len(np.shape(data)) == 1:
        data = data.reshape(len(data), 1)

    # Make sure the data matches the dfs0 file; raising (rather than
    # printing and continuing) prevents writing a corrupt file
    if nt != data.shape[0]:
        dfs.Close()
        raise ValueError("Inconsistent data size. nt (row count) must be size " + str(nt))
    if n_items != data.shape[1]:
        dfs.Close()
        raise ValueError(
            "Inconsistent data size. number of items (column count) must be size " + str(n_items)
        )

    # replace NaN on a copy so the caller's array is not mutated
    data = np.where(np.isnan(data), delete_value, data)

    # Get the date times in seconds (from start) before rewriting
    t = []
    for i in range(nt):
        itemData = dfs.ReadItemTimeStep(1, i)
        newTime = DfsExtensions.TimeInSeconds(itemData, dfs.FileInfo.TimeAxis)
        t.append(newTime)

    dfs.Reset()

    # COPY OVER THE DATA, one (timestep, item) value at a time
    for it in range(nt):
        tit = System.Double(t[it])
        for ii in range(n_items):
            d = Array[System.Single](np.array([[data[it, ii]]]))
            dfs.WriteItemTimeStepNext(tit, d)

    dfs.Close()
def scale(
    infilename: str,
    outfilename: str,
    offset: float = 0.0,
    factor: float = 1.0,
    items: Union[List[str], List[int]] = None,
) -> None:
    """Apply scaling to any dfs file

    Parameters
    ----------
    infilename: str
        full path to the input file
    outfilename: str
        full path to the output file
    offset: float, optional
        value to add to all items, default 0.0
    factor: float, optional
        value to multiply to all items, default 1.0
    items: List[str] or List[int], optional
        Process only selected items, by number (0-based)
    """
    copyfile(infilename, outfilename)
    dfs = DfsFileFactory.DfsGenericOpenEdit(outfilename)

    item_numbers = _valid_item_numbers(dfs.ItemInfo, items)
    n_steps = dfs.FileInfo.TimeAxis.NumberOfTimeSteps
    delete_val = dfs.FileInfo.DeleteValueFloat

    for step in trange(n_steps, disable=not show_progress):
        for item_number in item_numbers:
            itemdata = dfs.ReadItemTimeStep(item_number + 1, step)
            t = itemdata.Time
            values = to_numpy(itemdata.Data)

            # keep delete values out of the arithmetic, then restore them
            values[values == delete_val] = np.nan
            scaled = values * factor + offset
            scaled[np.isnan(scaled)] = delete_val

            dfs.WriteItemTimeStep(item_number + 1, step, t, to_dotnet_float_array(scaled))

    dfs.Close()
def write(self, filename, data):
    """Overwrite the data in an existing dfs0 file.

    Parameters
    ----------
    filename: str
        Full path and filename to dfs0 to be modified.
    data: list[np.array]
        data to overwrite; one array (length n_timesteps) per item

    Raises
    ------
    Warning
        if the file does not exist
    Exception
        if the shape of data does not match the file
    """
    if not os.path.exists(filename):
        raise Warning("filename - File does not Exist %s", filename)

    try:
        dfs = DfsFileFactory.DfsGenericOpenEdit(filename)
    except IOError:
        # re-raise: continuing without an open file would fail later
        # with a confusing NameError on `dfs`
        print("cannot open", filename)
        raise

    delete_value = dfs.FileInfo.DeleteValueFloat

    n_items = len(dfs.ItemInfo)
    n_time_steps = dfs.FileInfo.TimeAxis.NumberOfTimeSteps

    # Makes sure the data to write to the file matches the dfs0 file
    if n_time_steps != data[0].shape[0]:
        raise Exception(
            f"Inconsistent data size. nt (row count) must be size {n_time_steps}"
        )

    if n_items != len(data):
        raise Exception(f"Number of items must be size {n_items}")

    # Keep the original time stamps (seconds from start)
    dfsdata = Dfs0Util.ReadDfs0DataDouble(dfs)
    t_seconds = [dfsdata[i, 0] for i in range(n_time_steps)]

    dfs.Reset()

    # stack to (nt, n_items); np.stack copies, so replacing NaN here
    # leaves the caller's arrays untouched
    data1 = np.stack(data, axis=1)
    data1[np.isnan(data1)] = delete_value
    Dfs0Util.WriteDfs0DataDouble(dfs, t_seconds, to_dotnet_array(data1))

    dfs.Close()
def diff(infilename_a: str, infilename_b: str, outfilename: str) -> None:
    """Calculate difference between two dfs files (a-b)

    Parameters
    ----------
    infilename_a: str
        full path to the first input file
    infilename_b: str
        full path to the second input file
    outfilename: str
        full path to the output file
    """
    copyfile(infilename_a, outfilename)

    dfs_i_a = DfsFileFactory.DfsGenericOpen(infilename_a)
    dfs_i_b = DfsFileFactory.DfsGenericOpen(infilename_b)
    dfs_o = DfsFileFactory.DfsGenericOpenEdit(outfilename)

    deletevalue = dfs_i_a.FileInfo.DeleteValueFloat

    n_time_steps = dfs_i_a.FileInfo.TimeAxis.NumberOfTimeSteps
    n_items = safe_length(dfs_i_a.ItemInfo)
    # TODO Add checks to verify identical structure of file a and b

    for timestep in trange(n_time_steps):
        for item in range(n_items):
            itemdata_a = dfs_i_a.ReadItemTimeStep(item + 1, timestep)
            d_a = to_numpy(itemdata_a.Data)
            d_a[d_a == deletevalue] = np.nan

            itemdata_b = dfs_i_b.ReadItemTimeStep(item + 1, timestep)
            d_b = to_numpy(itemdata_b.Data)
            d_b[d_b == deletevalue] = np.nan
            time = itemdata_a.Time

            outdata = d_a - d_b
            # write delete values, not NaN, back to the file
            outdata[np.isnan(outdata)] = deletevalue

            darray = to_dotnet_float_array(outdata)
            dfs_o.WriteItemTimeStep(item + 1, timestep, time, darray)

    dfs_i_a.Close()
    dfs_i_b.Close()
    dfs_o.Close()
def sum(infilename_a, infilename_b, outfilename):
    """Sum two dfs files (a+b)

    Parameters
    ----------
    infilename_a: str
        full path to the first input file
    infilename_b: str
        full path to the second input file
    outfilename: str
        full path to the output file
    """
    copyfile(infilename_a, outfilename)

    dfs_i_a = DfsFileFactory.DfsGenericOpen(infilename_a)
    dfs_i_b = DfsFileFactory.DfsGenericOpen(infilename_b)
    dfs_o = DfsFileFactory.DfsGenericOpenEdit(outfilename)

    deletevalue = dfs_i_a.FileInfo.DeleteValueFloat

    n_time_steps = dfs_i_a.FileInfo.TimeAxis.NumberOfTimeSteps
    n_items = safe_length(dfs_i_a.ItemInfo)
    # TODO Add checks to verify identical structure of file a and b

    for timestep in range(n_time_steps):
        for item in range(n_items):
            itemdata_a = dfs_i_a.ReadItemTimeStep(item + 1, timestep)
            d_a = to_numpy(itemdata_a.Data)
            # mask delete values so they are not summed as real data
            # (consistent with diff/scale in this module)
            d_a[d_a == deletevalue] = np.nan

            itemdata_b = dfs_i_b.ReadItemTimeStep(item + 1, timestep)
            d_b = to_numpy(itemdata_b.Data)
            d_b[d_b == deletevalue] = np.nan
            time = itemdata_a.Time

            outdata = d_a + d_b
            # write delete values, not NaN, back to the file
            outdata[np.isnan(outdata)] = deletevalue

            darray = to_dotnet_float_array(outdata)
            dfs_o.WriteItemTimeStep(item + 1, timestep, time, darray)

    dfs_i_a.Close()
    dfs_i_b.Close()
    dfs_o.Close()
def write(self, filename, data):
    """Overwrite a pre-created dfsu file.

    Parameters
    ----------
    filename: str
        full path and filename to existing dfsu file
    data: list[np.array]
        list of matrices. len(data) must equal the number of items in
        the dfsu. Each matrix must be of dimension (time, elements).

    Raises
    ------
    ValueError
        if the shape of data does not match the file
    """
    # Open the dfs file for writing
    dfs = DfsFileFactory.DfsGenericOpenEdit(filename)

    n_time_steps = dfs.FileInfo.TimeAxis.NumberOfTimeSteps
    n_items = safe_length(dfs.ItemInfo)

    if len(data) != n_items:
        dfs.Close()
        raise ValueError(
            "Number of items in data must equal number of items in the file"
        )

    if data[0].shape[0] != n_time_steps:
        dfs.Close()
        raise ValueError(
            "Number of timesteps in data must equal number of timesteps in the file"
        )

    deletevalue = dfs.FileInfo.DeleteValueFloat

    for i in range(n_time_steps):
        for item in range(n_items):
            # copy the row: masking NaNs on the slice (a view) would
            # silently mutate the caller's arrays
            d = data[item][i, :].copy()
            d[np.isnan(d)] = deletevalue
            darray = to_dotnet_float_array(d)
            # 0: time offset is taken from the file's own time axis
            dfs.WriteItemTimeStepNext(0, darray)

    dfs.Close()
def _validate_and_open_dfs(filename, data):
    """Open an existing dfs file for editing and validate that data fits it.

    Parameters
    ----------
    filename: str
        full path to an existing dfs file
    data: list[np.array]
        data intended to be written; one array per item, each with
        one row per time step

    Returns
    -------
    tuple
        (dfs, n_items, n_time_steps)

    Raises
    ------
    FileNotFoundError
        if the file does not exist
    IOError
        if the file cannot be opened
    ValueError
        if the shape of data does not match the file
    """
    if not os.path.exists(filename):
        raise FileNotFoundError(filename)

    try:
        dfs = DfsFileFactory.DfsGenericOpenEdit(filename)
    except IOError as err:
        # include the offending path (was a hard-coded "(unknown)"
        # placeholder) and keep the original cause chained
        raise IOError(f"Cannot open {filename}.") from err

    n_items = len(dfs.ItemInfo)
    n_time_steps = dfs.FileInfo.TimeAxis.NumberOfTimeSteps

    # Match the data to write to the existing dfs0 file
    if n_time_steps != data[0].shape[0]:
        raise ValueError(
            f"Inconsistent data size. Number of time steps (row count) is {data[0].shape[0]}. Expected {n_time_steps}."
        )

    if n_items != len(data):
        raise ValueError(f"The number of items is {len(data)}. Expected {n_items}.")

    return dfs, n_items, n_time_steps
def extract(infilename: str, outfilename: str, start=0, end=-1, items=None) -> None:
    """Extract timesteps and/or items to a new dfs file

    Parameters
    ----------
    infilename : str
        path to input dfs file
    outfilename : str
        path to output dfs file
    start : int, float, str or datetime, optional
        start of extraction as either step, relative seconds
        or datetime/str, by default 0 (start of file)
    end : int, float, str or datetime, optional
        end of extraction as either step, relative seconds
        or datetime/str, by default -1 (end of file)
    items : int, list(int), str, list(str), optional
        items to be extracted to new file

    Examples
    --------
    >>> extract('f_in.dfs0', 'f_out.dfs0', start='2018-1-1')
    >>> extract('f_in.dfs2', 'f_out.dfs2', end=-3)
    >>> extract('f_in.dfsu', 'f_out.dfsu', start=1800.0, end=3600.0)
    >>> extract('f_in.dfsu', 'f_out.dfsu', items=[2, 0])
    >>> extract('f_in.dfsu', 'f_out.dfsu', items="Salinity")
    >>> extract('f_in.dfsu', 'f_out.dfsu', end='2018-2-1 00:00', items="Salinity")
    """
    dfs_i = DfsFileFactory.DfsGenericOpenEdit(infilename)

    # Normalize the flexible start/end arguments into step indices and
    # relative seconds; file_start_new is the (possibly shifted) start
    # datetime of the output file, or None if the start is unchanged.
    file_start_new, start_step, start_sec, end_step, end_sec = _parse_start_end(
        dfs_i, start, end)
    # Resolve item names/indices to 0-based item numbers
    items = _valid_item_numbers(dfs_i.ItemInfo, items)

    # Create the output file with the same structure, restricted to the
    # selected items and the new start time
    dfs_o = _clone(infilename, outfilename, start_time=file_start_new, items=items)

    # Times written to the output are shifted so they remain relative to
    # the output file's (new) start datetime
    file_start_shift = 0
    if file_start_new is not None:
        file_start_orig = from_dotnet_datetime(
            dfs_i.FileInfo.TimeAxis.StartDateTime)
        file_start_shift = (file_start_new - file_start_orig).total_seconds()

    timestep_out = -1
    for timestep in range(start_step, end_step):
        for item_out, item in enumerate(items):
            itemdata = dfs_i.ReadItemTimeStep((item + 1), timestep)
            time_sec = itemdata.Time

            # Past the requested end: close both files and stop early
            if time_sec > end_sec:
                dfs_i.Close()
                dfs_o.Close()
                return

            if time_sec >= start_sec:
                # Advance the output step counter once per time step
                # (i.e. when handling the first selected item)
                if item == items[0]:
                    timestep_out = timestep_out + 1
                time_sec_out = time_sec - file_start_shift

                outdata = itemdata.Data
                dfs_o.WriteItemTimeStep((item_out + 1), timestep_out,
                                        time_sec_out, outdata)

    dfs_i.Close()
    dfs_o.Close()
def write(self, filename, data):
    """Overwrite the data in an existing dfs0 file.

    Parameters
    ----------
    filename: str
        Full path and filename to dfs0 to be modified.
    data: list[np.array]
        data to overwrite; one array (length n_timesteps) per item

    Raises
    ------
    Warning
        if the file does not exist
    Exception
        if the shape of data does not match the file
    """
    if not os.path.exists(filename):
        raise Warning("filename - File does not Exist %s", filename)

    try:
        dfs = DfsFileFactory.DfsGenericOpenEdit(filename)
    except IOError:
        # re-raise: continuing without an open file would fail later
        # with a confusing NameError on `dfs`
        print("cannot open", filename)
        raise

    delete_value = dfs.FileInfo.DeleteValueFloat

    n_items = len(dfs.ItemInfo)
    n_time_steps = dfs.FileInfo.TimeAxis.NumberOfTimeSteps

    # Makes sure the data to write to the file matches the dfs0 file
    if n_time_steps != data[0].shape[0]:
        raise Exception(
            f"Inconsistent data size. nt (row count) must be size {n_time_steps}"
        )

    if n_items != len(data):
        raise Exception(f"Number of items must be size {n_items}")

    # replace NaN with the file delete value on copies, leaving the
    # caller's arrays untouched
    data = [np.where(np.isnan(d), delete_value, d) for d in data]

    # Keep the original time stamps (seconds from start)
    dfsdata = Dfs0Util.ReadDfs0DataDouble(dfs)
    t = [dfsdata[it, 0] for it in range(n_time_steps)]

    dfs.Reset()

    # COPY OVER THE DATA, one (timestep, item) record at a time
    for it in range(n_time_steps):
        for ii in range(n_items):
            d = Array[System.Single](np.array(data[ii][it:it + 1]))
            dfs.WriteItemTimeStepNext(t[it], d)

    dfs.Close()