Example #1
    def _load_file(self):
        """Load the file."""
        if not os.path.exists(self.file_path):
            raise FileNotFoundError(f"File {self.file_path} does not exist.")
        self.file = ResultData()
        self.file.Connection = Connection.Create(self.file_path)
        self.file.Load()
        self._closed = False
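
Examples #1 through #5 are methods excerpted from larger classes and assume that the MIKE 1D .NET assemblies have already been loaded through pythonnet. A minimal setup sketch for running such snippets, assuming the DHI assemblies are installed; the assembly names match those used in Examples #6 and #10, while placing Diagnostics in DHI.Mike1D.Generic is an assumption:

# Setup sketch assumed by the method excerpts above (not part of the original sources).
import os
import clr

clr.AddReference("DHI.Mike1D.ResultDataAccess")
clr.AddReference("DHI.Mike1D.Generic")

from DHI.Mike1D.ResultDataAccess import ResultData, ResultDataQuery
from DHI.Mike1D.Generic import Connection, Diagnostics  # Diagnostics namespace assumed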
Example #2
    def _load_file(self):
        if not os.path.exists(self.file_path):
            raise FileNotFoundError(f"File {self.file_path} does not exist.")

        self._data = ResultData()
        self._data.Connection = Connection.Create(self.file_path)
        self._data.Load(Diagnostics())
        self._query = ResultDataQuery(self._data)
Example #3
    def __read(file_path):
        """
        Read the res1d file
        """
        if not os.path.exists(file_path):
            raise FileNotFoundError(f"File does not exist {file_path}")

        file = ResultData()
        file.Connection = Connection.Create(file_path)
        file.Load()
        return file
Example #4
    def __init__(self, filename, useFilter=None, outputDataItem=True):
        # Load result file
        self.diagnostics = Diagnostics("Loading file")
        self.resultData = ResultData()
        self.resultData.Connection = Connection.Create(filename)
        self.useFilter = useFilter
        self.outputDataItem = outputDataItem

        if useFilter:
            self.SetupFilter()
        else:
            self.Load()

        # Searcher is helping to find reaches, nodes and catchments
        self.searcher = ResultDataSearch(self.resultData)
Example #5
    def _load_header(self):
        if not os.path.exists(self.file_path):
            raise FileNotFoundError(f"File {self.file_path} does not exist.")

        self._data = ResultData()
        self._data.Connection = Connection.Create(self.file_path)
        self._diagnostics = Diagnostics("Loading header")

        if self._lazy_load:
            self._data.Connection.BridgeName = "res1dlazy"

        if self._use_filter:
            self._data.LoadHeader(True, self._diagnostics)
        else:
            self._data.LoadHeader(self._diagnostics)
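
Example #5 only reads the file header; the data itself is loaded in a second step. A rough sketch of that follow-up call, modelled on the filter handling shown in Examples #7 and #8 (the method name _load_data is hypothetical; Filter and DataItemFilterName are the classes those examples use):

    def _load_data(self):
        # Hypothetical companion to _load_header above: attach a name filter
        # (as in Examples #7/#8), then load only the selected data.
        if self._use_filter:
            data_filter = Filter()
            data_filter.AddDataItemFilter(DataItemFilterName(self._data))
            self._data.Parameters.Filter = data_filter
            self._data.LoadData(self._diagnostics)
        else:
            self._data.Load(self._diagnostics)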
Example #6
# Load a result file supported by MIKE 1D and convert it to a res1d file. This example is converting
# a MOUSE RR file (.crf) to res1d.

import sys
import clr

# The SetupLatest method will make your script find the latest MIKE assemblies at runtime.
clr.AddReference(
    "DHI.Mike.Install, Version=1.0.0.0, Culture=neutral, PublicKeyToken=c513450b5d0bf0bf"
)
from DHI.Mike.Install import MikeImport, MikeProducts
MikeImport.SetupLatest()
print('Found MIKE in: ' + MikeImport.ActiveProduct().InstallRoot)

clr.AddReference("DHI.Mike1D.ResultDataAccess")
clr.AddReference("DHI.Mike1D.Generic")
from DHI.Mike1D.ResultDataAccess import ResultData, ResultDataSearch
from DHI.Mike1D.Generic import Connection

resultData = ResultData()
resultData.Connection = Connection.Create("DemoBase.crf")
resultData.Load()

# For crf files, set Type to "" if None (null) - work-around for a bug
for c in resultData.Catchments:
    if (c.Type is None):
        c.Type = ""

resultData.Connection = Connection.Create("DemoBase-crf.res1d")
resultData.Save()
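
The same load/save round trip can be wrapped in a small helper so that other files are converted with a single call. A sketch using only the calls shown above; the function name and file paths are placeholders:

def convert_to_res1d(input_path, output_path):
    # Hypothetical helper: the load/save round trip from above, parameterised on file paths.
    resultData = ResultData()
    resultData.Connection = Connection.Create(input_path)
    resultData.Load()

    # Work-around for catchment Type being null in some files (see above).
    for c in resultData.Catchments:
        if c.Type is None:
            c.Type = ""

    resultData.Connection = Connection.Create(output_path)
    resultData.Save()

convert_to_res1d("DemoBase.crf", "DemoBase-crf.res1d")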
Example #7
class ResultFinder(object):
    """Class storing a Mike1D data item and a corresponding element index"""
    def __init__(self, filename, useFilter=None, outputDataItem=True):
        # Load result file
        self.diagnostics = Diagnostics("Loading file")
        self.resultData = ResultData()
        self.resultData.Connection = Connection.Create(filename)
        self.useFilter = useFilter
        self.outputDataItem = outputDataItem

        if useFilter:
            self.SetupFilter()
        else:
            self.Load()

        # Searcher is helping to find reaches, nodes and catchments
        self.searcher = ResultDataSearch(self.resultData)

    def SetupFilter(self):
        """
        Setup the filter for result data object.
        """
        if not self.useFilter:
            return

        self.resultData.LoadHeader(True, self.diagnostics)

        self.dataFilter = Filter()
        self.dataSubFilter = DataItemFilterName(self.resultData)
        self.dataFilter.AddDataItemFilter(self.dataSubFilter)

        self.resultData.Parameters.Filter = self.dataFilter

    def Load(self):
        """
        Load the data from the result file into memory
        """
        if self.useFilter:
            self.resultData.LoadData(self.diagnostics)
        else:
            self.resultData.Load(self.diagnostics)

    def AddLocation(self, locationType, locationId):
        if locationType == LocationType.REACH:
            self.AddReach(locationId)

        if locationType == LocationType.NODE:
            self.AddNode(locationId)

        if locationType == LocationType.CATCHMENT:
            self.AddCatchment(locationId)

    def AddReach(self, reachId):
        self.dataSubFilter.Reaches.Add(reachId)

    def AddNode(self, nodeId):
        self.dataSubFilter.Nodes.Add(nodeId)

    def AddCatchment(self, catchmentId):
        self.dataSubFilter.Catchments.Add(catchmentId)

    def FindQuantity(self, dataSet, quantityId):
        """
        Find a given quantity from an IRes1DDataSet
        """
        numItems = dataSet.DataItems.Count
        dataItems = list(dataSet.DataItems)
        for j in range(numItems):
            dataItem = dataItems[j]
            if StringComparer.OrdinalIgnoreCase.Equals(dataItem.Quantity.Id,
                                                       quantityId):
                return dataItem
        return None

    def FindQuantityInLocation(self,
                               locationType,
                               quantityId,
                               locationId,
                               chainage=Constants.ALL_CHAINAGES):
        data = None

        if locationType == LocationType.REACH:
            if chainage == Constants.ALL_CHAINAGES:
                data = self.FindReachQuantityAllChainages(
                    quantityId, locationId)
            else:
                data = self.FindReachQuantity(quantityId, locationId, chainage)

        if locationType == LocationType.NODE:
            data = self.FindNodeQuantity(quantityId, locationId)

        if locationType == LocationType.CATCHMENT:
            data = self.FindCatchmentQuantity(quantityId, locationId)

        return data

    def FindReachQuantityAllChainages(self, quantityId, reachId):
        # There can be more than one reach with this reachId, check all
        reaches = self.searcher.FindReaches(reachId)
        if reaches.Count == 0:
            print("Could not find reach '%s'" % (reachId))
            return None

        dataEntries = []
        # All elements of all reaches having that quantity
        for reach in reaches:
            dataItem = self.FindQuantity(reach, quantityId)
            if dataItem is None:
                continue

            for j in range(dataItem.NumberOfElements):
                dataEntry = self.ConvertDataItemElementToList(dataItem, j)
                dataEntries.append(dataEntry)

        if len(dataEntries) == 0:
            print("Could not find quantity '%s' on reach '%s'." %
                  (quantityId, reachId))

        return dataEntries

    def FindReachQuantity(self, quantityId, reachId, chainage):
        """
        Find a given quantity on the reach with the given reachId.
        The grid point closest to the given chainage is used.
        """
        # There can be more than one reach with this reachId, check all
        reaches = self.searcher.FindReaches(reachId)
        if reaches.Count == 0:
            print("Could not find reach '%s'" % (reachId))
            return None

        # Find grid point closest to given chainage
        minDist = 999999
        minDataItem = None
        minElmtIndex = -1
        for reach in reaches:
            dataItem = self.FindQuantity(reach, quantityId)
            if dataItem is not None:
                # Loop over all grid points in reach dataItem
                for j in range(dataItem.NumberOfElements):
                    indexList = list(dataItem.IndexList)
                    gridPoints = list(reach.GridPoints)
                    dist = abs(gridPoints[indexList[j]].Chainage - chainage)
                    if dist < minDist:
                        minDist = dist
                        minDataItem = dataItem
                        minElmtIndex = j

        if minDataItem is None:
            print("Could not find quantity '%s' on reach '%s'." %
                  (quantityId, reachId))

        return [self.ConvertDataItemElementToList(minDataItem, minElmtIndex)]

    def FindNodeQuantity(self, quantityId, nodeId):
        """
        Find a given quantity on the node with the given nodeId
        """
        node = self.searcher.FindNode(nodeId)

        if node is None:
            print("Could not find node '%s'" % (nodeId))
            return None

        dataItem = self.FindQuantity(node, quantityId)
        if dataItem is None:
            print("Could not find quantity '%s' in node '%s'." %
                  (quantityId, nodeId))

        return [self.ConvertDataItemElementToList(dataItem, 0)]

    def FindCatchmentQuantity(self, quantityId, catchId):
        """
        Find a given quantity on the catchment with the given catchId
        """
        catchment = self.searcher.FindCatchment(catchId)
        if catchment is None:
            print("Could not find catchment '%s'" % (catchId))
            return None

        dataItem = self.FindQuantity(catchment, quantityId)
        if dataItem is None:
            print("Could not find quantity '%s' in catchment '%s'." %
                  (quantityId, catchId))

        return [self.ConvertDataItemElementToList(dataItem, 0)]

    def PrintAllQuantities(self):
        """
        Print out all quantity IDs available in the result data.
        """
        print("Available quantity IDs:")
        resultData = self.resultData
        quantities = list(resultData.Quantities)
        for quantity in quantities:
            print("  %s" % (quantity.Id))

    def PrintQuantities(self, locationType, locationId, chainage=0):
        dataSet = None

        if locationType == LocationType.REACH:
            dataSet = self.searcher.FindReaches(locationId)[0]

        if locationType == LocationType.NODE:
            dataSet = self.searcher.FindNode(locationId)

        if locationType == LocationType.CATCHMENT:
            dataSet = self.searcher.FindCatchment(locationId)

        if dataSet is None:
            return

        dataItems = list(dataSet.DataItems)
        for dataItem in dataItems:
            print("'%s'" % dataItem.Quantity.Id)

    def PrintAllLocations(self, locationType):
        if locationType == LocationType.NODE:
            self.PrintAllNodes()

        if locationType == LocationType.REACH:
            self.PrintAllReaches()

        if locationType == LocationType.CATCHMENT:
            self.PrintAllCatchments()

    def PrintAllReaches(self):
        resultData = self.resultData
        for j in range(resultData.Reaches.Count):
            reach = list(resultData.Reaches)[j]
            gridPoints = list(reach.GridPoints)
            startChainage = gridPoints[0].Chainage
            endChainage = gridPoints[-1].Chainage
            print("'%-30s (%9.2f - %9.2f)'" %
                  (reach.Name, startChainage, endChainage))

    def PrintAllNodes(self):
        resultData = self.resultData
        for j in range(resultData.Nodes.Count):
            node = list(resultData.Nodes)[j]
            print("'%s'" % node.Id)

    def PrintAllCatchments(self):
        resultData = self.resultData
        for j in range(resultData.Catchments.Count):
            catchment = list(resultData.Catchments)[j]
            print("'%s'" % catchment.Id)

    def ConvertDataItemElementToList(self, dataItem, elementIndex):
        """
        Convert dataItem element to list of numbers.
        """
        if self.outputDataItem:
            return DataEntry(dataItem, elementIndex)

        if dataItem is None:
            return None

        data = []
        for j in range(dataItem.NumberOfTimeSteps):
            data.append(dataItem.GetValue(j, elementIndex))
        return data

    def GetTimes(self, toTicks=True):
        """
        Get a list of times
        """
        timesList = list(self.resultData.TimesList)
        if toTicks:
            return list(map(lambda x: x.Ticks, timesList))
        else:
            return timesList
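
A hedged usage sketch for the ResultFinder class above. LocationType, Constants and DataEntry are referenced by the class but not shown in this excerpt, so those names, the file name, the reach id and the quantity id below are assumptions for illustration:

# Hypothetical usage; file, reach and quantity names are placeholders, and
# LocationType is assumed to be defined alongside ResultFinder.
finder = ResultFinder("DemoBase.res1d", useFilter=None, outputDataItem=False)
finder.PrintAllQuantities()
finder.PrintAllLocations(LocationType.REACH)

# List holding the time series at the grid point closest to chainage 100 on 'reach1'.
series = finder.FindQuantityInLocation(LocationType.REACH, "WaterLevel", "reach1", chainage=100.0)
times = finder.GetTimes(toTicks=False)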
Example #8
class Res1D:
    def __init__(self,
                 file_path=None,
                 lazy_load=False,
                 header_load=False,
                 reaches=None,
                 nodes=None,
                 catchments=None,
                 col_name_delimiter=NAME_DELIMITER,
                 put_chainage_in_col_name=True):

        self.file_path = file_path
        self._lazy_load = lazy_load

        self._reaches = reaches if reaches else []
        self._nodes = nodes if nodes else []
        self._catchments = catchments if catchments else []

        self._use_filter = (reaches is not None or nodes is not None
                            or catchments is not None)

        self._time_index = None
        self._start_time = None
        self._end_time = None

        self._queries = []

        self._load_header()
        if not header_load:
            self._load_file()

        self._col_name_delimiter = col_name_delimiter
        self._put_chainage_in_col_name = put_chainage_in_col_name

    def __repr__(self):
        out = ["<mikeio1d.Res1D>"]

        if self.file_path:
            out.append(f"Start time: {str(self.start_time)}")
            out.append(f"End time: {str(self.end_time)}")
            out.append(f"# Timesteps: {str(self.data.NumberOfTimeSteps)}")
            out.append(f"# Catchments: {self.data.Catchments.get_Count()}")
            out.append(f"# Nodes: {self.data.Nodes.get_Count()}")
            out.append(f"# Reaches: {self.data.Reaches.get_Count()}")

            out.append(f"# Globals: {self.data.GlobalData.DataItems.Count}")
            for i, quantity in enumerate(self.data.Quantities):
                out.append(
                    f"{i} - {quantity.Id} <{quantity.EumQuantity.UnitAbbreviation}>"
                )

        return str.join("\n", out)

    #region File loading

    def _load_header(self):
        if not os.path.exists(self.file_path):
            raise FileNotFoundError(f"File {self.file_path} does not exist.")

        self._data = ResultData()
        self._data.Connection = Connection.Create(self.file_path)
        self._diagnostics = Diagnostics("Loading header")

        if self._lazy_load:
            self._data.Connection.BridgeName = "res1dlazy"

        if self._use_filter:
            self._data.LoadHeader(True, self._diagnostics)
        else:
            self._data.LoadHeader(self._diagnostics)

    def _load_file(self):

        if self._use_filter:
            self._setup_filter()

            for reach in self._reaches:
                self._add_reach(reach)
            for node in self._nodes:
                self._add_node(node)
            for catchment in self._catchments:
                self._add_catchment(catchment)

            self._data.LoadData(self._diagnostics)
        else:
            self._data.Load(self._diagnostics)

        self._query = ResultDataQuery(self._data)

    def _setup_filter(self):
        """
        Setup the filter for result data object.
        """
        if not self._use_filter:
            return

        self._data_filter = Filter()
        self._data_subfilter = DataItemFilterName(self._data)
        self._data_filter.AddDataItemFilter(self._data_subfilter)

        self._data.Parameters.Filter = self._data_filter

    def _add_reach(self, reach_id):
        self._data_subfilter.Reaches.Add(reach_id)

    def _add_node(self, node_id):
        self._data_subfilter.Nodes.Add(node_id)

    def _add_catchment(self, catchment_id):
        self._data_subfilter.Catchments.Add(catchment_id)

    #endregion File loading

    def read(self, queries=None):
        """
        Read loaded .res1d file data.

        Parameters
        ----------
        queries: A single query or a list of queries.
        Default is None = reads all data.
        """

        if queries is None:
            return self.read_all()

        queries = queries if isinstance(queries, list) else [queries]

        dfs = []
        for query in queries:
            df = pd.DataFrame(index=self.time_index)
            df[str(query)] = query.get_values(self)
            dfs.append(df)

        return pd.concat(dfs, axis=1)

    def read_all(self):
        """ Read all data from res1d file to dataframe. """

        dfs = []
        for data_set in self.data.DataSets:

            # Skip filtered data sets
            name = Res1D.get_data_set_name(data_set)
            if self._use_filter and name not in self._catchments + self._reaches + self._nodes:
                continue

            for data_item in data_set.DataItems:
                values_name_pair = self.get_values(data_set, data_item)

                for values, col_name in values_name_pair:
                    df = pd.DataFrame(index=self.time_index)
                    df[col_name] = values
                    dfs.append(df)

        return pd.concat(dfs, axis=1)

    def get_values(self, data_set, data_item):
        """ Get all time series values in given data_item. """
        if data_item.IndexList is None:
            return self.get_scalar_value(data_set, data_item)
        else:
            return self.get_vector_values(data_set, data_item)

    def get_scalar_value(self, data_set, data_item):
        name = Res1D.get_data_set_name(data_set)
        quantity_id = data_item.Quantity.Id
        col_name = self._col_name_delimiter.join([quantity_id, name])
        element_index = 0

        yield data_item.CreateTimeSeriesData(element_index), col_name

    def get_vector_values(self, data_set, data_item):
        name = Res1D.get_data_set_name(data_set)
        chainages = data_set.GetChainages(data_item)

        for i in range(data_item.NumberOfElements):
            quantity_id = data_item.Quantity.Id
            postfix = (f"{chainages[i]:g}"
                       if self._put_chainage_in_col_name else str(i))
            col_name_i = self._col_name_delimiter.join(
                [quantity_id, name, postfix])

            yield data_item.CreateTimeSeriesData(i), col_name_i

    @staticmethod
    def get_data_set_name(data_set):
        name = data_set.Name if hasattr(data_set, "Name") else data_set.Id
        name = "" if name is None else name
        return name

    @property
    def time_index(self):
        """ pandas.DatetimeIndex of the time index. """
        if self._time_index is not None:
            return self._time_index

        time_stamps = [from_dotnet_datetime(t) for t in self.data.TimesList]
        self._time_index = pd.DatetimeIndex(time_stamps)
        return self._time_index

    @property
    def start_time(self):
        if self._start_time is not None:
            return self._start_time

        return from_dotnet_datetime(self.data.StartTime)

    @property
    def end_time(self):
        if self._end_time is not None:
            return self._end_time

        return from_dotnet_datetime(self.data.EndTime)

    @property
    def quantities(self):
        """ Quantities in res1d file. """
        return [quantity.Id for quantity in self._data.Quantities]

    @property
    def query(self):
        """
        .NET object ResultDataQuery to use for querying the loaded res1d data.

        For more information about the ResultDataQuery class, see:
        https://manuals.mikepoweredbydhi.help/latest/General/Class_Library/DHI_MIKE1D/html/T_DHI_Mike1D_ResultDataAccess_ResultDataQuery.htm
        """
        return self._query

    @property
    def data(self):
        """
        .NET object ResultData with the loaded res1d data.

        For more information about the ResultData class, see:
        https://manuals.mikepoweredbydhi.help/latest/General/Class_Library/DHI_MIKE1D/html/T_DHI_Mike1D_ResultDataAccess_ResultData.htm
        """
        return self._data

    @property
    def catchments(self):
        """ Catchments in res1d file. """
        return {
            Res1D.get_data_set_name(catchment): catchment
            for catchment in self._data.Catchments
        }

    @property
    def reaches(self):
        """ Reaches in res1d file. """
        return {
            Res1D.get_data_set_name(reach): reach
            for reach in self._data.Reaches
        }

    @property
    def nodes(self):
        """ Nodes in res1d file. """
        return {
            Res1D.get_data_set_name(node): node
            for node in self._data.Nodes
        }

    @property
    def global_data(self):
        """ Global data items in res1d file. """
        return {
            Res1D.get_data_set_name(gdat): gdat
            for gdat in self._data.GlobalData.DataItems
        }

    #region Query wrappers

    def get_catchment_values(self, catchment_id, quantity):
        return to_numpy(self.query.GetCatchmentValues(catchment_id, quantity))

    def get_node_values(self, node_id, quantity):
        return to_numpy(self.query.GetNodeValues(node_id, quantity))

    def get_reach_values(self, reach_name, chainage, quantity):
        return to_numpy(
            self.query.GetReachValues(reach_name, chainage, quantity))

    def get_reach_value(self, reach_name, chainage, quantity, time):
        time_dotnet = time if isinstance(
            time, DateTime) else to_dotnet_datetime(time)
        return self.query.GetReachValue(reach_name, chainage, quantity,
                                        time_dotnet)

    def get_reach_start_values(self, reach_name, quantity):
        return to_numpy(self.query.GetReachStartValues(reach_name, quantity))

    def get_reach_end_values(self, reach_name, quantity):
        return to_numpy(self.query.GetReachEndValues(reach_name, quantity))

    def get_reach_sum_values(self, reach_name, quantity):
        return to_numpy(self.query.GetReachSumValues(reach_name, quantity))
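
A short usage sketch for this Res1D class; the file path and the reach/node names are placeholders:

# Hypothetical usage; file path and location names are placeholders.
res = Res1D("DemoBase.res1d")
df_all = res.read()          # every time series as a pandas DataFrame

# Load only selected locations by filtering at construction time.
res_filtered = Res1D("DemoBase.res1d", reaches=["reach1"], nodes=["node1"])
df = res_filtered.read()

# The query wrappers return numpy arrays of time series values.
water_levels = res.get_reach_values("reach1", 100.0, "WaterLevel")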
Example #9
class Res1D:
    def __init__(self, file_path=None):
        self.file_path = file_path
        self.file = None
        self._closed = True
        self._time_index = None
        self._data_types = None
        self._reach_names = None
        self.__reaches = None
        # Load the file on initialization
        self._load_file()

    def _load_file(self):
        """Load the file."""
        if not os.path.exists(self.file_path):
            raise FileNotFoundError(f"File {self.file_path} does not exist.")
        self.file = ResultData()
        self.file.Connection = Connection.Create(self.file_path)
        self.file.Load()
        self._closed = False

    def close(self):
        """Close the file handle."""
        self.file.Dispose()
        self._closed = True

    def __enter__(self):
        return self

    def __exit__(self, *excinfo):
        self.close()

    @property
    @_not_closed
    def data_types(self):
        """List of the data types"""
        if self._data_types:
            return self._data_types
        return [q.Id for q in self.file.get_Quantities()]

    @property
    def _reaches(self):
        if self.__reaches:
            return self.__reaches
        return list(self.file.Reaches)

    @property
    @_not_closed
    def reach_names(self):
        """A list of the reach names"""
        if self._reach_names:
            return self._reach_names
        return [reach.Name for reach in self._reaches]

    @staticmethod
    def _chainages(reach, data_type_idx):
        """Generates chainages given a reach object and a data_type_idx"""
        data_item = list(reach.DataItems)[data_type_idx]
        index_list = list(data_item.IndexList)
        gridpoints = list(reach.GridPoints)
        gridpoints_filtered = [gridpoints[i] for i in index_list]
        for gp in gridpoints_filtered:
            yield float(gp.Chainage)

    @staticmethod
    def _data_types_reach(reach):
        """A list of the data types IDs contained in a reach."""
        return [di.get_Quantity().Id for di in list(reach.get_DataItems())]

    @property
    @_not_closed
    def time_index(self):
        """panda.DatetimeIndex of the time index"""
        if self._time_index:
            return self._time_index
        time_stamps = []
        for t in self.file.TimesList:
            time_stamps.append(
                pd.Timestamp(
                    year=t.get_Year(),
                    month=t.get_Month(),
                    day=t.get_Day(),
                    hour=t.get_Hour(),
                    minute=t.get_Minute(),
                    second=t.get_Second()
                )
            )
        self._time_index = pd.DatetimeIndex(time_stamps)
        return self._time_index

    def _get_values(self, points):
        df = pd.DataFrame()
        p = zip(points["variable"], points["reach"], points["chainage"])
        for variable_type, reach, chainage in p:
            d = (self.file.Reaches.get_Item(reach.index)
                 .get_DataItems()
                 .get_Item(variable_type.index)
                 .CreateTimeSeriesData(chainage.index))
            name = f"{variable_type.value} {reach.value} {chainage.value:.3f}"
            d = pd.Series(list(d), name=name)
            df[name] = d
        return df

    def _get_data(self, points):
        df = self._get_values(points)
        df.index = self.time_index
        return df

    def _validate_queries(self, queries, chainage_tolerance=0.1):
        """Check whether the queries point to existing data in the file."""
        for q in queries:
            # Raise an error if the data type is not found globally
            if q.variable_type not in self.data_types:
                raise DataNotFoundInFile(
                    f"Data type '{q.variable_type}' was not found.")
            if q.reach_name is not None:
                if q.reach_name not in self.reach_names:
                    raise DataNotFoundInFile(
                        f"Reach '{q.reach_name}' was not found.")
            if q.chainage is not None:
                found_chainage = False
                for reach in self._reaches:
                    if found_chainage:
                        break
                    # Look for the targeted reach
                    if q.reach_name != reach.Name:
                        continue
                    # Raise an error if the data type isn't found in this reach
                    data_types_in_reach = self._data_types_reach(reach)
                    if q.variable_type not in data_types_in_reach:
                        raise DataNotFoundInFile(
                            f"Data type '{q.variable_type}' was not found.")
                    data_type_idx = data_types_in_reach.index(q.variable_type)
                    for chainage in self._chainages(reach, data_type_idx):
                        # Look for the targeted chainage
                        chainage_diff = chainage - q.chainage
                        if abs(chainage_diff) < chainage_tolerance:
                            found_chainage = True
                            break
                if not found_chainage:
                    raise DataNotFoundInFile(
                        f"Chainage {q.chainage} was not found.")

    def _build_queries(self, queries):
        """"
        A query can be in an undefined state if reach_name and/or chainage
        isn't set. This function takes care of building lists of queries
        for these cases. Chainages are rounded to three decimal places.

        >>> self._build_queries([QueryData("WaterLevel", "reach1")])
        [
            QueryData("WaterLevel", "reach1", 0),
            QueryData("WaterLevel", "reach1", 10)
        ]
        """
        built_queries = []
        for q in queries:
            # e.g. QueryData("WaterLevel", "reach1", 1)
            if q.reach_name and q.chainage:
                built_queries.append(q)
                continue
            # e.g QueryData("WaterLevel", "reach1") or QueryData("WaterLevel")
            q_variable_type = q.variable_type
            q_reach_name = q.reach_name
            for reach, reach_name in zip(self._reaches, self.reach_names):
                if q_reach_name is not None:  # When reach_name is set.
                    if reach_name != q_reach_name:
                        continue
                data_types_in_reach = self._data_types_reach(reach)
                if q.variable_type not in data_types_in_reach:
                    continue
                data_type_idx = data_types_in_reach.index(q.variable_type)
                for curr_chain in self._chainages(reach, data_type_idx):
                    if q_variable_type in DATA_TYPES_HANDLED_IN_QUERIES:
                        chainage = curr_chain
                    else:
                        continue

                    q = QueryData(
                        q_variable_type, reach_name, round(chainage, 3)
                    )
                    built_queries.append(q)
        return built_queries

    def _find_points(self, queries, chainage_tolerance=0.1):
        """From a list of queries returns a dictionary with the required
        information for each requested point to extract its time series
        later on."""

        PointInfo = namedtuple('PointInfo', ['index', 'value'])

        found_points = defaultdict(list)
        # Find the point given its variable type, reach, and chainage
        for q in queries:
            for reach_idx, curr_reach in enumerate(self._reaches):
                # Look for the targeted reach
                if q.reach_name != curr_reach.Name:
                    continue
                reach = PointInfo(reach_idx, q.reach_name)
                for data_type_idx, data_type in enumerate(self.data_types):
                    if q.variable_type.lower() == data_type.lower():
                        break
                data_type_info = PointInfo(data_type_idx, q.variable_type)
                for idx, curr_chain in enumerate(self._chainages(curr_reach, data_type_idx)):
                    # Look for the targeted chainage
                    chainage_diff = curr_chain - q.chainage
                    is_chainage = abs(chainage_diff) < chainage_tolerance
                    if not is_chainage:
                        continue
                    # idx is the index in the item data, not in the
                    # gridpoints data.
                    chainage = PointInfo(idx, q.chainage)
                    found_points["chainage"].append(chainage)
                    found_points["variable"].append(data_type_info)
                    found_points["reach"].append(reach)
                    break  # Break at the first chainage found.

        return dict(found_points)

    @_not_closed
    def read(self, queries):
        """Read the requested data from the res1d file and
        return a Pandas DataFrame.

        Parameters
        ----------
        queries: list
            `QueryData` objects that define the requested data.
        Returns
        -------
        pd.DataFrame
        """
        self._validate_queries(queries)
        built_queries = self._build_queries(queries)
        found_points = self._find_points(built_queries)
        df = self._get_data(found_points)
        return df
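
A usage sketch for this implementation. QueryData is not shown in the excerpt, but its constructor arguments (variable_type, reach_name, chainage) can be inferred from the _build_queries docstring; the file, reach and chainage values are placeholders:

# Hypothetical usage; QueryData is assumed to be defined next to this class.
queries = [
    QueryData("WaterLevel", "reach1", 10.0),
    QueryData("Discharge", "reach1"),   # no chainage given: expanded to all chainages on reach1
]
with Res1D("DemoBase.res1d") as res:
    df = res.read(queries)              # pandas DataFrame indexed by the file's time axis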
Example #10
def read(res1DFile, extractionPoints):
    import clr
    import pandas as pd
    import datetime
    import numpy as np
    import os.path

    clr.AddReference("DHI.Mike1D.ResultDataAccess")
    from DHI.Mike1D.ResultDataAccess import ResultData

    clr.AddReference("DHI.Mike1D.Generic")
    from DHI.Mike1D.Generic import Connection

    clr.AddReference("System")

    if not os.path.isfile(res1DFile):
        print("ERROR, File Not Found: " + res1DFile)

    # Create a ResultData object and read the data file.
    rd = ResultData()
    rd.Connection = Connection.Create(res1DFile)
    rd.Load()

    reachNums = []
    dataItemTypes = []
    indices = []

    tol = 0.1

    # Find the Item
    for ep in extractionPoints:
        item = -1
        reachNumber = -1
        idx = -1
        for i in range(0, rd.Reaches.Count):
            if rd.Reaches.get_Item(i).Name.lower().strip() == ep.BranchName.lower().strip():

                reach = rd.Reaches.get_Item(i)
                for j in range(0, reach.GridPoints.Count):
                    if abs(float(reach.GridPoints.get_Item(j).Chainage) - ep.Chainage) < tol:
                        if 'waterlevel' in ep.VariableType.lower().strip().replace(" ", ""):
                            idx = int(j / 2)
                        elif 'discharge' in ep.VariableType.lower().strip().replace(" ", ""):
                            idx = int((j - 1) / 2)
                        elif 'pollutant' in ep.VariableType.lower().strip().replace(" ", ""):
                            idx = int((j - 1) / 2)
                        else:
                            print('ERROR. Variable Type must be either Water Level, Discharge, or Pollutant')
                        reachNumber = i
                        break

        for i in range(0, rd.get_Quantities().Count):
            if ep.VariableType.lower().strip().replace(" ", "") == rd.get_Quantities().get_Item(
                    i).Description.lower().strip().replace(" ", ""):
                item = i
                break

        indices.append(idx)
        reachNums.append(reachNumber)
        dataItemTypes.append(item)

    if -1 in reachNums:
        print('ERROR. Reach Not Found')
        quit()
    if -1 in dataItemTypes:
        print('ERROR. Item Not Found')
        quit()
    if -1 in indices:
        print('ERROR. Chainage Not Found')
        quit()

    # Get the Data
    df = pd.DataFrame()
    for i in range(0, len(indices)):
        d = rd.Reaches.get_Item(reachNums[i]).get_DataItems().get_Item(dataItemTypes[i]).CreateTimeSeriesData(
            indices[i])
        name = extractionPoints[i].VariableType + ' ' + str(extractionPoints[i].BranchName) + ' ' + str(
            extractionPoints[i].Chainage)
        d = pd.Series(list(d))
        d = d.rename(name)
        df[name] = d

    # Get the Times
    times = []
    for i in range(0, rd.TimesList.Count):
        it = rd.TimesList.get_Item(i)
        t = pd.Timestamp(
            datetime.datetime(it.get_Year(), it.get_Month(), it.get_Day(), it.get_Hour(), it.get_Minute(),
                              it.get_Second()))
        times.append(t)

    df.index = pd.DatetimeIndex(times)

    rd.Dispose()

    return df
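
The read function expects extraction point objects exposing BranchName, Chainage and VariableType attributes. A sketch of a call, using a namedtuple as a stand-in for whatever extraction point class the original script defines; the file and location names are placeholders:

from collections import namedtuple

# Stand-in for the extraction point objects expected by read(); only the three
# attributes used above are required.
ExtractionPoint = namedtuple("ExtractionPoint", ["BranchName", "Chainage", "VariableType"])

points = [
    ExtractionPoint(BranchName="reach1", Chainage=100.0, VariableType="Water Level"),
    ExtractionPoint(BranchName="reach1", Chainage=100.0, VariableType="Discharge"),
]
df = read("DemoBase.res1d", points)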
Example #11
class Res1D:
    def __init__(self, file_path=None, put_chainage_in_col_name=False):
        self.file_path = file_path
        self._time_index = None
        self._start_time = None
        self._end_time = None
        self._put_chainage_in_col_name = put_chainage_in_col_name
        self._load_file()

    def _load_file(self):
        if not os.path.exists(self.file_path):
            raise FileNotFoundError(f"File {self.file_path} does not exist.")

        self._data = ResultData()
        self._data.Connection = Connection.Create(self.file_path)
        self._data.Load(Diagnostics())
        self._query = ResultDataQuery(self._data)

    def read(self, queries=None):
        """
        Read loaded .res1d file data.

        Parameters
        ----------
        queries: A single query or a list of queries.
        Default is None = reads all data.
        """
        if queries is None:
            return self.read_all()

        queries = queries if isinstance(queries, list) else [queries]

        df = pd.DataFrame(index=self.time_index)
        for query in queries:
            df[str(query)] = query.get_values(self)

        return df

    def read_all(self):
        """ Read all data from res1d file to dataframe. """
        df = pd.DataFrame(index=self.time_index)
        for data_set in self.data.DataSets:
            for data_item in data_set.DataItems:
                for values, col_name in Res1D.get_values(
                        data_set, data_item, NAME_DELIMITER,
                        self._put_chainage_in_col_name):
                    df[col_name] = values
        return df

    @staticmethod
    def get_values(data_set,
                   data_item,
                   col_name_delimiter=":",
                   put_chainage_in_col_name=True):
        """ Get all time series values in given data_item. """
        name = data_set.Name if hasattr(data_set, "Name") else data_set.Id
        if data_item.IndexList is None or data_item.NumberOfElements == 1:
            col_name = col_name_delimiter.join([data_item.Quantity.Id, name])
            yield data_item.CreateTimeSeriesData(0), col_name
        else:
            chainages = data_set.GetChainages(data_item)
            for i in range(0, data_item.NumberOfElements):
                if put_chainage_in_col_name:
                    postfix = f"{chainages[i]:g}"
                else:
                    postfix = str(i)

                col_name_i = col_name_delimiter.join(
                    [data_item.Quantity.Id, name, postfix])
                yield data_item.CreateTimeSeriesData(i), col_name_i

    @property
    def time_index(self):
        """ pandas.DatetimeIndex of the time index. """
        if self._time_index is not None:
            return self._time_index

        time_stamps = [from_dotnet_datetime(t) for t in self.data.TimesList]
        self._time_index = pd.DatetimeIndex(time_stamps)
        return self._time_index

    @property
    def start_time(self):
        if self._start_time is not None:
            return self._start_time

        return from_dotnet_datetime(self.data.StartTime)

    @property
    def end_time(self):
        if self._end_time is not None:
            return self._end_time

        return from_dotnet_datetime(self.data.EndTime)

    @property
    def quantities(self):
        """ Quantities in res1d file. """
        return [quantity.Id for quantity in self._data.Quantities]

    @property
    def query(self):
        """
        Object to use for querying the loaded res1d data.
        Returns a C# ResultDataQuery object that has the following methods
        //
        // Summary:
        //     Find element in dataItem that is closest to chainage
        public int FindClosestElement(IRes1DReach reach, double chainage, IDataItem dataItem);
        //
        // Summary:
        //     Find data item in dataSet which quantity matches the given quantityId.
        //     Returns null if none found
        public IDataItem FindDataItem(IRes1DDataSet dataSet, string quantityId);
        //
        // Summary:
        //     Get time series values for quantityId of catchment with id catchmentId. If catchment
        //     or quantity could not be found, null is returned.
        public float[] GetCatchmentValues(string catchmentId, string quantityId);
        //
        // Summary:
        //     Get result file datetimes.
        public DateTime[] GetDateTimes();
        //
        // Summary:
        //     Get result file datetimes as strings.
        public string[] GetDateTimesAsStrings(string format = "u");
        //
        // Summary:
        //     Get time series values for the node with the id nodeId and quantity with id quantityId.
        //     If node or quantity could not be found, null is returned.
        public float[] GetNodeValues(string nodeId, string quantityId);
        //
        // Summary:
        //     Get time series values at the end of the reach with name reachName and quantity
        //     with id quantityId. If reach or quantity could not be found, null is returned.
        public float[] GetReachEndValues(string reachName, string quantityId);
        //
        // Summary:
        //     Get time series values at the start of the reach with name reachName and quantity
        //     with id quantityId. If reach or quantity could not be found, null is returned.
        public float[] GetReachStartValues(string reachName, string quantityId);
        //
        // Summary:
        //     Get time series values summing up quantity for all grid points in reach. This
        //     is useful for quantities like water volumes.
        public float[] GetReachSumValues(string reachName, string quantityId);
        //
        // Summary:
        //     Get value at the element that is closest to chainage in reach with name reachName
        //     and quantity with id quantityId, at time time, interpolated if required. If reach
        //     or quantity could not be found, null is returned.
        public float GetReachValue(string reachName, double chainage, string quantityId, DateTime time);
        //
        // Summary:
        //     Get time series values at the element that is closest to chainage in reach with
        //     name reachName and quantity with id quantityId. If reach or quantity could not
        //     be found, null is returned.
        public float[] GetReachValues(string reachName, double chainage, string quantityId);
        """
        return self._query

    @property
    def data(self):
        """
        Object with the loaded res1d data.
        Returns a C# ResultData object that has the following methods:
        // Summary:
        //     Data coverage start
        public DateTime StartTime { get; set; }
        //
        // Summary:
        //     Data coverage end
        public DateTime EndTime { get; set; }
        //
        // Summary:
        //     Number of time steps
        public int NumberOfTimeSteps { get; }
        //
        // Summary:
        //     Time axis for the data.
        public IListDateTimes TimesList { get; set; }
        public ResultTypes ResultType { get; set; }
        //
        // Summary:
        //     List of the contained quantities. Note: This is a derived property
        public IQuantities Quantities { get; }
        //
        // Summary:
        //     List of the contained quantities. Note: This is a derived property
        public IListstrings StructureTypes { get; }
        //
        // Summary:
        //     Get an iterator that iterates over all data items
        public IEnumerable<IDataItem> DataItems { get; }
        //
        // Summary:
        //     Get an iterator that iterates over all data sets
        public IEnumerable<IRes1DDataSet> DataSets { get; }
        //
        // Summary:
        //     List of nodes
        public IRes1DNodes Nodes { get; set; }
        //
        // Summary:
        //     Unit system of the simulation that produced the result data object.
        //     When creating a result data object and storing: Properties of ResultData objects
        //     (coordinates, bottom levels etc.) must always be set in SI units.
        //     When loading a result data object from storage: The DHI.Mike1D.ResultDataAccess.IResultDataParameters.UnitSystem
        //     and DHI.Mike1D.ResultDataAccess.IResultDataParameters.ConvertGeometry can be
        //     used to change units of data and properties in the Result Data object. This property
        //     will maintain the original value and will not be changed by updating the DHI.Mike1D.ResultDataAccess.IResultDataParameters
        public UnitSystem UnitSystem { get; set; }
        //
        // Summary:
        //     List of branches
        public IRes1DReaches Reaches { get; set; }
        //
        // Summary:
        //     Global data. Valid for entire network
        public IRes1DGlobalData GlobalData { get; set; }
        //
        // Summary:
        //     Static data on the network.
        //     Used for user defined markers from MIKE 11 and Critical Levels in MU.
        public IList<INetworkDataDouble> NetworkDatas { get; }
        public float DeleteValue { get; set; }
        public double SecondsBetweenFileFlush { get; set; }
        //
        // Summary:
        //     Result specification
        public ResultSpecification ResultSpecs { get; set; }
        public LoadStatus LoadStatus { get; }
        //
        // Summary:
        //     List of catchments
        public IRes1DCatchments Catchments { get; set; }
        //
        // Summary:
        //     A WKT string for a spatial reference system.
        public string ProjectionString { get; set; }
        """
        return self._data

    def get_node_values(self, node_id, quantity):
        return to_numpy(self.query.GetNodeValues(node_id, quantity))

    def get_reach_values(self, reach_name, chainage, quantity):
        return to_numpy(
            self.query.GetReachValues(reach_name, chainage, quantity))

    def get_reach_value(self, reach_name, chainage, quantity, time):
        time_dotnet = time if isinstance(
            time, DateTime) else to_dotnet_datetime(time)
        return self.query.GetReachValue(reach_name, chainage, quantity,
                                        time_dotnet)

    def get_reach_start_values(self, reach_name, quantity):
        return to_numpy(self.query.GetReachStartValues(reach_name, quantity))

    def get_reach_end_values(self, reach_name, quantity):
        return to_numpy(self.query.GetReachEndValues(reach_name, quantity))

    def get_reach_sum_values(self, reach_name, quantity):
        return to_numpy(self.query.GetReachSumValues(reach_name, quantity))
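
A usage sketch for this earlier variant of Res1D, again with placeholder file, reach and quantity names:

# Hypothetical usage; file path, reach name, chainage and quantity id are placeholders.
res = Res1D("DemoBase.res1d", put_chainage_in_col_name=True)
print(res.quantities)        # available quantity ids
df = res.read()              # all time series as a pandas DataFrame
wl = res.get_reach_values("reach1", 100.0, "WaterLevel")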