Example #1
import os
import tempfile

import h5py
import numpy as np
import pandas as pd

# Import path is an assumption; adjust to wherever DatasetBuffer is defined.
from PyDSS.dataset_buffer import DatasetBuffer


def test_dataset_buffer__write_value():
    filename = os.path.join(tempfile.gettempdir(), "store.h5")
    try:
        with h5py.File(filename, "w") as store:
            columns = ("1", "2", "3", "4")
            max_size = 2000
            # np.float was removed in NumPy 1.24; the builtin float is equivalent.
            dataset = DatasetBuffer(store, "data", max_size, float, columns)
            assert dataset._chunk_size == 1024
            for i in range(max_size):
                data = np.ones(4)
                dataset.write_value(data)
            assert dataset._buf_index == max_size - dataset._chunk_size
            dataset.flush_data()
            assert dataset._buf_index == 0

        with h5py.File(filename, "r") as store:
            data = store["data"][:]
            assert len(data) == max_size
            assert list(store["data"].attrs["columns"]) == list(columns)
            for i in range(max_size):
                for j in range(4):
                    assert data[i][j] == 1.0

            df = DatasetBuffer.to_dataframe(store["data"])
            assert isinstance(df, pd.DataFrame)
            assert len(df) == max_size
            assert df.iloc[0, 0] == 1.0
    finally:
        if os.path.exists(filename):
            os.remove(filename)
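
The read side works outside the test harness too. A minimal sketch, assuming a file already written with the layout above; the import path is an assumption:

import h5py

from PyDSS.dataset_buffer import DatasetBuffer  # import path is an assumption

with h5py.File("store.h5", "r") as store:
    # Column labels come from the dataset's "columns" attribute, per the
    # assertions in the test above.
    df = DatasetBuffer.to_dataframe(store["data"])
    print(df.head())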
Example #2
    def get_dataframe(self,
                      element_class,
                      prop,
                      element_name,
                      real_only=False,
                      **kwargs):
        """Return the dataframe for an element.

        Parameters
        ----------
        element_class : str
        prop : str
        element_name : str
        real_only : bool
            If dtype of any column is complex, drop the imaginary component.
        kwargs
            Filter on options. Option values can be strings or regular expressions.

        Returns
        -------
        pd.DataFrame

        Raises
        ------
        InvalidParameter
            Raised if the element is not stored.

        """
        if element_name not in self._elem_props:
            raise InvalidParameter(f"element {element_name} is not stored")

        elem_group = self._group[element_class][element_name]
        dataset = elem_group[prop]
        df = DatasetBuffer.to_dataframe(dataset)

        if kwargs:
            options = self._check_options(element_class, prop, **kwargs)
            columns = ValueStorageBase.get_columns(df, element_name, options,
                                                   **kwargs)
            df = df[columns]

        if self._data_format_version == "1.0.0":
            dataset_property_type = DatasetPropertyType.ELEMENT_PROPERTY
        else:
            dataset_property_type = get_dataset_property_type(dataset)
        if dataset_property_type == DatasetPropertyType.FILTERED:
            timestamp_path = get_timestamp_path(dataset)
            timestamp_dataset = self._hdf_store[timestamp_path]
            df["Timestamp"] = DatasetBuffer.to_datetime(timestamp_dataset)
            df.set_index("Timestamp", inplace=True)
        else:
            self._add_indices_to_dataframe(df)

        if real_only:
            for column in df.columns:
                # np.complex was removed in NumPy 1.24; compare to builtin complex.
                if df[column].dtype == complex:
                    df[column] = np.real(df[column])

        return df
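
A hypothetical usage sketch for the method above, wrapped in helpers that take the results object as a parameter. The class, property, and element names and the phase_terminal filter option are placeholders, not confirmed values:

import re


def load_line_currents(results):
    """Illustrative only: fetch one element's dataframe, real parts only."""
    return results.get_dataframe("Lines", "Currents", "Line.one",
                                 real_only=True)


def load_filtered_line_currents(results):
    """Illustrative only: kwargs filters accept strings or regexes."""
    return results.get_dataframe("Lines", "Currents", "Line.one",
                                 phase_terminal=re.compile(r"A\d"))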
Example #3
    def get_summed_element_dataframe(self,
                                     element_class,
                                     prop,
                                     real_only=False,
                                     abs_val=False,
                                     group=None):
        """Return the dataframe for a summed element property.

        Parameters
        ----------
        element_class : str
        prop : str
        real_only : bool
            If dtype of any column is complex, drop the imaginary component.
        abs_val : bool
            If dtype of any column is complex, compute its absolute value.
        group : str | None
            Specify a group name if sum_groups was assigned.

        Returns
        -------
        pd.DataFrame

        Raises
        ------
        InvalidParameter
            Raised if the element class is not stored.

        """
        if group is not None:
            prop = ValueStorageBase.DELIMITER.join((prop, group))
        if element_class not in self._summed_elem_timeseries_props:
            raise InvalidParameter(f"{element_class} is not stored")
        if prop not in self._summed_elem_timeseries_props[element_class]:
            raise InvalidParameter(f"{prop} is not stored")

        elem_group = self._group[element_class]["SummedElementProperties"]
        dataset = elem_group[prop]
        df = DatasetBuffer.to_dataframe(dataset)
        self._add_indices_to_dataframe(df)

        if real_only:
            for column in df.columns:
                if df[column].dtype == complex:
                    df[column] = np.real(df[column])
        elif abs_val:
            for column in df.columns:
                if df[column].dtype == complex:
                    df[column] = df[column].apply(np.absolute)

        return df
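
A hypothetical usage sketch; the instance and names are placeholders. Because group is joined onto prop with ValueStorageBase.DELIMITER, the stored key takes the "<prop>__<group>" form noted in Example #6:

def load_summed_losses(results):
    """Illustrative only: summed property, complex values as magnitudes."""
    return results.get_summed_element_dataframe("Lines", "Losses",
                                                abs_val=True)


def load_summed_losses_for_group(results):
    """Illustrative only: with sum_groups assigned, pass the group name."""
    return results.get_summed_element_dataframe("Lines", "Losses",
                                                group="group1")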
Example #4
    def _parse_datasets(self):
        for elem_class in self._elem_classes:
            class_group = self._group[elem_class]
            if "ElementProperties" in class_group:
                prop_group = class_group["ElementProperties"]
                for prop, dataset in prop_group.items():
                    dataset_property_type = get_dataset_property_type(dataset)
                    if dataset_property_type == DatasetPropertyType.TIME_STEP:
                        continue
                    if dataset_property_type == DatasetPropertyType.VALUE:
                        self._elem_values_by_prop[elem_class][prop] = []
                        # Remember which mapping gets the element names below.
                        prop_names = self._elem_values_by_prop
                    elif dataset_property_type in (
                            DatasetPropertyType.PER_TIME_POINT,
                            DatasetPropertyType.FILTERED,
                    ):
                        self._elem_data_by_prop[elem_class][prop] = []
                        prop_names = self._elem_data_by_prop
                    else:
                        continue

                    self._props_by_class[elem_class].append(prop)
                    self._elem_indices_by_prop[elem_class][prop] = {}
                    names = DatasetBuffer.get_names(dataset)
                    self._column_ranges_per_elem[elem_class][prop] = \
                        DatasetBuffer.get_column_ranges(dataset)
                    for i, name in enumerate(names):
                        self._elems_by_class[elem_class].add(name)
                        prop_names[elem_class][prop].append(name)
                        self._elem_indices_by_prop[elem_class][prop][name] = i
                        self._elem_props[name].append(prop)
            else:
                self._elems_by_class[elem_class] = set()

            summed_elem_props = self._group[elem_class].get(
                "SummedElementProperties", [])
            for prop in summed_elem_props:
                dataset = self._group[elem_class]["SummedElementProperties"][
                    prop]
                dataset_property_type = get_dataset_property_type(dataset)
                if dataset_property_type == DatasetPropertyType.VALUE:
                    df = DatasetBuffer.to_dataframe(dataset)
                    assert len(df) == 1
                    self._summed_elem_props[elem_class][prop] = {
                        x: df[x].values[0]
                        for x in df.columns
                    }
                elif dataset_property_type == DatasetPropertyType.PER_TIME_POINT:
                    self._summed_elem_timeseries_props[elem_class].append(prop)
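
The method above fills lookup tables keyed by element class and property. A sketch of the lookups those tables enable afterward; the names are placeholders and the exact shape of a column range is an assumption:

def find_element_columns(self, elem_class="Lines", prop="Currents",
                         name="Line.one"):
    """Illustrative only: locate one element's slot within a property."""
    # Position of the element within the property's column layout.
    i = self._elem_indices_by_prop[elem_class][prop][name]
    # Per-element ranges read via DatasetBuffer.get_column_ranges;
    # indexing them by position is an assumption.
    col_range = self._column_ranges_per_elem[elem_class][prop][i]
    return i, col_range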
Example #5
def test_export_overloads(mocked_func, simulation_settings):
    data1 = {
        "property": "ExportLoadingsMetric",
        "store_values_type": "all",
        "opendss_classes": ["Lines", "Transformers"],
    }
    prop1 = ExportListProperty("CktElement", data1)
    data2 = {
        "property": "ExportLoadingsMetric",
        "store_values_type": "max",
        "opendss_classes": ["Lines", "Transformers"],
    }
    prop2 = ExportListProperty("CktElement", data2)
    num_time_steps = NUM_LOADINGS_FILES
    metric = ExportLoadingsMetric(prop1, OBJS, simulation_settings)
    metric.add_property(prop2)
    with h5py.File(STORE_FILENAME, mode="w", driver="core") as hdf_store:
        metric.initialize_data_store(hdf_store, "", num_time_steps)
        global overloads_file_id
        for i in range(num_time_steps):
            metric.append_values(i)
            overloads_file_id += 1
        metric.close()

        dataset1 = hdf_store[
            "CktElement/ElementProperties/ExportLoadingsMetric"]
        assert dataset1.attrs["length"] == num_time_steps
        assert dataset1.attrs["type"] == "per_time_point"
        df = DatasetBuffer.to_dataframe(dataset1)
        assert isinstance(df, pd.DataFrame)
        assert df["Line.one__Loading"].values.tolist() == LINE_1_VALUES
        assert df["Line.two__Loading"].values.tolist() == LINE_2_VALUES
        assert df["Transformer.one__Loading"].values.tolist() == TRANSFORMER_1_VALUES
        assert df["Transformer.two__Loading"].values.tolist() == TRANSFORMER_2_VALUES

        dataset2 = hdf_store[
            "CktElement/ElementProperties/ExportLoadingsMetricMax"]
        assert dataset2.attrs["length"] == 1
        assert dataset2.attrs["type"] == "value"
        assert dataset2[0][0] == max(LINE_1_VALUES)
        assert dataset2[0][1] == max(LINE_2_VALUES)
        assert dataset2[0][2] == max(TRANSFORMER_1_VALUES)
        assert dataset2[0][3] == max(TRANSFORMER_2_VALUES)
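
The per-element assertions above generalize to a column-by-column cross-check of the "max" dataset against the per-time-point dataframe. A sketch; it assumes the max dataset's column order matches df.columns:

def check_max_dataset(df, max_dataset):
    """Illustrative only: assumes column order matches df.columns."""
    for col_idx, column in enumerate(df.columns):
        assert max_dataset[0][col_idx] == max(df[column].values)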
Example #6
    def _export_summed_element_timeseries(self, path, fmt, compress):
        for elem_class in self._summed_elem_timeseries_props:
            for prop in self._summed_elem_timeseries_props[elem_class]:
                fields = prop.split(ValueStorageBase.DELIMITER)
                if len(fields) == 1:
                    base = ValueStorageBase.DELIMITER.join([elem_class, prop])
                else:
                    assert len(fields) == 2, fields
                    # This will be <elem_class>__<prop>__<group>.
                    base = ValueStorageBase.DELIMITER.join([elem_class, prop])
                filename = os.path.join(path,
                                        base + "." + fmt.replace(".", ""))
                dataset = self._group[elem_class]["SummedElementProperties"][prop]
                prop_type = get_dataset_property_type(dataset)
                if prop_type == DatasetPropertyType.PER_TIME_POINT:
                    df = DatasetBuffer.to_dataframe(dataset)
                    self._finalize_dataframe(df, dataset)
                    write_dataframe(df, filename, compress=compress)
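
For reference, the exported filename reduces to a delimiter join plus an extension. A worked sketch, assuming ValueStorageBase.DELIMITER is "__" (consistent with the "<elem_class>__<prop>__<group>" comment in the method):

import os

# Illustrative values; prop may already carry the group suffix.
elem_class, prop, fmt = "Lines", "Losses__group1", ".csv"
base = "__".join([elem_class, prop])               # "Lines__Losses__group1"
filename = os.path.join("/tmp", base + "." + fmt.replace(".", ""))
# filename == "/tmp/Lines__Losses__group1.csv"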
Example #7
    def get_full_dataframe(self,
                           element_class,
                           prop,
                           real_only=False,
                           abs_val=False,
                           **kwargs):
        """Return a dataframe containing all data.  The dataframe is copied.

        Parameters
        ----------
        element_class : str
        prop : str
        real_only : bool
            If dtype of any column is complex, drop the imaginary component.
        abs_val : bool
            If dtype of any column is complex, compute its absolute value.
        kwargs
            Filter on options; values can be strings or regular expressions.

        Returns
        -------
        pd.DataFrame

        """
        if prop not in self.list_element_properties(element_class):
            raise InvalidParameter(f"property {prop} is not stored")

        dataset = self._group[f"{element_class}/ElementProperties/{prop}"]
        df = DatasetBuffer.to_dataframe(dataset)
        if kwargs:
            options = self._check_options(element_class, prop, **kwargs)
            names = self._elems_by_class.get(element_class, set())
            columns = ValueStorageBase.get_columns(df, names, options,
                                                   **kwargs)
            columns = list(columns)
            columns.sort()
            df = df[columns]
        self._finalize_dataframe(df,
                                 dataset,
                                 real_only=real_only,
                                 abs_val=abs_val)
        return df
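
A hypothetical usage sketch; the instance and names are placeholders. Note that when kwargs are supplied, the filtered column list is sorted before selection, so filtered output has a deterministic column order:

def load_all_powers(results):
    """Illustrative only: full copy of every element's values."""
    return results.get_full_dataframe("CktElement", "ExportPowersMetric",
                                      abs_val=True)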
Example #8
    def _get_elem_prop_dataframe(self,
                                 elem_class,
                                 prop,
                                 name,
                                 dataset,
                                 real_only=False,
                                 abs_val=False,
                                 **kwargs):
        col_range = self._get_element_column_range(elem_class, prop, name)
        df = DatasetBuffer.to_dataframe(dataset, column_range=col_range)

        if kwargs:
            options = self._check_options(elem_class, prop, **kwargs)
            columns = ValueStorageBase.get_columns(df, name, options, **kwargs)
            df = df[columns]

        self._finalize_dataframe(df,
                                 dataset,
                                 real_only=real_only,
                                 abs_val=abs_val)
        return df
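
The column_range argument is what keeps this path cheap: DatasetBuffer.to_dataframe materializes only the requested element's slice rather than the whole dataset. A hypothetical sketch of driving the private helper directly; in practice a public method would sit in front of it:

def load_one_element(self, dataset):
    """Illustrative only: the class, property, and name are placeholders."""
    return self._get_elem_prop_dataframe("Lines", "Currents", "Line.one",
                                         dataset, real_only=True)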
Example #9
def test_export_powers(mocked_func, simulation_settings):
    data1 = {
        "property": "ExportPowersMetric",
        "store_values_type": "all",
        "opendss_classes": ["Lines", "Loads", "PVSystems", "Transformers"],
    }
    prop1 = ExportListProperty("CktElement", data1)
    data2 = {
        "property": "ExportPowersMetric",
        "store_values_type": "max",
        "opendss_classes": ["Lines", "Loads", "PVSystems", "Transformers"],
    }
    prop2 = ExportListProperty("CktElement", data2)
    data3 = {
        "property": "ExportPowersMetric",
        "store_values_type": "sum",
        "opendss_classes": ["Lines", "Loads", "PVSystems", "Transformers"],
    }
    prop3 = ExportListProperty("CktElement", data3)
    num_time_steps = NUM_POWERS_FILES
    metric = ExportPowersMetric(prop1, OBJS, simulation_settings)
    metric.add_property(prop2)
    metric.add_property(prop3)
    with h5py.File(STORE_FILENAME, mode="w", driver="core") as hdf_store:
        metric.initialize_data_store(hdf_store, "", num_time_steps)
        global powers_file_id
        for i in range(num_time_steps):
            metric.append_values(i)
            powers_file_id += 1
        metric.close()

        dataset1 = hdf_store["CktElement/ElementProperties/ExportPowersMetric"]
        assert dataset1.attrs["length"] == num_time_steps
        assert dataset1.attrs["type"] == "per_time_point"
        df = DatasetBuffer.to_dataframe(dataset1)
        assert isinstance(df, pd.DataFrame)
        assert df["Load.one__Powers"].values.tolist() == LOAD_1_VALUES
        assert df["Load.two__Powers"].values.tolist() == LOAD_2_VALUES
        assert df["PVSystem.one__Powers"].values.tolist() == PV_SYSTEM_1_VALUES
        assert df["PVSystem.two__Powers"].values.tolist() == PV_SYSTEM_2_VALUES

        dataset2 = hdf_store[
            "CktElement/ElementProperties/ExportPowersMetricMax"]
        assert dataset2.attrs["length"] == 1
        assert dataset2.attrs["type"] == "value"
        # Loads occupy columns 2 and 3, PVSystems columns 4 and 5
        assert dataset2[0][2] == max(LOAD_1_VALUES)
        assert dataset2[0][3] == max(LOAD_2_VALUES)
        assert dataset2[0][4] == max(PV_SYSTEM_1_VALUES)
        assert dataset2[0][5] == max(PV_SYSTEM_2_VALUES)

        dataset3 = hdf_store[
            "CktElement/ElementProperties/ExportPowersMetricSum"]
        assert dataset3.attrs["length"] == 1
        assert dataset3.attrs["type"] == "value"
        assert dataset3[0][2] == sum(LOAD_1_VALUES)
        assert dataset3[0][3] == sum(LOAD_2_VALUES)
        assert dataset3[0][4] == sum(PV_SYSTEM_1_VALUES)
        assert dataset3[0][5] == sum(PV_SYSTEM_2_VALUES)
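
As in Example #5, the aggregate dataset can be cross-checked column by column against the per-time-point dataframe. A sketch; it assumes the sum dataset shares df's column order, and the exact equality mirrors the assertions in the test above:

def check_sum_dataset(df, sum_dataset):
    """Illustrative only: assumes column order matches df.columns."""
    for col_idx, column in enumerate(df.columns):
        assert sum_dataset[0][col_idx] == sum(df[column].values)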