Ejemplo n.º 1
0
    def GetVariable(self, VarName, convert=False):
        """Return the current value of an OpenDSS variable for this element.

        Parameters
        ----------
        VarName : str
            Name of a variable registered in self._Variables.
        convert : bool
            If True, wrap the raw value in a Value* container chosen from the
            class-level output maps; otherwise return the raw value.

        Raises
        ------
        InvalidParameter
            Raised if VarName is unknown or has no getter function.

        """
        if VarName not in self._Variables:
            raise InvalidParameter(
                f'{VarName} is an invalid variable name for element {self._FullName}'
            )

        self.SetActiveObject()
        func = self._Variables[VarName]
        if func is None:
            # Removed a leftover debug print that ran before this raise.
            raise InvalidParameter(
                f"get function for {self._FullName} / {VarName} is None")

        value = func()
        if not convert:
            return value

        if VarName in self.VARIABLE_OUTPUTS_BY_LABEL:
            info = self.VARIABLE_OUTPUTS_BY_LABEL[VarName]
            is_complex = info["is_complex"]
            units = info["units"]
            return ValueByLabel(self._FullName, VarName, value, self._Nodes,
                                is_complex, units)
        elif VarName in self.VARIABLE_OUTPUTS_COMPLEX:
            # OpenDSS returns complex values as [real, imag] pairs.
            assert isinstance(value, list) and len(value) == 2, str(value)
            value = complex(value[0], value[1])
        return ValueByNumber(self._FullName, VarName, value)
Ejemplo n.º 2
0
    def get_summed_element_total(self, element_class, prop, group=None):
        """Return the total value for a summed element property.

        Parameters
        ----------
        element_class : str
        prop : str
        group : str | None
            Specify a group name if sum_groups was assigned.

        Returns
        -------
        dict

        Raises
        ------
        InvalidParameter
            Raised if the element class is not stored.

        """
        if group is not None:
            # Grouped sums are stored under "<prop><delimiter><group>".
            prop = ValueStorageBase.DELIMITER.join((prop, group))
        class_props = self._summed_elem_props.get(element_class)
        if class_props is None:
            raise InvalidParameter(f"{element_class} is not stored")
        if prop not in class_props:
            raise InvalidParameter(f"{prop} is not stored")
        return class_props[prop]
Ejemplo n.º 3
0
    def GetVariable(self, VarName, convert=False):
        """Return the current value of an OpenDSS variable for this element.

        When ``convert`` is True the raw value is wrapped in the matching
        Value* container based on the class-level output maps; otherwise the
        raw value is returned unchanged.

        Raises
        ------
        InvalidParameter
            Raised if VarName is unknown or has no getter function.

        """
        if VarName not in self._Variables:
            raise InvalidParameter(
                f'{VarName} is an invalid variable name for element {self._FullName}'
            )
        # Only switch the active object when some other element is selected.
        if self._dssInstance.Element.Name() != self._FullName:
            self.SetActiveObject()
        getter = self._Variables[VarName]
        if getter is None:
            raise InvalidParameter(
                f"get function for {self._FullName} / {VarName} is None")

        raw = getter()
        if not convert:
            return raw

        if VarName in self.VARIABLE_OUTPUTS_BY_LABEL:
            info = self.VARIABLE_OUTPUTS_BY_LABEL[VarName]
            return ValueByLabel(self._FullName, VarName, raw, self._Nodes,
                                info["is_complex"], info["units"])
        if VarName in self.VARIABLE_OUTPUTS_COMPLEX:
            # OpenDSS returns complex values as [real, imag] pairs.
            assert isinstance(raw, list) and len(raw) == 2, str(raw)
            return ValueByNumber(self._FullName, VarName,
                                 complex(raw[0], raw[1]))
        if VarName in self.VARIABLE_OUTPUTS_BY_LIST:
            assert isinstance(raw, list), str(raw)
            labels = [f"_bus_index_{i}" for i in range(len(raw))]
            return ValueByList(self._FullName, VarName, raw, labels)
        return ValueByNumber(self._FullName, VarName, raw)
Ejemplo n.º 4
0
    def read_report(self, report_name):
        """Return the report data.

        Parameters
        ----------
        report_name : str

        Returns
        -------
        str

        Raises
        ------
        InvalidParameter
            Raised if the report name is unknown or no report file exists.

        """
        all_reports = Reports.get_all_reports()
        if report_name not in all_reports:
            raise InvalidParameter(f"invalid report name {report_name}")
        report_cls = all_reports[report_name]

        # Reports are always extracted to disk, so bypass self._fs_intf.
        reports_dir = os.path.join(self._project.project_path, REPORTS_DIR)
        target = os.path.splitext(report_cls.FILENAME)[0]
        for filename in os.listdir(reports_dir):
            name, ext = os.path.splitext(filename)
            if name != target:
                continue
            path = os.path.join(reports_dir, filename)
            if ext in (".json", ".toml"):
                return load_data(path)
            if ext in (".csv", ".h5"):
                return read_dataframe(path)

        raise InvalidParameter(
            f"did not find report {report_name} in {reports_dir}")
Ejemplo n.º 5
0
    def get_columns(df, names, options, **kwargs):
        """Return the column names in the dataframe that match names and kwargs.

        Parameters
        ----------
        df : pd.DataFrame
        names : str | list
            single name or list of names
        kwargs : dict
            Filter on options; values can be strings or regular expressions.

        Returns
        -------
        list

        Raises
        ------
        InvalidParameter
            Raised if no column matches or an option value is unhandled.

        """
        if isinstance(names, str):
            names = {names}
        elif not isinstance(names, set):
            names = set(names)
        # Option i lives at field index i + 1 (field 0 is the name).
        field_indices = {option: i + 1 for i, option in enumerate(options)}
        sample_name = next(iter(names))
        matched_columns = []
        for column in df.columns:
            base = column
            unit_start = column.find(" [")
            if unit_start != -1:
                # Strip a trailing units suffix such as " [kW]".
                base = column[:unit_start]
            # fields layout: [name, option1, option2, ...]
            fields = ValueStorageBase.get_fields(base, sample_name)
            if options and kwargs:
                assert len(fields) == 1 + len(
                    options), f"fields={fields} options={options}"
            if fields[0] not in names:
                continue
            match = True
            for key, val in kwargs.items():
                if val is None:
                    # None means "no filter on this option".
                    continue
                if isinstance(val, str):
                    match = fields[field_indices[key]] == val
                elif isinstance(val, re.Pattern):
                    match = val.search(fields[field_indices[key]]) is not None
                else:
                    raise InvalidParameter(f"unhandled option value '{val}'")
                if not match:
                    break
            if match:
                matched_columns.append(column)

        if not matched_columns:
            raise InvalidParameter(f"{names} does not exist in DataFrame")

        return matched_columns
Ejemplo n.º 6
0
 def get_element_property_number(self, element_class, prop, element_name):
     """Return the number stored for the element property.

     Raises
     ------
     InvalidParameter
         Raised if any of the lookup keys is not stored.

     """
     by_class = self._elem_prop_nums
     if element_class not in by_class:
         raise InvalidParameter(f"{element_class} is not stored")
     by_prop = by_class[element_class]
     if prop not in by_prop:
         raise InvalidParameter(f"{prop} is not stored")
     by_name = by_prop[prop]
     if element_name not in by_name:
         raise InvalidParameter(f"{element_name} is not stored")
     return by_name[element_name]
Ejemplo n.º 7
0
 def add_profiles(self, data, name, pType, startTime, resolution_sec=900, units="", info=""):
     """Add profiles to the store and flush it.

     Parameters
     ----------
     data : sequence
         Profile arrays; when empty, nothing is added but the store is
         still flushed.
     name : str
     pType : str
         Must be one of PROFILE_TYPES.names().
     startTime : datetime.datetime
     resolution_sec : int
         Profile resolution in seconds.
     units : str
     info : str

     Raises
     ------
     InvalidParameter
         Raised if startTime is not a datetime or pType is invalid.

     """
     # isinstance (instead of an exact type() check) also accepts datetime
     # subclasses such as pandas.Timestamp.
     if not isinstance(startTime, datetime.datetime):
         raise InvalidParameter("startTime should be a python datetime object")
     if pType not in PROFILE_TYPES.names():
         raise InvalidParameter("Valid values for pType are {}".format(PROFILE_TYPES.names()))
     if data:
         self.add_from_arrays(data, name, pType, startTime, resolution_sec, units=units, info=info)
     self.store.flush()
Ejemplo n.º 8
0
    def _add_controller(self, controller_type, controller):
        """Register a controller definition after validating it.

        Parameters
        ----------
        controller_type : str
        controller : dict
            Must contain "name" and "filename" keys.

        Raises
        ------
        InvalidParameter
            Raised if the controller is already registered or its config
            file does not exist.

        """
        name = controller["name"]
        filename = controller["filename"]
        if self.is_controller_registered(controller_type, name):
            raise InvalidParameter(
                f"{controller_type} / {name} is already registered")
        if not os.path.exists(filename):
            # Include the missing path in the message; the f-string
            # previously contained no placeholder.
            raise InvalidParameter(f"{filename} does not exist.")
        # Make sure the file can be parsed.
        load_data(filename)

        self._controllers[controller_type][name] = controller
Ejemplo n.º 9
0
    def get_summed_element_dataframe(self,
                                     element_class,
                                     prop,
                                     real_only=False,
                                     abs_val=False,
                                     group=None):
        """Return the dataframe for a summed element property.

        Parameters
        ----------
        element_class : str
        prop : str
        group : str | None
            Specify a group name if sum_groups was assigned.
        real_only : bool
            If dtype of any column is complex, drop the imaginary component.
        abs_val : bool
            If dtype of any column is complex, compute its absolute value.

        Returns
        -------
        pd.DataFrame

        Raises
        ------
        InvalidParameter
            Raised if the element class is not stored.

        """
        if group is not None:
            # Grouped sums are stored under "<prop><delimiter><group>".
            prop = ValueStorageBase.DELIMITER.join((prop, group))
        stored = self._summed_elem_timeseries_props
        if element_class not in stored:
            raise InvalidParameter(f"{element_class} is not stored")
        if prop not in stored[element_class]:
            raise InvalidParameter(f"{prop} is not stored")

        dataset = self._group[element_class]["SummedElementProperties"][prop]
        df = DatasetBuffer.to_dataframe(dataset)
        self._add_indices_to_dataframe(df)

        # real_only takes precedence over abs_val, matching get_dataframe.
        for column in df.columns:
            if df[column].dtype != complex:
                continue
            if real_only:
                df[column] = np.real(df[column])
            elif abs_val:
                df[column] = df[column].apply(np.absolute)

        return df
Ejemplo n.º 10
0
    def get_units(prop, index=None):
        """Return the units stored for prop.

        Parameters
        ----------
        prop : str
        index : int | None
            Required when the property stores per-index units
            (0 selects "E", 1 selects "O").

        Raises
        ------
        InvalidParameter
            Raised if prop has no units or index is missing/invalid.

        """
        units = unit_info.get(prop)
        if units is None:
            raise InvalidParameter(f"no units are stored for {prop}")

        if not isinstance(units, dict):
            return units

        # Dict-valued entries store separate units per index.
        if index is None:
            raise InvalidParameter(f"index must be provided for {prop}")
        key_by_index = {0: "E", 1: "O"}
        if index not in key_by_index:
            raise InvalidParameter("index must be 0 or 1")
        return units[key_by_index[index]]
Ejemplo n.º 11
0
    def __init__(self, Path):
        """Load dynamic plot definitions from TOML (or XLSX) files in Path.

        Parameters
        ----------
        Path : str
            Directory containing plot configuration files.

        Raises
        ------
        InvalidParameter
            Raised if two definitions of the same plot type share a name.

        """
        self.pyPlots = {}
        # Removed unused found_config_file/found_excel_file flags and a
        # duplicated assert on filepath existence.
        for filename in os.listdir(Path):
            pyPlotType, ext = os.path.splitext(filename)
            # Skip Excel lock/temp files such as '~$foo.xlsx'.
            if filename.startswith('~$'):
                continue
            elif ext == '.xlsx':
                # NOTE(review): this passes a bare filename, not a full path;
                # confirm convert_config_data_to_toml resolves it correctly.
                filename = convert_config_data_to_toml(filename)
            elif ext != ".toml":
                continue
            if pyPlotType not in self.pyPlots:
                self.pyPlots[pyPlotType] = {}
            filepath = os.path.join(Path, filename)
            assert (os.path.exists(filepath)
                    ), 'path: "{}" does not exist!'.format(filepath)
            for name, plot in load_data(filepath).items():
                if name in self.pyPlots[pyPlotType]:
                    raise InvalidParameter(
                        f"Multiple PyDSS dynamic plot definitions of the same type with the same name not allowed: "
                        f"{name} already exists for plot type {pyPlotType}")
                self.pyPlots[pyPlotType][name] = plot
Ejemplo n.º 12
0
    def SetVariable(self, VarName, Value):
        """Set VarName on this element to Value via its registered setter."""
        # Make this element the active OpenDSS object if it isn't already.
        if self._FullName != self._dssInstance.Element.Name():
            self.SetActiveObject()
        if VarName not in self._Variables:
            raise InvalidParameter(f"invalid variable name {VarName}")

        setter = self._Variables[VarName]
        return setter(Value)
Ejemplo n.º 13
0
    def get_option_values(df, name):
        """Return the option values parsed from the column names.

        Parameters
        ----------
        df : pd.DataFrame
        name : str

        Returns
        -------
        list

        Raises
        ------
        InvalidParameter
            Raised if no column matches name.

        """
        values = []
        for column in df.columns:
            base = column
            unit_start = column.find(" [")
            if unit_start != -1:
                # Strip a trailing units suffix such as " [kW]".
                base = column[:unit_start]
            # fields layout: [name, option1, option2, ...]
            fields = base.split(ValueStorageBase.DELIMITER)
            if fields[0] != name:
                continue
            values.extend(fields[1:])

        if not values:
            raise InvalidParameter(f"{name} does not exist in DataFrame")

        return values
Ejemplo n.º 14
0
    def read_element_info_file(self, filename):
        """Return the contents of file describing an OpenDSS element object.

        Parameters
        ----------
        filename : str
            full path to a file (returned by list_element_info_files) or
            an element class, like "Transformers"

        Returns
        -------
        pd.DataFrame

        Raises
        ------
        InvalidParameter
            Raised if no info file is stored for the requested element class.

        """
        if "." not in filename:
            # Treat the argument as an element class name and resolve it to
            # the stored info file.
            actual = None
            for _file in self.list_element_info_files():
                basename = os.path.splitext(os.path.basename(_file))[0]
                if basename.replace("Info", "") == filename:
                    actual = _file
            if actual is None:
                # Include the requested class in the message; it previously
                # contained a literal "(unknown)" placeholder.
                raise InvalidParameter(
                    f"element info file for {filename} is not stored")
            filename = actual

        return self._fs_intf.read_csv(filename)
Ejemplo n.º 15
0
 def get_element_property_value(self, element_class, prop, element_name):
     """Return the value(s) stored for the element property.

     Returns a scalar when one column is stored, otherwise a slice.

     Raises
     ------
     InvalidParameter
         Raised if any of the lookup keys is not stored.

     """
     by_class = self._elem_values_by_prop
     if element_class not in by_class:
         raise InvalidParameter(f"{element_class} is not stored")
     if prop not in by_class[element_class]:
         raise InvalidParameter(f"{prop} is not stored")
     if element_name not in by_class[element_class][prop]:
         raise InvalidParameter(f"{element_name} is not stored")
     dataset = self._group[f"{element_class}/ElementProperties/{prop}"]
     col_range = self._get_element_column_range(element_class, prop,
                                                element_name)
     start, length = col_range[0], col_range[1]
     # Read the dataset and take its first row before slicing columns.
     row = dataset[:][0]
     if length == 1:
         return row[start]
     return row[start:start + length]
Ejemplo n.º 16
0
    def _create_exports(self):
        """Build ElementData containers for every exportable element property."""
        elements = {}  # maps element name -> ElementData
        for elem_class in self._export_list.list_element_classes():
            if elem_class == "Buses":
                objs = self._buses
            elif elem_class in self._objects_by_class:
                objs = self._objects_by_class[elem_class]
            else:
                continue
            for name, obj in objs.items():
                if not obj.Enabled:
                    continue
                props = self._export_list.iter_export_properties(
                    elem_class=elem_class)
                for prop in props:
                    exportable = (prop.custom_function is not None
                                  or obj.IsValidAttribute(prop.name))
                    if not exportable:
                        raise InvalidParameter(
                            f"{name} / {prop.name} cannot be exported")
                    if not prop.should_store_name(name):
                        continue
                    if name not in elements:
                        elements[name] = ElementData(
                            name,
                            obj,
                            max_chunk_bytes=self._max_chunk_bytes,
                            options=self._options)
                    elements[name].append_property(prop)
                    self._logger.debug("Store %s %s name=%s", elem_class,
                                       prop.name, name)

        self._elements = elements.values()
Ejemplo n.º 17
0
 def _check_input_fields(self):
     """Verify that the config defines every required input field."""
     present = set(self.config.keys())
     missing = [x for x in self._get_required_input_fields()
                if x not in present]
     if missing:
         # Report the first missing field, preserving iteration order.
         raise InvalidParameter(
             f"{self.__class__.__name__} requires input field {missing[0]}")
Ejemplo n.º 18
0
    def __init__(self, name, controller_types=None, controllers=None,
                 export_modes=None, exports=None, visualizations=None,
                 post_process_infos=None, visualization_types=None):
        """Construct a scenario from type lists, config file paths, or dicts.

        Parameters
        ----------
        name : str
            Scenario name.
        controller_types : list | None
            Controller types to load with default configs.
        controllers : str | dict | None
            Path to a controller config file or a pre-loaded dict.
        export_modes : list | None
            Export modes to load with default configs.
        exports : str | dict | None
            Path to an export config file or a pre-loaded dict.
        visualizations : str | dict | None
            Path to a visualization config file or a pre-loaded dict.
        post_process_infos : list | None
            Post-process script definitions; see add_post_process.
        visualization_types : list | None
            Visualization types to load with default configs.

        Raises
        ------
        InvalidParameter
            Raised if export_modes and exports are both set.

        """
        self.name = name
        self.post_process_infos = []

        if visualization_types is None and visualizations is None:
            self.visualizations = {
                x: self.load_visualization_config_from_type(x)
                for x in PyDssScenario.DEFAULT_VISUALIZATION_TYPES
            }
        elif visualization_types is not None:
            self.visualizations = {
                x: self.load_visualization_config_from_type(x)
                for x in visualization_types
            }
        elif isinstance(visualizations, str):
            basename = os.path.splitext(os.path.basename(visualizations))[0]
            visualization_type = VisualizationType(basename)
            # Bug fix: previously called load_data(controllers) here, which
            # read the wrong file when visualizations was given as a path.
            self.visualizations = {visualization_type: load_data(visualizations)}
        else:
            assert isinstance(visualizations, dict)
            self.visualizations = visualizations

        if controller_types is None and controllers is None:
            self.controllers = {}
        elif controller_types is not None:
            self.controllers = {
                x: self.load_controller_config_from_type(x)
                for x in controller_types
            }
        elif isinstance(controllers, str):
            basename = os.path.splitext(os.path.basename(controllers))[0]
            controller_type = ControllerType(basename)
            self.controllers = {controller_type: load_data(controllers)}
        else:
            assert isinstance(controllers, dict)
            self.controllers = controllers

        if export_modes is not None and exports is not None:
            raise InvalidParameter(
                "export_modes and exports cannot both be set"
            )
        if export_modes is None and exports is None:
            mode = PyDssScenario.DEFAULT_EXPORT_MODE
            self.exports = {mode: self.load_export_config_from_mode(mode)}
        elif export_modes is not None:
            self.exports = {
                x: self.load_export_config_from_mode(x) for x in export_modes
            }
        elif isinstance(exports, str):
            mode = ExportMode(os.path.splitext(os.path.basename(exports))[0])
            self.exports = {mode: load_data(exports)}
        else:
            assert isinstance(exports, dict)
            self.exports = exports

        if post_process_infos is not None:
            for pp_info in post_process_infos:
                self.add_post_process(pp_info)
Ejemplo n.º 19
0
    def get_dataframe(self,
                      element_class,
                      prop,
                      element_name,
                      real_only=False,
                      **kwargs):
        """Return the dataframe for an element.

        Parameters
        ----------
        element_class : str
        prop : str
        element_name : str
        real_only : bool
            If dtype of any column is complex, drop the imaginary component.
        kwargs : **kwargs
            Filter on options. Option values can be strings or regular expressions.

        Returns
        -------
        pd.DataFrame

        Raises
        ------
        InvalidParameter
            Raised if the element is not stored.

        """
        if element_name not in self._elem_props:
            raise InvalidParameter(f"element {element_name} is not stored")

        elem_group = self._group[element_class][element_name]
        dataset = elem_group[prop]
        df = DatasetBuffer.to_dataframe(dataset)

        if kwargs:
            options = self._check_options(element_class, prop, **kwargs)
            columns = ValueStorageBase.get_columns(df, element_name, options,
                                                   **kwargs)
            df = df[columns]

        # Data format 1.0.0 predates per-dataset property-type attributes.
        if self._data_format_version == "1.0.0":
            dataset_property_type = DatasetPropertyType.ELEMENT_PROPERTY
        else:
            dataset_property_type = get_dataset_property_type(dataset)
        if dataset_property_type == DatasetPropertyType.FILTERED:
            # Filtered datasets carry their own timestamp dataset.
            timestamp_path = get_timestamp_path(dataset)
            timestamp_dataset = self._hdf_store[timestamp_path]
            df["Timestamp"] = DatasetBuffer.to_datetime(timestamp_dataset)
            df.set_index("Timestamp", inplace=True)
        else:
            self._add_indices_to_dataframe(df)

        if real_only:
            for column in df.columns:
                # np.complex was a deprecated alias of the builtin complex and
                # was removed in NumPy 1.24; compare against complex directly.
                if df[column].dtype == complex:
                    df[column] = [x.real for x in df[column]]

        return df
Ejemplo n.º 20
0
    def unregister_controller(self, controller_type, name):
        """Unregisters a controller.

        Parameters
        ----------
        controller_type : str
        name : str

        Raises
        ------
        InvalidParameter
            Raised if the controller is not registered or is a default
            controller.

        """
        if not self.is_controller_registered(controller_type, name):
            raise InvalidParameter(
                f"{controller_type} / {name} isn't registered")
        # Default controllers ship with the package and must stay available.
        if self._is_default_controller(controller_type, name):
            raise InvalidParameter("Cannot unregister a default controller")

        del self._controllers[controller_type][name]
        self._serialize_registry()
Ejemplo n.º 21
0
    def _check_options(self, element_class, prop, **kwargs):
        """Checks that kwargs are valid and returns available option names."""
        is_valid = self._options.is_option_valid
        for option in kwargs:
            if is_valid(element_class, prop, option):
                continue
            raise InvalidParameter(
                f"class={element_class} property={prop} option={option} is invalid"
            )

        return self._options.list_options(element_class, prop)
Ejemplo n.º 22
0
def get_solver_from_simulation_type(settings: ProjectModel):
    """Return a solver from the simulation type."""
    # Dispatch table keyed by simulation type.
    solver_classes = {
        SimulationType.SNAPSHOT: Snapshot,
        SimulationType.QSTS: QSTS,
        SimulationType.DYNAMIC: Dynamic,
    }
    solver_cls = solver_classes.get(settings.simulation_type)
    if solver_cls is None:
        raise InvalidParameter(
            f"{settings.simulation_type} does not have a supported solver")
    return solver_cls(dssInstance=dss, settings=settings)
Ejemplo n.º 23
0
 def add_property(self, prop):
     """Add an instance of ExportListProperty for tracking."""
     # All properties sharing an element must agree on name filtering.
     if prop.are_names_filtered != self._are_names_filtered:
         raise InvalidConfiguration(
             f"All properties for shared elements must have the same filters: "
             f"{self._elem_class.__name__} / {prop.name}.")
     stored = self._properties.get(prop.store_values_type)
     if stored is None:
         self._properties[prop.store_values_type] = prop
     elif prop != stored:
         raise InvalidParameter(f"{prop.store_values_type} is already stored")
Ejemplo n.º 24
0
    def add_post_process(self, post_process_info):
        """Add a post-process script to a scenario.

        Parameters
        ----------
        post_process_info : dict
            Must define all fields in PyDssScenario.REQUIRED_POST_PROCESS_FIELDS

        Raises
        ------
        InvalidParameter
            Raised if a required field is missing or the config file does
            not exist.

        """
        missing = [x for x in self.REQUIRED_POST_PROCESS_FIELDS
                   if x not in post_process_info]
        if missing:
            raise InvalidParameter(f"missing post-process field={missing[0]}")
        config_file = post_process_info["config_file"]
        if not os.path.exists(config_file):
            raise InvalidParameter(f"{config_file} does not exist")

        self.post_process_infos.append(post_process_info)
        logger.info("Appended post-process script %s to %s",
                    post_process_info["script"], self.name)
Ejemplo n.º 25
0
    def __init__(self,
                 project_path=None,
                 project=None,
                 in_memory=False,
                 frequency=False,
                 mode=False):
        """Constructs PyDssResults object.

        Parameters
        ----------
        project_path : str | None
            Load project from files in path
        project : PyDssProject | None
            Existing project object
        in_memory : bool
            If true, load all exported data into memory.
        frequency : bool
            If true, add frequency column to all dataframes.
        mode : bool
            If true, add mode column to all dataframes.

        Raises
        ------
        InvalidParameter
            Raised if neither project_path nor project is provided.

        """
        options = ElementOptions()
        # project_path takes precedence over a pre-built project object.
        if project_path is not None:
            # TODO: handle old version?
            self._project = PyDssProject.load_project(
                project_path,
                simulation_file=RUN_SIMULATION_FILENAME,
            )
        elif project is None:
            raise InvalidParameter("project_path or project must be set")
        else:
            self._project = project
        self._fs_intf = self._project.fs_interface
        self._scenarios = []
        filename = self._project.get_hdf_store_filename()
        # h5py's "core" driver reads the whole store into memory.
        driver = "core" if in_memory else None
        self._hdf_store = h5py.File(filename, "r", driver=driver)

        # Scenario results only exist when the simulation exported results.
        if self._project.simulation_config.exports.export_results:
            for name in self._project.list_scenario_names():
                metadata = self._project.read_scenario_export_metadata(name)
                scenario_result = PyDssScenarioResults(
                    name,
                    self.project_path,
                    self._hdf_store,
                    self._fs_intf,
                    metadata,
                    options,
                    frequency=frequency,
                    mode=mode,
                )
                self._scenarios.append(scenario_result)
Ejemplo n.º 26
0
    def get_dataframe(self,
                      element_class,
                      prop,
                      element_name,
                      real_only=False,
                      abs_val=False,
                      **kwargs):
        """Return the dataframe for an element.

        Parameters
        ----------
        element_class : str
        prop : str
        element_name : str
        real_only : bool
            If dtype of any column is complex, drop the imaginary component.
        abs_val : bool
            If dtype of any column is complex, compute its absolute value.
        kwargs
            Filter on options; values can be strings or regular expressions.

        Returns
        -------
        pd.DataFrame

        Raises
        ------
        InvalidParameter
            Raised if the element is not stored or the dataset property
            type is unsupported.

        """
        if element_name not in self._elem_props:
            raise InvalidParameter(f"element {element_name} is not stored")

        dataset = self._group[f"{element_class}/ElementProperties/{prop}"]
        prop_type = get_dataset_property_type(dataset)
        if prop_type == DatasetPropertyType.PER_TIME_POINT:
            return self._get_elem_prop_dataframe(element_class,
                                                 prop,
                                                 element_name,
                                                 dataset,
                                                 real_only=real_only,
                                                 abs_val=abs_val,
                                                 **kwargs)
        elif prop_type == DatasetPropertyType.FILTERED:
            return self._get_filtered_dataframe(element_class,
                                                prop,
                                                element_name,
                                                dataset,
                                                real_only=real_only,
                                                abs_val=abs_val,
                                                **kwargs)
        # Raise instead of `assert False`: asserts are stripped under -O,
        # which would make this silently return None.
        raise InvalidParameter(
            f"unsupported dataset property type: {prop_type}")
Ejemplo n.º 27
0
    def __init__(self,
                 value,
                 hdf_store,
                 path,
                 max_size,
                 dataset_property_type,
                 max_chunk_bytes=None,
                 store_timestamp=False):
        """Create dataset buffers for a value and, optionally, its timestamps.

        Parameters
        ----------
        value
            Object providing value_type and make_columns(); its type selects
            the dataset dtype via self._TYPE_MAPPING.
        hdf_store : h5py.File
        path : str
            Dataset path within the store.
        max_size : int
        dataset_property_type : DatasetPropertyType
        max_chunk_bytes : int | None
        store_timestamp : bool
            If True, also create a parallel timestamp dataset.

        Raises
        ------
        InvalidParameter
            Raised if a dataset with the same name already exists.

        """
        group_name = os.path.dirname(path)
        basename = os.path.basename(path)
        try:
            if basename in hdf_store[group_name].keys():
                raise InvalidParameter(f"duplicate dataset name {basename}")
        except KeyError:
            # Group doesn't exist yet; don't bother checking each sub path.
            pass

        dtype = self._TYPE_MAPPING.get(value.value_type)
        assert dtype is not None
        # np.float/np.int were deprecated aliases of the builtin types and
        # were removed in NumPy 1.24; compare against the builtins directly.
        scaleoffset = None
        if dtype == float:
            scaleoffset = 4
        elif dtype == int:
            scaleoffset = 0
        attributes = {"type": dataset_property_type.value}
        timestamp_path = None

        if store_timestamp:
            timestamp_path = self.timestamp_path(path)
            self._timestamps = DatasetBuffer(
                hdf_store,
                timestamp_path,
                max_size,
                float,
                ["Timestamp"],
                scaleoffset=scaleoffset,
                max_chunk_bytes=max_chunk_bytes,
                attributes={"type": DatasetPropertyType.TIMESTAMP.value},
            )
            attributes["timestamp_path"] = timestamp_path
        else:
            self._timestamps = None

        self._dataset = DatasetBuffer(
            hdf_store,
            path,
            max_size,
            dtype,
            value.make_columns(),
            scaleoffset=scaleoffset,
            max_chunk_bytes=max_chunk_bytes,
            attributes=attributes,
        )
Ejemplo n.º 28
0
    def run(self, logging_configured=True, tar_project=False, zip_project=False, dry_run=False):
        """Run all scenarios in the project."""
        # Archived projects are read-only; refuse to run them.
        if isinstance(self._fs_intf, PyDssArchiveFileInterfaceBase):
            raise InvalidConfiguration("cannot run from an archived project")
        if tar_project and zip_project:
            raise InvalidParameter("tar_project and zip_project cannot both be True")
        if self._simulation_config['Project']['DSS File'] == "":
            raise InvalidConfiguration("a valid opendss file needs to be passed")

        inst = instance()
        self._simulation_config["Logging"]["Pre-configured logging"] = logging_configured

        # Dry runs write the store to a temp dir so the project is untouched;
        # the file is removed again at the end of this method.
        if dry_run:
            store_filename = os.path.join(tempfile.gettempdir(), STORE_FILENAME)
        else:
            store_filename = os.path.join(self._project_dir, STORE_FILENAME)

        # h5py's "core" driver buffers the store in memory.
        driver = None
        if self._simulation_config["Exports"].get("Export Data In Memory", True):
            driver = "core"
        with h5py.File(store_filename, mode="w", driver=driver) as hdf_store:
            self._hdf_store = hdf_store
            self._hdf_store.attrs["version"] = DATA_FORMAT_VERSION
            # Run scenarios sequentially against the shared store.
            for scenario in self._scenarios:
                self._simulation_config["Project"]["Active Scenario"] = scenario.name
                inst.run(self._simulation_config, self, scenario, dry_run=dry_run)
                self._estimated_space[scenario.name] = inst.get_estimated_space()

        if not dry_run:
            results = None
            export_tables = self._simulation_config["Exports"].get(
                "Export Data Tables", False
            )
            generate_reports = self._simulation_config.get("Reports", False)
            if export_tables or generate_reports:
                # Hack. Have to import here. Need to re-organize to fix.
                from PyDSS.pydss_results import PyDssResults
                results = PyDssResults(self._project_dir)
                if export_tables:
                    for scenario in results.scenarios:
                        scenario.export_data()

                if generate_reports:
                    results.generate_reports()

        if tar_project:
            self._tar_project_files()
        elif zip_project:
            self._zip_project_files()

        # Clean up the temporary store created for a dry run.
        if dry_run and os.path.exists(store_filename):
            os.remove(store_filename)
Ejemplo n.º 29
0
    def get_full_dataframe(self, element_class, prop, real_only=False):
        """Return a dataframe containing all data.  The dataframe is copied.

        Parameters
        ----------
        element_class : str
        prop : str
        real_only : bool
            If dtype of any column is complex, drop the imaginary component.

        Returns
        -------
        pd.DataFrame

        Raises
        ------
        InvalidParameter
            Raised if the property is not stored or element indices differ.

        """
        if prop not in self.list_element_properties(element_class):
            raise InvalidParameter(f"property {prop} is not stored")

        master_df = None
        expected_len = None
        for _, df in self.iterate_dataframes(element_class, prop,
                                             real_only=real_only):
            if master_df is None:
                # First element establishes the index length to match.
                master_df = df
                expected_len = len(df)
                continue
            if len(df) != expected_len:
                raise InvalidParameter(
                    "cannot create full dataframe when elements have different indices"
                )
            # These shared columns already exist on the first dataframe.
            for column in ("Frequency", "Simulation Mode"):
                if column in df.columns:
                    df.drop(column, axis=1, inplace=True)
            master_df = master_df.join(df)

        return master_df
Ejemplo n.º 30
0
def track_reg_control_tap_number_changes(
        reg_control, timestamp, step_number, options, last_value, count
    ):
    """Track cumulative tap-number changes for a RegControl element.

    Parameters
    ----------
    reg_control
        Element wrapper exposing .Name and a .dss interface.
    last_value : int | None
        Tap number from the previous step, or None on the first step.
    count : int
        Running total of tap changes.

    Returns
    -------
    tuple
        (current tap number, updated change count)

    Raises
    ------
    InvalidParameter
        Raised if the element is not a circuit element.

    """
    reg_control.dss.RegControls.Name(reg_control.Name)
    if reg_control.dss.CktElement.Name() != reg_control.dss.Element.Name():
        # Name is used as an attribute elsewhere in this function; calling
        # it here (as the original did) would itself raise TypeError while
        # formatting the message.
        raise InvalidParameter(
            f"Object is not a circuit element {reg_control.Name}"
        )
    tap_number = reg_control.dss.RegControls.TapNumber()
    if last_value is not None:
        count += abs(tap_number - last_value)
    logger.debug("%s changed count from %s to %s count=%s", reg_control.Name,
                 last_value, tap_number, count)
    return tap_number, count