Example #1
    def _export_feeder_head_info(self, metadata):
        """
        Gets feeder head information comprising:
        1- The name of the feeder head line
        2- The feeder head loading in per unit
        3- The feeder head load in (kW, kVar). Negative in case of power injection
        4- The reverse power flow flag. True if power is flowing back to the feeder head, False otherwise
        """

        dss = self._dss_instance
        if not "feeder_head_info_files" in metadata.keys():
            metadata["feeder_head_info_files"] = []

        df_dict = {
            "FeederHeadLine": self._find_feeder_head_line(),
            "FeederHeadLoading": self._get_feeder_head_loading(),
            "FeederHeadLoad": dss.Circuit.TotalPower(),
            "ReversePowerFlow": self._reverse_powerflow()
        }

        filename = "FeederHeadInfo"
        fname = filename + ".json"

        relpath = os.path.join(self._export_relative_dir, fname)
        filepath = os.path.join(self._export_dir, fname)
        dump_data(df_dict, filepath)
        metadata["feeder_head_info_files"].append(relpath)
        self._logger.info("Exported %s information to %s.", filename, filepath)
Example #2
File: pyDSS.py Project: jgu2/PyDSS
    def _dump_scenario_simulation_settings(self, dss_args):
        # Various settings may have been updated. Write the actual settings to a file.
        scenario_simulation_filename = os.path.join(
            dss_args["Project"]["Project Path"],
            dss_args["Project"]["Active Project"], "Scenarios",
            dss_args["Project"]["Active Scenario"], "simulation-run.toml")
        dump_data(dss_args, scenario_simulation_filename)
Example #3
    def _export_element_values(self, path, fmt, compress):
        elem_prop_nums = defaultdict(dict)
        for elem_class in self._elem_values_by_prop:
            for prop in self._elem_values_by_prop[elem_class]:
                dataset = self._group[f"{elem_class}/ElementProperties/{prop}"]
                for name in self._elem_values_by_prop[elem_class][prop]:
                    col_range = self._get_element_column_range(
                        elem_class, prop, name)
                    start = col_range[0]
                    length = col_range[1]
                    if length == 1:
                        # Scalar property: read the single value at the column offset.
                        val = dataset[:][0][start]
                    else:
                        # Vector property: slice the contiguous run of columns.
                        val = dataset[:][0][start:start + length]
                    if prop not in elem_prop_nums[elem_class]:
                        elem_prop_nums[elem_class][prop] = {}
                    elem_prop_nums[elem_class][prop][name] = val
        if elem_prop_nums:
            filename = os.path.join(path, "element_property_values.json")
            dump_data(elem_prop_nums,
                      filename,
                      indent=2,
                      default=make_json_serializable)

        logger.info("Exported data to %s", path)
Example #4
    def __init__(self, settings: SimulationSettingsModel, system_paths, dss_objects,
                 dss_objects_by_class, dss_buses, dss_solver, dss_command,
                 dss_instance):
        self._logger = logger
        self._dss_solver = dss_solver
        self._results = {}
        self._buses = dss_buses
        self._objects_by_element = dss_objects
        self._objects_by_class = dss_objects_by_class
        self.system_paths = system_paths
        self._element_metrics = {}  # (elem_class, prop_name) to OpenDssPropertyMetric
        self._summed_element_metrics = {}
        self._settings = settings
        self._cur_step = 0
        self._current_results = {}

        self._dss_command = dss_command
        self._start_day = dss_solver.StartDay
        self._end_day = dss_solver.EndDay
        self._time_dataset = None
        self._frequency_dataset = None
        self._mode_dataset = None
        self._simulation_mode = []
        self._hdf_store = None
        self._scenario = settings.project.active_scenario
        self._base_scenario = settings.project.active_scenario
        self._export_format = settings.exports.export_format
        self._export_compression = settings.exports.export_compression
        self._max_chunk_bytes = settings.exports.hdf_max_chunk_bytes
        self._export_dir = os.path.join(
            self.system_paths["Export"],
            settings.project.active_scenario,
        )
        # Use / because this is used in HDFStore
        self._export_relative_dir = "Exports/" + settings.project.active_scenario
        self._store_frequency = False
        self._store_mode = False
        if settings.frequency.enable_frequency_sweep:
            self._store_frequency = True
            self._store_mode = True

        pathlib.Path(self._export_dir).mkdir(parents=True, exist_ok=True)

        export_list_filename = os.path.join(
            system_paths["ExportLists"],
            "Exports.toml",
        )
        if not os.path.exists(export_list_filename):
            export_list_filename = os.path.join(
                system_paths["ExportLists"],
                "ExportMode-byClass.toml",
            )
        self._export_list = ExportListReader(export_list_filename)
        Reports.append_required_exports(self._export_list, settings)
        dump_data(
            self._export_list.serialize(),
            os.path.join(self._export_dir, "ExportsActual.toml"),
        )
        self._circuit_metrics = {}
        self._create_exports()
Example #5
    def serialize(self, path):
        """Serialize a PyDssScenario to a directory.

        Parameters
        ----------
        path : str
            full path to scenario

        """
        os.makedirs(path, exist_ok=True)
        for name in self._SCENARIO_DIRECTORIES:
            os.makedirs(os.path.join(path, name), exist_ok=True)

        for controller_type, controllers in self.controllers.items():
            filename = os.path.join(path, "pyControllerList",
                                    filename_from_enum(controller_type))
            dump_data(controllers, filename)

        for mode, exports in self.exports.items():
            dump_data(
                exports,
                os.path.join(path, "ExportLists", filename_from_enum(mode)))

        for visualization_type, visualizations in self.visualizations.items():
            filename = os.path.join(path, "pyPlotList",
                                    filename_from_enum(visualization_type))
            dump_data(visualizations, filename)

        dump_data(
            load_data(DEFAULT_MONTE_CARLO_SETTINGS_FILE),
            os.path.join(path, "Monte_Carlo", MONTE_CARLO_SETTINGS_FILENAME))

        dump_data(load_data(DEFAULT_SUBSCRIPTIONS_FILE),
                  os.path.join(path, "ExportLists", SUBSCRIPTIONS_FILENAME))
Example #6
    def _serialize_registry(self):
        data = {"Controllers": defaultdict(list)}
        for controller_type in self._controllers:
            for controller in self._controllers[controller_type].values():
                data["Controllers"][controller_type].append(controller)

        filename = self.registry_filename
        dump_data(data, filename, indent=2)
        logger.debug("Serialized data to %s", filename)
Example #7
def dump_settings(settings: SimulationSettingsModel, filename):
    """Dump the settings into a TOML file.

    Parameters
    ----------
    settings : SimulationSettingsModel
    filename : str

    """
    dump_data(settings.dict(by_alias=False), filename)
    print(f"Created {filename}")
Example #8
    def __init__(self, registry_filename=None):
        if registry_filename is None:
            self._registry_filename = Path.home() / self._REGISTRY_FILENAME
        else:
            self._registry_filename = Path(registry_filename)

        self._controllers = {x: {} for x in CONTROLLER_TYPES}
        data = copy.deepcopy(DEFAULT_REGISTRY)
        for controller_type, controllers in DEFAULT_REGISTRY["Controllers"].items():
            for controller in controllers:
                path = Path(controller["filename"])
                if not path.exists():
                    raise InvalidConfiguration(
                        f"Default controller file={path} does not exist")

        # This is written to work with legacy versions where default controllers were
        # written to the registry.
        if self._registry_filename.exists():
            registered = load_data(self._registry_filename)
            to_delete = []
            for controller_type, controllers in registered["Controllers"].items():
                for i, controller in enumerate(controllers):
                    path = Path(controller["filename"])
                    if not path.exists():
                        name = controller["name"]
                        msg = f"The registry contains a controller with an invalid file. " \
                        f"Type={controller_type} name={name} file={path}.\nWould you like to " \
                        "delete it? (y/n) -> "
                        response = input(msg).lower()
                        if response == "y":
                            to_delete.append((controller_type, i))
                            continue
                        else:
                            logger.error(
                                "Exiting because the registry %s is invalid",
                                self._registry_filename)
                            sys.exit(1)
                    if not self._is_default_controller(controller_type,
                                                       controller["name"]):
                        data["Controllers"][controller_type].append(controller)
            if to_delete:
                for ref in reversed(to_delete):
                    registered["Controllers"][ref[0]].pop(ref[1])
                backup = str(self._registry_filename) + ".bk"
                self._registry_filename.rename(backup)
                dump_data(registered, self._registry_filename, indent=2)
                logger.info("Fixed the registry and moved the original to %s",
                            backup)

        for controller_type, controllers in data["Controllers"].items():
            for controller in controllers:
                self._add_controller(controller_type, controller)
Example #9
def simulation_file(filenames, name=None):
    """Convert a legacy simulation TOML file to the new format."""
    for filename in filenames:
        dirname = os.path.dirname(filename)
        if name is None:
            new_filename = os.path.join(dirname, "Exports.toml")
        else:
            new_filename = name
        reader = ExportListReader(filename)
        dump_data(reader.serialize(), new_filename)
        print(f"Converted {filename} to {new_filename}")
Example #10
def run_project_with_custom_exports(path, scenario, sim_file, data):
    """Runs a project while overriding an export config file."""
    exports = f"{path}/Scenarios/{scenario}/ExportLists/Exports.toml"
    backup = exports + ".bk"
    shutil.copyfile(exports, backup)
    dump_data(data, exports)

    try:
        PyDssProject.run_project(path, simulation_file=sim_file)
    finally:
        os.remove(exports)
        os.rename(backup, exports)
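The copy-override-restore dance above generalizes nicely to a context manager. A sketch under the same assumptions; overridden_exports is hypothetical, not a PyDSS API:

import os
import shutil
from contextlib import contextmanager


@contextmanager
def overridden_exports(exports_path, data):
    """Temporarily replace an export config file, restoring it on exit."""
    backup = exports_path + ".bk"
    shutil.copyfile(exports_path, backup)
    dump_data(data, exports_path)
    try:
        yield exports_path
    finally:
        os.remove(exports_path)
        os.rename(backup, exports_path)


# Usage, mirroring run_project_with_custom_exports:
# with overridden_exports(exports, data):
#     PyDssProject.run_project(path, simulation_file=sim_file)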
Example #11
    def export_data(self, path=None, fmt="csv", compress=False):
        """Export data to path.

        Parameters
        ----------
        path : str
            Output directory; defaults to scenario exports path
        fmt : str
            File format type (csv, h5)
        compress : bool
            Compress data

        """
        if path is None:
            path = os.path.join(self._project_path, "Exports", self._name)
        os.makedirs(path, exist_ok=True)

        for elem_class in self.list_element_classes():
            for prop in self.list_element_properties(elem_class):
                try:
                    df = self.get_full_dataframe(elem_class, prop)
                except InvalidParameter:
                    logger.info(f"cannot create full dataframe for %s %s",
                                elem_class, prop)
                    self._export_filtered_dataframes(elem_class, prop, path,
                                                     fmt, compress)
                    continue
                base = "__".join([elem_class, prop])
                filename = os.path.join(path,
                                        base + "." + fmt.replace(".", ""))
                write_dataframe(df, filename, compress=compress)

        if self._elem_prop_nums:
            data = copy.deepcopy(self._elem_prop_nums)
            for elem_class, prop, name, val in self.iterate_element_property_numbers():
                # JSON lib cannot serialize complex numbers.
                if isinstance(val, np.ndarray):
                    new_val = []
                    convert_str = val.dtype == "complex"
                    for item in val:
                        if convert_str:
                            item = str(item)
                        new_val.append(item)
                    data[elem_class][prop][name] = new_val
                elif isinstance(val, complex):
                    data[elem_class][prop][name] = str(val)

            filename = os.path.join(path, "element_property_numbers.json")
            dump_data(data, filename, indent=2)

        logger.info("Exported data to %s", path)
Example #12
    def generate(self, output_dir):
        data = {"pv_systems": []}
        for name in self._pv_system_names:
            clipping = {
                "name": name,
                "pv_clipping": self.calculate_pv_clipping(name),
            }
            data["pv_systems"].append(clipping)

        filename = os.path.join(output_dir, self.FILENAME)
        dump_data(data, filename, indent=2)
        logger.info("Generated PV Clipping report %s", filename)
        return filename
Example #13
    def serialize(self, opendss_project_folder):
        """Create the project on the filesystem."""
        os.makedirs(self._project_dir, exist_ok=True)
        for name in PROJECT_DIRECTORIES:
            os.makedirs(os.path.join(self._project_dir, name), exist_ok=True)
        if opendss_project_folder:
            dest = os.path.join(self._project_dir, PROJECT_DIRECTORIES[0])
            print("OpenDSS project: ", opendss_project_folder)
            print("Destination: ", dest)
            copy_tree(opendss_project_folder, dest)
        self._serialize_scenarios()
        dump_data(
            self._simulation_config,
            os.path.join(self._project_dir, self._simulation_file),
        )
        logger.info("Initialized directories in %s", self._project_dir)
Example #14
    def generate(self, output_dir):
        data = {"scenarios": []}
        for scenario in self._results.scenarios:
            scenario_data = {"name": scenario.name, "capacitors": []}
            for capacitor in scenario.list_element_names("Capacitors"):
                change_count = int(
                    scenario.get_element_property_value(
                        "Capacitors", "TrackStateChanges", capacitor))
                changes = {"name": capacitor, "change_count": change_count}
                scenario_data["capacitors"].append(changes)
            data["scenarios"].append(scenario_data)

        filename = os.path.join(output_dir, self.FILENAME)
        dump_data(data, filename, indent=2)
        logger.info("Generated %s", filename)
        return filename
Example #15
    def _serialize_registry(self):
        data = {"Controllers": defaultdict(list)}
        has_entries = False
        for controller_type in self._controllers:
            for controller in self._controllers[controller_type].values():
                # Serializing default controllers is not necessary and causes problems
                # when the software is upgraded or installed to a new location.
                if not self._is_default_controller(controller_type,
                                                   controller["name"]):
                    data["Controllers"][controller_type].append(controller)
                    has_entries = True

        if has_entries:
            filename = self.registry_filename
            dump_data(data, filename, indent=2)
            logger.debug("Serialized data to %s", filename)
Example #16
    def generate(self, output_dir):
        data = {"scenarios": []}
        for scenario in self._results.scenarios:
            scenario_data = {"name": scenario.name, "reg_controls": []}
            for reg_control in scenario.list_element_names("RegControls"):
                change_count = int(
                    scenario.get_element_property_number(
                        "RegControls", "TrackTapNumberChanges", reg_control))
                changes = {"name": reg_control, "change_count": change_count}
                scenario_data["reg_controls"].append(changes)
            data["scenarios"].append(scenario_data)

        filename = os.path.join(output_dir, self.FILENAME)
        dump_data(data, filename, indent=2)
        logger.info("Generated %s", filename)
        return filename
Example #17
    def _export_pv_profiles(self):
        dss = self._dss_instance
        pv_systems = self._objects_by_class.get("PVSystems")
        if pv_systems is None:
            raise InvalidConfiguration("PVSystems are not exported")

        pv_infos = []
        profiles = set()
        for full_name, obj in pv_systems.items():
            profile_name = obj.GetParameter("yearly").lower()
            if profile_name != "":
                profiles.add(profile_name)
            pv_infos.append({
                "irradiance": obj.GetParameter("irradiance"),
                "name": full_name,
                "pmpp": obj.GetParameter("pmpp"),
                "load_shape_profile": profile_name,
            })

        pmult_sums = {}
        dss.LoadShape.First()
        sim_resolution = self._options["Project"]["Step resolution (sec)"]
        while True:
            name = dss.LoadShape.Name().lower()
            if name in profiles:
                sinterval = dss.LoadShape.SInterval()
                assert sim_resolution >= sinterval
                offset = int(sim_resolution / sinterval)
                pmult_sums[name] = sum(dss.LoadShape.PMult()[::offset])
            if dss.LoadShape.Next() == 0:
                break

        for pv_info in pv_infos:
            profile = pv_info["load_shape_profile"]
            if profile == "":
                pv_info["load_shape_pmult_sum"] = 0
            else:
                pv_info["load_shape_pmult_sum"] = pmult_sums[profile]

        data = {"pv_systems": pv_infos}
        filename = os.path.join(self._export_dir, "pv_profiles.json")
        dump_data(data, filename, indent=2)
        self._logger.info("Exported PV profile information to %s", filename)
Example #18
    def ExportResults(self):
        metadata = {
            "event_log": None,
            "element_info_files": [],
        }

        if self._settings.exports.export_event_log:
            self._export_event_log(metadata)
        if self._settings.exports.export_elements:
            self._export_elements(metadata, set(self._settings.exports.export_element_types))
            self._export_feeder_head_info(metadata)
        if self._settings.exports.export_pv_profiles:
            self._export_pv_profiles()
        if self._settings.exports.export_node_names_by_type:
            self._export_node_names_by_type()

        filename = os.path.join(self._export_dir, self.METADATA_FILENAME)
        dump_data(metadata, filename, indent=4)
        self._logger.info("Exported metadata to %s", filename)
        self._hdf_store = None
Example #19
def convert_config_data_to_toml(filename, name=None):
    """Converts an Excel config file to TOML.

    Parameters
    ----------
    filename : str
    name : str
        If not None, use this name instead of an auto-generated name.

    """
    dirname = os.path.dirname(filename)
    basename = os.path.splitext(os.path.basename(filename))[0]
    config_type = _get_config_type(basename)
    data = config_type["convert"](filename)
    if name is None:
        new_filename = os.path.join(dirname, basename + ".toml")
    else:
        new_filename = name
    dump_data(data, new_filename)
    logger.info("Converted %s to %s", filename, new_filename)
    return new_filename
Example #20
def test_pv_reports_per_element_per_time_point(cleanup_project):
    # Generates reports from data stored at every time point and then
    # uses those to compare with the in-memory metrics.
    PyDssProject.run_project(
        PV_REPORTS_PROJECT_STORE_ALL_PATH,
        simulation_file=SIMULATION_SETTINGS_FILENAME,
    )

    baseline_thermal = SimulationThermalMetricsModel(**load_data(
        Path(PV_REPORTS_PROJECT_STORE_ALL_PATH) / "Reports" /
        "thermal_metrics.json"))
    baseline_voltage = SimulationVoltageMetricsModel(**load_data(
        Path(PV_REPORTS_PROJECT_STORE_ALL_PATH) / "Reports" /
        "voltage_metrics.json"))
    baseline_feeder_losses = SimulationFeederLossesMetricsModel(**load_data(
        Path(PV_REPORTS_PROJECT_STORE_ALL_PATH) / "Reports" /
        "feeder_losses.json"))

    granularities = [x for x in ReportGranularity]
    for granularity in granularities:
        settings = load_data(BASE_FILENAME)
        settings["Reports"]["Granularity"] = granularity.value
        dump_data(settings, TEST_FILENAME)
        try:
            PyDssProject.run_project(
                PV_REPORTS_PROJECT_PATH,
                simulation_file=TEST_SIM_BASE_NAME,
            )
            if granularity == ReportGranularity.PER_ELEMENT_PER_TIME_POINT:
                verify_skip_night()
                assert verify_thermal_metrics(baseline_thermal)
                assert verify_voltage_metrics(baseline_voltage)
                assert verify_feeder_losses(baseline_feeder_losses)
            verify_pv_reports(granularity)
            verify_feeder_head_metrics()
        finally:
            os.remove(TEST_FILENAME)
            for artifact in ARTIFACTS:
                if os.path.exists(artifact):
                    os.remove(artifact)
Example #21
    def ExportResults(self, fileprefix=""):
        self.FlushData()
        for element in self._elements:
            element.export_change_counts()
            element.export_sums()

        metadata = {
            "event_log": None,
            "element_info_files": [],
        }

        if self._options["Exports"]["Export Event Log"]:
            self._export_event_log(metadata)
        if self._options["Exports"]["Export Elements"]:
            self._export_elements(metadata)
        if self._options["Exports"]["Export PV Profiles"]:
            self._export_pv_profiles()

        filename = os.path.join(self._export_dir, self.METADATA_FILENAME)
        dump_data(metadata, filename, indent=4)
        self._logger.info("Exported metadata to %s", filename)
        self._hdf_store = None
Example #22
    def _export_node_names_by_type(self):
        data = get_node_names_by_type()
        filename = os.path.join(self._export_dir, NODE_NAMES_BY_TYPE_FILENAME)
        dump_data(data, filename, indent=2)
        self._logger.info("Exported node names by type to %s", filename)
Example #23
def get_snapshot_timepoint(settings: SimulationSettingsModel,
                           mode: SnapshotTimePointSelectionMode):
    pv_systems = dss.PVsystems.AllNames()
    if not pv_systems:
        logger.info("No PVSystems are present.")
        if mode != SnapshotTimePointSelectionMode.MAX_LOAD:
            mode = SnapshotTimePointSelectionMode.MAX_LOAD
            logger.info("Changed mode to %s",
                        SnapshotTimePointSelectionMode.MAX_LOAD.value)
    if mode == SnapshotTimePointSelectionMode.MAX_LOAD:
        column = "Max Load"
    elif mode == SnapshotTimePointSelectionMode.MAX_PV_LOAD_RATIO:
        column = "Max PV to Load Ratio"
    elif mode == SnapshotTimePointSelectionMode.DAYTIME_MIN_LOAD:
        column = "Min Daytime Load"
    elif mode == SnapshotTimePointSelectionMode.MAX_PV_MINUS_LOAD:
        column = "Max PV minus Load"
    else:
        assert False, f"{mode} is not supported"

    temp_filename = settings.project.project_path / settings.project.active_project / "Exports" / ".snapshot_time_points.json"
    final_filename = settings.project.project_path / settings.project.active_project / "Exports" / "snapshot_time_points.json"
    if temp_filename.exists():
        timepoints = pd.read_json(temp_filename)
        if settings.project.active_scenario == settings.project.scenarios[-1].name:
            os.rename(temp_filename, final_filename)
        return pd.to_datetime(timepoints[column][0]).to_pydatetime()
    pv_generation_hours = {'start_time': '8:00', 'end_time': '17:00'}
    aggregate_profiles = pd.DataFrame(columns=['Load', 'PV'])
    pv_shapes = {}
    for pv_name in pv_systems:
        dss.PVsystems.Name(pv_name)
        pmpp = float(dss.Properties.Value('Pmpp'))
        profile_name = dss.Properties.Value('yearly')
        dss.LoadShape.Name(profile_name)
        if profile_name not in pv_shapes:
            pv_shapes[profile_name] = create_loadshape_pmult_dataframe_for_simulation(settings)
        if len(aggregate_profiles) == 0:
            aggregate_profiles['PV'] = (pv_shapes[profile_name] * pmpp)[0]
            aggregate_profiles = aggregate_profiles.replace(np.nan, 0)
        else:
            aggregate_profiles['PV'] = aggregate_profiles['PV'] + (
                pv_shapes[profile_name] * pmpp)[0]
    del pv_shapes
    loads = dss.Loads.AllNames()
    if not loads:
        logger.info("No Loads are present")
    load_shapes = {}
    for load_name in loads:
        dss.Loads.Name(load_name)
        kw = float(dss.Properties.Value('kW'))
        profile_name = dss.Properties.Value('yearly')
        dss.LoadShape.Name(profile_name)
        if profile_name not in load_shapes:
            load_shapes[profile_name] = create_loadshape_pmult_dataframe_for_simulation(settings)
        if len(aggregate_profiles) == 0:
            aggregate_profiles['Load'] = (load_shapes[profile_name] * kw)[0]
        else:
            aggregate_profiles['Load'] = aggregate_profiles['Load'] + (
                load_shapes[profile_name] * kw)[0]
    del load_shapes
    if pv_systems:
        aggregate_profiles['PV to Load Ratio'] = (
            aggregate_profiles['PV'] / aggregate_profiles['Load'])
        aggregate_profiles['PV minus Load'] = (
            aggregate_profiles['PV'] - aggregate_profiles['Load'])

    timepoints = pd.DataFrame(columns=['Timepoints'])
    timepoints.loc['Max Load'] = aggregate_profiles['Load'].idxmax()
    if pv_systems:
        timepoints.loc['Max PV to Load Ratio'] = aggregate_profiles.between_time(
            pv_generation_hours['start_time'],
            pv_generation_hours['end_time'])['PV to Load Ratio'].idxmax()
        timepoints.loc['Max PV minus Load'] = aggregate_profiles.between_time(
            pv_generation_hours['start_time'],
            pv_generation_hours['end_time'])['PV minus Load'].idxmax()
        timepoints.loc['Max PV'] = aggregate_profiles.between_time(
            pv_generation_hours['start_time'],
            pv_generation_hours['end_time'])['PV'].idxmax()
    timepoints.loc['Min Load'] = aggregate_profiles['Load'].idxmin()
    timepoints.loc['Min Daytime Load'] = aggregate_profiles.between_time(
        pv_generation_hours['start_time'],
        pv_generation_hours['end_time'])['Load'].idxmin()
    logger.info("Time points: %s",
                {k: str(v)
                 for k, v in timepoints.to_records()})
    dump_data(timepoints.astype(str).to_dict(orient='index'),
              temp_filename,
              indent=2)
    if settings.project.active_scenario == settings.project.scenarios[-1].name:
        os.rename(temp_filename, final_filename)
    return timepoints.loc[column][0].to_pydatetime()
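A hedged usage sketch for the function above: pick the daytime maximum PV-to-load time point for a snapshot run. The enum values come from the code itself; the surrounding settings object is assumed to be loaded already:

timepoint = get_snapshot_timepoint(
    settings, SnapshotTimePointSelectionMode.MAX_PV_LOAD_RATIO)
print(f"Solving snapshot at {timepoint.isoformat()}")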
Example #24
def update_pydss_controllers(project_path, scenario, controller_type,
                             controller, dss_file):
    """Update a scenario's controllers from an OpenDSS file.

    Parameters
    ----------
    project_path : str
        PyDSS project path.
    scenario : str
        PyDSS scenario name in project.
    controller_type : str
        A type of PyDSS controller
    controller : str
        The controller name
    dss_file : str
        A DSS file path
    """
    if controller_type not in READ_CONTROLLER_FUNCTIONS:
        supported_types = list(READ_CONTROLLER_FUNCTIONS.keys())
        print(f"Currently only {supported_types} types are supported")
        sys.exit(1)

    sim_file = os.path.join(project_path, SIMULATION_SETTINGS_FILENAME)
    config = load_data(sim_file)
    if not config["Project"].get("Use Controller Registry", False):
        print(f"'Use Controller Registry' must be set to true in {sim_file}")
        sys.exit(1)

    registry = Registry()
    if not registry.is_controller_registered(controller_type, controller):
        print(f"{controller_type} / {controller} is not registered")
        sys.exit(1)

    data = {}
    filename = f"{project_path}/Scenarios/{scenario}/pyControllerList/{controller_type}.toml"
    if os.path.exists(filename):
        data = load_data(filename)
        for val in data.values():
            if not isinstance(val, list):
                print(f"{filename} has an invalid format")
                sys.exit(1)

    element_names = READ_CONTROLLER_FUNCTIONS[controller_type](dss_file)
    num_added = 0
    if controller in data:
        existing = set(data[controller])
        final = list(existing.union(set(element_names)))
        data[controller] = final
        num_added = len(final) - len(existing)
    else:
        data[controller] = element_names
        num_added = len(element_names)

    # Remove element_names from any other controllers.
    set_names = set(element_names)
    for _controller, values in data.items():
        if _controller != controller:
            # set.difference_update mutates in place and returns None;
            # difference() returns the new set we actually want to store.
            data[_controller] = list(set(values).difference(set_names))

    dump_data(data, filename)
    print(f"Added {num_added} names to {filename}")
Example #25
    def _export_json_report(self, data, output_dir, filename):
        """Export report to a JSON file."""
        filename = os.path.join(output_dir, filename)
        dump_data(data, filename, indent=2, default=make_json_serializable)
        logger.info("Generated %s", filename)
Example #26
    def _export_summed_element_values(self, path, fmt, compress):
        filename = os.path.join(path, "summed_element_property_values.json")
        dump_data(self._summed_elem_props,
                  filename,
                  default=make_json_serializable)
Example #27
    def _export_pv_profiles(self):
        granularity = self._settings.reports.granularity
        pv_systems = self._objects_by_class.get("PVSystems")
        if pv_systems is None:
            logger.info("No PVSystems are present")
            return

        pv_infos = []
        profiles = set()
        for full_name, obj in pv_systems.items():
            profile_name = obj.GetParameter("yearly").lower()
            if profile_name != "":
                profiles.add(profile_name)
            pv_infos.append({
                "irradiance": obj.GetParameter("irradiance"),
                "name": full_name,
                "pmpp": obj.GetParameter("pmpp"),
                "load_shape_profile": profile_name,
            })

        pmult_sums = {}
        if dss.LoadShape.First() == 0:
            self._logger.warning("There are no load shapes.")
            return

        sim_resolution = self._settings.project.step_resolution_sec
        per_time_point = (
            ReportGranularity.PER_ELEMENT_PER_TIME_POINT,
            ReportGranularity.ALL_ELEMENTS_PER_TIME_POINT,
        )
        load_shape_data = {}
        while True:
            name = dss.LoadShape.Name().lower()
            if name in profiles:
                sinterval = dss.LoadShape.SInterval()
                assert sim_resolution >= sinterval, f"{sim_resolution} >= {sinterval}"
                df = create_loadshape_pmult_dataframe_for_simulation(self._settings)
                sum_values = df.iloc[:, 0].sum()
                pmult_sums[name] = sum_values
                if granularity in per_time_point:
                    load_shape_data[name] = df.iloc[:, 0].values
            if dss.LoadShape.Next() == 0:
                break

        if load_shape_data and granularity in per_time_point:
            filename = os.path.join(self._export_dir, PV_LOAD_SHAPE_FILENAME)
            index = create_datetime_index_from_settings(self._settings)
            df = pd.DataFrame(load_shape_data, index=index)
            write_dataframe(df, filename, compress=True)

        for pv_info in pv_infos:
            profile = pv_info["load_shape_profile"]
            if profile == "":
                pv_info["load_shape_pmult_sum"] = 0
            else:
                pv_info["load_shape_pmult_sum"] = pmult_sums[profile]

        data = {"pv_systems": pv_infos}
        filename = os.path.join(self._export_dir, PV_PROFILES_FILENAME)
        dump_data(data, filename, indent=2)
        self._logger.info("Exported PV profile information to %s", filename)