def serialize(self, path):
    """Serialize a PyDssScenario to a directory.

    Parameters
    ----------
    path : str
        full path to scenario

    """
    # Create the scenario root plus every standard sub-directory.
    os.makedirs(path, exist_ok=True)
    for directory in self._SCENARIO_DIRECTORIES:
        os.makedirs(os.path.join(path, directory), exist_ok=True)

    # Dump controller, export, and visualization configs into their
    # respective sub-directories, one file per enum value.
    for ctrl_type, ctrl_config in self.controllers.items():
        dump_data(
            ctrl_config,
            os.path.join(path, "pyControllerList", filename_from_enum(ctrl_type)),
        )

    for export_mode, export_config in self.exports.items():
        dump_data(
            export_config,
            os.path.join(path, "ExportLists", filename_from_enum(export_mode)),
        )

    for vis_type, vis_config in self.visualizations.items():
        dump_data(
            vis_config,
            os.path.join(path, "pyPlotList", filename_from_enum(vis_type)),
        )

    # Copy the packaged default Monte Carlo and subscription settings.
    dump_data(
        load_data(DEFAULT_MONTE_CARLO_SETTINGS_FILE),
        os.path.join(path, "Monte_Carlo", MONTE_CARLO_SETTINGS_FILENAME),
    )
    dump_data(
        load_data(DEFAULT_SUBSCRIPTIONS_FILE),
        os.path.join(path, "ExportLists", SUBSCRIPTIONS_FILENAME),
    )
def __init__(self, name, controller_types=None, controllers=None,
             export_modes=None, exports=None, visualizations=None,
             post_process_infos=None, visualization_types=None):
    """Construct a PyDssScenario.

    Parameters
    ----------
    name : str
        scenario name
    controller_types : list | None
        ControllerType values; default config is loaded for each
    controllers : str | dict | None
        controller config filename or an already-loaded dict
    export_modes : list | None
        ExportMode values; default config is loaded for each
    exports : str | dict | None
        export config filename or an already-loaded dict
    visualizations : str | dict | None
        visualization config filename or an already-loaded dict
    post_process_infos : list | None
        entries forwarded to add_post_process
    visualization_types : list | None
        VisualizationType values; default config is loaded for each

    Raises
    ------
    InvalidParameter
        Raised if export_modes and exports are both set.

    """
    self.name = name
    self.post_process_infos = []

    if visualization_types is None and visualizations is None:
        self.visualizations = {
            x: self.load_visualization_config_from_type(x)
            for x in PyDssScenario.DEFAULT_VISUALIZATION_TYPES
        }
    elif visualization_types is not None:
        self.visualizations = {
            x: self.load_visualization_config_from_type(x)
            for x in visualization_types
        }
    elif isinstance(visualizations, str):
        basename = os.path.splitext(os.path.basename(visualizations))[0]
        visualization_type = VisualizationType(basename)
        # Bug fix: previously loaded data from `controllers` instead of the
        # `visualizations` filename (copy-paste from the controllers branch).
        self.visualizations = {visualization_type: load_data(visualizations)}
    else:
        assert isinstance(visualizations, dict)
        self.visualizations = visualizations

    if controller_types is None and controllers is None:
        self.controllers = {}
    elif controller_types is not None:
        self.controllers = {
            x: self.load_controller_config_from_type(x)
            for x in controller_types
        }
    elif isinstance(controllers, str):
        basename = os.path.splitext(os.path.basename(controllers))[0]
        controller_type = ControllerType(basename)
        self.controllers = {controller_type: load_data(controllers)}
    else:
        assert isinstance(controllers, dict)
        self.controllers = controllers

    if export_modes is not None and exports is not None:
        raise InvalidParameter(
            "export_modes and exports cannot both be set"
        )
    if export_modes is None and exports is None:
        mode = PyDssScenario.DEFAULT_EXPORT_MODE
        self.exports = {mode: self.load_export_config_from_mode(mode)}
    elif export_modes is not None:
        self.exports = {
            x: self.load_export_config_from_mode(x)
            for x in export_modes
        }
    elif isinstance(exports, str):
        mode = ExportMode(os.path.splitext(os.path.basename(exports))[0])
        self.exports = {mode: load_data(exports)}
    else:
        assert isinstance(exports, dict)
        self.exports = exports

    if post_process_infos is not None:
        for pp_info in post_process_infos:
            self.add_post_process(pp_info)
def _add_controller(self, controller_type, controller):
    """Register a controller definition after validating it.

    Parameters
    ----------
    controller_type : str
        controller category key in self._controllers
    controller : dict
        must contain "name" and "filename" keys

    Raises
    ------
    InvalidParameter
        Raised if the controller is already registered or its file is missing.

    """
    name = controller["name"]
    filename = controller["filename"]
    if self.is_controller_registered(controller_type, name):
        raise InvalidParameter(
            f"{controller_type} / {name} is already registered")
    if not os.path.exists(filename):
        # Bug fix: the f-string had no placeholder, so the error never said
        # which file was missing.
        raise InvalidParameter(f"{filename} does not exist.")

    # Make sure the file can be parsed.
    load_data(filename)
    self._controllers[controller_type][name] = controller
def __init__(self, Path):
    """Load dynamic-plot definitions from all TOML files in a directory.

    ``.xlsx`` files are converted to TOML first; Excel temporary lock files
    (``~$`` prefix) and non-TOML files are skipped.

    Parameters
    ----------
    Path : str
        directory containing plot definition files

    Raises
    ------
    InvalidParameter
        Raised if two plot definitions of the same type share a name.

    """
    self.pyPlots = {}
    # Removed two unused flag variables (found_config_file/found_excel_file)
    # that were assigned but never read.
    for filename in os.listdir(Path):
        pyPlotType, ext = os.path.splitext(filename)
        if filename.startswith('~$'):
            continue
        elif ext == '.xlsx':
            # NOTE(review): `filename` is relative to `Path`; this conversion
            # only works when the current directory is `Path` — confirm
            # whether the joined path should be passed instead (compare the
            # controller loader, which passes a full path).
            filename = convert_config_data_to_toml(filename)
        elif ext != ".toml":
            continue
        if pyPlotType not in self.pyPlots:
            self.pyPlots[pyPlotType] = {}
        filepath = os.path.join(Path, filename)
        # Bug fix: this assert was duplicated verbatim; one copy suffices.
        assert os.path.exists(filepath), \
            'path: "{}" does not exist!'.format(filepath)
        for name, plot in load_data(filepath).items():
            if name in self.pyPlots[pyPlotType]:
                raise InvalidParameter(
                    f"Multiple PyDSS dynamic plot definitions of the same type with the same name not allowed: "
                    f"{name} already exists for plot type {pyPlotType}")
            self.pyPlots[pyPlotType][name] = plot
def _generate_from_all_time_points(self):
    """Recompute per-scenario voltage metrics from the full per-time-point
    bus voltage dataframe stored in the results.

    Returns
    -------
    dict
        Maps scenario name to VoltageMetricsByBusTypeModel.

    """
    scenarios = {}
    for scenario in self._results.scenarios:
        filename = os.path.join(
            str(self._settings.project.active_project_path),
            "Exports",
            scenario.name,
            NODE_NAMES_BY_TYPE_FILENAME,
        )
        node_names_by_type = load_data(filename)
        # Node names must be unique within each category; duplicates would
        # make the column selection below ambiguous.
        assert len(set(node_names_by_type["primaries"])) == len(
            node_names_by_type["primaries"])
        assert len(set(node_names_by_type["secondaries"])) == len(
            node_names_by_type["secondaries"])
        df = scenario.get_full_dataframe("Buses", "puVmagAngle", mag_ang="mag")
        columns = []
        for column in df.columns:
            # Make the names match the results from NodeVoltageMetrics.
            # Strips the magnitude suffix and maps phase suffixes to .1/.2/.3.
            column = column.replace("__mag [pu]", "")
            column = column.replace("__A1", ".1")
            column = column.replace("__B1", ".2")
            column = column.replace("__C1", ".3")
            columns.append(column)
        df.columns = columns
        by_type = {}
        for node_type in ("primaries", "secondaries"):
            # Select only this category's columns and compute its metrics.
            df_by_type = df[node_names_by_type[node_type]]
            by_type[node_type] = self._gen_metrics(df_by_type)
        scenarios[scenario.name] = VoltageMetricsByBusTypeModel(**by_type)
    return scenarios
def load_simulation_settings(path: Path):
    """Load the simulation settings.

    Parameters
    ----------
    path : Path
        Path to simulation.toml

    Returns
    -------
    SimulationSettingsModel

    Raises
    ------
    ValueError
        Raised if any setting is invalid.

    """
    settings = SimulationSettingsModel(**load_data(path))
    enabled_reports = [x for x in settings.reports.types if x.enabled]
    if enabled_reports and not settings.exports.export_results:
        # Bug fix: the message referenced a non-existent setting name
        # "exported_results"; the setting actually checked is export_results.
        raise ValueError(
            "Reports are only supported with export_results = true.")
    return settings
def _check_sum_groups(self, sum_groups_file):
    """Load sum groups from a file and validate their element names.

    Does nothing when sum_groups_file is None.

    Raises
    ------
    InvalidConfiguration
        Raised if sum_groups is already set, the file is missing, or an
        element name appears in more than one group.

    """
    if sum_groups_file is None:
        return
    if self._sum_groups:
        raise InvalidConfiguration(
            f"Cannot set both sum_groups and sum_groups_file")
    # This path needs to be relative to the current directory, not the Exports.toml.
    # This might need to be changed.
    if not Path(sum_groups_file).exists():
        raise InvalidConfiguration(
            f"{sum_groups_file} does not exist. The path must be relative to the current directory."
        )
    self._sum_groups = load_data(sum_groups_file)["sum_groups"]
    self._sum_elements = True  # Ignore the user setting. This must be true.

    # Ensure that there are no duplicate names: compare the raw element
    # count against the size of the union of all group sets.
    total_count = 0
    seen = set()
    for group in self._sum_groups:
        total_count += len(group["elements"])
        group["elements"] = set(group["elements"])
        seen |= group["elements"]
    if total_count != len(seen):
        tag = f"{self.elem_class}/{self.name}"
        raise InvalidConfiguration(
            f"{tag} has duplicate element names in sum_groups")
def read_report(self, report_name):
    """Return the report data.

    Parameters
    ----------
    report_name : str

    Returns
    -------
    str

    """
    all_reports = Reports.get_all_reports()
    if report_name not in all_reports:
        raise InvalidParameter(f"invalid report name {report_name}")
    report_cls = all_reports[report_name]

    # This bypasses self._fs_intf because reports are always extracted.
    reports_dir = os.path.join(self._project.project_path, REPORTS_DIR)
    expected_base = os.path.splitext(report_cls.FILENAME)[0]
    for filename in os.listdir(reports_dir):
        base, ext = os.path.splitext(filename)
        if base != expected_base:
            continue
        path = os.path.join(reports_dir, filename)
        if ext in (".json", ".toml"):
            return load_data(path)
        if ext in (".csv", ".h5"):
            return read_dataframe(path)

    raise InvalidParameter(
        f"did not find report {report_name} in {reports_dir}")
def __init__(self, SimulationSettings, dssPaths, dssObjects, dssObjectsByClass):
    """Store simulation handles and load the Monte Carlo settings file.

    Parameters
    ----------
    SimulationSettings : dict
        simulation settings; "Logging" and "Project" sections are read here
    dssPaths : dict
        project paths; the "Import" entry anchors the settings file path
    dssObjects : dict
    dssObjectsByClass : dict

    Raises
    ------
    Exception
        Re-raises whatever load_data raises if the settings file cannot
        be read.

    """
    if SimulationSettings["Logging"]["Pre-configured logging"]:
        LoggerTag = __name__
    else:
        LoggerTag = getLoggerTag(SimulationSettings)
    self.pyLogger = logging.getLogger(LoggerTag)
    self.__dssPaths = dssPaths
    self.__dssObjects = dssObjects
    self.__Settings = SimulationSettings
    self.__dssObjectsByClass = dssObjectsByClass

    # Bug fix: MCfilePath was unbound in the handler if the first join
    # raised; initialize it before the try block.
    MCfilePath = None
    try:
        MCfile = os.path.join(
            self.__Settings['Project']['Active Scenario'],
            'Monte_Carlo',
            'MonteCarloSettings.toml')
        MCfilePath = os.path.join(self.__dssPaths['Import'], MCfile)
        self.pyLogger.info(
            'Reading monte carlo scenario settings file from ' + MCfilePath)
        self.__MCsettingsDict = utils.load_data(MCfilePath)
    except Exception:
        # Bug fix: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit before re-raising.
        self.pyLogger.error(
            'Failed to read Monte Carlo scenario generation file %s',
            MCfilePath)
        raise
def read_scenario_pv_profiles(self, scenario_name):
    """Return the PV profiles exported for a scenario."""
    exports_dir = os.path.join(self._project_dir, "Exports", scenario_name)
    return load_data(os.path.join(exports_dir, "pv_profiles.json"))
def read_scenario_export_metadata(self, scenario_name):
    """Return the export metadata recorded for a scenario."""
    exports_dir = os.path.join(self._project_dir, "Exports", scenario_name)
    return load_data(os.path.join(exports_dir, "metadata.json"))
def update_scenario_settings(self, simulation_config):
    """Merge user-supplied settings onto the packaged defaults and validate.

    Parameters
    ----------
    simulation_config : dict
        per-category overrides merged into the defaults

    Returns
    -------
    dict
        the merged, validated settings

    """
    defaults_file = os.path.join(
        os.path.dirname(PyDSS.__file__), 'defaults', 'simulation.toml')
    dss_args = load_data(defaults_file)
    for category, params in dss_args.items():
        if category in simulation_config:
            params.update(simulation_config[category])
    self.__validate_settings(dss_args)
    return dss_args
def __init__(self, filePath):
    """Build the subscription list from a settings file.

    Only elements whose data has "Subscribe" set truthy are kept.

    Raises
    ------
    FileNotFoundError
        Raised if filePath does not exist.

    """
    self.SubscriptionList = {}
    if not os.path.exists(filePath):
        raise FileNotFoundError(
            'path: "{}" does not exist!'.format(filePath))
    self.SubscriptionList = {
        elem: elem_data
        for elem, elem_data in load_data(filePath).items()
        if elem_data["Subscribe"]
    }
def __init__(self, project_dir, simulation_file):
    """Record project layout paths, load the simulation config, and
    validate the scenarios."""
    self._project_dir = project_dir
    self._scenarios_dir = os.path.join(project_dir, SCENARIOS)
    self._dss_dir = os.path.join(project_dir, "DSSfiles")
    config_file = os.path.join(project_dir, simulation_file)
    self._simulation_config = load_data(config_file)
    self._check_scenarios()
def read_export_config(self, scenario):
    """Return the scenario's export configs, keyed by ExportMode."""
    exports = {}
    config_dir = os.path.join(
        self._project_dir, SCENARIOS, scenario, "ExportLists")
    for entry in os.listdir(config_dir):
        base, ext = os.path.splitext(entry)
        if ext != ".toml":
            continue
        exports[ExportMode(base)] = load_data(os.path.join(config_dir, entry))
    return exports
def read_controller_config(self, scenario):
    """Return the scenario's controller configs, keyed by ControllerType."""
    controllers = {}
    config_dir = os.path.join(
        self._project_dir, SCENARIOS, scenario, "pyControllerList")
    for entry in os.listdir(config_dir):
        base, ext = os.path.splitext(entry)
        if ext != ".toml":
            continue
        controllers[ControllerType(base)] = load_data(
            os.path.join(config_dir, entry))
    return controllers
def read_visualization_config(self, scenario):
    """Return the scenario's visualization configs, keyed by VisualizationType."""
    visuals = {}
    config_dir = os.path.join(
        self._project_dir, SCENARIOS, scenario, "pyPlotList")
    for entry in os.listdir(config_dir):
        base, ext = os.path.splitext(entry)
        if ext != ".toml":
            continue
        visuals[VisualizationType(base)] = load_data(
            os.path.join(config_dir, entry))
    return visuals
def __init__(self, registry_filename=None):
    """Load the controller registry, merging user-registered controllers
    with the defaults and interactively repairing entries whose files no
    longer exist.

    Parameters
    ----------
    registry_filename : str | None
        Path to the registry file; defaults to _REGISTRY_FILENAME in the
        user's home directory.

    Raises
    ------
    InvalidConfiguration
        Raised if a default controller's file does not exist.

    """
    if registry_filename is None:
        self._registry_filename = Path.home() / self._REGISTRY_FILENAME
    else:
        self._registry_filename = Path(registry_filename)

    self._controllers = {x: {} for x in CONTROLLER_TYPES}
    data = copy.deepcopy(DEFAULT_REGISTRY)
    # Sanity-check that every packaged default controller file exists.
    for controller_type, controllers in DEFAULT_REGISTRY[
            "Controllers"].items():
        for controller in controllers:
            path = Path(controller["filename"])
            if not path.exists():
                raise InvalidConfiguration(
                    f"Default controller file={path} does not exist")

    # This is written to work with legacy versions where default controllers were
    # written to the registry.
    if self._registry_filename.exists():
        registered = load_data(self._registry_filename)
        to_delete = []
        for controller_type, controllers in registered[
                "Controllers"].items():
            for i, controller in enumerate(controllers):
                path = Path(controller["filename"])
                if not path.exists():
                    # Stale entry: ask the user whether to drop it; any
                    # answer other than "y" aborts the program.
                    name = controller["name"]
                    msg = f"The registry contains a controller with an invalid file. " \
                        f"Type={controller_type} name={name} file={path}.\nWould you like to " \
                        "delete it? (y/n) -> "
                    response = input(msg).lower()
                    if response == "y":
                        to_delete.append((controller_type, i))
                        continue
                    else:
                        logger.error(
                            "Exiting because the registry %s is invalid",
                            self._registry_filename)
                        sys.exit(1)
                # Defaults come from DEFAULT_REGISTRY; only carry over
                # user-registered controllers.
                if not self._is_default_controller(controller_type, controller["name"]):
                    data["Controllers"][controller_type].append(controller)
        if to_delete:
            # Pop from the end so earlier indexes remain valid.
            for ref in reversed(to_delete):
                registered["Controllers"][ref[0]].pop(ref[1])
            # Keep a backup of the original registry before rewriting it.
            backup = str(self._registry_filename) + ".bk"
            self._registry_filename.rename(backup)
            dump_data(registered, self._registry_filename, indent=2)
            logger.info("Fixed the registry and moved the original to %s",
                        backup)

    for controller_type, controllers in data["Controllers"].items():
        for controller in controllers:
            self._add_controller(controller_type, controller)
def _parse_file(filename):
    """Yield (element class, property definition dict) pairs parsed from
    the given export config file.

    List-valued entries are yielded as-is; dict-valued entries are
    flattened into per-property dicts carrying a "property" key.
    """
    for elem_class, prop_info in load_data(filename).items():
        if isinstance(prop_info, list):
            yield from ((elem_class, prop) for prop in prop_info)
        else:
            assert isinstance(prop_info, dict)
            for prop, values in prop_info.items():
                yield elem_class, {"property": prop, **values}
def test_pv_reports_per_element_per_time_point(cleanup_project):
    # Generates reports from data stored at every time point and then
    # use those to compare with the in-memory metrics.
    PyDssProject.run_project(
        PV_REPORTS_PROJECT_STORE_ALL_PATH,
        simulation_file=SIMULATION_SETTINGS_FILENAME,
    )
    # Baselines computed from the store-everything run above.
    baseline_thermal = SimulationThermalMetricsModel(**load_data(
        Path(PV_REPORTS_PROJECT_STORE_ALL_PATH) / "Reports" / "thermal_metrics.json"))
    baseline_voltage = SimulationVoltageMetricsModel(**load_data(
        Path(PV_REPORTS_PROJECT_STORE_ALL_PATH) / "Reports" / "voltage_metrics.json"))
    baseline_feeder_losses = SimulationFeederLossesMetricsModel(**load_data(
        Path(PV_REPORTS_PROJECT_STORE_ALL_PATH) / "Reports" / "feeder_losses.json"))

    # Re-run the project at every report granularity, comparing against the
    # baselines where the granularity makes them comparable.
    granularities = [x for x in ReportGranularity]
    for granularity in granularities:
        settings = load_data(BASE_FILENAME)
        settings["Reports"]["Granularity"] = granularity.value
        dump_data(settings, TEST_FILENAME)
        try:
            PyDssProject.run_project(
                PV_REPORTS_PROJECT_PATH,
                simulation_file=TEST_SIM_BASE_NAME,
            )
            if granularity == ReportGranularity.PER_ELEMENT_PER_TIME_POINT:
                verify_skip_night()
                assert verify_thermal_metrics(baseline_thermal)
                assert verify_voltage_metrics(baseline_voltage)
                assert verify_feeder_losses(baseline_feeder_losses)
            verify_pv_reports(granularity)
            verify_feeder_head_metrics()
        finally:
            # Always remove the generated settings file and run artifacts,
            # even when a verification fails.
            os.remove(TEST_FILENAME)
            for artifact in ARTIFACTS:
                if os.path.exists(artifact):
                    os.remove(artifact)
def verify_thermal_metrics(baseline_metrics):
    """Return True if every scenario's line loadings match the baseline."""
    filename = Path(
        PV_REPORTS_PROJECT_PATH) / "Reports" / "thermal_metrics.json"
    metrics = SimulationThermalMetricsModel(**load_data(filename))
    # Compare every scenario (no early exit, matching the original's
    # full-iteration behavior) before reporting the overall result.
    results = [
        compare_thermal_metrics(
            baseline_metrics.scenarios[scenario].line_loadings,
            metrics.scenarios[scenario].line_loadings,
        )
        for scenario in metrics.scenarios
    ]
    return all(results)
def verify_voltage_metrics(baseline_metrics):
    """Return True if every scenario's voltage metrics match the baseline."""
    filename = Path(
        PV_REPORTS_PROJECT_PATH) / "Reports" / "voltage_metrics.json"
    metrics = SimulationVoltageMetricsModel(**load_data(filename))
    # Compare every scenario (no early exit, matching the original's
    # full-iteration behavior) before reporting the overall result.
    results = [
        compare_voltage_metrics(
            baseline_metrics.scenarios[scenario],
            metrics.scenarios[scenario],
        )
        for scenario in metrics.scenarios
    ]
    return all(results)
def create_project(cls, path, name, scenarios, simulation_config=None, options=None,
                   simulation_file=SIMULATION_SETTINGS_FILENAME,
                   opendss_project_folder=None,
                   master_dss_file=OPENDSS_MASTER_FILENAME, force=False):
    """Create a new PyDssProject on the filesystem.

    Parameters
    ----------
    path : str
        path in which to create directories
    name : str
        project name
    scenarios : list
        list of PyDssScenario objects
    simulation_config : str
        simulation config file; if None, use default
    options : dict | None
        per-category overrides applied onto the loaded simulation config
    simulation_file : str
        simulation settings filename stored in the project
    opendss_project_folder : str | None
        forwarded to project.serialize
    master_dss_file : str
        recorded as the project's dss_file setting when truthy
    force : bool
        forwarded to create_simulation_settings; presumably overwrites an
        existing settings file — confirm against that function

    Returns
    -------
    PyDssProject

    """
    if simulation_config is None:
        scenario_names = [x.name for x in scenarios]
        simulation_config = create_simulation_settings(path, name, scenario_names, force=force)
    simulation_config = load_data(simulation_config)
    if options is not None:
        # Merge per-category so individual keys override rather than
        # replacing whole sections.
        for category, category_options in options.items():
            simulation_config[category].update(category_options)
    if master_dss_file:
        simulation_config["project"]["dss_file"] = master_dss_file
    simulation_config["project"]["project_path"] = path
    simulation_config["project"]["active_project"] = name
    settings = SimulationSettingsModel(**simulation_config)
    project = cls(
        path=path,
        name=name,
        scenarios=scenarios,
        settings=settings,
        simulation_file=simulation_file,
    )
    project.serialize(opendss_project_folder=opendss_project_folder)
    sc_names = project.list_scenario_names()
    logger.info("Created project=%s with scenarios=%s at %s",
                name, sc_names, path)
    return project
def _generate_from_in_memory_metrics(self):
    """Load each scenario's thermal metrics summary file written during
    the run and schedule those files for deletion."""
    scenarios = {}
    export_root = str(self._settings.project.active_project_path)
    for scenario in self._results.scenarios:
        filename = os.path.join(
            export_root, "Exports", scenario.name, self.FILENAME)
        scenarios[scenario.name] = ThermalMetricsSummaryModel(
            **load_data(filename))
        # We won't need this file after we write the consolidated file.
        self._files_to_delete.append(filename)
    return scenarios
def register(controller_type, filename):
    """Register a controller in the local registry.

    Parameters
    ----------
    controller_type : str
        must be one of CONTROLLER_TYPES
    filename : str
        controller config file; each top-level key is registered as a
        controller name

    """
    if controller_type not in CONTROLLER_TYPES:
        print(f"controller_type must be one of {CONTROLLER_TYPES}")
        sys.exit(1)
    if not os.path.exists(filename):
        # Bug fix: the f-string had no placeholder, so the error never said
        # which file was missing.
        print(f"{filename} does not exist")
        sys.exit(1)
    registry = Registry()
    for name in load_data(filename):
        data = {"name": name, "filename": filename}
        registry.register_controller(controller_type, data)
        print(f"Registered {controller_type} {name}")
def __init__(self, filePath):
    """Load controller publication definitions from a TOML file.

    If the TOML file is missing but a sibling .xlsx file exists, the
    Excel file is converted to TOML first.

    Raises
    ------
    FileNotFoundError
        Raised if the TOML file still does not exist.

    """
    self.pyControllers = {}
    self.publicationList = []

    xlsx_filename = os.path.splitext(filePath)[0] + '.xlsx'
    if not os.path.exists(filePath) and os.path.exists(xlsx_filename):
        convert_config_data_to_toml(xlsx_filename)
    if not os.path.exists(filePath):
        raise FileNotFoundError(
            'path: "{}" does not exist!'.format(filePath))

    for elem, elem_data in load_data(filePath).items():
        published = elem_data["Publish"]
        # Track all properties (published or not) per element.
        self.pyControllers[elem] = published[:] + elem_data["NoPublish"]
        self.publicationList.extend(
            f"{elem} {item}" for item in published)
def load_config(path):
    """Return a configuration from files.

    Parameters
    ----------
    path : str

    Returns
    -------
    dict

    """
    toml_files = [
        os.path.join(path, entry)
        for entry in os.listdir(path)
        if os.path.splitext(entry)[1] == ".toml"
    ]
    assert len(toml_files) == 1, "only 1 .toml file is currently supported"
    return load_data(toml_files[0])
def __init__(self, registry_filename=None):
    """Load the registry from disk, creating it with defaults when the
    file does not exist yet."""
    if registry_filename is None:
        self._registry_filename = os.path.join(
            str(pathlib.Path.home()), self._REGISTRY_FILENAME)
    else:
        self._registry_filename = registry_filename

    self._controllers = {ctype: {} for ctype in CONTROLLER_TYPES}
    if not os.path.exists(self._registry_filename):
        self.reset_defaults()
        return

    data = load_data(self._registry_filename)
    for controller_type, controllers in data["Controllers"].items():
        for controller in controllers:
            self._add_controller(controller_type, controller)
def __init__(self, project, scenario, inputs, dssInstance, dssSolver, dssObjects, dssObjectsByClass, simulationSettings, logger):
    """Construct the post-process stage.

    Loads the optional config file named by inputs.config_file (an empty
    string means no config), creates the scenario's output directory, and
    stores the handles used later by self._check_input_fields.

    Parameters
    ----------
    project
        provides get_post_process_directory(scenario_name)
    scenario
        scenario object; only .name is read here
    inputs
        provides .config_file, the path to this stage's config
    dssInstance
        OpenDSS interface handle, stored for subclasses
    dssSolver, dssObjects, dssObjectsByClass
        accepted for interface compatibility but not stored here
    simulationSettings
        stored as self.Settings
    logger : logging.Logger
        stored as self.logger

    """
    self.project = project
    self.scenario = scenario
    if inputs.config_file == "":
        self.config = {}
    else:
        self.config = load_data(inputs.config_file)
    # Output directory for this scenario's post-process results; created
    # eagerly so subclasses can write without checking.
    self.config["Outputs"] = project.get_post_process_directory(
        scenario.name)
    os.makedirs(self.config["Outputs"], exist_ok=True)
    self.Settings = simulationSettings
    self._dssInstance = dssInstance
    self.logger = logger
    self._check_input_fields()
def create_project(cls, path, name, scenarios, simulation_config=None, options=None,
                   simulation_file=SIMULATION_SETTINGS_FILENAME,
                   opendss_project_folder=None,
                   master_dss_file=OPENDSS_MASTER_FILENAME):
    """Create a new PyDssProject on the filesystem.

    Parameters
    ----------
    path : str
        path in which to create directories
    name : str
        project name
    scenarios : list
        list of PyDssScenario objects
    simulation_config : str
        simulation config file; if None, use default
    options : dict | None
        overrides merged into the simulation config at the top level
    simulation_file : str
        simulation settings filename stored in the project
    opendss_project_folder : str | None
        forwarded to project.serialize
    master_dss_file : str
        recorded as the project's "DSS File" setting when truthy

    Returns
    -------
    PyDssProject

    """
    if simulation_config is None:
        simulation_config = DEFAULT_SIMULATION_SETTINGS_FILE
    simulation_config = load_data(simulation_config)
    if options is not None:
        # NOTE: top-level update — a category present in `options` replaces
        # the whole section rather than merging per-key.
        simulation_config.update(options)
    if master_dss_file:
        simulation_config["Project"]["DSS File"] = master_dss_file
    simulation_config["Project"]["Project Path"] = path
    simulation_config["Project"]["Active Project"] = name
    project = cls(
        path=path,
        name=name,
        scenarios=scenarios,
        simulation_config=simulation_config,
        simulation_file=simulation_file,
    )
    project.serialize(opendss_project_folder=opendss_project_folder)
    sc_names = project.list_scenario_names()
    logger.info("Created project=%s with scenarios=%s at %s",
                name, sc_names, path)
    return project