def test_opendss_to_json():
    """Test the JSON writer with OpenDSS models as input."""
    from ditto.readers.opendss.read import Reader
    from ditto.store import Store
    from ditto.writers.json.write import Writer

    opendss_models = [
        f
        for f in os.listdir(
            os.path.join(current_directory, "data/small_cases/opendss/"))
        if not f.startswith(".")
    ]
    for model in opendss_models:
        m = Store()
        r = Reader(
            master_file=os.path.join(
                current_directory,
                "data/small_cases/opendss/{model}/master.dss".format(
                    model=model),
            ),
            buscoordinates_file=os.path.join(
                current_directory,
                "data/small_cases/opendss/{model}/buscoord.dss".format(
                    model=model),
            ),
        )
        r.parse(m)
        m.set_names()
        output_path = tempfile.TemporaryDirectory()
        w = Writer(output_path=output_path.name)
        w.write(m)

def apply(cls, stack, model, sub_internals):
    reduced_model = model
    modifier = Modifier()
    csv_reader = CsvReader()

    # First pass: collect the internal lines, nodes, and transformers of each
    # substation and delete them from the model.
    for sub in os.listdir(sub_internals):
        to_delete = Store()
        to_delete2 = Store()
        to_delete3 = Store()

        sub_lines = os.path.join(sub_internals, sub, 'internals_lines.csv')
        csv_reader.parse(to_delete, sub_lines)

        sub_nodes = os.path.join(sub_internals, sub, 'internals_nodes.csv')
        csv_reader.parse(to_delete2, sub_nodes)
        to_delete = modifier.add(to_delete, to_delete2)

        sub_transformers = os.path.join(sub_internals, sub, 'internals_transformers.csv')
        csv_reader.parse(to_delete3, sub_transformers)
        to_delete = modifier.add(to_delete, to_delete3)

        reduced_model = modifier.delete(reduced_model, to_delete)

    # Second pass: parse the detailed OpenDSS model of each substation and add
    # it back into the reduced model.
    substation = Store()
    final_model = reduced_model
    for sub in os.listdir(sub_internals):
        odss_reader = OpenDSSReader()
        sub_master = os.path.join(sub_internals, sub, 'master.dss')
        odss_reader.build_opendssdirect(sub_master)
        sub_bus_coords = os.path.join(sub_internals, sub, 'Buscoords.dss')
        odss_reader.set_dss_file_names({'Nodes': sub_bus_coords})
        odss_reader.parse(substation, verbose=True)
        final_model = modifier.add(final_model, substation)

    return final_model

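# Hedged usage sketch (illustrative only; the layer class name `AddSubstations`
# and the directory path are assumptions). Each subfolder of `sub_internals` is
# expected to contain internals_lines.csv, internals_nodes.csv,
# internals_transformers.csv, plus master.dss and Buscoords.dss for the
# detailed substation model, as read in the code above.
#
#   model = AddSubstations.apply(stack, model, 'layers/substation_internals/')
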
def apply(cls, stack, *args, **kwargs):
    if 'base_dir' in kwargs:
        base_dir = kwargs['base_dir']
    else:
        base_dir = './'

    #if not os.path.exists(os.path.join(base_dir,cyme_location)):
    #    raise ValueError("No folder exists at {}".format(os.path.join(base_dir,cyme_location)))

    if 'network_filename' in kwargs:
        network_filename = kwargs['network_filename']
    else:
        network_filename = 'network.txt'  # Default

    if 'equipment_filename' in kwargs:
        equipment_filename = kwargs['equipment_filename']
    else:
        equipment_filename = 'equipment.txt'  # Default

    if 'load_filename' in kwargs:
        load_filename = kwargs['load_filename']
    else:
        load_filename = 'load.txt'  # Default

    base_model = Store()
    reader = CymeReader(data_folder_path=base_dir,
                        network_filename=network_filename,
                        equipment_filename=equipment_filename,
                        load_filename=load_filename)
    reader.parse(base_model)
    base_model.set_names()
    stack.model = base_model
    return True

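# Hedged usage sketch (illustrative only; the layer class name `CymeReadLayer`,
# the `Stack` object, and the data path are assumptions): read a CYME model
# into the stack using the default ASCII file names handled above.
#
#   stack = Stack()
#   CymeReadLayer.apply(stack,
#                       base_dir='data/small_cases/cyme/ieee_13node',
#                       network_filename='network.txt',
#                       equipment_filename='equipment.txt',
#                       load_filename='load.txt')
#   model = stack.model  # the parsed ditto Store
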
def test_opendss_to_gridlabd():
    '''Test the OpenDSS to GridlabD conversion.'''
    from ditto.readers.opendss.read import Reader
    from ditto.store import Store
    from ditto.writers.gridlabd.write import Writer

    opendss_models = [
        f
        for f in os.listdir(
            os.path.join(current_directory, 'data/small_cases/opendss/'))
        if not f.startswith('.')
    ]
    for model in opendss_models:
        m = Store()
        r = Reader(
            master_file=os.path.join(
                current_directory,
                'data/small_cases/opendss/{model}/master.dss'.format(model=model)),
            buscoordinates_file=os.path.join(
                current_directory,
                'data/small_cases/opendss/{model}/buscoord.dss'.format(model=model)))
        r.parse(m)
        m.set_names()
        #TODO: Log properly
        print('>OpenDSS model {model} read...'.format(model=model))
        t = tempfile.TemporaryDirectory()
        w = Writer(output_path=t.name)
        w.write(m)
        #TODO: Log properly
        print('>...and written to GridLabD.\n')

def test_opendss_to_ephasor():
    '''Test the OpenDSS to Ephasor conversion.'''
    from ditto.readers.opendss.read import Reader
    from ditto.store import Store
    from ditto.writers.ephasor.write import Writer

    opendss_models = [
        f
        for f in os.listdir(
            os.path.join(current_directory, 'data/small_cases/opendss/'))
        if not f.startswith('.')
    ]
    for model in opendss_models:
        m = Store()
        r = Reader(
            master_file=os.path.join(
                current_directory,
                'data/small_cases/opendss/{model}/master.dss'.format(model=model)),
            buscoordinates_file=os.path.join(
                current_directory,
                'data/small_cases/opendss/{model}/buscoord.dss'.format(model=model)))
        r.parse(m)
        m.set_names()
        m.build_networkx()
        m.direct_from_source()
        m.set_node_voltages()
        #TODO: Log properly
        print('>OpenDSS model {model} read...'.format(model=model))
        output_path = tempfile.TemporaryDirectory()
        # Pass the directory's path, not the TemporaryDirectory object itself
        w = Writer(output_path=output_path.name)
        w.write(m)
        #TODO: Log properly
        print('>...and written to Ephasor.\n')

def test_remove_opendss_default_values():
    m = Store()
    r = Reader(
        master_file=os.path.join(current_directory, "test_default_values.dss"),
        remove_opendss_default_values_flag=True,
    )
    r.parse(m)
    m.set_names()

    assert m["line1"].faultrate is None
    assert m["line1"].impedance_matrix is None
    assert m["line1"].capacitance_matrix is None

    assert m["cap1"].connection_type is None
    assert m["cap1"].low is None
    assert m["cap1"].high is None
    assert m["cap1"].delay is None
    assert m["cap1"].pt_ratio is None
    assert m["cap1"].ct_ratio is None
    assert m["cap1"].pt_phase is None

    assert m["reg1"].reactances is None
    assert m["regulator_reg1"].ct_prim is None
    assert m["regulator_reg1"].delay is None
    assert m["regulator_reg1"].highstep is None
    assert m["regulator_reg1"].pt_ratio is None
    assert m["regulator_reg1"].bandwidth is None
    assert m["regulator_reg1"].bandcenter is None

    assert m["load_load1"].connection_type is None
    assert m["load_load1"].vmin is None
    assert m["load_load1"].vmax is None

def test_nodes():
    from ditto.store import Store
    from ditto.readers.opendss.read import Reader

    # test on the test_nodes.dss
    m = Store()
    r = Reader(
        master_file=os.path.join(current_directory, "test_nodes.dss"),
        buscoordinates_file=os.path.join(current_directory, "buscoord.dss"),
    )
    r.parse(m)
    m.set_names()

    assert m["bus1"].name == "bus1"
    assert m["bus1"].nominal_voltage is None
    assert m["bus1"].positions[0].long == float(300)
    assert m["bus1"].positions[0].lat == float(400)
    assert m["bus1"].positions[0].elevation == 0
    assert m["bus1"].feeder_name == "sourcebus_src"

    assert m["sourcebus"].name == "sourcebus"
    assert m["sourcebus"].nominal_voltage is None
    assert m["sourcebus"].positions[0].long == float(1674346.56814483)
    assert m["sourcebus"].positions[0].lat == float(12272927.0644858)
    assert m["sourcebus"].positions[0].elevation == 0
    assert m["sourcebus"].feeder_name == "sourcebus_src"

    assert m["b1"].name == "b1"
    assert m["b1"].nominal_voltage is None
    assert m["b1"].positions[0].long == float(1578139)
    assert m["b1"].positions[0].lat == float(14291312)
    assert m["b1"].positions[0].elevation == 0
    assert m["b1"].feeder_name == "sourcebus_src"

def test_load_p_and_q():
    m = Store()
    r = Reader(master_file=os.path.join(current_directory, "test_load_p_and_q.dss"))
    r.parse(m)
    m.set_names()

    # P and Q values should be equally divided across phase loads.
    # Here we sum P and Q and check that the obtained values match the values in the DSS file.
    precision = 0.001

    assert len(m["load_load1"].phase_loads) == 3  # Load1 is a three phase load
    assert sum(
        [phase_load.p for phase_load in m["load_load1"].phase_loads]
    ) == pytest.approx(5400 * 10 ** 3, precision)
    assert sum(
        [phase_load.q for phase_load in m["load_load1"].phase_loads]
    ) == pytest.approx(4285 * 10 ** 3, precision)

    assert len(m["load_load2"].phase_loads) == 3  # Load2 is a three phase load
    assert sum(
        [phase_load.p for phase_load in m["load_load2"].phase_loads]
    ) == pytest.approx(3466 * 10 ** 3, precision)
    assert sum(
        [phase_load.q for phase_load in m["load_load2"].phase_loads]
    ) == pytest.approx(3466.0 * math.sqrt(1.0 / 0.9 ** 2 - 1) * 10 ** 3, precision)

    assert len(m["load_load3"].phase_loads) == 2  # Load3 is a two phase load
    assert sum(
        [phase_load.p for phase_load in m["load_load3"].phase_loads]
    ) == pytest.approx(1600 * 10 ** 3, precision)
    assert sum(
        [phase_load.q for phase_load in m["load_load3"].phase_loads]
    ) == pytest.approx(980 * 10 ** 3, precision)

    assert len(m["load_load4"].phase_loads) == 2  # Load4 is a two phase load
    assert sum(
        [phase_load.p for phase_load in m["load_load4"].phase_loads]
    ) == pytest.approx(1555 * 10 ** 3, precision)
    assert sum(
        [phase_load.q for phase_load in m["load_load4"].phase_loads]
    ) == pytest.approx(1555.0 * math.sqrt(1.0 / 0.95 ** 2 - 1) * 10 ** 3, precision)

    assert len(m["load_load5"].phase_loads) == 1  # Load5 is a one phase load
    assert sum(
        [phase_load.p for phase_load in m["load_load5"].phase_loads]
    ) == pytest.approx(650 * 10 ** 3, precision)
    assert sum(
        [phase_load.q for phase_load in m["load_load5"].phase_loads]
    ) == pytest.approx(500.5 * 10 ** 3, precision)

    assert len(m["load_load6"].phase_loads) == 1  # Load6 is a one phase load
    assert sum(
        [phase_load.p for phase_load in m["load_load6"].phase_loads]
    ) == pytest.approx(623.21 * 10 ** 3, precision)
    assert sum(
        [phase_load.q for phase_load in m["load_load6"].phase_loads]
    ) == pytest.approx(623.21 * math.sqrt(1.0 / 0.85 ** 2 - 1) * 10 ** 3, precision)

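# Worked example of the power-factor arithmetic used above (illustrative only):
# when a DSS load is defined by kW and power factor pf, apparent power is
# S = P / pf and reactive power is Q = sqrt(S^2 - P^2) = P * sqrt(1/pf^2 - 1).
# For load2 (P = 3466 kW, pf = 0.9):
#   Q = 3466 * sqrt(1/0.81 - 1) ≈ 3466 * 0.4843 ≈ 1678.7 kvar
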
def test_metric_extraction():
    """
    This test reads all small OpenDSS test cases, sets the nominal voltages
    using a system_structure_modifier object, and computes all metrics using a
    network analyzer object. Finally, it exports the metrics to Excel and JSON
    formats.
    """
    from ditto.readers.opendss.read import Reader
    from ditto.store import Store
    from ditto.modify.system_structure import system_structure_modifier
    from ditto.metrics.network_analysis import NetworkAnalyzer as network_analyzer

    opendss_models = [
        f
        for f in os.listdir(
            os.path.join(current_directory, "data/small_cases/opendss/"))
        if not f.startswith(".")
    ]
    opendss_models.remove("storage_test")
    for model in opendss_models:
        m = Store()
        r = Reader(
            master_file=os.path.join(
                current_directory,
                "data/small_cases/opendss/{model}/master.dss".format(
                    model=model),
            ),
            buscoordinates_file=os.path.join(
                current_directory,
                "data/small_cases/opendss/{model}/buscoord.dss".format(
                    model=model),
            ),
        )
        r.parse(m)
        m.set_names()

        # Create a modifier object
        modifier = system_structure_modifier(m)

        # And set the nominal voltages of the elements since we don't have them from OpenDSS
        modifier.set_nominal_voltages_recur()
        modifier.set_nominal_voltages_recur_line()

        # Create a network analyzer object with the modified model
        net = network_analyzer(modifier.model, True, "sourcebus")
        net.model.set_names()

        # Compute all the available metrics
        net.compute_all_metrics()

        output_path = tempfile.gettempdir()

        # Export them to Excel
        net.export(os.path.join(output_path, "metrics.xlsx"))

        # Export them to JSON
        net.export_json(os.path.join(output_path, "metrics.json"))

def dssToGridLab(inFilePath, outFilePath, busCoords=None):
    '''Convert a dss file to gridlab.'''
    model = Store()
    #TODO: do something about busCoords:
    dss_reader = dReader(master_file=inFilePath)
    dss_reader.parse(model)
    model.set_names()
    glm_writer = gWriter(output_path=".")
    # TODO: no way to specify output filename, so move and rename.
    glm_writer.write(model)

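# Hedged sketch (illustrative, not part of the original converters): the GridLAB-D
# writer above has no output-filename parameter, so `outFilePath` is currently
# ignored. One workaround is to write into a temporary directory and move whatever
# .glm file appears there; the helper name and the single-output-file assumption
# below are assumptions for illustration.
def dssToGridLabRenamed(inFilePath, outFilePath):
    import glob
    import shutil
    import tempfile

    tmp_dir = tempfile.mkdtemp()
    model = Store()
    dss_reader = dReader(master_file=inFilePath)
    dss_reader.parse(model)
    model.set_names()
    gWriter(output_path=tmp_dir).write(model)
    # Move the first .glm produced by the writer to the requested destination.
    glm_files = glob.glob(os.path.join(tmp_dir, "*.glm"))
    if glm_files:
        shutil.move(glm_files[0], outFilePath)
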
def gridLabToDSS(inFilePath, outFilePath):
    '''Convert a gridlab file to dss.'''
    model = Store()
    # HACK: the gridlab reader can't handle brace syntax that ditto itself writes...
    # command = 'sed -i -E "s/{/ {/" ' + inFilePath
    # os.system(command)
    gld_reader = gReader(input_file=inFilePath)
    gld_reader.parse(model)
    model.set_names()
    dss_writer = dWriter(output_path=".")
    # TODO: no way to specify output filename, so move and rename.
    dss_writer.write(model)

def test_linegeometries():
    m = Store()
    r = Reader(master_file=os.path.join(current_directory, "test_linegeometries.dss"))
    r.parse(m)
    m.set_names()

    # Number of wires
    assert len(m["line1"].wires) == 4  # Line1 should have 4 wires

    # Phases of the different wires
    assert set([w.phase for w in m["line1"].wires]) == set(["A", "B", "C", "N"])

    phased_wires = {}
    for wire in m["line1"].wires:
        phased_wires[wire.phase] = wire

    # Nameclass
    for p in ["A", "B", "C"]:
        assert phased_wires[p].nameclass == "ACSR336"
    assert phased_wires["N"].nameclass == "ACSR1/0"

    # Positions of the wires
    assert (phased_wires["A"].X, phased_wires["A"].Y) == (-1.2909, 13.716)
    assert (phased_wires["B"].X, phased_wires["B"].Y) == (
        -0.1530096 * 0.3048,
        4.1806368 * 0.3048,
    )
    assert (phased_wires["C"].X, phased_wires["C"].Y) == (0.5737, 13.716)
    assert (phased_wires["N"].X, phased_wires["N"].Y) == (0.0, 14.648)

    # GMR
    for p in ["A", "B", "C"]:
        assert phased_wires[p].gmr == 0.0255 * 0.3048
    assert phased_wires["N"].gmr == 0.00446 * 0.3048

    # Diameter
    for p in ["A", "B", "C"]:
        assert phased_wires[p].diameter == 0.741 * 0.0254
    assert phased_wires["N"].diameter == 0.398 * 0.0254

    # Resistance
    # TODO: Change this once the resistance of a Wire object will no longer be the total
    # resistance, but the per meter resistance...
    for p in ["A", "B", "C"]:
        assert phased_wires[p].resistance == pytest.approx(
            0.306 * 0.000621371 * 300 * 0.3048, 0.00001
        )
    assert phased_wires["N"].resistance == pytest.approx(
        1.12 * 0.000621371 * 300 * 0.3048, 0.00001
    )

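# Note on the conversion factors used in the expected values above (illustrative):
#   feet   -> meters: multiply by 0.3048   (wire positions, GMR, line length)
#   inches -> meters: multiply by 0.0254   (conductor diameter)
#   ohm/mile -> ohm/meter: multiply by 0.000621371 (miles per meter)
# e.g. the expected phase-wire resistance is
#   0.306 ohm/mile * 0.000621371 mile/m * (300 ft * 0.3048 m/ft) ≈ 0.0174 ohm
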
def test_default_values():
    m = Store()
    r = Reader(
        master_file=os.path.join(current_directory, "test_default_values.dss"),
        default_values_file=os.path.join(current_directory, "test_default_values.json"),
    )
    r.parse(m)
    m.set_names()

    assert m["line1"].faultrate == 0.2
    assert m["line1"].impedance_matrix == [
        [(0.00113148 + 0.000884886j), (0.000142066 + 0.000366115j)],
        [(0.000142066 + 0.000366115j), (0.00113362 + 0.000882239j)],
    ]
    assert m["line1"].capacitance_matrix == [
        [(0.00733718 + 0j), (-0.00239809 + 0j)],
        [(-0.00239809 + 0j), (0.00733718 + 0j)],
    ]

    phased_wires = {}
    for wire in m["line1"].wires:
        phased_wires[wire.phase] = wire

    # Ampacity
    for p in ["A", "B", "C"]:
        assert phased_wires[p].ampacity == 200
        assert phased_wires[p].emergency_ampacity == 400

    assert m["cap1"].connection_type == "Y"
    assert m["cap1"].low == 114
    assert m["cap1"].high == 125
    assert m["cap1"].delay == 10
    assert m["cap1"].pt_ratio == 50
    assert m["cap1"].ct_ratio == 50
    assert m["cap1"].pt_phase == "A"

    assert m["reg1"].reactances == [6]
    assert m["regulator_reg1"].ct_prim == 300
    assert m["regulator_reg1"].delay == 16
    assert m["regulator_reg1"].highstep == 15
    assert m["regulator_reg1"].pt_ratio == 60
    assert m["regulator_reg1"].bandwidth == 3
    assert m["regulator_reg1"].bandcenter == 130

    assert m["load_load1"].connection_type == "Y"
    assert m["load_load1"].vmin == 0.95
    assert m["load_load1"].vmax == 1.05

def test_single_phase_capacitor_writing():
    m = Store()

    # Create a one phase, 100kVar capacitor on phase A
    cap1 = Capacitor(m)
    cap1.name = "cap1"
    cap1.nominal_voltage = 4.16 * 10**3
    cap1.connecting_element = "bus23"
    cap1_A = PhaseCapacitor(m)
    cap1_A.phase = "A"
    cap1_A.var = 100 * 10**3
    cap1.phase_capacitors.append(cap1_A)

    output_path = tempfile.gettempdir()
    w = Writer(output_path=output_path)
    w.write(m)

    # Check that the OpenDSS writer created a Master file
    assert os.path.exists(os.path.join(output_path, "Master.dss"))

    # Check that the OpenDSS writer created a Capacitors.dss file
    assert os.path.exists(os.path.join(output_path, "Capacitors.dss"))

    with open(os.path.join(output_path, "Capacitors.dss"), "r") as fp:
        lines = fp.readlines()
        # There is one line with the capacitor string and one empty line
        assert len(lines) == 2
        assert lines[0] == "New Capacitor.cap1 Bus1=bus23.1 phases=1 Kv=4.16 Kvar=100.0\n"

def test_three_phase_capacitor_writing():
    m = Store()

    # Create a three phase, 900kVar capacitor
    cap1 = Capacitor(m)
    cap1.connecting_element = "bus66"
    cap1.nominal_voltage = 4.16 * 10**3
    cap1.name = "cap1"
    for phase in ["A", "B", "C"]:
        cap1.phase_capacitors.append(
            PhaseCapacitor(m, phase=phase, var=300 * 10**3))

    output_path = tempfile.gettempdir()
    w = Writer(output_path=output_path)
    w.write(m)

    # Check that the OpenDSS writer created a Master file
    assert os.path.exists(os.path.join(output_path, "Master.dss"))

    # Check that the OpenDSS writer created a Capacitors.dss file
    assert os.path.exists(os.path.join(output_path, "Capacitors.dss"))

    with open(os.path.join(output_path, "Capacitors.dss"), "r") as fp:
        lines = fp.readlines()
        # There is one line with the capacitor string and one empty line
        assert len(lines) == 2
        assert lines[0] == "New Capacitor.cap1 Bus1=bus66 phases=3 Kv=4.16 Kvar=900.0\n"

def apply(cls, stack, model, *args, **kwargs):
    if 'filename' in kwargs:
        filename = kwargs['filename']
    else:
        # Without a filename there is nothing to merge; return the input model
        return model

    # If the file does not exist, do nothing and return the input model
    if not os.path.exists(filename):
        return model

    # If the file is empty, also do nothing and return the input model
    try:
        df = pd.read_csv(filename)
    except pd.errors.EmptyDataError:
        logging.warning("Empty Dataframe loaded for {}".format(filename))
        return model

    # Create a CSV reader
    csv_reader = CSVReader()
    m2 = Store()
    csv_reader.parse(m2, filename)

    # Create a Modifier object
    modifier = Modifier()

    # Merge the two models
    new_model = modifier.merge(model, m2)

    # Return the new model
    return new_model

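# Hedged usage sketch (illustrative; the layer class name `AddFromCsvLayer` and
# the CSV path are assumptions): merge extra elements described in a CSV file
# into an existing model, falling back to the unmodified model when the file is
# missing or empty.
#
#   model = AddFromCsvLayer.apply(stack, model, filename='layers/extra_elements.csv')
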
def apply(cls, stack, *args, **kwargs):
    if 'base_dir' in kwargs:
        base_dir = kwargs['base_dir']
    else:
        base_dir = './'

    if 'input_filename' in kwargs:
        input_filename = kwargs['input_filename']
    else:
        input_filename = 'full_model.json'  # Default

    base_model = Store()
    reader = JsonReader(input_file=os.path.join(base_dir, input_filename))
    reader.parse(base_model)
    base_model.set_names()
    stack.model = base_model
    return True

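# Hedged usage sketch (illustrative; the layer class name `JsonReadLayer` and the
# directory are assumptions): resume a pipeline from a previously serialized
# full_model.json.
#
#   JsonReadLayer.apply(stack, base_dir='outputs/run_01')
#   model = stack.model
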
def test_center_tap_load_writing():
    """Tests the writing of center tap loads."""
    from ditto.models.load import Load
    from ditto.models.phase_load import PhaseLoad

    m = Store()
    l = Load(m)
    l.name = "load1"
    l.is_center_tap = 1
    l.center_tap_perct_1_N = .5
    l.center_tap_perct_N_2 = .5
    l.center_tap_perct_1_2 = 0

    pl = PhaseLoad(m)
    pl.p = 10
    pl.q = 8
    pl.phase = "C"
    l.phase_loads.append(pl)

    w = Writer(output_path=current_directory)
    w.write(m)

    with open(os.path.join(current_directory, "loads.txt"), "r") as fp:
        lines = fp.readlines()
    assert lines[-1] == ",SPOT,0,C,,,0,PQ,50.0,50.0,0.005,0.004,0.005,0.004,0\n"

    # Cleaning
    os.remove(os.path.join(current_directory, "loads.txt"))
    os.remove(os.path.join(current_directory, "equipment.txt"))
    os.remove(os.path.join(current_directory, "network.txt"))

def test_cyme_to_ephasor():
    '''Test the Cyme to Ephasor conversion.'''
    from ditto.store import Store
    from ditto.readers.cyme.read import Reader
    from ditto.writers.ephasor.write import Writer

    cyme_models = [
        f
        for f in os.listdir(
            os.path.join(current_directory, 'data/small_cases/cyme/'))
        if not f.startswith('.')
    ]
    for model in cyme_models:
        m = Store()
        r = Reader(data_folder_path=os.path.join(
            current_directory, 'data/small_cases/cyme', model))
        r.parse(m)
        #TODO: Log properly
        print('>Cyme model {model} read...'.format(model=model))
        t = tempfile.TemporaryDirectory()
        w = Writer(output_path=t.name)
        w.write(m)
        #TODO: Log properly
        print('>...and written to Ephasor.\n')

def test_cyme_to_opendss():
    '''Test the Cyme to OpenDSS conversion.'''
    list_of_directories = []

    from ditto.store import Store
    from ditto.readers.cyme.read import Reader
    from ditto.writers.opendss.write import Writer
    import opendssdirect as dss

    cyme_models = [
        f
        for f in os.listdir(
            os.path.join(current_directory, 'data/small_cases/cyme/'))
        if not f.startswith('.')
    ]
    for model in cyme_models:
        print(model)
        m = Store()
        r = Reader(data_folder_path=os.path.join(
            current_directory, 'data/small_cases/cyme', model))
        r.parse(m)
        #TODO: Log properly
        # print('>Cyme model {model} read...'.format(model=model))
        output_path = tempfile.TemporaryDirectory()
        list_of_directories.append(output_path)
        w = Writer(output_path=output_path.name)
        w.write(m)
        #TODO: Log properly
        # print('>...and written to OpenDSS.\n')
        print(model)
        dss.run_command("clear")

def create_output_dir(tests_dir):
    """Read the input with every reader for each test case and create the OpenDSS output."""
    # Creating output directory
    current_dir = os.path.realpath(os.path.dirname(__file__))
    validation_dir = os.path.join(current_dir, "validation_outputs")
    if os.path.exists(validation_dir):
        shutil.rmtree(validation_dir)

    for each in os.listdir(tests_dir):
        if each == "cim" or each == "demo":
            continue
        for dirname in os.listdir(os.path.join(tests_dir, each)):
            if dirname == "storage_test":
                continue
            output_dir = os.path.join(validation_dir, dirname, each + "_output")
            test_path = os.path.join(tests_dir, each, dirname)
            m = Store()
            if each == "opendss":
                r1 = OpenDSS_Reader(
                    master_file=os.path.join(test_path, "master.dss"))
            elif each == "synergi":
                if dirname == "ieee_4node":
                    r1 = Synergi_Reader(
                        input_file=os.path.join(test_path, "network.mdb"))
            elif each == "cyme":
                r1 = Cyme_Reader(data_folder_path=os.path.join(test_path))
            elif each == "gridlabd":
                r1 = Gridlabd_Reader(
                    input_file=os.path.join(test_path, "node.glm"))
            r1.parse(m)
            w1 = OpenDSS_Writer(output_path=output_dir)
            w1.write(m, separate_feeders=True)
    return validation_dir

def test_opendss_transformer():
    opendss_test_data = """
    Clear

    new circuit.IEEE13
    ~ basekv=4.16 pu=1.0000 phases=3 bus1=SourceBus
    ~ Angle=0
    ~ MVAsc3=200000 MVASC1=200000    ! stiffen the source to approximate inf source

    New Transformer.reg Phases=3 Windings=2 XHL=0.01
    ~ wdg=1 bus=Sourcebus.1.2.3.0 conn=Wye kv=4.16 kva=5000 %r=0.000498 XHT=.00498
    ~ wdg=2 bus=651.1.2.3 conn=Wye kv=4.16 kva=5000 %r=0.000498 XLT=.00498

    New Transformer.XFM1 Phases=3 Windings=2 XHL=2
    ~ wdg=1 bus=633.1.2.3.0 conn=Wye kv=4.16 kva=500 %r=.55 XHT=1
    ~ wdg=2 bus=634.1.2.3 conn=Wye kv=0.480 kva=500 %r=.55 XLT=1
    """
    master_file = os.path.join(tempfile.gettempdir(), os.urandom(24).hex())
    # TODO: figure out why named temporary file doesn't work as expected on windows
    # master_file = tempfile.NamedTemporaryFile(mode="w")
    with open(master_file, "w") as f:
        f.write(opendss_test_data)

    m = Store()
    r = Reader(master_file=master_file)
    r.parse(m)
    m.set_names()

    for t in m.iter_models(type=PowerTransformer):
        assert len(t.windings) == 2
        assert t.windings[0].is_grounded is True
        assert t.windings[1].is_grounded is False

    output_path = tempfile.TemporaryDirectory()
    w = Writer(output_path=output_path.name)
    w.write(m)

    with open(os.path.join(output_path.name, "Transformers.dss")) as f:
        string = f.read()
        assert "sourcebus.1.2.3.0" in string
        assert "651.1.2.3.0" not in string
        assert "651.1.2.3" in string
        assert "633.1.2.3.0" in string
        assert "634.1.2.3.0" not in string
        assert "634.1.2.3" in string

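# Note on the node suffixes asserted above (informational): in OpenDSS bus
# specifications such as Sourcebus.1.2.3.0, the trailing .0 connects the fourth
# (neutral) terminal to node 0, i.e. ground. The reader therefore marks that
# winding as grounded, and the writer is expected to emit the .0 suffix only
# for grounded windings, which is what the string assertions check.
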
def apply(cls, stack, model, random_percent=None, scale_factor=None, timeseries_path=None):
    # If a timeseries file is provided, merge its scale factors into the model and return
    if timeseries_path is not None:
        scale_factor_model = Store()
        reader = CsvReader()
        reader.parse(scale_factor_model, timeseries_path)
        modifier = Modifier()
        intermediate_model = modifier.merge(model, scale_factor_model)
        return intermediate_model

    if random_percent is None:
        random_percent = 100

    if scale_factor is not None:
        scale_factor_model = Store()
        modifier = Modifier()

        # Count the loads in the model
        total_loads = 0
        for obj_name in model.model_names:
            if isinstance(model.model_names[obj_name], Load):
                total_loads = total_loads + 1

        # Randomly select random_percent % of the loads
        num_selected = int(random_percent / 100.0 * total_loads)
        to_select = [1 for i in range(num_selected)
                     ] + [0 for i in range(total_loads - num_selected)]
        shuffle(to_select)

        # Attach a scale-factor timeseries to every selected load
        load_cnt = 0
        for obj_name in model.model_names:
            if isinstance(model.model_names[obj_name], Load):
                if to_select[load_cnt] == 1:
                    tmp_load = Load(scale_factor_model)
                    tmp_timeseries = Timeseries(scale_factor_model)
                    tmp_timeseries.scale_factor = scale_factor
                    tmp_load.name = obj_name
                    tmp_load.timeseries = [tmp_timeseries]
                    modifier.set_attributes(model, model.model_names[obj_name], tmp_load)
                load_cnt = load_cnt + 1
        model.set_names()
        return model
    else:
        logger.warning('no scaling factor set')
        return model

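# Hedged usage sketch (illustrative; the layer class name `ScaleLoadsLayer` is an
# assumption): attach a 1.2x scale-factor timeseries to a randomly chosen 50% of
# the loads in the model.
#
#   model = ScaleLoadsLayer.apply(stack, model, random_percent=50, scale_factor=1.2)
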
def apply(cls, stack, model, load_path):
    loads = Store()
    reader = CsvReader()
    reader.parse(loads, load_path)
    modifier = Modifier()
    intermediate_model = modifier.merge(model, loads)
    return intermediate_model

def test_opendss_center_transformer():
    opendss_test_data = """
    Clear

    new circuit.IEEE13
    ~ basekv=4.16 pu=1.0000 phases=3 bus1=SourceBus
    ~ Angle=0
    ~ MVAsc3=200000 MVASC1=200000    ! stiffen the source to approximate inf source

    New Transformer.Example1-ph phases=1 Windings=3
    ~ Xhl=2.04 Xht=2.04 Xlt=1.36 %noloadloss=.2
    ~ Buses=[bus1.1.2 bus2.1.0 bus2.0.2]  ! mid-point of secondary is grounded
    ~ kVs=[12.47 .12 .12]                 ! ratings of windings
    ~ kVAs=[25 25 25]
    ~ %Rs = [0.6 1.2 1.2]
    ~ conns=[delta wye wye]
    """
    master_file = os.path.join(tempfile.gettempdir(), os.urandom(24).hex())
    # TODO: figure out why named temporary file doesn't work as expected on windows
    # master_file = tempfile.NamedTemporaryFile(mode="w")
    with open(master_file, "w") as f:
        f.write(opendss_test_data)

    m = Store()
    r = Reader(master_file=master_file)
    r.parse(m)
    m.set_names()

    for t in m.iter_models(type=PowerTransformer):
        assert len(t.windings) == 3
        assert t.windings[0].is_grounded is False
        assert t.windings[1].is_grounded is False
        assert t.windings[2].is_grounded is False

    output_path = tempfile.TemporaryDirectory()
    w = Writer(output_path=output_path.name)
    w.write(m)

    with open(os.path.join(output_path.name, "Transformers.dss")) as f:
        string = f.read()
        assert "bus1.1.2 " in string
        assert "bus2.1.0 " in string
        assert "bus2.0.2 " in string

def test_json_serialize_deserialize():
    """Write a model to JSON, read it back in, and test that both models match."""
    from ditto.readers.opendss.read import Reader
    from ditto.store import Store
    from ditto.writers.json.write import Writer
    from ditto.readers.json.read import Reader as json_reader

    opendss_models = [
        f
        for f in os.listdir(
            os.path.join(current_directory, "data/small_cases/opendss/"))
        if not f.startswith(".")
    ]
    opendss_models.remove("storage_test")
    for model in opendss_models:
        m = Store()
        r = Reader(
            master_file=os.path.join(
                current_directory,
                "data/small_cases/opendss/{model}/master.dss".format(
                    model=model),
            ),
            buscoordinates_file=os.path.join(
                current_directory,
                "data/small_cases/opendss/{model}/buscoord.dss".format(
                    model=model),
            ),
        )
        r.parse(m)
        m.set_names()

        w = Writer(output_path="./")
        w.write(m)

        jr = json_reader(input_file="./Model.json")
        jr.parse(m)
        jr.model.set_names()

        for obj in m.models:
            if hasattr(obj, "name"):
                json_obj = jr.model[obj.name]
                assert compare(obj, json_obj)
        for json_obj in jr.model.models:
            if hasattr(json_obj, "name"):
                obj = m[json_obj.name]
                assert compare(json_obj, obj)

        os.remove("./Model.json")

def test_cyme_to_json():
    """Test the JSON writer with CYME models as input."""
    from ditto.readers.cyme.read import Reader
    from ditto.store import Store
    from ditto.writers.json.write import Writer

    cyme_models = [
        f
        for f in os.listdir(
            os.path.join(current_directory, "data/small_cases/cyme/"))
        if not f.startswith(".")
    ]
    for model in cyme_models:
        m = Store()
        r = Reader(data_folder_path=os.path.join(
            current_directory, "data/small_cases/cyme", model))
        r.parse(m)
        m.set_names()
        output_path = tempfile.TemporaryDirectory()
        w = Writer(output_path=output_path.name)
        w.write(m)

def test_gld_reader():
    gridlabd_models_dir = os.path.join(current_directory, 'data', 'small_cases', 'gridlabd')
    gridlabd_models = [
        f for f in os.listdir(gridlabd_models_dir) if not f.startswith('.')
    ]

    from ditto.readers.gridlabd.read import Reader

    for modelfile in gridlabd_models:
        m = Store()
        r = Reader(input_file=os.path.join(gridlabd_models_dir, modelfile))
        r.parse(m)

def test_capacitor_kvar():
    m = Store()
    r = Reader(
        master_file=os.path.join(current_directory, "test_capacitor_kvar.dss"))
    r.parse(m)
    m.set_names()

    assert len(m["cap1"].phase_capacitors) == 3  # Cap1 is a three phase capacitor
    assert sum([
        phase_capacitor.var for phase_capacitor in m["cap1"].phase_capacitors
    ]) == pytest.approx(600 * 10**3, 0.0001)

    assert len(m["cap2"].phase_capacitors) == 1  # Cap2 is a one phase capacitor
    assert m["cap2"].phase_capacitors[0].var == 100 * 10**3

    assert len(m["cap3"].phase_capacitors) == 1  # Cap3 is a one phase capacitor
    assert m["cap3"].phase_capacitors[0].var == 200.37 * 10**3

def test_lines_write():
    m = Store()
    r = Reader(
        master_file=os.path.join(
            current_directory,
            "../../../readers/opendss/Lines/test_linegeometries.dss"))
    r.parse(m)
    m.set_names()

    output_path = tempfile.gettempdir()
    jw = Json_Writer(output_path=output_path)
    jw.write(m)

    w = Writer(output_path=output_path)
    w.write(m)

    # Check that the OpenDSS writer created a Master file
    assert os.path.exists(os.path.join(output_path, "Master.dss"))

    r_w = Reader(master_file=os.path.join(output_path, "Master.dss"))
    r_w.parse(m)
    m.set_names()

    jw = Json_Writer(output_path="./")
    jw.write(m)

    with open(os.path.join(output_path, "Model.json"), "r") as f1:
        reader_json = json.load(f1)
    with open("./Model.json", "r") as f2:
        writer_json = json.load(f2)

    assert reader_json["model"] == writer_json["model"]
