def create_rnm_to_opendss_stack_scenarios(dataset_dir, region, solar, batteries):
    '''Create the stack to convert RNM models in OpenDSS to OpenDSS.

    Assembles the full scenario pipeline (read model, post-process, place and
    add PV, add controls, compute metrics, write OpenDSS/JSON outputs),
    configures every layer for the given solar/battery penetration scenario,
    and saves the stack to the stack library.

    Parameters:
        dataset_dir (str): root directory containing the per-region RNM dataset.
        region (str): region name (a subdirectory of ``dataset_dir``).
        solar (str): solar penetration scenario: 'none', 'low', 'medium' or 'high'.
        batteries (str): battery penetration label (used only in output paths
            and in the saved stack file name).
    '''
    stack = Stack(name='RNM to OpenDSS Stack')

    # NOTE: the numeric indices used below (stack[0] ... stack[30]) must
    # match this append order exactly.

    #Parse load coordinates csv file
    stack.append(Layer(os.path.join(layer_library_dir, 'csv_processing')))
    #Parse Capacitor coordinates csv file
    stack.append(Layer(os.path.join(layer_library_dir, 'csv_processing')))
    #Read the OpenDSS input model
    stack.append(Layer(os.path.join(layer_library_dir, 'from_opendss')))
    #Add regulators with setpoints
    stack.append(Layer(os.path.join(layer_library_dir, 'add_rnm_regulators')))
    #Ensure all LV lines are triplex
    stack.append(Layer(os.path.join(layer_library_dir, 'set_lv_as_triplex')))
    #Modify the model
    stack.append(Layer(os.path.join(layer_library_dir, 'post-processing')))
    #Add the load coordinates with a model merge
    stack.append(Layer(os.path.join(layer_library_dir, 'merging-layer')))
    #Add the capacitor coordinates with a model merge
    stack.append(Layer(os.path.join(layer_library_dir, 'merging-layer')))
    #Set number of customers
    stack.append(Layer(os.path.join(layer_library_dir, 'set_num_customers')))
    #Split the network into feeders
    stack.append(Layer(os.path.join(layer_library_dir, 'network_split')))
    #Calculate metrics on customers per transformer
    stack.append(
        Layer(
            os.path.join(layer_library_dir,
                         'partitioned_customers_per_transformer_plots')))
    #Add intermediate node coordinates
    stack.append(Layer(os.path.join(layer_library_dir, 'intermediate_node')))
    #Create residential placement for PV
    stack.append(
        Layer(os.path.join(layer_library_dir, 'create_nested_placement')))
    #Create commercial placement for PV
    stack.append(Layer(os.path.join(layer_library_dir, 'create_placement')))
    #Add Load PV
    stack.append(Layer(os.path.join(layer_library_dir, 'add_pv')))
    #Add Utility PV
    stack.append(Layer(os.path.join(layer_library_dir, 'add_pv')))
    #Find missing coordinates
    stack.append(Layer(os.path.join(layer_library_dir, 'find_missing_coords')))
    #Adjust overlaid nodes
    stack.append(Layer(os.path.join(layer_library_dir, 'move_overlayed_nodes')))
    #Add cyme substations
    stack.append(Layer(os.path.join(layer_library_dir, 'add_cyme_substations')))
    #Add ltc control settings
    stack.append(Layer(os.path.join(layer_library_dir, 'set_ltc_controls')))
    #Add fuse control settings
    stack.append(Layer(os.path.join(layer_library_dir, 'set_fuse_controls')))
    #Add extra switches to long lines
    stack.append(
        Layer(os.path.join(layer_library_dir, 'add_switches_to_long_lines')))
    #Add Additional regulators
    stack.append(
        Layer(os.path.join(layer_library_dir, 'add_additional_regulators')))
    #Add Capacitor control settings
    stack.append(
        Layer(os.path.join(layer_library_dir, 'set_capacitor_controlers')))
    #Reduce overloaded nodes
    stack.append(
        Layer(os.path.join(layer_library_dir, 'reduce_overload_nodes')))
    #Set any delta connections
    stack.append(Layer(os.path.join(layer_library_dir, 'set_delta_systems')))
    #Set source kv
    stack.append(Layer(os.path.join(layer_library_dir, 'set_source_voltage')))
    #Write to OpenDSS
    stack.append(Layer(os.path.join(layer_library_dir, 'to_opendss')))
    #Write to Ditto json
    stack.append(Layer(os.path.join(layer_library_dir, 'to_json')))
    #Copy Tag file over
    stack.append(Layer(os.path.join(layer_library_dir, 'add_tags')))
    #Run validation metrics
    stack.append(
        Layer(os.path.join(layer_library_dir, 'statistical_validation')))

    # Switch every layer to USE mode so args/kwargs can be assigned below.
    for layer in stack:
        layer.args.mode = ArgMode.USE
        layer.kwargs.mode = ArgMode.USE

    # Common output folders for this scenario (hoisted; was rebuilt inline
    # at each use in the original).
    results_folder = os.path.join(
        '.', 'results_v2', region,
        'peak_solar_' + solar + '_battery_' + batteries)
    opendss_output = os.path.join(results_folder, 'opendss')

    #Load coordinate layer
    load_coordinates = stack[0]
    load_coordinates.kwargs['input_filename'] = os.path.join(
        dataset_dir, region, 'IntermediateFormat',
        'Loads_IntermediateFormat.csv')
    load_coordinates.kwargs['output_filename'] = os.path.join(
        dataset_dir, region, 'IntermediateFormat',
        'Loads_IntermediateFormat2.csv')
    load_coordinates.kwargs['object_name'] = 'Load'

    #Capacitor coordinate layer
    capacitor_coordinates = stack[1]
    capacitor_coordinates.kwargs['input_filename'] = os.path.join(
        dataset_dir, region, 'IntermediateFormat',
        'Capacitors_IntermediateFormat.csv')
    capacitor_coordinates.kwargs['output_filename'] = os.path.join(
        dataset_dir, region, 'IntermediateFormat',
        'Capacitors_IntermediateFormat2.csv')
    capacitor_coordinates.kwargs['object_name'] = 'Capacitor'

    #Read OpenDSS layer
    from_opendss = stack[2]
    from_opendss.args[0] = os.path.join(region, 'OpenDSS', 'Master.dss')
    from_opendss.args[1] = os.path.join(region, 'OpenDSS', 'BusCoord.dss')
    from_opendss.kwargs['base_dir'] = dataset_dir

    #Set regulators with setpoints
    rnm_regulators = stack[3]
    rnm_regulators.kwargs['rnm_name'] = 'CRegulador'
    rnm_regulators.kwargs['setpoint'] = 103

    #Ensure all LV lines are triplex
    set_lv_triplex = stack[4]
    set_lv_triplex.kwargs['to_replace'] = ['Ionic', 'Corinthian', 'Doric']

    #Modify layer
    post_processing = stack[5]
    post_processing.kwargs['path_to_feeder_file'] = os.path.join(
        dataset_dir, region, 'Auxiliary', 'Feeder.txt')
    post_processing.kwargs['path_to_switching_devices_file'] = os.path.join(
        dataset_dir, region, 'OpenDSS', 'SwitchingDevices.dss')
    # BUG FIX: the original set center_tap_postprocess = True and then
    # immediately overwrote it with False; only the final value ever took
    # effect, so the single effective assignment is kept.
    post_processing.kwargs['center_tap_postprocess'] = False
    post_processing.kwargs['switch_to_recloser'] = True

    #Merging Load layer
    merging_load = stack[6]
    merging_load.kwargs['filename'] = os.path.join(
        dataset_dir, region, 'IntermediateFormat',
        'Loads_IntermediateFormat2.csv')

    #Merging Capacitor Layer
    merging_caps = stack[7]
    merging_caps.kwargs['filename'] = os.path.join(
        dataset_dir, region, 'IntermediateFormat',
        'Capacitors_IntermediateFormat2.csv')

    #Resetting customer number layer
    customer = stack[8]
    customer.kwargs['num_customers'] = 1

    #Splitting layer
    split = stack[9]
    split.kwargs['path_to_feeder_file'] = os.path.join(
        dataset_dir, region, 'Auxiliary', 'Feeder.txt')
    split.kwargs['path_to_no_feeder_file'] = os.path.join(
        dataset_dir, region, 'Auxiliary', 'NoFeeder.txt')
    split.kwargs['compute_metrics'] = True
    #RNM networks have LV information
    split.kwargs['compute_kva_density_with_transformers'] = True
    split.kwargs['excel_output'] = os.path.join(opendss_output, 'metrics.csv')
    split.kwargs['json_output'] = os.path.join(opendss_output, 'metrics.json')

    #Customer per Transformer plotting layer
    transformer_metrics = stack[10]
    transformer_metrics.kwargs['customer_file'] = os.path.join(
        dataset_dir, region, 'Inputs', 'customers_ext.txt')
    transformer_metrics.kwargs['output_folder'] = opendss_output

    #Intermediate node layer
    inter = stack[11]
    inter.kwargs['filename'] = os.path.join(dataset_dir, region, 'OpenDSS',
                                            'LineCoord.txt')

    #PV placement configuration, keyed by the solar penetration scenario.
    load_selection_mapping = {
        'none': None,
        'low': [('Random', 0, 15)],
        'medium': [('Random', 0, 15), ('Random', 15, 35)],
        'high': [('Random', 0, 15), ('Random', 15, 35), ('Random', 35, 55),
                 ('Random', 55, 75)]
    }
    #(Reclosers,1,2) means the algorithm will select 2 Reclosers that are not
    #upstream of each other and return the first. Useful for consistency with
    #larger selections.
    utility_selection_mapping = {
        'none': None,
        'low': None,
        'medium': ('Reclosers', 1, 2),
        'high': ('Reclosers', 2, 2)
    }
    # Length should equal selection[1]; values should be in decreasing order.
    utility_feeder_mapping = {
        'none': None,
        'low': None,
        'medium': [50],
        'high': [100, 75]
    }
    load_feeder_mapping = {
        'none': None,
        'low': [100],
        'medium': [100, 100],
        'high': [100, 100, 100, 100]
    }
    utility_max_feeder_sizing = {
        'none': None,
        'low': None,
        'medium': 33,
        'high': 80
    }
    load_max_feeder_sizing = {
        'none': None,
        'low': 75,
        'medium': 150,
        'high': None
    }
    #the pf=1 in the last two should be overridden by the controllers
    powerfactor_mapping = {
        'none': None,
        'low': [1],
        'medium': [1, -0.95],
        'high': [1, -0.95, 1, 1]
    }
    inverter_control_mapping = {
        'none': None,
        'low': ['powerfactor'],
        'medium': ['powerfactor', 'powerfactor'],
        'high': ['powerfactor', 'powerfactor', 'voltvar', 'voltwatt']
    }
    cutin_mapping = {
        'none': None,
        'low': [0.1],
        'medium': [0.1, 0.1],
        'high': [0.1, 0.1, 0.1, 0.1]
    }
    cutout_mapping = {
        'none': None,
        'low': [0.1],
        'medium': [0.1, 0.1],
        'high': [0.1, 0.1, 0.1, 0.1]
    }
    kvar_percent_mapping = {
        'none': None,
        'low': [None],
        'medium': [None, None],
        'high': [None, None, 44, 44]
    }
    oversizing_mapping = {
        'none': None,
        'low': [1.1],
        'medium': [1.1, 1.1],
        'high': [1.1, 1.1, 1.2, 1.2]
    }

    load_equipment_type = 'ditto.models.load.Load'
    seed = 1
    placement_folder = os.path.join(placement_library_dir, region)

    #Create residential (load) placement for PV
    load_solar_placement = stack[12]
    load_solar_placement.args[0] = load_feeder_mapping[solar]
    load_solar_placement.args[1] = load_equipment_type
    load_solar_placement.args[2] = load_selection_mapping[solar]
    load_solar_placement.args[3] = seed
    load_solar_placement.args[4] = placement_folder

    #Create utility placement for PV
    utility_solar_placement = stack[13]
    utility_solar_placement.args[0] = utility_feeder_mapping[solar]
    utility_solar_placement.args[1] = None
    utility_solar_placement.args[2] = utility_selection_mapping[solar]
    utility_solar_placement.args[3] = None
    utility_solar_placement.args[4] = placement_folder

    #Add Load PV
    add_load_pv = stack[14]
    load_file_names = None  #Do nothing if this is the case
    powerfactors = None
    inverters = None
    cutin = None
    cutout = None
    kvar_percent = None
    oversizing = None
    if load_selection_mapping[solar] is not None:
        load_file_names = []
        for selection in load_selection_mapping[solar]:
            # Note - assume all subregions are using all feeders
            file_name = str(load_feeder_mapping[solar][-1]) + '_' + \
                load_equipment_type.split('.')[-1] + '_' + selection[0] + \
                '-' + str(selection[1]) + '-' + str(selection[2]) + '_' + \
                str(seed) + '.json'
            load_file_names.append(file_name)
        powerfactors = powerfactor_mapping[solar]
        inverters = inverter_control_mapping[solar]
        cutin = cutin_mapping[solar]
        # BUG FIX: the original read cutin_mapping here, leaving
        # cutout_mapping defined but never used.
        cutout = cutout_mapping[solar]
        kvar_percent = kvar_percent_mapping[solar]
        oversizing = oversizing_mapping[solar]
    add_load_pv.kwargs['placement_folder'] = placement_folder
    add_load_pv.kwargs['placement_names'] = load_file_names
    add_load_pv.kwargs['residential_sizes'] = [3000, 5000, 8000]
    add_load_pv.kwargs['residential_areas'] = [75, 300]
    add_load_pv.kwargs['commercial_sizes'] = [
        3000, 6000, 8000, 40000, 100000, 300000
    ]
    add_load_pv.kwargs['commercial_areas'] = [100, 300, 600, 1000, 2000]
    add_load_pv.kwargs['customer_file'] = os.path.join(
        dataset_dir, region, 'Inputs', 'customers_ext.txt')
    # total_pv <= max_feeder_size*total_feeder_load
    add_load_pv.kwargs['max_feeder_sizing_percent'] = load_max_feeder_sizing[
        solar]
    add_load_pv.kwargs['power_factors'] = powerfactors
    add_load_pv.kwargs['inverters'] = inverters
    add_load_pv.kwargs['cutin'] = cutin
    add_load_pv.kwargs['cutout'] = cutout
    add_load_pv.kwargs['kvar_percent'] = kvar_percent
    add_load_pv.kwargs['oversizing'] = oversizing

    #Add Utility PV
    add_utility_pv = stack[15]
    utility_file_name = None
    if utility_selection_mapping[solar] is not None:
        feeders_str = str(utility_feeder_mapping[solar])
        if isinstance(utility_feeder_mapping[solar], list):
            # e.g. [100, 75] -> '100-75'
            feeders_str = '-'.join(
                str(f) for f in utility_feeder_mapping[solar])
        utility_file_name = [
            feeders_str + '_Node_' + utility_selection_mapping[solar][0] +
            '-' + str(utility_selection_mapping[solar][1]) + '-' +
            str(utility_selection_mapping[solar][2]) + '.json'
        ]
    add_utility_pv.kwargs['placement_folder'] = placement_folder
    add_utility_pv.kwargs['placement_names'] = utility_file_name
    add_utility_pv.kwargs['single_size'] = 2000000
    # total_pv <= max_feeder_size*total_feeder_load
    add_utility_pv.kwargs['max_feeder_sizing_percent'] = \
        utility_max_feeder_sizing[solar]
    add_utility_pv.kwargs['power_factors'] = [0.95]
    #Note that in Opendss this needs kvar to be set to 0
    add_utility_pv.kwargs['inverters'] = ['voltvar']
    add_utility_pv.kwargs['cutin'] = [0.1]
    add_utility_pv.kwargs['cutout'] = [0.1]
    add_utility_pv.kwargs['kvar_percent'] = [44]
    add_utility_pv.kwargs['oversizing'] = [1.1]

    # Missing coords layer (stack[16]) takes no args/kwargs.

    # Move overlayed node layer
    adjust = stack[17]
    adjust.kwargs['delta_x'] = 10
    adjust.kwargs['delta_y'] = 10

    # Locate the (single) README file in the region's Inputs folder, if any.
    # Computed once and reused below (was duplicated in the original).
    readme_list = [
        os.path.join(dataset_dir, region, 'Inputs', f)
        for f in os.listdir(os.path.join(dataset_dir, region, 'Inputs'))
        if f.startswith('README')
    ]
    readme = readme_list[0] if len(readme_list) == 1 else None

    #Substations
    add_substations = stack[18]
    add_substations.args[0] = os.path.join(dataset_dir, region, 'Auxiliary',
                                           'Feeder.txt')
    add_substations.kwargs['base_dir'] = dataset_dir
    add_substations.kwargs['readme_file'] = readme

    #LTC Controls
    ltc_controls = stack[19]
    ltc_controls.kwargs['setpoint'] = 103

    #Fuse Controls
    fuse_controls = stack[20]
    fuse_controls.kwargs['current_rating'] = 100
    fuse_controls.kwargs['high_current_rating'] = 600

    #Add switch in long lines
    switch_cut = stack[21]
    switch_cut.kwargs['cutoff_length'] = 800

    #Add additional regulators
    additional_regs = stack[22]
    additional_regs.kwargs['file_location'] = os.path.join(
        dataset_dir, region, 'Auxiliary', 'additional_regs.csv')
    additional_regs.kwargs['setpoint'] = 103

    # Capacitor controls
    cap_controls = stack[23]
    cap_controls.kwargs['delay'] = 100
    cap_controls.kwargs['lowpoint'] = 120.5
    cap_controls.kwargs['highpoint'] = 125

    # Reduce overloaded nodes
    overload_nodes = stack[24]
    overload_nodes.kwargs['powerflow_file'] = os.path.join(
        dataset_dir, region, 'Auxiliary', 'powerflow.csv')
    overload_nodes.kwargs['threshold'] = 0.94
    overload_nodes.kwargs['scale_factor'] = 2.0

    # Set delta loads and transformers
    delta = stack[25]
    delta.kwargs['readme_location'] = readme

    #Set source KV value
    set_source = stack[26]
    set_source.kwargs['source_kv'] = 230
    set_source.kwargs['source_names'] = ['st_mat']

    #Write to OpenDSS
    final = stack[27]
    final.args[0] = opendss_output
    final.kwargs['separate_feeders'] = True
    final.kwargs['separate_substations'] = True

    #Dump to Ditto json
    final_json = stack[28]
    final_json.kwargs['base_dir'] = os.path.join(results_folder,
                                                 'json_opendss')

    #Write Tags
    tags = stack[29]
    tags.kwargs['output_folder'] = opendss_output
    tags.kwargs['tag_file'] = os.path.join(dataset_dir, region, 'Auxiliary',
                                           'FeederStats.txt')

    #Write validation
    validation = stack[30]
    validation.kwargs['output_folder'] = opendss_output
    validation.kwargs['input_folder'] = opendss_output
    validation.kwargs['rscript_folder'] = os.path.join(
        '..', '..', 'smartdsR-analysis-lite')
    validation.kwargs['output_name'] = region

    # NOTE(review): the file name reads '..._peak_<region>solar_...' — this
    # looks like a missing '_' between region and 'solar', but it is kept
    # as-is because downstream tooling may depend on the existing name.
    stack.save(
        os.path.join(
            stack_library_dir, 'rnm_to_opendss_stack_peak_' + region +
            'solar_' + solar + '_batteries_' + batteries + '.json'))
def create_rnm_to_cyme_stack(dataset_dir, region):
    '''Create the stack to convert RNM models in OpenDSS to CYME.

    Builds the layer pipeline (read model, post-process, add controls,
    compute metrics, write CYME/JSON outputs), configures every layer, and
    saves the stack as 'rnm_to_cyme_stack_<region>.json' in the stack
    library.

    Parameters:
        dataset_dir: root directory containing the per-region RNM dataset.
        region: region name (a subdirectory of ``dataset_dir``).
    '''
    stack = Stack(name='RNM to CYME Stack')
    # NOTE: the numeric indices used below (stack[0] ... stack[26]) must
    # match this append order exactly.
    #Parse load coordinates csv file
    stack.append(Layer(os.path.join(layer_library_dir, 'csv_processing')))
    #Parse Capacitor coordinates csv file
    stack.append(Layer(os.path.join(layer_library_dir, 'csv_processing')))
    #Read the OpenDSS input model
    stack.append(Layer(os.path.join(layer_library_dir, 'from_opendss')))
    #Add regulators with setpoints
    stack.append(Layer(os.path.join(layer_library_dir, 'add_rnm_regulators')))
    #Ensure all LV lines are triplex
    stack.append(Layer(os.path.join(layer_library_dir, 'set_lv_as_triplex')))
    #Modify the model
    stack.append(Layer(os.path.join(layer_library_dir, 'post-processing')))
    #Add the load coordinates with a model merge
    stack.append(Layer(os.path.join(layer_library_dir, 'merging-layer')))
    #Add the capacitor coordinates with a model merge
    stack.append(Layer(os.path.join(layer_library_dir, 'merging-layer')))
    #Set number of customers
    stack.append(Layer(os.path.join(layer_library_dir, 'set_num_customers')))
    #Split the network into feeders
    stack.append(Layer(os.path.join(layer_library_dir, 'network_split')))
    #Calculate metrics on customers per transformer
    stack.append(
        Layer(
            os.path.join(layer_library_dir,
                         'partitioned_customers_per_transformer_plots')))
    #Add intermediate node coordinates
    stack.append(Layer(os.path.join(layer_library_dir, 'intermediate_node')))
    #Find missing coordinates
    stack.append(Layer(os.path.join(layer_library_dir, 'find_missing_coords')))
    #Adjust overlaid nodes
    stack.append(Layer(os.path.join(layer_library_dir,
                                    'move_overlayed_nodes')))
    #Add cyme substations
    stack.append(Layer(os.path.join(layer_library_dir,
                                    'add_cyme_substations')))
    #Add ltc control settings
    stack.append(Layer(os.path.join(layer_library_dir, 'set_ltc_controls')))
    #Add fuse control settings
    stack.append(Layer(os.path.join(layer_library_dir, 'set_fuse_controls')))
    #Add extra switches to long lines
    stack.append(
        Layer(os.path.join(layer_library_dir, 'add_switches_to_long_lines')))
    #Add Additional regulators
    stack.append(
        Layer(os.path.join(layer_library_dir, 'add_additional_regulators')))
    #Add Capacitor control settings
    stack.append(
        Layer(os.path.join(layer_library_dir, 'set_capacitor_controlers')))
    #Reduce overloaded nodes
    stack.append(
        Layer(os.path.join(layer_library_dir, 'reduce_overload_nodes')))
    #Set any delta connections
    stack.append(Layer(os.path.join(layer_library_dir, 'set_delta_systems')))
    #Set source kv
    stack.append(Layer(os.path.join(layer_library_dir, 'set_source_voltage')))
    #Write to CYME
    stack.append(Layer(os.path.join(layer_library_dir, 'to_cyme')))
    #Write to Ditto json
    stack.append(Layer(os.path.join(layer_library_dir, 'to_json')))
    #Copy Tag file over
    stack.append(Layer(os.path.join(layer_library_dir, 'add_tags')))
    #Run validation metrics
    stack.append(
        Layer(os.path.join(layer_library_dir, 'statistical_validation')))
    # Switch every layer to USE mode so args/kwargs can be assigned below.
    for layer in stack:
        layer.args.mode = ArgMode.USE
        layer.kwargs.mode = ArgMode.USE
    #Load coordinate layer
    load_coordinates = stack[0]
    load_coordinates.kwargs['input_filename'] = os.path.join(
        dataset_dir, region, 'IntermediateFormat',
        'Loads_IntermediateFormat.csv')
    load_coordinates.kwargs['output_filename'] = os.path.join(
        dataset_dir, region, 'IntermediateFormat',
        'Loads_IntermediateFormat2.csv')
    load_coordinates.kwargs['object_name'] = 'Load'
    #Capacitor coordinate layer
    capacitor_coordinates = stack[1]
    capacitor_coordinates.kwargs['input_filename'] = os.path.join(
        dataset_dir, region, 'IntermediateFormat',
        'Capacitors_IntermediateFormat.csv')
    capacitor_coordinates.kwargs['output_filename'] = os.path.join(
        dataset_dir, region, 'IntermediateFormat',
        'Capacitors_IntermediateFormat2.csv')
    capacitor_coordinates.kwargs['object_name'] = 'Capacitor'
    #Read OpenDSS layer
    from_opendss = stack[2]
    from_opendss.args[0] = os.path.join(region, 'OpenDSS', 'Master.dss')
    from_opendss.args[1] = os.path.join(region, 'OpenDSS', 'BusCoord.dss')
    from_opendss.kwargs['base_dir'] = dataset_dir
    #Set regulators with setpoints
    rnm_regulators = stack[3]
    rnm_regulators.kwargs['rnm_name'] = 'CRegulador'
    rnm_regulators.kwargs['setpoint'] = 103
    #Ensure all LV lines are triplex
    set_lv_triplex = stack[4]
    set_lv_triplex.kwargs['to_replace'] = ['Ionic', 'Corinthian', 'Doric']
    #Modify layer
    #No input except the model. Nothing to do here...
    post_processing = stack[5]
    post_processing.kwargs['path_to_feeder_file'] = os.path.join(
        dataset_dir, region, 'Auxiliary', 'Feeder.txt')
    post_processing.kwargs['path_to_switching_devices_file'] = os.path.join(
        dataset_dir, region, 'OpenDSS', 'SwitchingDevices.dss')
    post_processing.kwargs['center_tap_postprocess'] = True
    post_processing.kwargs['switch_to_recloser'] = True
    #Merging Load layer
    merging_load = stack[6]
    merging_load.kwargs['filename'] = os.path.join(
        dataset_dir, region, 'IntermediateFormat',
        'Loads_IntermediateFormat2.csv')
    #Merging Capacitor Layer
    merging_caps = stack[7]
    merging_caps.kwargs['filename'] = os.path.join(
        dataset_dir, region, 'IntermediateFormat',
        'Capacitors_IntermediateFormat2.csv')
    #Resetting customer number layer
    customer = stack[8]
    customer.kwargs['num_customers'] = 1
    #Splitting layer
    split = stack[9]
    split.kwargs['path_to_feeder_file'] = os.path.join(dataset_dir, region,
                                                       'Auxiliary',
                                                       'Feeder.txt')
    split.kwargs['path_to_no_feeder_file'] = os.path.join(
        dataset_dir, region, 'Auxiliary', 'NoFeeder.txt')
    split.kwargs['compute_metrics'] = True
    split.kwargs[
        'compute_kva_density_with_transformers'] = True  #RNM networks have LV information
    split.kwargs['excel_output'] = os.path.join('.', 'results_v2', region,
                                                'base', 'cyme', 'metrics.csv')
    split.kwargs['json_output'] = os.path.join('.', 'results_v2', region,
                                               'base', 'cyme', 'metrics.json')
    #Customer per Transformer plotting layer
    transformer_metrics = stack[10]
    transformer_metrics.kwargs['customer_file'] = os.path.join(
        dataset_dir, region, 'Inputs', 'customers_ext.txt')
    transformer_metrics.kwargs['output_folder'] = os.path.join(
        '.', 'results_v2', region, 'base', 'cyme')
    #Intermediate node layer
    inter = stack[11]
    inter.kwargs['filename'] = os.path.join(dataset_dir, region, 'OpenDSS',
                                            'LineCoord.txt')
    # Missing coords layer (stack[12])
    # No args/kwargs for this layer
    # Move overlayed node layer
    adjust = stack[13]
    adjust.kwargs['delta_x'] = 10
    adjust.kwargs['delta_y'] = 10
    #Substations
    add_substations = stack[14]
    # Locate the (single) README file in the region's Inputs folder, if any.
    readme_list = [
        os.path.join(dataset_dir, region, 'Inputs', f)
        for f in os.listdir(os.path.join(dataset_dir, region, 'Inputs'))
        if f.startswith('README')
    ]
    readme = None
    if len(readme_list) == 1:
        readme = readme_list[0]
    add_substations.args[0] = os.path.join(dataset_dir, region, 'Auxiliary',
                                           'Feeder.txt')
    add_substations.kwargs['base_dir'] = dataset_dir
    add_substations.kwargs['readme_file'] = readme
    #LTC Controls
    ltc_controls = stack[15]
    ltc_controls.kwargs['setpoint'] = 103
    #Fuse Controls
    fuse_controls = stack[16]
    fuse_controls.kwargs['current_rating'] = 100
    fuse_controls.kwargs['high_current_rating'] = 600
    #Add switch in long lines
    switch_cut = stack[17]
    switch_cut.kwargs['cutoff_length'] = 800
    #Add additional regulators
    additional_regs = stack[18]
    additional_regs.kwargs['file_location'] = os.path.join(
        dataset_dir, region, 'Auxiliary', 'additional_regs.csv')
    additional_regs.kwargs['setpoint'] = 103
    # Capacitor controls
    cap_controls = stack[19]
    cap_controls.kwargs['delay'] = 100
    cap_controls.kwargs['lowpoint'] = 120.5
    cap_controls.kwargs['highpoint'] = 125
    # Reduce overloaded nodes
    overload_nodes = stack[20]
    overload_nodes.kwargs['powerflow_file'] = os.path.join(
        dataset_dir, region, 'Auxiliary', 'powerflow.csv')
    overload_nodes.kwargs['threshold'] = 0.94
    overload_nodes.kwargs['scale_factor'] = 2.0
    # Set delta loads and transformers
    delta = stack[21]
    # README lookup repeated here (kept as-is; same result as above).
    readme_list = [
        os.path.join(dataset_dir, region, 'Inputs', f)
        for f in os.listdir(os.path.join(dataset_dir, region, 'Inputs'))
        if f.startswith('README')
    ]
    readme = None
    if len(readme_list) == 1:
        readme = readme_list[0]
    delta.kwargs['readme_location'] = readme
    #Set source KV value
    set_source = stack[22]
    set_source.kwargs['source_kv'] = 230
    set_source.kwargs['source_names'] = ['st_mat']
    #Write to CYME
    final = stack[23]
    final.args[0] = os.path.join('.', 'results_v2', region, 'base', 'cyme')
    #Dump to Ditto json
    final_json = stack[24]
    final_json.kwargs['base_dir'] = os.path.join('.', 'results_v2', region,
                                                 'base', 'json_cyme')
    #Write Tags
    tags = stack[25]
    tags.kwargs['output_folder'] = os.path.join('.', 'results_v2', region,
                                                'base', 'cyme')
    tags.kwargs['tag_file'] = os.path.join(dataset_dir, region, 'Auxiliary',
                                           'FeederStats.txt')
    #Write validation
    validation = stack[26]
    validation.kwargs['output_folder'] = os.path.join('.', 'results_v2',
                                                      region, 'base', 'cyme')
    validation.kwargs['input_folder'] = os.path.join('.', 'results_v2',
                                                     region, 'base', 'cyme')
    validation.kwargs['rscript_folder'] = os.path.join(
        '..', '..', 'smartdsR-analysis-lite')
    validation.kwargs['output_name'] = region
    stack.save(
        os.path.join(stack_library_dir,
                     'rnm_to_cyme_stack_' + region + '.json'))
def create_rnm_to_opendss_stack_pv(dataset_dir, region, pct_pv=15):
    '''Create the stack to convert RNM models in OpenDSS to OpenDSS.

    Simpler, single-scenario variant: places PV (4 kW, pf=1.0) on a random
    ``pct_pv`` percent of loads, then writes the model back out as OpenDSS
    and saves the stack to the stack library.

    Parameters:
        dataset_dir (str): root directory containing the per-region RNM dataset.
        region (str): region name (a subdirectory of ``dataset_dir``).
        pct_pv (int or float): percentage of loads to receive PV (coerced to
            float; default 15).
    '''
    pct_pv = float(pct_pv)
    stack = Stack(name='RNM to OpenDSS Stack')

    # NOTE: the numeric indices used below (stack[0] ... stack[19]) must
    # match this append order exactly.

    #Parse load coordinates csv file
    stack.append(Layer(os.path.join(layer_library_dir, 'csv_processing')))
    #Parse Capacitor coordinates csv file
    stack.append(Layer(os.path.join(layer_library_dir, 'csv_processing')))
    #Read the OpenDSS input model
    stack.append(Layer(os.path.join(layer_library_dir, 'from_opendss')))
    #Add regulators with setpoints
    stack.append(Layer(os.path.join(layer_library_dir, 'add_rnm_regulators')))
    #Modify the model
    stack.append(Layer(os.path.join(layer_library_dir, 'post-processing')))
    #Add the load coordinates with a model merge
    stack.append(Layer(os.path.join(layer_library_dir, 'merging-layer')))
    #Add the capacitor coordinates with a model merge
    stack.append(Layer(os.path.join(layer_library_dir, 'merging-layer')))
    #Set number of customers
    stack.append(Layer(os.path.join(layer_library_dir, 'set_num_customers')))
    #Split the network into feeders
    stack.append(Layer(os.path.join(layer_library_dir, 'network_split')))
    #Add intermediate node coordinates
    stack.append(Layer(os.path.join(layer_library_dir, 'intermediate_node')))
    #Create placement for PV
    stack.append(Layer(os.path.join(layer_library_dir, 'create_placement')))
    #Add PV
    stack.append(Layer(os.path.join(layer_library_dir, 'add_pv')))
    #Find missing coordinates
    stack.append(Layer(os.path.join(layer_library_dir, 'find_missing_coords')))
    #Adjust overlaid nodes
    stack.append(Layer(os.path.join(layer_library_dir, 'move_overlayed_nodes')))
    #Add cyme substations
    stack.append(Layer(os.path.join(layer_library_dir, 'add_cyme_substations')))
    #Add ltc control settings
    stack.append(Layer(os.path.join(layer_library_dir, 'set_ltc_controls')))
    #Add fuse control settings
    stack.append(Layer(os.path.join(layer_library_dir, 'set_fuse_controls')))
    #Add extra switches to long lines
    stack.append(
        Layer(os.path.join(layer_library_dir, 'add_switches_to_long_lines')))
    #Write to OpenDSS
    stack.append(Layer(os.path.join(layer_library_dir, 'to_opendss')))
    #Copy Tag file over
    stack.append(Layer(os.path.join(layer_library_dir, 'add_tags')))

    # Switch every layer to USE mode so args/kwargs can be assigned below.
    for layer in stack:
        layer.args.mode = ArgMode.USE
        layer.kwargs.mode = ArgMode.USE

    # Common output folder (hoisted; was rebuilt inline at each use).
    pv_output = os.path.join('.', 'results', region,
                             '{pct}_pv'.format(pct=pct_pv), 'opendss')

    #Load coordinate layer
    load_coordinates = stack[0]
    load_coordinates.kwargs['input_filename'] = os.path.join(
        dataset_dir, region, 'IntermediateFormat',
        'Loads_IntermediateFormat.csv')
    load_coordinates.kwargs['output_filename'] = os.path.join(
        dataset_dir, region, 'IntermediateFormat',
        'Loads_IntermediateFormat2.csv')
    load_coordinates.kwargs['object_name'] = 'Load'

    #Capacitor coordinate layer
    capacitor_coordinates = stack[1]
    capacitor_coordinates.kwargs['input_filename'] = os.path.join(
        dataset_dir, region, 'IntermediateFormat',
        'Capacitors_IntermediateFormat.csv')
    capacitor_coordinates.kwargs['output_filename'] = os.path.join(
        dataset_dir, region, 'IntermediateFormat',
        'Capacitors_IntermediateFormat2.csv')
    capacitor_coordinates.kwargs['object_name'] = 'Capacitor'

    #Read OpenDSS layer
    from_opendss = stack[2]
    from_opendss.args[0] = os.path.join(region, 'OpenDSS', 'Master.dss')
    from_opendss.args[1] = os.path.join(region, 'OpenDSS', 'BusCoord.dss')
    from_opendss.kwargs['base_dir'] = dataset_dir

    #Set regulators with setpoints
    rnm_regulators = stack[3]
    rnm_regulators.kwargs['rnm_name'] = 'CRegulador'
    rnm_regulators.kwargs['setpoint'] = 103

    #Modify layer
    post_processing = stack[4]
    post_processing.kwargs['path_to_feeder_file'] = os.path.join(
        dataset_dir, region, 'Auxiliary', 'Feeder.txt')
    post_processing.kwargs['path_to_switching_devices_file'] = os.path.join(
        dataset_dir, region, 'OpenDSS', 'SwitchingDevices.dss')
    # BUG FIX: the original set center_tap_postprocess = True and then
    # immediately overwrote it with False; only the final value ever took
    # effect, so the single effective assignment is kept.
    post_processing.kwargs['center_tap_postprocess'] = False
    post_processing.kwargs['switch_to_recloser'] = True

    #Merging Load layer
    merging_load = stack[5]
    merging_load.kwargs['filename'] = os.path.join(
        dataset_dir, region, 'IntermediateFormat',
        'Loads_IntermediateFormat2.csv')

    #Merging Capacitor Layer
    merging_caps = stack[6]
    merging_caps.kwargs['filename'] = os.path.join(
        dataset_dir, region, 'IntermediateFormat',
        'Capacitors_IntermediateFormat2.csv')

    #Resetting customer number layer
    customer = stack[7]
    customer.kwargs['num_customers'] = 1

    #Splitting layer
    split = stack[8]
    split.kwargs['path_to_feeder_file'] = os.path.join(
        dataset_dir, region, 'Auxiliary', 'Feeder.txt')
    split.kwargs['path_to_no_feeder_file'] = os.path.join(
        dataset_dir, region, 'Auxiliary', 'NoFeeder.txt')
    split.kwargs['compute_metrics'] = True
    #RNM networks have LV information
    split.kwargs['compute_kva_density_with_transformers'] = True
    split.kwargs['excel_output'] = os.path.join(pv_output, 'metrics.csv')
    split.kwargs['json_output'] = os.path.join(pv_output, 'metrics.json')

    #Intermediate node layer
    inter = stack[9]
    inter.kwargs['filename'] = os.path.join(dataset_dir, region, 'OpenDSS',
                                            'LineCoord.txt')

    #Create Placement for PV
    feeders = 'all'
    equipment_type = 'ditto.models.load.Load'
    selection = ('Random', pct_pv)
    seed = 1
    placement_folder = os.path.join(placement_library_dir, region)
    file_name = (feeders + '_' + equipment_type.split('.')[-1] + '_' +
                 selection[0] + '-' + str(selection[1]) + '_' + str(seed) +
                 '.txt')
    create_placement = stack[10]
    create_placement.args[0] = feeders
    create_placement.args[1] = equipment_type
    create_placement.args[2] = selection
    create_placement.args[3] = seed
    create_placement.args[4] = placement_folder
    create_placement.args[5] = file_name

    #Add PV
    add_pv = stack[11]
    add_pv.args[0] = os.path.join(placement_folder, file_name)  # placement
    add_pv.args[1] = 4000  # rated power (Watts)
    add_pv.args[2] = 1.0  # power factor

    # Missing coords layer (stack[12]) takes no args/kwargs.

    # Move overlayed node layer
    adjust = stack[13]
    adjust.kwargs['delta_x'] = 10
    adjust.kwargs['delta_y'] = 10

    #Substations
    add_substations = stack[14]
    # Locate the (single) README file in the region's Inputs folder, if any.
    readme_list = [
        os.path.join(dataset_dir, region, 'Inputs', f)
        for f in os.listdir(os.path.join(dataset_dir, region, 'Inputs'))
        if f.startswith('README')
    ]
    readme = readme_list[0] if len(readme_list) == 1 else None
    add_substations.args[0] = os.path.join(dataset_dir, region, 'Auxiliary',
                                           'Feeder.txt')
    add_substations.kwargs['base_dir'] = dataset_dir
    add_substations.kwargs['readme_file'] = readme

    #LTC Controls
    ltc_controls = stack[15]
    ltc_controls.kwargs['setpoint'] = 103

    #Fuse Controls
    fuse_controls = stack[16]
    fuse_controls.kwargs['current_rating'] = 100

    #Add switch in long lines
    switch_cut = stack[17]
    switch_cut.kwargs['cutoff_length'] = 800

    #Write to OpenDSS
    final = stack[18]
    final.args[0] = pv_output
    final.kwargs['separate_feeders'] = True
    final.kwargs['separate_substations'] = True

    #Write Tags
    tags = stack[19]
    tags.kwargs['output_folder'] = pv_output
    tags.kwargs['tag_file'] = os.path.join(dataset_dir, region, 'Auxiliary',
                                           'FeederStats.txt')

    stack.save(
        os.path.join(
            stack_library_dir, 'rnm_to_opendss_stack_pv_' + region + '_' +
            str(pct_pv) + '_pct.json'))
def apply(cls, stack, model, feeder_file, output_substation_folder, base_dir=None, substation_folder=None):
    """Replace RNM substation placeholders in *model* with detailed substation models.

    Reads the RNM feeder file to work out which feeders hang off which
    substation, deletes the substation-internal nodes/edges from *model*,
    writes a concretized OpenDSS substation (picked from *substation_folder*
    templates) per substation into *output_substation_folder*, re-reads each
    written substation with the 'From OpenDSS' layer, and merges the results
    back into the model.

    :param stack: the running Stack; searched for a 'From OpenDSS' layer
    :param model: ditto model (Store); must support build_networkx/get_internal_edges
    :param feeder_file: RNM file mapping feeders to substations (space separated)
    :param output_substation_folder: directory where per-substation .dss files are written
    :param base_dir: optional prefix applied to feeder_file/output_substation_folder
        when those paths do not already exist
    :param substation_folder: directory of generic substation templates; defaults
        to the 'resources' folder next to this layer
    :returns: the final merged ditto model
    :raises Exception: if the 'From OpenDSS' layer cannot be located, or if no
        template substation has enough feeder bays for a substation in the model
    """
    logger.debug("Starting add_substations")
    if base_dir and (not os.path.exists(feeder_file)):
        feeder_file = os.path.join(base_dir, feeder_file)
    if base_dir and (not os.path.exists(output_substation_folder)):
        output_substation_folder = os.path.join(base_dir, output_substation_folder)
    if substation_folder is None:
        substation_folder = os.path.join(os.path.dirname(__file__), 'resources')

    # Need to load OpenDSS files later. Make sure we can find the required layer.
    from_opendss_layer_dir = None
    # First look in the stack for an already-loaded 'From OpenDSS' layer.
    for layer in stack:
        if layer.name == 'From OpenDSS':
            from_opendss_layer_dir = layer.layer_dir
            break
    # Then fall back to this layer's library directory.
    if from_opendss_layer_dir is None:
        from_opendss_layer_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'from_opendss')
        if not os.path.exists(from_opendss_layer_dir):
            msg = "Cannot find the 'From OpenDSS' layer."
            logger.error(msg)
            raise Exception(msg)

    logger.debug("Building the model network")
    model.build_networkx(source=None)  # Used to remove internal edges in substation

    # The RNM file specifying which feeders belong to which substation.
    df = pd.read_csv(feeder_file, sep=' ')
    substations = {}
    for index, row in df.iterrows():
        substation = row.iloc[1]
        feeder = row.iloc[2]
        buses = feeder.split('->')
        bus1 = buses[1]
        bus2 = buses[0]
        if bus1[0:4].lower() == 'ucmv':  # Not swapped if MV load connected to it
            bus1 = buses[0]
            bus2 = buses[1]
        adjusted_feeder = bus1 + '->' + bus2  # In the feeder file bus1 and bus2 are swapped
        if substation in substations:
            substations[substation].add(adjusted_feeder)
        else:
            substations[substation] = set([adjusted_feeder])

    logger.debug("Building to_delete and modifier")
    to_delete = Store()
    modifier = Modifier()
    # sub is the name of the substation; substations[sub] is the set of connected feeders.
    for sub in substations:
        logger.debug("Processing substation {}. There are {} in total.".format(sub, len(substations)))
        all_nodes = []
        subname = sub.replace('.', '')
        subname = subname.lower()
        all_nodes.append(subname)
        hv_subname = subname + '->' + subname.replace('1247', '69') + '_s'
        all_nodes.append(hv_subname)
        sourcenode = hv_subname + '_s'  # Source point of connection to the substation
        all_nodes.append(sourcenode)
        feeder_names = []  # Feeder points of connection to the substation
        rated_power = None
        emergency_power = None
        loadloss = None
        noloadloss = None
        reactance = None
        for feeder in substations[sub]:
            feeder_name = feeder.replace('.', '') + '_s'
            feeder_name = feeder_name.lower()
            feeder_names.append(feeder_name)
            all_nodes.append(feeder_name)
        all_nodes_set = set(all_nodes)
        internal_edges = model.get_internal_edges(all_nodes_set)
        # Mark every substation-internal node for deletion by creating a bare
        # object of the same type (by name) in the to_delete store.
        for n in all_nodes_set:
            obj_name = type(model[n]).__name__
            base_obj = globals()[obj_name](to_delete)
            base_obj.name = n
        for e in internal_edges:
            obj_name = type(model[e]).__name__
            if obj_name == 'PowerTransformer':
                # Capture the substation transformer parameters so they can be
                # written into the generic substation template below.
                reactance = model[e].reactances[0]  # Assume the same for all windings in a substation
                loadloss = model[e].loadloss
                noloadloss = model[e].noload_loss
                rated_power = model[e].windings[0].rated_power  # Assume the same for all windings in a substation
                emergency_power = model[e].windings[0].emergency_power  # Assume the same for all windings in a substation
            base_obj = globals()[obj_name](to_delete)
            base_obj.name = e

        num_model_feeders = len(substations[sub])
        not_allocated = True
        # Write the substation files to disk. These are then read and added.
        # Important: the template folders must be listed in increasing size order
        # so the smallest sufficient substation is selected first.
        for sub_file in os.listdir(substation_folder):
            feeders_df = pd.read_csv(substation_folder + '/%s/feeders.csv' % sub_file)
            if len(feeders_df) >= num_model_feeders:
                generic_source = list(pd.read_csv(substation_folder + '/%s/source.csv' % sub_file)['source'])
                # Select the first n feeder bays of the substation as required.
                generic_feeders = list(feeders_df['feeders'])[:num_model_feeders]
                generic_nodes = list(pd.read_csv(substation_folder + '/%s/all_nodes.csv' % sub_file)['node'])
                with open(substation_folder + '/%s/%s.dss' % (sub_file, sub_file), 'r') as generic_substation_fp:
                    generic_substation_dss = generic_substation_fp.read()
                substation_dss = generic_substation_dss.replace(generic_source[0], '%s' % sourcenode)  # Replace source node
                for i in range(len(feeder_names)):
                    substation_dss = substation_dss.replace(generic_feeders[i], '%s' % feeder_names[i])  # Replace feeder nodes
                # TODO: do this in a better way.
                # Replace any remaining nodes that haven't been changed yet.
                # Unallocated feeder heads are managed here.
                for i in range(len(generic_nodes)):
                    substation_dss = substation_dss.replace(generic_nodes[i] + ' ', '%s_%s_%s ' % (sub_file, subname, generic_nodes[i]))
                    substation_dss = substation_dss.replace(generic_nodes[i] + '.', '%s_%s_%s.' % (sub_file, subname, generic_nodes[i]))
                # Namespace every element in the template so names cannot clash
                # across substations.
                substation_dss = substation_dss.replace('Line.', 'Line.%s_%s_' % (sub_file, subname))
                substation_dss = substation_dss.replace('LineCode.', 'LineCode.%s_%s_' % (sub_file, subname))
                substation_dss = substation_dss.replace('Capacitor.', 'Capacitor.%s_%s_' % (sub_file, subname))
                substation_dss = substation_dss.replace('CapControl.', 'CapControl.%s_%s_' % (sub_file, subname))
                substation_dss = substation_dss.replace('Monitor.', 'Monitor.%s_%s_' % (sub_file, subname))
                substation_dss = substation_dss.replace('Relay.', 'Relay.%s_%s_' % (sub_file, subname))
                substation_dss = substation_dss.replace('Transformer.', 'Transformer.%s_%s_' % (sub_file, subname))
                substation_dss = substation_dss.replace('transformer=', 'transformer=%s_%s_' % (sub_file, subname))
                substation_dss = substation_dss.replace('Regcontrol.', 'Regcontrol.%s_%s_' % (sub_file, subname))
                # TODO: WARNING: This is a total hack to replace the substation attributes and should not be used long-term.
                # This is very specific to the substations used in dataset3 and is very case sensitive.
                substation_dss = substation_dss.replace('kvas=(30000, 30000)', 'kvas=(%f, %f)' % (rated_power, rated_power))
                substation_dss = substation_dss.replace('kvas=(25000, 25000)', 'kvas=(%f, %f)' % (rated_power / 3.0, rated_power / 3.0))
                substation_dss = substation_dss.replace('%noloadloss=0.12', '%noloadloss={noll}'.format(noll=noloadloss))
                substation_dss = substation_dss.replace('%loadloss=0.1', '%loadloss={ll}'.format(ll=loadloss))
                substation_dss = substation_dss.replace('XHL=0.1', 'XHL=%f' % (reactance))
                if not os.path.isdir(output_substation_folder + '/%s' % subname):
                    os.makedirs(output_substation_folder + '/%s' % subname)
                with open(output_substation_folder + '/%s/substation.dss' % (subname), 'w') as substation_output:
                    substation_output.write(substation_dss)
                # Empty Buscoords.dss — required by the From OpenDSS reader below.
                with open(output_substation_folder + '/%s/Buscoords.dss' % subname, 'w') as buscoords:
                    pass
                with open(substation_folder + '/%s/master.dss' % sub_file, 'r') as masterfile_fp:
                    masterfile_dss = masterfile_fp.read()
                masterfile_dss = masterfile_dss.replace('SourceBus', sourcenode)
                with open(output_substation_folder + '/%s/master.dss' % subname, 'w') as master_output:
                    master_output.write(masterfile_dss)
                #shutil.copyfile(substation_folder+'/%s/master.dss'%sub_file,output_substation_folder+'/%s/master.dss'%subname)
                not_allocated = False
                break
        if not_allocated:
            # Fixed: the original raised a bare string, which is a TypeError in
            # Python 3 and masked the intended message.
            raise Exception('Substation too small. %d feeders needed. Exiting...' % (num_model_feeders))

    logger.debug("Creating reduced and final models")
    reduced_model = modifier.delete(model, to_delete)
    final_model = reduced_model
    from_opendss_layer = Layer(from_opendss_layer_dir)
    from_opendss_layer.args.mode = ArgMode.USE
    from_opendss_layer.kwargs.mode = ArgMode.USE
    # Read back every substation just written and merge it into the model.
    for p, dirnames, filenames in os.walk(output_substation_folder):
        for sub_folder in dirnames:
            logger.debug("Processing output_substation_folder/{}".format(sub_folder))
            from_opendss_layer.args[0] = os.path.join(output_substation_folder, sub_folder, 'master.dss')
            from_opendss_layer.args[1] = os.path.join(output_substation_folder, sub_folder, 'Buscoords.dss')
            from_opendss_layer.kwargs['read_power_source'] = False
            s = Stack()
            from_opendss_layer.run_layer(s)
            substation_model = s.model
            logger.debug("Adding model from {} to final_model".format(sub_folder))
            final_model = modifier.add(final_model, substation_model)
        break  # only walk the top level of output_substation_folder
    logger.debug("Returning {!r}".format(final_model))
    return final_model