# Example 1
def create_test_stack_basic(dataset_dir, dataset_name='dataset3'):
    """
    Saves a basic DiTTo test stack (from_opendss -> to_opendss) to the
    stack library.

    :param dataset_dir: Directory where the Smart-DS dataset resides
    :type dataset_dir: str
    :param dataset_name: Name of the Smart-DS dataset; if falsy, it is
        derived from the basename of dataset_dir
    :type dataset_name: str
    :raises NotImplementedError: if dataset_name is not a recognized dataset
    """
    # BUG FIX: resolve the dataset_name fallback *before* it is used in
    # the stack name (previously the fallback ran after the name was
    # already built, so an empty dataset_name produced an unnamed stack).
    if not dataset_name:
        dataset_name = os.path.basename(dataset_dir)

    stack = Stack(
        name='DiTTo Test Stack Basic {}'.format(dataset_name.title()))

    stack.append(Layer(os.path.join(layer_library_dir, 'from_opendss')))
    stack.append(Layer(os.path.join(layer_library_dir, 'to_opendss')))

    # Switch every layer into USE mode so args/kwargs can be assigned.
    for layer in stack:
        layer.args.mode = ArgMode.USE
        layer.kwargs.mode = ArgMode.USE

    if dataset_name == 'dataset3':
        # Reader: master and bus-coordinate files relative to base_dir.
        from_opendss = stack[0]
        from_opendss.args[0] = os.path.join('mixed_humid', 'industrial',
                                            'OpenDSS', 'master.dss')
        from_opendss.args[1] = os.path.join('mixed_humid', 'industrial',
                                            'OpenDSS', 'buscoords.dss')
        from_opendss.kwargs['base_dir'] = dataset_dir

        # Writer: output directory relative to base_dir.
        to_opendss = stack[1]
        to_opendss.args[0] = os.path.join('post_process', 'mixed_humid',
                                          'industrial')
        to_opendss.kwargs['base_dir'] = dataset_dir
    else:
        raise NotImplementedError(
            "Unknown dataset_name {!r}".format(dataset_name))

    stack.save(os.path.join(stack_library_dir, stack.suggested_filename))
# Example 2
def create_compute_metrics_sce(path, feeder_name):
    '''
        Create the stack to compute the metrics for SCE feeders (in CYME format).
    '''

    stack = Stack(name='SCE metrics Stack')

    # Layer 0 parses the CYME input model; layer 1 computes the metrics.
    for layer_name in ('from_cyme', 'sce_metric_computation_layer'):
        stack.append(Layer(os.path.join(layer_library_dir, layer_name)))

    # Switch every layer into USE mode so args/kwargs can be assigned.
    for layer in stack:
        layer.args.mode = ArgMode.USE
        layer.kwargs.mode = ArgMode.USE

    feeder_dir = os.path.join(path, feeder_name)

    # Configure the CYME reader with the feeder's input files.
    cyme_reader = stack[0]
    cyme_reader.kwargs['base_dir'] = feeder_dir
    cyme_reader.kwargs['network_filename'] = 'net.txt'
    cyme_reader.kwargs['equipment_filename'] = 'eqt.txt'
    cyme_reader.kwargs['load_filename'] = 'load.txt'

    # Configure the metric-computation layer's outputs (xlsx + json).
    metrics = stack[1]
    metrics.kwargs['feeder_name'] = feeder_name
    metrics.kwargs['output_filename_xlsx'] = os.path.join(
        feeder_dir, "metrics_{}.xlsx".format(feeder_name))
    metrics.kwargs['output_filename_json'] = os.path.join(
        feeder_dir, "metrics_{}.json".format(feeder_name))

    stack.save(os.path.join(stack_library_dir, 'compute_metrics_sce.json'))
# Example 3
def create_rnm_to_cyme_stack(dataset_dir, region):
    '''Create the stack that reads a serialized JSON model and writes CYME.'''

    base = os.path.join('.', 'results_v2', region, 'base')

    stack = Stack(name='JSON to CYME Stack')
    stack.append(Layer(os.path.join(layer_library_dir, 'from_json')))
    stack.append(Layer(os.path.join(layer_library_dir, 'to_cyme')))

    # Switch every layer into USE mode so args/kwargs can be assigned.
    for layer in stack:
        layer.args.mode = ArgMode.USE
        layer.kwargs.mode = ArgMode.USE

    # JSON reader configuration.
    reader = stack[0]
    reader.kwargs['input_filename'] = 'full_model.json'
    reader.kwargs['base_dir'] = os.path.join(base, 'json_cyme')

    # CYME writer: the output directory is its first positional argument.
    writer = stack[1]
    writer.args[0] = os.path.join(base, 'json_cyme_rerun')

    stack.save(os.path.join(
        stack_library_dir, 'json_to_cyme_stack_' + region + '.json'))
def create_rnm_to_opendss_stack(dataset_dir, region):
    '''Create the stack that reads a serialized JSON model and writes OpenDSS.'''

    base = os.path.join('.', 'results_v2', region, 'base')

    stack = Stack(name='JSON to OpenDSS Stack')
    stack.append(Layer(os.path.join(layer_library_dir, 'from_json')))
    stack.append(Layer(os.path.join(layer_library_dir, 'to_opendss')))

    # Switch every layer into USE mode so args/kwargs can be assigned.
    for layer in stack:
        layer.args.mode = ArgMode.USE
        layer.kwargs.mode = ArgMode.USE

    # JSON reader configuration.
    reader = stack[0]
    reader.kwargs['input_filename'] = 'full_model.json'
    reader.kwargs['base_dir'] = os.path.join(base, 'json_opendss')

    # OpenDSS writer: output directory is the first positional argument;
    # emit separate files per feeder and per substation.
    writer = stack[1]
    writer.args[0] = os.path.join(base, 'json_opendss_rerun')
    writer.kwargs['separate_feeders'] = True
    writer.kwargs['separate_substations'] = True

    stack.save(os.path.join(
        stack_library_dir, 'json_to_opendss_stack_' + region + '.json'))
# Example 5
def create_test_stack(dataset_dir, dataset_name='dataset3'):
    """
    Saves a DiTTo test stack to the stack_library.

    :param dataset_dir: Directory where the Smart-DS dataset resides
    :type dataset_dir: str
    :param dataset_name: Name of the Smart-DS dataset; if falsy, it is
        derived from the basename of dataset_dir
    :type dataset_name: str
    :raises NotImplementedError: if dataset_name is not a recognized dataset
    """
    # BUG FIX: resolve the dataset_name fallback *before* it is used in
    # the stack name (previously the fallback ran after the name was
    # already built, so an empty dataset_name produced an unnamed stack).
    if not dataset_name:
        dataset_name = os.path.basename(dataset_dir)

    stack = Stack(name='DiTTo Test Stack {}'.format(dataset_name.title()))
    stack.append(Layer(os.path.join(layer_library_dir, 'from_opendss')))
    stack.append(Layer(os.path.join(layer_library_dir, 'add_substations')))
    stack.append(Layer(os.path.join(layer_library_dir, 'add_timeseries_load')))
    stack.append(Layer(os.path.join(layer_library_dir, 'scale_loads')))
    stack.append(Layer(os.path.join(layer_library_dir, 'to_opendss')))

    # Switch every layer into USE mode so args/kwargs can be assigned.
    for layer in stack:
        layer.args.mode = ArgMode.USE
        layer.kwargs.mode = ArgMode.USE

    if dataset_name == 'dataset3':
        # Reader: master and bus-coordinate files relative to base_dir.
        from_opendss = stack[0]
        from_opendss.args[0] = os.path.join('mixed_humid', 'industrial',
                                            'OpenDSS', 'master.dss')
        from_opendss.args[1] = os.path.join('mixed_humid', 'industrial',
                                            'OpenDSS', 'buscoords.dss')
        from_opendss.kwargs['base_dir'] = dataset_dir

        # Substation layer: feeder list plus modified-substation directory.
        add_substations = stack[1]
        add_substations.args[0] = os.path.join('mixed_humid', 'industrial',
                                               'feeders', 'feeders.txt')
        add_substations.args[1] = os.path.join('post_process',
                                               'modified_substations')
        add_substations.kwargs['base_dir'] = dataset_dir

        # Timeseries load layer: extended customer data file.
        add_timeseries_load = stack[2]
        add_timeseries_load.args[0] = os.path.join('mixed_humid', 'industrial',
                                                   'consumers&street_map',
                                                   'customers_extended.csv')
        add_timeseries_load.kwargs['base_dir'] = dataset_dir

        # Scale all loads up by 10%.
        scale_loads = stack[3]
        scale_loads.kwargs['scale_factor'] = 1.1

        # Writer: output directory relative to base_dir.
        to_opendss = stack[4]
        to_opendss.args[0] = os.path.join('post_process', 'mixed_humid',
                                          'industrial')
        to_opendss.kwargs['base_dir'] = dataset_dir
    else:
        raise NotImplementedError(
            "Unknown dataset_name {!r}".format(dataset_name))

    stack.save(os.path.join(stack_library_dir, stack.suggested_filename))
# Example 6
def dataset3_timeseries(dataset_dir,
                        climate_zone='MixedHumid',
                        feeder_type='industrial',
                        pct_energy_efficiency=10):
    """
    Saves a Dataset3 timeseries stack (with energy-efficiency load
    scaling) to the stack library.

    :param dataset_dir: Directory where the Smart-DS dataset resides
    :param climate_zone: Climate-zone subdirectory of the dataset
    :param feeder_type: Feeder-type subdirectory of the dataset
    :param pct_energy_efficiency: Percent reduction applied to all loads
        by the scale_loads layer
    """
    stack_name = "Dataset3 Timeseries {}, {}, {} Percent Energy Efficient".format(
        climate_zone, feeder_type, pct_energy_efficiency)
    short_name = 'ds3_timeseries_{}_{}_{}pct_ee'.format(
        climate_zone, feeder_type, pct_energy_efficiency)

    stack = Stack(name=stack_name)
    for layer_name in ('from_opendss', 'add_substations',
                       'add_timeseries_load', 'scale_loads', 'to_opendss'):
        stack.append(Layer(os.path.join(layer_library_dir, layer_name)))

    # Switch every layer into USE mode so args/kwargs can be assigned.
    for layer in stack:
        layer.args.mode = ArgMode.USE
        layer.kwargs.mode = ArgMode.USE

    # Reader: master and bus-coordinate files relative to base_dir.
    reader = stack[0]
    reader.args[0] = os.path.join(climate_zone, feeder_type, 'OpenDSS',
                                  'master.dss')
    reader.args[1] = os.path.join(climate_zone, feeder_type, 'OpenDSS',
                                  'buscoords.dss')
    reader.kwargs['base_dir'] = dataset_dir

    # Substation layer: feeder list plus modified-substation directory.
    substations = stack[1]
    substations.args[0] = os.path.join(climate_zone, feeder_type,
                                       'feeders', 'feeders.txt')
    substations.args[1] = os.path.join('post_process',
                                       'modified_substations')
    substations.kwargs['base_dir'] = dataset_dir

    # Timeseries load layer: extended customer data file.
    timeseries = stack[2]
    timeseries.args[0] = os.path.join(climate_zone, feeder_type,
                                      'Inputs', 'customers_extended.txt')
    timeseries.kwargs['base_dir'] = dataset_dir

    # Energy efficiency shrinks every load by the requested percentage.
    scaler = stack[3]
    scaler.kwargs['scale_factor'] = 1.0 - float(pct_energy_efficiency) / 100.0

    # Writer: outputs go to the current directory.
    writer = stack[4]
    writer.args[0] = '.'

    stack.save(os.path.join(stack_library_dir, short_name + '.json'))
# Example 7
def create_rnm_to_opendss_stack_pv(dataset_dir, region, pct_pv=15):
    '''Create the stack to convert RNM models in OpenDSS to OpenDSS, adding PV.

    :param dataset_dir: Root directory of the RNM dataset
    :param region: Region subdirectory name within dataset_dir
    :param pct_pv: Percentage of loads randomly selected for PV placement
    '''

    pct_pv = float(pct_pv)
    stack = Stack(name='RNM to OpenDSS Stack')

    #Parse load coordinates csv file
    stack.append(Layer(os.path.join(layer_library_dir,'csv_processing')))

    #Parse Capacitor coordinates csv file
    stack.append(Layer(os.path.join(layer_library_dir,'csv_processing')))

    #Read the OpenDSS input model
    stack.append(Layer(os.path.join(layer_library_dir,'from_opendss')))

    #Add regulators with setpoints
    stack.append(Layer(os.path.join(layer_library_dir,'add_rnm_regulators')))

    #Add Timeseries loads
    stack.append(Layer(os.path.join(layer_library_dir,'connect_timeseries_loads')))

    #Modify the model
    stack.append(Layer(os.path.join(layer_library_dir,'post-processing')))

    #Add the load coordinates with a model merge
    stack.append(Layer(os.path.join(layer_library_dir,'merging-layer')))

    #Add the capacitor coordinates with a model merge
    stack.append(Layer(os.path.join(layer_library_dir,'merging-layer')))

    #Set number of customers
    stack.append(Layer(os.path.join(layer_library_dir,'set_num_customers')))

    #Split the network into feeders
    stack.append(Layer(os.path.join(layer_library_dir,'network_split')))

    #Add intermediate node coordinates
    stack.append(Layer(os.path.join(layer_library_dir,'intermediate_node')))

    #Create placement for PV
    stack.append(Layer(os.path.join(layer_library_dir,'create_placement')))

    #Add PV
    stack.append(Layer(os.path.join(layer_library_dir,'add_pv')))

    #Find missing coordinates
    stack.append(Layer(os.path.join(layer_library_dir,'find_missing_coords')))

    #Adjust overlaid nodes
    stack.append(Layer(os.path.join(layer_library_dir,'move_overlayed_nodes')))

    #Add cyme substations
    stack.append(Layer(os.path.join(layer_library_dir,'add_cyme_substations')))

    #Add ltc control settings
    stack.append(Layer(os.path.join(layer_library_dir,'set_ltc_controls')))

    #Add fuse control settings
    stack.append(Layer(os.path.join(layer_library_dir,'set_fuse_controls')))

    #Add extra switches to long lines
    stack.append(Layer(os.path.join(layer_library_dir,'add_switches_to_long_lines')))

    #Write to OpenDSS
    stack.append(Layer(os.path.join(layer_library_dir,'to_opendss')))

    #Copy Tag file over
    stack.append(Layer(os.path.join(layer_library_dir,'add_tags')))

    # Switch every layer into USE mode so args/kwargs can be assigned.
    for layer in stack:
        layer.args.mode = ArgMode.USE
        layer.kwargs.mode = ArgMode.USE

    #Load coordinate layer
    load_coordinates = stack[0]
    load_coordinates.kwargs['input_filename'] = os.path.join(dataset_dir,region,'IntermediateFormat','Loads_IntermediateFormat.csv')
    load_coordinates.kwargs['output_filename'] = os.path.join(dataset_dir,region,'IntermediateFormat','Loads_IntermediateFormat2.csv')
    load_coordinates.kwargs['object_name'] = 'Load'

    #Capacitor coordinate layer
    capacitor_coordinates = stack[1]
    capacitor_coordinates.kwargs['input_filename'] = os.path.join(dataset_dir,region,'IntermediateFormat','Capacitors_IntermediateFormat.csv')
    capacitor_coordinates.kwargs['output_filename'] = os.path.join(dataset_dir,region,'IntermediateFormat','Capacitors_IntermediateFormat2.csv')
    capacitor_coordinates.kwargs['object_name'] = 'Capacitor'

    #Read OpenDSS layer
    from_opendss = stack[2]
    from_opendss.args[0] = os.path.join(region,'OpenDSS','Master.dss')
    from_opendss.args[1] = os.path.join(region,'OpenDSS','BusCoord.dss')
    from_opendss.kwargs['base_dir'] = dataset_dir

    #Set regulators with setpoints
    rnm_regulators = stack[3]
    rnm_regulators.kwargs['rnm_name'] = 'CRegulador'
    rnm_regulators.kwargs['setpoint'] = 103

    #Timeseries Loads
    add_timeseries = stack[4]
    add_timeseries.kwargs['customer_file'] = os.path.join(dataset_dir,region,'Inputs','customers_ext.txt')
    add_timeseries.kwargs['residential_load_data'] = os.path.join('..','..','Loads','residential','Greensboro','datapoints_elec_only.h5')
    add_timeseries.kwargs['residential_load_metadata'] = os.path.join('..','..','Loads','residential','Greensboro','results_fips.csv')
    add_timeseries.kwargs['commercial_load_data'] = os.path.join('..','..','Loads','commercial','NC - Guilford','com_guilford_electricity_only.dsg')
    add_timeseries.kwargs['commercial_load_metadata'] = os.path.join('..','..','Loads','commercial','NC - Guilford','results.csv')
    add_timeseries.kwargs['output_folder'] = os.path.join('.','results',region,'timeseries_{pct}_pv'.format(pct=pct_pv),'opendss')
    add_timeseries.kwargs['write_cyme_file'] = False

    #Modify layer
    post_processing = stack[5]
    post_processing.kwargs['path_to_feeder_file'] = os.path.join(dataset_dir,region,'Auxiliary','Feeder.txt')
    post_processing.kwargs['path_to_switching_devices_file'] = os.path.join(dataset_dir,region,'OpenDSS','SwitchingDevices.dss')
    post_processing.kwargs['switch_to_recloser'] = True
    # BUG FIX: center_tap_postprocess was assigned True and then
    # immediately overwritten with False; only the effective value is kept.
    post_processing.kwargs['center_tap_postprocess'] = False

    #Merging Load layer
    merging_load = stack[6]
    merging_load.kwargs['filename'] = os.path.join(dataset_dir,region,'IntermediateFormat','Loads_IntermediateFormat2.csv')

    #Merging Capacitor Layer
    merging_caps = stack[7]
    merging_caps.kwargs['filename'] = os.path.join(dataset_dir,region,'IntermediateFormat','Capacitors_IntermediateFormat2.csv')

    #Resetting customer number layer
    customer = stack[8]
    customer.kwargs['num_customers'] = 1

    #Splitting layer
    split = stack[9]
    split.kwargs['path_to_feeder_file'] = os.path.join(dataset_dir,region,'Auxiliary','Feeder.txt')
    split.kwargs['path_to_no_feeder_file'] = os.path.join(dataset_dir,region,'Auxiliary','NoFeeder.txt')
    split.kwargs['compute_metrics'] = True
    split.kwargs['compute_kva_density_with_transformers'] = True #RNM networks have LV information
    split.kwargs['excel_output'] = os.path.join('.', 'results', region, 'timeseries_{pct}_pv'.format(pct=pct_pv),'opendss', 'metrics.csv')
    split.kwargs['json_output'] = os.path.join('.', 'results', region,  'timeseries_{pct}_pv'.format(pct=pct_pv),'opendss', 'metrics.json')

    #Intermediate node layer
    inter = stack[10]
    inter.kwargs['filename'] = os.path.join(dataset_dir,region,'OpenDSS','LineCoord.txt')

    #Create Placement for PV: select pct_pv percent of loads at random.
    feeders = 'all'
    equipment_type = 'ditto.models.load.Load'
    selection = ('Random',pct_pv)
    seed = 1
    placement_folder = os.path.join(placement_library_dir,region)
    file_name = feeders+'_'+equipment_type.split('.')[-1]+'_'+selection[0]+'-'+str(selection[1])+'_'+str(seed)+'.txt'

    create_placement = stack[11]
    create_placement.args[0] = feeders
    create_placement.args[1] = equipment_type
    create_placement.args[2] = selection
    create_placement.args[3] = seed
    create_placement.args[4] = placement_folder
    create_placement.args[5] = file_name

    add_pv = stack[12]
    add_pv.args[0] = os.path.join(placement_folder,file_name) # placement
    add_pv.args[1] = 4000                                     # rated power (Watts)
    add_pv.args[2] = 1.0                                      # power factor

    # Missing coords (stack[13])
    # No args/kwargs for this layer

    # Move overlayed node layer
    adjust = stack[14]
    adjust.kwargs['delta_x'] = 10
    adjust.kwargs['delta_y'] = 10

    #Substations: pick up the single README file from Inputs, if present.
    add_substations = stack[15]
    readme_list = [os.path.join(dataset_dir,region,'Inputs',f) for f in os.listdir(os.path.join(dataset_dir,region,'Inputs')) if f.startswith('README')]
    readme = None
    if len(readme_list)==1:
        readme = readme_list[0]
    add_substations.args[0] = os.path.join(dataset_dir,region,'Auxiliary', 'Feeder.txt')
    add_substations.kwargs['base_dir'] = dataset_dir
    add_substations.kwargs['readme_file'] = readme

    #LTC Controls
    ltc_controls = stack[16]
    ltc_controls.kwargs['setpoint'] = 103

    #Fuse Controls
    fuse_controls = stack[17]
    fuse_controls.kwargs['current_rating'] = 100

    #Add switch in long lines
    switch_cut = stack[18]
    switch_cut.kwargs['cutoff_length'] = 800

    #Write to OpenDSS
    final = stack[19]
    final.args[0] = os.path.join('.','results',region,'timeseries_{pct}_pv'.format(pct=pct_pv),'opendss')
    final.kwargs['separate_feeders'] = True
    final.kwargs['separate_substations'] = True

    #Write Tags
    tags = stack[20]
    tags.kwargs['output_folder'] = os.path.join('.','results',region,'timeseries_{pct}_pv'.format(pct=pct_pv),'opendss')
    tags.kwargs['tag_file'] = os.path.join(dataset_dir,region,'Auxiliary','FeederStats.txt')

    stack.save(os.path.join(stack_library_dir,'rnm_to_opendss_stack_timeseries_pv_'+region+'_'+str(pct_pv)+'_pct.json'))
# Example 8
def create_rnm_to_cyme_stack_scenarios(dataset_dir, region, solar, batteries):
    '''Create the stack to convert RNM models in OpenDSS to CYME.'''

    stack = Stack(name='RNM to CYME Stack')

    #Parse load coordinates csv file
    stack.append(Layer(os.path.join(layer_library_dir, 'csv_processing')))

    #Parse Capacitor coordinates csv file
    stack.append(Layer(os.path.join(layer_library_dir, 'csv_processing')))

    #Read the OpenDSS input model
    stack.append(Layer(os.path.join(layer_library_dir, 'from_opendss')))

    #Add regulators with setpoints
    stack.append(Layer(os.path.join(layer_library_dir, 'add_rnm_regulators')))

    #Ensure all LV lines are triplex
    stack.append(Layer(os.path.join(layer_library_dir, 'set_lv_as_triplex')))

    #Modify the model
    stack.append(Layer(os.path.join(layer_library_dir, 'post-processing')))

    #Add the load coordinates with a model merge
    stack.append(Layer(os.path.join(layer_library_dir, 'merging-layer')))

    #Add the capacitor coordinates with a model merge
    stack.append(Layer(os.path.join(layer_library_dir, 'merging-layer')))

    #Set number of customers
    stack.append(Layer(os.path.join(layer_library_dir, 'set_num_customers')))

    #Split the network into feeders
    stack.append(Layer(os.path.join(layer_library_dir, 'network_split')))

    #Calculate metrics on customer per transfomer
    stack.append(
        Layer(
            os.path.join(layer_library_dir,
                         'partitioned_customers_per_transformer_plots')))

    #Add intermediate node coordinates
    stack.append(Layer(os.path.join(layer_library_dir, 'intermediate_node')))

    #Create residential placement for PV
    stack.append(
        Layer(os.path.join(layer_library_dir, 'create_nested_placement')))

    #Create commercial placement for PV
    stack.append(Layer(os.path.join(layer_library_dir, 'create_placement')))

    #Add Load PV
    stack.append(Layer(os.path.join(layer_library_dir, 'add_pv')))

    #Add Utility PV
    stack.append(Layer(os.path.join(layer_library_dir, 'add_pv')))

    #Find missing coordinates
    stack.append(Layer(os.path.join(layer_library_dir, 'find_missing_coords')))

    #Add Timeseries Solar
    stack.append(
        Layer(os.path.join(layer_library_dir, 'connect_solar_timeseries')))

    #Add Timeseries loads
    stack.append(
        Layer(os.path.join(layer_library_dir, 'connect_timeseries_loads')))

    #Adjust overlaid nodes
    stack.append(Layer(os.path.join(layer_library_dir,
                                    'move_overlayed_nodes')))

    #Add cyme substations
    stack.append(Layer(os.path.join(layer_library_dir,
                                    'add_cyme_substations')))

    #Add ltc control settings
    stack.append(Layer(os.path.join(layer_library_dir, 'set_ltc_controls')))

    #Add fuse control settings
    stack.append(Layer(os.path.join(layer_library_dir, 'set_fuse_controls')))

    #Add extra switches to long lines
    stack.append(
        Layer(os.path.join(layer_library_dir, 'add_switches_to_long_lines')))

    #Add Additional regulators
    stack.append(
        Layer(os.path.join(layer_library_dir, 'add_additional_regulators')))

    #Add Capacitor control settings
    stack.append(
        Layer(os.path.join(layer_library_dir, 'set_capacitor_controlers')))

    #Reduce overloaded nodes
    stack.append(
        Layer(os.path.join(layer_library_dir, 'reduce_overload_nodes')))

    #Set any delta connections
    stack.append(Layer(os.path.join(layer_library_dir, 'set_delta_systems')))

    #Set source kv
    stack.append(Layer(os.path.join(layer_library_dir, 'set_source_voltage')))

    #Write to CYME
    stack.append(Layer(os.path.join(layer_library_dir, 'to_cyme')))

    #Write to OpenDSS
    stack.append(Layer(os.path.join(layer_library_dir, 'to_json')))

    #Copy Tag file over
    stack.append(Layer(os.path.join(layer_library_dir, 'add_tags')))

    #Run validation metrics
    stack.append(
        Layer(os.path.join(layer_library_dir, 'statistical_validation')))

    for layer in stack:
        layer.args.mode = ArgMode.USE
        layer.kwargs.mode = ArgMode.USE

    #Load coordinate layer
    load_coordinates = stack[0]
    load_coordinates.kwargs['input_filename'] = os.path.join(
        dataset_dir, region, 'IntermediateFormat',
        'Loads_IntermediateFormat.csv')
    load_coordinates.kwargs['output_filename'] = os.path.join(
        dataset_dir, region, 'IntermediateFormat',
        'Loads_IntermediateFormat2.csv')
    load_coordinates.kwargs['object_name'] = 'Load'

    #Capacitor coordinate layer
    capacitor_coordinates = stack[1]
    capacitor_coordinates.kwargs['input_filename'] = os.path.join(
        dataset_dir, region, 'IntermediateFormat',
        'Capacitors_IntermediateFormat.csv')
    capacitor_coordinates.kwargs['output_filename'] = os.path.join(
        dataset_dir, region, 'IntermediateFormat',
        'Capacitors_IntermediateFormat2.csv')
    capacitor_coordinates.kwargs['object_name'] = 'Capacitor'

    #Read OpenDSS layer
    from_opendss = stack[2]
    from_opendss.args[0] = os.path.join(region, 'OpenDSS', 'Master.dss')
    from_opendss.args[1] = os.path.join(region, 'OpenDSS', 'BusCoord.dss')
    from_opendss.kwargs['base_dir'] = dataset_dir

    #Set regulators with setpoints
    rnm_regulators = stack[3]
    rnm_regulators.kwargs['rnm_name'] = 'CRegulador'
    rnm_regulators.kwargs['setpoint'] = 103

    #Ensure all LV lines are triplex
    set_lv_triplex = stack[4]
    set_lv_triplex.kwargs['to_replace'] = ['Ionic', 'Corinthian', 'Doric']

    #Modify layer
    #No input except the model. Nothing to do here...
    post_processing = stack[5]
    post_processing.kwargs['path_to_feeder_file'] = os.path.join(
        dataset_dir, region, 'Auxiliary', 'Feeder.txt')
    post_processing.kwargs['path_to_switching_devices_file'] = os.path.join(
        dataset_dir, region, 'OpenDSS', 'SwitchingDevices.dss')
    post_processing.kwargs['center_tap_postprocess'] = True
    post_processing.kwargs['switch_to_recloser'] = True

    #Merging Load layer
    merging_load = stack[6]
    merging_load.kwargs['filename'] = os.path.join(
        dataset_dir, region, 'IntermediateFormat',
        'Loads_IntermediateFormat2.csv')

    #Merging Capacitor Layer
    merging_caps = stack[7]
    merging_caps.kwargs['filename'] = os.path.join(
        dataset_dir, region, 'IntermediateFormat',
        'Capacitors_IntermediateFormat2.csv')

    #Resetting customer number layer
    customer = stack[8]
    customer.kwargs['num_customers'] = 1

    #Splitting layer
    split = stack[9]
    split.kwargs['path_to_feeder_file'] = os.path.join(dataset_dir, region,
                                                       'Auxiliary',
                                                       'Feeder.txt')
    split.kwargs['path_to_no_feeder_file'] = os.path.join(
        dataset_dir, region, 'Auxiliary', 'NoFeeder.txt')
    split.kwargs['compute_metrics'] = True
    split.kwargs[
        'compute_kva_density_with_transformers'] = True  #RNM networks have LV information
    split.kwargs['excel_output'] = os.path.join(
        '.', 'results_v2', region,
        'timeseries_solar_' + solar + '_battery_' + batteries, 'cyme',
        'metrics.csv')
    split.kwargs['json_output'] = os.path.join(
        '.', 'results_v2', region,
        'timeseries_solar_' + solar + '_battery_' + batteries, 'cyme',
        'metrics.json')

    #Customer per Transformer plotting layer
    transformer_metrics = stack[10]
    transformer_metrics.kwargs['customer_file'] = os.path.join(
        dataset_dir, region, 'Inputs', 'customers_ext.txt')
    transformer_metrics.kwargs['output_folder'] = os.path.join(
        '.', 'results_v2', region,
        'timeseries_solar_' + solar + '_battery_' + batteries, 'cyme')

    #Intermediate node layer
    inter = stack[11]
    inter.kwargs['filename'] = os.path.join(dataset_dir, region, 'OpenDSS',
                                            'LineCoord.txt')

    #Create Placement for PV
    load_selection_mapping = {
        'none':
        None,
        'low': [('Random', 0, 15)],
        'medium': [('Random', 0, 15), ('Random', 15, 35)],
        'high': [('Random', 0, 15), ('Random', 15, 35), ('Random', 35, 55),
                 ('Random', 55, 75)]
    }
    utility_selection_mapping = {
        'none': None,
        'low': None,
        'medium': ('Reclosers', 1, 2),
        'high': ('Reclosers', 2, 2)
    }  #(Reclosers,1,2) means algorithm will select 2 Reclosers that are not upstream of each other and return the first. Useful for consistency with larger selections
    utility_feeder_mapping = {
        'none': None,
        'low': None,
        'medium': [50],
        'high': [100, 75]
    }
    load_feeder_mapping = {
        'none': None,
        'low': [100],
        'medium': [100, 100],
        'high': [100, 100, 100, 100]
    }
    utility_max_feeder_sizing = {
        'none': None,
        'low': None,
        'medium': 33,
        'high': 80
    }
    load_max_feeder_sizing = {
        'none': None,
        'low': 75,
        'medium': 150,
        'high': None
    }

    powerfactor_mapping = {
        'none': None,
        'low': [1],
        'medium': [1, -0.95],
        'high': [1, -0.95, 1, 1]
    }  #the pf=1 in the last two should be overridden by the controllers
    inverter_control_mapping = {
        'none': None,
        'low': ['powerfactor'],
        'medium': ['powerfactor', 'powerfactor'],
        'high': ['powerfactor', 'powerfactor', 'voltvar', 'voltwatt']
    }
    cutin_mapping = {
        'none': None,
        'low': [0.1],
        'medium': [0.1, 0.1],
        'high': [0.1, 0.1, 0.1, 0.1]
    }
    cutout_mapping = {
        'none': None,
        'low': [0.1],
        'medium': [0.1, 0.1],
        'high': [0.1, 0.1, 0.1, 0.1]
    }
    kvar_percent_mapping = {
        'none': None,
        'low': [None],
        'medium': [None, None],
        'high': [None, None, 44, 44]
    }
    oversizing_mapping = {
        'none': None,
        'low': [1.1],
        'medium': [1.1, 1.1],
        'high': [1.1, 1.1, 1.2, 1.2]
    }
    load_equipment_type = 'ditto.models.load.Load'
    utility_equipment_type = 'ditto.models.node.Node'
    seed = 1
    placement_folder = os.path.join(placement_library_dir, region)

    load_solar_placement = stack[12]
    load_solar_placement.args[0] = load_feeder_mapping[solar]
    load_solar_placement.args[1] = load_equipment_type
    load_solar_placement.args[2] = load_selection_mapping[solar]
    load_solar_placement.args[3] = seed
    load_solar_placement.args[4] = placement_folder

    utility_solar_placement = stack[13]
    utility_solar_placement.args[0] = utility_feeder_mapping[
        solar]  # Length should equal selection[1]. values should be in decreasing order
    utility_solar_placement.args[1] = None
    utility_solar_placement.args[2] = utility_selection_mapping[solar]
    utility_solar_placement.args[3] = None
    utility_solar_placement.args[4] = placement_folder

    add_load_pv = stack[14]
    load_file_names = None  #Do nothing if this is the case
    powerfactors = None
    inverters = None
    cutin = None
    cutout = None
    kvar_percent = None
    oversizing = None
    if load_selection_mapping[solar] is not None:
        load_file_names = []
        powerfactors = []
        for selection in load_selection_mapping[solar]:
            file_name = str(
                load_feeder_mapping[solar][-1]
            ) + '_' + load_equipment_type.split(
                '.'
            )[-1] + '_' + selection[0] + '-' + str(selection[1]) + '-' + str(
                selection[2]
            ) + '_' + str(
                seed
            ) + '.json'  # Note - assume all subregions are using all feeders
            load_file_names.append(file_name)
        powerfactors = powerfactor_mapping[solar]
        inverters = inverter_control_mapping[solar]
        cutin = cutin_mapping[solar]
        cutout = cutin_mapping[solar]
        kvar_percent = kvar_percent_mapping[solar]
        oversizing = oversizing_mapping[solar]
    add_load_pv.kwargs['placement_folder'] = placement_folder
    add_load_pv.kwargs['placement_names'] = load_file_names
    add_load_pv.kwargs['residential_sizes'] = [3000, 5000, 8000]
    add_load_pv.kwargs['residential_areas'] = [75, 300]
    add_load_pv.kwargs['commercial_sizes'] = [
        3000, 6000, 8000, 40000, 100000, 300000
    ]
    add_load_pv.kwargs['commercial_areas'] = [100, 300, 600, 1000, 2000]
    add_load_pv.kwargs['customer_file'] = os.path.join(dataset_dir, region,
                                                       'Inputs',
                                                       'customers_ext.txt')
    add_load_pv.kwargs['max_feeder_sizing_percent'] = load_max_feeder_sizing[
        solar]  # total_pv <= max_feeder_size*total_feeder_load.
    add_load_pv.kwargs['power_factors'] = powerfactors
    add_load_pv.kwargs['inverters'] = inverters
    add_load_pv.kwargs['cutin'] = cutin
    add_load_pv.kwargs['cutout'] = cutout
    add_load_pv.kwargs['kvar_percent'] = kvar_percent
    add_load_pv.kwargs['oversizing'] = oversizing

    add_utility_pv = stack[15]
    utility_file_name = None
    if utility_selection_mapping[solar] is not None:
        feeders_str = str(utility_feeder_mapping[solar])
        if isinstance(utility_feeder_mapping[solar], list):
            feeders_str = ''
            for f in utility_feeder_mapping[solar]:
                feeders_str = feeders_str + str(f) + '-'
            feeders_str = feeders_str.strip('-')
        utility_file_name = [
            feeders_str + '_Node_' + utility_selection_mapping[solar][0] +
            '-' + str(utility_selection_mapping[solar][1]) + '-' +
            str(utility_selection_mapping[solar][2]) + '.json'
        ]
    add_utility_pv.kwargs['placement_folder'] = placement_folder
    add_utility_pv.kwargs['placement_names'] = utility_file_name
    add_utility_pv.kwargs['single_size'] = 2000000
    add_utility_pv.kwargs[
        'max_feeder_sizing_percent'] = utility_max_feeder_sizing[
            solar]  # total_pv <= max_feeder_size*total_feeder_load
    add_utility_pv.kwargs['power_factors'] = [0.95]
    add_utility_pv.kwargs['inverters'] = [
        'voltvar'
    ]  #Note that in Opendss this needs kvar to be set to 0
    add_utility_pv.kwargs['cutin'] = [0.1]
    add_utility_pv.kwargs['cutout'] = [0.1]
    add_utility_pv.kwargs['kvar_percent'] = [44]
    add_utility_pv.kwargs['oversizing'] = [1.1]

    # Missing coords
    # No args/kwargs for this layer

    #Timeseries Solar
    add_solar_timeseries = stack[17]
    dataset = dataset_dir.split(
        '/')[2][:9]  #Warning - tightly coupled to dataset naming convention
    add_solar_timeseries.kwargs['dataset'] = dataset
    add_solar_timeseries.kwargs['base_folder'] = os.path.join(
        '..', '..', 'Solar')
    add_solar_timeseries.kwargs['output_folder'] = os.path.join(
        '.', 'results_v2', region,
        'timeseries_solar_' + solar + '_battery_' + batteries, 'cyme')
    add_solar_timeseries.kwargs['write_cyme_file'] = True
    add_solar_timeseries.kwargs['write_opendss_file'] = False

    #Timeseries Loads
    add_timeseries = stack[18]
    add_timeseries.kwargs['customer_file'] = os.path.join(
        dataset_dir, region, 'Inputs', 'customers_ext.txt')
    county = None
    lower_case_county = None
    if dataset == 'dataset_4':
        try:
            f = open(
                os.path.join(dataset_dir, region, 'Inputs',
                             'customers_ext.txt'), 'r')
            line = f.readlines()[0].split(';')
            county = 'CA - ' + line[-1].strip()
            lower_case_county = line[-1].strip().lower()
        except:
            county = 'CA - SanFrancisco'
            print('Warning - county not found. Using San Francisco as default')
            lower_case_county = 'sanfrancisco'

    if dataset == 'dataset_3':
        county = 'NC - Guilford'
        lower_case_county = 'guilford'
    if dataset == 'dataset_2':
        county = 'NM - Santa Fe'
        lower_case_county = 'santafe'

    load_map = {
        'dataset_4': 'SanFrancisco',
        'dataset_3': 'Greensboro',
        'dataset_2': 'SantaFe'
    }
    load_location = load_map[dataset]
    add_timeseries.kwargs['residential_load_data'] = os.path.join(
        '..', '..', 'Loads', 'residential', load_location,
        'datapoints_elec_only.h5')
    add_timeseries.kwargs['residential_load_metadata'] = os.path.join(
        '..', '..', 'Loads', 'residential', load_location, 'results_fips.csv')
    add_timeseries.kwargs['commercial_load_data'] = os.path.join(
        '..', '..', 'Loads', 'commercial', county,
        'com_' + lower_case_county + '_electricity_only.dsg')
    add_timeseries.kwargs['commercial_load_metadata'] = os.path.join(
        '..', '..', 'Loads', 'commercial', county, 'results.csv')
    add_timeseries.kwargs['output_folder'] = os.path.join(
        '.', 'results_v2', region,
        'timeseries_solar_' + solar + '_battery_' + batteries, 'cyme')
    add_timeseries.kwargs['write_cyme_file'] = True
    add_timeseries.kwargs['write_opendss_file'] = False
    add_timeseries.kwargs['dataset'] = dataset

    # Move overlayed node layer
    adjust = stack[19]
    adjust.kwargs['delta_x'] = 10
    adjust.kwargs['delta_y'] = 10

    #Substations

    add_substations = stack[20]
    readme_list = [
        os.path.join(dataset_dir, region, 'Inputs', f)
        for f in os.listdir(os.path.join(dataset_dir, region, 'Inputs'))
        if f.startswith('README')
    ]
    readme = None
    if len(readme_list) == 1:
        readme = readme_list[0]
    add_substations.args[0] = os.path.join(dataset_dir, region, 'Auxiliary',
                                           'Feeder.txt')
    add_substations.kwargs['base_dir'] = dataset_dir
    add_substations.kwargs['readme_file'] = readme

    #LTC Controls

    ltc_controls = stack[21]
    ltc_controls.kwargs['setpoint'] = 103

    #Fuse Controls

    fuse_controls = stack[22]
    fuse_controls.kwargs['current_rating'] = 100
    fuse_controls.kwargs['high_current_rating'] = 600

    #Add switch in long lines

    switch_cut = stack[23]
    switch_cut.kwargs['cutoff_length'] = 800

    #Add additional regulators

    additional_regs = stack[24]
    additional_regs.kwargs['file_location'] = os.path.join(
        dataset_dir, region, 'Auxiliary', 'additional_regs.csv')
    additional_regs.kwargs['setpoint'] = 103

    # Capacitor controls
    cap_controls = stack[25]
    cap_controls.kwargs['delay'] = 100
    cap_controls.kwargs['lowpoint'] = 120.5
    cap_controls.kwargs['highpoint'] = 125

    # Reduce overloaded nodes
    overload_nodes = stack[26]
    overload_nodes.kwargs['powerflow_file'] = os.path.join(
        dataset_dir, region, 'Auxiliary', 'powerflow.csv')
    overload_nodes.kwargs['threshold'] = 0.94
    overload_nodes.kwargs['scale_factor'] = 2.0

    # Set delta loads and transformers
    delta = stack[27]
    readme_list = [
        os.path.join(dataset_dir, region, 'Inputs', f)
        for f in os.listdir(os.path.join(dataset_dir, region, 'Inputs'))
        if f.startswith('README')
    ]
    readme = None
    if len(readme_list) == 1:
        readme = readme_list[0]
    delta.kwargs['readme_location'] = readme

    #Set source KV value
    set_source = stack[28]
    set_source.kwargs['source_kv'] = 230
    set_source.kwargs['source_names'] = ['st_mat']

    #Write to CYME
    final = stack[29]
    final.args[0] = os.path.join(
        '.', 'results_v2', region,
        'timeseries_solar_' + solar + '_battery_' + batteries, 'cyme')

    #Dump to Ditto json
    final_json = stack[30]
    final_json.kwargs['base_dir'] = os.path.join(
        '.', 'results_v2', region,
        'timeseries_solar_' + solar + '_battery_' + batteries, 'json_cyme')

    #Write Tags
    tags = stack[31]
    tags.kwargs['output_folder'] = os.path.join(
        '.', 'results_v2', region,
        'timeseries_solar_' + solar + '_battery_' + batteries, 'cyme')
    tags.kwargs['tag_file'] = os.path.join(dataset_dir, region, 'Auxiliary',
                                           'FeederStats.txt')

    #Write validation
    validation = stack[32]
    validation.kwargs['output_folder'] = os.path.join(
        '.', 'results_v2', region,
        'timeseries_solar_' + solar + '_battery_' + batteries, 'cyme')
    validation.kwargs['input_folder'] = os.path.join(
        '.', 'results_v2', region,
        'timeseries_solar_' + solar + '_battery_' + batteries, 'cyme')
    validation.kwargs['rscript_folder'] = os.path.join(
        '..', '..', 'smartdsR-analysis-lite')
    validation.kwargs['output_name'] = region

    stack.save(
        os.path.join(
            stack_library_dir, 'rnm_to_cyme_stack_timeseries_' + region +
            'solar_' + solar + '_batteries_' + batteries + '.json'))
Пример #9
0
def create_test_stack_DERs(dataset_dir, dataset_name='dataset3'):
    """Create and save a test stack that adds DERs (PV + storage) to a
    DiTTo model read from OpenDSS, then writes the result back to OpenDSS.

    Parameters
    ----------
    dataset_dir : str
        Base directory of the input dataset.
    dataset_name : str, optional
        Name of the dataset; only 'dataset3' is currently supported.

    Raises
    ------
    NotImplementedError
        If dataset_name is not 'dataset3'.
    """
    stack = Stack(name='DiTTo Test Stack DERs {}'.format(dataset_name.title()))
    stack.append(Layer(os.path.join(layer_library_dir, 'from_opendss')))
    stack.append(Layer(os.path.join(layer_library_dir, 'create_placement')))
    stack.append(Layer(os.path.join(layer_library_dir, 'add_pv')))
    stack.append(Layer(os.path.join(layer_library_dir, 'add_storage')))
    stack.append(Layer(os.path.join(layer_library_dir, 'to_opendss')))
    # TODO: add the other layer pieces here too.

    if not dataset_name:
        dataset_name = os.path.basename(dataset_dir)

    # Make every layer's args/kwargs assignable.
    for layer in stack:
        layer.args.mode = ArgMode.USE
        layer.kwargs.mode = ArgMode.USE

    if dataset_name == 'dataset3':
        from_opendss = stack[0]
        from_opendss.args[0] = os.path.join('mixed_humid', 'industrial',
                                            'OpenDSS', 'master.dss')
        from_opendss.args[1] = os.path.join('mixed_humid', 'industrial',
                                            'OpenDSS', 'buscoords.dss')
        from_opendss.kwargs['base_dir'] = dataset_dir

        # Placement parameters. These are reused both to build the placement
        # file name and as the create_placement layer arguments (previously
        # the same literals were duplicated below, risking drift).
        feeders = 'all'
        equipment_type = 'ditto.models.load.Load'
        selection = ('Random', 10)
        seed = 1
        placement_folder = placement_library_dir
        file_name = feeders + '_' + equipment_type.split(
            '.')[-1] + '_' + selection[0] + '-' + str(
                selection[1]) + '_' + str(seed) + '.txt'

        create_placement = stack[1]
        create_placement.args[0] = feeders
        create_placement.args[1] = equipment_type
        create_placement.args[2] = selection
        create_placement.args[3] = seed
        create_placement.args[4] = placement_folder
        create_placement.args[5] = file_name

        add_pv = stack[2]
        add_pv.args[0] = os.path.join(placement_folder, file_name)  # placement
        add_pv.args[1] = 10   # rated power
        add_pv.args[2] = 1.0  # power factor

        add_storage = stack[3]
        add_storage.args[0] = os.path.join(placement_folder, file_name)  # placement
        add_storage.args[1] = 8   # rated power
        add_storage.args[2] = 14  # rated kWh

        to_opendss = stack[4]
        to_opendss.args[0] = os.path.join('post_process', 'mixed_humid',
                                          'industrial')
        to_opendss.kwargs['base_dir'] = dataset_dir

    else:
        raise NotImplementedError(
            "Unknown dataset_name {!r}".format(dataset_name))

    stack.save(os.path.join(stack_library_dir, stack.suggested_filename))
Пример #10
0
def create_rnm_to_opendss_stack(dataset_dir, region, dataset):
    '''Create the stack to convert RNM models in OpenDSS to OpenDSS.

    Parameters
    ----------
    dataset_dir : str
        Base directory of the input dataset.
    region : str
        Region sub-directory within the dataset.
    dataset : str
        Dataset identifier ('dataset_2', 'dataset_3' or 'dataset_4');
        selects the county and load data used for the timeseries loads.
        Raises KeyError for any other value.
    '''

    stack = Stack(name='RNM to OpenDSS Stack')

    #Parse load coordinates csv file
    stack.append(Layer(os.path.join(layer_library_dir,'csv_processing')))

    #Parse Capacitor coordinates csv file
    stack.append(Layer(os.path.join(layer_library_dir,'csv_processing')))

    #Read the OpenDSS input model
    stack.append(Layer(os.path.join(layer_library_dir,'from_opendss')))

    #Add regulators with setpoints
    stack.append(Layer(os.path.join(layer_library_dir,'add_rnm_regulators')))

    #Add Timeseries loads
    stack.append(Layer(os.path.join(layer_library_dir,'connect_timeseries_loads')))

    #Modify the model
    stack.append(Layer(os.path.join(layer_library_dir,'post-processing')))

    #Add the load coordinates with a model merge
    stack.append(Layer(os.path.join(layer_library_dir,'merging-layer')))

    #Add the capacitor coordinates with a model merge
    stack.append(Layer(os.path.join(layer_library_dir,'merging-layer')))

    #Set number of customers
    stack.append(Layer(os.path.join(layer_library_dir,'set_num_customers')))

    #Split the network into feeders
    stack.append(Layer(os.path.join(layer_library_dir,'network_split')))

    #Calculate metrics on customer per transfomer
    stack.append(Layer(os.path.join(layer_library_dir,'partitioned_customers_per_transformer_plots')))

    #Add intermediate node coordinates
    stack.append(Layer(os.path.join(layer_library_dir,'intermediate_node')))

    #Find missing coordinates
    stack.append(Layer(os.path.join(layer_library_dir,'find_missing_coords')))

    #Adjust overlaid nodes
    stack.append(Layer(os.path.join(layer_library_dir,'move_overlayed_nodes')))

    #Add cyme substations
    stack.append(Layer(os.path.join(layer_library_dir,'add_cyme_substations')))

    #Add ltc control settings
    stack.append(Layer(os.path.join(layer_library_dir,'set_ltc_controls')))

    #Add fuse control settings
    stack.append(Layer(os.path.join(layer_library_dir,'set_fuse_controls')))

    #Add extra switches to long lines
    stack.append(Layer(os.path.join(layer_library_dir,'add_switches_to_long_lines')))

    #Add Additional regulators
    stack.append(Layer(os.path.join(layer_library_dir,'add_additional_regulators')))

    #Add Capacitor control settings
    stack.append(Layer(os.path.join(layer_library_dir,'set_capacitor_controlers')))

    #Reduce overloaded nodes
    stack.append(Layer(os.path.join(layer_library_dir,'reduce_overload_nodes')))

    #Set any delta connections
    stack.append(Layer(os.path.join(layer_library_dir,'set_delta_systems')))

    #Set source kv
    stack.append(Layer(os.path.join(layer_library_dir,'set_source_voltage')))

    #Write to OpenDSS
    stack.append(Layer(os.path.join(layer_library_dir,'to_opendss')))

    #Copy Tag file over
    stack.append(Layer(os.path.join(layer_library_dir,'add_tags')))

    #Run validation metrics
    stack.append(Layer(os.path.join(layer_library_dir,'statistical_validation')))

    # Make every layer's args/kwargs assignable.
    for layer in stack:
        layer.args.mode = ArgMode.USE
        layer.kwargs.mode = ArgMode.USE

    #Load coordinate layer
    load_coordinates = stack[0]
    load_coordinates.kwargs['input_filename'] = os.path.join(dataset_dir,region,'IntermediateFormat','Loads_IntermediateFormat.csv')
    load_coordinates.kwargs['output_filename'] = os.path.join(dataset_dir,region,'IntermediateFormat','Loads_IntermediateFormat2.csv')
    load_coordinates.kwargs['object_name'] = 'Load'

    #Capacitor coordinate layer
    capacitor_coordinates = stack[1]
    capacitor_coordinates.kwargs['input_filename'] = os.path.join(dataset_dir,region,'IntermediateFormat','Capacitors_IntermediateFormat.csv')
    capacitor_coordinates.kwargs['output_filename'] = os.path.join(dataset_dir,region,'IntermediateFormat','Capacitors_IntermediateFormat2.csv')
    capacitor_coordinates.kwargs['object_name'] = 'Capacitor'

    #Read OpenDSS layer
    from_opendss = stack[2]
    from_opendss.args[0] = os.path.join(region,'OpenDSS','Master.dss')
    from_opendss.args[1] = os.path.join(region,'OpenDSS','BusCoord.dss')
    from_opendss.kwargs['base_dir'] = dataset_dir

    #Set regulators with setpoints
    rnm_regulators = stack[3]
    rnm_regulators.kwargs['rnm_name'] = 'CRegulador'
    rnm_regulators.kwargs['setpoint'] = 103

    #Timeseries Loads
    add_timeseries = stack[4]
    add_timeseries.kwargs['customer_file'] = os.path.join(dataset_dir,region,'Inputs','customers_ext.txt')
    county = None
    lower_case_county = None
    if dataset == 'dataset_4':
        # Read the county from the customers file; fall back to San Francisco
        # if the file is missing or malformed (best-effort by design).
        try:
            with open(os.path.join(dataset_dir,region,'Inputs','customers_ext.txt'),'r') as f:
                line = f.readlines()[0].split(';')
            county = 'CA - '+line[-1].strip()
            lower_case_county = line[-1].strip().lower()
        except Exception:
            county = 'CA - SanFrancisco'
            print('Warning - county not found. Using San Francisco as default')
            lower_case_county = 'sanfrancisco'

    if dataset == 'dataset_3':
        county = 'NC - Guilford'
        lower_case_county = 'guilford'
    if dataset == 'dataset_2':
        county = 'NM - Santa Fe'
        lower_case_county = 'santafe'

    load_map = {'dataset_4':'SanFrancisco','dataset_3':'Greensboro','dataset_2':'SantaFe'}
    load_location = load_map[dataset]
    add_timeseries.kwargs['residential_load_data'] = os.path.join('..','..','Loads','residential',load_location,'datapoints_elec_only.h5')
    add_timeseries.kwargs['residential_load_metadata'] = os.path.join('..','..','Loads','residential',load_location,'results_fips.csv')
    add_timeseries.kwargs['commercial_load_data'] = os.path.join('..','..','Loads','commercial',county,'com_'+lower_case_county+'_electricity_only.dsg')
    add_timeseries.kwargs['commercial_load_metadata'] = os.path.join('..','..','Loads','commercial',county,'results.csv')
    add_timeseries.kwargs['output_folder'] = os.path.join('.','results',region,'timeseries','opendss')
    add_timeseries.kwargs['write_cyme_file'] = False
    add_timeseries.kwargs['dataset'] = dataset

    #Modify layer
    post_processing = stack[5]
    post_processing.kwargs['path_to_feeder_file'] = os.path.join(dataset_dir,region,'Auxiliary','Feeder.txt')
    post_processing.kwargs['path_to_switching_devices_file'] = os.path.join(dataset_dir,region,'OpenDSS','SwitchingDevices.dss')
    post_processing.kwargs['switch_to_recloser'] = True
    # NOTE(review): this key was previously assigned True and then
    # immediately overwritten with False; only the effective value is kept.
    post_processing.kwargs['center_tap_postprocess'] = False

    #Merging Load layer
    merging_load = stack[6]
    merging_load.kwargs['filename'] = os.path.join(dataset_dir,region,'IntermediateFormat','Loads_IntermediateFormat2.csv')

    #Merging Capacitor Layer
    merging_caps = stack[7]
    merging_caps.kwargs['filename'] = os.path.join(dataset_dir,region,'IntermediateFormat','Capacitors_IntermediateFormat2.csv')

    #Resetting customer number layer
    customer = stack[8]
    customer.kwargs['num_customers'] = 1

    #Splitting layer
    split = stack[9]
    split.kwargs['path_to_feeder_file'] = os.path.join(dataset_dir,region,'Auxiliary','Feeder.txt')
    split.kwargs['path_to_no_feeder_file'] = os.path.join(dataset_dir,region,'Auxiliary','NoFeeder.txt')
    split.kwargs['compute_metrics'] = True
    split.kwargs['compute_kva_density_with_transformers'] = True #RNM networks have LV information
    split.kwargs['excel_output'] = os.path.join('.', 'results', region, 'timeseries','opendss', 'metrics.csv')
    split.kwargs['json_output'] = os.path.join('.', 'results', region, 'timeseries', 'opendss','metrics.json')

    #Customer per Transformer plotting layer
    transformer_metrics = stack[10]
    transformer_metrics.kwargs['customer_file'] = os.path.join(dataset_dir,region,'Inputs','customers_ext.txt')
    transformer_metrics.kwargs['output_folder'] = os.path.join('.','results',region,'timeseries','opendss')

    #Intermediate node layer
    inter = stack[11]
    inter.kwargs['filename'] = os.path.join(dataset_dir,region,'OpenDSS','LineCoord.txt')

    # Missing coords (stack[12])
    # No args/kwargs for this layer

    # Move overlayed node layer
    adjust = stack[13]
    adjust.kwargs['delta_x'] = 10
    adjust.kwargs['delta_y'] = 10

    #Substations
    add_substations = stack[14]
    readme_list = [os.path.join(dataset_dir,region,'Inputs',f) for f in os.listdir(os.path.join(dataset_dir,region,'Inputs')) if f.startswith('README')]
    readme = None
    # Only use a README if exactly one is found; otherwise it is ambiguous.
    if len(readme_list)==1:
        readme = readme_list[0]
    add_substations.args[0] = os.path.join(dataset_dir,region,'Auxiliary', 'Feeder.txt')
    add_substations.kwargs['base_dir'] = dataset_dir
    add_substations.kwargs['readme_file'] = readme

    #LTC Controls
    ltc_controls = stack[15]
    ltc_controls.kwargs['setpoint'] = 103

    #Fuse Controls
    fuse_controls = stack[16]
    fuse_controls.kwargs['current_rating'] = 100

    #Add switch in long lines
    switch_cut = stack[17]
    switch_cut.kwargs['cutoff_length'] = 800

    #Add additional regulators
    additional_regs = stack[18]
    additional_regs.kwargs['file_location'] = os.path.join(dataset_dir,region,'Auxiliary','additional_regs.csv')
    additional_regs.kwargs['setpoint'] = 103

    # Capacitor controls
    cap_controls = stack[19]
    cap_controls.kwargs['delay'] = 100
    cap_controls.kwargs['lowpoint'] = 118
    cap_controls.kwargs['highpoint'] = 123

    # Reduce overloaded nodes
    overload_nodes = stack[20]
    overload_nodes.kwargs['powerflow_file'] = os.path.join(dataset_dir,region,'Auxiliary','powerflow.csv')
    overload_nodes.kwargs['threshold'] = 0.94
    overload_nodes.kwargs['scale_factor'] = 2.0

    # Set delta loads and transformers
    delta = stack[21]
    readme_list = [os.path.join(dataset_dir,region,'Inputs',f) for f in os.listdir(os.path.join(dataset_dir,region,'Inputs')) if f.startswith('README')]
    readme = None
    if len(readme_list)==1:
        readme = readme_list[0]
    delta.kwargs['readme_location'] = readme

    #Set source KV value
    set_source = stack[22]
    set_source.kwargs['source_kv'] = 230
    set_source.kwargs['source_names'] = ['st_mat']

    #Write to OpenDSS
    final = stack[23]
    final.args[0] = os.path.join('.','results',region,'timeseries','opendss')
    final.kwargs['separate_feeders'] = True
    final.kwargs['separate_substations'] = True

    #Write Tags
    tags = stack[24]
    tags.kwargs['output_folder'] = os.path.join('.','results',region,'timeseries','opendss')
    tags.kwargs['tag_file'] = os.path.join(dataset_dir,region,'Auxiliary','FeederStats.txt')

    #Write validation
    validation = stack[25]
    validation.kwargs['output_folder'] = os.path.join('.','results',region,'timeseries','opendss')
    validation.kwargs['input_folder'] = os.path.join('.','results',region,'timeseries','opendss')
    validation.kwargs['rscript_folder'] = os.path.join('..','..','smartdsR-analysis-lite')
    validation.kwargs['output_name'] = region

    stack.save(os.path.join(stack_library_dir,'rnm_to_opendss_stack_'+region+'.json'))
Пример #11
0
def dataset3_snapshot(dataset_dir,
                      climate_zone='MixedHumid',
                      feeder_type='industrial',
                      pct_customers=10):
    """Create and save a snapshot stack for dataset3: read a CYME model,
    attach CYME substations, and write the result back out as CYME.

    The PV/storage placement layers are currently disabled; the placement
    bookkeeping variables are kept so they can easily be re-enabled.
    """
    stack_name = ("Dataset3 Postprocessed Snapshot {}, {}, {} Percent of "
                  "Customers".format(climate_zone, feeder_type,
                                     pct_customers))
    short_name = 'ds3_post_snap_{}_{}_{}pct_customers'.format(
        climate_zone, feeder_type, pct_customers)

    stack = Stack(name=stack_name)
    for layer_name in ('from_cyme', 'add_cyme_substations', 'to_cyme'):
        stack.append(Layer(os.path.join(layer_library_dir, layer_name)))

    # Make every layer's args/kwargs assignable.
    for layer in stack:
        layer.args.mode = ArgMode.USE
        layer.kwargs.mode = ArgMode.USE

    from_cyme = stack[0]
    from_cyme.args[0] = os.path.join(climate_zone, feeder_type, 'CYME')
    from_cyme.kwargs['base_dir'] = dataset_dir

    add_substations = stack[1]
    add_substations.args[0] = os.path.join(climate_zone, feeder_type,
                                           'Feeders', 'feeders.txt')
    add_substations.kwargs['base_dir'] = dataset_dir

    # Parameters for the disabled create_placement/add_pv/add_storage layers.
    feeders = 'all'
    equipment_type = 'ditto.models.load.Load'
    selection = ('Random', pct_customers)
    seed = 1
    placement_folder = os.path.join(placement_library_dir, 'dataset3',
                                    climate_zone, feeder_type)
    file_name = '{}_{}_{}-{}_{}.txt'.format(feeders,
                                            equipment_type.split('.')[-1],
                                            selection[0], selection[1], seed)

    to_cyme = stack[2]
    to_cyme.args[0] = '.'  # output to run directory

    stack.save(os.path.join(stack_library_dir, short_name + '.json'))
Пример #12
0
def create_rnm_to_opendss_stack(dataset_dir, region):
    '''Create the stack to convert RNM models in OpenDSS to OpenDSS.

    Parameters
    ----------
    dataset_dir : str
        Base directory of the input dataset.
    region : str
        Region sub-directory within the dataset.

    NOTE(review): this variant has no ``dataset`` parameter, so the
    timeseries ``dataset``/county logic of the sibling three-argument
    version is absent and ``dataset``-keyed lookups are not performed here.
    '''

    stack = Stack(name='RNM to OpenDSS Stack')

    #Parse load coordinates csv file
    stack.append(Layer(os.path.join(layer_library_dir,'csv_processing')))

    #Parse Capacitor coordinates csv file
    stack.append(Layer(os.path.join(layer_library_dir,'csv_processing')))

    #Read the OpenDSS input model
    stack.append(Layer(os.path.join(layer_library_dir,'from_opendss')))

    #Add regulators with setpoints
    stack.append(Layer(os.path.join(layer_library_dir,'add_rnm_regulators')))

    #Ensure all LV lines are triplex
    stack.append(Layer(os.path.join(layer_library_dir,'set_lv_as_triplex')))

    #Modify the model
    stack.append(Layer(os.path.join(layer_library_dir,'post-processing')))

    #Add the load coordinates with a model merge
    stack.append(Layer(os.path.join(layer_library_dir,'merging-layer')))

    #Add the capacitor coordinates with a model merge
    stack.append(Layer(os.path.join(layer_library_dir,'merging-layer')))

    #Set number of customers
    stack.append(Layer(os.path.join(layer_library_dir,'set_num_customers')))

    #Split the network into feeders
    stack.append(Layer(os.path.join(layer_library_dir,'network_split')))

    #Calculate metrics on customer per transfomer
    stack.append(Layer(os.path.join(layer_library_dir,'partitioned_customers_per_transformer_plots')))

    #Add intermediate node coordinates
    stack.append(Layer(os.path.join(layer_library_dir,'intermediate_node')))

    #Find missing coordinates
    stack.append(Layer(os.path.join(layer_library_dir,'find_missing_coords')))

    #Adjust overlaid nodes
    stack.append(Layer(os.path.join(layer_library_dir,'move_overlayed_nodes')))

    #Add cyme substations
    stack.append(Layer(os.path.join(layer_library_dir,'add_cyme_substations')))

    #Add ltc control settings
    stack.append(Layer(os.path.join(layer_library_dir,'set_ltc_controls')))

    #Add fuse control settings
    stack.append(Layer(os.path.join(layer_library_dir,'set_fuse_controls')))

    #Add extra switches to long lines
    stack.append(Layer(os.path.join(layer_library_dir,'add_switches_to_long_lines')))

    #Add Additional regulators
    stack.append(Layer(os.path.join(layer_library_dir,'add_additional_regulators')))

    #Add Capacitor control settings
    stack.append(Layer(os.path.join(layer_library_dir,'set_capacitor_controlers')))

    #Reduce overloaded nodes
    stack.append(Layer(os.path.join(layer_library_dir,'reduce_overload_nodes')))

    #Set any delta connections
    stack.append(Layer(os.path.join(layer_library_dir,'set_delta_systems')))

    #Set source kv
    stack.append(Layer(os.path.join(layer_library_dir,'set_source_voltage')))

    #Write to OpenDSS
    stack.append(Layer(os.path.join(layer_library_dir,'to_opendss')))

    #Dump the model to DiTTo json
    stack.append(Layer(os.path.join(layer_library_dir,'to_json')))

    #Copy Tag file over
    stack.append(Layer(os.path.join(layer_library_dir,'add_tags')))

    #Run validation metrics
    stack.append(Layer(os.path.join(layer_library_dir,'statistical_validation')))

    # Make every layer's args/kwargs assignable.
    for layer in stack:
        layer.args.mode = ArgMode.USE
        layer.kwargs.mode = ArgMode.USE

    #Load coordinate layer
    load_coordinates = stack[0]
    load_coordinates.kwargs['input_filename'] = os.path.join(dataset_dir,region,'IntermediateFormat','Loads_IntermediateFormat.csv')
    load_coordinates.kwargs['output_filename'] = os.path.join(dataset_dir,region,'IntermediateFormat','Loads_IntermediateFormat2.csv')
    load_coordinates.kwargs['object_name'] = 'Load'

    #Capacitor coordinate layer
    capacitor_coordinates = stack[1]
    capacitor_coordinates.kwargs['input_filename'] = os.path.join(dataset_dir,region,'IntermediateFormat','Capacitors_IntermediateFormat.csv')
    capacitor_coordinates.kwargs['output_filename'] = os.path.join(dataset_dir,region,'IntermediateFormat','Capacitors_IntermediateFormat2.csv')
    capacitor_coordinates.kwargs['object_name'] = 'Capacitor'

    #Read OpenDSS layer
    from_opendss = stack[2]
    from_opendss.args[0] = os.path.join(region,'OpenDSS','Master.dss')
    from_opendss.args[1] = os.path.join(region,'OpenDSS','BusCoord.dss')
    from_opendss.kwargs['base_dir'] = dataset_dir

    #Set regulators with setpoints
    rnm_regulators = stack[3]
    rnm_regulators.kwargs['rnm_name'] = 'CRegulador'
    rnm_regulators.kwargs['setpoint'] = 103

    #Ensure all LV lines are triplex
    set_lv_triplex = stack[4]
    set_lv_triplex.kwargs['to_replace'] = ['Ionic', 'Corinthian', 'Doric']

    #Modify layer
    post_processing = stack[5]
    post_processing.kwargs['path_to_feeder_file'] = os.path.join(dataset_dir,region,'Auxiliary','Feeder.txt')
    post_processing.kwargs['path_to_switching_devices_file'] = os.path.join(dataset_dir,region,'OpenDSS','SwitchingDevices.dss')
    post_processing.kwargs['switch_to_recloser'] = True
    # NOTE(review): this key was previously assigned True and then
    # immediately overwritten with False; only the effective value is kept.
    post_processing.kwargs['center_tap_postprocess'] = False

    #Merging Load layer
    merging_load = stack[6]
    merging_load.kwargs['filename'] = os.path.join(dataset_dir,region,'IntermediateFormat','Loads_IntermediateFormat2.csv')

    #Merging Capacitor Layer
    merging_caps = stack[7]
    merging_caps.kwargs['filename'] = os.path.join(dataset_dir,region,'IntermediateFormat','Capacitors_IntermediateFormat2.csv')

    #Resetting customer number layer
    customer = stack[8]
    customer.kwargs['num_customers'] = 1

    #Splitting layer
    split = stack[9]
    split.kwargs['path_to_feeder_file'] = os.path.join(dataset_dir,region,'Auxiliary','Feeder.txt')
    split.kwargs['path_to_no_feeder_file'] = os.path.join(dataset_dir,region,'Auxiliary','NoFeeder.txt')
    split.kwargs['compute_metrics'] = True
    split.kwargs['compute_kva_density_with_transformers'] = True #RNM networks have LV information
    split.kwargs['excel_output'] = os.path.join('.', 'results_v2', region, 'base','opendss', 'metrics.csv')
    split.kwargs['json_output'] = os.path.join('.', 'results_v2', region, 'base', 'opendss','metrics.json')

    #Customer per Transformer plotting layer
    transformer_metrics = stack[10]
    transformer_metrics.kwargs['customer_file'] = os.path.join(dataset_dir,region,'Inputs','customers_ext.txt')
    transformer_metrics.kwargs['output_folder'] = os.path.join('.','results_v2',region,'base','opendss')

    #Intermediate node layer
    inter = stack[11]
    inter.kwargs['filename'] = os.path.join(dataset_dir,region,'OpenDSS','LineCoord.txt')

    # Missing coords (stack[12])
    # No args/kwargs for this layer

    # Move overlayed node layer
    adjust = stack[13]
    adjust.kwargs['delta_x'] = 10
    adjust.kwargs['delta_y'] = 10

    #Substations
    add_substations = stack[14]
    readme_list = [os.path.join(dataset_dir,region,'Inputs',f) for f in os.listdir(os.path.join(dataset_dir,region,'Inputs')) if f.startswith('README')]
    readme = None
    # Only use a README if exactly one is found; otherwise it is ambiguous.
    if len(readme_list)==1:
        readme = readme_list[0]
    add_substations.args[0] = os.path.join(dataset_dir,region,'Auxiliary', 'Feeder.txt')
    add_substations.kwargs['base_dir'] = dataset_dir
    add_substations.kwargs['readme_file'] = readme

    #LTC Controls
    ltc_controls = stack[15]
    ltc_controls.kwargs['setpoint'] = 103

    #Fuse Controls
    fuse_controls = stack[16]
    fuse_controls.kwargs['current_rating'] = 100
    fuse_controls.kwargs['high_current_rating'] = 600

    #Add switch in long lines
    switch_cut = stack[17]
    switch_cut.kwargs['cutoff_length'] = 800

    #Add additional regulators
    additional_regs = stack[18]
    additional_regs.kwargs['file_location'] = os.path.join(dataset_dir,region,'Auxiliary','additional_regs.csv')
    additional_regs.kwargs['setpoint'] = 103

    # Capacitor controls
    cap_controls = stack[19]
    cap_controls.kwargs['delay'] = 100
    cap_controls.kwargs['lowpoint'] = 120.5
    cap_controls.kwargs['highpoint'] = 125

    # Reduce overloaded nodes
    overload_nodes = stack[20]
    overload_nodes.kwargs['powerflow_file'] = os.path.join(dataset_dir,region,'Auxiliary','powerflow.csv')
    overload_nodes.kwargs['threshold'] = 0.94
    overload_nodes.kwargs['scale_factor'] = 2.0

    # Set delta loads and transformers
    delta = stack[21]
    readme_list = [os.path.join(dataset_dir,region,'Inputs',f) for f in os.listdir(os.path.join(dataset_dir,region,'Inputs')) if f.startswith('README')]
    readme = None
    if len(readme_list)==1:
        readme = readme_list[0]
    delta.kwargs['readme_location'] = readme

    #Set source KV value
    set_source = stack[22]
    set_source.kwargs['source_kv'] = 230
    set_source.kwargs['source_names'] = ['st_mat']

    #Write to OpenDSS
    final = stack[23]
    final.args[0] = os.path.join('.','results_v2',region,'base','opendss')
    final.kwargs['separate_feeders'] = True
    final.kwargs['separate_substations'] = True

    #Dump to Ditto json
    final_json = stack[24]
    final_json.kwargs['base_dir'] = os.path.join('.','results_v2',region,'base','json_opendss')

    #Write Tags
    tags = stack[25]
    tags.kwargs['output_folder'] = os.path.join('.','results_v2',region,'base','opendss')
    tags.kwargs['tag_file'] = os.path.join(dataset_dir,region,'Auxiliary','FeederStats.txt')

    #Write validation
    validation = stack[26]
    validation.kwargs['output_folder'] = os.path.join('.','results_v2',region,'base','opendss')
    validation.kwargs['input_folder'] = os.path.join('.','results_v2',region,'base','opendss')
    validation.kwargs['rscript_folder'] = os.path.join('..','..','smartdsR-analysis-lite')
    validation.kwargs['output_name'] = region

    stack.save(os.path.join(stack_library_dir,'rnm_to_opendss_stack_'+region+'.json'))
Пример #13
0
from layerstack.layer import Layer, LayerBase
from ditto.dittolayers import DiTToLayerBase
import os

# Scaffold a new 'from_json' layer (reads a json DiTTo model into DiTTo)
# in the shared layer library directory.
layer_library_path = os.path.join('..', 'layer_library')
Layer.create(
    'from_json',
    layer_library_path,
    desc='Read a json DiTTo model into DiTTo',
    layer_base_class=LayerBase,
)

Example #14
0
    def apply(cls, stack, model, feeder_file, output_substation_folder, base_dir=None, substation_folder=None):
        """Replace generic substation internals in *model* with detailed OpenDSS substations.

        Parameters
        ----------
        stack : Stack
            Layer stack this layer runs in; searched for a 'From OpenDSS'
            layer whose directory is reused to re-load the written files.
        model : ditto Store
            DiTTo model whose substation nodes/edges are replaced.
        feeder_file : str
            RNM file (space-delimited) mapping feeders to substations.
        output_substation_folder : str
            Folder where per-substation OpenDSS files are written.
        base_dir : str, optional
            Prepended to ``feeder_file``/``output_substation_folder`` when
            those paths do not already exist as given.
        substation_folder : str, optional
            Library of generic substation templates; defaults to this
            layer's ``resources`` directory.

        Returns
        -------
        ditto Store
            Model with original substation elements deleted and the generic
            substation models (re-read from the written OpenDSS files) added.

        Raises
        ------
        Exception
            If the 'From OpenDSS' layer cannot be located, or if no template
            substation has enough feeder bays for a model substation.
        """
        logger.debug("Starting add_substations")
        # Resolve paths relative to base_dir only when they don't resolve as given.
        if base_dir and (not os.path.exists(feeder_file)):
            feeder_file = os.path.join(base_dir,feeder_file)
        if base_dir and (not os.path.exists(output_substation_folder)):
            output_substation_folder = os.path.join(base_dir,output_substation_folder)

        # FIX: identity comparison for None (was '== None')
        if substation_folder is None:
            substation_folder = os.path.join(os.path.dirname(__file__),'resources')

        # Need to load OpenDSS files later. Make sure we can find the required layer.
        from_opendss_layer_dir = None
        # first look in stack
        for layer in stack:
            if layer.name == 'From OpenDSS':
                from_opendss_layer_dir = layer.layer_dir
                break
        # then try this layer's library directory
        if from_opendss_layer_dir is None:
            from_opendss_layer_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)),'from_opendss')
        if not os.path.exists(from_opendss_layer_dir):
            msg = "Cannot find the 'From OpenDSS' layer."
            logger.error(msg)
            raise Exception(msg)

        logger.debug("Building the model network")

        model.build_networkx(source=None) # Used to remove internal edges in substation
        # FIX: pass the delimiter by keyword; positional 'sep' is deprecated in pandas.
        df = pd.read_csv(feeder_file, sep=' ') #The RNM file specifying which feeders belong to which substation
        substations = {}
        for index,row in df.iterrows():
            substation = row.iloc[1]
            feeder = row.iloc[2]
            buses = feeder.split('->')
            bus1 = buses[1]
            bus2 = buses[0]
            if bus1[0:4].lower() == 'ucmv': # Not swapped if MV load connected to it
                bus1 = buses[0]
                bus2 = buses[1]
            adjusted_feeder = bus1+'->'+bus2 #In the feeder file bus1 and bus2 are swapped
            if substation in substations:
                substations[substation].add(adjusted_feeder)
            else:
                substations[substation]=set([adjusted_feeder])

        logger.debug("Building to_delete and modifier")

        to_delete = Store()
        modifier = Modifier()
        for sub in substations: #sub is the name of the substation and substations[sub] is a set of all the connected feeders
            logger.debug("Processing substation {}. There are {} in total.".format(sub,len(substations)))

            all_nodes = []
            subname = sub.replace('.','')
            subname = subname.lower()
            all_nodes.append(subname)
            hv_subname = subname+'->'+subname.replace('1247','69')+'_s'
            all_nodes.append(hv_subname)
            sourcenode = hv_subname+'_s' #Source point of connection to the substation
            all_nodes.append(sourcenode)
            feeder_names = [] # Feeder point of connection to the substation
            rated_power = None
            emergency_power = None
            loadloss = None
            noloadloss = None
            reactance = None
            for feeder in substations[sub]:
                feeder_name = feeder.replace('.','')+'_s'
                feeder_name = feeder_name.lower()
                feeder_names.append(feeder_name)
                all_nodes.append(feeder_name)

            # Mark the substation's internal nodes and edges for deletion by
            # creating placeholder objects of the same DiTTo type in to_delete.
            all_nodes_set = set(all_nodes)
            internal_edges = model.get_internal_edges(all_nodes_set)
            for n in all_nodes_set:
                obj_name = type(model[n]).__name__
                base_obj = globals()[obj_name](to_delete)
                base_obj.name = n
            for e in internal_edges:
                obj_name = type(model[e]).__name__
                if obj_name == 'PowerTransformer':
                    # Capture the original transformer ratings so the generic
                    # template can be patched to match below.
                    reactance = model[e].reactances[0] # Assume the same for all windings in a substation
                    loadloss = model[e].loadloss
                    noloadloss = model[e].noload_loss
                    rated_power = model[e].windings[0].rated_power # Assume the same for all windings in a substation
                    emergency_power = model[e].windings[0].emergency_power # Assume the same for all windings in a substation

                base_obj = globals()[obj_name](to_delete)
                base_obj.name = e

            num_model_feeders = len(substations[sub])
            not_allocated = True
            # Write the substation files to disk. These are then read and added
            for sub_file in os.listdir(substation_folder): # Important these must be listed in increasing order
                # Pick the first (smallest) template with enough feeder bays.
                if len(pd.read_csv(substation_folder+'/%s/feeders.csv'%sub_file))>= num_model_feeders:
                    generic_source = list(pd.read_csv(substation_folder+'/%s/source.csv'%sub_file)['source'])
                    generic_feeders = list(pd.read_csv(substation_folder+'/%s/feeders.csv'%sub_file)['feeders'])[:num_model_feeders] #Select the first n feeder bays of the substation as required
                    generic_nodes = list(pd.read_csv(substation_folder+'/%s/all_nodes.csv'%sub_file)['node'])
                    # FIX: use context managers so file handles are closed even on error.
                    with open(substation_folder+'/%s/%s.dss'%(sub_file,sub_file),'r') as generic_substation_fp:
                        generic_substation_dss = generic_substation_fp.read()
                    substation_dss = generic_substation_dss.replace(generic_source[0],'%s'%sourcenode) # Replace source node
                    for i in range(len(feeder_names)):
                        substation_dss = substation_dss.replace(generic_feeders[i],'%s'%feeder_names[i]) # Replace feeder nodes

                    # TODO: do this in a better way.
                    for i in range(len(generic_nodes)): #Replace any remaining nodes that haven't been changed yet. Unallocated feeder heads are managed here
                        substation_dss = substation_dss.replace(generic_nodes[i]+ ' ','%s_%s_%s '%(sub_file,subname,generic_nodes[i]))
                        substation_dss = substation_dss.replace(generic_nodes[i]+ '.','%s_%s_%s.'%(sub_file,subname,generic_nodes[i]))
                    # Namespace every named OpenDSS element with the template/substation prefix
                    # so elements from different substations cannot collide.
                    substation_dss = substation_dss.replace('Line.','Line.%s_%s_'%(sub_file,subname))
                    substation_dss = substation_dss.replace('LineCode.','LineCode.%s_%s_'%(sub_file,subname))
                    substation_dss = substation_dss.replace('Capacitor.','Capacitor.%s_%s_'%(sub_file,subname))
                    substation_dss = substation_dss.replace('CapControl.','CapControl.%s_%s_'%(sub_file,subname))
                    substation_dss = substation_dss.replace('Monitor.','Monitor.%s_%s_'%(sub_file,subname))
                    substation_dss = substation_dss.replace('Relay.','Relay.%s_%s_'%(sub_file,subname))
                    substation_dss = substation_dss.replace('Transformer.','Transformer.%s_%s_'%(sub_file,subname))
                    substation_dss = substation_dss.replace('transformer=','transformer=%s_%s_'%(sub_file,subname))
                    substation_dss = substation_dss.replace('Regcontrol.','Regcontrol.%s_%s_'%(sub_file,subname))

                    # TODO: WARNING: This is a total hack to replace the substation attributes and should not be used long-term.
                    # This is very specific to the substations used in dataset3 and is very case sensitive.
                    substation_dss = substation_dss.replace('kvas=(30000, 30000)','kvas=(%f, %f)'%(rated_power,rated_power))
                    substation_dss = substation_dss.replace('kvas=(25000, 25000)','kvas=(%f, %f)'%(rated_power/3.0,rated_power/3.0))
                    substation_dss = substation_dss.replace('%noloadloss=0.12','%noloadloss={noll}'.format(noll=noloadloss))
                    substation_dss = substation_dss.replace('%loadloss=0.1','%loadloss={ll}'.format(ll=loadloss))
                    substation_dss = substation_dss.replace('XHL=0.1','XHL=%f'%(reactance))

                    if not os.path.isdir(output_substation_folder+'/%s'%subname):
                        os.makedirs(output_substation_folder+'/%s'%subname)
                    with open(output_substation_folder+'/%s/substation.dss'%(subname),'w') as substation_output:
                        substation_output.write(substation_dss)
                    # Empty Buscoords file: the from_opendss layer requires one to exist.
                    with open(output_substation_folder+'/%s/Buscoords.dss'%subname,'w') as buscoords:
                        pass
                    with open(substation_folder+'/%s/master.dss'%sub_file,'r') as masterfile_fp:
                        masterfile_dss = masterfile_fp.read()
                    masterfile_dss = masterfile_dss.replace('SourceBus',sourcenode)
                    with open(output_substation_folder+'/%s/master.dss'%subname,'w') as master_output:
                        master_output.write(masterfile_dss)
                    not_allocated = False
                    break
            if not_allocated:
                # FIX: original code did raise('...') on a plain string, which in
                # Python 3 raises TypeError ("exceptions must derive from
                # BaseException") and masks the intended error message.
                raise Exception('Substation too small. %d feeders needed.  Exiting...'%(num_model_feeders))

        logger.debug("Creating reduced and final models")

        reduced_model = modifier.delete(model, to_delete)
        final_model = reduced_model
        from_opendss_layer = Layer(from_opendss_layer_dir)
        from_opendss_layer.args.mode = ArgMode.USE
        from_opendss_layer.kwargs.mode = ArgMode.USE
        # Only the top level of output_substation_folder is walked (note the
        # unconditional break): each immediate subdirectory is one substation.
        for _, dirnames, filenames in os.walk(output_substation_folder):
            for sub_folder in dirnames:
                logger.debug("Processing output_substation_folder/{}".format(sub_folder))
                from_opendss_layer.args[0] = os.path.join(output_substation_folder,sub_folder,'master.dss')
                from_opendss_layer.args[1] = os.path.join(output_substation_folder, sub_folder, 'Buscoords.dss')
                from_opendss_layer.kwargs['read_power_source'] = False
                s = Stack()
                from_opendss_layer.run_layer(s)
                substation_model = s.model
                logger.debug("Adding model from {} to final_model".format(sub_folder))
                final_model = modifier.add(final_model, substation_model)
            break
        logger.debug("Returning {!r}".format(final_model))
        return final_model