def main(config):
    print('Running decentralized model for buildings with scenario = %s' % config.scenario)
    locator = cea.inputlocator.InputLocator(config.scenario)
    total_demand = pd.read_csv(locator.get_total_demand())
    building_names = total_demand.Name.values
    prices = Prices(locator, config)
    lca = lca_calculations(locator, config)

    disconnected_building_main(locator=locator, building_names=building_names, config=config, prices=prices, lca=lca)
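
# A minimal driver sketch for main() above (an assumption about how it is wired up,
# following the cea.config pattern used by the __main__ block in Example 7 further below).
if __name__ == '__main__':
    import cea.config
    main(cea.config.Configuration())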
# Example 2
def moo_optimization(locator, weather_file, gv, config):
    '''
    This function optimizes the conversion, storage and distribution systems of a heating distribution for the case
    study. It requires that the energy demand, technology potential and thermal networks are simulated, as follows:

        - energy demand simulation: run cea/demand/demand_main.py
        - PV potential: run cea/technologies/solar/photovoltaic.py
        - PVT potential: run cea/technologies/solar/photovoltaic_thermal.py
        - flat plate solar collector potential: run cea/technologies/solar/solar_collector.py with
          config.solar.type_scpanel = 'FP'
        - evacuated tube solar collector potential: run cea/technologies/solar/solar_collector.py with
          config.solar.type_scpanel = 'ET'
        - waste water heat recovery: run cea/resources/sewage_heat_exchanger.py
        - lake water potential: run cea/resources/lake_potential.py
        - thermal network simulation: run cea/technologies/thermal_network/thermal_network_matrix.py
          if no network is currently present in the case study, consider running network_layout/main.py first
        - decentralized building simulation: run cea/optimization/preprocessing/decentralized_building_main.py

    :param locator: InputLocator pointing to the scenario to work on
    :param weather_file: path to the weather file
    :param gv: global variables class
    :type locator: cea.inputlocator.InputLocator
    :type weather_file: string
    :type gv: class

    :returns: None
    :rtype: NoneType
    '''

    # read total demand file and names and number of all buildings
    total_demand = pd.read_csv(locator.get_total_demand())
    building_names = total_demand.Name.values
    gv.num_tot_buildings = total_demand.Name.count()
    lca = lca_calculations(locator, config)
    prices = Prices(locator, config)

    # pre-process information regarding resources and technologies (they are treated before the optimization)
    # optimize best systems for every individual building (they will compete against a district distribution solution)
    print "PRE-PROCESSING"
    extra_costs, extra_CO2, extra_primary_energy, solar_features = preproccessing(
        locator, total_demand, building_names, weather_file, gv, config,
        prices, lca)

    # optimize the distribution and linearize the results (at the moment, there is only a linearization of values in Zug)
    print("NETWORK OPTIMIZATION")
    network_features = network_opt_main.network_opt_main(config, locator)

    # optimize conversion systems
    print "CONVERSION AND STORAGE OPTIMIZATION"
    master_main.non_dominated_sorting_genetic_algorithm(
        locator, building_names, extra_costs, extra_CO2, extra_primary_energy,
        solar_features, network_features, gv, config, prices, lca)
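
# Hedged usage sketch for moo_optimization(): it only needs the objects that the
# __main__ driver in Example 7 below also builds (a configuration, the legacy
# global-variables object, an InputLocator for the scenario and the weather file path).
if __name__ == '__main__':
    import cea.config
    import cea.globalvar
    import cea.inputlocator

    config = cea.config.Configuration()
    gv = cea.globalvar.GlobalVariables()
    locator = cea.inputlocator.InputLocator(scenario=config.scenario)
    moo_optimization(locator=locator, weather_file=config.weather, gv=gv, config=config)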
# Example 3
def moo_optimization(locator, weather_file, gv, config):
    '''
    This function optimizes the conversion, storage and distribution systems of a heating distribution for the case study.
    It requires that the solar technology potentials have been calculated in advance and that the nodes of the
    distribution network have already been generated.

    :param locator: InputLocator pointing to the scenario to work on
    :param weather_file: path to the weather file
    :param gv: global variables class
    :type locator: cea.inputlocator.InputLocator
    :type weather_file: string
    :type gv: class

    :returns: None
    :rtype: NoneType
    '''

    # read total demand file and names and number of all buildings
    total_demand = pd.read_csv(locator.get_total_demand())
    building_names = total_demand.Name.values
    gv.num_tot_buildings = total_demand.Name.count()
    lca = lca_calculations(locator, config)
    prices = Prices(locator, config)

    # pre-process information regarding resources and technologies (they are treated before the optimization)
    # optimize best systems for every individual building (they will compete against a district distribution solution)
    print "PRE-PROCESSING"
    extra_costs, extra_CO2, extra_primary_energy, solarFeat = preproccessing(
        locator, total_demand, building_names, weather_file, gv, config,
        prices, lca)

    # optimize the distribution and linearize the results (at the moment, there is only a linearization of values in Zug)
    print("NETWORK OPTIMIZATION")
    network_features = network_opt.network_opt_main(config, locator)

    # optimize conversion systems
    print "CONVERSION AND STORAGE OPTIMIZATION"
    master.evolutionary_algo_main(locator, building_names, extra_costs,
                                  extra_CO2, extra_primary_energy, solarFeat,
                                  network_features, gv, config, prices, lca)
def natural_gas_imports(generation, individual, locator, config):
    category = "optimization-detailed"

    data_cooling = pd.read_csv(
        os.path.join(
            locator.get_optimization_slave_cooling_activation_pattern(
                individual, generation)))

    # Natural Gas supply for the CCGT plant
    lca = lca_calculations(locator, config)
    co2_CCGT = data_cooling['CO2_from_using_CCGT']
    E_gen_CCGT_W = data_cooling[
        'E_gen_CCGT_associated_with_absorption_chillers_W']
    NG_used_CCGT_W = np.zeros(8760)
    for hour in range(8760):
        NG_used_CCGT_W[hour] = (co2_CCGT[hour] +
                                E_gen_CCGT_W[hour] * lca.EL_TO_CO2 * 3600E-6
                                ) * 1.0E6 / (lca.NG_CC_TO_CO2_STD * WH_TO_J)

    date = data_cooling.DATE.values

    results = pd.DataFrame({
        "DATE":
        date,
        "NG_used_CCGT_W":
        NG_used_CCGT_W,
        "CO2_from_using_CCGT":
        co2_CCGT,
        "E_gen_CCGT_associated_with_absorption_chillers_W":
        E_gen_CCGT_W
    })

    results.to_csv(locator.get_optimization_slave_natural_gas_imports(
        individual, generation, category),
                   index=False)

    return results
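
# The hourly loop in natural_gas_imports() above inverts a linear relation between
# reported CO2, co-generated electricity and natural gas input, so the same
# back-calculation can be written as one vectorised numpy expression. This is only
# an equivalence sketch (a hypothetical helper, not part of the CEA API); the
# coefficient arguments stand in for the lca attributes and the WH_TO_J constant used above.
import numpy as np

def ng_used_from_co2(co2_ccgt, e_gen_ccgt_w, el_to_co2, ng_cc_to_co2_std, wh_to_j):
    """Back-calculate hourly natural gas use of the CCGT [W] from its reported CO2."""
    co2 = np.asarray(co2_ccgt, dtype=float)
    e_gen = np.asarray(e_gen_ccgt_w, dtype=float)
    # credit the CO2 attributed to the co-generated electricity, then divide by the
    # specific CO2 intensity of natural gas burnt in a combined cycle
    return (co2 + e_gen * el_to_co2 * 3600E-6) * 1.0E6 / (ng_cc_to_co2_std * wh_to_j)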
# Example 5
def preprocessing_cost_data(locator, data_raw, individual, generations,
                            data_address, config):

    string_network = data_raw['network'].loc[individual].values[0]
    total_demand = pd.read_csv(locator.get_total_demand())
    building_names = total_demand.Name.values
    individual_barcode_list = data_raw['individual_barcode'].loc[
        individual].values[0]

    # The current structure of CEA saves the following columns; if this changes in the future,
    # columns_of_saved_files will need to be updated accordingly
    columns_of_saved_files = [
        'CHP/Furnace', 'CHP/Furnace Share', 'Base Boiler', 'Base Boiler Share',
        'Peak Boiler', 'Peak Boiler Share', 'Heating Lake',
        'Heating Lake Share', 'Heating Sewage', 'Heating Sewage Share', 'GHP',
        'GHP Share', 'Data Centre', 'Compressed Air', 'PV', 'PV Area Share',
        'PVT', 'PVT Area Share', 'SC_ET', 'SC_ET Area Share', 'SC_FP',
        'SC_FP Area Share', 'DHN Temperature', 'DHN unit configuration',
        'Lake Cooling', 'Lake Cooling Share', 'VCC Cooling',
        'VCC Cooling Share', 'Absorption Chiller', 'Absorption Chiller Share',
        'Storage', 'Storage Share', 'DCN Temperature', 'DCN unit configuration'
    ]
    for i in building_names:  # DHN
        columns_of_saved_files.append(str(i) + ' DHN')

    for i in building_names:  # DCN
        columns_of_saved_files.append(str(i) + ' DCN')

    df_current_individual = pd.DataFrame(
        np.zeros(shape=(1, len(columns_of_saved_files))),
        columns=columns_of_saved_files)
    for i, ind in enumerate((columns_of_saved_files)):
        df_current_individual[ind] = individual_barcode_list[i]

    data_address = data_address[data_address['individual_list'] == individual]

    generation_number = data_address['generation_number_address'].values[0]
    individual_number = data_address['individual_number_address'].values[0]
    # get data about the activation patterns of these buildings (main units)

    if config.multi_criteria.network_type == 'DH':
        building_demands_df = pd.read_csv(
            locator.get_optimization_network_results_summary(
                string_network)).set_index("DATE")
        data_activation_path = os.path.join(
            locator.get_optimization_slave_heating_activation_pattern(
                individual_number, generation_number))
        df_heating = pd.read_csv(data_activation_path).set_index("DATE")

        data_activation_path = os.path.join(
            locator.
            get_optimization_slave_electricity_activation_pattern_heating(
                individual_number, generation_number))
        df_electricity = pd.read_csv(data_activation_path).set_index("DATE")

        # get data about the activation patterns of these buildings (storage)
        data_storage_path = os.path.join(
            locator.get_optimization_slave_storage_operation_data(
                individual_number, generation_number))
        df_SO = pd.read_csv(data_storage_path).set_index("DATE")

        # join into one database
        data_processed = df_heating.join(df_electricity).join(df_SO).join(
            building_demands_df)

    elif config.multi_criteria.network_type == 'DC':

        data_costs = pd.read_csv(
            os.path.join(
                locator.
                get_optimization_slave_investment_cost_detailed_cooling(
                    individual_number, generation_number)))
        data_cooling = pd.read_csv(
            os.path.join(
                locator.get_optimization_slave_cooling_activation_pattern(
                    individual_number, generation_number)))
        data_electricity = pd.read_csv(
            os.path.join(
                locator.
                get_optimization_slave_electricity_activation_pattern_cooling(
                    individual_number, generation_number)))

        # Total CAPEX calculations
        # Absorption Chiller
        Absorption_chiller_cost_data = pd.read_excel(
            locator.get_supply_systems(config.region),
            sheetname="Absorption_chiller",
            usecols=[
                'type', 'code', 'cap_min', 'cap_max', 'a', 'b', 'c', 'd', 'e',
                'IR_%', 'LT_yr', 'O&M_%'
            ])
        Absorption_chiller_cost_data = Absorption_chiller_cost_data[
            Absorption_chiller_cost_data['type'] == 'double']
        max_ACH_chiller_size = max(
            Absorption_chiller_cost_data['cap_max'].values)
        Inv_IR = (Absorption_chiller_cost_data.iloc[0]['IR_%']) / 100
        Inv_LT = Absorption_chiller_cost_data.iloc[0]['LT_yr']
        Q_ACH_max_W = data_cooling['Q_from_ACH_W'].max()
        Q_ACH_max_W = Q_ACH_max_W * (1 + SIZING_MARGIN)
        number_of_ACH_chillers = max(
            int(ceil(Q_ACH_max_W / max_ACH_chiller_size)), 1)
        Q_nom_ACH_W = Q_ACH_max_W / number_of_ACH_chillers
        Capex_a_ACH, Opex_fixed_ACH = calc_Cinv(Q_nom_ACH_W, locator, 'double',
                                                config)
        Capex_total_ACH = (Capex_a_ACH * ((1 + Inv_IR)**Inv_LT - 1) /
                           (Inv_IR) *
                           (1 + Inv_IR)**Inv_LT) * number_of_ACH_chillers
        data_costs['Capex_total_ACH'] = Capex_total_ACH
        data_costs['Opex_total_ACH'] = np.sum(
            data_cooling['Opex_var_ACH']) + data_costs['Opex_fixed_ACH']

        # VCC
        VCC_cost_data = pd.read_excel(locator.get_supply_systems(
            config.region),
                                      sheetname="Chiller")
        VCC_cost_data = VCC_cost_data[VCC_cost_data['code'] == 'CH3']
        max_VCC_chiller_size = max(VCC_cost_data['cap_max'].values)
        Inv_IR = (VCC_cost_data.iloc[0]['IR_%']) / 100
        Inv_LT = VCC_cost_data.iloc[0]['LT_yr']
        Q_VCC_max_W = data_cooling['Q_from_VCC_W'].max()
        Q_VCC_max_W = Q_VCC_max_W * (1 + SIZING_MARGIN)
        number_of_VCC_chillers = max(
            int(ceil(Q_VCC_max_W / max_VCC_chiller_size)), 1)
        Q_nom_VCC_W = Q_VCC_max_W / number_of_VCC_chillers
        Capex_a_VCC, Opex_fixed_VCC = calc_Cinv_VCC(Q_nom_VCC_W, locator,
                                                    config, 'CH3')
        Capex_total_VCC = (Capex_a_VCC * ((1 + Inv_IR)**Inv_LT - 1) /
                           (Inv_IR) *
                           (1 + Inv_IR)**Inv_LT) * number_of_VCC_chillers
        data_costs['Capex_total_VCC'] = Capex_total_VCC
        data_costs['Opex_total_VCC'] = np.sum(
            data_cooling['Opex_var_VCC']) + data_costs['Opex_fixed_VCC']

        # VCC Backup
        Q_VCC_backup_max_W = data_cooling['Q_from_VCC_backup_W'].max()
        Q_VCC_backup_max_W = Q_VCC_backup_max_W * (1 + SIZING_MARGIN)
        number_of_VCC_backup_chillers = max(
            int(ceil(Q_VCC_backup_max_W / max_VCC_chiller_size)), 1)
        Q_nom_VCC_backup_W = Q_VCC_backup_max_W / number_of_VCC_backup_chillers
        Capex_a_VCC_backup, Opex_fixed_VCC_backup = calc_Cinv_VCC(
            Q_nom_VCC_backup_W, locator, config, 'CH3')
        Capex_total_VCC_backup = (
            Capex_a_VCC_backup * ((1 + Inv_IR)**Inv_LT - 1) / (Inv_IR) *
            (1 + Inv_IR)**Inv_LT) * number_of_VCC_backup_chillers
        data_costs['Capex_total_VCC_backup'] = Capex_total_VCC_backup
        data_costs['Opex_total_VCC_backup'] = np.sum(
            data_cooling['Opex_var_VCC_backup']
        ) + data_costs['Opex_fixed_VCC_backup']

        # Storage Tank
        storage_cost_data = pd.read_excel(locator.get_supply_systems(
            config.region),
                                          sheetname="TES")
        storage_cost_data = storage_cost_data[storage_cost_data['code'] ==
                                              'TES2']
        Inv_IR = (storage_cost_data.iloc[0]['IR_%']) / 100
        Inv_LT = storage_cost_data.iloc[0]['LT_yr']
        Capex_a_storage_tank = data_costs['Capex_a_Tank'][0]
        Capex_total_storage_tank = (Capex_a_storage_tank *
                                    ((1 + Inv_IR)**Inv_LT - 1) / (Inv_IR) *
                                    (1 + Inv_IR)**Inv_LT)
        data_costs['Capex_total_storage_tank'] = Capex_total_storage_tank
        data_costs['Opex_total_storage_tank'] = np.sum(
            data_cooling['Opex_var_VCC_backup']
        ) + data_costs['Opex_fixed_Tank']

        # Cooling Tower
        CT_cost_data = pd.read_excel(locator.get_supply_systems(config.region),
                                     sheetname="CT")
        CT_cost_data = CT_cost_data[CT_cost_data['code'] == 'CT1']
        max_CT_size = max(CT_cost_data['cap_max'].values)
        Inv_IR = (CT_cost_data.iloc[0]['IR_%']) / 100
        Inv_LT = CT_cost_data.iloc[0]['LT_yr']
        Qc_CT_max_W = data_cooling['Qc_CT_associated_with_all_chillers_W'].max(
        )
        number_of_CT = max(int(ceil(Qc_CT_max_W / max_CT_size)), 1)
        Qnom_CT_W = Qc_CT_max_W / number_of_CT
        Capex_a_CT, Opex_fixed_CT = calc_Cinv_CT(Qnom_CT_W, locator, config,
                                                 'CT1')
        Capex_total_CT = (Capex_a_CT * ((1 + Inv_IR)**Inv_LT - 1) / (Inv_IR) *
                          (1 + Inv_IR)**Inv_LT) * number_of_CT
        data_costs['Capex_total_CT'] = Capex_total_CT
        data_costs['Opex_total_CT'] = np.sum(
            data_cooling['Opex_var_CT']) + data_costs['Opex_fixed_CT']

        # CCGT
        CCGT_cost_data = pd.read_excel(locator.get_supply_systems(
            config.region),
                                       sheetname="CCGT")
        technology_code = list(set(CCGT_cost_data['code']))
        CCGT_cost_data = CCGT_cost_data[CCGT_cost_data['code'] ==
                                        technology_code[0]]
        Inv_IR = (CCGT_cost_data.iloc[0]['IR_%']) / 100
        Inv_LT = CCGT_cost_data.iloc[0]['LT_yr']
        Capex_a_CCGT = data_costs['Capex_a_CCGT'][0]
        Capex_total_CCGT = (Capex_a_CCGT * ((1 + Inv_IR)**Inv_LT - 1) /
                            (Inv_IR) * (1 + Inv_IR)**Inv_LT)
        data_costs['Capex_total_CCGT'] = Capex_total_CCGT
        data_costs['Opex_total_CCGT'] = np.sum(
            data_cooling['Opex_var_CCGT']) + data_costs['Opex_fixed_CCGT']

        # pump
        config.restricted_to = None  # FIXME: remove this later
        config.thermal_network.network_type = config.multi_criteria.network_type
        config.thermal_network.network_names = []
        network_features = network_opt.network_opt_main(config, locator)
        DCN_barcode = ""
        for name in building_names:
            DCN_barcode += str(df_current_individual[name + ' DCN'][0])
        if df_current_individual['Data Centre'][0] == 1:
            df = pd.read_csv(
                locator.get_optimization_network_data_folder(
                    "Network_summary_result_" + hex(int(str(DCN_barcode), 2)) +
                    ".csv"),
                usecols=[
                    "mdot_cool_space_cooling_and_refrigeration_netw_all_kgpers"
                ])
        else:
            df = pd.read_csv(
                locator.get_optimization_network_data_folder(
                    "Network_summary_result_" + hex(int(str(DCN_barcode), 2)) +
                    ".csv"),
                usecols=[
                    "mdot_cool_space_cooling_data_center_and_refrigeration_netw_all_kgpers"
                ])
        mdotA_kgpers = np.array(df)
        mdotnMax_kgpers = np.amax(mdotA_kgpers)
        deltaPmax = np.max((network_features.DeltaP_DCN) *
                           DCN_barcode.count("1") / len(DCN_barcode))
        E_pumping_required_W = mdotnMax_kgpers * deltaPmax / DENSITY_OF_WATER_AT_60_DEGREES_KGPERM3
        P_motor_tot_W = E_pumping_required_W / PUMP_ETA  # electricity required to run the motor
        Pump_max_kW = 375.0
        Pump_min_kW = 0.5
        nPumps = int(np.ceil(P_motor_tot_W / 1000.0 / Pump_max_kW))
        # if the nominal load (electric) > 375kW, a new pump is installed
        Pump_Array_W = np.zeros((nPumps))
        Pump_Remain_W = P_motor_tot_W
        Capex_total_pumps = 0
        Capex_a_total_pumps = 0
        for pump_i in range(nPumps):
            # calculate pump nominal capacity
            Pump_Array_W[pump_i] = min(Pump_Remain_W, Pump_max_kW * 1000)
            if Pump_Array_W[pump_i] < Pump_min_kW * 1000:
                Pump_Array_W[pump_i] = Pump_min_kW * 1000
            Pump_Remain_W -= Pump_Array_W[pump_i]
            pump_cost_data = pd.read_excel(locator.get_supply_systems(
                config.region),
                                           sheetname="Pump")
            pump_cost_data = pump_cost_data[pump_cost_data['code'] == 'PU1']
            # if the design load is below the lowest capacity available for the technology, it is replaced by the
            # smallest capacity listed for that technology in the database
            if Pump_Array_W[pump_i] < pump_cost_data.iloc[0]['cap_min']:
                Pump_Array_W[pump_i] = pump_cost_data.iloc[0]['cap_min']
            pump_cost_data = pump_cost_data[
                (pump_cost_data['cap_min'] <= Pump_Array_W[pump_i])
                & (pump_cost_data['cap_max'] > Pump_Array_W[pump_i])]
            Inv_a = pump_cost_data.iloc[0]['a']
            Inv_b = pump_cost_data.iloc[0]['b']
            Inv_c = pump_cost_data.iloc[0]['c']
            Inv_d = pump_cost_data.iloc[0]['d']
            Inv_e = pump_cost_data.iloc[0]['e']
            Inv_IR = (pump_cost_data.iloc[0]['IR_%']) / 100
            Inv_LT = pump_cost_data.iloc[0]['LT_yr']
            Inv_OM = pump_cost_data.iloc[0]['O&M_%'] / 100
            InvC = Inv_a + Inv_b * (Pump_Array_W[pump_i])**Inv_c + (
                Inv_d + Inv_e * Pump_Array_W[pump_i]) * log(
                    Pump_Array_W[pump_i])
            Capex_total_pumps += InvC
            Capex_a_total_pumps += InvC * (Inv_IR) * (1 + Inv_IR)**Inv_LT / (
                (1 + Inv_IR)**Inv_LT - 1)
        data_costs['Capex_total_pumps'] = Capex_total_pumps
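        # NOTE: the sum below adds 'Opex_fixed_pump' to itself; a variable pump
        # opex term may have been intended as the second operand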
        data_costs['Opex_total_pumps'] = data_costs[
            'Opex_fixed_pump'] + data_costs['Opex_fixed_pump']

        # PV
        pv_installed_area = data_electricity['Area_PV_m2'].max()
        Capex_a_PV, Opex_fixed_PV = calc_Cinv_pv(pv_installed_area, locator,
                                                 config)
        pv_annual_production_kWh = (data_electricity['E_PV_W'].sum()) / 1000
        Opex_a_PV = calc_opex_PV(pv_annual_production_kWh, pv_installed_area)
        PV_cost_data = pd.read_excel(locator.get_supply_systems(config.region),
                                     sheetname="PV")
        technology_code = list(set(PV_cost_data['code']))
        PV_cost_data = PV_cost_data[PV_cost_data['code'] == technology_code[0]]
        Inv_IR = (PV_cost_data.iloc[0]['IR_%']) / 100
        Inv_LT = PV_cost_data.iloc[0]['LT_yr']
        Capex_total_PV = (Capex_a_PV * ((1 + Inv_IR)**Inv_LT - 1) / (Inv_IR) *
                          (1 + Inv_IR)**Inv_LT)
        data_costs['Capex_total_PV'] = Capex_total_PV
        data_costs['Opex_total_PV'] = Opex_a_PV + Opex_fixed_PV

        # Disconnected Buildings
        Capex_total_disconnected = 0
        Opex_total_disconnected = 0
        Capex_a_total_disconnected = 0

        for (index, building_name) in zip(DCN_barcode, building_names):
            if index == '0':
                df = pd.read_csv(
                    locator.
                    get_optimization_disconnected_folder_building_result_cooling(
                        building_name, configuration='AHU_ARU_SCU'))
                dfBest = df[df["Best configuration"] == 1]

                if dfBest['VCC to AHU_ARU_SCU Share'].iloc[
                        0] == 1:  #FIXME: Check for other options
                    Inv_IR = (VCC_cost_data.iloc[0]['IR_%']) / 100
                    Inv_LT = VCC_cost_data.iloc[0]['LT_yr']

                if dfBest['single effect ACH to AHU_ARU_SCU Share (FP)'].iloc[
                        0] == 1:
                    Inv_IR = (
                        Absorption_chiller_cost_data.iloc[0]['IR_%']) / 100
                    Inv_LT = Absorption_chiller_cost_data.iloc[0]['LT_yr']

                Opex_total_disconnected += dfBest[
                    "Operation Costs [CHF]"].iloc[0]
                Capex_a_total_disconnected += dfBest[
                    "Annualized Investment Costs [CHF]"].iloc[0]
                Capex_total_disconnected += (
                    dfBest["Annualized Investment Costs [CHF]"].iloc[0] *
                    ((1 + Inv_IR)**Inv_LT - 1) / (Inv_IR) *
                    (1 + Inv_IR)**Inv_LT)
        data_costs[
            'Capex_total_disconnected_Mio'] = Capex_total_disconnected / 1000000
        data_costs[
            'Opex_total_disconnected_Mio'] = Opex_total_disconnected / 1000000
        data_costs[
            'Capex_a_disconnected_Mio'] = Capex_a_total_disconnected / 1000000

        data_costs['costs_Mio'] = data_raw['population']['costs_Mio'][
            individual]
        data_costs['emissions_kiloton'] = data_raw['population'][
            'emissions_kiloton'][individual]
        data_costs['prim_energy_TJ'] = data_raw['population'][
            'prim_energy_TJ'][individual]

        # Electricity Details/Renewable Share
        total_electricity_demand_decentralized_W = np.zeros(8760)

        DCN_barcode = ""
        for name in building_names:  # identifying the DCN code
            DCN_barcode += str(
                int(df_current_individual[name + ' DCN'].values[0]))
        for i, name in zip(
                DCN_barcode, building_names
        ):  # adding the electricity demand from the decentralized buildings
            if i == '0':
                building_demand = pd.read_csv(
                    locator.get_demand_results_folder() + '//' + name + ".csv",
                    usecols=['E_sys_kWh'])

                total_electricity_demand_decentralized_W += building_demand[
                    'E_sys_kWh'] * 1000

        lca = lca_calculations(locator, config)

        data_electricity_processed = electricity_import_and_exports(
            generation_number, individual_number, locator, config)

        data_costs['Network_electricity_demand_GW'] = (
            data_electricity['E_total_req_W'].sum()) / 1000000000  # GW
        data_costs['Decentralized_electricity_demand_GW'] = (
            data_electricity_processed['E_decentralized_appliances_W'].sum()
        ) / 1000000000  # GW
        data_costs['Total_electricity_demand_GW'] = (
            data_electricity_processed['E_total_req_W'].sum()
        ) / 1000000000  # GW

        renewable_share_electricity = (data_electricity_processed['E_PV_to_directload_W'].sum() +
                                       data_electricity_processed['E_PV_to_grid_W'].sum()) * 100 / \
                                      (data_costs['Total_electricity_demand_GW'] * 1000000000)
        data_costs['renewable_share_electricity'] = renewable_share_electricity

        data_costs['Electricity_Costs_Mio'] = (
            (data_electricity_processed['E_from_grid_W'].sum() +
             data_electricity_processed['E_total_to_grid_W_negative'].sum()) *
            lca.ELEC_PRICE) / 1000000

        data_costs['Capex_a_total_Mio'] = (Capex_a_ACH * number_of_ACH_chillers + Capex_a_VCC * number_of_VCC_chillers + \
                    Capex_a_VCC_backup * number_of_VCC_backup_chillers + Capex_a_CT * number_of_CT + Capex_a_storage_tank + \
                    Capex_a_total_pumps + Capex_a_CCGT + Capex_a_PV + Capex_a_total_disconnected) / 1000000

        data_costs['Capex_a_ACH'] = Capex_a_ACH * number_of_ACH_chillers
        data_costs['Capex_a_VCC'] = Capex_a_VCC * number_of_VCC_chillers
        data_costs[
            'Capex_a_VCC_backup'] = Capex_a_VCC_backup * number_of_VCC_backup_chillers
        data_costs['Capex_a_CT'] = Capex_a_CT * number_of_CT
        data_costs['Capex_a_storage_tank'] = Capex_a_storage_tank
        data_costs['Capex_a_total_pumps'] = Capex_a_total_pumps
        data_costs['Capex_a_CCGT'] = Capex_a_CCGT
        data_costs['Capex_a_PV'] = Capex_a_PV

        data_costs['Capex_total_Mio'] = (data_costs['Capex_total_ACH'] + data_costs['Capex_total_VCC'] + data_costs['Capex_total_VCC_backup'] + \
                                    data_costs['Capex_total_storage_tank'] + data_costs['Capex_total_CT'] + data_costs['Capex_total_CCGT'] + \
                                    data_costs['Capex_total_pumps'] + data_costs['Capex_total_PV'] + Capex_total_disconnected) / 1000000

        data_costs['Opex_total_Mio'] = ((data_costs['Opex_total_ACH'] + data_costs['Opex_total_VCC'] + data_costs['Opex_total_VCC_backup'] + \
                                   data_costs['Opex_total_storage_tank'] + data_costs['Opex_total_CT'] + data_costs['Opex_total_CCGT'] + \
                                   data_costs['Opex_total_pumps'] + Opex_total_disconnected) / 1000000) + data_costs['Electricity_Costs_Mio']

        data_costs['TAC_Mio'] = data_costs['Capex_a_total_Mio'] + data_costs[
            'Opex_total_Mio']

    return data_costs
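
# The pump annualisation near the end of the DC branch above applies the capital
# recovery factor CRF = IR * (1 + IR)**LT / ((1 + IR)**LT - 1) to turn a total
# investment into an annualised cost. A minimal standalone sketch of that arithmetic
# (hypothetical helper name, not part of the CEA code base):
def capital_recovery_factor(interest_rate, lifetime_yr):
    """Return the annuity factor so that Capex_a = Capex_total * CRF."""
    i = float(interest_rate)
    n = float(lifetime_yr)
    return i * (1.0 + i) ** n / ((1.0 + i) ** n - 1.0)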
# Example 6
    def preprocessing_final_generation_data_cost_centralized(self, locator, data_raw, config, data_address):

        total_demand = pd.read_csv(locator.get_total_demand())
        building_names = total_demand.Name.values

        df_all_generations = pd.read_csv(locator.get_optimization_all_individuals())
        preprocessing_costs = pd.read_csv(locator.get_preprocessing_costs())

        # The current structure of CEA saves the following columns; if this changes in the future,
        # columns_of_saved_files will need to be updated accordingly
        columns_of_saved_files = ['CHP/Furnace', 'CHP/Furnace Share', 'Base Boiler',
                                  'Base Boiler Share', 'Peak Boiler', 'Peak Boiler Share',
                                  'Heating Lake', 'Heating Lake Share', 'Heating Sewage', 'Heating Sewage Share', 'GHP',
                                  'GHP Share',
                                  'Data Centre', 'Compressed Air', 'PV', 'PV Area Share', 'PVT', 'PVT Area Share', 'SC_ET',
                                  'SC_ET Area Share', 'SC_FP', 'SC_FP Area Share', 'DHN Temperature', 'DHN unit configuration',
                                  'Lake Cooling', 'Lake Cooling Share', 'VCC Cooling', 'VCC Cooling Share',
                                  'Absorption Chiller', 'Absorption Chiller Share', 'Storage', 'Storage Share',
                                  'DCN Temperature', 'DCN unit configuration']
        for i in building_names:  # DHN
            columns_of_saved_files.append(str(i) + ' DHN')

        for i in building_names:  # DCN
            columns_of_saved_files.append(str(i) + ' DCN')

        individual_index = data_raw['individual_barcode'].index.values
        if config.plots_optimization.network_type == 'DH':
            data_activation_path = os.path.join(
                locator.get_optimization_slave_investment_cost_detailed(1, 1))
            df_heating_costs = pd.read_csv(data_activation_path)
            column_names = df_heating_costs.columns.values
            column_names = np.append(column_names, ['Opex_HP_Sewage', 'Opex_HP_Lake', 'Opex_GHP', 'Opex_CHP_BG',
                                                    'Opex_CHP_NG', 'Opex_Furnace_wet', 'Opex_Furnace_dry',
                                                    'Opex_BaseBoiler_BG', 'Opex_BaseBoiler_NG', 'Opex_PeakBoiler_BG',
                                                    'Opex_PeakBoiler_NG', 'Opex_BackupBoiler_BG', 'Opex_BackupBoiler_NG',
                                                    'Capex_SC', 'Capex_PVT', 'Capex_Boiler_backup', 'Capex_storage_HEX',
                                                    'Capex_furnace', 'Capex_Boiler', 'Capex_Boiler_peak', 'Capex_Lake', 'Capex_CHP',
                                                    'Capex_Sewage', 'Capex_pump', 'Opex_Total', 'Capex_Total', 'Capex_Boiler_Total',
                                                    'Opex_Boiler_Total', 'Opex_CHP_Total', 'Opex_Furnace_Total', 'Disconnected_costs',
                                                    'Capex_Decentralized', 'Opex_Decentralized', 'Capex_Centralized', 'Opex_Centralized', 'Electricity_Costs', 'Process_Heat_Costs'])

            data_processed = pd.DataFrame(np.zeros([len(data_raw['individual_barcode']), len(column_names)]), columns=column_names)

        elif config.plots_optimization.network_type == 'DC':
            data_activation_path = os.path.join(
                locator.get_optimization_slave_investment_cost_detailed_cooling(1, 1))
            df_cooling_costs = pd.read_csv(data_activation_path)
            column_names = df_cooling_costs.columns.values
            column_names = np.append(column_names,
                                     ['Opex_var_ACH', 'Opex_var_CCGT', 'Opex_var_CT', 'Opex_var_Lake', 'Opex_var_VCC', 'Opex_var_PV',
                                      'Opex_var_VCC_backup', 'Capex_ACH', 'Capex_CCGT', 'Capex_CT', 'Capex_Tank', 'Capex_VCC', 'Capex_a_PV',
                                      'Capex_VCC_backup', 'Capex_a_pump', 'Opex_Total', 'Capex_Total', 'Opex_var_pumps', 'Disconnected_costs',
                                      'Capex_Decentralized', 'Opex_Decentralized', 'Capex_Centralized', 'Opex_Centralized', 'Electricitycosts_for_appliances',
                                      'Process_Heat_Costs', 'Electricitycosts_for_hotwater'])

            data_processed = pd.DataFrame(np.zeros([len(data_raw['individual_barcode']), len(column_names)]), columns=column_names)


        for individual_code in range(len(data_raw['individual_barcode'])):

            individual_barcode_list = data_raw['individual_barcode'].loc[individual_index[individual_code]].values[0]
            df_current_individual = pd.DataFrame(np.zeros(shape = (1, len(columns_of_saved_files))), columns=columns_of_saved_files)
            for i, ind in enumerate((columns_of_saved_files)):
                df_current_individual[ind] = individual_barcode_list[i]
            data_address_individual = data_address[data_address['individual_list'] == individual_index[individual_code]]

            generation_pointer = data_address_individual['generation_number_address'].values[0]  # points to the correct file to be referenced from optimization folders
            individual_pointer = data_address_individual['individual_number_address'].values[0]

            if config.plots_optimization.network_type == 'DH':
                data_activation_path = os.path.join(
                    locator.get_optimization_slave_investment_cost_detailed(individual_pointer, generation_pointer))
                df_heating_costs = pd.read_csv(data_activation_path)

                data_activation_path = os.path.join(
                    locator.get_optimization_slave_heating_activation_pattern(individual_pointer, generation_pointer))
                df_heating = pd.read_csv(data_activation_path).set_index("DATE")

                for column_name in df_heating_costs.columns.values:
                    data_processed.loc[individual_code][column_name] = df_heating_costs[column_name].values


                data_processed.loc[individual_code]['Opex_HP_Sewage'] = np.sum(df_heating['Opex_var_HP_Sewage'])
                data_processed.loc[individual_code]['Opex_HP_Lake'] = np.sum(df_heating['Opex_var_HP_Lake'])
                data_processed.loc[individual_code]['Opex_GHP'] = np.sum(df_heating['Opex_var_GHP'])
                data_processed.loc[individual_code]['Opex_CHP_BG'] = np.sum(df_heating['Opex_var_CHP_BG'])
                data_processed.loc[individual_code]['Opex_CHP_NG'] = np.sum(df_heating['Opex_var_CHP_NG'])
                data_processed.loc[individual_code]['Opex_Furnace_wet'] = np.sum(df_heating['Opex_var_Furnace_wet'])
                data_processed.loc[individual_code]['Opex_Furnace_dry'] = np.sum(df_heating['Opex_var_Furnace_dry'])
                data_processed.loc[individual_code]['Opex_BaseBoiler_BG'] = np.sum(df_heating['Opex_var_BaseBoiler_BG'])
                data_processed.loc[individual_code]['Opex_BaseBoiler_NG'] = np.sum(df_heating['Opex_var_BaseBoiler_NG'])
                data_processed.loc[individual_code]['Opex_PeakBoiler_BG'] = np.sum(df_heating['Opex_var_PeakBoiler_BG'])
                data_processed.loc[individual_code]['Opex_PeakBoiler_NG'] = np.sum(df_heating['Opex_var_PeakBoiler_NG'])
                data_processed.loc[individual_code]['Opex_BackupBoiler_BG'] = np.sum(df_heating['Opex_var_BackupBoiler_BG'])
                data_processed.loc[individual_code]['Opex_BackupBoiler_NG'] = np.sum(df_heating['Opex_var_BackupBoiler_NG'])


                data_processed.loc[individual_code]['Capex_SC'] = data_processed.loc[individual_code]['Capex_a_SC'] + data_processed.loc[individual_code]['Opex_fixed_SC']
                data_processed.loc[individual_code]['Capex_PVT'] = data_processed.loc[individual_code]['Capex_a_PVT'] + data_processed.loc[individual_code]['Opex_fixed_PVT']
                data_processed.loc[individual_code]['Capex_Boiler_backup'] = data_processed.loc[individual_code]['Capex_a_Boiler_backup']+ data_processed.loc[individual_code]['Opex_fixed_Boiler_backup']
                data_processed.loc[individual_code]['Capex_storage_HEX'] = data_processed.loc[individual_code]['Capex_a_storage_HEX'] + data_processed.loc[individual_code]['Opex_fixed_storage_HEX']
                data_processed.loc[individual_code]['Capex_furnace'] = data_processed.loc[individual_code]['Capex_a_furnace']+ data_processed.loc[individual_code]['Opex_fixed_furnace']
                data_processed.loc[individual_code]['Capex_Boiler'] = data_processed.loc[individual_code]['Capex_a_Boiler'] + data_processed.loc[individual_code]['Opex_fixed_Boiler']
                data_processed.loc[individual_code]['Capex_Boiler_peak'] = data_processed.loc[individual_code]['Capex_a_Boiler_peak']+ data_processed.loc[individual_code]['Opex_fixed_Boiler_peak']
                data_processed.loc[individual_code]['Capex_Lake'] = data_processed.loc[individual_code]['Capex_a_Lake']+ data_processed.loc[individual_code]['Opex_fixed_Lake']
                data_processed.loc[individual_code]['Capex_Sewage'] = data_processed.loc[individual_code]['Capex_a_Sewage'] + data_processed.loc[individual_code]['Opex_fixed_Boiler']
                data_processed.loc[individual_code]['Capex_pump'] = data_processed.loc[individual_code]['Capex_a_pump'] + data_processed.loc[individual_code]['Opex_fixed_pump']
                data_processed.loc[individual_code]['Capex_CHP'] = data_processed.loc[individual_code]['Capex_a_CHP'] + data_processed.loc[individual_code]['Opex_fixed_CHP']
                data_processed.loc[individual_code]['Disconnected_costs'] = df_heating_costs['CostDiscBuild']

                data_processed.loc[individual_code]['Capex_Boiler_Total'] = data_processed.loc[individual_code]['Capex_Boiler'] + \
                                                                            data_processed.loc[individual_code][
                                                                                'Capex_Boiler_peak'] + \
                                                                            data_processed.loc[individual_code][
                                                                                'Capex_Boiler_backup']
                data_processed.loc[individual_code]['Opex_Boiler_Total'] = data_processed.loc[individual_code]['Opex_BackupBoiler_NG'] + \
                                                                           data_processed.loc[individual_code][
                                                                               'Opex_BackupBoiler_BG'] + \
                                                                           data_processed.loc[individual_code][
                                                                               'Opex_PeakBoiler_NG'] + \
                                                                           data_processed.loc[individual_code][
                                                                               'Opex_PeakBoiler_BG'] + \
                                                                           data_processed.loc[individual_code][
                                                                               'Opex_BaseBoiler_NG'] + \
                                                                           data_processed.loc[individual_code][
                                                                               'Opex_BaseBoiler_BG']
                data_processed.loc[individual_code]['Opex_CHP_Total'] = data_processed.loc[individual_code]['Opex_CHP_NG'] + \
                                                                        data_processed.loc[individual_code][
                                                                            'Opex_CHP_BG']

                data_processed.loc[individual_code]['Opex_Furnace_Total'] = data_processed.loc[individual_code]['Opex_Furnace_wet'] + \
                                                                          data_processed.loc[individual_code]['Opex_Furnace_dry']

                data_processed.loc[individual_code]['Electricity_Costs'] = preprocessing_costs['elecCosts'].values[0]
                data_processed.loc[individual_code]['Process_Heat_Costs'] = preprocessing_costs['hpCosts'].values[0]




                data_processed.loc[individual_code]['Opex_Centralized'] \
                    = data_processed.loc[individual_code]['Opex_HP_Sewage'] + data_processed.loc[individual_code]['Opex_HP_Lake'] + \
                      data_processed.loc[individual_code]['Opex_GHP'] + data_processed.loc[individual_code]['Opex_CHP_BG'] + \
                      data_processed.loc[individual_code]['Opex_CHP_NG'] + data_processed.loc[individual_code]['Opex_Furnace_wet'] + \
                      data_processed.loc[individual_code]['Opex_Furnace_dry'] + data_processed.loc[individual_code]['Opex_BaseBoiler_BG'] + \
                      data_processed.loc[individual_code]['Opex_BaseBoiler_NG'] + data_processed.loc[individual_code]['Opex_PeakBoiler_BG'] + \
                      data_processed.loc[individual_code]['Opex_PeakBoiler_NG'] + data_processed.loc[individual_code]['Opex_BackupBoiler_BG'] + \
                      data_processed.loc[individual_code]['Opex_BackupBoiler_NG'] + \
                      data_processed.loc[individual_code]['Electricity_Costs'] + data_processed.loc[individual_code][
                          'Process_Heat_Costs']

                data_processed.loc[individual_code]['Capex_Centralized'] = data_processed.loc[individual_code]['Capex_SC'] + \
                            data_processed.loc[individual_code]['Capex_PVT'] + data_processed.loc[individual_code]['Capex_Boiler_backup'] + \
                            data_processed.loc[individual_code]['Capex_storage_HEX'] + data_processed.loc[individual_code]['Capex_furnace'] + \
                            data_processed.loc[individual_code]['Capex_Boiler'] + data_processed.loc[individual_code]['Capex_Boiler_peak'] + \
                            data_processed.loc[individual_code]['Capex_Lake'] + data_processed.loc[individual_code]['Capex_Sewage'] + \
                            data_processed.loc[individual_code]['Capex_pump']

                data_processed.loc[individual_code]['Capex_Decentralized'] = df_heating_costs['Capex_Disconnected']
                data_processed.loc[individual_code]['Opex_Decentralized'] = df_heating_costs['Opex_Disconnected']
                data_processed.loc[individual_code]['Capex_Total'] = data_processed.loc[individual_code]['Capex_Centralized'] + data_processed.loc[individual_code]['Capex_Decentralized']
                data_processed.loc[individual_code]['Opex_Total'] = data_processed.loc[individual_code]['Opex_Centralized'] + data_processed.loc[individual_code]['Opex_Decentralized']

            elif config.plots_optimization.network_type == 'DC':
                data_activation_path = os.path.join(
                    locator.get_optimization_slave_investment_cost_detailed(individual_pointer, generation_pointer))
                disconnected_costs = pd.read_csv(data_activation_path)

                data_activation_path = os.path.join(
                    locator.get_optimization_slave_investment_cost_detailed_cooling(individual_pointer, generation_pointer))
                df_cooling_costs = pd.read_csv(data_activation_path)

                data_activation_path = os.path.join(
                    locator.get_optimization_slave_cooling_activation_pattern(individual_pointer, generation_pointer))
                df_cooling = pd.read_csv(data_activation_path).set_index("DATE")
                data_load = pd.read_csv(os.path.join(
                    locator.get_optimization_slave_cooling_activation_pattern(individual_pointer, generation_pointer)))
                data_load_electricity = pd.read_csv(os.path.join(
                    locator.get_optimization_slave_electricity_activation_pattern_cooling(individual_pointer, generation_pointer)))

                for column_name in df_cooling_costs.columns.values:
                    data_processed.loc[individual_code][column_name] = df_cooling_costs[column_name].values

                data_processed.loc[individual_code]['Opex_var_ACH'] = np.sum(df_cooling['Opex_var_ACH'])
                data_processed.loc[individual_code]['Opex_var_CCGT'] = np.sum(df_cooling['Opex_var_CCGT'])
                data_processed.loc[individual_code]['Opex_var_CT'] = np.sum(df_cooling['Opex_var_CT'])
                data_processed.loc[individual_code]['Opex_var_Lake'] = np.sum(df_cooling['Opex_var_Lake'])
                data_processed.loc[individual_code]['Opex_var_VCC'] = np.sum(df_cooling['Opex_var_VCC'])
                data_processed.loc[individual_code]['Opex_var_VCC_backup'] = np.sum(df_cooling['Opex_var_VCC_backup'])
                data_processed.loc[individual_code]['Opex_var_pumps'] = np.sum(data_processed.loc[individual_code]['Opex_var_pump'])
                data_processed.loc[individual_code]['Opex_var_PV'] = -np.sum(data_load_electricity['KEV'])

                Absorption_chiller_cost_data = pd.read_excel(locator.get_supply_systems(config.region),
                                                             sheetname="Absorption_chiller",
                                                             usecols=['type', 'code', 'cap_min', 'cap_max', 'a', 'b',
                                                                      'c', 'd', 'e', 'IR_%',
                                                                      'LT_yr', 'O&M_%'])
                Absorption_chiller_cost_data = Absorption_chiller_cost_data[
                    Absorption_chiller_cost_data['type'] == 'double']
                max_chiller_size = max(Absorption_chiller_cost_data['cap_max'].values)

                Q_ACH_max_W = data_load['Q_from_ACH_W'].max()
                Q_ACH_max_W = Q_ACH_max_W * (1 + SIZING_MARGIN)
                number_of_ACH_chillers = int(ceil(Q_ACH_max_W / max_chiller_size))

                VCC_cost_data = pd.read_excel(locator.get_supply_systems(config.region), sheetname="Chiller")
                VCC_cost_data = VCC_cost_data[VCC_cost_data['code'] == 'CH3']
                max_VCC_chiller_size = max(VCC_cost_data['cap_max'].values)

                Q_VCC_max_W = data_load['Q_from_VCC_W'].max()
                Q_VCC_max_W = Q_VCC_max_W * (1 + SIZING_MARGIN)
                number_of_VCC_chillers = int(ceil(Q_VCC_max_W / max_VCC_chiller_size))

                PV_peak_kW = data_load_electricity['E_PV_W'].max() / 1000
                Capex_a_PV, Opex_fixed_PV = calc_Cinv_pv(PV_peak_kW, locator, config)

                data_processed.loc[individual_code]['Capex_ACH'] = (data_processed.loc[individual_code]['Capex_a_ACH'] + data_processed.loc[individual_code]['Opex_fixed_ACH']) * number_of_ACH_chillers
                data_processed.loc[individual_code]['Capex_CCGT'] = data_processed.loc[individual_code]['Capex_a_CCGT'] + data_processed.loc[individual_code]['Opex_fixed_CCGT']
                data_processed.loc[individual_code]['Capex_CT'] = data_processed.loc[individual_code]['Capex_a_CT']+ data_processed.loc[individual_code]['Opex_fixed_CT']
                data_processed.loc[individual_code]['Capex_Tank'] = data_processed.loc[individual_code]['Capex_a_Tank'] + data_processed.loc[individual_code]['Opex_fixed_Tank']
                data_processed.loc[individual_code]['Capex_VCC'] = (data_processed.loc[individual_code]['Capex_a_VCC']+ data_processed.loc[individual_code]['Opex_fixed_VCC']) * number_of_VCC_chillers
                data_processed.loc[individual_code]['Capex_VCC_backup'] = data_processed.loc[individual_code]['Capex_a_VCC_backup'] + data_processed.loc[individual_code]['Opex_fixed_VCC_backup']
                data_processed.loc[individual_code]['Capex_a_pump'] = data_processed.loc[individual_code]['Capex_pump']+ data_processed.loc[individual_code]['Opex_fixed_pump']
                data_processed.loc[individual_code]['Capex_a_PV'] =  Capex_a_PV + Opex_fixed_PV

                data_processed.loc[individual_code]['Disconnected_costs'] = disconnected_costs['CostDiscBuild']
                data_processed.loc[individual_code]['Capex_Decentralized'] = disconnected_costs['Capex_Disconnected']
                data_processed.loc[individual_code]['Opex_Decentralized'] = disconnected_costs['Opex_Disconnected']

                data_processed.loc[individual_code]['Electricitycosts_for_appliances'] = preprocessing_costs['elecCosts'].values[0]
                data_processed.loc[individual_code]['Process_Heat_Costs'] = preprocessing_costs['hpCosts'].values[0]

                E_for_hot_water_demand_W = np.zeros(8760)
                lca = lca_calculations(locator, config)

                for name in building_names:  # adding the electricity demand from the decentralized buildings
                    building_demand = pd.read_csv(locator.get_demand_results_folder() + '//' + name + ".csv",
                                                  usecols=['E_ww_kWh'])

                    E_for_hot_water_demand_W += building_demand['E_ww_kWh'] * 1000

                data_processed.loc[individual_code]['Electricitycosts_for_hotwater'] = E_for_hot_water_demand_W.sum() * lca.ELEC_PRICE


                data_processed.loc[individual_code]['Opex_Centralized'] = data_processed.loc[individual_code]['Opex_var_ACH'] + data_processed.loc[individual_code]['Opex_var_CCGT'] + \
                                               data_processed.loc[individual_code]['Opex_var_CT'] + data_processed.loc[individual_code]['Opex_var_Lake'] + \
                                               data_processed.loc[individual_code]['Opex_var_VCC'] + data_processed.loc[individual_code]['Opex_var_VCC_backup'] + data_processed.loc[individual_code]['Opex_var_pumps'] + \
                                               data_processed.loc[individual_code]['Electricitycosts_for_appliances'] + data_processed.loc[individual_code]['Process_Heat_Costs'] + \
                                               data_processed.loc[individual_code]['Opex_var_PV'] + data_processed.loc[individual_code]['Electricitycosts_for_hotwater']

                data_processed.loc[individual_code]['Capex_Centralized'] = data_processed.loc[individual_code]['Capex_a_ACH'] + data_processed.loc[individual_code]['Capex_a_CCGT'] + \
                                               data_processed.loc[individual_code]['Capex_a_CT'] + data_processed.loc[individual_code]['Capex_a_Tank'] + \
                                               data_processed.loc[individual_code]['Capex_a_VCC'] + data_processed.loc[individual_code]['Capex_a_VCC_backup'] + \
                                               data_processed.loc[individual_code]['Capex_pump'] + data_processed.loc[individual_code]['Opex_fixed_ACH'] + \
                                               data_processed.loc[individual_code]['Opex_fixed_CCGT'] + data_processed.loc[individual_code]['Opex_fixed_CT'] + \
                                               data_processed.loc[individual_code]['Opex_fixed_Tank'] + data_processed.loc[individual_code]['Opex_fixed_VCC'] + \
                                               data_processed.loc[individual_code]['Opex_fixed_VCC_backup'] + data_processed.loc[individual_code]['Opex_fixed_pump'] + Capex_a_PV + Opex_fixed_PV


                data_processed.loc[individual_code]['Capex_Total'] = data_processed.loc[individual_code]['Capex_Centralized'] + data_processed.loc[individual_code]['Capex_Decentralized']
                data_processed.loc[individual_code]['Opex_Total'] = data_processed.loc[individual_code]['Opex_Centralized'] + data_processed.loc[individual_code]['Opex_Decentralized']

        individual_names = ['ind' + str(i) for i in data_processed.index.values]
        data_processed['indiv'] = individual_names
        data_processed.set_index('indiv', inplace=True)
        return data_processed
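
# The per-row writes in the method above use chained indexing
# (data_processed.loc[i][column] = value), which only updates the frame when pandas
# returns a view rather than a copy. A short sketch of the unambiguous single-step
# form, with placeholder column names and values:
import numpy as np
import pandas as pd

demo = pd.DataFrame(np.zeros((1, 2)), columns=['Opex_Total', 'Capex_Total'])
demo.loc[0, 'Opex_Total'] = 1.5e6   # one .loc[row, column] write, no chaining
demo.loc[0, 'Capex_Total'] = 4.2e6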
# Example 7
    print ('combined euclidean distance = ' + str(combined_euclidean_distance))
    print ('spread = ' + str(spread_final))

    return combined_euclidean_distance, spread_final


if __name__ == "__main__":
    config = cea.config.Configuration()
    gv = cea.globalvar.GlobalVariables()
    locator = cea.inputlocator.InputLocator(scenario=config.scenario)
    weather_file = config.weather
    total_demand = pd.read_csv(locator.get_total_demand())
    building_names = total_demand.Name.values
    gv.num_tot_buildings = total_demand.Name.count()
    lca = lca_calculations(locator, config)
    prices = Prices(locator, config)
    extra_costs, extra_CO2, extra_primary_energy, solar_features = preproccessing(locator, total_demand, building_names,
                                                                             weather_file, gv, config,
                                                                             prices, lca)

    # optimize the distribution and linearize the results (at the moment, there is only a linearization of values in Zug)
    print("NETWORK OPTIMIZATION")
    nBuildings = len(building_names)


    network_features = network_opt_main.network_opt_main(config, locator)


    non_dominated_sorting_genetic_algorithm(locator, building_names, extra_costs, extra_CO2, extra_primary_energy, solar_features,
                                            network_features, gv, config, prices, lca)
# Example 8
def main(config):
    """
    run the whole optimization routine
    """
    gv = cea.globalvar.GlobalVariables()
    locator = cea.inputlocator.InputLocator(scenario=config.scenario)
    weather_file = config.weather

    try:
        if not demand_files_exist(config, locator):
            raise ValueError(
                "Missing demand data of the scenario. Consider running demand script first"
            )

        if not os.path.exists(locator.get_total_demand()):
            raise ValueError(
                "Missing total demand of the scenario. Consider running demand script first"
            )

        if not os.path.exists(locator.PV_totals()):
            raise ValueError(
                "Missing PV potential of the scenario. Consider running photovoltaic script first"
            )

        if config.district_heating_network:
            if not os.path.exists(locator.PVT_totals()):
                raise ValueError(
                    "Missing PVT potential of the scenario. Consider running photovoltaic-thermal script first"
                )

        if not os.path.exists(locator.SC_totals(panel_type='FP')):
            raise ValueError(
                "Missing SC potential of panel type 'FP' of the scenario. Consider running solar-collector script first with panel_type as SC1 and t-in-SC as 75"
            )

        if not os.path.exists(locator.SC_totals(panel_type='ET')):
            raise ValueError(
                "Missing SC potential of panel type 'ET' of the scenario. Consider running solar-collector script first with panel_type as SC2 and t-in-SC as 150"
            )

        if not os.path.exists(locator.get_sewage_heat_potential()):
            raise ValueError(
                "Missing sewage potential of the scenario. Consider running sewage heat exchanger script first"
            )

        if not os.path.exists(
                locator.get_optimization_network_edge_list_file(
                    config.thermal_network.network_type, '')):
            raise ValueError(
                "Missing network edge list. Consider running thermal network script first"
            )
    except ValueError as err:
        import sys
        print(err)
        sys.exit(1)

    # read total demand file and names and number of all buildings
    total_demand = pd.read_csv(locator.get_total_demand())
    building_names = total_demand.Name.values
    gv.num_tot_buildings = total_demand.Name.count()
    prices = Prices(locator, config)
    lca = lca_calculations(locator, config)

    # pre-process information regarding resources and technologies (they are treated before the optimization)
    # optimize best systems for every individual building (they will compete against a district distribution solution)
    extra_costs, extra_CO2, extra_primary_energy, solarFeat = preproccessing(
        locator, total_demand, building_names, weather_file, gv, config,
        prices, lca)

    # optimize the distribution and linearize the results(at the moment, there is only a linearization of values in Zug)
    network_features = network_opt_main.network_opt_main(config, locator)

    ## generate individual from config
    # heating technologies at the centralized plant
    heating_block = [
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 90.0,
        6
    ]
    # FIXME: connect PV to config
    # cooling technologies at the centralized plant
    centralized_vcc_size = config.supply_system_simulation.centralized_vcc
    centralized_ach_size = config.supply_system_simulation.centralized_ach
    centralized_storage_size = config.supply_system_simulation.centralized_storage
    cooling_block = [0, 0, 1, 0.3, 1, 0.4, 1, 0.2, 6, 7]
    cooling_block[2:4] = [1, centralized_vcc_size] if (centralized_vcc_size != 0) else [0, 0]
    cooling_block[4:6] = [1, centralized_ach_size] if (centralized_ach_size != 0) else [0, 0]
    cooling_block[6:8] = [1, centralized_storage_size] if (centralized_storage_size != 0) else [0, 0]

    total_demand = pd.read_csv(locator.get_total_demand())
    building_names = total_demand.Name.values
    # read list of buildings connected to DC from config
    if len(config.supply_system_simulation.dc_connected_buildings) == 0:
        dc_connected_buildings = building_names  # default, all connected
    else:
        dc_connected_buildings = config.supply_system_simulation.dc_connected_buildings
    # dc_connected_buildings = building_names  # default, all connected

    # buildings connected to networks
    heating_network = [0] * building_names.size
    cooling_network = [0] * building_names.size
    for building in dc_connected_buildings:
        index = np.where(building_names == building)[0][0]
        cooling_network[index] = 1

    individual = heating_block + cooling_block + heating_network + cooling_network
    # individual = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.01,1,0.535812211,0,0,0,0,10,7,1,0,1,1,0,1,0,0,0,0,1,1,1,1,0,1,1,0,1,1]

    supply_calculation(individual, building_names, total_demand, locator,
                       extra_costs, extra_CO2, extra_primary_energy, solarFeat,
                       network_features, gv, config, prices, lca)

    print 'Buildings connected to thermal network:', dc_connected_buildings
    print 'Centralized systems:', centralized_vcc_size, 'VCC', centralized_ach_size, 'ACH', centralized_storage_size
    print 'Decentralized systems:', config.supply_system_simulation.decentralized_systems
    print 'supply calculation succeeded!'
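
The connection vectors assembled above are plain 0/1 flags, one per building and in the same order as building_names; concatenated with the technology blocks they form the individual, and joined into a string they become the DHN/DCN "barcodes" used to identify the network summary files. A minimal, self-contained sketch of that mapping (the three building names are hypothetical and for illustration only):

import numpy as np

building_names = np.array(['B01', 'B02', 'B03'])  # hypothetical names, illustration only
dc_connected_buildings = ['B01', 'B03']           # B02 stays decentralized

cooling_network = [0] * building_names.size       # one flag per building, same order as building_names
for building in dc_connected_buildings:
    index = np.where(building_names == building)[0][0]
    cooling_network[index] = 1

# string form used later to identify the network summary files
DCN_barcode = "".join(str(flag) for flag in cooling_network)
print(cooling_network)  # [1, 0, 1]
print(DCN_barcode)      # "101"
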
Ejemplo n.º 9
0
def supply_calculation(individual, building_names, total_demand, locator,
                       extra_costs, extra_CO2, extra_primary_energy,
                       solar_features, network_features, gv, config, prices,
                       lca):
    """
    This function evaluates one supply system configuration of the case study.
    :param individual: a list that indicates the supply system configuration
    :type individual: list
    :param building_names: names of all building in the district
    :type building_names: ndarray
    :param locator:
    :param extra_costs: cost of decentralized supply systems
    :param extra_CO2: CO2 emission of decentralized supply systems
    :param extra_primary_energy: Primary energy of decentralized supply systems
    :param solar_features: Energy production potentials of solar technologies, including area of installed panels and annual production
    :type solar_features: dict
    :param network_features: hourly network operating conditions (thermal/pressure losses) and capital costs
    :type network_features: dict
    :param gv:
    :param config:
    :param prices:
    :return:
    """
    individual = evaluation.check_invalid(individual, len(building_names),
                                          config)

    # Initialize objective functions costs, CO2 and primary energy
    costs = 0
    CO2 = extra_CO2
    prim = extra_primary_energy
    QUncoveredDesign = 0
    QUncoveredAnnual = 0

    # Create the string representation of the individual
    DHN_barcode, DCN_barcode, DHN_configuration, DCN_configuration = sFn.individual_to_barcode(
        individual, building_names)

    # read the total loads from buildings connected to thermal networks
    if DHN_barcode.count("1") == gv.num_tot_buildings:
        network_file_name_heating = "Network_summary_result_all.csv"
        Q_DHNf_W = pd.read_csv(
            locator.get_optimization_network_all_results_summary('all'),
            usecols=["Q_DHNf_W"]).values
        Q_heating_max_W = Q_DHNf_W.max()
    elif DHN_barcode.count("1") == 0:
        network_file_name_heating = "Network_summary_result_all.csv"
        Q_heating_max_W = 0
    else:
        # Run the substation and distribution routines
        sMain.substation_main(locator,
                              total_demand,
                              building_names,
                              DHN_configuration,
                              DCN_configuration,
                              Flag=True)

        nM.network_main(locator, total_demand, building_names, config, gv,
                        DHN_barcode)

        network_file_name_heating = "Network_summary_result_" + hex(
            int(str(DHN_barcode), 2)) + ".csv"
        Q_DHNf_W = pd.read_csv(
            locator.get_optimization_network_results_summary(DHN_barcode),
            usecols=["Q_DHNf_W"]).values
        Q_heating_max_W = Q_DHNf_W.max()

    if DCN_barcode.count("1") == gv.num_tot_buildings:
        network_file_name_cooling = "Network_summary_result_all.csv"
        if individual[N_HEAT * 2] == 1:
            # heat recovery is ON: only the cooling load of space cooling and refrigeration has to be satisfied
            Q_DCNf_W = pd.read_csv(
                locator.get_optimization_network_all_results_summary('all'),
                usecols=["Q_DCNf_space_cooling_and_refrigeration_W"]).values
        else:
            Q_DCNf_W = pd.read_csv(
                locator.get_optimization_network_all_results_summary('all'),
                usecols=["Q_DCNf_space_cooling_data_center_and_refrigeration_W"]).values
        Q_cooling_max_W = Q_DCNf_W.max()
    elif DCN_barcode.count("1") == 0:
        network_file_name_cooling = "Network_summary_result_none.csv"
        Q_cooling_max_W = 0
    else:
        # Run the substation and distribution routines
        sMain.substation_main(locator,
                              total_demand,
                              building_names,
                              DHN_configuration,
                              DCN_configuration,
                              Flag=True)

        nM.network_main(locator, total_demand, building_names, config, gv,
                        DCN_barcode)

        network_file_name_cooling = "Network_summary_result_" + hex(
            int(str(DCN_barcode), 2)) + ".csv"

        if individual[N_HEAT * 2] == 1:
            # heat recovery is ON: only the cooling load of space cooling and refrigeration has to be satisfied
            Q_DCNf_W = pd.read_csv(
                locator.get_optimization_network_results_summary(DCN_barcode),
                usecols=["Q_DCNf_space_cooling_and_refrigeration_W"]).values
        else:
            Q_DCNf_W = pd.read_csv(
                locator.get_optimization_network_results_summary(DCN_barcode),
                usecols=["Q_DCNf_space_cooling_data_center_and_refrigeration_W"]).values
        Q_cooling_max_W = Q_DCNf_W.max()

    Q_heating_nom_W = Q_heating_max_W * (1 + Q_MARGIN_FOR_NETWORK)
    Q_cooling_nom_W = Q_cooling_max_W * (1 + Q_MARGIN_FOR_NETWORK)

    # Modify the individual with the extra GHP constraint
    try:
        cCheck.GHPCheck(individual, locator, Q_heating_nom_W, gv)
    except Exception:
        print "No GHP constraint check was possible \n"

    # Export to context
    individual_number = calc_individual_number(locator)
    master_to_slave_vars = evaluation.calc_master_to_slave_variables(
        individual, Q_heating_max_W, Q_cooling_max_W, building_names,
        individual_number, GENERATION_NUMBER)
    master_to_slave_vars.network_data_file_heating = network_file_name_heating
    master_to_slave_vars.network_data_file_cooling = network_file_name_cooling
    master_to_slave_vars.total_buildings = len(building_names)

    if master_to_slave_vars.number_of_buildings_connected_heating > 1:
        if DHN_barcode.count("0") == 0:
            master_to_slave_vars.fNameTotalCSV = locator.get_total_demand()
        else:
            master_to_slave_vars.fNameTotalCSV = os.path.join(
                locator.get_optimization_network_totals_folder(),
                "Total_%(DHN_barcode)s.csv" % locals())
    else:
        master_to_slave_vars.fNameTotalCSV = locator.get_optimization_substations_total_file(
            DHN_barcode)

    if master_to_slave_vars.number_of_buildings_connected_cooling > 1:
        if DCN_barcode.count("0") == 0:
            master_to_slave_vars.fNameTotalCSV = locator.get_total_demand()
        else:
            master_to_slave_vars.fNameTotalCSV = os.path.join(
                locator.get_optimization_network_totals_folder(),
                "Total_%(DCN_barcode)s.csv" % locals())
    else:
        master_to_slave_vars.fNameTotalCSV = locator.get_optimization_substations_total_file(
            DCN_barcode)

    # slave optimization of heating networks
    if config.optimization.isheating:
        if DHN_barcode.count("1") > 0:
            (slavePrim, slaveCO2, slaveCosts, QUncoveredDesign,
             QUncoveredAnnual) = sM.slave_main(locator, master_to_slave_vars,
                                               solar_features, gv, config,
                                               prices)
        else:
            slaveCO2 = 0
            slaveCosts = 0
            slavePrim = 0
    else:
        slaveCO2 = 0
        slaveCosts = 0
        slavePrim = 0

    costs += slaveCosts
    CO2 += slaveCO2
    prim += slavePrim

    # slave optimization of cooling networks
    # define the flag here so it is always available for the electricity calculations further below
    reduced_timesteps_flag = config.supply_system_simulation.reduced_timesteps
    if gv.ZernezFlag == 1:
        coolCosts, coolCO2, coolPrim = 0, 0, 0
    elif config.optimization.iscooling and DCN_barcode.count("1") > 0:
        (coolCosts, coolCO2,
         coolPrim) = coolMain.coolingMain(locator, master_to_slave_vars,
                                          network_features, gv, prices, lca,
                                          config, reduced_timesteps_flag)
        # if reduced_timesteps_flag:
        #     # reduced timesteps simulation for a month (May)
        #     coolCosts = coolCosts * (8760/(3624/2880))
        #     coolCO2 = coolCO2 * (8760/(3624/2880))
        #     coolPrim = coolPrim * (8760/(3624/2880))
        #     # FIXME: check results
    else:
        coolCosts, coolCO2, coolPrim = 0, 0, 0

    # print "Add extra costs"
    # add costs of disconnected buildings (best configuration)
    (addCosts, addCO2,
     addPrim) = eM.addCosts(DHN_barcode, DCN_barcode, building_names, locator,
                            master_to_slave_vars, QUncoveredDesign,
                            QUncoveredAnnual, solar_features, network_features,
                            gv, config, prices, lca)

    # recalculate the addCosts by substracting the decentralized costs and add back to corresponding supply system
    Cost_diff, CO2_diff, Prim_diff = calc_decentralized_building_costs(
        config, locator, master_to_slave_vars, DHN_barcode, DCN_barcode,
        building_names)
    addCosts = addCosts + Cost_diff
    addCO2 = addCO2 + CO2_diff
    addPrim = addPrim + Prim_diff

    # add Capex and Opex of PV
    data_electricity = pd.read_csv(
        os.path.join(
            locator.
            get_optimization_slave_electricity_activation_pattern_cooling(
                individual_number, GENERATION_NUMBER)))

    total_area_for_pv = data_electricity['Area_PV_m2'].max()
    # remove the area installed with solar collectors
    sc_installed_area = 0
    if config.supply_system_simulation.decentralized_systems == 'Single-effect Absorption Chiller':
        for (index, building_name) in zip(DCN_barcode, building_names):
            if index == "0":
                sc_installed_area = sc_installed_area + pd.read_csv(
                    locator.PV_results(building_name))['Area_PV_m2'].max()
    pv_installed_area = total_area_for_pv - sc_installed_area
    Capex_a_PV, Opex_fixed_PV = calc_Cinv_pv(pv_installed_area, locator,
                                             config)
    pv_annual_production_kWh = (data_electricity['E_PV_W'].sum()) / 1000

    # electricity calculations
    data_network_electricity = pd.read_csv(
        os.path.join(
            locator.
            get_optimization_slave_electricity_activation_pattern_cooling(
                individual_number, GENERATION_NUMBER)))

    data_cooling = pd.read_csv(
        os.path.join(
            locator.get_optimization_slave_cooling_activation_pattern(
                individual_number, GENERATION_NUMBER)))

    total_demand = pd.read_csv(locator.get_total_demand())
    building_names = total_demand.Name.values
    total_electricity_demand_W = data_network_electricity['E_total_req_W']
    E_decentralized_appliances_W = np.zeros(8760)

    # add the appliance electricity demand of the decentralized buildings (those not connected to the DCN)
    for i, name in zip(DCN_barcode, building_names):
        if i == '0':
            building_demand = pd.read_csv(
                os.path.join(locator.get_demand_results_folder(), name + ".csv"),
                usecols=['E_sys_kWh'])
            E_decentralized_appliances_W += building_demand['E_sys_kWh'] * 1000

    total_electricity_demand_W = total_electricity_demand_W.add(
        E_decentralized_appliances_W)
    E_for_hot_water_demand_W = np.zeros(8760)

    # add the electricity demand for hot water from all buildings
    for i, name in zip(DCN_barcode, building_names):
        building_demand = pd.read_csv(
            os.path.join(locator.get_demand_results_folder(), name + ".csv"),
            usecols=['E_ww_kWh'])
        E_for_hot_water_demand_W += building_demand['E_ww_kWh'] * 1000

    total_electricity_demand_W = total_electricity_demand_W.add(
        E_for_hot_water_demand_W)
    # Electricity of Energy Systems
    lca = lca_calculations(locator, config)
    E_VCC_W = data_cooling['Opex_var_VCC'] / lca.ELEC_PRICE
    E_VCC_backup_W = data_cooling['Opex_var_VCC_backup'] / lca.ELEC_PRICE
    E_ACH_W = data_cooling['Opex_var_ACH'] / lca.ELEC_PRICE
    E_CT_W = abs(data_cooling['Opex_var_CT']) / lca.ELEC_PRICE
    total_electricity_demand_W = total_electricity_demand_W.add(E_VCC_W)
    total_electricity_demand_W = total_electricity_demand_W.add(E_VCC_backup_W)
    total_electricity_demand_W = total_electricity_demand_W.add(E_ACH_W)
    total_electricity_demand_W = total_electricity_demand_W.add(E_CT_W)
    E_from_CHP_W = data_network_electricity[
        'E_CHP_to_directload_W'] + data_network_electricity['E_CHP_to_grid_W']
    E_from_PV_W = data_network_electricity[
        'E_PV_to_directload_W'] + data_network_electricity['E_PV_to_grid_W']

    E_CHP_to_directload_W = np.zeros(8760)
    E_CHP_to_grid_W = np.zeros(8760)
    E_PV_to_directload_W = np.zeros(8760)
    E_PV_to_grid_W = np.zeros(8760)
    E_from_grid_W = np.zeros(8760)

    # modify simulation timesteps
    if not reduced_timesteps_flag:
        start_t = 0
        stop_t = 8760
    else:
        # timesteps in May
        start_t = 2880
        stop_t = 3624
    timesteps = range(start_t, stop_t)

    for hour in timesteps:
        E_hour_W = total_electricity_demand_W[hour]
        if E_hour_W > 0:
            if E_from_PV_W[hour] > E_hour_W:
                E_PV_to_directload_W[hour] = E_hour_W
                E_PV_to_grid_W[hour] = E_from_PV_W[
                    hour] - total_electricity_demand_W[hour]
                E_hour_W = 0
            else:
                E_hour_W = E_hour_W - E_from_PV_W[hour]
                E_PV_to_directload_W[hour] = E_from_PV_W[hour]

            if E_from_CHP_W[hour] > E_hour_W:
                E_CHP_to_directload_W[hour] = E_hour_W
                E_CHP_to_grid_W[hour] = E_from_CHP_W[hour] - E_hour_W
                E_hour_W = 0
            else:
                E_hour_W = E_hour_W - E_from_CHP_W[hour]
                E_CHP_to_directload_W[hour] = E_from_CHP_W[hour]

            E_from_grid_W[hour] = E_hour_W

    date = data_network_electricity.DATE.values

    results = pd.DataFrame(
        {
            "DATE": date,
            "E_total_req_W": total_electricity_demand_W,
            "E_from_grid_W": E_from_grid_W,
            "E_VCC_W": E_VCC_W,
            "E_VCC_backup_W": E_VCC_backup_W,
            "E_ACH_W": E_ACH_W,
            "E_CT_W": E_CT_W,
            "E_PV_to_directload_W": E_PV_to_directload_W,
            "E_CHP_to_directload_W": E_CHP_to_directload_W,
            "E_CHP_to_grid_W": E_CHP_to_grid_W,
            "E_PV_to_grid_W": E_PV_to_grid_W,
            "E_for_hot_water_demand_W": E_for_hot_water_demand_W,
            "E_decentralized_appliances_W": E_decentralized_appliances_W,
            "E_total_to_grid_W_negative": -E_PV_to_grid_W - E_CHP_to_grid_W
        }
    )  # let's keep this negative so it is something exported, we can use it in the graphs of likelihood

    if reduced_timesteps_flag:
        reduced_el_costs = ((results['E_from_grid_W'].sum() +
                             results['E_total_to_grid_W_negative'].sum()) *
                            lca.ELEC_PRICE)
        electricity_costs = reduced_el_costs * (8760 / (stop_t - start_t))
    else:
        electricity_costs = ((results['E_from_grid_W'].sum() +
                              results['E_total_to_grid_W_negative'].sum()) *
                             lca.ELEC_PRICE)

    # emission from data
    data_emissions = pd.read_csv(
        os.path.join(
            locator.get_optimization_slave_investment_cost_detailed(
                individual_number, GENERATION_NUMBER)))
    update_PV_emission = abs(
        2 * data_emissions['CO2_PV_disconnected']).values[0]  # kg-CO2
    update_PV_primary = abs(
        2 * data_emissions['Eprim_PV_disconnected']).values[0]  # MJ oil-eq

    costs += addCosts + coolCosts + electricity_costs + Capex_a_PV + Opex_fixed_PV
    CO2 = CO2 + addCO2 + coolCO2 - update_PV_emission
    prim = prim + addPrim + coolPrim - update_PV_primary
    # Converting costs into float64 to avoid longer values
    costs = (np.float64(costs) / 1e6).round(2)  # $ to Mio$
    CO2 = (np.float64(CO2) / 1e6).round(2)  # kg to kilo-ton
    prim = (np.float64(prim) / 1e6).round(2)  # MJ to TJ

    # add electricity costs corresponding to

    # print ('Additional costs = ' + str(addCosts))
    # print ('Additional CO2 = ' + str(addCO2))
    # print ('Additional prim = ' + str(addPrim))

    print('Total annualized costs [USD$(2015) Mio/yr] = ' + str(costs))
    print('Green house gas emission [kton-CO2/yr] = ' + str(CO2))
    print('Primary energy [TJ-oil-eq/yr] = ' + str(prim))

    results = {
        'TAC_Mio_per_yr': [costs.round(2)],
        'CO2_kton_per_yr': [CO2.round(2)],
        'Primary_Energy_TJ_per_yr': [prim.round(2)]
    }
    results_df = pd.DataFrame(results)
    results_path = os.path.join(
        locator.get_optimization_slave_results_folder(GENERATION_NUMBER),
        'ind_' + str(individual_number) + '_results.csv')
    results_df.to_csv(results_path)

    with open(locator.get_optimization_checkpoint_initial(), "wb") as fp:
        pop = []
        g = GENERATION_NUMBER
        epsInd = []
        invalid_ind = []
        fitnesses = []
        capacities = []
        disconnected_capacities = []
        halloffame = []
        halloffame_fitness = []
        euclidean_distance = []
        spread = []
        cp = dict(population=pop,
                  generation=g,
                  epsIndicator=epsInd,
                  testedPop=invalid_ind,
                  population_fitness=fitnesses,
                  capacities=capacities,
                  disconnected_capacities=disconnected_capacities,
                  halloffame=halloffame,
                  halloffame_fitness=halloffame_fitness,
                  euclidean_distance=euclidean_distance,
                  spread=spread)
        json.dump(cp, fp)

    return costs, CO2, prim, master_to_slave_vars, individual
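
The hourly loop in supply_calculation allocates on-site generation to the electricity demand with a fixed priority: PV is used first, then CHP, and whatever remains is imported from the grid, while any surplus is exported. A self-contained sketch of that dispatch rule for a single hour (the numbers are made up for illustration):

def dispatch_hour(demand_W, pv_W, chp_W):
    """Split one hour of demand between PV, CHP and the grid, with PV taking priority over CHP."""
    pv_direct = min(pv_W, demand_W)
    pv_export = pv_W - pv_direct
    remaining = demand_W - pv_direct
    chp_direct = min(chp_W, remaining)
    chp_export = chp_W - chp_direct
    grid_import = remaining - chp_direct
    return pv_direct, pv_export, chp_direct, chp_export, grid_import

# example hour: 100 kW demand, 30 kW of PV and 120 kW of CHP available
print(dispatch_hour(100e3, 30e3, 120e3))  # (30000.0, 0.0, 70000.0, 50000.0, 0.0)
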
Ejemplo n.º 10
0
def individual_evaluation(generation, level, size, variable_groups):
    """
    :param generation: Generation of the optimization in which the individual evaluation is to be done
    :type generation: int
    :param level: Number of the uncertain scenario. For each scenario, the objectives are calculated
    :type level: int
    :param size: Total uncertain scenarios developed. See 'uncertainty.csv'
    :type size: int
    :return: Function saves the new objectives in a json file
    """

    from cea.optimization.preprocessing.preprocessing_main import preproccessing
    gv = cea.globalvar.GlobalVariables()
    scenario_path = gv.scenario_reference
    locator = cea.inputlocator.InputLocator(scenario_path)
    config = cea.config.Configuration()
    weather_file = locator.get_default_weather()

    with open(
            os.path.join(locator.get_optimization_master_results_folder(),
                         "CheckPoint_" + str(generation)), "rb") as fp:
        data = json.load(fp)

    pop = data['population']
    ntwList = data['networkList']

    # Uncertainty Part
    row = []
    with open(os.path.join(locator.get_uncertainty_results_folder(),
                           'uncertainty.csv')) as f:
        reader = csv.reader(f)
        for i in xrange(size + 1):
            row.append(next(reader))

    j = level + 1

    for i in xrange(len(row[0]) - 1):
        setattr(gv, row[0][i + 1], float(row[j][i + 1]))

    total_demand = pd.read_csv(locator.get_total_demand())
    building_names = total_demand.Name.values
    gv.num_tot_buildings = total_demand.Name.count()
    lca = lca_calculations(locator, config)
    prices = Prices(locator, config)

    extra_costs, extra_CO2, extra_primary_energy, solarFeat = preproccessing(
        locator, total_demand, building_names, weather_file, gv, config,
        prices, lca)
    network_features = network_opt.network_opt_main()

    def objective_function(ind, ind_num):
        (costs, CO2, prim) = evaluation.evaluation_main(
            ind, building_names, locator, solarFeat, network_features, gv,
            config, prices, lca, ind_num, generation)
        # print (costs, CO2, prim)
        return (costs, CO2, prim)

    fitness = []
    for i in xrange(gv.initialInd):
        evaluation.checkNtw(pop[i], ntwList, locator, gv)
        fitness.append(objective_function(pop[i], i))

    with open(locator.get_uncertainty_checkpoint(level), "wb") as fp:
        cp = dict(population=pop,
                  uncertainty_level=level,
                  population_fitness=fitness)
        json.dump(cp, fp)
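
The uncertainty handling in individual_evaluation boils down to reading one row of 'uncertainty.csv' (the header row carries the parameter names) and overriding the matching attributes of the global-variables object with setattr. A small self-contained sketch of that pattern; the stub class and the parameter names (eta_boiler, elec_price) are made up for illustration:

class GlobalVariablesStub(object):
    """Stand-in for the global-variables object, for illustration only."""
    eta_boiler = 0.9
    elec_price = 0.20

def apply_uncertainty_level(gv, rows, level):
    # rows[0] holds the parameter names (first column is a scenario index);
    # rows[level + 1] holds the sampled values for that uncertainty level
    names, values = rows[0][1:], rows[level + 1][1:]
    for name, value in zip(names, values):
        setattr(gv, name, float(value))
    return gv

rows = [['scenario', 'eta_boiler', 'elec_price'],  # header, as read from uncertainty.csv
        ['0', '0.85', '0.25'],                     # uncertainty level 0
        ['1', '0.80', '0.30']]                     # uncertainty level 1
gv = apply_uncertainty_level(GlobalVariablesStub(), rows, level=0)
print(gv.eta_boiler)  # 0.85
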
def electricity_import_and_exports(generation, individual, locator, config):
    category = "optimization-detailed"

    data_network_electricity = pd.read_csv(
        os.path.join(
            locator.
            get_optimization_slave_electricity_activation_pattern_cooling(
                individual, generation)))

    data_cooling = pd.read_csv(
        os.path.join(
            locator.get_optimization_slave_cooling_activation_pattern(
                individual, generation)))

    all_individuals_of_generation = pd.read_csv(
        locator.get_optimization_individuals_in_generation(generation))

    data_current_individual = all_individuals_of_generation[np.isclose(
        all_individuals_of_generation['individual'], individual)]
    total_demand = pd.read_csv(locator.get_total_demand())
    building_names = total_demand.Name.values

    total_electricity_demand_W = data_network_electricity['E_total_req_W']
    E_decentralized_appliances_W = np.zeros(8760)

    DCN_barcode = ""
    for name in building_names:  # identifying the DCN code
        DCN_barcode += str(
            int(data_current_individual[name + ' DCN'].values[0]))

    # add the appliance electricity demand of the decentralized buildings (those not connected to the DCN)
    for i, name in zip(DCN_barcode, building_names):
        if i == '0':
            building_demand = pd.read_csv(
                os.path.join(locator.get_demand_results_folder(), name + ".csv"),
                usecols=['E_sys_kWh'])
            E_decentralized_appliances_W += building_demand['E_sys_kWh'] * 1000

    total_electricity_demand_W = total_electricity_demand_W.add(
        E_decentralized_appliances_W)

    E_appliances_total_W = np.zeros(8760)
    for name in building_names:  # total appliance electricity demand of all buildings
        building_demand = pd.read_csv(
            os.path.join(locator.get_demand_results_folder(), name + ".csv"),
            usecols=['E_sys_kWh'])
        E_appliances_total_W += building_demand['E_sys_kWh'] * 1000

    E_for_hot_water_demand_W = np.zeros(8760)

    # add the electricity demand for hot water from all buildings
    for i, name in zip(DCN_barcode, building_names):
        building_demand = pd.read_csv(
            os.path.join(locator.get_demand_results_folder(), name + ".csv"),
            usecols=['E_ww_kWh'])
        E_for_hot_water_demand_W += building_demand['E_ww_kWh'] * 1000

    total_electricity_demand_W = total_electricity_demand_W.add(
        E_for_hot_water_demand_W)
    # Electricity of Energy Systems
    lca = lca_calculations(locator, config)

    E_VCC_W = data_cooling['Opex_var_VCC'] / lca.ELEC_PRICE
    E_VCC_backup_W = data_cooling['Opex_var_VCC_backup'] / lca.ELEC_PRICE
    E_ACH_W = data_cooling['Opex_var_ACH'] / lca.ELEC_PRICE
    E_CT_W = abs(data_cooling['Opex_var_CT']) / lca.ELEC_PRICE
    total_electricity_demand_W = total_electricity_demand_W.add(E_VCC_W)
    total_electricity_demand_W = total_electricity_demand_W.add(E_VCC_backup_W)
    total_electricity_demand_W = total_electricity_demand_W.add(E_ACH_W)
    total_electricity_demand_W = total_electricity_demand_W.add(E_CT_W)

    E_from_CHP_W = data_network_electricity[
        'E_CHP_to_directload_W'] + data_network_electricity['E_CHP_to_grid_W']
    E_from_PV_W = data_network_electricity[
        'E_PV_to_directload_W'] + data_network_electricity['E_PV_to_grid_W']

    E_CHP_to_directload_W = np.zeros(8760)
    E_CHP_to_grid_W = np.zeros(8760)
    E_PV_to_directload_W = np.zeros(8760)
    E_PV_to_grid_W = np.zeros(8760)
    E_from_grid_W = np.zeros(8760)

    for hour in range(8760):
        E_hour_W = total_electricity_demand_W[hour]
        if E_hour_W > 0:
            if E_from_PV_W[hour] > E_hour_W:
                E_PV_to_directload_W[hour] = E_hour_W
                E_PV_to_grid_W[hour] = E_from_PV_W[
                    hour] - total_electricity_demand_W[hour]
                E_hour_W = 0
            else:
                E_hour_W = E_hour_W - E_from_PV_W[hour]
                E_PV_to_directload_W[hour] = E_from_PV_W[hour]

            if E_from_CHP_W[hour] > E_hour_W:
                E_CHP_to_directload_W[hour] = E_hour_W
                E_CHP_to_grid_W[hour] = E_from_CHP_W[hour] - E_hour_W
                E_hour_W = 0
            else:
                E_hour_W = E_hour_W - E_from_CHP_W[hour]
                E_CHP_to_directload_W[hour] = E_from_CHP_W[hour]

            E_from_grid_W[hour] = E_hour_W

    date = data_network_electricity.DATE.values

    results = pd.DataFrame(
        {
            "DATE": date,
            "E_total_req_W": total_electricity_demand_W,
            "E_from_grid_W": E_from_grid_W,
            "E_VCC_W": E_VCC_W,
            "E_VCC_backup_W": E_VCC_backup_W,
            "E_ACH_W": E_ACH_W,
            "E_CT_W": E_CT_W,
            "E_PV_to_directload_W": E_PV_to_directload_W,
            "E_CHP_to_directload_W": E_CHP_to_directload_W,
            "E_CHP_to_grid_W": E_CHP_to_grid_W,
            "E_PV_to_grid_W": E_PV_to_grid_W,
            "E_for_hot_water_demand_W": E_for_hot_water_demand_W,
            "E_decentralized_appliances_W": E_decentralized_appliances_W,
            "E_appliances_total_W": E_appliances_total_W,
            "E_total_to_grid_W_negative": -E_PV_to_grid_W - E_CHP_to_grid_W
        }
    )  #let's keep this negative so it is something exported, we can use it in the graphs of likelihood

    results.to_csv(
        locator.
        get_optimization_slave_electricity_activation_pattern_processed(
            individual, generation),
        index=False)

    return results
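
Because electricity_import_and_exports writes hourly series (one row per hour, values in W), annual totals can be recovered by summing the relevant columns of the returned DataFrame; each sum is Wh per year, so dividing by 1000 gives kWh. A minimal post-processing sketch, assuming the column names written above:

def annual_electricity_balance_kWh(results):
    """Aggregate the hourly electricity results into annual totals in kWh."""
    return {
        'imports_kWh': results['E_from_grid_W'].sum() / 1000.0,
        'exports_kWh': -results['E_total_to_grid_W_negative'].sum() / 1000.0,  # stored as negative values
        'pv_to_load_kWh': results['E_PV_to_directload_W'].sum() / 1000.0,
        'chp_to_load_kWh': results['E_CHP_to_directload_W'].sum() / 1000.0,
    }

# usage (hypothetical):
# results = electricity_import_and_exports(generation, individual, locator, config)
# print(annual_electricity_balance_kWh(results))
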
Ejemplo n.º 12
0
def coolingMain(locator, master_to_slave_vars, ntwFeat, gv, prices, config):
    """
    Computes the parameters for the cooling of the complete DCN

    :param locator: path to res folder
    :param ntwFeat: network features
    :param gv: global variables
    :param prices: Prices imported from the database
    :type locator: string
    :type ntwFeat: class
    :type gv: class
    :type prices: class
    :return: costs, co2, prim
    :rtype: tuple
    """

    ############# Recover the cooling needs
    # Cooling demands in a neighborhood currently fall into three categories:
    # 1. space cooling in buildings
    # 2. data centre cooling
    # 3. refrigeration needs
    # Data centre cooling can also be covered by recovering its heat to serve other demands at the same time,
    # whereas space cooling and refrigeration must be supplied by the district cooling network (DCN) or by
    # decentralized cooling. Currently, all buildings are assumed to be connected to the DCN.
    # In the code below, the space cooling and refrigeration demands are first satisfied using the lake and the VCC.
    # Heat recovery from the data centre is then checked: if it is allowed, the corresponding cooling demand is
    # ignored; if not, that cooling demand is also satisfied by the DCN (see the short sketch after this example).

    t0 = time.time()
    lca = lca_calculations(locator, config)
    print('Cooling Main is Running')

    # Space cooling previously aggregated in the substation routine
    if master_to_slave_vars.WasteServersHeatRecovery == 1:
        df = pd.read_csv(
            locator.get_optimization_network_data_folder(
                master_to_slave_vars.network_data_file_cooling),
            usecols=[
                "T_DCNf_space_cooling_and_refrigeration_sup_K",
                "T_DCNf_space_cooling_and_refrigeration_re_K",
                "mdot_cool_space_cooling_and_refrigeration_netw_all_kgpers"
            ])
        df = df.fillna(0)
        T_sup_K = df['T_DCNf_space_cooling_and_refrigeration_sup_K'].values
        T_re_K = df['T_DCNf_space_cooling_and_refrigeration_re_K'].values
        mdot_kgpers = df[
            'mdot_cool_space_cooling_and_refrigeration_netw_all_kgpers'].values
    else:
        df = pd.read_csv(
            locator.get_optimization_network_data_folder(
                master_to_slave_vars.network_data_file_cooling),
            usecols=[
                "T_DCNf_space_cooling_data_center_and_refrigeration_sup_K",
                "T_DCNf_space_cooling_data_center_and_refrigeration_re_K",
                "mdot_cool_space_cooling_data_center_and_refrigeration_netw_all_kgpers"
            ])
        df = df.fillna(0)
        T_sup_K = df[
            'T_DCNf_space_cooling_data_center_and_refrigeration_sup_K'].values
        T_re_K = df[
            'T_DCNf_space_cooling_data_center_and_refrigeration_re_K'].values
        mdot_kgpers = df[
            'mdot_cool_space_cooling_data_center_and_refrigeration_netw_all_kgpers'].values
    DCN_operation_parameters = df.fillna(0)
    DCN_operation_parameters_array = DCN_operation_parameters.values

    Qc_DCN_W = np.array(
        pd.read_csv(locator.get_optimization_network_data_folder(
            master_to_slave_vars.network_data_file_cooling),
                    usecols=[
                        "Q_DCNf_space_cooling_and_refrigeration_W",
                        "Q_DCNf_space_cooling_data_center_and_refrigeration_W"
                    ])
    )  # importing the cooling demands of DCN (space cooling + refrigeration)
    # Data center cooling, (treated separately for each building)
    df = pd.read_csv(locator.get_total_demand(),
                     usecols=["Name", "Qcdata_sys_MWhyr"])
    arrayData = np.array(df)

    # total cooling requirements based on the Heat Recovery Flag
    Q_cooling_req_W = np.zeros(8760)
    if master_to_slave_vars.WasteServersHeatRecovery == 0:
        for hour in range(
                8760
        ):  # summing cooling loads of space cooling, refrigeration and data center
            Q_cooling_req_W[hour] = Qc_DCN_W[hour][1]
    else:
        for hour in range(
                8760
        ):  # only including cooling loads of space cooling and refrigeration
            Q_cooling_req_W[hour] = Qc_DCN_W[hour][0]

    ############# Recover the heat already taken from the Lake by the heat pumps
    if config.district_heating_network:
        try:
            dfSlave = pd.read_csv(
                locator.get_optimization_slave_heating_activation_pattern(
                    master_to_slave_vars.individual_number,
                    master_to_slave_vars.generation_number),
                usecols=["Q_coldsource_HPLake_W"])
            Q_Lake_Array_W = np.array(dfSlave)

        except:
            Q_Lake_Array_W = [0]
    else:
        Q_Lake_Array_W = [0]

    ### input parameters
    Qc_VCC_max_W = master_to_slave_vars.VCC_cooling_size
    Qc_ACH_max_W = master_to_slave_vars.Absorption_chiller_size

    T_ground_K = calculate_ground_temperature(locator, config)

    # sizing cold water storage tank
    if master_to_slave_vars.Storage_cooling_size > 0:
        Qc_tank_discharge_peak_W = master_to_slave_vars.Storage_cooling_size
        Qc_tank_charge_max_W = (
            Qc_VCC_max_W +
            Qc_ACH_max_W) * 0.8  # assume reduced capacity when Tsup is lower
        peak_hour = np.argmax(Q_cooling_req_W)
        area_HEX_tank_discharege_m2, UA_HEX_tank_discharge_WperK, \
        area_HEX_tank_charge_m2, UA_HEX_tank_charge_WperK, \
        V_tank_m3 = storage_tank.calc_storage_tank_properties(DCN_operation_parameters, Qc_tank_charge_max_W,
                                                              Qc_tank_discharge_peak_W, peak_hour, master_to_slave_vars)
    else:
        Qc_tank_discharge_peak_W = 0
        Qc_tank_charge_max_W = 0
        area_HEX_tank_discharege_m2 = 0
        UA_HEX_tank_discharge_WperK = 0
        area_HEX_tank_charge_m2 = 0
        UA_HEX_tank_charge_WperK = 0
        V_tank_m3 = 0

    VCC_cost_data = pd.read_excel(locator.get_supply_systems(config.region),
                                  sheetname="Chiller")
    VCC_cost_data = VCC_cost_data[VCC_cost_data['code'] == 'CH3']
    max_VCC_chiller_size = max(VCC_cost_data['cap_max'].values)

    Absorption_chiller_cost_data = pd.read_excel(
        locator.get_supply_systems(config.region),
        sheetname="Absorption_chiller",
        usecols=[
            'type', 'code', 'cap_min', 'cap_max', 'a', 'b', 'c', 'd', 'e',
            'IR_%', 'LT_yr', 'O&M_%'
        ])
    Absorption_chiller_cost_data = Absorption_chiller_cost_data[
        Absorption_chiller_cost_data['type'] == ACH_TYPE_DOUBLE]
    max_ACH_chiller_size = max(Absorption_chiller_cost_data['cap_max'].values)

    # deciding the number of chillers and the nominal size based on the maximum chiller size
    Qc_VCC_max_W = Qc_VCC_max_W * (1 + SIZING_MARGIN)
    Qc_ACH_max_W = Qc_ACH_max_W * (1 + SIZING_MARGIN)
    Q_peak_load_W = Q_cooling_req_W.max() * (1 + SIZING_MARGIN)

    Qc_VCC_backup_max_W = (Q_peak_load_W - Qc_ACH_max_W - Qc_VCC_max_W -
                           Qc_tank_discharge_peak_W)

    if Qc_VCC_backup_max_W < 0:
        Qc_VCC_backup_max_W = 0

    if Qc_VCC_max_W <= max_VCC_chiller_size:
        Qnom_VCC_W = Qc_VCC_max_W
        number_of_VCC_chillers = 1
    else:
        number_of_VCC_chillers = int(ceil(Qc_VCC_max_W / max_VCC_chiller_size))
        Qnom_VCC_W = Qc_VCC_max_W / number_of_VCC_chillers

    if Qc_VCC_backup_max_W <= max_VCC_chiller_size:
        Qnom_VCC_backup_W = Qc_VCC_backup_max_W
        number_of_VCC_backup_chillers = 1
    else:
        number_of_VCC_backup_chillers = int(
            ceil(Qc_VCC_backup_max_W / max_VCC_chiller_size))
        Qnom_VCC_backup_W = Qc_VCC_backup_max_W / number_of_VCC_backup_chillers

    if Qc_ACH_max_W <= max_ACH_chiller_size:
        Qnom_ACH_W = Qc_ACH_max_W
        number_of_ACH_chillers = 1
    else:
        number_of_ACH_chillers = int(ceil(Qc_ACH_max_W / max_ACH_chiller_size))
        Qnom_ACH_W = Qc_ACH_max_W / number_of_ACH_chillers

    limits = {
        'Qc_VCC_max_W': Qc_VCC_max_W,
        'Qc_ACH_max_W': Qc_ACH_max_W,
        'Qc_peak_load_W': Qc_tank_discharge_peak_W,
        'Qnom_VCC_W': Qnom_VCC_W,
        'number_of_VCC_chillers': number_of_VCC_chillers,
        'Qnom_ACH_W': Qnom_ACH_W,
        'number_of_ACH_chillers': number_of_ACH_chillers,
        'Qnom_VCC_backup_W': Qnom_VCC_backup_W,
        'number_of_VCC_backup_chillers': number_of_VCC_backup_chillers,
        'Qc_tank_discharge_peak_W': Qc_tank_discharge_peak_W,
        'Qc_tank_charge_max_W': Qc_tank_charge_max_W,
        'V_tank_m3': V_tank_m3,
        'T_tank_fully_charged_K': T_TANK_FULLY_CHARGED_K,
        'area_HEX_tank_discharge_m2': area_HEX_tank_discharege_m2,
        'UA_HEX_tank_discharge_WperK': UA_HEX_tank_discharge_WperK,
        'area_HEX_tank_charge_m2': area_HEX_tank_charge_m2,
        'UA_HEX_tank_charge_WperK': UA_HEX_tank_charge_WperK
    }

    ### input variables
    lake_available_cooling = pd.read_csv(locator.get_lake_potential(),
                                         usecols=['lake_potential'])
    Qc_available_from_lake_W = np.sum(
        lake_available_cooling).values[0] + np.sum(Q_Lake_Array_W)
    Qc_from_lake_cumulative_W = 0
    cooling_resource_potentials = {
        'T_tank_K': T_TANK_FULLY_DISCHARGED_K,
        'Qc_avail_from_lake_W': Qc_available_from_lake_W,
        'Qc_from_lake_cumulative_W': Qc_from_lake_cumulative_W
    }

    ############# Output results
    costs_USD = ntwFeat.pipesCosts_DCN
    CO2 = 0
    prim = 0

    nBuild = int(np.shape(arrayData)[0])
    nHour = int(np.shape(DCN_operation_parameters)[0])

    calfactor_buildings = np.zeros(8760)
    TotalCool = 0
    Qc_from_Lake_W = np.zeros(8760)
    Qc_from_VCC_W = np.zeros(8760)
    Qc_from_ACH_W = np.zeros(8760)
    Qc_from_storage_tank_W = np.zeros(8760)
    Qc_from_VCC_backup_W = np.zeros(8760)

    Qc_req_from_CT_W = np.zeros(8760)
    Qh_req_from_CCGT_W = np.zeros(8760)
    Qh_from_CCGT_W = np.zeros(8760)
    E_gen_CCGT_W = np.zeros(8760)

    opex_var_Lake = np.zeros(8760)
    opex_var_VCC = np.zeros(8760)
    opex_var_ACH = np.zeros(8760)
    opex_var_VCC_backup = np.zeros(8760)
    opex_var_CCGT = np.zeros(8760)
    opex_var_CT = np.zeros(8760)
    co2_Lake = np.zeros(8760)
    co2_VCC = np.zeros(8760)
    co2_ACH = np.zeros(8760)
    co2_VCC_backup = np.zeros(8760)
    co2_CCGT = np.zeros(8760)
    co2_CT = np.zeros(8760)
    prim_energy_Lake = np.zeros(8760)
    prim_energy_VCC = np.zeros(8760)
    prim_energy_ACH = np.zeros(8760)
    prim_energy_VCC_backup = np.zeros(8760)
    prim_energy_CCGT = np.zeros(8760)
    prim_energy_CT = np.zeros(8760)
    calfactor_total = 0

    # the simulation covers only the month of May (hours 2906-3648); the results are later multiplied
    # by 12 to approximate the entire year
    for hour in range(2906, 3649):  # cooling supply for all buildings, excluding data centre cooling loads
        performance_indicators_output, \
        Qc_supply_to_DCN, calfactor_output, \
        Qc_CT_W, Qh_CHP_ACH_W, \
        cooling_resource_potentials = cooling_resource_activator(mdot_kgpers[hour], T_sup_K[hour], T_re_K[hour],
                                                                 limits, cooling_resource_potentials,
                                                                 T_ground_K[hour], prices, master_to_slave_vars, config, Q_cooling_req_W[hour], locator)

        print(hour)
        # save results for each time-step
        opex_var_Lake[hour] = performance_indicators_output['Opex_var_Lake']
        opex_var_VCC[hour] = performance_indicators_output['Opex_var_VCC']
        opex_var_ACH[hour] = performance_indicators_output['Opex_var_ACH']
        opex_var_VCC_backup[hour] = performance_indicators_output[
            'Opex_var_VCC_backup']
        co2_Lake[hour] = performance_indicators_output['CO2_Lake']
        co2_VCC[hour] = performance_indicators_output['CO2_VCC']
        co2_ACH[hour] = performance_indicators_output['CO2_ACH']
        co2_VCC_backup[hour] = performance_indicators_output['CO2_VCC_backup']
        prim_energy_Lake[hour] = performance_indicators_output[
            'Primary_Energy_Lake']
        prim_energy_VCC[hour] = performance_indicators_output[
            'Primary_Energy_VCC']
        prim_energy_ACH[hour] = performance_indicators_output[
            'Primary_Energy_ACH']
        prim_energy_VCC_backup[hour] = performance_indicators_output[
            'Primary_Energy_VCC_backup']
        calfactor_buildings[hour] = calfactor_output
        Qc_from_Lake_W[hour] = Qc_supply_to_DCN['Qc_from_Lake_W']
        Qc_from_storage_tank_W[hour] = Qc_supply_to_DCN['Qc_from_Tank_W']
        Qc_from_VCC_W[hour] = Qc_supply_to_DCN['Qc_from_VCC_W']
        Qc_from_ACH_W[hour] = Qc_supply_to_DCN['Qc_from_ACH_W']
        Qc_from_VCC_backup_W[hour] = Qc_supply_to_DCN['Qc_from_backup_VCC_W']
        Qc_req_from_CT_W[hour] = Qc_CT_W
        Qh_req_from_CCGT_W[hour] = Qh_CHP_ACH_W

    costs_USD += (np.sum(opex_var_Lake) + np.sum(opex_var_VCC) +
                  np.sum(opex_var_ACH) + np.sum(opex_var_VCC_backup)) * 12
    CO2 += (np.sum(co2_Lake) + np.sum(co2_VCC) + np.sum(co2_ACH) +
            np.sum(co2_VCC_backup)) * 12
    prim += (np.sum(prim_energy_Lake) + np.sum(prim_energy_VCC) +
             np.sum(prim_energy_ACH) + np.sum(prim_energy_VCC_backup)) * 12
    calfactor_total += (np.sum(calfactor_buildings)) * 12
    TotalCool += np.sum(Qc_from_Lake_W) + np.sum(Qc_from_VCC_W) + np.sum(
        Qc_from_ACH_W) + np.sum(Qc_from_VCC_backup_W) + np.sum(
            Qc_from_storage_tank_W)
    Q_VCC_nom_W = limits['Qnom_VCC_W']
    Q_ACH_nom_W = limits['Qnom_ACH_W']
    Q_VCC_backup_nom_W = limits['Qnom_VCC_backup_W']
    Q_CT_nom_W = np.amax(Qc_req_from_CT_W)
    Qh_req_from_CCGT_max_W = np.amax(
        Qh_req_from_CCGT_W)  # the required heat output from CCGT at peak
    mdot_Max_kgpers = np.amax(
        DCN_operation_parameters_array[:, 1])  # sizing of DCN network pumps
    Q_GT_nom_W = 0
    ########## Operation of the cooling tower

    if Q_CT_nom_W > 0:
        for hour in range(2906, 3649):
            wdot_CT = CTModel.calc_CT(Qc_req_from_CT_W[hour], Q_CT_nom_W)
            opex_var_CT[hour] = (wdot_CT) * lca.ELEC_PRICE
            co2_CT[hour] = (wdot_CT) * lca.EL_TO_CO2 * 3600E-6
            prim_energy_CT[hour] = (wdot_CT) * lca.EL_TO_OIL_EQ * 3600E-6

        costs_USD += np.sum(opex_var_CT)
        CO2 += np.sum(co2_CT)
        prim += np.sum(prim_energy_CT)

    ########## Operation of the CCGT
    if Qh_req_from_CCGT_max_W > 0:
        # Sizing of CCGT
        GT_fuel_type = 'NG'  # assumption for scenarios in SG
        Q_GT_nom_sizing_W = Qh_req_from_CCGT_max_W  # starting guess for the size of GT
        Qh_output_CCGT_max_W = 0  # the heat output of CCGT at currently installed size (Q_GT_nom_sizing_W)
        while (Qh_output_CCGT_max_W - Qh_req_from_CCGT_max_W) <= 0:
            Q_GT_nom_sizing_W += 1000  # update GT size
            # get CCGT performance limits and functions at Q_GT_nom_sizing_W
            CCGT_performances = cogeneration.calc_cop_CCGT(
                Q_GT_nom_sizing_W, ACH_T_IN_FROM_CHP, GT_fuel_type, prices)
            Qh_output_CCGT_max_W = CCGT_performances['q_output_max_W']

        # unpack CCGT performance functions
        Q_GT_nom_W = Q_GT_nom_sizing_W * (1 + SIZING_MARGIN
                                          )  # installed CCGT capacity
        CCGT_performances = cogeneration.calc_cop_CCGT(Q_GT_nom_W,
                                                       ACH_T_IN_FROM_CHP,
                                                       GT_fuel_type, prices)
        Q_used_prim_W_CCGT_fn = CCGT_performances['q_input_fn_q_output_W']
        cost_per_Wh_th_CCGT_fn = CCGT_performances[
            'fuel_cost_per_Wh_th_fn_q_output_W']  # gets interpolated cost function
        Qh_output_CCGT_min_W = CCGT_performances['q_output_min_W']
        Qh_output_CCGT_max_W = CCGT_performances['q_output_max_W']
        eta_elec_interpol = CCGT_performances['eta_el_fn_q_input']

        for hour in range(2906, 3649):
            if Qh_req_from_CCGT_W[
                    hour] > Qh_output_CCGT_min_W:  # operate above minimal load
                if Qh_req_from_CCGT_W[
                        hour] < Qh_output_CCGT_max_W:  # Normal operation Possible within partload regime
                    cost_per_Wh_th = cost_per_Wh_th_CCGT_fn(
                        Qh_req_from_CCGT_W[hour])
                    Q_used_prim_CCGT_W = Q_used_prim_W_CCGT_fn(
                        Qh_req_from_CCGT_W[hour])
                    Qh_from_CCGT_W[hour] = Qh_req_from_CCGT_W[hour].copy()
                    E_gen_CCGT_W[hour] = np.float(
                        eta_elec_interpol(
                            Q_used_prim_CCGT_W)) * Q_used_prim_CCGT_W
                else:
                    raise ValueError('Incorrect CCGT sizing!')
            else:  # operate at minimum load
                cost_per_Wh_th = cost_per_Wh_th_CCGT_fn(Qh_output_CCGT_min_W)
                Q_used_prim_CCGT_W = Q_used_prim_W_CCGT_fn(
                    Qh_output_CCGT_min_W)
                Qh_from_CCGT_W[hour] = Qh_output_CCGT_min_W
                E_gen_CCGT_W[hour] = np.float(
                    eta_elec_interpol(
                        Q_used_prim_CCGT_W)) * Q_used_prim_CCGT_W

            opex_var_CCGT[hour] = cost_per_Wh_th * Qh_from_CCGT_W[
                hour] - E_gen_CCGT_W[hour] * prices.ELEC_PRICE
            co2_CCGT[
                hour] = Q_used_prim_CCGT_W * lca.NG_CC_TO_CO2_STD * WH_TO_J / 1.0E6 - E_gen_CCGT_W[
                    hour] * lca.EL_TO_CO2 * 3600E-6
            prim_energy_CCGT[
                hour] = Q_used_prim_CCGT_W * lca.NG_CC_TO_OIL_STD * WH_TO_J / 1.0E6 - E_gen_CCGT_W[
                    hour] * lca.EL_TO_OIL_EQ * 3600E-6

        costs_USD += np.sum(opex_var_CCGT)
        CO2 += np.sum(co2_CCGT)
        prim += np.sum(prim_energy_CCGT)

    ########## Add investment costs

    for i in range(limits['number_of_VCC_chillers']):
        Capex_a_VCC_USD, Opex_fixed_VCC_USD, Capex_VCC_USD = VCCModel.calc_Cinv_VCC(
            Q_VCC_nom_W, locator, config, 'CH3')
        costs_USD += Capex_a_VCC_USD + Opex_fixed_VCC_USD

    Capex_a_VCC_backup_USD, Opex_fixed_VCC_backup_USD, Capex_VCC_backup_USD = VCCModel.calc_Cinv_VCC(
        Q_VCC_backup_nom_W, locator, config, 'CH3')
    costs_USD += Capex_a_VCC_backup_USD + Opex_fixed_VCC_backup_USD

    for i in range(limits['number_of_ACH_chillers']):
        Capex_a_ACH_USD, Opex_fixed_ACH_USD, Capex_ACH_USD = chiller_absorption.calc_Cinv_ACH(
            Q_ACH_nom_W, locator, ACH_TYPE_DOUBLE, config)
        costs_USD += Capex_a_ACH_USD + Opex_fixed_ACH_USD

    Capex_a_CCGT_USD, Opex_fixed_CCGT_USD, Capex_CCGT_USD = cogeneration.calc_Cinv_CCGT(
        Q_GT_nom_W, locator, config)
    costs_USD += Capex_a_CCGT_USD + Opex_fixed_CCGT_USD

    Capex_a_Tank_USD, Opex_fixed_Tank_USD, Capex_Tank_USD = thermal_storage.calc_Cinv_storage(
        V_tank_m3, locator, config, 'TES2')
    costs_USD += Capex_a_Tank_USD + Opex_fixed_Tank_USD

    Capex_a_CT_USD, Opex_fixed_CT_USD, Capex_CT_USD = CTModel.calc_Cinv_CT(
        Q_CT_nom_W, locator, config, 'CT1')

    costs_USD += Capex_a_CT_USD + Opex_fixed_CT_USD

    Capex_a_pump_USD, Opex_fixed_pump_USD, Opex_var_pump_USD, Capex_pump_USD = PumpModel.calc_Ctot_pump(
        master_to_slave_vars, ntwFeat, gv, locator, prices, config)
    costs_USD += Capex_a_pump_USD + Opex_fixed_pump_USD + Opex_var_pump_USD

    network_data = pd.read_csv(
        locator.get_optimization_network_data_folder(
            master_to_slave_vars.network_data_file_cooling))

    date = network_data.DATE.values
    results = pd.DataFrame({
        "DATE":
        date,
        "Q_total_cooling_W":
        Q_cooling_req_W,
        "Opex_var_Lake":
        opex_var_Lake,
        "Opex_var_VCC":
        opex_var_VCC,
        "Opex_var_ACH":
        opex_var_ACH,
        "Opex_var_VCC_backup":
        opex_var_VCC_backup,
        "Opex_var_CT":
        opex_var_CT,
        "Opex_var_CCGT":
        opex_var_CCGT,
        "CO2_from_using_Lake":
        co2_Lake,
        "CO2_from_using_VCC":
        co2_VCC,
        "CO2_from_using_ACH":
        co2_ACH,
        "CO2_from_using_VCC_backup":
        co2_VCC_backup,
        "CO2_from_using_CT":
        co2_CT,
        "CO2_from_using_CCGT":
        co2_CCGT,
        "Primary_Energy_from_Lake":
        prim_energy_Lake,
        "Primary_Energy_from_VCC":
        prim_energy_VCC,
        "Primary_Energy_from_ACH":
        prim_energy_ACH,
        "Primary_Energy_from_VCC_backup":
        prim_energy_VCC_backup,
        "Primary_Energy_from_CT":
        prim_energy_CT,
        "Primary_Energy_from_CCGT":
        prim_energy_CCGT,
        "Q_from_Lake_W":
        Qc_from_Lake_W,
        "Q_from_VCC_W":
        Qc_from_VCC_W,
        "Q_from_ACH_W":
        Qc_from_ACH_W,
        "Q_from_VCC_backup_W":
        Qc_from_VCC_backup_W,
        "Q_from_storage_tank_W":
        Qc_from_storage_tank_W,
        "Qc_CT_associated_with_all_chillers_W":
        Qc_req_from_CT_W,
        "Qh_CCGT_associated_with_absorption_chillers_W":
        Qh_from_CCGT_W,
        "E_gen_CCGT_associated_with_absorption_chillers_W":
        E_gen_CCGT_W
    })

    results.to_csv(locator.get_optimization_slave_cooling_activation_pattern(
        master_to_slave_vars.individual_number,
        master_to_slave_vars.generation_number),
                   index=False)
    ########### Adjust and add the pumps for filtering and pre-treatment of the water
    calibration = calfactor_total / 50976000

    extraElec = (127865400 + 85243600) * calibration
    costs_USD += extraElec * prices.ELEC_PRICE
    CO2 += extraElec * lca.EL_TO_CO2 * 3600E-6
    prim += extraElec * lca.EL_TO_OIL_EQ * 3600E-6
    # Converting costs into float64 to avoid longer values
    costs_USD = np.float64(costs_USD)
    CO2 = np.float64(CO2)
    prim = np.float64(prim)

    # Capex_a and Opex_fixed
    results = pd.DataFrame({
        "Capex_a_VCC": [Capex_a_VCC_USD],
        "Opex_fixed_VCC": [Opex_fixed_VCC_USD],
        "Capex_a_VCC_backup": [Capex_a_VCC_backup_USD],
        "Opex_fixed_VCC_backup": [Opex_fixed_VCC_backup_USD],
        "Capex_a_ACH": [Capex_a_ACH_USD],
        "Opex_fixed_ACH": [Opex_fixed_ACH_USD],
        "Capex_a_CCGT": [Capex_a_CCGT_USD],
        "Opex_fixed_CCGT": [Opex_fixed_CCGT_USD],
        "Capex_a_Tank": [Capex_a_Tank_USD],
        "Opex_fixed_Tank": [Opex_fixed_Tank_USD],
        "Capex_a_CT": [Capex_a_CT_USD],
        "Opex_fixed_CT": [Opex_fixed_CT_USD],
        "Capex_pump": [Capex_a_pump_USD],
        "Opex_fixed_pump": [Opex_fixed_pump_USD],
        "Opex_var_pump": [Opex_var_pump_USD]
    })

    results.to_csv(
        locator.get_optimization_slave_investment_cost_detailed_cooling(
            master_to_slave_vars.individual_number,
            master_to_slave_vars.generation_number),
        index=False)

    print " Cooling main done (", round(time.time() - t0,
                                        1), " seconds used for this task)"

    print('Cooling costs = ' + str(costs_USD))
    print('Cooling CO2 = ' + str(CO2))
    print('Cooling Eprim = ' + str(prim))

    return (costs_USD, CO2, prim)
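
As noted in the comment block at the top of coolingMain, the only branching when recovering the DCN cooling demand is the server waste-heat recovery flag: when recovery is on, the data centre load is dropped from the demand the DCN has to supply, otherwise it is kept. A minimal sketch of that column selection; the column names are copied from the code above, while the flag value and the two-hour DataFrame are illustrative:

import pandas as pd

def select_dcn_cooling_demand(network_data, waste_servers_heat_recovery):
    """Pick the relevant DCN cooling demand series depending on the heat-recovery flag."""
    if waste_servers_heat_recovery == 1:
        # server heat is recovered elsewhere, so the DCN only covers space cooling and refrigeration
        column = "Q_DCNf_space_cooling_and_refrigeration_W"
    else:
        # no heat recovery: the data centre load is also supplied by the DCN
        column = "Q_DCNf_space_cooling_data_center_and_refrigeration_W"
    return network_data[column].values

# illustrative two-hour network summary
network_data = pd.DataFrame({
    "Q_DCNf_space_cooling_and_refrigeration_W": [1.0e6, 1.2e6],
    "Q_DCNf_space_cooling_data_center_and_refrigeration_W": [1.4e6, 1.6e6],
})
print(select_dcn_cooling_demand(network_data, waste_servers_heat_recovery=1))  # [1000000. 1200000.]
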