def main(mode=None, resume_year=None, endyear=None, ReEDS_inputs=None):
    """Run the dGen model end-to-end for every configured input scenario.

    Initializes model-wide settings, connects to Postgres, then loops over the
    input scenarios: imports agents, steps through each model year applying
    load growth, net-metering/tariff parameters, technology prices and
    performance, incentives, financial performance, and Bass diffusion, and
    writes per-year agent outputs to pickle files and to the database.

    Parameters
    ----------
    mode, resume_year, endyear, ReEDS_inputs :
        Accepted but not referenced anywhere in this body — presumably hooks
        for a ReEDS-coupled run mode; confirm against callers before removal.
    """
    try:
        # =====================================================================
        # SET UP THE MODEL TO RUN
        # =====================================================================

        # initialize Model Settings object
        # (this controls settings that apply to all scenarios to be executed)
        model_settings = settings.init_model_settings()

        # make output directory (fails loudly if it already exists)
        os.makedirs(model_settings.out_dir)

        # create the logger
        logger = utilfunc.get_logger(
            os.path.join(model_settings.out_dir, 'dg_model.log'))

        # connect to Postgres and configure connection
        con, cur = utilfunc.make_con(model_settings.pg_conn_string,
                                     model_settings.role)
        engine = utilfunc.make_engine(model_settings.pg_engine_string)

        # register access to hstore in postgres
        pgx.register_hstore(con)
        logger.info(
            "Connected to Postgres with the following params:\n{}".format(
                model_settings.pg_params_log))
        owner = model_settings.role

        # =====================================================================
        # LOOP OVER SCENARIOS
        # =====================================================================
        # variables used to track outputs across scenarios
        scenario_names = []
        dup_n = 1  # counter used to de-duplicate repeated scenario folder names
        out_subfolders = {'wind': [], 'solar': []}

        for i, scenario_file in enumerate(model_settings.input_scenarios):
            logger.info('============================================')
            logger.info('============================================')
            logger.info("Running Scenario {i} of {n}".format(
                i=i + 1, n=len(model_settings.input_scenarios)))

            # initialize ScenarioSettings object
            # (this controls settings that apply only to this specific scenario)
            scenario_settings = settings.init_scenario_settings(
                scenario_file, model_settings, con, cur)
            scenario_settings.input_data_dir = model_settings.input_data_dir

            # summarize high level scenario settings
            datfunc.summarize_scenario(scenario_settings, model_settings)

            # create output folder for this scenario
            input_scenario = scenario_settings.input_scenario
            scen_name = scenario_settings.scen_name
            out_dir = model_settings.out_dir
            (out_scen_path, scenario_names, dup_n) = datfunc.create_scenario_results_folder(
                input_scenario, scen_name, scenario_names, out_dir, dup_n)

            # create folder for input data csvs for this scenario
            scenario_settings.dir_to_write_input_data = out_scen_path + '/input_data'
            scenario_settings.scen_output_dir = out_scen_path
            os.makedirs(scenario_settings.dir_to_write_input_data)

            # get other datasets needed for the model run
            logger.info('Getting various scenario parameters')
            schema = scenario_settings.schema
            max_market_share = datfunc.get_max_market_share(con, schema)
            load_growth_scenario = scenario_settings.load_growth.lower()
            inflation_rate = datfunc.get_annual_inflation(
                con, scenario_settings.schema)
            bass_params = datfunc.get_bass_params(con, scenario_settings.schema)

            # get settings whether to use pre-generated agent file
            # ('User Defined' - provide pkl file name) or generate new agents
            agent_file_status = scenario_settings.agent_file_status

            #==========================================================================================================
            # CREATE AGENTS
            #==========================================================================================================
            logger.info("--------------Creating Agents---------------")

            if scenario_settings.techs in [['wind'], ['solar']]:
                # =========================================================
                # Initialize agents
                # =========================================================
                solar_agents = iFuncs.import_agent_file(
                    scenario_settings, con, cur, engine, model_settings,
                    agent_file_status, input_name='agent_file')

                # Get set of columns that define agent's immutable attributes;
                # per-year derived columns are dropped back to this set each year
                cols_base = list(solar_agents.df.columns)

            #==============================================================================
            # TECHNOLOGY DEPLOYMENT
            #==============================================================================
            if scenario_settings.techs == ['solar']:
                # get incentives and itc inputs
                state_incentives = datfunc.get_state_incentives(con)
                itc_options = datfunc.get_itc_incentives(
                    con, scenario_settings.schema)
                nem_state_capacity_limits = datfunc.get_nem_state(
                    con, scenario_settings.schema)
                nem_state_and_sector_attributes = datfunc.get_nem_state_by_sector(
                    con, scenario_settings.schema)
                nem_utility_and_sector_attributes = datfunc.get_nem_utility_by_sector(
                    con, scenario_settings.schema)
                nem_selected_scenario = datfunc.get_selected_scenario(
                    con, scenario_settings.schema)
                rate_switch_table = agent_mutation.elec.get_rate_switch_table(con)

                #==========================================================================================================
                # INGEST SCENARIO ENVIRONMENTAL VARIABLES
                #==========================================================================================================
                deprec_sch = iFuncs.import_table(
                    scenario_settings, con, engine, owner,
                    input_name='depreciation_schedules',
                    csv_import_function=iFuncs.deprec_schedule)
                carbon_intensities = iFuncs.import_table(
                    scenario_settings, con, engine, owner,
                    input_name='carbon_intensities',
                    csv_import_function=iFuncs.melt_year(
                        'grid_carbon_intensity_tco2_per_kwh'))
                wholesale_elec_prices = iFuncs.import_table(
                    scenario_settings, con, engine, owner,
                    input_name='wholesale_electricity_prices',
                    csv_import_function=iFuncs.process_wholesale_elec_prices)
                pv_tech_traj = iFuncs.import_table(
                    scenario_settings, con, engine, owner,
                    input_name='pv_tech_performance',
                    csv_import_function=iFuncs.stacked_sectors)
                elec_price_change_traj = iFuncs.import_table(
                    scenario_settings, con, engine, owner,
                    input_name='elec_prices',
                    csv_import_function=iFuncs.process_elec_price_trajectories)
                load_growth = iFuncs.import_table(
                    scenario_settings, con, engine, owner,
                    input_name='load_growth',
                    csv_import_function=iFuncs.stacked_sectors)
                pv_price_traj = iFuncs.import_table(
                    scenario_settings, con, engine, owner,
                    input_name='pv_prices',
                    csv_import_function=iFuncs.stacked_sectors)
                batt_price_traj = iFuncs.import_table(
                    scenario_settings, con, engine, owner,
                    input_name='batt_prices',
                    csv_import_function=iFuncs.stacked_sectors)
                pv_plus_batt_price_traj = iFuncs.import_table(
                    scenario_settings, con, engine, owner,
                    input_name='pv_plus_batt_prices',
                    csv_import_function=iFuncs.stacked_sectors)
                financing_terms = iFuncs.import_table(
                    scenario_settings, con, engine, owner,
                    input_name='financing_terms',
                    csv_import_function=iFuncs.stacked_sectors)
                batt_tech_traj = iFuncs.import_table(
                    scenario_settings, con, engine, owner,
                    input_name='batt_tech_performance',
                    csv_import_function=iFuncs.stacked_sectors)
                value_of_resiliency = iFuncs.import_table(
                    scenario_settings, con, engine, owner,
                    input_name='value_of_resiliency',
                    csv_import_function=None)

                #==========================================================================================================
                # Calculate Tariff Components from ReEDS data
                #==========================================================================================================
                # NOTE(review): this inner loop shadows the outer scenario
                # index `i`; below, `i == 0` therefore refers to the FIRST
                # MODEL YEAR, not the first scenario — confirm intent.
                for i, year in enumerate(scenario_settings.model_years):
                    logger.info('\tWorking on {}'.format(year))

                    # determine any non-base-year columns and drop them
                    cols = list(solar_agents.df.columns)
                    cols_to_drop = [x for x in cols if x not in cols_base]
                    solar_agents.df.drop(cols_to_drop, axis=1, inplace=True)

                    # copy the core agent object and set their year
                    solar_agents.df['year'] = year

                    # is it the first model year?
                    is_first_year = year == model_settings.start_year

                    # get and apply load growth
                    solar_agents.on_frame(
                        agent_mutation.elec.apply_load_growth, (load_growth))

                    # Update net metering and incentive expiration
                    cf_during_peak_demand = pd.read_csv(
                        'cf_during_peak_demand.csv'
                    )  # Apply NEM on generation basis, i.e. solar capacity factor during peak demand
                    peak_demand_mw = pd.read_csv('peak_demand_mw.csv')

                    # NOTE(review): `last_year_installed_capacity` is only
                    # initialized when `is_first_year` is True; if the first
                    # model year ever differs from start_year this raises
                    # NameError — confirm model_years always begins at
                    # start_year.
                    if is_first_year:
                        last_year_installed_capacity = agent_mutation.elec.get_state_starting_capacities(
                            con, schema)
                    state_capacity_by_year = agent_mutation.elec.calc_state_capacity_by_year(
                        con, schema, load_growth, peak_demand_mw,
                        is_first_year, year, solar_agents,
                        last_year_installed_capacity)

                    # Apply net metering parameters
                    net_metering_state_df, net_metering_utility_df = agent_mutation.elec.get_nem_settings(
                        nem_state_capacity_limits,
                        nem_state_and_sector_attributes,
                        nem_utility_and_sector_attributes,
                        nem_selected_scenario, year, state_capacity_by_year,
                        cf_during_peak_demand)
                    solar_agents.on_frame(
                        agent_mutation.elec.apply_export_tariff_params,
                        [net_metering_state_df, net_metering_utility_df])

                    # Apply each agent's electricity price change and
                    # assumption about increases
                    solar_agents.on_frame(
                        agent_mutation.elec.
                        apply_elec_price_multiplier_and_escalator,
                        [year, elec_price_change_traj])

                    # Apply technology performance
                    solar_agents.on_frame(
                        agent_mutation.elec.apply_batt_tech_performance,
                        (batt_tech_traj))
                    solar_agents.on_frame(
                        agent_mutation.elec.apply_pv_tech_performance,
                        pv_tech_traj)

                    # Apply technology prices
                    solar_agents.on_frame(agent_mutation.elec.apply_pv_prices,
                                          pv_price_traj)
                    solar_agents.on_frame(
                        agent_mutation.elec.apply_batt_prices,
                        [batt_price_traj, batt_tech_traj, year])
                    solar_agents.on_frame(
                        agent_mutation.elec.apply_pv_plus_batt_prices,
                        [pv_plus_batt_price_traj, batt_tech_traj, year])

                    # Apply value of resiliency
                    solar_agents.on_frame(
                        agent_mutation.elec.apply_value_of_resiliency,
                        value_of_resiliency)

                    # Apply depreciation schedule
                    solar_agents.on_frame(
                        agent_mutation.elec.apply_depreciation_schedule,
                        deprec_sch)

                    # Apply carbon intensities
                    solar_agents.on_frame(
                        agent_mutation.elec.apply_carbon_intensities,
                        carbon_intensities)

                    # Apply wholesale electricity prices
                    solar_agents.on_frame(
                        agent_mutation.elec.apply_wholesale_elec_prices,
                        wholesale_elec_prices)

                    # Apply host-owned financial parameters
                    solar_agents.on_frame(
                        agent_mutation.elec.apply_financial_params,
                        [financing_terms, itc_options, inflation_rate])

                    # multiprocessing is only used on posix-like systems
                    # ('posix' contains 'ix'; Windows os.name is 'nt')
                    if 'ix' not in os.name:
                        cores = None
                    else:
                        cores = model_settings.local_cores

                    # Apply state incentives
                    solar_agents.on_frame(
                        agent_mutation.elec.apply_state_incentives, [
                            state_incentives, year, model_settings.start_year,
                            state_capacity_by_year
                        ])

                    # Calculate System Financial Performance
                    solar_agents.chunk_on_row(
                        financial_functions.calc_system_size_and_performance,
                        sectors=scenario_settings.sectors,
                        cores=cores,
                        rate_switch_table=rate_switch_table)

                    # Calculate the financial performance of the S+S systems
                    #solar_agents.on_frame(financial_functions.calc_financial_performance)

                    # Calculate Maximum Market Share
                    solar_agents.on_frame(
                        financial_functions.calc_max_market_share,
                        max_market_share)

                    # determine "developable" population
                    solar_agents.on_frame(
                        agent_mutation.elec.
                        calculate_developable_customers_and_load)

                    # Apply market_last_year
                    if is_first_year == True:
                        state_starting_capacities_df = agent_mutation.elec.get_state_starting_capacities(
                            con, schema)
                        solar_agents.on_frame(
                            agent_mutation.elec.estimate_initial_market_shares,
                            state_starting_capacities_df)
                        market_last_year_df = None
                    else:
                        solar_agents.on_frame(
                            agent_mutation.elec.apply_market_last_year,
                            market_last_year_df)

                    # Calculate diffusion based on economics and bass diffusion
                    solar_agents.df, market_last_year_df = diffusion_functions_elec.calc_diffusion_solar(
                        solar_agents.df, is_first_year, bass_params, year)

                    # Estimate total generation
                    solar_agents.on_frame(
                        agent_mutation.elec.estimate_total_generation)

                    # Aggregate results: carry this year's cumulative installed
                    # capacity by state forward into next year's capacity calc
                    scenario_settings.output_batt_dispatch_profiles = True
                    last_year_installed_capacity = solar_agents.df[[
                        'state_abbr', 'system_kw_cum', 'batt_kw_cum',
                        'batt_kwh_cum', 'year'
                    ]].copy()
                    last_year_installed_capacity = last_year_installed_capacity.loc[
                        last_year_installed_capacity['year'] == year]
                    last_year_installed_capacity = last_year_installed_capacity.groupby(
                        'state_abbr')[[
                            'system_kw_cum', 'batt_kw_cum', 'batt_kwh_cum'
                        ]].sum().reset_index()

                    #==========================================================================================================
                    # WRITE AGENT DF AS PICKLES FOR POST-PROCESSING
                    #==========================================================================================================
                    write_annual_agents = True
                    # columns dropped from output (intermediates, arrays/json)
                    drop_fields = [
                        'index', 'reeds_reg', 'customers_in_bin_initial',
                        'load_kwh_per_customer_in_bin_initial',
                        'load_kwh_in_bin_initial', 'sector', 'roof_adjustment',
                        'load_kwh_in_bin', 'naep',
                        'first_year_elec_bill_savings_frac', 'metric',
                        'developable_load_kwh_in_bin',
                        'initial_number_of_adopters', 'initial_pv_kw',
                        'initial_market_share', 'initial_market_value',
                        'market_value_last_year', 'teq_yr1', 'mms_fix_zeros',
                        'ratio', 'teq2', 'f', 'new_adopt_fraction',
                        'bass_market_share', 'diffusion_market_share',
                        'new_market_value', 'market_value', 'total_gen_twh',
                        'consumption_hourly', 'solar_cf_profile',
                        'tariff_dict', 'deprec_sch', 'batt_dispatch_profile',
                        'cash_flow', 'cbi', 'ibi', 'pbi', 'cash_incentives',
                        'state_incentives', 'export_tariff_results'
                    ]
                    # only drop columns that actually exist this year
                    drop_fields = [
                        x for x in drop_fields if x in solar_agents.df.columns
                    ]
                    df_write = solar_agents.df.drop(drop_fields, axis=1)

                    if write_annual_agents == True:
                        df_write.to_pickle(out_scen_path +
                                           '/agent_df_{}.pkl'.format(year))

                    # Write Outputs to the database: replace on the first
                    # model year, append on subsequent years (see shadowing
                    # NOTE above — `i` is the year index here)
                    if i == 0:
                        write_mode = 'replace'
                    else:
                        write_mode = 'append'
                    iFuncs.df_to_psql(df_write,
                                      engine,
                                      schema,
                                      owner,
                                      'agent_outputs',
                                      if_exists=write_mode,
                                      append_transformations=True)

                    del df_write

            elif scenario_settings.techs == ['wind']:
                logger.error('Wind not yet supported')
                break

            #==============================================================================
            # Outputs & Visualization
            #==============================================================================
            logger.info("---------Saving Model Results---------")
            out_subfolders = datfunc.create_tech_subfolders(
                out_scen_path, scenario_settings.techs, out_subfolders)

            #####################################################################
            # drop the new scenario_settings.schema
            # NOTE(review): engine/con are disposed/closed here inside the
            # scenario loop — a second scenario would reuse a closed
            # connection; confirm multi-scenario runs are supported.
            engine.dispose()
            con.close()
            datfunc.drop_output_schema(model_settings.pg_conn_string,
                                       scenario_settings.schema,
                                       model_settings.delete_output_schema)
            #####################################################################

            logger.info("-------------Model Run Complete-------------")
            time_to_complete = time.time() - model_settings.model_init
            logger.info('Completed in: {} seconds'.format(
                round(time_to_complete, 1)))

    except Exception as e:
        # close the connection (need to do this before dropping schema or
        # query will hang)
        if 'engine' in locals():
            engine.dispose()
        if 'con' in locals():
            con.close()
        if 'logger' in locals():
            logger.error(e.__str__(), exc_info=True)
        if 'scenario_settings' in locals(
        ) and scenario_settings.schema is not None:
            # drop the output schema
            datfunc.drop_output_schema(model_settings.pg_conn_string,
                                       scenario_settings.schema,
                                       model_settings.delete_output_schema)
        # NOTE(review): the exception is re-raised ONLY when no logger exists;
        # once logging is set up, failures are logged and main() returns
        # normally — callers cannot detect the failure. Confirm intent.
        if 'logger' not in locals():
            raise
    finally:
        # con.close() is a no-op if already closed above (psycopg2 >= 2.2)
        if 'con' in locals():
            con.close()
        if 'scenario_settings' in locals(
        ) and scenario_settings.schema is not None:
            # drop the output schema
            datfunc.drop_output_schema(model_settings.pg_conn_string,
                                       scenario_settings.schema,
                                       model_settings.delete_output_schema)
        if 'logger' in locals():
            utilfunc.shutdown_log(logger)
            utilfunc.code_profiler(model_settings.out_dir)
import os
import warnings
import json
import pandas as pd
import utility_functions as utilfunc
import multiprocessing
import sys
import psycopg2.extras as pgx
import config
import data_functions as datfunc
from excel import excel_functions

#==============================================================================
# Load logger
logger = utilfunc.get_logger()
#==============================================================================


class ModelSettings(object):
    """Container for run-wide model settings shared by all scenarios.

    All attributes start as None placeholders; the inline comments record the
    expected type/invariant each field must satisfy once populated (presumably
    validated elsewhere in this module — confirm against the setters).
    """

    def __init__(self):
        self.model_init = None  # type is float
        self.cdate = None  # type is text
        self.out_dir = None  # doesn't exist already, check parent folder exists
        self.start_year = None  # must = 2014
        self.input_scenarios = None  # type is list, is not empty
        self.pg_params_file = None  # path exists
        self.pg_params = None  # type is dict, includes all elements
        self.pg_conn_string = None  # type is text
        self.pg_params_log = None  # type is text, doesn't include pw
def main(mode=None, resume_year=None, endyear=None, ReEDS_inputs=None):
    """
    Compute the economic adoption of distributed generation resources on an
    agent-level basis. Model output is saved to a `/runs` file within the
    dGen directory.

    Parameters
    ----------
    mode, resume_year, endyear, ReEDS_inputs :
        Accepted but not referenced anywhere in this body — presumably hooks
        for a ReEDS-coupled run mode; confirm against callers.
    """
    try:
        # =====================================================================
        # SET UP THE MODEL TO RUN
        # =====================================================================

        # initialize Model Settings object
        # (this controls settings that apply to all scenarios to be executed)
        model_settings = settings.init_model_settings()
        prerun_test.check_dependencies()

        # make output directory
        # create the logger and stamp with git hash
        logger = utilfunc.get_logger(
            os.path.join(model_settings.out_dir, 'dg_model.log'))
        logger.info("Model version is git commit {:}".format(
            model_settings.git_hash))

        # =====================================================================
        # LOOP OVER SCENARIOS
        # =====================================================================
        out_subfolders = {'solar': []}

        for i, scenario_file in enumerate(model_settings.input_scenarios):
            logger.info('============================================')
            logger.info('============================================')
            logger.info("Running Scenario {i} of {n}".format(
                i=i + 1, n=len(model_settings.input_scenarios)))

            # initialize ScenarioSettings object
            # (this controls settings that apply only to this specific scenario)
            scenario_settings = settings.init_scenario_settings(
                scenario_file, model_settings)

            # log summary high level scenario settings
            logger.info('Scenario Settings:')
            logger.info('\tScenario Name: %s' %
                        scenario_settings.scenario_name)
            logger.info('\tSectors: %s' %
                        list(scenario_settings.sector_data.keys()))
            logger.info('\tTechnologies: %s' % scenario_settings.techs)
            logger.info(
                '\tYears: %s - %s' %
                (scenario_settings.start_year, scenario_settings.end_year))
            logger.info('Results Path: %s' % (scenario_settings.out_scen_path))

            #==========================================================================================================
            # CREATE AGENTS
            #==========================================================================================================
            logger.info("-------------- Agent Preparation ---------------")

            if scenario_settings.generate_agents:
                logger.info('\tCreating Agents')
                solar_agents = Agents(
                    agent_mutation.init_solar_agents(scenario_settings))
                logger.info('....{} agents in input csv'.format(
                    len(solar_agents)))
                # Write base agents to disk
                solar_agents.df.to_pickle(scenario_settings.out_scen_path +
                                          '/agent_df_base.pkl')
            else:
                logger.info('Loading %s' % scenario_settings.agents_file_name)
                # FIX: pickle files must be opened in binary mode ("rb");
                # text mode ("r") raises a decode/type error under Python 3
                with open(scenario_settings.agents_file_name, "rb") as f:
                    solar_agents = Agents(pickle.load(f))

            # Get set of columns that define agent's immutable attributes;
            # per-year derived columns are dropped back to this set each year
            cols_base = list(solar_agents.df.columns.values)

            #==============================================================================
            # TECHNOLOGY DEPLOYMENT
            #==============================================================================
            logger.info("-------------- Yearly Analysis ---------------")
            complete_df = pd.DataFrame()

            if scenario_settings.techs == ['solar']:
                solar_agents.df['tech'] = 'solar'

                # NOTE(review): this inner loop shadows the outer scenario
                # index `i`; below, `i == 0` refers to the FIRST MODEL YEAR.
                for i, year in enumerate(scenario_settings.model_years):
                    is_first_year = year == model_settings.start_year
                    logger.info('\tWorking on %s' % year)

                    # determine any non-base columns and drop them
                    cols = list(solar_agents.df.columns.values)
                    cols_to_drop = [x for x in cols if x not in cols_base]
                    if len(cols_to_drop) != 0:
                        solar_agents.df.drop(cols_to_drop,
                                             axis=1,
                                             inplace=True)

                    # copy the core agent object and set their year
                    solar_agents.df['year'] = year

                    # get and apply load growth
                    load_growth_yearly = scenario_settings.get_load_growth(
                        year)
                    solar_agents.on_frame(
                        agent_mutation.elec.apply_load_growth,
                        (load_growth_yearly))

                    # Normalize the hourly load profile to updated total load
                    # which includes load growth multiplier
                    solar_agents.on_frame(agent_mutation.elec.
                                          apply_scale_normalized_load_profiles)

                    # Get and apply net metering parameters
                    net_metering_yearly = scenario_settings.get_nem_settings(
                        year)
                    solar_agents.on_frame(
                        agent_mutation.elec.apply_export_tariff_params,
                        (net_metering_yearly))

                    # Apply each agent's electricity price change and
                    # assumption about increases
                    solar_agents.on_frame(
                        agent_mutation.elec.
                        apply_elec_price_multiplier_and_escalator,
                        [year, scenario_settings.get_rate_escalations()])

                    # Apply PV Specs
                    solar_agents.on_frame(agent_mutation.elec.apply_pv_specs,
                                          scenario_settings.get_pv_specs())
                    solar_agents.on_frame(
                        agent_mutation.elec.apply_storage_specs, [
                            scenario_settings.get_batt_price_trajectories(),
                            year, scenario_settings
                        ])

                    # Apply financial terms
                    solar_agents.on_frame(
                        agent_mutation.elec.apply_financial_params, [
                            scenario_settings.get_financing_terms(),
                            scenario_settings.
                            financial_options['annual_inflation_pct']
                        ])

                    # Apply wholesale electricity prices
                    solar_agents.on_frame(
                        agent_mutation.elec.apply_wholesale_elec_prices,
                        scenario_settings.get_wholesale_elec_prices())

                    # Size S+S system and calculate electric bills;
                    # multiprocessing only on posix-like systems
                    # ('posix' contains 'ix'; Windows os.name is 'nt')
                    if 'ix' not in os.name:
                        cores = None
                    else:
                        cores = model_settings.local_cores
                    solar_agents.on_row(
                        fFuncs.calc_system_size_and_financial_performance,
                        cores=cores)

                    solar_agents.df['agent_id'] = solar_agents.df.index.values

                    # Calculate the financial performance of the S+S systems
                    solar_agents.on_frame(
                        financial_functions.calc_financial_performance)

                    # Calculate Maximum Market Share
                    solar_agents.on_frame(
                        financial_functions.calc_max_market_share,
                        scenario_settings.get_max_market_share())

                    # determine "developable" population
                    solar_agents.on_frame(
                        agent_mutation.elec.
                        calculate_developable_customers_and_load)

                    # Apply market_last_year
                    if is_first_year:
                        solar_agents.on_frame(
                            agent_mutation.elec.estimate_initial_market_shares)
                        market_last_year_df = None
                    else:
                        solar_agents.on_frame(
                            agent_mutation.elec.apply_market_last_year,
                            market_last_year_df)

                    # Calculate diffusion based on economics and bass diffusion
                    solar_agents.df, market_last_year_df = diffusion_functions.calc_diffusion_solar(
                        solar_agents.df, is_first_year,
                        scenario_settings.get_bass_params())

                    # Estimate total generation
                    solar_agents.on_frame(
                        agent_mutation.elec.estimate_total_generation)

                    # Aggregate results
                    scenario_settings.output_batt_dispatch_profiles = False
                    if is_first_year == True:
                        interyear_results_aggregations = agent_mutation.elec.aggregate_outputs_solar(
                            solar_agents.df, year, is_first_year,
                            scenario_settings)
                    else:
                        interyear_results_aggregations = agent_mutation.elec.aggregate_outputs_solar(
                            solar_agents.df, year, is_first_year,
                            scenario_settings, interyear_results_aggregations)

                    # --- Check to ensure that agent_df isn't growing
                    # (i.e. merges are failing silently) ---
                    # NOTE(review): df_print is computed but never logged or
                    # used afterwards — presumably a leftover debugging aid.
                    df_print = solar_agents.df.copy()
                    df_print = df_print.loc[df_print['year'] == year]
                    df_print = df_print.groupby(['sector_abbr'
                                                 ])['pv_kw_cum'].sum()

                    #==========================================================================================================
                    # WRITE AGENT DF AS PICKLES FOR POST-PROCESSING
                    #==========================================================================================================
                    # Write Outputs to the database
                    drop_fields = [
                        'consumption_hourly_initial', 'bill_savings',
                        'consumption_hourly', 'solar_cf_profile',
                        'tariff_dict', 'deprec_sch', 'batt_dispatch_profile'
                    ]  # dropping because are arrays or json
                    df_write = solar_agents.df.drop(drop_fields, axis=1)

                    write_annual = False
                    if write_annual:
                        df_write.to_pickle(scenario_settings.out_scen_path +
                                           '/agent_df_%s.pkl' % year)

                    # accumulate all years into one frame (`i` is the year
                    # index here — see shadowing NOTE above)
                    if i == 0:
                        complete_df = df_write
                    else:
                        complete_df = pd.concat([complete_df, df_write],
                                                sort=False)

            #==============================================================================
            # Outputs & Visualization
            #==============================================================================
            logger.info("---------Saving Model Results---------")
            complete_df.to_csv(scenario_settings.out_scen_path +
                               '/agent_outputs.csv')

            logger.info("-------------Model Run Complete-------------")
            logger.info('Completed in: %.1f seconds' %
                        (time.time() - model_settings.model_init))

    except Exception as e:
        if 'logger' in locals():
            logger.error(e.__str__(), exc_info=True)
            logger.info('Error on line {}'.format(
                sys.exc_info()[-1].tb_lineno))
            logger.info('Type of error {}'.format(type(e)))
            logger.info('Error Text: {}'.format(e))
        # NOTE(review): the exception is re-raised ONLY when no logger exists;
        # once logging is set up, failures are logged and main() returns
        # normally — callers cannot detect the failure. Confirm intent.
        if 'logger' not in locals():
            raise
    finally:
        if 'logger' in locals():
            utilfunc.shutdown_log(logger)
            utilfunc.code_profiler(model_settings.out_dir)