def write_recalc_DH(config):
    """Recalculate district-heating loads and rewrite per-building results.

    Splits the recalculated space-heating and DHW loads between heat pumps and
    natural gas, then rewrites results for every building connected to DH.

    :param config: configuration object for the scenario to process
    :type config: cea.config.Configuration
    :return: None
    """
    ng_heat, hp_heat, ng_dhw, hp_dhw = recalc_DH(config)
    locator = cea.inputlocator.InputLocator(config.scenario)
    on_DH_hs, on_DH_dhw = district_buildings(locator)
    # union of both connection lists, preserving the order of on_DH_hs
    all_bldgs = on_DH_hs + list(set(on_DH_dhw) - set(on_DH_hs))

    if on_DH_hs == on_DH_dhw:
        # identical connection lists: one pass writes both services per building
        print(' - - - Changing results for all bldgs...')
        n = len(on_DH_hs)
        calc_hourly = cea.utilities.parallel.vectorize(write_ng_hs, config.get_number_of_processes())
        calc_hourly(repeat(config, n), repeat(locator, n), on_DH_hs,
                    repeat(hp_heat, n), repeat(hp_dhw, n),
                    repeat(ng_heat, n), repeat(ng_dhw, n))
    else:
        # connection lists differ: process space heating, then DHW, then both
        n = len(on_DH_hs)
        calc_hourly = cea.utilities.parallel.vectorize(write_hs, config.get_number_of_processes())
        calc_hourly(repeat(config, n), repeat(locator, n), on_DH_hs,
                    repeat(hp_heat, n), repeat(ng_heat, n))

        n = len(on_DH_dhw)
        calc_hourly = cea.utilities.parallel.vectorize(write_ng, config.get_number_of_processes())
        calc_hourly(repeat(config, n), repeat(locator, n), on_DH_dhw,
                    repeat(hp_dhw, n), repeat(ng_dhw, n))

        n = len(all_bldgs)
        calc_hourly = cea.utilities.parallel.vectorize(write_both, config.get_number_of_processes())
        calc_hourly(repeat(config, n), repeat(locator, n), all_bldgs)

    # fixed: print the completion message instead of `return print(...)` (which returns None anyway)
    print(' - District heating recalculated!')
def schedule_maker_main(locator, config, building=None):
    """Create occupancy/internal-load schedules for the selected buildings.

    :param locator: InputLocator with the scenario's input/output paths
    :type locator: cea.inputlocator.InputLocator
    :param config: configuration object (reads the ``schedule-maker`` section)
    :type config: cea.config.Configuration
    :param building: optional single building name; when given, the run is
        restricted to that building (used by the tests)
    :return: None
    """
    # local variables
    buildings = config.schedule_maker.buildings
    schedule_model = config.schedule_maker.schedule_model

    if schedule_model == 'deterministic':
        stochastic_schedule = False
    elif schedule_model == 'stochastic':
        stochastic_schedule = True
    else:
        raise ValueError("Invalid schedule model: {schedule_model}".format(**locals()))

    # BUGFIX: compare to None with `is not None`, not `!= None` (PEP 8)
    if building is not None:
        buildings = [building]  # this is to run the tests

    # CHECK DATABASE
    if is_3_22(config.scenario):
        raise ValueError("""The data format of indoor comfort has been changed after v3.22. Please run Data migrator in Utilities.""")

    # get variables of indoor comfort and internal loads
    internal_loads = dbf_to_dataframe(locator.get_building_internal()).set_index('Name')
    indoor_comfort = dbf_to_dataframe(locator.get_building_comfort()).set_index('Name')
    architecture = dbf_to_dataframe(locator.get_building_architecture()).set_index('Name')

    # get building properties
    prop_geometry = Gdf.from_file(locator.get_zone_geometry())
    prop_geometry['footprint'] = prop_geometry.area
    prop_geometry['GFA_m2'] = prop_geometry['footprint'] * (prop_geometry['floors_ag'] + prop_geometry['floors_bg'])
    prop_geometry['GFA_ag_m2'] = prop_geometry['footprint'] * prop_geometry['floors_ag']
    prop_geometry['GFA_bg_m2'] = prop_geometry['footprint'] * prop_geometry['floors_bg']
    prop_geometry = prop_geometry.merge(architecture, on='Name').set_index('Name')
    prop_geometry = calc_useful_areas(prop_geometry)

    # get calculation year from weather file
    weather_path = locator.get_weather_file()
    weather_data = epwreader.epw_reader(weather_path)[['year', 'drybulb_C', 'wetbulb_C',
                                                      'relhum_percent', 'windspd_ms', 'skytemp_C']]
    year = weather_data['year'][0]

    # create date range for the calculation year
    date_range = get_date_range_hours_from_year(year)

    # SCHEDULE MAKER
    n = len(buildings)
    calc_schedules_multiprocessing = cea.utilities.parallel.vectorize(calc_schedules,
                                                                      config.get_number_of_processes(),
                                                                      on_complete=print_progress)
    calc_schedules_multiprocessing(repeat(locator, n),
                                   buildings,
                                   repeat(date_range, n),
                                   [internal_loads.loc[b] for b in buildings],
                                   [indoor_comfort.loc[b] for b in buildings],
                                   [prop_geometry.loc[b] for b in buildings],
                                   repeat(stochastic_schedule, n))
    return None
def building_2d_to_3d(locator, zone_df, surroundings_df, elevation_map, config, geometry_pickle_dir):
    """
    Build 3D solids for the zone and surrounding buildings from their 2D footprints.

    :param locator: InputLocator - provides paths to files in a scenario
    :type locator: cea.inputlocator.InputLocator
    :param config: the configuration object to use
    :type config: cea.config.Configuration
    :return: tuple of (geometry_3D_zone, geometry_3D_surroundings)
    """
    # configuration values used throughout
    num_processes = config.get_number_of_processes()
    zone_simplification = config.radiation.zone_geometry
    surroundings_simplification = config.radiation.surrounding_geometry
    consider_intersections = config.radiation.consider_intersections

    print('Calculating terrain intersection of building geometries')
    zone_buildings_df = zone_df.set_index('Name')
    zone_building_names = zone_buildings_df.index.values
    zone_building_solid_list = calc_building_solids(zone_buildings_df, zone_simplification,
                                                    elevation_map, num_processes)

    surroundings_buildings_df = surroundings_df.set_index('Name')
    surroundings_building_names = surroundings_buildings_df.index.values
    surroundings_building_solid_list = calc_building_solids(surroundings_buildings_df,
                                                            surroundings_simplification,
                                                            elevation_map, num_processes)

    architecture_wwr_df = gdf.from_file(locator.get_building_architecture()).set_index('Name')

    # calculate geometry for the surroundings
    print('Generating geometry for surrounding buildings')
    geometry_3D_surroundings = []
    for bldg_name, bldg_solid in zip(surroundings_building_names, surroundings_building_solid_list):
        geometry_3D_surroundings.append(
            calc_building_geometry_surroundings(bldg_name, bldg_solid, geometry_pickle_dir))

    # calculate geometry for the zone of analysis
    print('Generating geometry for buildings in the zone of analysis')
    n = len(zone_building_names)
    calc_zone_geometry_multiprocessing = cea.utilities.parallel.vectorize(calc_building_geometry_zone,
                                                                          num_processes,
                                                                          on_complete=print_progress)

    # only hand the full solid list to the workers when intersections matter
    all_building_solid_list = (np.append(zone_building_solid_list, surroundings_building_solid_list)
                               if consider_intersections else [])

    geometry_3D_zone = calc_zone_geometry_multiprocessing(zone_building_names,
                                                          zone_building_solid_list,
                                                          repeat(all_building_solid_list, n),
                                                          repeat(architecture_wwr_df, n),
                                                          repeat(geometry_pickle_dir, n),
                                                          repeat(consider_intersections, n))

    return geometry_3D_zone, geometry_3D_surroundings
def main(config):
    """Run the photovoltaic potential calculation for every zone building, then aggregate results."""
    assert os.path.exists(config.scenario), 'Scenario not found: %s' % config.scenario
    locator = cea.inputlocator.InputLocator(scenario=config.scenario)

    # echo the run settings
    print('Running photovoltaic with scenario = %s' % config.scenario)
    print('Running photovoltaic with annual-radiation-threshold-kWh/m2 = %s' % config.solar.annual_radiation_threshold)
    print('Running photovoltaic with panel-on-roof = %s' % config.solar.panel_on_roof)
    print('Running photovoltaic with panel-on-wall = %s' % config.solar.panel_on_wall)
    print('Running photovoltaic with solar-window-solstice = %s' % config.solar.solar_window_solstice)
    print('Running photovoltaic with type-pvpanel = %s' % config.solar.type_pvpanel)
    if config.solar.custom_tilt_angle:
        print('Running photovoltaic with custom-tilt-angle = %s and panel-tilt-angle = %s' %
              (config.solar.custom_tilt_angle, config.solar.panel_tilt_angle))
    else:
        print('Running photovoltaic with custom-tilt-angle = %s' % config.solar.custom_tilt_angle)
    if config.solar.custom_roof_coverage:
        print('Running photovoltaic with custom-roof-coverage = %s and max-roof-coverage = %s' %
              (config.solar.custom_roof_coverage, config.solar.max_roof_coverage))
    else:
        print('Running photovoltaic with custom-roof-coverage = %s' % config.solar.custom_roof_coverage)

    building_names = locator.get_zone_building_names()
    zone_geometry_df = gdf.from_file(locator.get_zone_geometry())
    latitude, longitude = get_lat_lon_projected_shapefile(zone_geometry_df)

    # list_buildings_names =['B026', 'B036', 'B039', 'B043', 'B050'] for missing buildings
    weather_data = epwreader.epw_reader(locator.get_weather_file())
    date_local = solar_equations.calc_datetime_local_from_weather_file(weather_data, latitude, longitude)

    num_process = config.get_number_of_processes()
    n = len(building_names)
    run_calc_pv = cea.utilities.parallel.vectorize(calc_PV, num_process)
    run_calc_pv(repeat(locator, n), repeat(config, n), repeat(latitude, n), repeat(longitude, n),
                repeat(weather_data, n), repeat(date_local, n), building_names)

    # aggregate results from all buildings
    write_aggregate_results(locator, building_names, num_process)
def multiprocess_write_pv_whole(config, key_list):
    """Run ``process_whole`` for every key in *key_list*, fanned out over worker processes."""
    # multithreading by strategy
    n = len(key_list)
    run_whole = cea.utilities.parallel.vectorize(process_whole, config.get_number_of_processes())
    run_whole(repeat(config, n), key_list)
def run_parallel(config):
    """Rewrite per-building demand results to csv in parallel and print a summary column.

    :param config: configuration object (reads ``demand.buildings``)
    :type config: cea.config.Configuration
    :return: None
    """
    locator = cea.inputlocator.InputLocator(config.scenario)
    n = len(config.demand.buildings)
    df = pd.read_csv(locator.get_total_demand('csv'), index_col='Name')
    print(df['NG_hs_MWhyr'])

    calc_hourly = cea.utilities.parallel.vectorize(rewrite_to_csv, config.get_number_of_processes())
    res = calc_hourly(config.demand.buildings, repeat(locator, n))

    # fixed: print the summary and return None, instead of `return print(...)`
    print(pd.concat(res, axis=1).transpose()['NG_hs_MWhyr'])
def multiprocess_write_pv_hourly(config):
    """Write hourly PV results for every building in parallel (one worker call per building).

    :param config: configuration object (reads the ``bigmacc`` section for the run key)
    :type config: cea.config.Configuration
    :return: None
    """
    # multithreading by building
    locator = cea.inputlocator.InputLocator(config.scenario)
    pv_total = pd.read_csv(locator.PV_total_buildings(), index_col='Name')
    bldg_list = pv_total.index.to_list()
    n = len(bldg_list)

    calc_hourly = cea.utilities.parallel.vectorize(process_hourly, config.get_number_of_processes())
    calc_hourly(bldg_list, repeat(locator, n))

    # fixed: print the completion message and return None, instead of `return print(...)`
    print(f'Multiprocessing of hourly completed for {config.bigmacc.key}.')
def building_2d_to_3d(locator, geometry_terrain, config, height_col, nfloor_col):
    """
    Build 3D geometry for zone and surrounding buildings from 2D footprints.

    :param locator: InputLocator - provides paths to files in a scenario
    :type locator: cea.inputlocator.InputLocator
    :param config: the configuration object to use
    :type config: cea.config.Configuration
    :param height_col: name of the column storing the height of buildings
    :param nfloor_col: name of the column storing the number of floors in buildings
    :return: tuple of (geometry_3D_zone, geometry_3D_surroundings)
    """
    # settings: parameters that configure the level of simplification of geometry
    settings = config.radiation

    # preprocess data (renamed from `data_preprocessed`, which was confusingly reassigned below)
    building_data = BuildingData(locator, settings, geometry_terrain, height_col, nfloor_col)
    surrounding_building_names = building_data.surroundings_building_names
    surroundings_building_solid_list = building_data.surroundings_building_solid_list
    all_building_solid_list = building_data.all_building_solid_list
    architecture_wwr_df = building_data.architecture_wwr_df
    zone_building_names = building_data.zone_building_names
    zone_building_solid_list = building_data.zone_building_solid_list
    consider_intersections = config.radiation.consider_intersections

    # calculate geometry for the surroundings
    print('Generating geometry for surrounding buildings')
    # data bundle handed to each zone-geometry worker
    zone_worker_data = BuildingDataFinale(surroundings_building_solid_list, all_building_solid_list,
                                          architecture_wwr_df)
    geometry_3D_surroundings = [calc_building_geometry_surroundings(x, y)
                                for x, y in zip(surrounding_building_names,
                                                surroundings_building_solid_list)]

    # calculate geometry for the zone of analysis
    print('Generating geometry for buildings in the zone of analysis')
    n = len(zone_building_names)
    calc_zone_geometry_multiprocessing = cea.utilities.parallel.vectorize(calc_building_geometry_zone,
                                                                          config.get_number_of_processes(),
                                                                          on_complete=print_progress)
    # use itertools.repeat (consistent with the rest of the module) instead of
    # building throwaway lists with `[x for x in range(n)]`
    geometry_3D_zone = calc_zone_geometry_multiprocessing(zone_building_names,
                                                          zone_building_solid_list,
                                                          repeat(zone_worker_data, n),
                                                          repeat(consider_intersections, n))

    return geometry_3D_zone, geometry_3D_surroundings
def write_pv_to_demand_multi(config):
    """Add PV results into each building's demand file, fanned out over worker processes.

    :param config: configuration object for the scenario to process
    :type config: cea.config.Configuration
    :return: None
    """
    locator = cea.inputlocator.InputLocator(config.scenario)
    pv_total = pd.read_csv(locator.PV_total_buildings(), index_col='Name')
    bldg_list = pv_total.index.to_list()
    n = len(bldg_list)

    calc_hourly = cea.utilities.parallel.vectorize(process_pv, config.get_number_of_processes())
    calc_hourly(repeat(config, n), bldg_list, repeat(locator, n))

    # write total to file
    # fixed: print the completion message and return None, instead of `return print(...)`
    print(' - PV results added them to the demand file.')
def main(config):
    """Calculate PV potential per building and aggregate hourly and annual results.

    :param config: configuration object with scenario and solar settings
    :type config: cea.config.Configuration
    :return: None (writes per-building, total-hourly and total-annual csv files)
    """
    assert os.path.exists(config.scenario), 'Scenario not found: %s' % config.scenario
    locator = cea.inputlocator.InputLocator(scenario=config.scenario)

    print('Running photovoltaic with scenario = %s' % config.scenario)
    print('Running photovoltaic with annual-radiation-threshold-kWh/m2 = %s' % config.solar.annual_radiation_threshold)
    print('Running photovoltaic with panel-on-roof = %s' % config.solar.panel_on_roof)
    print('Running photovoltaic with panel-on-wall = %s' % config.solar.panel_on_wall)
    print('Running photovoltaic with solar-window-solstice = %s' % config.solar.solar_window_solstice)
    print('Running photovoltaic with type-pvpanel = %s' % config.solar.type_pvpanel)

    buildings_names = locator.get_zone_building_names()
    zone_geometry_df = gdf.from_file(locator.get_zone_geometry())
    latitude, longitude = get_lat_lon_projected_shapefile(zone_geometry_df)

    # list_buildings_names =['B026', 'B036', 'B039', 'B043', 'B050'] for missing buildings
    weather_data = epwreader.epw_reader(locator.get_weather_file())
    date_local = solar_equations.calc_datetime_local_from_weather_file(weather_data, latitude, longitude)

    n = len(buildings_names)
    cea.utilities.parallel.vectorize(calc_PV, config.get_number_of_processes())(repeat(locator, n),
                                                                                repeat(config, n),
                                                                                repeat(latitude, n),
                                                                                repeat(longitude, n),
                                                                                repeat(weather_data, n),
                                                                                repeat(date_local, n),
                                                                                buildings_names)

    # aggregate results from all buildings
    aggregated_annual_results = {}
    aggregated_hourly_results_df = None
    for building in buildings_names:
        hourly_results_per_building = pd.read_csv(locator.PV_results(building))
        if aggregated_hourly_results_df is None:
            aggregated_hourly_results_df = hourly_results_per_building
        else:
            aggregated_hourly_results_df = aggregated_hourly_results_df + hourly_results_per_building

        annual_energy_production = hourly_results_per_building.filter(like='_kWh').sum()
        panel_area_per_building = hourly_results_per_building.filter(like='_m2').iloc[0]
        # BUGFIX: Series.append was removed in pandas 2.0 -- concatenate with pd.concat instead
        building_annual_results = pd.concat([annual_energy_production, panel_area_per_building])
        aggregated_annual_results[building] = building_annual_results

    # save hourly results
    aggregated_hourly_results_df = aggregated_hourly_results_df.set_index('Date')
    aggregated_hourly_results_df.to_csv(locator.PV_totals(), index=True, float_format='%.2f')

    # save annual results
    aggregated_annual_results_df = pd.DataFrame(aggregated_annual_results).T
    aggregated_annual_results_df.to_csv(locator.PV_total_buildings(), index=True, float_format='%.2f')
def disconnected_buildings_cooling_main(locator, building_names, total_demand, config, prices, lca):
    """
    Computes the parameters for the operation of disconnected buildings output results in csv files.
    There is no optimization at this point. The different cooling energy supply system configurations are
    calculated and compared 1 to 1 to each other. it is a classical combinatorial problem.

    The six supply system configurations include:
    (VCC: Vapor Compression Chiller, ACH: Absorption Chiller, CT: Cooling Tower, Boiler)
    (AHU: Air Handling Units, ARU: Air Recirculation Units, SCU: Sensible Cooling Units)
    - config 0: Direct Expansion / Mini-split units (NOTE: this configuration is not fully built yet)
    - config 1: VCC_to_AAS (AHU + ARU + SCU) + CT
    - config 2: FP + single-effect ACH_to_AAS (AHU + ARU + SCU) + Boiler + CT
    - config 3: ET + single-effect ACH_to_AAS (AHU + ARU + SCU) + Boiler + CT
    - config 4: VCC_to_AA (AHU + ARU) + VCC_to_S (SCU) + CT
    - config 5: VCC_to_AA (AHU + ARU) + single effect ACH_S (SCU) + CT + Boiler

    Note:
    1. Only cooling supply configurations are compared here. The demand for electricity is supplied from
       the grid, and the demand for domestic hot water is supplied from electric boilers.
    2. Single-effect chillers are coupled with flat-plate solar collectors, and the double-effect chillers
       are coupled with evacuated tube solar collectors.

    :param locator: locator class with paths to input/output files
    :param building_names: list with names of buildings
    :param config: cea.config
    :param prices: prices class
    :return: one .csv file with results of operations of disconnected buildings; one .csv file with
        operation of the best configuration (Cost, CO2, Primary Energy)
    """
    # BUGFIX: time.clock() was removed in Python 3.8 -- use time.perf_counter()
    t0 = time.perf_counter()
    chiller_prop = pd.read_excel(locator.get_database_supply_systems(), sheet_name="Absorption_chiller")

    n = len(building_names)
    cea.utilities.parallel.vectorize(disconnected_cooling_for_building, config.get_number_of_processes())(
        building_names, repeat(chiller_prop, n), repeat(config, n), repeat(lca, n), repeat(locator, n),
        repeat(prices, n), repeat(total_demand, n))

    print(time.perf_counter() - t0, "seconds process time for the decentralized Building Routine \n")
def calc_demand_multiprocessing(building_properties, date, locator, list_building_names, usage_schedules,
                                weather_data, use_dynamic_infiltration_calculation, use_stochastic_occupancy,
                                resolution_outputs, loads_output, massflows_output, temperatures_output,
                                format_output, config, region):
    """Calculate thermal loads for all buildings in parallel using a process pool.

    Each building is submitted as one async job; results are collected in submission
    order with a per-job timeout.

    :param building_properties: mapping of building name -> building property record
    :param list_building_names: names of the buildings to simulate
    :param config: configuration object (provides the number of worker processes)
    :return: None
    """
    number_of_processes = config.get_number_of_processes()
    print("Using %i CPU's" % number_of_processes)
    pool = mp.Pool(number_of_processes)

    joblist = []
    num_buildings = len(list_building_names)
    for building in list_building_names:
        bpr = building_properties[building]
        job = pool.apply_async(thermal_loads.calc_thermal_loads,
                               [building, bpr, weather_data, usage_schedules, date, locator,
                                use_stochastic_occupancy, use_dynamic_infiltration_calculation,
                                resolution_outputs, loads_output, massflows_output, temperatures_output,
                                format_output, region])
        joblist.append(job)

    for i, job in enumerate(joblist):
        job.get(240)  # raises TimeoutError if a single building takes longer than 240 s
        print('Building No. %i completed out of %i' % (i + 1, num_buildings))

    pool.close()
    pool.join()  # BUGFIX: wait for worker processes to terminate so their resources are released
def demand_calculation(locator, config):
    """
    Algorithm to calculate the hourly demand of energy services in buildings using the integrated model of
    [Fonseca2015]_.

    Produces a demand file per building and a total demand file for the whole zone of interest:

    - a csv file for every building with hourly demand data.
    - ``Total_demand.csv``, csv file of yearly demand data per building.

    :param locator: An InputLocator to locate input files
    :type locator: cea.inputlocator.InputLocator
    :param config: configuration object; the ``demand`` section provides all run settings
        (buildings, outputs, dynamic-infiltration flag, resolution)
    :type config: cea.config.Configuration
    :returns: (totals, time_series) as produced by the yearly demand writer
    :rtype: tuple

    .. [Fonseca2015] Fonseca, Jimeno A., and Arno Schlueter. “Integrated Model for Characterization of
        Spatiotemporal Building Energy Consumption Patterns in Neighborhoods and City Districts.”
        Applied Energy 142 (2015): 247–265.
    """
    # INITIALIZE TIMER
    # BUGFIX: time.clock() was removed in Python 3.8 -- use time.perf_counter()
    t0 = time.perf_counter()

    # LOCAL VARIABLES
    building_names = config.demand.buildings
    use_dynamic_infiltration = config.demand.use_dynamic_infiltration_calculation
    resolution_output = config.demand.resolution_output
    loads_output = config.demand.loads_output
    massflows_output = config.demand.massflows_output
    temperatures_output = config.demand.temperatures_output
    debug = config.debug

    weather_path = locator.get_weather_file()
    weather_data = epwreader.epw_reader(weather_path)[['year', 'drybulb_C', 'wetbulb_C',
                                                      'relhum_percent', 'windspd_ms', 'skytemp_C']]
    year = weather_data['year'][0]

    # create date range for the calculation year
    date_range = get_date_range_hours_from_year(year)

    # SPECIFY NUMBER OF BUILDINGS TO SIMULATE
    print('Running demand calculation for the following buildings=%s' % building_names)

    # CALCULATE OBJECT WITH PROPERTIES OF ALL BUILDINGS
    building_properties = BuildingProperties(locator, building_names)

    # add a message i2065 of warning. This needs a more elegant solution
    def calc_buildings_less_100m2(building_properties):
        # return names of buildings whose gross floor area is below 100 m2
        footprint = building_properties._prop_geometry.footprint
        floors = building_properties._prop_geometry.floors_ag
        names = building_properties._prop_geometry.index
        GFA_m2 = [x * y for x, y in zip(footprint, floors)]
        return [name for name, gfa in zip(names, GFA_m2) if gfa < 100.0]

    list_buildings_less_100m2 = calc_buildings_less_100m2(building_properties)
    if list_buildings_less_100m2:  # truthiness instead of `!= []`
        print('Warning! The following list of buildings have less than 100 m2 of gross floor area, '
              'CEA might fail: %s' % list_buildings_less_100m2)

    # DEMAND CALCULATION
    n = len(building_names)
    calc_thermal_loads = cea.utilities.parallel.vectorize(thermal_loads.calc_thermal_loads,
                                                          config.get_number_of_processes(),
                                                          on_complete=print_progress)
    calc_thermal_loads(building_names,
                       [building_properties[b] for b in building_names],
                       repeat(weather_data, n),
                       repeat(date_range, n),
                       repeat(locator, n),
                       repeat(use_dynamic_infiltration, n),
                       repeat(resolution_output, n),
                       repeat(loads_output, n),
                       repeat(massflows_output, n),
                       repeat(temperatures_output, n),
                       repeat(config, n),
                       repeat(debug, n))

    # WRITE TOTAL YEARLY VALUES
    writer_totals = demand_writers.YearlyDemandWriter(loads_output, massflows_output, temperatures_output)
    totals, time_series = writer_totals.write_to_csv(building_names, locator)

    time_elapsed = time.perf_counter() - t0
    print('done - time elapsed: %d.2 seconds' % time_elapsed)

    return totals, time_series
def planning_and_operation(locator, config):
    """Run the MPC district planning & operation optimization, solve it and write results.

    :param locator: InputLocator with the scenario's input/output paths
    :type locator: cea.inputlocator.InputLocator
    :param config: configuration object (reads the ``mpc-district`` section)
    :type config: cea.config.Configuration
    :return: None
    """
    # Local vars
    scenario_name = config.scenario_name  # scenario_name
    weather_path = locator.get_weather_file()
    threads = config.get_number_of_processes()
    time_start = config.mpc_district.time_start
    time_end = config.mpc_district.time_end
    set_temperature_goal = config.mpc_district.set_temperature_goal
    constant_temperature = config.mpc_district.constant_temperature
    pricing_scheme = config.mpc_district.pricing_scheme
    constant_price = config.mpc_district.constant_price
    min_max_source = config.mpc_district.min_max_source
    min_constant_temperature = config.mpc_district.min_constant_temperature
    max_constant_temperature = config.mpc_district.max_constant_temperature
    delta_set = config.mpc_district.delta_set
    delta_setback = config.mpc_district.delta_setback

    # local constants
    parameter_set = PARAMETER_SET
    time_step_ts = TIME_STEP_TS
    solver_name = SOLVER_NAME
    time_limit = TIME_LIMIT
    alpha = ALPHA
    beta = BETA
    power_factor = POWER_FACTOR
    approx_loss_hours = APPROX_LOSS_HOURS
    voltage_nominal = VOLTAGE_NOMINAL
    load_factor = LOAD_FACTOR
    interest_rate = INTEREST_RATE

    # BUGFIX: time.clock() was removed in Python 3.8 -- use time.perf_counter()
    t0 = time.perf_counter()
    time_main = time.time()

    print('Running scenario: ' + scenario_name)

    print('Processing: Setup models and optimization')
    m = planning_and_operation_optimization.main(locator, weather_path, scenario_name, parameter_set,
                                                 time_start, time_end, time_step_ts, set_temperature_goal,
                                                 constant_temperature, alpha, beta, pricing_scheme,
                                                 constant_price, min_max_source, min_constant_temperature,
                                                 max_constant_temperature, delta_set, delta_setback,
                                                 power_factor, approx_loss_hours, voltage_nominal,
                                                 load_factor, interest_rate)

    print('Processing: Solve optimization')
    opt = SolverFactory(solver_name)  # Create a solver
    if time_limit > 0:
        # solver-specific option name for the wall-clock limit
        if solver_name == 'cplex':
            opt.options['timelimit'] = time_limit
        elif solver_name == 'gurobi':
            opt.options['TimeLimit'] = time_limit
    opt.options['threads'] = threads
    opt.solve(m, tee=True)

    print('Processing: Write results')
    output_folder = "mpc-district"
    planning_and_operation_write_results.print_res(m)
    planning_and_operation_write_results.write_results(locator, output_folder, scenario_name, m, time_main,
                                                       solver_name, threads, time_limit, interest_rate,
                                                       voltage_nominal, approx_loss_hours, alpha, beta,
                                                       load_factor)
    planning_and_operation_plots.save_plots(locator, m)
    operation_write_results.main(locator, m, output_folder)

    print('Completed.')
    print('Total time: {:.2f} seconds'.format(time.perf_counter() - t0))
def main(config):
    """
    This function makes the calculation of solar insolation in X sensor points for every building in the
    zone of interest. The number of sensor points depends on the size of the grid selected in the config
    file and are generated automatically.

    :param config: Configuration object with the settings (general and radiation)
    :type config: cea.config.Configuration
    :return: None
    """
    # reference case need to be provided here
    locator = cea.inputlocator.InputLocator(scenario=config.scenario)
    # the selected buildings are the ones for which the individual radiation script is run for
    # this is only activated when in default.config, run_all_buildings is set as 'False'

    # BUGFIX for #2447 (make sure the Daysim binaries are there before starting the simulation)
    daysim_bin_path, daysim_lib_path = check_daysim_bin_directory(config.radiation.daysim_bin_directory,
                                                                 config.radiation.use_latest_daysim_binaries)
    print('Using Daysim binaries from path: {}'.format(daysim_bin_path))
    print('Using Daysim data from path: {}'.format(daysim_lib_path))

    # Save daysim path to config
    config.radiation.daysim_bin_directory = daysim_bin_path

    # BUGFIX for PyCharm: the PATH variable might not include the daysim-bin-directory, so we add it here
    os.environ["PATH"] = "{bin}{pathsep}{path}".format(bin=config.radiation.daysim_bin_directory,
                                                       pathsep=os.pathsep,
                                                       path=os.environ["PATH"])
    os.environ["RAYPATH"] = daysim_lib_path

    # idiom fix: `X not in Y` instead of `not X in Y`
    if "PROJ_LIB" not in os.environ:
        os.environ["PROJ_LIB"] = os.path.join(os.path.dirname(sys.executable), "Library", "share")
    if "GDAL_DATA" not in os.environ:
        os.environ["GDAL_DATA"] = os.path.join(os.path.dirname(sys.executable), "Library", "share", "gdal")

    print("verifying geometry files")
    zone_path = locator.get_zone_geometry()
    surroundings_path = locator.get_surroundings_geometry()
    print("zone: {zone_path}\nsurroundings: {surroundings_path}".format(zone_path=zone_path,
                                                                        surroundings_path=surroundings_path))
    verify_input_geometry_zone(gpdf.from_file(zone_path))
    verify_input_geometry_surroundings(gpdf.from_file(surroundings_path))

    # import material properties of buildings
    print("Getting geometry materials")
    building_surface_properties = reader_surface_properties(locator)
    building_surface_properties.to_csv(locator.get_radiation_materials())

    print("Creating 3D geometry and surfaces")
    geometry_pickle_dir = os.path.join(locator.get_temporary_folder(),
                                       "{}_radiation_geometry_pickle".format(config.scenario_name))
    print("Saving geometry pickle files in: {}".format(geometry_pickle_dir))

    # create geometrical faces of terrain and buildings
    geometry_terrain, zone_building_names, surroundings_building_names = geometry_generator.geometry_main(
        locator, config, geometry_pickle_dir)

    # daysim_bin_directory might contain two paths (e.g. "C:\Daysim\bin;C:\Daysim\lib") - in which case,
    # only use the "bin" folder
    bin_directory = [d for d in config.radiation.daysim_bin_directory.split(";")
                     if not d.endswith("lib")][0]
    daysim_staging_location = os.path.join(locator.get_temporary_folder(), 'cea_radiation')
    cea_daysim = CEADaySim(daysim_staging_location, bin_directory)

    # create radiance input files
    print("Creating radiance material file")
    cea_daysim.create_radiance_material(building_surface_properties)
    print("Creating radiance geometry file")
    cea_daysim.create_radiance_geometry(geometry_terrain, building_surface_properties, zone_building_names,
                                        surroundings_building_names, geometry_pickle_dir)

    print("Converting files for DAYSIM")
    weather_file = locator.get_weather_file()
    print('Transforming weather files to daysim format')
    cea_daysim.execute_epw2wea(weather_file)
    print('Transforming radiance files to daysim format')
    cea_daysim.execute_radfiles2daysim()

    time1 = time.time()
    radiation_singleprocessing(cea_daysim, zone_building_names, locator, config.radiation,
                               geometry_pickle_dir, num_processes=config.get_number_of_processes())

    print("Daysim simulation finished in %.2f mins" % ((time.time() - time1) / 60.0))
def operation(locator, config):
    """Run the MPC building operation optimization, solve it and write results.

    :param locator: InputLocator with the scenario's input/output paths
    :type locator: cea.inputlocator.InputLocator
    :param config: configuration object (reads the ``mpc-building`` section)
    :type config: cea.config.Configuration
    :return: None
    """
    # local variables
    scenario_name = config.scenario_name  # scenario_name
    weather_path = locator.get_weather_file()
    threads = config.get_number_of_processes()
    time_start = config.mpc_building.time_start
    time_end = config.mpc_building.time_end
    # build linear model based on set points in cea/ constant_temperature/ or set-point & set-back temperatures
    set_temperature_goal = config.mpc_building.set_temperature_goal
    # specify the temperature for set_temperature_goal = constant_temperature
    constant_temperature = config.mpc_building.constant_temperature
    pricing_scheme = config.mpc_building.pricing_scheme  # constant or dynamic prices
    # specify the el. price for pricing_scheme = constant_price
    constant_price = config.mpc_building.constant_price

    ## specify temperature control strategies
    min_max_source = config.mpc_building.min_max_source
    # specify temperature range if min_max_source = constant
    min_constant_temperature = config.mpc_building.min_constant_temperature
    max_constant_temperature = config.mpc_building.max_constant_temperature
    # specify allowable temperature difference from the set/set-back temperatures if min_max_source = occupancy
    delta_set = config.mpc_building.delta_set
    delta_setback = config.mpc_building.delta_setback

    # local constants
    parameter_set = PARAMETER_SET
    time_step_ts = TIME_STEP_TS
    solver_name = SOLVER_NAME
    time_limit = TIME_LIMIT
    alpha = ALPHA  # factor for electricity cost, normally "1"
    beta = BETA  # factor for penalty of violating the set-points

    # BUGFIX: time.clock() was removed in Python 3.8 -- use time.perf_counter()
    t0 = time.perf_counter()

    (date_and_time_prediction, date_and_time_prediction_plus_1, time_step, buildings_dic, buildings_names,
     states_index, controls_index, outputs_index, temperatures_index, cool_index, heating_boolean,
     cooling_boolean, occupancy_per_building_cardinal, occupancy_per_building_list, set_temperatures_dic,
     initial_state_dic, minimum_output_dic, maximum_output_dic, em_efficiency_mean_dic, Qcsmax_Wm2_dic,
     gross_floor_area_m2, price_vector) = get_optimization_inputs(locator, weather_path, scenario_name,
                                                                  parameter_set, time_start, time_end,
                                                                  time_step_ts, set_temperature_goal,
                                                                  constant_temperature, pricing_scheme,
                                                                  constant_price, min_max_source,
                                                                  min_constant_temperature,
                                                                  max_constant_temperature, delta_set,
                                                                  delta_setback)

    print('Processing: Setup optimization model')
    m = operation_optimization.main(alpha, beta, date_and_time_prediction, date_and_time_prediction_plus_1,
                                    time_step, buildings_dic, buildings_names, states_index, controls_index,
                                    outputs_index, temperatures_index, cool_index, heating_boolean,
                                    cooling_boolean, occupancy_per_building_cardinal,
                                    occupancy_per_building_list, set_temperatures_dic, initial_state_dic,
                                    minimum_output_dic, maximum_output_dic, em_efficiency_mean_dic,
                                    Qcsmax_Wm2_dic, gross_floor_area_m2, price_vector)

    print('Processing: Solve optimization')
    opt = SolverFactory(solver_name)  # Create a solver
    if time_limit > 0:
        # solver-specific option name for the wall-clock limit
        if solver_name == 'cplex':
            opt.options['timelimit'] = time_limit
        elif solver_name == 'gurobi':
            opt.options['TimeLimit'] = time_limit
    opt.options['threads'] = threads
    opt.solve(m, tee=True)

    print('Processing: Write results')
    output_folder = "mpc-building"  # this is an identifier for the location of the output folder
    operation_write_results.main(locator, m, output_folder)

    print('Completed.')
    print('Total time: {:.2f} seconds'.format(time.perf_counter() - t0))