def test_mixed_use_schedules(self):
    """Compare the calculated mixed-use schedules at REFERENCE_TIME against the
    reference results stored in the test configuration file."""
    ref_locator = ReferenceCaseOpenLocator()

    # build the schedules for a 50/50 office/industrial mix
    uses = ['OFFICE', 'INDUSTRIAL']
    use_shares = {'OFFICE': 0.5, 'INDUSTRIAL': 0.5}
    globals_ctx = GlobalVariables()
    hourly_index = pd.date_range(globals_ctx.date_start, periods=8760, freq='H')
    schedules, archetype_values = schedule_maker(hourly_index, ref_locator, uses)
    computed = calc_schedules(uses, schedules, use_shares, archetype_values)

    # load the expected values written by create_test_data
    parser = ConfigParser.SafeConfigParser()
    parser.read(get_test_config_path())
    expected = json.loads(parser.get('test_mixed_use_schedules', 'reference_results'))

    for name in expected:
        self.assertAlmostEqual(computed[name][REFERENCE_TIME], expected[name], places=4,
                               msg="Schedule '%s' at time %s, %f != %f" % (
                                   name, str(REFERENCE_TIME),
                                   computed[name][REFERENCE_TIME], expected[name]))
def properties_and_schedule(locator, region, year, use_daysim_radiation, override_variables=False):
    """Build the building-properties model and the occupancy schedules for one scenario.

    NOTE: this function is called from the Neural network - keep its interface stable!

    :param locator: input locator for the scenario
    :param region: region key passed through to the schedule maker
    :param year: calendar year used to build the hourly date index
    :param use_daysim_radiation: forwarded to BuildingProperties
    :param override_variables: forwarded to BuildingProperties
    :returns: tuple of (building_properties, schedules_dict, date)
    """
    hourly_index = pd.date_range(str(year) + '/01/01', periods=8760, freq='H')

    # building properties model
    props = BuildingProperties(locator, use_daysim_radiation, region, override_variables)

    # schedules model: one entry per use type found in the occupancy table
    uses = list(props._prop_occupancy.columns)
    schedules, archetype_values = occupancy_model.schedule_maker(region, hourly_index, locator, uses)
    schedules_dict = {
        'list_uses': uses,
        'archetype_schedules': schedules,
        'occupancy_densities': archetype_values['people'],
        'archetype_values': archetype_values,
    }
    return props, schedules_dict, hourly_index
def test_mixed_use_schedules(self):
    """Check mixed-use schedule values at REFERENCE_TIME against the stored reference results."""
    cea_config = cea.config.Configuration(cea.config.DEFAULT_CONFIG)
    stochastic_occupancy = cea_config.demand.use_stochastic_occupancy
    globals_ctx = GlobalVariables()
    globals_ctx.config = cea_config
    ref_locator = ReferenceCaseOpenLocator()
    hourly_index = pd.date_range(globals_ctx.date_start, periods=8760, freq='H')

    # pick a single building and force a 50/50 office/industrial mix on it
    props = BuildingProperties(ref_locator, globals_ctx, False, 'CH', False)
    bpr = props['B01']
    uses = ['OFFICE', 'INDUSTRIAL']
    bpr.occupancy = {'OFFICE': 0.5, 'INDUSTRIAL': 0.5}

    # calculate schedules
    schedules, archetype_values = schedule_maker('CH', hourly_index, ref_locator, uses)
    computed = calc_schedules(uses, schedules, bpr, archetype_values, stochastic_occupancy)

    # load the expected values from the test configuration file
    parser = ConfigParser.SafeConfigParser()
    parser.read(get_test_config_path())
    expected = json.loads(parser.get('test_mixed_use_schedules', 'reference_results'))
    for name in expected:
        self.assertAlmostEqual(computed[name][REFERENCE_TIME], expected[name], places=4,
                               msg="Schedule '%s' at time %s, %f != %f" % (
                                   name, str(REFERENCE_TIME),
                                   computed[name][REFERENCE_TIME], expected[name]))
def create_test_data():
    """Create test data to compare against - run this the first time you make changes that
    affect the results. Note, this will overwrite the previous test data."""
    parser = ConfigParser.SafeConfigParser()
    parser.read(get_test_config_path())

    # snapshot of the mixed-use archetype values
    if not parser.has_section('test_mixed_use_archetype_values'):
        parser.add_section('test_mixed_use_archetype_values')
    locator = ReferenceCaseOpenLocator()
    archetype_results = calculate_test_mixed_use_archetype_values_results(locator)
    parser.set('test_mixed_use_archetype_values', 'expected_results', archetype_results.to_json())

    # calculate schedules for a 50/50 office/industrial mix
    uses = ['OFFICE', 'INDUSTRIAL']
    use_shares = {'OFFICE': 0.5, 'INDUSTRIAL': 0.5}
    globals_ctx = GlobalVariables()
    hourly_index = pd.date_range(globals_ctx.date_start, periods=8760, freq='H')
    schedules, archetype_values = schedule_maker(hourly_index, locator, uses)
    computed = calc_schedules(uses, schedules, use_shares, archetype_values)

    # snapshot of every schedule's value at the reference hour
    if not parser.has_section('test_mixed_use_schedules'):
        parser.add_section('test_mixed_use_schedules')
    snapshot = {name: computed[name][REFERENCE_TIME] for name in computed.keys()}
    parser.set('test_mixed_use_schedules', 'reference_results', json.dumps(snapshot))

    with open(get_test_config_path(), 'w') as f:
        parser.write(f)
def setUpClass(cls):
    """Extract the open reference case to the temp dir and precompute the fixtures
    (locator, weather data, building properties, schedules) shared by all tests."""
    import zipfile
    import tempfile
    import cea.examples

    # use a context manager so the zip file handle is released (the original leaked it)
    archive_path = os.path.join(os.path.dirname(cea.examples.__file__), 'reference-case-open.zip')
    with zipfile.ZipFile(archive_path) as archive:
        archive.extractall(tempfile.gettempdir())
    reference_case = os.path.join(tempfile.gettempdir(), 'reference-case-open', 'baseline')
    cls.locator = InputLocator(reference_case)
    cls.gv = GlobalVariables()
    weather_path = cls.locator.get_default_weather()
    cls.weather_data = epwreader.epw_reader(weather_path)[[
        'drybulb_C', 'relhum_percent', 'windspd_ms', 'skytemp_C']]

    # run properties script to generate the building-properties input files
    import cea.demand.preprocessing.properties
    cea.demand.preprocessing.properties.properties(cls.locator, True, True, True, True)

    cls.building_properties = BuildingProperties(cls.locator, cls.gv)
    cls.date = pd.date_range(cls.gv.date_start, periods=8760, freq='H')
    cls.list_uses = cls.building_properties.list_uses()
    cls.archetype_schedules, cls.archetype_values = schedule_maker(cls.date, cls.locator,
                                                                   cls.list_uses)
    cls.occupancy_densities = cls.archetype_values['people']
    cls.usage_schedules = {'list_uses': cls.list_uses,
                           'archetype_schedules': cls.archetype_schedules,
                           'occupancy_densities': cls.occupancy_densities,
                           'archetype_values': cls.archetype_values}
def main():
    """Recompute and print the reference values used by the thermal-loads tests."""
    locator = InputLocator(REFERENCE_CASE)
    gv = GlobalVariables()
    weather_path = locator.get_default_weather()
    weather_data = epwreader.epw_reader(weather_path)[[
        'drybulb_C', 'relhum_percent', 'windspd_ms', 'skytemp_C']]
    building_properties = BuildingProperties(locator, gv)
    date = pd.date_range(gv.date_start, periods=8760, freq='H')
    list_uses = building_properties.list_uses()
    schedules = schedule_maker(date, locator, list_uses)
    usage_schedules = {'list_uses': list_uses, 'schedules': schedules}

    print("data for test_calc_thermal_loads_new_ventilation:")
    # use the print() form consistently (the original mixed a Py2 print statement in here)
    print(building_properties.list_building_names())
    bpr = building_properties['B01']
    result = calc_thermal_loads('B01', bpr, weather_data, usage_schedules, date, gv, locator)

    # test the building csv file
    df = pd.read_csv(locator.get_demand_results_file('B01'))
    expected_columns = list(df.columns)
    print("expected_columns = %s" % repr(expected_columns))
    value_columns = [u'Ealf_kWh', u'Eauxf_kWh', u'Edataf_kWh', u'Ef_kWh', u'QCf_kWh', u'QHf_kWh',
                     u'Qcdataf_kWh', u'Qcref_kWh', u'Qcs_kWh', u'Qcsf_kWh', u'Qhs_kWh',
                     u'Qhsf_kWh', u'Qww_kWh', u'Qwwf_kWh', u'Tcsf_re_C', u'Thsf_re_C',
                     u'Twwf_re_C', u'Tcsf_sup_C', u'Thsf_sup_C', u'Twwf_sup_C']
    print("values = %s " % repr([df[column].sum() for column in value_columns]))

    print("data for test_calc_thermal_loads_other_buildings:")
    # randomly selected except for B302006716, which has `Af == 0`
    buildings = {'B01': (81124.39400, 150471.05200),
                 'B03': (81255.09200, 150520.01000),
                 'B02': (82176.15300, 150604.85100),
                 'B05': (84058.72400, 150841.56200),
                 'B04': (82356.22600, 150598.43400),
                 'B07': (81052.19000, 150490.94800),
                 'B06': (83108.45600, 150657.24900),
                 'B09': (84491.58100, 150853.54000),
                 'B08': (88572.59000, 151020.09300)}
    for building in buildings.keys():
        bpr = building_properties[building]
        b, qcf_kwh, qhf_kwh = run_for_single_building(building, bpr, weather_data,
                                                      usage_schedules, date, gv, locator)
        print("'%(b)s': (%(qcf_kwh).5f, %(qhf_kwh).5f)," % locals())
def main():
    """Recompute and print the reference values used by the thermal-loads tests."""
    locator = InputLocator(REFERENCE_CASE)
    gv = GlobalVariables()
    weather_path = locator.get_default_weather()
    weather_data = epwreader.epw_reader(weather_path)[['drybulb_C', 'relhum_percent',
                                                       'windspd_ms', 'skytemp_C']]
    building_properties = BuildingProperties(locator, gv)
    date = pd.date_range(gv.date_start, periods=8760, freq='H')
    list_uses = building_properties.list_uses()
    schedules = schedule_maker(date, locator, list_uses)
    usage_schedules = {'list_uses': list_uses, 'schedules': schedules}

    print("data for test_calc_thermal_loads_new_ventilation:")
    # use the print() form consistently (the original mixed a Py2 print statement in here)
    print(building_properties.list_building_names())
    bpr = building_properties['B01']
    result = calc_thermal_loads('B01', bpr, weather_data, usage_schedules, date, gv, locator)

    # test the building csv file
    df = pd.read_csv(locator.get_demand_results_file('B01'))
    expected_columns = list(df.columns)
    print("expected_columns = %s" % repr(expected_columns))
    value_columns = [u'Ealf_kWh', u'Eauxf_kWh', u'Edataf_kWh', u'Ef_kWh', u'QCf_kWh', u'QHf_kWh',
                     u'Qcdataf_kWh', u'Qcref_kWh', u'Qcs_kWh', u'Qcsf_kWh', u'Qhs_kWh',
                     u'Qhsf_kWh', u'Qww_kWh', u'Qwwf_kWh', u'Tcsf_re_C', u'Thsf_re_C',
                     u'Twwf_re_C', u'Tcsf_sup_C', u'Thsf_sup_C', u'Twwf_sup_C']
    print("values = %s " % repr([df[column].sum() for column in value_columns]))

    print("data for test_calc_thermal_loads_other_buildings:")
    # randomly selected except for B302006716, which has `Af == 0`
    buildings = {'B01': (81124.39400, 150471.05200),
                 'B03': (81255.09200, 150520.01000),
                 'B02': (82176.15300, 150604.85100),
                 'B05': (84058.72400, 150841.56200),
                 'B04': (82356.22600, 150598.43400),
                 'B07': (81052.19000, 150490.94800),
                 'B06': (83108.45600, 150657.24900),
                 'B09': (84491.58100, 150853.54000),
                 'B08': (88572.59000, 151020.09300)}
    for building in buildings.keys():
        bpr = building_properties[building]
        b, qcf_kwh, qhf_kwh = run_for_single_building(building, bpr, weather_data,
                                                      usage_schedules, date, gv, locator)
        print("'%(b)s': (%(qcf_kwh).5f, %(qhf_kwh).5f)," % locals())
def setUpClass(cls):
    """Load the reference case (overridable via the REFERENCE_CASE environment variable)
    and precompute the fixtures shared by all tests."""
    # `has_key` is deprecated - use the `in` operator instead
    if 'REFERENCE_CASE' in os.environ:
        cls.locator = InputLocator(os.environ['REFERENCE_CASE'])
    else:
        cls.locator = InputLocator(REFERENCE_CASE)
    cls.gv = GlobalVariables()
    weather_path = cls.locator.get_default_weather()
    cls.weather_data = epwreader.epw_reader(weather_path)[['drybulb_C', 'relhum_percent',
                                                           'windspd_ms', 'skytemp_C']]
    cls.building_properties = BuildingProperties(cls.locator, cls.gv)
    cls.date = pd.date_range(cls.gv.date_start, periods=8760, freq='H')
    cls.list_uses = cls.building_properties.list_uses()
    cls.schedules = schedule_maker(cls.date, cls.locator, cls.list_uses)
    cls.usage_schedules = {'list_uses': cls.list_uses, 'schedules': cls.schedules}
def create_data():
    """Create test data to compare against - run this the first time you make changes that
    affect the results. Note, this will overwrite the previous test data."""
    test_config = ConfigParser.SafeConfigParser()
    test_config.read(get_test_config_path())
    if not test_config.has_section('test_mixed_use_archetype_values'):
        test_config.add_section('test_mixed_use_archetype_values')
    # a single locator instance suffices (the original created a second, identical one)
    locator = ReferenceCaseOpenLocator()
    expected_results = calculate_mixed_use_archetype_values_results(locator)
    test_config.set('test_mixed_use_archetype_values', 'expected_results',
                    expected_results.to_json())

    config = cea.config.Configuration(cea.config.DEFAULT_CONFIG)

    # calculate schedules
    building_properties = BuildingProperties(locator, False)
    bpr = building_properties['B01']
    list_uses = ['OFFICE', 'INDUSTRIAL']
    bpr.occupancy = {'OFFICE': 0.5, 'INDUSTRIAL': 0.5}

    # get year from weather file
    weather_path = locator.get_weather_file()
    weather_data = epwreader.epw_reader(weather_path)[['year']]
    year = weather_data['year'][0]
    date = pd.date_range(str(year) + '/01/01', periods=HOURS_IN_YEAR, freq='H')

    archetype_schedules, archetype_values = schedule_maker('CH', date, locator, list_uses)
    stochastic_occupancy = config.demand.use_stochastic_occupancy
    calculated_schedules = calc_schedules(list_uses, archetype_schedules, bpr, archetype_values,
                                          stochastic_occupancy)

    if not test_config.has_section('test_mixed_use_schedules'):
        test_config.add_section('test_mixed_use_schedules')
    test_config.set('test_mixed_use_schedules', 'reference_results',
                    json.dumps({schedule: calculated_schedules[schedule][REFERENCE_TIME]
                                for schedule in calculated_schedules.keys()}))
    with open(get_test_config_path(), 'w') as f:
        test_config.write(f)
def test_mixed_use_schedules(self):
    """Check mixed-use schedule values at a fixed hour against hard-coded reference results."""
    # get reference case to be tested; close the zip handle when done (the original leaked it)
    archive_path = os.path.join(os.path.dirname(cea.examples.__file__), 'reference-case-open.zip')
    with zipfile.ZipFile(archive_path) as archive:
        archive.extractall(tempfile.gettempdir())
    reference_case = os.path.join(tempfile.gettempdir(), 'reference-case-open', 'baseline')
    locator = InputLocator(reference_case)

    # calculate schedules
    list_uses = ['OFFICE', 'INDUSTRIAL']
    occupancy = {'OFFICE': 0.5, 'INDUSTRIAL': 0.5}
    gv = GlobalVariables()
    date = pd.date_range(gv.date_start, periods=8760, freq='H')
    archetype_schedules, archetype_values = schedule_maker(date, locator, list_uses)
    calculated_schedules = calc_schedules(list_uses, archetype_schedules, occupancy,
                                          archetype_values)

    reference_time = 3456
    reference_results = {'El': 0.1080392156862745,
                         'Qs': 0.0088163265306122462,
                         've': 0.01114606741573034,
                         'Epro': 0.17661721828842394,
                         'people': 0.0080000000000000019,
                         'Ed': 0.0,
                         'Vww': 0.0,
                         'Ea': 0.1340740740740741,
                         'Ere': 0.0,
                         'Vw': 0.0,
                         'X': 0.010264150943396229}
    for schedule in reference_results:
        # compare with a tolerance - exact float equality is brittle; matches the
        # assertAlmostEqual(places=4) convention used by the sibling schedule tests
        self.assertAlmostEqual(calculated_schedules[schedule][reference_time],
                               reference_results[schedule], places=4,
                               msg="Schedule '%s' at time %s, %f != %f" %
                                   (schedule, str(reference_time),
                                    calculated_schedules[schedule][reference_time],
                                    reference_results[schedule]))
def create_test_data():
    """Create test data to compare against - run this the first time you make changes that
    affect the results. Note, this will overwrite the previous test data."""
    # BUG FIX: the original rebound `config` (the SafeConfigParser holding the archetype
    # results) to the CEA Configuration, so the later has_section/set/write calls were made
    # on the wrong object - keep the two objects in separate variables.
    test_config = ConfigParser.SafeConfigParser()
    test_config.read(get_test_config_path())
    if not test_config.has_section('test_mixed_use_archetype_values'):
        test_config.add_section('test_mixed_use_archetype_values')
    locator = ReferenceCaseOpenLocator()
    expected_results = calculate_test_mixed_use_archetype_values_results(locator)
    test_config.set('test_mixed_use_archetype_values', 'expected_results',
                    expected_results.to_json())

    config = cea.config.Configuration(cea.config.DEFAULT_CONFIG)
    gv = GlobalVariables()
    gv.config = config

    # calculate schedules
    building_properties = BuildingProperties(locator, gv, False, 'CH', False)
    bpr = building_properties['B01']
    list_uses = ['OFFICE', 'INDUSTRIAL']
    bpr.occupancy = {'OFFICE': 0.5, 'INDUSTRIAL': 0.5}
    # reuse the configured `gv` (the original created a second, unconfigured GlobalVariables
    # here, silently discarding gv.config)
    date = pd.date_range(gv.date_start, periods=8760, freq='H')
    archetype_schedules, archetype_values = schedule_maker('CH', date, locator, list_uses)
    stochastic_occupancy = config.demand.use_stochastic_occupancy
    calculated_schedules = calc_schedules(list_uses, archetype_schedules, bpr, archetype_values,
                                          stochastic_occupancy)

    if not test_config.has_section('test_mixed_use_schedules'):
        test_config.add_section('test_mixed_use_schedules')
    test_config.set('test_mixed_use_schedules', 'reference_results',
                    json.dumps({schedule: calculated_schedules[schedule][REFERENCE_TIME]
                                for schedule in calculated_schedules.keys()}))
    with open(get_test_config_path(), 'w') as f:
        test_config.write(f)
def test_mixed_use_schedules(self):
    """Compare calculated mixed-use schedules at REFERENCE_TIME with the stored reference
    results from the test configuration file."""
    locator = ReferenceCaseOpenLocator()
    cea_config = cea.config.Configuration(cea.config.DEFAULT_CONFIG)
    cea_config.scenario = locator.scenario
    stochastic_occupancy = cea_config.demand.use_stochastic_occupancy

    # get year from weather file
    weather_path = locator.get_weather_file()
    year = epwreader.epw_reader(weather_path)[['year']]['year'][0]
    hourly_index = pd.date_range(str(year) + '/01/01', periods=HOURS_IN_YEAR, freq='H')

    # pick a single building and force a 50/50 office/industrial mix on it
    props = BuildingProperties(locator, False)
    bpr = props['B01']
    uses = ['OFFICE', 'INDUSTRIAL']
    bpr.occupancy = {'OFFICE': 0.5, 'INDUSTRIAL': 0.5}

    # calculate schedules
    schedules, archetype_values = schedule_maker(hourly_index, locator, uses)
    computed = calc_schedules(uses, schedules, bpr, archetype_values, stochastic_occupancy)

    parser = ConfigParser.SafeConfigParser()
    parser.read(get_test_config_path())
    expected = json.loads(parser.get('test_mixed_use_schedules', 'reference_results'))
    for name in expected:
        self.assertAlmostEqual(computed[name][REFERENCE_TIME], expected[name], places=4,
                               msg="Schedule '%s' at time %s, %f != %f" % (
                                   name, str(REFERENCE_TIME),
                                   computed[name][REFERENCE_TIME], expected[name]))
def demand_calculation(locator, weather_path, gv, use_dynamic_infiltration_calculation=False):
    """
    Algorithm to calculate the hourly demand of energy services in buildings
    using the integrated model of [Fonseca2015]_.

    Produces a demand file per building and a total demand file for the whole
    zone of interest:

    - a csv file for every building with hourly demand data.
    - ``Total_demand.csv``, csv file of yearly demand data per building.

    :param locator: An InputLocator to locate input files
    :type locator: cea.inputlocator.InputLocator
    :param weather_path: A path to the EnergyPlus weather data file (.epw)
    :type weather_path: str
    :param gv: global variables
    :type gv: cea.globalvar.GlobalVariables
    :param use_dynamic_infiltration_calculation: forwarded to the thermal-loads calculation
    :returns: (totals, time_series) as produced by ``gv.demand_writer``, or (None, None)
        when ``gv.print_totals`` is False
    :rtype: tuple

    .. [Fonseca2015] Fonseca, Jimeno A., and Arno Schlueter. "Integrated Model for
        Characterization of Spatiotemporal Building Energy Consumption Patterns in
        Neighborhoods and City Districts." Applied Energy 142 (2015): 247-265.
    """
    # radiation results are a hard prerequisite
    if not os.path.exists(locator.get_radiation()) or not os.path.exists(locator.get_surface_properties()):
        raise ValueError("No radiation file found in scenario. Consider running radiation script first.")
    t0 = time.clock()
    date = pd.date_range(gv.date_start, periods=8760, freq='H')

    # weather model
    weather_data = epwreader.epw_reader(weather_path)[['drybulb_C', 'relhum_percent',
                                                       'windspd_ms', 'skytemp_C']]

    # building properties model
    building_properties = BuildingProperties(locator, gv)

    # schedules model (the 'PFloor' column is excluded from the list of uses)
    list_uses = list(building_properties._prop_occupancy.drop('PFloor', axis=1).columns)
    archetype_schedules, archetype_values = occupancy_model.schedule_maker(date, locator, list_uses)
    schedules_dict = {'list_uses': list_uses,
                      'archetype_schedules': archetype_schedules,
                      'occupancy_densities': archetype_values['people'],
                      'archetype_values': archetype_values}

    # in case gv passes a list of specific buildings to simulate.
    if gv.simulate_building_list:
        list_building_names = gv.simulate_building_list
    else:
        list_building_names = building_properties.list_building_names()

    # demand
    if gv.multiprocessing and mp.cpu_count() > 1:
        thermal_loads_all_buildings_multiprocessing(building_properties, date, gv, locator,
                                                    list_building_names, schedules_dict,
                                                    weather_data,
                                                    use_dynamic_infiltration_calculation)
    else:
        thermal_loads_all_buildings(building_properties, date, gv, locator, list_building_names,
                                    schedules_dict, weather_data,
                                    use_dynamic_infiltration_calculation)

    # BUG FIX: the original returned `totals, time_series` unconditionally, raising an
    # UnboundLocalError whenever gv.print_totals was False - initialize them first.
    totals, time_series = None, None
    if gv.print_totals:
        totals, time_series = gv.demand_writer.write_totals_csv(building_properties, locator)
    gv.log('done - time elapsed: %(time_elapsed).2f seconds', time_elapsed=time.clock() - t0)
    return totals, time_series
def demand_calculation(locator, weather_path, gv):
    """
    Calculate the hourly demand of energy services in buildings using the integrated model of
    Fonseca et al. 2015. Applied energy. (http://dx.doi.org/10.1016/j.apenergy.2014.12.068)

    :param locator: An InputLocator to locate input files (radiation, surface properties,
        building geometry, hvac, thermal, occupancy, architecture, age, comfort and
        internal-loads inputs of the scenario)
    :type locator: inputlocator.InputLocator
    :param weather_path: A path to the EnergyPlus weather data file (.epw)
    :type weather_path: str
    :param gv: A GlobalVariable (context) instance
    :type gv: globalvar.GlobalVariable
    :returns: totals as produced by ``write_totals_csv``

    Side effects: writes one csv file per building with hourly demand data (e.g.
    ``B153767T.csv``) and ``Total_demand.csv`` with yearly demand data per building.
    """
    start = time.clock()
    hourly_index = pd.date_range(gv.date_start, periods=8760, freq='H')

    # weather model
    weather_data = epwreader.epw_reader(weather_path)[
        ['drybulb_C', 'relhum_percent', 'windspd_ms', 'skytemp_C']]

    # building properties model
    building_properties = BuildingProperties(locator, gv)

    # schedules model (the 'PFloor' column is excluded from the list of uses)
    uses = list(building_properties._prop_occupancy.drop('PFloor', axis=1).columns)
    schedules = occupancy_model.schedule_maker(hourly_index, locator, uses)
    schedules_dict = {'list_uses': uses, 'schedules': schedules}

    # demand model: run in parallel when configured and more than one core is available
    building_count = len(building_properties)
    if gv.multiprocessing and mp.cpu_count() > 1:
        thermal_loads_all_buildings_multiprocessing(building_properties, hourly_index, gv,
                                                    locator, building_count, schedules_dict,
                                                    weather_data)
    else:
        thermal_loads_all_buildings(building_properties, hourly_index, gv, locator,
                                    building_count, schedules_dict, weather_data)
    totals = write_totals_csv(building_properties, locator, gv)
    gv.log('done - time elapsed: %(time_elapsed).2f seconds',
           time_elapsed=time.clock() - start)
    return totals
def main(output_file):
    """Run the demand calculation on the open reference case and record the expected test
    values for test_calc_thermal_loads in `output_file` (an INI-style config file)."""
    import cea.examples
    # use a context manager so the zip file handle is released (the original leaked it)
    archive_path = os.path.join(os.path.dirname(cea.examples.__file__), 'reference-case-open.zip')
    with zipfile.ZipFile(archive_path) as archive:
        archive.extractall(tempfile.gettempdir())
    reference_case = os.path.join(tempfile.gettempdir(), 'reference-case-open', 'baseline')
    locator = InputLocator(reference_case)
    gv = GlobalVariables()
    weather_path = locator.get_default_weather()
    weather_data = epwreader.epw_reader(weather_path)[[
        'drybulb_C', 'relhum_percent', 'windspd_ms', 'skytemp_C']]

    # run properties script
    import cea.demand.preprocessing.properties
    cea.demand.preprocessing.properties.properties(locator, True, True, True, True)

    building_properties = BuildingProperties(locator, gv)
    date = pd.date_range(gv.date_start, periods=8760, freq='H')
    list_uses = building_properties.list_uses()
    archetype_schedules, archetype_values = schedule_maker(date, locator, list_uses)
    usage_schedules = {'list_uses': list_uses,
                       'archetype_schedules': archetype_schedules,
                       'occupancy_densities': archetype_values['people'],
                       'archetype_values': archetype_values}

    print("data for test_calc_thermal_loads:")
    print(building_properties.list_building_names())
    bpr = building_properties['B01']
    result = calc_thermal_loads('B01', bpr, weather_data, usage_schedules, date, gv, locator)

    # test the building csv file
    df = pd.read_csv(locator.get_demand_results_file('B01'))
    expected_columns = list(df.columns)
    print("expected_columns = %s" % repr(expected_columns))

    config = ConfigParser.SafeConfigParser()
    config.read(output_file)
    value_columns = [u'Ealf_kWh', u'Eauxf_kWh', u'Edataf_kWh', u'Ef_kWh', u'QCf_kWh', u'QHf_kWh',
                     u'Qcdataf_kWh', u'Qcref_kWh', u'Qcs_kWh', u'Qcsf_kWh', u'Qhs_kWh',
                     u'Qhsf_kWh', u'Qww_kWh', u'Qwwf_kWh', u'Tcsf_re_C', u'Thsf_re_C',
                     u'Twwf_re_C', u'Tcsf_sup_C', u'Thsf_sup_C', u'Twwf_sup_C']
    values = [float(df[column].sum()) for column in value_columns]
    print("values = %s " % repr(values))
    if not config.has_section("test_calc_thermal_loads"):
        config.add_section("test_calc_thermal_loads")
    config.set("test_calc_thermal_loads", "value_columns", json.dumps(value_columns))
    # use the print() form consistently (the original mixed a Py2 print statement in here)
    print(values)
    config.set("test_calc_thermal_loads", "values", json.dumps(values))

    print("data for test_calc_thermal_loads_other_buildings:")
    buildings = ['B01', 'B03', 'B02', 'B05', 'B04', 'B07', 'B06', 'B09', 'B08']
    results = {}
    for building in buildings:
        bpr = building_properties[building]
        b, qcf_kwh, qhf_kwh = run_for_single_building(building, bpr, weather_data,
                                                      usage_schedules, date, gv, locator)
        print("'%(b)s': (%(qcf_kwh).5f, %(qhf_kwh).5f)," % locals())
        results[building] = (qcf_kwh, qhf_kwh)
    if not config.has_section("test_calc_thermal_loads_other_buildings"):
        config.add_section("test_calc_thermal_loads_other_buildings")
    config.set("test_calc_thermal_loads_other_buildings", "results", json.dumps(results))
    with open(output_file, 'w') as f:
        config.write(f)
    print("Wrote output to %(output_file)s" % locals())
def main():
    """Extract the open reference case, run the demand calculation on it, and print the
    reference values used by test_calc_thermal_loads."""
    import zipfile
    import cea.examples
    import tempfile
    # use a context manager so the zip file handle is released (the original leaked it)
    archive_path = os.path.join(os.path.dirname(cea.examples.__file__), 'reference-case-open.zip')
    with zipfile.ZipFile(archive_path) as archive:
        archive.extractall(tempfile.gettempdir())
    reference_case = os.path.join(tempfile.gettempdir(), 'reference-case-open', 'baseline')
    locator = InputLocator(reference_case)
    gv = GlobalVariables()
    weather_path = locator.get_default_weather()
    weather_data = epwreader.epw_reader(weather_path)[[
        'drybulb_C', 'relhum_percent', 'windspd_ms', 'skytemp_C']]

    # run properties script
    import cea.demand.preprocessing.properties
    cea.demand.preprocessing.properties.properties(locator, True, True, True, True)

    building_properties = BuildingProperties(locator, gv)
    date = pd.date_range(gv.date_start, periods=8760, freq='H')
    list_uses = building_properties.list_uses()
    archetype_schedules, archetype_values = schedule_maker(date, locator, list_uses)
    usage_schedules = {'list_uses': list_uses,
                       'archetype_schedules': archetype_schedules,
                       'occupancy_densities': archetype_values['people'],
                       'archetype_values': archetype_values}

    print("data for test_calc_thermal_loads:")
    # use the print() form consistently (the original mixed a Py2 print statement in here)
    print(building_properties.list_building_names())
    bpr = building_properties['B01']
    result = calc_thermal_loads('B01', bpr, weather_data, usage_schedules, date, gv, locator)

    # test the building csv file
    df = pd.read_csv(locator.get_demand_results_file('B01'))
    expected_columns = list(df.columns)
    print("expected_columns = %s" % repr(expected_columns))
    value_columns = [u'Ealf_kWh', u'Eauxf_kWh', u'Edataf_kWh', u'Ef_kWh', u'QCf_kWh', u'QHf_kWh',
                     u'Qcdataf_kWh', u'Qcref_kWh', u'Qcs_kWh', u'Qcsf_kWh', u'Qhs_kWh',
                     u'Qhsf_kWh', u'Qww_kWh', u'Qwwf_kWh', u'Tcsf_re_C', u'Thsf_re_C',
                     u'Twwf_re_C', u'Tcsf_sup_C', u'Thsf_sup_C', u'Twwf_sup_C']
    print("values = %s " % repr([df[column].sum() for column in value_columns]))

    print("data for test_calc_thermal_loads_other_buildings:")
    # randomly selected except for B302006716, which has `Af == 0`
    buildings = ['B01', 'B03', 'B02', 'B05', 'B04', 'B07', 'B06', 'B09', 'B08']
    for building in buildings:
        bpr = building_properties[building]
        b, qcf_kwh, qhf_kwh = run_for_single_building(building, bpr, weather_data,
                                                      usage_schedules, date, gv, locator)
        print("'%(b)s': (%(qcf_kwh).5f, %(qhf_kwh).5f)," % locals())
def demand_calculation(locator, weather_path, gv):
    """
    Calculate the hourly demand of energy services in buildings using the integrated model of
    Fonseca et al. 2015. Applied energy. (http://dx.doi.org/10.1016/j.apenergy.2014.12.068)

    :param locator: An InputLocator to locate input files (radiation, surface properties and
        the building geometry / hvac / thermal / occupancy / architecture / age / comfort /
        internal-loads inputs of the scenario)
    :type locator: inputlocator.InputLocator
    :param weather_path: A path to the EnergyPlus weather data file (.epw)
    :type weather_path: str
    :param gv: A GlobalVariable (context) instance
    :type gv: globalvar.GlobalVariable
    :returns: totals as produced by ``write_totals_csv``

    Side effects: writes a csv file per building with hourly demand data (e.g.
    ``B153767T.csv``) and ``Total_demand.csv`` with yearly demand data per building.
    """
    t_start = time.clock()
    date = pd.date_range(gv.date_start, periods=8760, freq='H')

    # weather model: keep only the columns consumed by the demand model
    weather_cols = ['drybulb_C', 'relhum_percent', 'windspd_ms', 'skytemp_C']
    weather_data = epwreader.epw_reader(weather_path)[weather_cols]

    # building properties model
    properties = BuildingProperties(locator, gv)

    # schedules model (the 'PFloor' column is excluded from the list of uses)
    use_types = list(properties._prop_occupancy.drop('PFloor', axis=1).columns)
    schedules_dict = {'list_uses': use_types,
                      'schedules': occupancy_model.schedule_maker(date, locator, use_types)}

    # demand model: dispatch to the parallel runner when allowed and useful
    n_buildings = len(properties)
    runner = (thermal_loads_all_buildings_multiprocessing
              if gv.multiprocessing and mp.cpu_count() > 1
              else thermal_loads_all_buildings)
    runner(properties, date, gv, locator, n_buildings, schedules_dict, weather_data)

    totals = write_totals_csv(properties, locator, gv)
    gv.log('done - time elapsed: %(time_elapsed).2f seconds',
           time_elapsed=time.clock() - t_start)
    return totals