def test_run_olderv_problematic(clean_config):
    """Will run eplus on a file that needs to be upgraded and that should fail.

    Will be ignored in the test suite.
    """
    file = "tests/input_data/problematic/RefBldgLargeOfficeNew2004_v1.4_7.2_5A_USA_IL_CHICAGO-OHARE.idf"
    wf = "tests/input_data/CAN_PQ_Montreal.Intl.AP.716270_CWEC.epw"
    tr.run_eplus(
        file, wf, prep_outputs=True, annual=True, expandobjects=True, verbose="q"
    )

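# Hedged sketch (not from the original source): the docstring above says this test
# is ignored in the suite; with plain pytest that is usually done with a skip
# marker. The decorator and reason string below are illustrative assumptions only.
#
# import pytest
#
# @pytest.mark.skip(reason="older-version file whose upgrade is expected to fail")
# def test_run_olderv_problematic(clean_config):
#     ...
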
def test_old_than_change_args(clean_config):
    """Should upgrade the file only once, even if run_eplus args are changed afterwards."""
    from trnslator import run_eplus

    file = (
        get_eplus_dirs(settings.ep_version)
        / "ExampleFiles"
        / "RefBldgQuickServiceRestaurantNew2004_Chicago.idf"
    )
    epw = "tests/input_data/CAN_PQ_Montreal.Intl.AP.716270_CWEC.epw"
    idf = run_eplus(file, epw, prep_outputs=True, output_report="sql_file")
    idf = run_eplus(file, epw, prep_outputs=True, output_report="sql_file")
    idf = run_eplus(file, epw, prep_outputs=True, output_report="sql")

def test_run_olderv(clean_config, ep_version):
    """Will run eplus on a file that needs to be upgraded and one that does not."""
    tr.settings.use_cache = False
    files = [
        "tests/input_data/problematic/nat_ventilation_SAMPLE0.idf",
        get_eplus_dirs(settings.ep_version) / "ExampleFiles" / "5ZoneNightVent1.idf",
    ]
    wf = "tests/input_data/CAN_PQ_Montreal.Intl.AP.716270_CWEC.epw"
    files = tr.copy_file(files)
    rundict = {
        file: dict(
            eplus_file=file,
            weather_file=wf,
            ep_version=ep_version,
            annual=True,
            prep_outputs=True,
            expandobjects=True,
            verbose="q",
            output_report="sql",
        )
        for file in files
    }
    result = {file: tr.run_eplus(**rundict[file]) for file in files}

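# Hedged sketch (not from the original source): the `ep_version` argument above is
# supplied by a fixture; a minimal conftest-style definition could look like the
# following. The parameter value is an illustrative assumption, not the project's
# actual configuration.
#
# @pytest.fixture(params=["9-2-0"])
# def ep_version(request):
#     return request.param
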
def test_add_object_and_run_ep(self, config, converttesteasy):
    # Gets from fixture paths to files and IDF object to be used in test
    (
        idf,
        idf_file,
        weather_file,
        window_lib,
        trnsidf_exe,
        template,
        output_folder,
        kwargs,
    ) = converttesteasy
    ep_version = None
    # Adds Output variable in IDF
    outputs = [
        {
            "ep_object": "Output:Variable".upper(),
            "kwargs": dict(
                Variable_Name="Zone Thermostat Heating Setpoint Temperature",
                Reporting_Frequency="hourly",
                save=True,
            ),
        },
        {
            "ep_object": "Output:Variable".upper(),
            "kwargs": dict(
                Variable_Name="Zone Thermostat Cooling Setpoint Temperature",
                Reporting_Frequency="hourly",
                save=True,
            ),
        },
    ]
    # Runs EnergyPlus Simulation
    _, idf = run_eplus(
        idf_file,
        weather_file,
        output_directory=None,
        ep_version=ep_version,
        output_report=None,
        prep_outputs=outputs,
        design_day=False,
        annual=True,
        expandobjects=True,
        return_idf=True,
    )
    # Makes sure the idf variable is an IDF object
    assert isinstance(idf, tr.idfclass.IDF)

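# Hedged follow-up sketch (not from the original source): if the returned IDF
# exposes eppy-style `idfobjects`, the output variables added through
# `prep_outputs` could also be checked explicitly. The attribute access below is
# an assumption about the IDF class, not a documented guarantee.
#
# variables = [obj.Variable_Name for obj in idf.idfobjects["OUTPUT:VARIABLE"]]
# assert "Zone Thermostat Heating Setpoint Temperature" in variables
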
def test_small_home_data(clean_config):
    file = (
        get_eplus_dirs(settings.ep_version)
        / "ExampleFiles"
        / "BasicsFiles"
        / "AdultEducationCenter.idf"
    )
    file = tr.copy_file(file)
    wf = "tests/input_data/CAN_PQ_Montreal.Intl.AP.716270_CWEC.epw"
    return tr.run_eplus(
        file, wf, prep_outputs=True, design_day=True, expandobjects=True, verbose="q"
    )

def run_schedules_idf(config):
    files = run_eplus(
        idf_file,
        weather_file="tests/input_data/CAN_PQ_Montreal.Intl.AP.716270_CWEC.epw",
        annual=True,
        readvars=True,
        include=[
            get_eplus_dirs(settings.ep_version)
            / "DataSets"
            / "TDV"
            / "TDV_2008_kBtu_CTZ06.csv"
        ],
        return_files=True,
    )
    cache_dir = files[1][0].dirname()
    csv = next(iter(cache_dir.glob("*out.csv")))
    yield csv

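# Hedged usage sketch (not from the original source): run_schedules_idf yields the
# path to the "*out.csv" produced by the simulation, so a consuming test would
# simply request the fixture by name. The test name and body are hypothetical.
#
# def test_schedules_csv_exists(run_schedules_idf):
#     assert run_schedules_idf.exists()
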
def energy_series(config, request):
    from trnslator import ReportData

    outputs = {
        "ep_object": "Output:Variable".upper(),
        "kwargs": {
            "Key_Value": "OCCUPY-1",
            "Variable_Name": "Schedule Value",
            "Reporting_Frequency": "Hourly",
        },
    }
    wf = "tests/input_data/CAN_PQ_Montreal.Intl.AP.716270_CWEC.epw"
    sql = tr.run_eplus(
        request.param,
        weather_file=wf,
        output_report="sql_file",
        prep_outputs=[outputs],
        annual=True,
        expandobjects=True,
    )
    report = ReportData.from_sqlite(
        sql,
        table_name=("Heating:Electricity", "Heating:Gas", "Heating:DistrictHeating"),
    )
    hl = EnergySeries.from_sqlite(
        report,
        name="Heating",
        normalize=False,
        sort_values=False,
        concurrent_sort=False,
        to_units="kWh",
    )
    yield hl

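# Hedged usage sketch (not from the original source): the fixture above reads
# `request.param`, so a consuming test would parametrize it indirectly. The test
# name and IDF path below are hypothetical placeholders.
#
# @pytest.mark.parametrize(
#     "energy_series",
#     ["tests/input_data/some_model.idf"],
#     indirect=True,
# )
# def test_energy_series_name(energy_series):
#     assert energy_series.name == "Heating"
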
def test_write_to_b18(self, config, converttesteasy):
    # Gets from fixture paths to files and IDF object to be used in test
    (
        idf,
        idf_file,
        weather_file,
        window_lib,
        trnsidf_exe,
        template,
        output_folder,
        kwargs,
    ) = converttesteasy
    # Runs EnergyPlus Simulation
    res = run_eplus(
        idf_file,
        weather_file,
        output_directory=None,
        ep_version=None,
        output_report="htm",
        prep_outputs=True,
        design_day=True,
    )
    # Copy IDF object, making sure we don't change/overwrite the original IDF file
    idf_2 = deepcopy(idf)
    # Clean names of idf objects (e.g. 'MATERIAL')
    log_clear_names = False
    clear_name_idf_objects(idf_2, log_clear_names)
    # Get old:new names equivalence
    old_new_names = pd.read_csv(
        os.path.join(
            settings.data_folder,
            Path(idf_file).basename().stripext() + "_old_new_names_equivalence.csv",
        )
    ).to_dict()
    # Get objects from IDF
    (
        buildingSurfs,
        buildings,
        constructions,
        equipments,
        fenestrationSurfs,
        globGeomRules,
        lights,
        locations,
        materialAirGap,
        materialNoMass,
        materials,
        peoples,
        versions,
        zones,
        zonelists,
    ) = get_idf_objects(idf_2)
    # Read a b18 file and store its lines in a variable (b18_lines)
    b18_path = "tests/input_data/trnsys/T3D_simple_2_zone.b18"
    with open(b18_path) as b18_file:
        b18_lines = b18_file.readlines()
    # Initialize variable
    schedules_not_written = []
    # Gets conditioning (heating and cooling) info from simulation results
    heat_name = {}
    for i in range(0, len(res["Zone Sensible Heating"])):
        key = res["Zone Sensible Heating"].iloc[i, 0]
        name = "HEAT_z" + str(res["Zone Sensible Heating"].iloc[i].name)
        heat_name[key] = name
    cool_name = {}
    for i in range(0, len(res["Zone Sensible Cooling"])):
        key = res["Zone Sensible Cooling"].iloc[i, 0]
        name = "COOL_z" + str(res["Zone Sensible Cooling"].iloc[i].name)
        cool_name[key] = name
    # Selects only the first 2 zones
    zones = zones[0:2]
    peoples = peoples[0:2]
    equipments = equipments[0:2]
    lights = lights[0:2]
    # Writes infiltration in b18_lines (b18 file)
    infilt_to_b18(b18_lines, zones, res)
    # Tests both cases: schedules taken as inputs or written in b18_lines
    for cond in [True, False]:
        schedule_as_input = cond
        gains_to_b18(
            b18_lines,
            zones,
            zonelists,
            peoples,
            lights,
            equipments,
            schedules_not_written,
            res,
            old_new_names,
            schedule_as_input,
        )
    # Writes conditioning (heating and cooling) in b18_lines (b18 file)
    conditioning_to_b18(b18_lines, heat_name, cool_name, zones, old_new_names)
    # Asserts infiltration, internal gains and conditioning are written in b18_lines
    assert "INFILTRATION Constant" + "\n" in b18_lines
    assert " INFILTRATION = Constant" + "\n" in b18_lines
    assert any(peoples[0].Name in mystring for mystring in b18_lines[200:])
    assert any(lights[0].Name in mystring for mystring in b18_lines[200:])
    assert any(equipments[0].Name in mystring for mystring in b18_lines[200:])
    assert any(
        heat_name[old_new_names[zones[0].Name.upper()][0]] in mystring
        for mystring in b18_lines[200:]
    )

def test_write_gains_conditioning(self, config, converttest):
    # Gets from fixture paths to files and IDF object to be used in test
    (
        idf,
        idf_file,
        weather_file,
        window_lib,
        trnsidf_exe,
        template,
        output_folder,
        _,
    ) = converttest
    # Gets EnergyPlus version
    ep_version = settings.ep_version
    # Adds Output variable in IDF
    outputs = [
        {
            "ep_object": "Output:Variable".upper(),
            "kwargs": dict(
                Variable_Name="Zone Thermostat Heating Setpoint Temperature",
                Reporting_Frequency="hourly",
                save=True,
            ),
        },
        {
            "ep_object": "Output:Variable".upper(),
            "kwargs": dict(
                Variable_Name="Zone Thermostat Cooling Setpoint Temperature",
                Reporting_Frequency="hourly",
                save=True,
            ),
        },
    ]
    # Run EnergyPlus Simulation
    _, idf = run_eplus(
        idf_file,
        weather_file,
        output_directory=None,
        ep_version=ep_version,
        output_report=None,
        prep_outputs=outputs,
        design_day=False,
        annual=True,
        expandobjects=True,
        return_idf=True,
    )
    # Output reports
    htm = idf.htm
    sql = idf.sql
    sql_file = idf.sql_file
    # Clean names of idf objects (e.g. 'MATERIAL')
    log_clear_names = False
    idf_2 = deepcopy(idf)
    clear_name_idf_objects(idf_2, log_clear_names)
    # Get old:new names equivalence
    old_new_names = pd.read_csv(
        os.path.join(
            settings.data_folder,
            Path(idf_file).basename().stripext() + "_old_new_names_equivalence.csv",
        )
    ).to_dict()
    # Read IDF_T3D template and write lines in variable
    lines = io.TextIOWrapper(io.BytesIO(settings.template_BUI)).readlines()
    # Get objects from IDF file
    (
        buildingSurfs,
        buildings,
        constructions,
        equipments,
        fenestrationSurfs,
        globGeomRules,
        lights,
        locations,
        materialAirGap,
        materialNoMass,
        materials,
        peoples,
        versions,
        zones,
        zonelists,
    ) = get_idf_objects(idf_2)
    # Write GAINS (People, Lights, Equipment) from IDF to lines (T3D)
    _write_gains(equipments, lights, lines, peoples, htm, old_new_names)
    # Gets schedules from IDF
    schedule_names, schedules = _get_schedules(idf_2)
    # Adds ground temperature to schedules
    adds_sch_ground(htm, schedule_names, schedules)
    # Adds "sch_setpoint_ZONES" to schedules
    df_heating_setpoint = ReportData.from_sqlite(
        sql_file, table_name="Zone Thermostat Heating Setpoint Temperature"
    )
    df_cooling_setpoint = ReportData.from_sqlite(
        sql_file, table_name="Zone Thermostat Cooling Setpoint Temperature"
    )
    # Heating
    adds_sch_setpoint(
        zones, df_heating_setpoint, old_new_names, schedule_names, schedules, "h"
    )
    # Cooling
    adds_sch_setpoint(
        zones, df_cooling_setpoint, old_new_names, schedule_names, schedules, "c"
    )
    # Writes conditioning in lines
    schedule_as_input = True
    heat_dict, cool_dict = _write_conditioning(
        htm, lines, schedules, old_new_names, schedule_as_input
    )