def auto_generate_from_template(peak_heating_w, peak_cooling_w, base_LP_idf,
                                base_plant_idf, plant_configuration_json_file,
                                out_dir, final_idf_name, idd_file):
    """Generate a runnable IDF from a load-profile base plus a plant template.

    Pipeline: scale the LoadProfile:Plant flow rates to the peak loads,
    populate and expand the HVAC plant template, splice the expanded plant
    loop onto the load-profile file, clean up scratch files, and move the
    finished IDF into ``out_dir``.

    :param peak_heating_w: peak heating load in watts
    :param peak_cooling_w: peak cooling load in watts
    :param base_LP_idf: path to the load-profile base IDF
    :param base_plant_idf: path to the plant template IDF
    :param plant_configuration_json_file: JSON describing chillers/boilers
    :param out_dir: directory the final IDF is moved into (created if missing)
    :param final_idf_name: file name of the generated IDF
    :param idd_file: path to the Energy+.idd used by eppy
    """
    expanded_plant_loop_idf = 'expanded.idf'
    LP_plant_loop_idf = 'plant_loop.idf'
    temp_base_LP_idf = 'temp_base_LP.idf'

    # Update the flow rate for the loadprofile:plant objects
    IDF.setiddname(idd_file)
    idf = IDF(base_LP_idf)
    idf = auto_update_LP_flow_rates(idf, peak_heating_w, peak_cooling_w)
    idf.saveas(temp_base_LP_idf)

    modify_template_idf(plant_configuration_json_file, base_plant_idf,
                        idd_file=idd_file)
    expand_template_idf()
    prepare_LP_plantloop(expanded_plant_loop_idf, LP_plant_loop_idf,
                         idd_file=idd_file)
    append_files(temp_base_LP_idf, LP_plant_loop_idf, final_idf_name)

    # Remove every intermediate artifact produced above.
    for scratch in (temp_base_LP_idf, expanded_plant_loop_idf,
                    LP_plant_loop_idf, 'expandedidf.err', 'in.idf'):
        cleanup(scratch)

    # BUGFIX: the previous f"{out_dir}{final_idf_name}" produced a wrong path
    # unless out_dir happened to end with a separator; os.path.join is safe
    # either way.  makedirs(exist_ok=True) also avoids the exists/mkdir race
    # and handles nested output directories.
    os.makedirs(out_dir, exist_ok=True)
    shutil.move(final_idf_name, os.path.join(out_dir, final_idf_name))
def __init__(self):
    """Run the whole post-processing pipeline: load the IDF and the
    EnergyPlus HTML output tables, then compute the ETTV, RTTV, Trop and
    TDSE metrics and emit the results via ``self.get()``."""
    # Change EplusPath Here
    eplusPath = "D:/EnergyPlusV8-6-0/energyplus.exe"
    # Automate Setting
    self.path = os.path.abspath('.')
    sys.path.append(self.path)
    self.idfName = "test6.idf"
    self.eplusPath = eplusPath
    self.iddFile = "Energy+.idd"
    IDF.setiddname(self.iddFile)
    self.outputPath = "."
    self.idf1 = IDF(self.idfName)
    self.epwFile = "SGP_Singapore.486980_IWEC.epw"
    esoPath = "5ZoneAirCooled.eso"
    # Call Eplus Here
    # subprocess.call([self.eplusPath,'-i',self.iddFile,'-w',self.epwFile,'-d',self.outputPath,self.idfName])
    self.htmFile = "eplustbl.htm"
    # BUGFIX: read the HTML results via a context manager instead of the
    # previous open(...).read(), which leaked the file handle.
    with open(self.htmFile, 'r') as htm:
        fileHandle = htm.read()
    self.eplusOutputTables = readhtml.titletable(fileHandle)  # reads the tables with their titles
    self.ETTV = Class_ETTV.ETTV()
    self.ETTV.ETTVCalculator(self.eplusOutputTables)
    self.RTTV = Class_RTTV.RTTV()
    self.RTTV.RTTVCalculator(self.eplusOutputTables, self.idf1)
    self.Trop = Class_Trop.Trop()
    self.Trop.TropCalculator(self.eplusOutputTables, self.RTTV.RTTV)
    self.TDSE = Class_TDSE.TDSE()
    self.TDSE.TDSECalculator(self.eplusOutputTables, esoPath)
    # Output
    self.get()
def __init__(
    self,
    idf_file_path,
    unit_registry,
    profiles_files_handler: Optional[ProfileFilesHandler] = None,
    custom_config={},
):
    """
    Set up an IDF writer targeting *idf_file_path*.

    NOTE(review): the previous docstring described ``constr_name_lookup``,
    ``constr_writer``, ``constr_get_idf_files`` and ``geometry_writer``
    parameters that do not exist in this signature — presumably left over
    from an older interface; confirm against version history.

    :param idf_file_path: file name including full path to write IDF to.
                          file should not exist.
    :param unit_registry: unit registry used for unit handling
                          (assumed pint-compatible — TODO confirm)
    :param profiles_files_handler: optional handler whose ``add_file`` method
                                   registers profile files; when None, file
                                   paths are passed through unchanged
    :param custom_config: dictionary containing configuration entries
                          overwriting package default config.
                          NOTE(review): mutable default argument — shared
                          across calls if it is ever mutated.
    """
    assert not os.path.exists(idf_file_path), f"Cannot create IDF File {idf_file_path}. Already existing."
    self.logger = logging.getLogger(__name__)
    self._cfg = config_loader.load_config_for_package(_default_config_file, __package__, custom_config)
    self._custom_config = custom_config
    self.unit_registry = unit_registry
    idd_path = get_idd_path(ep_config=self._cfg)
    self.logger.info(f"using IDD {idd_path}")
    IDF.setiddname(idd_path)
    self.idf_file_path = idf_file_path
    self.__create_empty_idf()
    # NOTE(review): this assigns the typing construct itself, not a value of
    # that type — zone_data holds the Optional[...] object until overwritten;
    # looks like it was intended as an annotation or ``= None``. Confirm.
    self.zone_data = Optional[Dict[int, Tuple[str, List[EpBunch]]]]
    if profiles_files_handler:
        self.profiles_files_handler_method = profiles_files_handler.add_file
    else:
        self.profiles_files_handler_method = lambda x: x  # do nothing, just return filepath back
def read_idf(in_file):
    """Load *in_file* as an eppy IDF using the IDD configured in settings."""
    IDF.setiddname(settings.ep_idd)
    with open(in_file, 'r') as handle:
        return IDF(handle)
def setup(self):
    """Tidy up anything left from previous runs. Get an IDF object to run.
    """
    leftover_dir = os.path.join(THIS_DIR, 'run_outputs')
    if os.path.isdir(leftover_dir):
        shutil.rmtree(leftover_dir)
    IDF.setiddname(os.path.join(IDD_FILES, TEST_IDD), testing=True)
    self.idf = IDF(os.path.join(IDF_FILES, TEST_IDF), TEST_EPW)
    # Output files expected with the default suffix style.
    self.expected_files = [
        u'eplusout.audit', u'eplusout.bnd', u'eplusout.eio', u'eplusout.end',
        u'eplusout.err', u'eplusout.eso', u'eplusout.mdd', u'eplusout.mtd',
        u'eplusout.rdd', u'eplusout.shd', u'eplustbl.htm', u'sqlite.err',
    ]
    # Output files expected when running with output_suffix='C'.
    self.expected_files_suffix_C = [
        u'eplus.audit', u'eplus.mdd', u'eplus.err', u'eplusSqlite.err',
        u'eplus.eio', u'eplusTable.htm', u'eplus.shd', u'eplus.mtd',
        u'eplus.bnd', u'eplus.eso', u'eplus.rdd', u'eplus.end',
    ]
    # Output files expected when running with output_suffix='D'.
    self.expected_files_suffix_D = [
        u'eplus.audit', u'eplus.mdd', u'eplus-sqlite.err', u'eplus-table.htm',
        u'eplus.err', u'eplus.eio', u'eplus.bnd', u'eplus.shd', u'eplus.mtd',
        u'eplus.end', u'eplus.eso', u'eplus.rdd',
    ]
def to_eppy(self, idd_file: str) -> List[EpBunch]:
    """ Convert Material into Eppy material and construction objects """
    try:
        IDF.setiddname(str(idd_file))
    except IDDAlreadySetError:
        pass  # eppy allows the IDD to be set only once per process
    blank_idf = IDF(io.StringIO(""))

    # Spectral-average glazing layer carrying all optical properties.
    glazing = blank_idf.newidfobject("WINDOWMATERIAL:GLAZING")
    glazing.Name = self.name
    glazing.Optical_Data_Type = "SpectralAverage"
    glazing.Thickness = self.thickness
    glazing.Solar_Transmittance_at_Normal_Incidence = self.solar_transmittance
    glazing.Front_Side_Solar_Reflectance_at_Normal_Incidence = self.solar_reflectance
    glazing.Back_Side_Solar_Reflectance_at_Normal_Incidence = self.solar_reflectance
    glazing.Visible_Transmittance_at_Normal_Incidence = self.visible_transmittance
    glazing.Front_Side_Visible_Reflectance_at_Normal_Incidence = self.visible_reflectance
    glazing.Back_Side_Visible_Reflectance_at_Normal_Incidence = self.visible_reflectance
    glazing.Infrared_Transmittance_at_Normal_Incidence = self.transmittance
    glazing.Front_Side_Infrared_Hemispherical_Emissivity = self.emissivity
    glazing.Back_Side_Infrared_Hemispherical_Emissivity = self.emissivity
    glazing.Conductivity = self.conductivity
    glazing.Solar_Diffusing = False

    # Single-layer construction wrapping the glazing, sharing its name.
    layer = blank_idf.newidfobject("CONSTRUCTION")
    layer.Name = self.name
    layer.Outside_Layer = self.name

    return [glazing, layer]
def to_eppy(self, idd_file: str) -> List[EpBunch]:
    """ Convert Material into Eppy material and construction objects """
    try:
        IDF.setiddname(str(idd_file))
    except IDDAlreadySetError:
        pass  # IDD may only be set once per process
    blank_idf = IDF(io.StringIO(""))

    # Opaque material: copy every physical property from this object.
    opaque = blank_idf.newidfobject("MATERIAL")
    opaque.Name = self.name
    opaque.Roughness = self.roughness
    opaque.Thickness = self.thickness
    opaque.Conductivity = self.conductivity
    opaque.Density = self.density
    opaque.Specific_Heat = self.specific_heat
    opaque.Thermal_Absorptance = self.thermal_absorptance
    opaque.Solar_Absorptance = self.solar_absorptance
    opaque.Visible_Absorptance = self.visible_absorptance

    # Single-layer construction reusing the material name.
    layer = blank_idf.newidfobject("CONSTRUCTION")
    layer.Name = self.name
    layer.Outside_Layer = self.name

    return [opaque, layer]
def to_eppy(self, idd_file: str):
    """Convert this shade into eppy schedule and Shading:Site:Detailed objects."""
    try:
        IDF.setiddname(str(idd_file))
    except IDDAlreadySetError:
        pass  # IDD may only be set once per process
    blank_idf = IDF(io.StringIO(""))

    # Schedule type limits for a constant 0..1 continuous value.
    limits = blank_idf.newidfobject("SCHEDULETYPELIMITS")
    limits.Name = "constant"
    limits.Lower_Limit_Value = 0
    limits.Upper_Limit_Value = 1
    limits.Numeric_Type = "Continuous"

    # Constant transmittance schedule (0 => fully opaque shade).
    schedule = blank_idf.newidfobject("SCHEDULE:CONSTANT")
    schedule.Name = "constant"
    schedule.Schedule_Type_Limits_Name = "constant"
    schedule.Hourly_Value = 0

    # The shade surface itself, one vertex triple per polygon point.
    shade = blank_idf.newidfobject("SHADING:SITE:DETAILED")
    shade.Name = random_id()
    shade.Transmittance_Schedule_Name = "constant"
    shade.Number_of_Vertices = len(self.vertices)
    for index, vertex in enumerate(self.vertices, start=1):
        setattr(shade, "Vertex_{}_Xcoordinate".format(index), vertex.x)
        setattr(shade, "Vertex_{}_Ycoordinate".format(index), vertex.y)
        setattr(shade, "Vertex_{}_Zcoordinate".format(index), vertex.z)

    return [limits, schedule, shade]
def setDirectories():
    """Register the IDD and collect weather-file and model lists for the batch run."""
    # eppy needs the IDD registered before any IDF is opened.
    IDF.setiddname('C:\\EnergyPlusV9-0-1\\Energy+.idd')

    # Weather data layout under the EnergyPlus install.
    WeatherDir = 'C:\\EnergyPlusV9-0-1\\WeatherData\\Scripting\\'
    epwDir = WeatherDir + 'EPW\\'
    ddyDir = WeatherDir + 'DDY\\'

    # The csv file to define the weather files to pick up for simulation
    CSVDir = 'C:\\EnergyPlusV9-0-1\\WeatherData\\Scripting\\WeatherFileNameList.csv'
    weatherfilename_list = ReadFileNameInCsv(CSVDir)
    # print(weatherfilename_list)
    # WriteEPWNameToCSV(epwDir, CSVDir , 8)  # writes a chosen number of weather file names to the csv

    # Read all the idf models for simulation and make a list
    IdfDirInitial = 'C:\\EnergyPlusV9-0-1\\IDFscripting\\'
    LargeIDFLs = os.listdir(IdfDirInitial + 'Large\\')
    SmallIDFLs = os.listdir(IdfDirInitial + 'Small\\')

    return weatherfilename_list, epwDir, ddyDir, LargeIDFLs, SmallIDFLs
def idf():
    """Fixture-style helper: a fresh eppy IDF holding only a 9.5.0 Version object."""
    eplus_cfg = cesarp.common.config_loader.load_config_for_package(
        eplus_adpater_config_file, "cesarp.eplus_adapter")
    IDF.setiddname(eplus_cfg["CUSTOM_IDD_9_5"])
    version_stanza = cesarp.eplus_adapter.idf_strings.version.format("9.5.0")
    return IDF(StringIO(version_stanza))
def auto_generate_from_template(cop, number, base_idf_dir, final_idf_dir, idd_file_dir):
    """Stamp the cooling COP and chiller-heater module count into the template and save."""
    IDF.setiddname(idd_file_dir)
    model = IDF(base_idf_dir)
    chiller_heater = model.idfobjects['ChillerHeaterPerformance:Electric:EIR'][0]
    chiller_heater.Reference_Cooling_Mode_COP = cop
    heat_pump_system = model.idfobjects['CentralHeatPumpSystem'][0]
    heat_pump_system.Number_of_Chiller_Heater_Modules_1 = number
    model.save(final_idf_dir)
def base_idf():
    """Return an empty eppy IDF backed by the current-version IDD snippet.

    The IDD is set only if no IDD has been registered yet — eppy forbids
    re-setting it within a process.
    """
    iddsnippet = iddcurrent.iddtxt
    iddfhandle = StringIO(iddsnippet)
    # FIX: compare against None with ``is``, not ``==`` (PEP 8).
    if IDF.getiddname() is None:
        IDF.setiddname(iddfhandle)
    idftxt = ""
    idfhandle = StringIO(idftxt)
    idf = IDF(idfhandle)
    return idf
def makeIDFfortesting():
    """make IDF for testing

    Registers the bundled current-version IDD (once per process) and returns
    the IDF class itself, ready for tests to instantiate.
    """
    # FIX: dropped the unused local ``iddtxt`` and the unused ``as e`` binding.
    iddfhandle = StringIO(iddcurrent.iddtxt)
    try:
        IDF.setiddname(iddfhandle)
    except IDDAlreadySetError:
        # IDD can only be set once per process; keep the existing one.
        pass
    return IDF
def modify_template_idf(plant_configuration_json_file,
                        base_template_idf='base_plant.idf',
                        modified_template_idf='in.idf',
                        idd_file="C:/EnergyPlusV9-1-0/Energy+.idd"):
    """Populate the HVAC plant template with chillers and boilers described in
    a JSON configuration file, then save the result."""
    IDF.setiddname(idd_file)
    idf = IDF(base_template_idf)
    with open(plant_configuration_json_file) as json_file:
        data = json.load(json_file)

    def add_template_chiller(idf, chiller_name,
                             chiller_type='ElectricCentrifugalChiller',
                             cop=3.5, condenser_type='WaterCooled'):
        # Create the object, then grab it as the newest entry of its class.
        idf.newidfobject("HVACTemplate:Plant:Chiller")
        chiller = idf.idfobjects['HVACTemplate:Plant:Chiller'][-1]
        chiller.Name = chiller_name
        chiller.Chiller_Type = chiller_type
        chiller.Nominal_COP = cop
        chiller.Condenser_Type = condenser_type
        chiller.Priority = 1
        # Leave the optional sizing/part-load fields blank.
        for blank_field in ('Sizing_Factor',
                            'Minimum_Part_Load_Ratio',
                            'Maximum_Part_Load_Ratio',
                            'Optimum_Part_Load_Ratio',
                            'Minimum_Unloading_Ratio',
                            'Leaving_Chilled_Water_Lower_Temperature_Limit'):
            setattr(chiller, blank_field, '')
        return idf

    def add_template_boiler(idf, boiler_name, efficiency=0.8,
                            fuel_type="NaturalGas"):
        idf.newidfobject("HVACTemplate:Plant:Boiler")
        boiler = idf.idfobjects['HVACTemplate:Plant:Boiler'][-1]
        boiler.Name = boiler_name
        boiler.Efficiency = efficiency
        boiler.Fuel_Type = fuel_type
        return idf

    for count, chiller in enumerate(data['chillers']):
        idf = add_template_chiller(idf, f"Chiller No. {count+1}",
                                   chiller['type'], chiller['COP'],
                                   chiller['condenser'])
    for count, boiler in enumerate(data['boilers']):
        idf = add_template_boiler(idf, f"Boiler No. {count+1}",
                                  boiler['efficiency'],
                                  fuel_type=boiler['fuel'])
    print('---> Adding template chiller and boiler done.')
    idf.saveas(modified_template_idf)
def get_idf(self, name: str, idd_path: Path) -> IDF:
    """Fetch the IDF text named *name* from the service and parse it with eppy.

    :param name: identifier of the IDF on the server
    :param idd_path: path to the Energy+.idd to register with eppy
    :return: the parsed IDF on success; on any setup/parsing failure the raw
             response text is returned instead (kept for backward
             compatibility with existing callers).
    """
    url = self.url + '/idf'
    response = requests.get(url=url, params={'name': name})
    try:
        IDF.setiddname(str(idd_path))
        idf: IDF = IDF()
        idf.initreadtxt(response.text)
        return idf
    except Exception:
        # FIX: narrowed from a bare ``except:`` which also swallowed
        # SystemExit/KeyboardInterrupt; the text fallback is preserved.
        return response.text
def __init__(self, idf_file_in, idd_file):
    """
    Args:
        idf_file_in: path to idf file to be injected
        idd_file: path to idd file for processing idf file with eppy
    """
    # Register the IDD with eppy before any IDF work happens.
    IDF.setiddname(iddname=idd_file)
    self.idf_file_in = idf_file_in
    # Filled in later with the text to inject.
    self.appending_str = None
def __init__(self, idf_path, idd_path="data/V8-5-0-Energy+.idd", save=False):
    """Load *idf_path* with eppy and prepare clustering bookkeeping."""
    # Object initialization
    IDF.setiddname(idd_path)
    self.save = save
    self.centers, self.labels, self.model_parameters = [], [], []
    # Read IDF file and generate zone dictionary
    self.idf = IDF(idf_path)
    self.zones = Idf_Zone_List(self.idf)
def test_IDF():
    """py.test for class IDF"""
    stored_idd = IDF.iddname
    IDF.iddname = None
    assert IDF.iddname is None
    # The first setiddname call wins...
    IDF.setiddname("gumby", testing=True)
    assert IDF.iddname == "gumby"
    # ...and a second call does not overwrite it.
    IDF.setiddname("karamba", testing=True)
    assert IDF.iddname != "karamba"
    assert IDF.iddname == "gumby"
    # Restore whatever IDD was registered before this test ran.
    IDF.iddname = stored_idd
def auto_generate_from_template(peak_heating_w, peak_cooling_w, heating_cop,
                                cooling_cop, n_borehole, soil_k,
                                base_idf_dir, final_idf_dir, idd_file_dir):
    """Scale the template IDF to the given peak loads and GHEX design, then save."""
    IDF.setiddname(idd_file_dir)
    model = IDF(base_idf_dir)
    # Flow rates first, then the ground-heat-exchanger system sizing.
    model = auto_update_LP_flow_rates(model, peak_heating_w, peak_cooling_w)
    model = auto_update_ghex_system(model, peak_heating_w, peak_cooling_w,
                                    heating_cop, cooling_cop,
                                    n_borehole, soil_k)
    model.save(final_idf_dir)
def set_energyplus_folder(cls, path):
    """
    Add the pyenergyplus into the path so the program can find the EnergyPlus.

    :parameter path: The installation path of the EnergyPlus 9.3.0.
    :type path: str

    :return: None
    """
    # Local import keeps this chunk self-contained; os may not be imported
    # at module level in every deployment of this snippet.
    import os

    sys.path.insert(0, path)
    # FIX: os.path.join works whether or not *path* ends with a separator,
    # unlike the previous f"{path}Energy+.idd" concatenation which produced
    # e.g. "D:/EPEnergy+.idd" for a path without a trailing slash.
    IDF.setiddname(os.path.join(path, "Energy+.idd"))
    cls.model_import_flag = True
def __init__(self, schedule_params, fig_dir='../figures/', idd_file=None, **kwargs): """ Constructs necessary attributes for a setpoint schedule object Parameters :param schedule_params: dict -> see .json for input You can add eplus_param as an additional argument eplus_param """ #get arguments self.fig_dir = fig_dir if not os.path.exists(self.fig_dir): os.makedirs(self.fig_dir) # set eplus_config if specified. if "eplus_param" in kwargs: self.eplus_param = kwargs["eplus_param"] else: self.set_eplus_config() # set IDD if idd_file is None: self.idd_file = pkg_resources.resource_filename( 'tempset', 'data/eplus/Energy+.idd') else: self.idd_file = idd_file IDF.setiddname(self.idd_file) # import IDF and extract all the scedules idf_1 = IDF(idfname=pkg_resources.resource_filename( 'tempset', 'data/idf/gas.idf')) AllSchedComapcts = idf_1.idfobjects["Schedule:Compact"] # get params from json' self.get_schparams_from_json(schedule_params=schedule_params) # change SETP schedule' self.mod_schedule = self.change_SETP(AllSchedCompacts=AllSchedComapcts, schedule_name=self.schedule_name) # write new schedule to file' self.write_to_idf( edited_schedule=self.mod_schedule, idf_file=pkg_resources.resource_filename('tempset', 'data/idf/gas.idf'), new_file=self.eplus_param["mod_file"], )
def get_idf(idf_file: str = config.files.get('idf'),
            idd_file: str = config.files.get('idd'),
            output_directory=config.out_dir) -> IDF:
    """Read *idf_file* with eppy and return the IDF object.

    The returned object's ``run`` method is pre-bound to *output_directory*
    so every simulation writes its results to the same place.
    """
    # eppy errors if the IDD is changed mid-program, so set it up front.
    IDF.setiddname(idd_file)
    # TODO: Fix this rather than hiding it.
    # Constructing IDF prints a warning; silence stdout while loading.
    with contextlib.redirect_stdout(None):
        model = IDF(idf_file)
    model.run = partial(model.run, output_directory=output_directory)
    return model
def read_idf(fname, iddfile="lib/V8-5-0-Energy+.idd"):
    # Parse *fname* with eppy and build Zone records keyed by zone name.
    # NOTE(review): this chunk appears truncated — the ``zones`` dict is built
    # but never returned, and ``wallClasses`` stays empty; confirm against
    # the full source before relying on this function.
    IDF.setiddname(iddfile)
    idf = IDF(fname)
    zones = {}
    for zone in idf.idfobjects["ZONE"]:
        # Wrap each EnergyPlus ZONE object in the project's Zone type.
        zone_obj = Zone(zone.Name, "Zone", zone.Floor_Area)
        # Loads start at zero; presumably filled in later — TODO confirm.
        zone_obj.equipmentLoad = 0.0
        zone_obj.occupancy = 0.0
        zone_obj.lightingLoad = 0.0
        zone_obj.wallClasses = []
        zones[zone.Name] = zone_obj
def setup(self):
    """Set the IDD and file paths, and make a copy of the original file."""
    IDF.setiddname(os.path.join(IDD_FILES, "Energy+V7_2_0.idd"), testing=True)
    self.origfile = os.path.join(INTEGRATION_FILES, "origfile.idf")
    # Temp-file names used by the individual tests.
    self.startfile = os.path.join(INTEGRATION_FILES, "startfile.idf")
    self.saveasfile = os.path.join(INTEGRATION_FILES, "saveas.idf")
    self.copyfile = os.path.join(INTEGRATION_FILES, "savecopy.idf")
    # Work on a copy so the original fixture file stays pristine.
    shutil.copy(self.origfile, self.startfile)
def setup(self):
    """Set the IDD and file paths, and make a copy of the original file.
    """
    iddpath = os.path.join(IDD_FILES, "Energy+V7_2_0.idd")
    IDF.setiddname(iddpath, testing=True)
    self.origfile = os.path.join(INTEGRATION_FILES, "origfile.idf")
    # scratch files the tests will write to
    for attr, fname in (("startfile", "startfile.idf"),
                        ("saveasfile", "saveas.idf"),
                        ("copyfile", "savecopy.idf")):
        setattr(self, attr, os.path.join(INTEGRATION_FILES, fname))
    # leave origfile untouched; tests mutate the copy
    shutil.copy(self.origfile, self.startfile)
def run_single(idf_name_in, epw_name):
    """Run one EnergyPlus simulation for the given IDF/EPW pair, writing
    results next to this script in an 'out_<model>' directory."""
    here = os.path.dirname(os.path.realpath(__file__))
    IDF.setiddname("C:/EnergyPlusV9-0-0/Energy+.idd")
    prefix = idf_name_in.split('.idf')[0]
    model = IDF(here + '/' + idf_name_in, here + '/' + epw_name)
    model.run(output_directory=here + '/out_' + prefix,
              readvars=True,
              output_prefix=prefix,
              output_suffix='D')
def idfreadtest(iddhandle, idfhandle1, idfhandle2, verbose=False, save=False):
    """compare the results of eppy reader and simple reader

    Returns True when both readers normalise to identical text (tolerating
    equal numbers formatted differently), False at the first real mismatch.
    """
    # read using eppy:
    try:
        IDF.setiddname(iddhandle)
    except modeleditor.IDDAlreadySetError:
        # idd has already been set
        pass
    idf = IDF(idfhandle1)
    idfstr = idf2txt(idf.idfstr())
    # -
    # do a simple read
    simpletxt = idfhandle2.read()
    try:
        simpletxt = simpletxt.decode('ISO-8859-2')
    except AttributeError:
        # already text, not bytes
        pass
    simpletxt = idf2txt(simpletxt)
    # -
    if save:
        # BUGFIX: the eppy text was previously written to 'simpleread.idf'
        # and the simple text to 'eppyread.idf' (swapped); also use context
        # managers so the handles are closed.
        with open('eppyread.idf', 'w') as out:
            out.write(idfstr)
        with open('simpleread.idf', 'w') as out:
            out.write(simpletxt)
    # do the compare
    lines1 = idfstr.splitlines()
    lines2 = simpletxt.splitlines()
    for i, (line1, line2) in enumerate(zip(lines1, lines2)):
        if line1 != line2:
            # test if it is a mismatch in number format
            try:
                line1 = float(line1[:-1])
                line2 = float(line2[:-1])
                if line1 != line2:
                    if verbose:
                        print()
                        print("%s- : %s" % (i, line1))
                        print("%s- : %s" % (i, line2))
                    return False
            except ValueError:
                if verbose:
                    print()
                    print("%s- : %s" % (i, line1))
                    print("%s- : %s" % (i, line2))
                return False
    return True
def compute(self):
    """VisTrails module body: load an IDF input (or an empty stream) into
    eppy and emit the resulting IDF object."""
    from eppy.modeleditor import IDF, IDDAlreadySetError
    from StringIO import StringIO
    idf = self.force_get_input('idf', None)
    idd = self.get_input('idd').name
    try:
        IDF.setiddname(idd)
    except IDDAlreadySetError:
        pass  # the IDD can only be set once per session
    # Fall back to an empty stream when no IDF input is connected.
    idf_file = open(idf.name, 'r') if idf else StringIO('')
    self.idf = IDF(idf_file)
    self.set_output('idf', self.idf)
def main():
    """the main routine"""
    from io import StringIO
    import eppy.iddv7 as iddv7
    IDF.setiddname(StringIO(iddv7.iddtxt))
    idf1 = IDF(StringIO(''))
    # The plant and condenser loop builders are currently disabled:
    # makeplantloop(idf1, "p_loop",
    #               ['sb0', ['sb1', 'sb2', 'sb3'], 'sb4'],
    #               ['db0', ['db1', 'db2', 'db3'], 'db4'])
    # makecondenserloop(idf1, "c_loop", <same supply/demand layout>)
    loopname = "a_loop"
    supply_side = ['sb0', ['sb1', 'sb2', 'sb3'], 'sb4']
    demand_side = ['zone1', 'zone2', 'zone3']
    makeairloop(idf1, loopname, supply_side, demand_side)
    idf1.savecopy("hh1.idf")
def compute(self):
    """VisTrails module body: POST the snapshot to the BIM server and parse
    the returned IDF text with eppy."""
    import requests
    from eppy.modeleditor import IDF, IDDAlreadySetError
    from StringIO import StringIO
    url = self.get_input('url')
    snapshot = self.get_input('snapshot')
    r = requests.post(url, etree.tostring(snapshot))
    if not r.ok:
        raise Exception('Could not request IDF from BIM')
    # Normalise line endings before handing the text to eppy.
    idf_file = StringIO(r.text.strip().replace('\r\n', '\n'))
    idd = force_get_path(self, 'idd', find_idd())
    try:
        IDF.setiddname(idd)
    except IDDAlreadySetError:
        pass  # the IDD can only be set once per session
    self.idf = IDF(idf_file)
    self.set_output('idf', self.idf)
def main():
    """the main routine"""
    from StringIO import StringIO
    import eppy.iddv7 as iddv7
    # An empty IDF backed by the bundled v7 IDD.
    IDF.setiddname(StringIO(iddv7.iddtxt))
    idf1 = IDF(StringIO(''))
    # Plant loop (disabled):
    # makeplantloop(idf1, "p_loop",
    #               ['sb0', ['sb1', 'sb2', 'sb3'], 'sb4'],
    #               ['db0', ['db1', 'db2', 'db3'], 'db4'])
    # Condenser loop (disabled):
    # makecondenserloop(idf1, "c_loop", ...)
    loopname = "a_loop"
    sloop = ['sb0', ['sb1', 'sb2', 'sb3'], 'sb4']
    dloop = ['zone1', 'zone2', 'zone3']
    makeairloop(idf1, loopname, sloop, dloop)
    idf1.savecopy("hh1.idf")
def run_single(idf_name, epw_name, n=None):
    """Run a single EnergyPlus simulation, logging start/end with index *n*."""
    print('Simulation ' + str(n) + ' starts.')
    here = os.path.dirname(os.path.realpath(__file__))
    IDF.setiddname("C:/EnergyPlusV9-0-0/Energy+.idd")
    prefix = idf_name.split('.idf')[0]
    model = IDF(here + '/' + idf_name, here + '/' + epw_name)
    model.run(output_directory=here + '/out_' + prefix,
              readvars=True,
              output_prefix=prefix,
              output_suffix='D')
    print('Simulation ' + str(n) + ' ends.')
def main(argv=None):
    # CLI entry point: parse options, then diff two IDF files.
    # NOTE(review): Python 2 syntax (``except getopt.error, msg``) — this
    # chunk predates Python 3.  It also appears truncated: the outer ``try``
    # has no visible matching ``except``/``finally``, and ``verbose``/
    # ``output`` are set but unused here; confirm against the full source.
    if argv is None:
        argv = sys.argv
    try:
        try:
            opts, args = getopt.getopt(argv[1:], "ho:v", ["help", "output="])
        except getopt.error, msg:
            raise Usage(msg)

        # option processing
        for option, value in opts:
            if option == "-v":
                verbose = True
            if option in ("-h", "--help"):
                raise Usage(help_message)
            if option in ("-o", "--output"):
                output = value
        # Positional arguments: the IDD plus the two IDF files to compare.
        iddfile, fname1, fname2 = args
        IDF.setiddname(iddfile)
        idf1 = IDF(fname1)
        idf2 = IDF(fname2)
        idfdiffs(idf1, idf2)
def find_links(f): """Find target/source links between e+ objects Args: objs (idf.idfobjects()) Returns list of dicts, [ {"source": int, "target":int}, .. ] Raises: dunno yet """ # start eppy iddfile = "/usr/local/EnergyPlus-8-0-0/Energy+.idd" IDF.setiddname(iddfile) # only do this once # geometry_filename = os.path.join(input_directory, "model.idf") #"//home/tom/repos/www-maddog/mde/tutorials/ods-studio/afn0/ep/Output idf = IDF(f) #get a idf.idfobjects # get all idfobjects try: listed_objects = [obj[1] for obj in idf.idfobjects.iteritems() if len(obj[1]) > 0] all_objects = [x for lst in listed_objects for x in lst] #all_objects = [key for key, value in idf1.idfobjects.items() if len(value) > 0] except: return "couldnt get all objects, Santosh - whats up?" """all_objects = idf.idfobjects non_empty = [] for obj in allobjects.iteritems(): if obj[1]: non_empty.append(obj[1][0]) """ #afn_surfaces = idf.idfobjects['AirflowNetwork:Multizone:Surface'.upper()] list_of_names = [] for obj in all_objects: try: n = getattr(obj, 'Name') list_of_names.append(n) except: pass list_of_names # make list of all object names, source id # loop over objects, look for other names in that object links = [] for name in list_of_names: for obj in all_objects: if name in obj.obj: #links.append("name %s found in %s" %(name, obj) ) #print(obj.Name) #print(name) try: if name != obj.Name: #print(" link from name : %s to name %s "%(name, obj.Name)) links.append(["source :%s"%name, "target: %s"%obj.Name ]) except: try: if name != obj.Zone_Name: #print(" \tlink from name : %s to Zone Name %s "%(name, obj.Zone_Name)) links.append(["source :%s"%name, "target: %s"%obj.Zone_Name ]) except: #print("\t \t Variables are not supported couldnt get name from %s"%obj.obj) pass
# Tutorial fragment (converted notebook): demonstrates that eppy refuses to
# open an IDF before an IDD is set, and refuses to change the IDD afterwards.
try:
    idf1 = IDF(fname1)
except modeleditor.IDDNotSetError as e:
    print("raised eppy.modeleditor.IDDNotSetError")

# <markdowncell>
# OK. It does not let you do that and it raises an exception
#
# So let us set the **idd** file and then open the idf file
# <codecell>
iddfile = "../eppy/resources/iddfiles/Energy+V7_2_0.idd"
IDF.setiddname(iddfile)
idf1 = IDF(fname1)

# <markdowncell>
# That worked without raising an exception
#
# Now let us try to change the **idd** file. Eppy should not let you do this and should raise an exception.
# <codecell>
try:
    IDF.setiddname("anotheridd.idd")
except modeleditor.IDDAlreadySetError as e:
    print("raised modeleditor.IDDAlreadySetError")
# Batch driver: reads BatchProcessing.csv and applies the listed functions to
# each IDF model.  NOTE(review): Python 2 code (``print`` statement, ``<>``
# operator) and the chunk appears truncated — the inner loop body ends after
# splitting the arguments, and ``Idx`` is never incremented in the visible
# code; confirm against the full source.
from eppy import modeleditor
from eppy.modeleditor import IDF
import csv
import importdir

# Initialization #1
importdir.do("Functions",globals())
IDDFile = 'C:\EnergyPlusV8-3-0\Energy+.idd'
IDF.setiddname(IDDFile)
# NOTE(review): the CSV file handle is opened here and never closed in the
# visible code.
BatchProcesingFile = open("BatchProcessing.csv","r")
BatchProcessing = list(csv.reader(BatchProcesingFile, delimiter=',', quotechar=chr(34)))
Idx = 0
for i in range(0,len(BatchProcessing)):
    # Display the progress of the script
    if Idx >= 2:
        print "Creating "+str(Idx-1)+" out of "+str(len(BatchProcessing)-2)+" models."
    # Initialization #2
    NbRows = BatchProcessing[i]
    # Skip the header row ('idf') and placeholder rows ('-').
    if NbRows[0] <> 'idf' and NbRows[0] <> '-':
        # Initialization #3
        idf_file = IDF(NbRows[0])
        # Iterates throught the CSV file and execute the specified functions
        for j in range(2,len(NbRows)):
            # Retrieve the user input arguments
            arguments = NbRows[j].split(",")
            #print str(len(arguments))+" " +str(arguments[0])
# Test-module preamble: register the bundled IDD once per process, then the
# (truncated) parametrised test for poptrailing.
import eppy.snippet as snippet

iddsnippet = iddcurrent.iddtxt
idfsnippet = snippet.idfsnippet

# idffhandle = StringIO(idfsnippet)
# iddfhandle = StringIO(iddsnippet)
# bunchdt, data, commdct, gdict = idfreader.idfreader(idffhandle, iddfhandle, None)

# idd is read only once in this test
# if it has already been read from some other test, it will continue with
# the old reading
iddfhandle = StringIO(iddcurrent.iddtxt)
# NOTE(review): ``== None`` should idiomatically be ``is None`` (PEP 8).
if IDF.getiddname() == None:
    IDF.setiddname(iddfhandle)


def test_poptrailing():
    """py.test for poptrailing"""
    # NOTE(review): this chunk is cut off mid-tuple — the remaining test data
    # and the assertion loop are outside the visible source.
    tdata = (
        (
            [1, 2, 3, '', 56, '', '', '', ''],
            [1, 2, 3, '', 56]
        ),  # lst, popped
        (
            [1, 2, 3, '', 56],
            [1, 2, 3, '', 56]
        ),  # lst, popped
        (
            [1, 2, 3, 56],
# if you have not done so, uncomment the following three lines import sys # pathnameto_eppy = 'c:/eppy' pathnameto_eppy = "../" sys.path.append(pathnameto_eppy) from eppy import modeleditor from eppy.modeleditor import IDF iddfile = "../eppy/resources/iddfiles/Energy+V7_2_0.idd" fname1 = "../eppy/resources/idffiles/V_7_2/smallfile.idf" # <codecell> IDF.setiddname(iddfile) idf1 = IDF(fname1) # <markdowncell> # idf1 now holds all the data to your in you idf file. # # Now that the behind-the-scenes work is done, we can print this file. # <codecell> idf1.printidf() # <markdowncell> # Looks like the same file as before, except that all the comments are slightly different.
# Job-manager module preamble: queue paths and the IDD registration happen at
# import time.  NOTE(review): ``logging`` and ``os`` are used below but their
# imports are not visible in this chunk — presumably they appear earlier in
# the file; confirm.
from eppy.modeleditor import IDF

from manager.src.ssh_lib import sftpGetDirFiles
from manager.src.ssh_lib import sshCommandNoWait
from manager.src.ssh_lib import sshCommandWait
from manager.src import ssh_lib
from manager.src.config import config
from manager.src.ssh_lib import sftpSendFile

logging.basicConfig(level=logging.INFO)

THIS_DIR = os.path.abspath(os.path.dirname(__file__))
DATA_DIR = os.path.join(THIS_DIR, os.pardir, 'data')
# Register the project-bundled IDD with eppy at import time.
IDF.setiddname(os.path.join(DATA_DIR, 'idd/Energy+.idd'))
# Filesystem queues shared with the workers.
JOBQUEUE = os.path.join(THIS_DIR, os.pardir, os.pardir, 'queue', 'job_queue')
RESULTS = os.path.join(THIS_DIR, os.pardir, os.pardir, 'queue', 'results_queue')


def distribute_job(jobpath):
    """Find a server on which to run an EnergyPlus simulation.

    NOTE(review): this chunk is truncated — only the first statement of the
    function body is visible.

    Parameters
    ----------
    jobpath : str
        Path to a folder containing everything needed to run the simulation.
    """
    jobdir = os.path.basename(jobpath)