def simulate_idf(idf_file, epw_file, folder, log_file):
    """Simulate an IDF file in EnergyPlus.

    \b
    Args:
        idf_file: Full path to a simulate-able .idf file.
        epw_file: Full path to an .epw file.
    """
    try:
        # set the folder to the default if it's not specified and copy the IDF there
        if folder is None:
            proj_name = os.path.basename(idf_file).replace('.idf', '')
            folder = os.path.join(folders.default_simulation_folder, proj_name)
        preparedir(folder, remove_content=False)
        idf = os.path.join(folder, 'in.idf')
        shutil.copy(idf_file, idf)

        # run the file through EnergyPlus
        gen_files = [idf]
        sql, eio, rdd, html, err = run_idf(idf, epw_file)
        if err is not None and os.path.isfile(err):
            gen_files.extend([sql, eio, rdd, html, err])
            err_obj = Err(err)
            for error in err_obj.fatal_errors:
                log_file.write(err_obj.file_contents)  # log before raising the error
                raise Exception(error)
        else:
            raise Exception('Running EnergyPlus failed.')
        log_file.write(json.dumps(gen_files))
    except Exception as e:
        _logger.exception('IDF simulation failed.\n{}'.format(e))
        sys.exit(1)
    else:
        sys.exit(0)

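# A minimal invocation sketch (not from the source): simulate_idf reads as a click
# command given the ``\b`` docstring marker and log_file argument, so it would be
# exercised through click's CliRunner, as the test further below does for
# three_phase_rmtxop. The file paths here are placeholders.
#
#     from click.testing import CliRunner
#     runner = CliRunner()
#     result = runner.invoke(simulate_idf, ['./model.idf', './weather.epw'])
#     assert result.exit_code == 0
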
def mirror_grid(grid_file, vector, name, suffix, folder, log_file):
    """Mirror a honeybee Model's SensorGrids and format them for thermal mapping.

    This involves setting the direction of every sensor to point up (0, 0, 1) and
    then adding a mirrored sensor grid with the same sensor positions that all
    point downward. In thermal mapping workflows, the upward-pointing grids can be
    used to account for direct and diffuse shortwave irradiance while the
    downward-pointing grids account for ground-reflected shortwave irradiance.

    \b
    Args:
        grid_file: Full path to a sensor grid file to be mirrored.
    """
    try:
        # create the directory if it's not there and set up output paths
        if not os.path.isdir(folder):
            preparedir(folder)
        base_file = os.path.join(folder, '{}.pts'.format(name))
        rev_file = os.path.join(folder, '{}_{}.pts'.format(name, suffix))

        # loop through the lines of the grid_file and mirror the sensors
        if vector is not None and vector != '':
            # process the vector if it exists
            vec = [float(v) for v in vector.split()]
            assert len(vec) == 3, \
                'Vector "{}" must have 3 values. Got {}.'.format(vector, len(vec))
            vec_str = ' {} {} {}\n'.format(*vec)
            rev_vec = [-v for v in vec]
            rev_vec_str = ' {} {} {}\n'.format(*rev_vec)
            # get the lines from the grid file
            with open(grid_file) as sg_file:
                with open(base_file, 'w') as b_file, open(rev_file, 'w') as r_file:
                    for line in sg_file:
                        origin_str = ' '.join(line.split()[:3])
                        b_file.write(origin_str + vec_str)
                        r_file.write(origin_str + rev_vec_str)
        else:
            # loop through each sensor and reverse the vector
            with open(grid_file) as sg_file:
                with open(rev_file, 'w') as r_file:
                    for line in sg_file:
                        ray_vals = line.strip().split()
                        origin_str = ' '.join(ray_vals[:3])
                        vec_vals = (-float(v) for v in ray_vals[3:])
                        rev_vec_str = ' {} {} {}\n'.format(*vec_vals)
                        r_file.write(origin_str + rev_vec_str)
            # copy the input grid file to the base file location
            shutil.copyfile(grid_file, base_file)

        # write the resulting file paths to the log file
        log_file.write(json.dumps([base_file, rev_file], indent=4))
    except Exception as e:
        _logger.exception('Sensor grid mirroring failed.\n{}'.format(e))
        sys.exit(1)
    else:
        sys.exit(0)

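# Illustration of the transformation mirror_grid applies (a standalone sketch,
# not part of the CLI): each line of a .pts file holds "x y z dx dy dz", and the
# command keeps the origin while forcing the vector up in the base file and down
# in the mirrored one. The sensor line below is made up.
sample_line = '0.0 1.0 0.8 0 0 1'
origin_str = ' '.join(sample_line.split()[:3])
base_sensor = origin_str + ' 0 0 1'      # upward-pointing, for sky shortwave
mirror_sensor = origin_str + ' 0 0 -1'   # downward-pointing, for ground-reflected
assert base_sensor == '0.0 1.0 0.8 0 0 1'
assert mirror_sensor == '0.0 1.0 0.8 0 0 -1'
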
def orientation_sim_pars(ddy_file, north_angles, output_name, run_period,
                         start_north, filter_des_days, folder, log_file):
    """Get SimulationParameter JSONs with different north angles for orientation studies.

    \b
    Args:
        ddy_file: Full path to a DDY file that will be used to specify design
            days within the simulation parameter.
        north_angles: Any number of values between -360 and 360 for the
            counterclockwise difference between the North and the positive
            Y-axis in degrees. 90 is West and 270 is East.
    """
    try:
        # get a default folder if none was specified
        if folder is None:
            folder = os.path.join(folders.default_simulation_folder, 'orientation_study')
        preparedir(folder, remove_content=False)

        # create a base set of simulation parameters to be edited parametrically
        sim_par = SimulationParameter()
        for out_name in output_name:
            sim_par.output.add_output(out_name)
        _apply_run_period(run_period, sim_par)
        _apply_design_days(ddy_file, filter_des_days, sim_par)

        # shift all of the north angles by the start_north if specified
        if start_north != 0:
            north_angles = [angle + start_north for angle in north_angles]
            for i, angle in enumerate(north_angles):
                angle = angle - 360 if angle > 360 else angle
                angle = angle + 360 if angle < -360 else angle
                north_angles[i] = angle

        # loop through the north angles and write a simulation parameter for each
        json_files = []
        for angle in north_angles:
            sim_par.north_angle = angle
            base_name = 'sim_par_north_{}'.format(int(angle))
            file_name = '{}.json'.format(base_name)
            file_path = os.path.join(folder, file_name)
            with open(file_path, 'w') as fp:
                json.dump(sim_par.to_dict(), fp)
            sp_info = {
                'id': base_name,
                'path': file_name,
                'full_path': os.path.abspath(file_path)
            }
            json_files.append(sp_info)
        log_file.write(json.dumps(json_files))
    except Exception as e:
        _logger.exception('Failed to generate simulation parameters.\n{}'.format(e))
        sys.exit(1)
    else:
        sys.exit(0)

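# A quick standalone check of the angle-shifting logic above (illustrative only):
# after adding start_north, values are wrapped back into the -360 to 360 range.
angles = [0, 90, 350]
shifted = [a + 45 for a in angles]  # hypothetical start_north of 45
for i, angle in enumerate(shifted):
    angle = angle - 360 if angle > 360 else angle
    angle = angle + 360 if angle < -360 else angle
    shifted[i] = angle
assert shifted == [45, 135, 35]  # 395 wraps around to 35
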
def measure_compatible_model_json(model_json_path, destination_directory=None):
    """Convert a Model JSON to one that is compatible with the honeybee_openstudio_gem.

    This includes the re-serialization of the Model to Python, which will
    automatically ensure that all Apertures and Doors point in the same direction
    as their parent Face. If the Model tolerance is non-zero and Rooms are closed
    solids, this will also ensure that all Room Faces point outwards from their
    parent's volume. If the Model units are not Meters, the model will be scaled
    to be in Meters. Lastly, apertures and doors with more than 4 vertices will
    be triangulated to ensure EnergyPlus accepts them.

    Args:
        model_json_path: File path to the Model JSON.
        destination_directory: The directory into which the Model JSON that is
            compatible with the honeybee_openstudio_gem should be written. If None,
            this will be the same location as the input model_json_path.
            Default: None.

    Returns:
        The full file path to the new Model JSON written out by this method.
    """
    # check that the file is there
    assert os.path.isfile(model_json_path), \
        'No JSON file found at {}.'.format(model_json_path)

    # get the directory and the file path for the new Model JSON
    directory, _ = os.path.split(model_json_path)
    dest_dir = directory if destination_directory is None else destination_directory
    dest_file_path = os.path.join(dest_dir, 'in.hbjson')

    # serialize the Model to Python
    with open(model_json_path) as json_file:
        data = json.load(json_file)
    parsed_model = Model.from_dict(data)

    # remove colinear vertices to avoid E+ tolerance issues and convert Model to Meters
    if parsed_model.tolerance != 0:
        for room in parsed_model.rooms:
            room.remove_colinear_vertices_envelope(parsed_model.tolerance)
    parsed_model.convert_to_units('Meters')

    # get the dictionary representation of the Model
    model_dict = parsed_model.to_dict(triangulate_sub_faces=True)

    # write the dictionary into a file
    preparedir(dest_dir, remove_content=False)  # create the directory if it's not there
    with open(dest_file_path, 'w') as fp:
        json.dump(model_dict, fp)
    return os.path.abspath(dest_file_path)

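# Hypothetical usage sketch for measure_compatible_model_json; the input path is
# a placeholder and the toolchain imports above are assumed to be available.
#
#     new_json = measure_compatible_model_json('C:/ladybug/model.hbjson')
#     # new_json now points to an 'in.hbjson' next to the input file, scaled to
#     # Meters and with multi-vertex sub-faces triangulated for EnergyPlus.
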
def to_geojson(self, location, point=Point2D(0, 0), folder=None, tolerance=0.01):
    """Convert Dragonfly Model to a geoJSON of building footprints.

    This geoJSON will be in a format that is compatible with the URBANopt SDK,
    including properties for floor_area, footprint_area, and
    detailed_model_filename, which will align with the paths to OpenStudio
    model (.osm) files output from honeybee Models translated to OSM.

    Args:
        location: A ladybug Location object possessing longitude and latitude data.
        point: A ladybug_geometry Point2D for where the location object exists
            within the space of a scene. The coordinates of this point are
            expected to be in the units of this Model. (Default: (0, 0)).
        folder: Text for the full path to the folder where the OpenStudio model
            files for each building are written. This is also the location where
            the geojson will be written. If None, the honeybee default simulation
            folder will be used (Default: None).
        tolerance: The minimum distance between points at which they are not
            considered touching. Default: 0.01, suitable for objects in meters.

    Returns:
        The path to a geoJSON file that contains polygons for all of the
        Buildings within the dragonfly model along with their properties
        (floor area, number of stories, etc.). The polygons will also possess
        detailed_model_filename keys that align with where OpenStudio models
        would be written, assuming the input folder matches that used to
        export OpenStudio models.
    """
    # set the default simulation folder
    if folder is None:
        folder = folders.default_simulation_folder
    else:
        preparedir(folder, remove_content=False)

    # get the geojson dictionary
    geojson_dict = self.to_geojson_dict(location, point, tolerance)

    # write out the dictionary to a geojson file
    project_folder = os.path.join(folder, self.identifier)
    preparedir(project_folder, remove_content=False)
    file_path = os.path.join(project_folder, '{}.geojson'.format(self.identifier))
    with open(file_path, 'w') as fp:
        json.dump(geojson_dict, fp, indent=4)
    return file_path

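# Hypothetical usage sketch for to_geojson; the Location values, folder path and
# the model variable are placeholders, and the method is assumed to be bound to
# a dragonfly Model instance.
#
#     from ladybug.location import Location
#     loc = Location(city='Boston', latitude=42.37, longitude=-71.03)
#     geojson_path = model.to_geojson(loc, folder='C:/sim/urbanopt_test')
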
def run_uwg(model, epw_file_path, simulation_parameter=None, directory=None,
            silent=False):
    """Run a UWG dictionary file through the UWG on any operating system.

    Args:
        model: A Dragonfly Model to be used to morph the EPW for the urban area.
        epw_file_path: The full path to an EPW file.
        simulation_parameter: A UWGSimulationParameter object that dictates various
            settings about the UWG simulation. If None, default parameters will be
            generated. (Default: None).
        directory: Text for the directory into which the UWG JSON and morphed
            urban EPW will be written. If None, it will be written into the
            ladybug default_epw_folder within a subfolder bearing the name of
            the dragonfly Model. (Default: None).
        silent: Boolean to note whether the simulation should be run silently.
            This only has an effect on Windows simulations since Unix-based
            simulations always use shell and are always silent (Default: False).

    Returns:
        The following files output from the UWG CLI run

        -   uwg_json -- Path to a .json file derived from the input uwg_dict.

        -   epw -- File path to the morphed EPW. Will be None if the UWG
            failed to run.
    """
    # get the name of the EPW and the directory into which the urban epw will be written
    epw_file_path = os.path.abspath(epw_file_path)
    epw_name = '{}.epw'.format(model.identifier)
    if directory is None:
        directory = os.path.join(lb_folders.default_epw_folder, model.identifier)
    preparedir(directory, remove_content=False)

    # write the model to a UWG dictionary
    uwg_dict = model.to.uwg(model, epw_file_path, simulation_parameter)
    uwg_json = os.path.join(directory, '{}_uwg.json'.format(model.identifier))
    with open(uwg_json, 'w') as fp:
        json.dump(uwg_dict, fp, indent=4)

    # run the simulation
    if os.name == 'nt':  # we are on Windows
        epw = _run_uwg_windows(uwg_json, epw_file_path, epw_name, silent)
    else:  # we are on Mac, Linux, or some other unix-based system
        epw = _run_uwg_unix(uwg_json, epw_file_path, epw_name)
    return uwg_json, epw

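# Hypothetical usage sketch for run_uwg; the model and EPW path are placeholders.
# The returned epw is None when the UWG fails, so callers should check it.
#
#     uwg_json, urban_epw = run_uwg(df_model, 'C:/epw/boston.epw', silent=True)
#     if urban_epw is None:
#         print('UWG failed; see the files next to {}'.format(uwg_json))
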
def _thermal_map_csv(folder, result_sql, temperature, condition, condition_intensity):
    """Write out the thermal mapping CSV files associated with every comfort map."""
    if folder is None:
        folder = os.path.join(os.path.dirname(result_sql), 'thermal_map')
    preparedir(folder, remove_content=False)
    result_file_dict = {
        'temperature': os.path.join(folder, 'temperature.csv'),
        'condition': os.path.join(folder, 'condition.csv'),
        'condition_intensity': os.path.join(folder, 'condition_intensity.csv')
    }
    _data_to_csv(temperature, result_file_dict['temperature'])
    _data_to_csv(condition, result_file_dict['condition'])
    _data_to_csv(condition_intensity, result_file_dict['condition_intensity'])
    return result_file_dict

def write_inputs_json(self, project_folder=None, indent=4, cpu_count=None):
    """Write the inputs.json file that gets passed to queenbee luigi.

    Note that running this method will automatically handle all of the inputs.

    Args:
        project_folder: The full path to where the inputs json file will be
            written. If None, the default_project_folder on this recipe will
            be used.
        indent: The indent at which the JSON will be written (Default: 4).
        cpu_count: An optional integer to override any inputs that are named
            "cpu-count". This can be used to coordinate such recipe inputs with
            the number of workers specified in recipe settings. If None, no
            overriding will happen. (Default: None).
    """
    # set up the project folder in which the inputs json will be written
    p_fold = project_folder if project_folder else self.default_project_folder
    if not os.path.isdir(p_fold):
        preparedir(p_fold)
    file_path = os.path.join(p_fold, '{}_inputs.json'.format(self.simulation_id))

    # create the inputs dictionary, ensuring all inputs are handled in the process
    inp_dict = {}
    for inp in self.inputs:
        inp.handle_value()
        if inp.is_path and inp.value is not None and inp.value != '':
            # copy the artifact to the project folder
            path_basename = os.path.basename(inp.value)
            dest = os.path.join(p_fold, path_basename)
            if os.path.isfile(inp.value):
                try:
                    shutil.copyfile(inp.value, dest)
                except shutil.SameFileError:
                    pass  # the file is already in the right place; no need to copy
            elif os.path.isdir(inp.value):
                copy_file_tree(inp.value, dest, overwrite=True)
            inp_dict[inp.name] = path_basename
        elif inp.is_path and (inp.value is None or inp.value == ''):
            pass  # conditional artifact; ignore it
        elif inp.name == 'cpu-count' and cpu_count is not None:
            inp_dict[inp.name] = cpu_count
        else:
            inp_dict[inp.name] = inp.value

    # write the inputs dictionary to a file
    with open(file_path, 'w') as fp:
        json.dump(inp_dict, fp, indent=indent)
    return file_path

def model_radiant_enclosure_info(model_json, folder, log_file):
    """Translate a Model JSON file to a list of JSONs with radiant enclosure information.

    There will be one radiant enclosure JSON for each Model SensorGrid written to
    the output folder and each JSON will contain a list of values noting which
    room (or radiant enclosure) each sensor is located in. JSONs will also include
    a mapper that links the integers of each sensor with the identifier(s) of a room.

    \b
    Args:
        model_json: Full path to a Model JSON file (HBJSON) or a Model pkl
            (HBpkl) file.
    """
    try:
        # re-serialize the Model
        model = Model.from_file(model_json)

        # set the default folder if it's not specified
        if folder is None:
            folder = os.path.dirname(os.path.abspath(model_json))
            folder = os.path.join(folder, 'enclosure')
        if not os.path.isdir(folder):
            preparedir(folder)  # create the directory if it's not there

        # loop through sensor grids and build up the radiant enclosure dicts
        grids_info = []
        for grid in model.properties.radiance.sensor_grids:
            # write an enclosure JSON for each grid
            enc_dict = grid.enclosure_info_dict(model)
            enclosure_file = os.path.join(folder, '{}.json'.format(grid.identifier))
            with open(enclosure_file, 'w') as fp:
                json.dump(enc_dict, fp)
            g_info = {
                'id': grid.identifier,
                'enclosure_path': enclosure_file,
                'enclosure_full_path': os.path.abspath(enclosure_file),
                'count': grid.count
            }
            grids_info.append(g_info)

        # write out the list of radiant enclosure JSON info
        log_file.write(json.dumps(grids_info, indent=4))
    except Exception as e:
        _logger.exception('Model translation to radiant enclosure failed.\n{}'.format(e))
        sys.exit(1)
    else:
        sys.exit(0)

def simulate_osm(osm_file, epw_file, folder, log_file):
    """Simulate an OSM file in EnergyPlus.

    \b
    Args:
        osm_file: Full path to a simulate-able .osm file.
        epw_file: Full path to an .epw file.
    """
    try:
        # set the folder to the default if it's not specified and copy the OSM there
        if folder is None:
            proj_name = os.path.basename(osm_file).replace('.osm', '')
            folder = os.path.join(folders.default_simulation_folder, proj_name)
        preparedir(folder, remove_content=False)
        base_osm = os.path.join(folder, 'in.osm')
        shutil.copy(osm_file, base_osm)

        # create a blank osw for the translation
        osw_dict = {'seed_file': osm_file, 'weather_file': epw_file}
        osw = os.path.join(folder, 'workflow.osw')
        with open(osw, 'w') as fp:
            json.dump(osw_dict, fp, indent=4)

        # run the OSW through the OpenStudio CLI
        osm, idf = run_osw(osw)

        # run the resulting IDF through EnergyPlus
        if idf is not None and os.path.isfile(idf):
            gen_files = [osw, osm, idf]
            sql, eio, rdd, html, err = run_idf(idf, epw_file)
            if err is not None and os.path.isfile(err):
                gen_files.extend([sql, eio, rdd, html, err])
                err_obj = Err(err)
                for error in err_obj.fatal_errors:
                    log_file.write(err_obj.file_contents)  # log before raising the error
                    raise Exception(error)
            else:
                raise Exception('Running EnergyPlus failed.')
        else:
            raise Exception('Running OpenStudio CLI failed.')
        log_file.write(json.dumps(gen_files))
    except Exception as e:
        _logger.exception('OSM simulation failed.\n{}'.format(e))
        sys.exit(1)
    else:
        sys.exit(0)

def model_to_osm(model_json, sim_par_json, folder, check_model, log_file):
    """Translate a Model JSON file into an OpenStudio Model and corresponding IDF.

    \b
    Args:
        model_json: Full path to a Model JSON file.
    """
    try:
        # set the default folder if it's not specified
        if folder is None:
            folder = os.path.dirname(os.path.abspath(model_json))
        preparedir(folder, remove_content=False)

        # generate default simulation parameters
        if sim_par_json is None:
            sim_par = SimulationParameter()
            sim_par.output.add_zone_energy_use()
            sim_par.output.add_hvac_energy_use()
            sim_par_dict = sim_par.to_dict()
            sp_json = os.path.abspath(os.path.join(folder, 'simulation_parameter.json'))
            with open(sp_json, 'w') as fp:
                json.dump(sim_par_dict, fp)
            sim_par_json = sp_json  # use the written defaults in the OSW below

        # run the Model re-serialization and check if specified
        if check_model:
            model_json = measure_compatible_model_json(model_json, folder)

        # write the osw file to translate the model to osm
        osw = to_openstudio_osw(folder, model_json, sim_par_json)

        # run the measure to translate the model JSON to an OpenStudio Model
        if osw is not None and os.path.isfile(osw):
            osm, idf = run_osw(osw)
            if idf is not None and os.path.isfile(idf):
                log_file.write(json.dumps([osm, idf]))
            else:
                raise Exception('Running OpenStudio CLI failed.')
        else:
            raise Exception('Writing OSW file failed.')
    except Exception as e:
        _logger.exception('Model translation failed.\n{}'.format(e))
        sys.exit(1)
    else:
        sys.exit(0)

def test_three_phase_rmtxop():
    runner = CliRunner()
    sky_vector = "./tests/assets/sky/sky.mtx"
    view_matrix = "./tests/assets/multi_phase/matrices/view.vmx"
    t_matrix = "./tests/assets/clear.xml"
    daylight_matrix = "./tests/assets/multi_phase/matrices/daylight.dmx"
    output_folder = "./tests/assets/temp"
    preparedir(output_folder)
    output_matrix = "./tests/assets/temp/three_phase.res"

    cmd_args = [view_matrix, t_matrix, daylight_matrix, sky_vector, output_matrix]
    result = runner.invoke(three_phase_rmtxop, cmd_args)
    assert result.exit_code == 0
    assert os.path.isfile("./tests/assets/temp/three_phase.res")
    nukedir(output_folder)

def _write_views(folder, model, views_filter, full_match=False):
    """Write out the view files.

    Args:
        folder: The views folder.
        model: A Honeybee model.
        views_filter: A list of view names to filter the views in the model. Use
            this argument to indicate specific views that should be included. By
            default, all the views will be exported. You can use wildcard symbols
            in names. Use relative path from inside views folder.
        full_match: A boolean to filter views by their identifiers as full
            matches. (Default: False).

    Returns:
        The path to _info.json, which includes the information for the views
        that are written to the folder.
    """
    model_views = model.properties.radiance.views
    filtered_views = _filter_by_pattern(model_views, views_filter, full_match=full_match)
    if len(filtered_views) != 0:
        preparedir(folder)
        views_info = []
        for view in filtered_views:
            view.to_file(folder)
            info_file = os.path.join(folder, '{}.json'.format(view.identifier))
            with open(info_file, 'w') as fp:
                json.dump(view.info_dict(model), fp, indent=4)
            view_info = {
                'name': view.identifier,
                'identifier': view.identifier,
                'group': view.group_identifier or '',
                'full_id': view.full_identifier
            }
            views_info.append(view_info)
        # write information file for all the views
        views_info_file = os.path.join(folder, '_info.json')
        with open(views_info_file, 'w') as fp:
            json.dump(views_info, fp, indent=2)
        return views_info_file
    elif len(model_views) != 0:
        raise ValueError('All views were filtered out of the model folder!')

def setup_resource_folders(overwrite=False):
    """Set up user resource folders in their respective locations.

    Args:
        overwrite: Boolean to note whether existing resources should be
            preserved, with folders only set up if they do not already exist
            (False), or whether all existing resources should be
            overwritten (True).
    """
    # first check if there's an environment variable available for APPDATA
    app_folder = os.getenv('APPDATA')
    if app_folder is not None:
        resource_folder = os.path.join(app_folder, 'ladybug_tools')

        # set up the user standards
        lib_folder = os.path.join(resource_folder, 'standards')
        for sub_f in STANDARDS_SUBFOLDERS:
            sub_lib_folder = os.path.join(lib_folder, sub_f)
            if not os.path.isdir(sub_lib_folder) or overwrite:
                preparedir(sub_lib_folder)

        # set up the user weather
        epw_folder = os.path.join(resource_folder, 'weather')
        if not os.path.isdir(epw_folder) or overwrite:
            if os.path.isdir(epw_folder):
                nukedir(epw_folder, rmdir=True)  # delete all sub-folders
            preparedir(epw_folder)

        # set up the user measures folder
        measure_folder = os.path.join(resource_folder, 'measures')
        if not os.path.isdir(measure_folder) or overwrite:
            if os.path.isdir(measure_folder):
                nukedir(measure_folder, rmdir=True)  # delete all sub-folders
            preparedir(measure_folder)
        return resource_folder

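# Hypothetical usage sketch: a one-time, non-destructive setup of the user
# resource folders. With the default overwrite=False, anything that already
# exists is left in place; the function returns None when APPDATA is unset.
#
#     res_folder = setup_resource_folders()
#     # e.g. C:/Users/<name>/AppData/Roaming/ladybug_tools on Windows
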
def write_inputs_json(self, simulation_folder=None, indent=4):
    """Write the inputs.json file that gets passed to queenbee luigi.

    Args:
        simulation_folder: The full path to where the inputs.json file will be
            written and where the simulation will be run. If None, the
            default_simulation_path on this Workflow will be used.
        indent: The indent at which the JSON will be written (Default: 4).
    """
    sim_fold = simulation_folder if simulation_folder else self.default_simulation_path
    inputs = self._info['inputs'].copy()  # avoid editing the base dictionary
    process_inputs(inputs, sim_fold)
    if self.simulation_id:
        inputs['simulation-id'] = self.simulation_id

    # write the inputs dictionary into a file
    if not os.path.isdir(sim_fold):
        preparedir(sim_fold)
    file_path = os.path.join(sim_fold, '{}-inputs.json'.format(self.name))
    with open(file_path, 'w') as fp:
        json.dump(inputs, fp, indent=indent)
    return file_path

def download_file_by_name(url, target_folder, file_name, mkdir=False):
    """Download a file to a directory.

    Args:
        url: A string to a valid URL.
        target_folder: Target folder for download (e.g. c:/ladybug).
        file_name: File name (e.g. testPts.zip).
        mkdir: Set to True to create the directory if it doesn't exist
            (Default: False).
    """
    # create the target directory
    if not os.path.isdir(target_folder):
        if mkdir:
            preparedir(target_folder)
        else:
            created = preparedir(target_folder, False)
            if not created:
                raise ValueError("Failed to find %s." % target_folder)
    file_path = os.path.join(target_folder, file_name)

    # set the security protocol to the most recent version
    try:
        # TLS 1.2 is needed to download over https
        System.Net.ServicePointManager.SecurityProtocol = \
            System.Net.SecurityProtocolType.Tls12
    except AttributeError:
        # TLS 1.2 is not provided by MacOS .NET in Rhino 5
        if url.lower().startswith('https'):
            print('This system lacks the necessary security'
                  ' libraries to download over https.')

    # attempt to download the file
    client = System.Net.WebClient()
    try:
        client.DownloadFile(url, file_path)
    except Exception as e:
        raise Exception(' Download failed with the error:\n{}'.format(e))

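# Hypothetical usage sketch for download_file_by_name; the URL is a placeholder.
# With mkdir=True, the target folder is created when it does not already exist.
#
#     download_file_by_name(
#         'https://example.com/assets/testPts.zip', 'c:/ladybug', 'testPts.zip',
#         mkdir=True)
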
def _write_sensor_grids(folder, model, grids_filter):
    """Write out the sensor grid files.

    Args:
        folder: The sensor grids folder.
        model: A Honeybee model.
        grids_filter: A list of sensor grid names to filter the sensor grids in
            the model. Use this argument to indicate specific sensor grids that
            should be included. By default, all the sensor grids will be exported.
            You can use wildcard symbols in names. Use relative path from inside
            grids folder.

    Returns:
        A tuple for the paths to _info.json and _model_grids_info.json. The first
        file includes the information for the sensor grids that are written to
        the folder and the second one is the information for the input sensor
        grids from the model. Use ``_info.json`` to access the sensor grid
        information for running the commands and the ``_model_grids_info`` file
        for loading the results back to match with the model. Model_grids_info
        has an extra key for ``start_ln``, which provides the start line for
        where the sensors for each grid begin in a pts file. Unless there are
        grids with the same identifier, this value will be 0.
    """
    sensor_grids = model.properties.radiance.sensor_grids
    filtered_grids = _filter_by_pattern(sensor_grids, grids_filter)
    if len(filtered_grids) != 0:
        grids_info = []
        preparedir(folder)
        # group the grids by identifier and write them to the folder
        grouped_grids = _group_by_identifier(filtered_grids)
        for grid in grouped_grids:
            fp = grid.to_file(folder)
            info_dir = os.path.dirname(fp)
            info_file = os.path.join(info_dir, '{}.json'.format(grid.identifier))
            with open(info_file, 'w') as fp:
                json.dump(grid.info_dict(model), fp, indent=4)
            grid_info = {
                'name': grid.identifier,
                'identifier': grid.identifier,
                'count': grid.count,
                'group': grid.group_identifier or '',
                'full_id': grid.full_identifier
            }
            grids_info.append(grid_info)
        # write information file for all the grids
        grids_info_file = os.path.join(folder, '_info.json')
        with open(grids_info_file, 'w') as fp:
            json.dump(grids_info, fp, indent=2)

        # write input grids info
        model_grids_info = []
        start_line = defaultdict(lambda: 0)
        for grid in filtered_grids:
            identifier = grid.identifier
            grid_info = {
                'name': identifier,
                'identifier': identifier,
                'count': grid.count,
                'group': grid.group_identifier or '',
                'full_id': grid.full_identifier,
                'start_ln': start_line[identifier]
            }
            start_line[identifier] += grid.count
            model_grids_info.append(grid_info)
        model_grids_info_file = os.path.join(folder, '_model_grids_info.json')
        with open(model_grids_info_file, 'w') as fp:
            json.dump(model_grids_info, fp, indent=2)

        return grids_info_file, model_grids_info_file

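# Standalone illustration of the start_ln bookkeeping above: grids that share an
# identifier are concatenated into a single .pts file, so each repeated grid
# starts where the previous one ended. The identifiers and counts are made up.
from collections import defaultdict

start_line = defaultdict(lambda: 0)
for identifier, count in [('grid_a', 10), ('grid_b', 4), ('grid_a', 6)]:
    print('{} starts at line {}'.format(identifier, start_line[identifier]))
    start_line[identifier] += count
# prints: grid_a starts at line 0, grid_b at line 0, the second grid_a at line 10
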
# duplicate model to avoid mutating it as we edit it for energy simulation
_model = _model.duplicate()
# remove colinear vertices using the Model tolerance to avoid E+ tolerance issues
for room in _model.rooms:
    room.remove_colinear_vertices_envelope(_model.tolerance)
# auto-assign stories if there are none since most OpenStudio measures need these
if len(_model.stories) == 0:
    _model.assign_stories_by_floor_height()
# scale the model if the units are not meters
if _model.units != 'Meters':
    _model.convert_to_units('Meters')

# delete any existing files in the directory and prepare it for simulation
nukedir(directory, True)
preparedir(directory)
sch_directory = os.path.join(directory, 'schedules')
preparedir(sch_directory)

# write the model parameter JSONs
model_dict = _model.to_dict(triangulate_sub_faces=True)
model_json = os.path.join(directory, '{}.hbjson'.format(_model.identifier))
with open(model_json, 'w') as fp:
    json.dump(model_dict, fp)

# write the simulation parameter JSONs
sim_par_dict = _sim_par_.to_dict()
sim_par_json = os.path.join(directory, 'simulation_parameter.json')
with open(sim_par_json, 'w') as fp:
    json.dump(sim_par_dict, fp)

def create_view_factor_modifiers(model_file, exclude_sky, exclude_ground,
                                 individual_shades, triangulate, folder, name):
    """Translate a Model into an Octree and corresponding modifier list for view factors.

    \b
    Args:
        model_file: Full path to a Model JSON file (HBJSON) or a Model pkl
            (HBpkl) file.
    """
    try:
        # create the directory if it's not there
        if not os.path.isdir(folder):
            preparedir(folder)

        # load the model and ensure the properties align with the energy model
        model = Model.from_file(model_file)
        original_units = None
        if model.units != 'Meters':
            original_units = model.units
            model.convert_to_units('Meters')
        for room in model.rooms:
            room.remove_colinear_vertices_envelope(tolerance=0.01, delete_degenerate=True)
        if original_units is not None:
            model.convert_to_units(original_units)

        # triangulate the sub-faces if requested
        if triangulate:
            apertures, parents_to_edit = model.triangulated_apertures()
            for tri_aps, edit_infos in zip(apertures, parents_to_edit):
                if len(edit_infos) == 3:
                    for room in model._rooms:
                        if room.identifier == edit_infos[2]:
                            break
                    for face in room._faces:
                        if face.identifier == edit_infos[1]:
                            break
                    for i, ap in enumerate(face._apertures):
                        if ap.identifier == edit_infos[0]:
                            break
                    face._apertures.pop(i)  # remove the aperture to replace
                    face._apertures.extend(tri_aps)
            doors, parents_to_edit = model.triangulated_doors()
            for tri_drs, edit_infos in zip(doors, parents_to_edit):
                if len(edit_infos) == 3:
                    for room in model._rooms:
                        if room.identifier == edit_infos[2]:
                            break
                    for face in room._faces:
                        if face.identifier == edit_infos[1]:
                            break
                    for i, dr in enumerate(face._doors):
                        if dr.identifier == edit_infos[0]:
                            break
                    face._doors.pop(i)  # remove the door to replace
                    face._doors.extend(tri_drs)

        # set values to be used throughout the modifier assignment
        offset = model.tolerance * -1
        white_plastic = Plastic('white_plastic', 1, 1, 1)
        geo_strs, mod_strs, mod_names = [], [], []

        def _add_geo_and_modifier(hb_obj):
            """Add a honeybee object to the geometry and modifier strings."""
            mod_name = '%s_mod' % hb_obj.identifier
            mod_names.append(mod_name)
            white_plastic.identifier = mod_name
            rad_poly = Polygon(hb_obj.identifier, hb_obj.vertices, white_plastic)
            geo_strs.append(rad_poly.to_radiance(False, False, False))
            mod_strs.append(white_plastic.to_radiance(True, False, False))

        # loop through all geometry in the model and get radiance strings
        for room in model.rooms:
            for face in room.faces:
                if not isinstance(face.type, AirBoundary):
                    if isinstance(face.boundary_condition, Surface):
                        face.move(face.normal * offset)
                    _add_geo_and_modifier(face)
                    for ap in face.apertures:
                        _add_geo_and_modifier(ap)
                    for dr in face.doors:
                        _add_geo_and_modifier(dr)
        all_shades = model.shades + model._orphaned_faces + \
            model._orphaned_apertures + model._orphaned_doors
        if individual_shades:
            for shade in all_shades:
                _add_geo_and_modifier(shade)
        else:
            white_plastic.identifier = 'shade_plastic_mod'
            mod_names.append(white_plastic.identifier)
            mod_strs.append(white_plastic.to_radiance(True, False, False))
            for shade in all_shades:
                rad_poly = Polygon(shade.identifier, shade.vertices, white_plastic)
                geo_strs.append(rad_poly.to_radiance(False, False, False))

        # add the ground and sky domes if requested
        if not exclude_sky:
            mod_names.append('sky_glow_mod')
            mod_strs.append('void glow sky_glow_mod 0 0 4 1 1 1 0')
            geo_strs.append('sky_glow_mod source sky_dome 0 0 4 0 0 1 180')
        if not exclude_ground:
            mod_names.append('ground_glow_mod')
            mod_strs.append('void glow ground_glow_mod 0 0 4 1 1 1 0')
            geo_strs.append('ground_glow_mod source ground_dome 0 0 4 0 0 -1 180')

        # write the radiance strings to the output folder
        geo_file = os.path.join(folder, '{}.rad'.format(name))
        mod_file = os.path.join(folder, '{}.mod'.format(name))
        oct_file = os.path.join(folder, '{}.oct'.format(name))
        with open(geo_file, 'w') as gf:
            gf.write('\n\n'.join(mod_strs + geo_strs))
        with open(mod_file, 'w') as mf:
            mf.write('\n'.join(mod_names))

        # use the radiance files to create an octree
        cmd = Oconv(output=oct_file, inputs=[geo_file])
        cmd.options.f = True
        run_command(cmd.to_radiance(), env=folders.env)
    except Exception as e:
        _logger.exception('Model translation failed.\n{}'.format(e))
        sys.exit(1)
    else:
        sys.exit(0)

def simulate_model(model_json, epw_file, sim_par_json, base_osw, folder,
                   check_model, log_file):
    """Simulate a Model JSON file in EnergyPlus.

    \b
    Args:
        model_json: Full path to a Model JSON file.
        epw_file: Full path to an .epw file.
    """
    try:
        # check that the model JSON and the EPW file are there
        assert os.path.isfile(model_json), \
            'No Model JSON file found at {}.'.format(model_json)
        assert os.path.isfile(epw_file), \
            'No EPW file found at {}.'.format(epw_file)

        # ddy variable that might get used later
        epw_folder, epw_file_name = os.path.split(epw_file)
        ddy_file = os.path.join(epw_folder, epw_file_name.replace('.epw', '.ddy'))

        # set the folder to the default if it's not specified
        if folder is None:
            proj_name = os.path.basename(model_json).replace('.json', '')
            folder = os.path.join(
                folders.default_simulation_folder, proj_name, 'OpenStudio')
        preparedir(folder, remove_content=False)

        # process the simulation parameters and write new ones if necessary
        def write_sim_par(sim_par):
            """Write simulation parameter object to a JSON."""
            sim_par_dict = sim_par.to_dict()
            sp_json = os.path.abspath(os.path.join(folder, 'simulation_parameter.json'))
            with open(sp_json, 'w') as fp:
                json.dump(sim_par_dict, fp)
            return sp_json

        if sim_par_json is None:  # generate some default simulation parameters
            sim_par = SimulationParameter()
            sim_par.output.add_zone_energy_use()
            sim_par.output.add_hvac_energy_use()
            if os.path.isfile(ddy_file):
                sim_par.sizing_parameter.add_from_ddy_996_004(ddy_file)
            else:
                raise ValueError(
                    'No sim-par-json was input and there is no .ddy file next to '
                    'the .epw.\nAt least one of these two criteria must be '
                    'satisfied for a successful simulation.')
            sim_par_json = write_sim_par(sim_par)
        else:
            assert os.path.isfile(sim_par_json), \
                'No simulation parameter file found at {}.'.format(sim_par_json)
            with open(sim_par_json) as json_file:
                data = json.load(json_file)
            sim_par = SimulationParameter.from_dict(data)
            if len(sim_par.sizing_parameter.design_days) == 0 and \
                    os.path.isfile(ddy_file):
                sim_par.sizing_parameter.add_from_ddy_996_004(ddy_file)
                sim_par_json = write_sim_par(sim_par)
            elif len(sim_par.sizing_parameter.design_days) == 0:
                raise ValueError(
                    'No design days were found in the input sim-par-json and there '
                    'is no .ddy file next to the .epw.\nAt least one of these two '
                    'criteria must be satisfied for a successful simulation.')

        # run the Model re-serialization and check if specified
        if check_model:
            model_json = measure_compatible_model_json(model_json, folder)

        # write the osw file to translate the model to osm
        osw = to_openstudio_osw(folder, model_json, sim_par_json,
                                base_osw=base_osw, epw_file=epw_file)

        # run the measure to translate the model JSON to an OpenStudio Model
        if osw is not None and os.path.isfile(osw):
            gen_files = [osw]
            if base_osw is None:  # separate the OS CLI run from the E+ run
                osm, idf = run_osw(osw)
                # run the resulting idf through EnergyPlus
                if idf is not None and os.path.isfile(idf):
                    gen_files.extend([osm, idf])
                    sql, eio, rdd, html, err = run_idf(idf, epw_file)
                    if err is not None and os.path.isfile(err):
                        gen_files.extend([sql, eio, rdd, html, err])
                    else:
                        raise Exception('Running EnergyPlus failed.')
                else:
                    raise Exception('Running OpenStudio CLI failed.')
            else:  # run the whole simulation with the OpenStudio CLI
                osm, idf = run_osw(osw, measures_only=False)
                if idf is not None and os.path.isfile(idf):
                    gen_files.extend([osm, idf])
                else:
                    raise Exception('Running OpenStudio CLI failed.')
                sql, eio, rdd, html, err = output_energyplus_files(os.path.dirname(idf))
                if os.path.isfile(err):
                    gen_files.extend([sql, eio, rdd, html, err])
                else:
                    raise Exception('Running EnergyPlus failed.')
            log_file.write(json.dumps(gen_files))
        else:
            raise Exception('Writing OSW file failed.')
    except Exception as e:
        _logger.exception('Model simulation failed.\n{}'.format(e))
        sys.exit(1)
    else:
        sys.exit(0)

def simulate_model(model_json, epw_file, sim_par_json, obj_per_model, multiplier,
                   no_plenum, no_cap, shade_dist, base_osw, folder, log_file):
    """Simulate a Dragonfly Model JSON file in EnergyPlus.

    \b
    Args:
        model_json: Full path to a Dragonfly Model JSON file.
        epw_file: Full path to an .epw file.
    """
    try:
        # get a ddy variable that might get used later
        epw_folder, epw_file_name = os.path.split(epw_file)
        ddy_file = os.path.join(epw_folder, epw_file_name.replace('.epw', '.ddy'))

        # set the folder to the default if it's not specified
        if folder is None:
            proj_name = \
                os.path.basename(model_json).replace('.json', '').replace('.dfjson', '')
            folder = os.path.join(
                folders.default_simulation_folder, proj_name, 'OpenStudio')
        preparedir(folder, remove_content=False)

        # process the simulation parameters and write new ones if necessary
        def ddy_from_epw(epw_file, sim_par):
            """Produce a DDY from an EPW file."""
            epw_obj = EPW(epw_file)
            des_days = [epw_obj.approximate_design_day('WinterDesignDay'),
                        epw_obj.approximate_design_day('SummerDesignDay')]
            sim_par.sizing_parameter.design_days = des_days

        def write_sim_par(sim_par):
            """Write simulation parameter object to a JSON."""
            sim_par_dict = sim_par.to_dict()
            sp_json = os.path.abspath(os.path.join(folder, 'simulation_parameter.json'))
            with open(sp_json, 'w') as fp:
                json.dump(sim_par_dict, fp)
            return sp_json

        if sim_par_json is None:  # generate some default simulation parameters
            sim_par = SimulationParameter()
            sim_par.output.add_zone_energy_use()
            sim_par.output.add_hvac_energy_use()
        else:
            with open(sim_par_json) as json_file:
                data = json.load(json_file)
            sim_par = SimulationParameter.from_dict(data)
        if len(sim_par.sizing_parameter.design_days) == 0 and os.path.isfile(ddy_file):
            try:
                sim_par.sizing_parameter.add_from_ddy_996_004(ddy_file)
            except AssertionError:  # no design days within the DDY file
                ddy_from_epw(epw_file, sim_par)
        elif len(sim_par.sizing_parameter.design_days) == 0:
            ddy_from_epw(epw_file, sim_par)
        sim_par_json = write_sim_par(sim_par)

        # re-serialize the Dragonfly Model
        with open(model_json) as json_file:
            data = json.load(json_file)
        model = Model.from_dict(data)
        model.convert_to_units('Meters')

        # convert Dragonfly Model to Honeybee
        add_plenum = not no_plenum
        cap = not no_cap
        hb_models = model.to_honeybee(
            obj_per_model, shade_dist, multiplier, add_plenum, cap)

        # write out the honeybee JSONs and simulate them
        osms = []
        idfs = []
        sqls = []
        for hb_model in hb_models:
            model_dict = hb_model.to_dict(triangulate_sub_faces=True)
            directory = os.path.join(folder, hb_model.identifier)
            file_path = os.path.join(directory, '{}.json'.format(hb_model.identifier))
            preparedir(directory, remove_content=False)  # create the directory
            with open(file_path, 'w') as fp:
                json.dump(model_dict, fp, indent=4)

            # write the osw file to translate the model to osm
            osw = to_openstudio_osw(directory, file_path, sim_par_json,
                                    base_osw=base_osw, epw_file=epw_file)

            # run the measure to translate the model JSON to an OpenStudio Model
            if osw is not None and os.path.isfile(osw):
                if base_osw is None:  # separate the OS CLI run from the E+ run
                    osm, idf = run_osw(osw)
                    if idf is not None and os.path.isfile(idf):
                        sql, eio, rdd, html, err = run_idf(idf, epw_file)
                        osms.append(osm)
                        idfs.append(idf)
                        sqls.append(sql)
                        if err is None or not os.path.isfile(err):
                            raise Exception('Running EnergyPlus failed.')
                    else:
                        raise Exception('Running OpenStudio CLI failed.')
                else:  # run the whole simulation with the OpenStudio CLI
                    osm, idf = run_osw(osw, measures_only=False)
                    if idf is None or not os.path.isfile(idf):
                        raise Exception('Running OpenStudio CLI failed.')
                    sql, eio, rdd, html, err = \
                        output_energyplus_files(os.path.dirname(idf))
                    if err is None or not os.path.isfile(err):
                        raise Exception('Running EnergyPlus failed.')
                    osms.append(osm)
                    idfs.append(idf)
                    sqls.append(sql)
            else:
                raise Exception('Writing OSW file failed.')
        log_file.write(json.dumps({'osm': osms, 'idf': idfs, 'sql': sqls}))
    except Exception as e:
        _logger.exception('Model simulation failed.\n{}'.format(e))
        sys.exit(1)
    else:
        sys.exit(0)

        html.append(html_i)
        err.append(err_i)


if all_required_inputs(ghenv.Component) and _run:
    # global lists of outputs to be filled
    sql, zsz, rdd, html, err, err_objs = [], [], [], [], [], []

    # copy the IDFs into a sub-directory if they are not already labeled as in.idf
    idfs = []
    for idf_file_path in _idf:
        idf_dir, idf_file_name = os.path.split(idf_file_path)
        if idf_file_name != 'in.idf':  # copy the IDF file into a sub-directory
            sub_dir = os.path.join(idf_dir, 'run')
            target = os.path.join(sub_dir, 'in.idf')
            preparedir(sub_dir)
            shutil.copy(idf_file_path, target)
            idfs.append(target)
        else:
            idfs.append(idf_file_path)

    # run the IDF files through E+
    silent = True if _run == 2 else False
    if parallel_:
        tasks.Parallel.ForEach(range(len(idfs)), run_idf_and_report_errors)
    else:
        for i in range(len(idfs)):
            run_idf_and_report_errors(i)

    # print out the error report if there's only one;
    # otherwise it's too much data to be readable

def simulate_model(model_json, epw_file, sim_par_json, base_osw, folder,
                   check_model, log_file):
    """Simulate a Model JSON file in EnergyPlus.

    \b
    Args:
        model_json: Full path to a Model JSON file.
        epw_file: Full path to an .epw file.
    """
    try:
        # get a ddy variable that might get used later
        epw_folder, epw_file_name = os.path.split(epw_file)
        ddy_file = os.path.join(epw_folder, epw_file_name.replace('.epw', '.ddy'))

        # set the folder to the default if it's not specified
        if folder is None:
            proj_name = \
                os.path.basename(model_json).replace('.json', '').replace('.hbjson', '')
            folder = os.path.join(
                folders.default_simulation_folder, proj_name, 'OpenStudio')
        preparedir(folder, remove_content=False)

        # process the simulation parameters and write new ones if necessary
        def ddy_from_epw(epw_file, sim_par):
            """Produce a DDY from an EPW file."""
            epw_obj = EPW(epw_file)
            des_days = [epw_obj.approximate_design_day('WinterDesignDay'),
                        epw_obj.approximate_design_day('SummerDesignDay')]
            sim_par.sizing_parameter.design_days = des_days

        def write_sim_par(sim_par):
            """Write simulation parameter object to a JSON."""
            sim_par_dict = sim_par.to_dict()
            sp_json = os.path.abspath(os.path.join(folder, 'simulation_parameter.json'))
            with open(sp_json, 'w') as fp:
                json.dump(sim_par_dict, fp)
            return sp_json

        if sim_par_json is None:  # generate some default simulation parameters
            sim_par = SimulationParameter()
            sim_par.output.add_zone_energy_use()
            sim_par.output.add_hvac_energy_use()
        else:
            with open(sim_par_json) as json_file:
                data = json.load(json_file)
            sim_par = SimulationParameter.from_dict(data)
        if len(sim_par.sizing_parameter.design_days) == 0 and os.path.isfile(ddy_file):
            try:
                sim_par.sizing_parameter.add_from_ddy_996_004(ddy_file)
            except AssertionError:  # no design days within the DDY file
                ddy_from_epw(epw_file, sim_par)
        elif len(sim_par.sizing_parameter.design_days) == 0:
            ddy_from_epw(epw_file, sim_par)
        sim_par_json = write_sim_par(sim_par)

        # run the Model re-serialization and check if specified
        if check_model:
            model_json = measure_compatible_model_json(model_json, folder)

        # write the osw file to translate the model to osm
        osw = to_openstudio_osw(folder, model_json, sim_par_json,
                                base_osw=base_osw, epw_file=epw_file)

        # run the measure to translate the model JSON to an OpenStudio Model
        if osw is not None and os.path.isfile(osw):
            gen_files = [osw]
            if base_osw is None:  # separate the OS CLI run from the E+ run
                osm, idf = run_osw(osw)
                # run the resulting idf through EnergyPlus
                if idf is not None and os.path.isfile(idf):
                    gen_files.extend([osm, idf])
                    sql, eio, rdd, html, err = run_idf(idf, epw_file)
                    if err is not None and os.path.isfile(err):
                        gen_files.extend([sql, eio, rdd, html, err])
                    else:
                        raise Exception('Running EnergyPlus failed.')
                else:
                    raise Exception('Running OpenStudio CLI failed.')
            else:  # run the whole simulation with the OpenStudio CLI
                osm, idf = run_osw(osw, measures_only=False)
                if idf is not None and os.path.isfile(idf):
                    gen_files.extend([osm, idf])
                else:
                    raise Exception('Running OpenStudio CLI failed.')
                sql, eio, rdd, html, err = output_energyplus_files(os.path.dirname(idf))
                if os.path.isfile(err):
                    gen_files.extend([sql, eio, rdd, html, err])
                else:
                    raise Exception('Running EnergyPlus failed.')
            log_file.write(json.dumps(gen_files))
        else:
            raise Exception('Writing OSW file failed.')
    except Exception as e:
        _logger.exception('Model simulation failed.\n{}'.format(e))
        sys.exit(1)
    else:
        sys.exit(0)

try:
    from ladybug_rhino.grasshopper import all_required_inputs, give_warning
except ImportError as e:
    raise ImportError('\nFailed to import ladybug_rhino:\n\t{}'.format(e))


if all_required_inputs(ghenv.Component) and _write:
    # create the UWGSimulationParameter or use the input
    if _sim_par_ is not None:
        assert isinstance(_sim_par_, UWGSimulationParameter), \
            'Expected UWG Simulation Parameters. Got {}.'.format(type(_sim_par_))
    else:
        _sim_par_ = UWGSimulationParameter()

    if run_ is not None and run_ > 0:  # write and simulate the UWG JSON
        silent = True if run_ > 1 else False
        uwg_json, urban_epw = run_uwg(_model, _epw_file, _sim_par_, _folder_, silent)
        if urban_epw is None:
            msg = 'The Urban Weather Generator failed to run.'
            print(msg)
            give_warning(ghenv.Component, msg)
    else:  # only write the UWG JSON but don't run it
        # get the directory into which the urban epw will be written
        if _folder_ is None:
            _folder_ = os.path.join(lb_folders.default_epw_folder, _model.identifier)
        preparedir(_folder_, remove_content=False)
        # write the model to a UWG dictionary
        uwg_dict = _model.to.uwg(_model, _epw_file, _sim_par_)
        uwg_json = os.path.join(_folder_, '{}_uwg.json'.format(_model.identifier))
        with open(uwg_json, 'w') as fp:
            json.dump(uwg_dict, fp, indent=4)

def model_to_rad_folder(model, folder=None, config_file=None, minimal=False):
    r"""Write a honeybee model to a rad folder.

    The rad files in the resulting folders will include all geometry (Rooms,
    Faces, Shades, Apertures, Doors), all modifiers, and all states of dynamic
    objects. It also includes any SensorGrids and Views that are assigned to
    the model's radiance properties.

    Args:
        model: A honeybee Model for which the radiance folder will be written.
        folder: An optional folder to be used as the root of the model's Radiance
            folder. If None, the files will be written into a sub-directory of
            the honeybee-core default_simulation_folder. This sub-directory is
            specifically: default_simulation_folder/[MODEL IDENTIFIER]/Radiance
        config_file: An optional config file path to modify the default folder
            names. If None, ``folder.cfg`` in ``honeybee-radiance-folder`` will
            be used. (Default: None).
        minimal: Boolean to note whether the radiance strings should be written
            in a minimal format (with spaces instead of line breaks).
            Default: False.
    """
    # prepare the folder for simulation
    model_id = model.identifier
    if folder is None:
        folder = os.path.join(folders.default_simulation_folder, model_id, 'Radiance')
    if not os.path.isdir(folder):
        preparedir(folder)  # create the directory if it's not there
    model_folder = ModelFolder(folder, 'model', config_file)
    model_folder.write(folder_type=-1, cfg=folder_config.minimal, overwrite=True)

    # gather and write static apertures to the folder
    aps, aps_blk = model.properties.radiance.subfaces_by_blk()
    mods, mods_blk, mod_combs, mod_names = _collect_modifiers(aps, aps_blk, True)
    _write_static_files(
        folder, model_folder.aperture_folder(full=True), 'aperture',
        aps, aps_blk, mods, mods_blk, mod_combs, mod_names, False, minimal)

    # gather and write static faces
    faces, faces_blk = model.properties.radiance.faces_by_blk()
    f_mods, f_mods_blk, mod_combs, mod_names = _collect_modifiers(faces, faces_blk)
    _write_static_files(
        folder, model_folder.scene_folder(full=True), 'envelope',
        faces, faces_blk, f_mods, f_mods_blk, mod_combs, mod_names, True, minimal)

    # gather and write static shades
    shades, shades_blk = model.properties.radiance.shades_by_blk()
    s_mods, s_mods_blk, mod_combs, mod_names = _collect_modifiers(shades, shades_blk)
    _write_static_files(
        folder, model_folder.scene_folder(full=True), 'shades',
        shades, shades_blk, s_mods, s_mods_blk, mod_combs, mod_names, False, minimal)

    # write dynamic sub-face groups (apertures and doors)
    ext_dict = {}
    out_subfolder = model_folder.aperture_group_folder(full=True)
    dyn_subface = model.properties.radiance.dynamic_subface_groups
    if len(dyn_subface) != 0:
        preparedir(out_subfolder)
        for group in dyn_subface:
            if group.is_indoor:
                # TODO: Implement dynamic interior apertures once the radiance
                # folder structure is clear about how the "light path" should
                # be input
                raise NotImplementedError('Dynamic interior apertures are not '
                                          'currently supported by Model.to.rad_folder.')
            else:
                st_d = _write_dynamic_subface_files(folder, out_subfolder, group, minimal)
                _write_mtx_files(folder, out_subfolder, group, st_d, minimal)
                ext_dict[group.identifier] = st_d
        _write_dynamic_json(folder, out_subfolder, ext_dict)

    # write dynamic shade groups
    out_dict = {}
    in_dict = {}
    out_subfolder = model_folder.dynamic_scene_folder(full=True, indoor=False)
    in_subfolder = model_folder.dynamic_scene_folder(full=True, indoor=True)
    dyn_shade = model.properties.radiance.dynamic_shade_groups
    if len(dyn_shade) != 0:
        preparedir(out_subfolder)
        indoor_created = False
        for group in dyn_shade:
            if group.is_indoor:
                if not indoor_created:
                    preparedir(in_subfolder)
                    indoor_created = True
                st_d = _write_dynamic_shade_files(folder, in_subfolder, group, minimal)
                in_dict[group.identifier] = st_d
            else:
                st_d = _write_dynamic_shade_files(folder, out_subfolder, group, minimal)
                out_dict[group.identifier] = st_d
        _write_dynamic_json(folder, out_subfolder, out_dict)
        if indoor_created:
            _write_dynamic_json(folder, in_subfolder, in_dict)

    # copy all bsdfs into the bsdf folder
    bsdf_folder = model_folder.bsdf_folder(full=True)
    bsdf_mods = model.properties.radiance.bsdf_modifiers
    if len(bsdf_mods) != 0:
        preparedir(bsdf_folder)
        for bdf_mod in bsdf_mods:
            bsdf_name = os.path.split(bdf_mod.bsdf_file)[-1]
            new_bsdf_path = os.path.join(bsdf_folder, bsdf_name)
            shutil.copy(bdf_mod.bsdf_file, new_bsdf_path)

    # write the assigned sensor grids and views into the correct folder
    grid_dir = model_folder.grid_folder(full=True)
    grids = model.properties.radiance.sensor_grids
    if len(grids) != 0:
        grids_info = []
        preparedir(grid_dir)
        model.properties.radiance.check_duplicate_sensor_grid_display_names()
        for grid in grids:
            grid.to_file(grid_dir)
            info_file = os.path.join(grid_dir, '{}.json'.format(grid.display_name))
            with open(info_file, 'w') as fp:
                json.dump(grid.info_dict(model), fp, indent=4)
            grid_info = {'name': grid.display_name, 'count': grid.count}
            grids_info.append(grid_info)
        # write information file for all the grids
        grids_info_file = os.path.join(grid_dir, '_info.json')
        with open(grids_info_file, 'w') as fp:
            json.dump(grids_info, fp, indent=2)

    view_dir = model_folder.view_folder(full=True)
    views = model.properties.radiance.views
    if len(views) != 0:
        views_info = []
        preparedir(view_dir)
        model.properties.radiance.check_duplicate_view_display_names()
        for view in views:
            view.to_file(view_dir)
            info_file = os.path.join(view_dir, '{}.json'.format(view.display_name))
            with open(info_file, 'w') as fp:
                json.dump(view.info_dict(model), fp, indent=4)
            # TODO: see if it makes sense to use to_dict here instead of only
            # taking the name
            view_info = {'name': view.display_name}
            views_info.append(view_info)
        # write information file for all the views
        views_info_file = os.path.join(view_dir, '_info.json')
        with open(views_info_file, 'w') as fp:
            json.dump(views_info, fp, indent=2)

    return folder

def base_honeybee_osw(project_directory, sim_par_json=None, additional_measures=None,
                      additional_mapper_measures=None, base_osw=None, epw_file=None,
                      skip_report=True):
    """Create a honeybee_workflow.osw to be used as a base in URBANopt simulations.

    This method will also copy the Honeybee.rb mapper to this folder if it is
    available in the config of this library.

    Args:
        project_directory: Full path to a folder out of which the URBANopt
            simulation will be run. This is the folder that contains the
            feature geoJSON.
        sim_par_json: Optional file path to the SimulationParameter JSON. If None,
            the OpenStudio models generated in the URBANopt run will not have
            everything they need to be simulate-able unless such parameters are
            supplied from one of the additional_measures or the base_osw.
            (Default: None).
        additional_measures: An optional array of honeybee-energy Measure objects
            to be included in the output osw. These Measure objects must have
            values for all required input arguments or an exception will be
            raised while running this function. (Default: None).
        additional_mapper_measures: An optional array of dragonfly-energy
            MapperMeasure objects to be included in the output osw. These
            MapperMeasure objects must have values for all required input
            arguments or an exception will be raised while running this
            function. (Default: None).
        base_osw: Optional file path to an existing OSW JSON to be used as the
            base for the honeybee_workflow.osw. This is another way that outside
            measures can be incorporated into the workflow. (Default: None).
        epw_file: Optional file path to an EPW that should be associated with
            the output energy model. (Default: None).
        skip_report: Set to True to have the URBANopt default feature reporting
            measure skipped as part of the workflow. If False, the measure will
            be run after all simulations are complete. Note that this input has
            no effect if the default_feature_reports measure is already in the
            base_osw or additional_measures. (Default: True).

    Returns:
        The file path to the honeybee_workflow.osw written out by this method.
        This is used as the base for translating all features in the geoJSON.
    """
    # create a dictionary representation of the .osw with steps to run
    # the model measure and the simulation parameter measure
    if base_osw is None:
        osw_dict = {'steps': [], 'name': None, 'description': None}
    else:
        assert os.path.isfile(base_osw), \
            'No base OSW file found at {}.'.format(base_osw)
        with open(base_osw, 'r') as base_file:
            osw_dict = json.load(base_file)

    # add a simulation parameter step if it is specified
    if sim_par_json is not None:
        sim_par_dict = {
            'arguments': {'simulation_parameter_json': sim_par_json},
            'measure_dir_name': 'from_honeybee_simulation_parameter'
        }
        osw_dict['steps'].insert(0, sim_par_dict)

    # add the model json serialization into the steps
    model_measure_dict = {
        'arguments': {'model_json': 'model_json_to_be_mapped.json'},
        'measure_dir_name': 'from_honeybee_model'
    }
    osw_dict['steps'].insert(0, model_measure_dict)

    # assign the measure_paths to the osw_dict
    if 'measure_paths' not in osw_dict:
        osw_dict['measure_paths'] = []
    if hb_energy_folders.honeybee_openstudio_gem_path:  # add honeybee-openstudio measure
        m_dir = os.path.join(hb_energy_folders.honeybee_openstudio_gem_path, 'measures')
        osw_dict['measure_paths'].append(m_dir)

    # add any additional measures to the osw_dict
    if additional_measures or additional_mapper_measures:
        measures = []
        if additional_measures is not None:
            measures.extend(additional_measures)
        if additional_mapper_measures is not None:
            measures.extend(additional_mapper_measures)
        measure_paths = set()  # set of all unique measure paths
        # ensure measures are correctly ordered
        m_dict = {'ModelMeasure': [], 'EnergyPlusMeasure': [], 'ReportingMeasure': []}
        for measure in measures:
            m_dict[measure.type].append(measure)
        sorted_measures = m_dict['ModelMeasure'] + m_dict['EnergyPlusMeasure'] + \
            m_dict['ReportingMeasure']
        for measure in sorted_measures:
            measure.validate()  # ensure that all required arguments have values
            measure_paths.add(os.path.dirname(measure.folder))
            osw_dict['steps'].append(measure.to_osw_dict())  # add measure to workflow
            if isinstance(measure, MapperMeasure):
                _add_mapper_measure(project_directory, measure)
        for m_path in measure_paths:  # add outside measure paths
            osw_dict['measure_paths'].append(m_path)

    # add default feature reports if they aren't already in the steps
    all_measures = [step['measure_dir_name'] for step in osw_dict['steps']]
    if 'default_feature_reports' not in all_measures:
        report_measure_dict = {
            'arguments': {
                'feature_id': None,
                'feature_name': None,
                'feature_type': None,
                'feature_location': None
            },
            'measure_dir_name': 'default_feature_reports'
        }
        if skip_report:
            report_measure_dict['arguments']['__SKIP__'] = True
        osw_dict['steps'].append(report_measure_dict)

    # assign the epw_file to the osw if it is input
    if epw_file is not None:
        osw_dict['weather_file'] = epw_file

    # write the dictionary to a honeybee_workflow.osw
    mappers_dir = os.path.join(project_directory, 'mappers')
    if not os.path.isdir(mappers_dir):
        preparedir(mappers_dir)
    osw_json = os.path.join(mappers_dir, 'honeybee_workflow.osw')
    with open(osw_json, 'w') as fp:
        json.dump(osw_dict, fp, indent=4)

    # copy the Honeybee.rb mapper if it exists in the config
    if folders.mapper_path:
        shutil.copy(folders.mapper_path, os.path.join(mappers_dir, 'Honeybee.rb'))

    return os.path.abspath(osw_json)

def model_to_urbanopt(model, location, point=Point2D(0, 0), shade_distance=None,
                      use_multiplier=True, add_plenum=False, electrical_network=None,
                      folder=None, tolerance=0.01):
    r"""Generate an URBANopt feature geoJSON and honeybee JSONs from a dragonfly Model.

    Args:
        model: A dragonfly Model for which an URBANopt feature geoJSON and
            corresponding honeybee Model JSONs will be returned.
        location: A ladybug Location object possessing longitude and latitude data.
        point: A ladybug_geometry Point2D for where the location object exists
            within the space of a scene. The coordinates of this point are
            expected to be in the units of this Model. (Default: (0, 0)).
        shade_distance: An optional number to note the distance beyond which other
            objects' shade should not be exported into a given honeybee Model.
            This is helpful for reducing the simulation run time of each Model
            when other connected buildings are too far away to have a meaningful
            impact on the results. If None, all other buildings will be included
            as context shade in each and every Model. Set to 0 to exclude all
            neighboring buildings from the resulting models. (Default: None).
        use_multiplier: If True, the multipliers on the Model's Stories will be
            passed along to the generated Honeybee Room objects, indicating the
            simulation will be run once for each unique room and then results
            will be multiplied. If False, full geometry objects will be written
            for each and every floor in the building that are represented through
            multipliers and all resulting multipliers will be 1. (Default: True).
        add_plenum: Boolean to indicate whether ceiling/floor plenums should be
            auto-generated for the Rooms. (Default: False).
        electrical_network: An optional OpenDSS ElectricalNetwork that's
            associated with the dragonfly Model. (Default: None).
        folder: An optional folder to be used as the root of the model's URBANopt
            folder. If None, the files will be written into a sub-directory of
            the honeybee-core default_simulation_folder. This sub-directory is
            specifically: default_simulation_folder/[MODEL IDENTIFIER]
        tolerance: The minimum distance between points at which they are not
            considered touching. (Default: 0.01, suitable for objects in meters).

    Returns:
        A tuple with three values.

        -   feature_geojson -- The path to an URBANopt feature geoJSON that has
            been written by this method.

        -   hb_model_jsons -- An array of file paths to honeybee Model JSONs that
            correspond to the detailed_model_filename keys in the feature_geojson.

        -   hb_models -- An array of honeybee Model objects that were generated
            in the process of writing the URBANopt files.
    """
    # make sure the model is in meters and, if it's not, duplicate and scale it
    if model.units != 'Meters':
        conversion_factor = hb_model.conversion_factor_to_meters(model.units)
        point = point.scale(conversion_factor)
        if shade_distance is not None:
            shade_distance = shade_distance * conversion_factor
        tolerance = tolerance * conversion_factor
        model = model.duplicate()  # duplicate the model to avoid mutating the input
        model.convert_to_units('Meters')
        if electrical_network is not None:
            electrical_network = electrical_network.scale(conversion_factor)

    # prepare the folder for simulation
    if folder is None:  # use the default simulation folder
        folder = os.path.join(folders.default_simulation_folder, model.identifier)
    nukedir(folder, True)  # get rid of anything that exists in the folder already
    preparedir(folder)  # create the directory if it's not there

    # prepare the folder into which honeybee Model JSONs will be written
    hb_model_folder = os.path.join(folder, 'hb_json')  # folder for honeybee JSONs
    preparedir(hb_model_folder)

    # create the geoJSON dictionary
    geojson_dict = model.to_geojson_dict(location, point, tolerance=tolerance)
    for feature_dict in geojson_dict['features']:  # add the detailed model filename
        if feature_dict['properties']['type'] == 'Building':
            bldg_id = feature_dict['properties']['id']
            feature_dict['properties']['detailed_model_filename'] = \
                os.path.join(hb_model_folder, '{}.json'.format(bldg_id))

    # add the electrical network to the geoJSON dictionary
    if electrical_network is not None:
        electric_features = electrical_network.to_geojson_dict(
            model.buildings, location, point, tolerance=tolerance)
        geojson_dict['features'].extend(electric_features)
        electric_json = os.path.join(folder, 'electrical_database.json')
        with open(electric_json, 'w') as fp:
            json.dump(electrical_network.to_electrical_database_dict(), fp, indent=4)

    # write out the geoJSON file
    feature_geojson = os.path.join(folder, '{}.geojson'.format(model.identifier))
    with open(feature_geojson, 'w') as fp:
        json.dump(geojson_dict, fp, indent=4)

    # write out the honeybee Model JSONs from the model
    hb_model_jsons = []
    hb_models = model.to_honeybee(
        'Building', shade_distance, use_multiplier, add_plenum, tolerance=tolerance)
    for bldg_model in hb_models:
        bld_path = os.path.join(hb_model_folder, '{}.json'.format(bldg_model.identifier))
        model_dict = bldg_model.to_dict(triangulate_sub_faces=True)
        with open(bld_path, 'w') as fp:
            json.dump(model_dict, fp)
        hb_model_jsons.append(bld_path)

    return feature_geojson, hb_model_jsons, hb_models

def rcontrib_command_with_view_postprocess(
        octree, sensor_grid, modifiers, ray_count, rad_params,
        rad_params_locked, folder, name):
    """Run rcontrib to get spherical view factors from a sensor grid.

    \b
    Args:
        octree: Path to octree file.
        sensor_grid: Path to sensor grid file.
        modifiers: Path to modifiers file.
    """
    try:
        # create the directory if it's not there
        if not os.path.isdir(folder):
            preparedir(folder)

        # generate the ray vectors to be used in the view factor calculation
        if ray_count == 6:
            rays = ((1, 0, 0), (0, 1, 0), (0, 0, 1),
                    (-1, 0, 0), (0, -1, 0), (0, 0, -1))
        else:
            rays = _fibonacci_spiral(ray_count)
        ray_str = [' {} {} {}\n'.format(*ray) for ray in rays]

        # create a new .pts file with the view vectors
        ray_file = os.path.abspath(os.path.join(folder, '{}.pts'.format(name)))
        total_rays = 0
        with open(sensor_grid) as sg_file:
            with open(ray_file, 'w') as r_file:
                for line in sg_file:
                    for ray in ray_str:
                        try:
                            r_file.write(' '.join(line.split()[:3]) + ray)
                            total_rays += 1
                        except Exception:
                            pass  # we are at the end of the file

        # set up the Rcontrib options
        options = RcontribOptions()
        if rad_params:  # parse input radiance parameters
            options.update_from_string(rad_params.strip())
        if rad_params_locked:  # overwrite input values with protected ones
            options.update_from_string(rad_params_locked.strip())
        # overwrite specific options that would otherwise break the command
        options.M = modifiers
        options.update_from_string('-I -V- -y {}'.format(total_rays))

        # create the rcontrib command and run it
        mtx_file = os.path.abspath(os.path.join(folder, '{}.mtx'.format(name)))
        rcontrib = Rcontrib(options=options, octree=octree, sensors=ray_file)
        cmd = rcontrib.to_radiance().replace('\\', '/')
        cmd = '{} | rmtxop -fa - -c .333 .333 .334'.format(cmd)
        cmd = '{} | getinfo - > {}'.format(cmd, mtx_file.replace('\\', '/'))
        run_command(cmd, env=folders.env)

        # load the resulting matrix and process the results into view factors
        view_fac_mtx = []
        with open(mtx_file) as mtx_data:
            while True:
                sens_lines = list(islice(mtx_data, ray_count))
                if not sens_lines:
                    break
                sens_mtx = ((float(v) for v in ln.strip().split())
                            for ln in sens_lines)
                s_facs = []
                for sens_facs in zip(*sens_mtx):
                    s_facs.append(sum(sens_facs) / (math.pi * ray_count))
                view_fac_mtx.append(s_facs)

        # write the final view factors into a CSV file
        view_file = os.path.join(folder, '{}.csv'.format(name))
        with open(view_file, 'w') as v_file:
            for facs in view_fac_mtx:
                v_file.write(','.join((str(v) for v in facs)) + '\n')
    except Exception:
        _logger.exception('Failed to compute view factor contributions.')
        sys.exit(1)
    else:
        sys.exit(0)
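
# The _fibonacci_spiral helper is referenced above but not shown in this
# excerpt. A minimal sketch of such a generator, using the standard
# Fibonacci-lattice method to distribute roughly even unit vectors over a
# sphere, could look like the following. This is illustrative and not
# necessarily the exact implementation used here.
def _fibonacci_spiral_sketch(point_count=6):
    """Generate evenly distributed unit vectors on a sphere (Fibonacci lattice)."""
    import math
    points = []
    golden_angle = math.pi * (3. - math.sqrt(5.))  # ~2.39996 radians
    denom = max(point_count - 1, 1)  # guard against division by zero
    for i in range(point_count):
        y = 1 - (i / float(denom)) * 2  # y runs from 1 down to -1
        radius = math.sqrt(1 - y * y)  # radius of the circle at height y
        theta = golden_angle * i  # rotate by the golden angle each step
        x = math.cos(theta) * radius
        z = math.sin(theta) * radius
        points.append((x, y, z))
    return points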
        'captured_views')
except:
    home_folder = os.getenv('HOME') or os.path.expanduser('~')
    default_folder = os.path.join(home_folder, 'captured_views')

try:
    from ladybug_rhino.grasshopper import all_required_inputs, bring_to_front
    from ladybug_rhino.viewport import viewport_by_name, capture_view
except ImportError as e:
    raise ImportError('\nFailed to import ladybug_rhino:\n\t{}'.format(e))


if all_required_inputs(ghenv.Component) and _capture:
    # ensure the component runs last on the canvas
    bring_to_front(ghenv.Component)

    # prepare the folder
    folder = _folder_ if _folder_ is not None else default_folder
    preparedir(folder, remove_content=False)

    # get the viewport objects
    vp_names = viewport_ if len(viewport_) != 0 else [None]
    viewports = [viewport_by_name(vp) for vp in vp_names]

    # save the viewports to images
    for i, view_p in enumerate(viewports):
        f_name = _file_name if len(viewports) == 1 else \
            '{}_{}'.format(_file_name, vp_names[i])
        file_p = os.path.join(folder, f_name)
        fp = capture_view(view_p, file_p, width_, height_, mode_, transparent_)
        print(fp)
def run(self, settings=None, radiance_check=False, openstudio_check=False,
        energyplus_check=False, queenbee_path=None, silent=False,
        debug_folder=None):
    """Run the recipe using the queenbee local run command.

    Args:
        settings: An optional RecipeSettings object or RecipeSettings string
            to dictate the settings of the recipe run (eg. the number of
            workers or the project folder). If None, default settings will
            be assumed. (Default: None).
        radiance_check: Boolean to note whether the installed version of
            Radiance should be checked before executing the recipe. If there
            is no compatible version installed, an exception will be raised
            with a clear error message. (Default: False).
        openstudio_check: Boolean to note whether the installed version of
            OpenStudio should be checked before executing the recipe. If
            there is no compatible version installed, an exception will be
            raised with a clear error message. (Default: False).
        energyplus_check: Boolean to note whether the installed version of
            EnergyPlus should be checked before executing the recipe. If
            there is no compatible version installed, an exception will be
            raised with a clear error message. (Default: False).
        queenbee_path: Optional path to the queenbee executable. If None,
            the queenbee within the ladybug_tools Python folder will be used.
            Setting this to just 'queenbee' will use the system Python.
        silent: Boolean to note whether the recipe should be run silently on
            Windows (True) or with a command window (False). (Default: False).
        debug_folder: An optional path to a debug folder. If a debug folder
            is provided, all the steps of the simulation will be executed
            inside it, which can be used for further inspection.

    Returns:
        Path to the project folder containing the recipe results.
    """
    # perform any simulation engine checks
    if radiance_check:
        check_radiance_date()
    if openstudio_check:
        check_openstudio_version()
    if energyplus_check:
        check_energyplus_version()

    # parse the settings or use default ones
    if settings is not None:
        settings = RecipeSettings.from_string(settings) \
            if isinstance(settings, str) else settings
    else:
        settings = RecipeSettings()

    # get the folder out of which the recipe will be executed
    folder = self.default_project_folder if settings.folder is None \
        else settings.folder
    if not os.path.isdir(folder):
        preparedir(folder)  # create the directory if it's not there

    # delete any existing result files unless reload_old is True
    if not settings.reload_old and self.simulation_id is not None:
        wf_folder = os.path.join(folder, self.simulation_id)
        if os.path.isdir(wf_folder):
            nukedir(wf_folder, rmdir=True)

    # write the inputs JSON for the recipe and set up the environment variables
    inputs_json = self.write_inputs_json(folder, cpu_count=settings.workers)
    genv = {}
    genv['PATH'] = rad_folders.radbin_path
    genv['RAYPATH'] = rad_folders.radlib_path
    env_args = ['--env {}="{}"'.format(k, v) for k, v in genv.items()]

    # create the command
    qb_path = os.path.join(folders.python_scripts_path, 'queenbee') \
        if queenbee_path is None else queenbee_path
    command = '"{qb_path}" local run "{recipe_folder}" ' \
        '"{project_folder}" -i "{user_inputs}" --workers {workers} ' \
        '{environment} --name {simulation_name}'.format(
            qb_path=qb_path, recipe_folder=self.path, project_folder=folder,
            user_inputs=inputs_json, workers=settings.workers,
            environment=' '.join(env_args), simulation_name=self.simulation_id)
    if debug_folder is not None:
        command += ' --debug "{}"'.format(debug_folder)

    # execute the command
    shell = False if os.name == 'nt' and not silent else True
    if settings.report_out:
        process = subprocess.Popen(
            command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=shell)
        result = process.communicate()
        print(result[0])
        print(result[1])
    else:
        process = subprocess.Popen(command, shell=shell)
        result = process.communicate()  # freeze the canvas while running
    return folder
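
# A minimal usage sketch for the run() method above. The class is not named
# in this excerpt; the sketch assumes the lbt-recipes Recipe class, and the
# recipe name and file paths below are hypothetical.
def _example_recipe_run():
    """Illustrative only: load a recipe by name, assign inputs, and run it."""
    from lbt_recipes.recipe import Recipe, RecipeSettings

    recipe = Recipe('daylight-factor')  # hypothetical recipe name
    recipe.input_value_by_name('model', 'C:/project/model.hbjson')  # hypothetical path
    settings = RecipeSettings(folder='C:/project/simulation', workers=2)
    project_folder = recipe.run(settings, radiance_check=True, silent=True)
    return project_folder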