# NOTE(review): fragment of a Ladybug "sensor grid from rooms" style component.
# Line breaks were lost in this paste and are restored below. The per-room
# section presumably sits inside a `for room in ...:` loop whose header is
# above this fragment, and the final data-tree conversion belongs after that
# loop — confirm against the upstream file.
lb_mesh = to_joined_gridded_mesh3d(floor_faces, _grid_size, _dist_floor_)
# remove points outside of the room volume if requested
if remove_out_:
    pattern = [
        room.geometry.is_point_inside(pt)
        for pt in lb_mesh.face_centroids
    ]
    lb_mesh, vertex_pattern = lb_mesh.remove_faces(pattern)
# extract positions and directions from the mesh
base_points = [from_point3d(pt) for pt in lb_mesh.face_centroids]
base_poss = [(pt.x, pt.y, pt.z) for pt in lb_mesh.face_centroids]
base_dirs = [(vec.x, vec.y, vec.z) for vec in lb_mesh.face_normals]
# create the sensor grid
s_grid = SensorGrid.from_position_and_direction(
    room.identifier, base_poss, base_dirs)
s_grid.display_name = clean_rad_string(room.display_name)
s_grid.room_identifier = room.identifier
s_grid.mesh = lb_mesh
# append everything to the lists
grid.append(s_grid)
points.append(base_points)
mesh.append(from_mesh3d(lb_mesh))
# convert the lists of points to data trees
points = list_to_data_tree(points)
except ImportError as e: raise ImportError('\nFailed to import ladybug_rhino:\n\t{}'.format(e)) import math if all_required_inputs(ghenv.Component): # extract any rooms from input Models in_rooms = [] for hb_obj in _rooms: if isinstance(hb_obj, Model): in_rooms.extend(hb_obj.rooms) else: in_rooms.append(hb_obj) # process the north_ input if north_ is not None: try: north_vec = to_vector2d(north_) except AttributeError: # north angle instead of vector north_vec = Vector2D(0, 1).rotate(-math.radians(float(north_))) else: north_vec = Vector2D(0, 1) # group the rooms by orientation perim_rooms, core_rooms, orientations, = \ Room.group_by_orientation(in_rooms, n_groups_, north_vec) # convert list of lists to data tree perim_rooms = list_to_data_tree(perim_rooms)
# NOTE(review): fragment of a radiation-dome component; line breaks restored
# from a collapsed paste.
if l_par.max is None:
    l_par.max = max(total)

# output patch vectors (145 patches implies a Tregenza dome, otherwise Reinhart)
patch_vecs_lb = view_sphere.tregenza_dome_vectors if len(total) == 145 \
    else view_sphere.reinhart_dome_vectors
patch_vecs = [from_vector3d(vec) for vec in patch_vecs_lb]

# create the dome meshes
if not show_comp_:  # only create the total dome mesh
    mesh, compass, legend, title, mesh_values = \
        draw_dome(total, center_pt3d, 'Total', l_par)
    patch_values = total
else:  # create domes for total, direct and diffuse
    # loop through the 3 radiation types and produce a dome for each
    mesh, compass, legend, title, mesh_values = [], [], [], [], []
    rad_types = ('Total', 'Direct', 'Diffuse')
    rad_data = (total, direct, diffuse)
    for dome_i in range(3):
        # offset each successive dome to the right of the previous one
        cent_pt = Point3D(center_pt3d.x + radius * 3 * dome_i,
                          center_pt3d.y, center_pt3d.z)
        dome_mesh, dome_compass, dome_legend, dome_title, dome_values = \
            draw_dome(rad_data[dome_i], cent_pt, rad_types[dome_i], l_par)
        mesh.append(dome_mesh)
        compass.extend(dome_compass)
        legend.extend(dome_legend)
        title.append(dome_title)
        mesh_values.append(dome_values)
    patch_values = list_to_data_tree(rad_data)
    mesh_values = list_to_data_tree(mesh_values)
rev_vec = [ from_vector3d(to_vector3d(vec).reverse()) for vec in _vectors ] normals = [from_vector3d(vec) for vec in study_mesh.face_normals] points = [from_point3d(pt) for pt in study_mesh.face_centroids] hide_output(ghenv.Component, 1) # intersect the rays with the mesh int_matrix = intersect_mesh_rays(shade_mesh, points, rev_vec, normals, parallel=parallel_) # compute the results sun_visible = list_to_data_tree(int_matrix) if _timestep_ and _timestep_ != 1: # divide by the timestep before output results = [ sum(val / _timestep_ for val in int_list) for int_list in int_matrix ] else: # no division required results = [sum(int_list) for int_list in int_matrix] # create the mesh and legend outputs graphic = GraphicContainer(results, study_mesh.min, study_mesh.max, legend_par_) graphic.legend_parameters.title = 'hours' if legend_par_ is None or legend_par_.are_colors_default: graphic.legend_parameters.colors = Colorset.ecotect() title = text_objects('Direct Sun Study', graphic.lower_title_location,
# NOTE(review): fragment of a psychrometric-chart component; the statements at
# the top run inside loops (over data collections, indices i and j) whose
# headers are above this fragment. Line breaks restored from a collapsed paste.
psy_chart.z = z
psy_chart.original_temperature = original_temperature
draw_psych_chart(psy_chart)
psych_chart.append(psy_chart)
lb_mesh, container = psy_chart.data_mesh(d, leg_par_by_index(i + 1))
mesh.append(from_mesh2d(lb_mesh, z))
legend.append(legend_objects(container.legend))
move_vec = Vector2D(base_pt.x + move_dist * (i + 1), 0)
points.append(
    [from_point2d(pt.move(move_vec)) for pt in lb_points])
# add a title for the new chart
title_items = ['{} [{}]'.format(d.header.data_type, d.header.unit)] + \
    ['{}: {}'.format(key, val) for key, val in d.header.metadata.items()]
title[j + j * len(data_) + (i + 1)].append(
    text_objects('\n'.join(title_items),
                 psy_chart.container.upper_title_location,
                 psy_chart.legend_parameters.text_height * 1.5,
                 psy_chart.legend_parameters.font, 0, 0))

# unpack all of the python matrices into data trees
title = list_to_data_tree(title)
temp_lines = list_to_data_tree(temp_lines)
rh_lines = list_to_data_tree(rh_lines)
hr_lines = list_to_data_tree(hr_lines)
enth_wb_lines = list_to_data_tree(enth_wb_lines)
legend = list_to_data_tree(legend)
points = list_to_data_tree(points)
data = list_to_data_tree(data_colls)
# NOTE(review): fragment of a Wind Rose component. The opening of the first
# statement was truncated in this paste; the `orient_line = [...]` head below
# is reconstructed from context — confirm against the upstream source. The
# per-rose section presumably runs inside a loop (index i) whose header is
# above this fragment.
orient_line = [from_linesegment2d(seg, _center_pt_.z)
               for seg in windrose.orientation_lines]
freq_line = [from_polygon2d(poly, _center_pt_.z)
             for poly in windrose.frequency_lines]
windrose_lines = [from_polygon2d(poly, _center_pt_.z)
                  for poly in windrose.windrose_lines]
fac = (i + 1) * windrose.compass_radius * 3
center_pt_2d = Point2D(_center_pt_.x + fac, _center_pt_.y)

# collect everything to be output
mesh.append(msh)
all_compass.append(compass)
all_orient_line.append(orient_line)
all_freq_line.append(freq_line)
all_windrose_lines.append(windrose_lines)
all_legends.append(legend)
title.append(titl)
# calm hours only make sense for wind speed data
calm = windrose.zero_count \
    if isinstance(speed_data.header.data_type, Speed) else None
calm_hours.append(calm)
histogram.append(objectify_output('WindRose {}'.format(i),
                                  windrose.histogram_data))

# convert nested lists into data trees
compass = list_to_data_tree(all_compass)
orient_line = list_to_data_tree(all_orient_line)
freq_line = list_to_data_tree(all_freq_line)
windrose_line = list_to_data_tree(all_windrose_lines)
legend = list_to_data_tree(all_legends)
hide_output(ghenv.Component, 5)  # keep the default visual simple

# compute direction angles and prevailing direction
theta = 180.0 / windrose._number_of_directions
angles = [(angle + theta) % 360.0 for angle in windrose.angles[:-1]]
prevailing = windrose.prevailing_direction
# NOTE(review): fragment of a face-attribute-coloring component; the
# `try/except` pairing with the `raise` below is above this fragment.
# Line breaks restored from a collapsed paste.
raise ImportError('\nFailed to import ladybug_rhino:\n\t{}'.format(e))

if all_required_inputs(ghenv.Component):
    # extract any faces from input Rooms or Models
    faces = []
    for hb_obj in _hb_objs:
        if isinstance(hb_obj, Model):
            for room in hb_obj.rooms:
                faces.extend(room.faces)
                faces.extend(room.shades)
            faces.extend(hb_obj.orphaned_faces)
            faces.extend(hb_obj.orphaned_apertures)
            faces.extend(hb_obj.orphaned_doors)
            faces.extend(hb_obj.orphaned_shades)
        elif isinstance(hb_obj, Room):
            faces.extend(hb_obj.faces)
            faces.extend(hb_obj.shades)
        else:
            faces.append(hb_obj)

    # use the ColorFace object to get a set of attributes assigned to the faces
    color_obj = ColorFace(faces, _attribute)
    values = color_obj.attributes_unique

    # group the faces by their unique attribute value
    hb_objs = [[] for val in values]
    for atr, face in zip(color_obj.attributes, color_obj.flat_faces):
        atr_i = values.index(atr)
        hb_objs[atr_i].append(face)
    hb_objs = list_to_data_tree(hb_objs)
from honeybee.model import Model from honeybee.colorobj import ColorRoom except ImportError as e: raise ImportError('\nFailed to import honeybee:\n\t{}'.format(e)) try: # import the ladybug_rhino dependencies from ladybug_rhino.grasshopper import all_required_inputs, list_to_data_tree except ImportError as e: raise ImportError('\nFailed to import ladybug_rhino:\n\t{}'.format(e)) if all_required_inputs(ghenv.Component): # extract any rooms from input Models in_rooms = [] for hb_obj in _rooms: if isinstance(hb_obj, Model): in_rooms.extend(hb_obj.rooms) else: in_rooms.append(hb_obj) # use the ColorRoom object to get a set of attributes assigned to the rooms color_obj = ColorRoom(in_rooms, _attribute) values = color_obj.attributes_unique # loop through each of the rooms and get the attributes rooms = [[] for val in values] for atr, room in zip(color_obj.attributes, in_rooms): atr_i = values.index(atr) rooms[atr_i].append(room) rooms = list_to_data_tree(rooms)
Deconstruct a Ladybug Matrix object into a Grasshopper Data Tree of values. - Args: _matrix: A Ladybug Matrix object such as the intersection matrices output from any of the ray-tracing components (eg. "LB Direct Sun Hours"). Returns: values: The numerical values of the matrix as a Grasshopper Data Tree. """ ghenv.Component.Name = "LB Deconstruct Matrix" ghenv.Component.NickName = 'XMatrix' ghenv.Component.Message = '1.5.0' ghenv.Component.Category = 'Ladybug' ghenv.Component.SubCategory = '4 :: Extra' ghenv.Component.AdditionalHelpFromDocStrings = '0' try: from ladybug_rhino.grasshopper import all_required_inputs, de_objectify_output, \ list_to_data_tree, merge_data_tree except ImportError as e: raise ImportError('\nFailed to import ladybug_rhino:\n\t{}'.format(e)) if all_required_inputs(ghenv.Component): values = [] for i, mtx in enumerate(_matrix): values.append(list_to_data_tree(de_objectify_output(mtx), root_count=i)) values = merge_data_tree(values)
min_t = _min_max_[0] max_t = _min_max_[1] else: min_t = 100 max_t = 2000 # process the schedule and sun-up hours if _occ_sch_ is None: schedule = generate_default_schedule() elif isinstance(_occ_sch_, BaseCollection): schedule = _occ_sch_.values elif isinstance(_occ_sch_, str): if schedule_by_identifier is not None: schedule = schedule_by_identifier(_occ_sch_).values() else: raise ValueError('honeybee-energy must be installed to reference ' 'occupancy schedules by identifier.') else: # assume that it is a honeybee schedule object schedule = _occ_sch_.values() total_occupied_hours = sum(schedule) occ_pattern = parse_sun_up_hours(_results, schedule) # compute the annual metrics DA, UDI = [], [] for ill_file in _results: da, udi = annual_metrics(ill_file, occ_pattern, total_occupied_hours, _threshold_, min_t, max_t) DA.append(da) UDI.append(udi) DA = list_to_data_tree(DA) UDI = list_to_data_tree(UDI)
hour_plot.legend_parameters.font) title.append(tit_txt) # create the text label objects label1 = [ text_objects(txt, Plane(o=Point3D(pt.x, pt.y, _base_pt_.z)), hour_plot.legend_parameters.text_height, hour_plot.legend_parameters.font, 2, 3) for txt, pt in zip(hour_plot.hour_labels, hour_plot.hour_label_points2d) ] label2 = [ text_objects(txt, Plane(o=Point3D(pt.x, pt.y, _base_pt_.z)), hour_plot.legend_parameters.text_height, hour_plot.legend_parameters.font, 1, 0) for txt, pt in zip(hour_plot.month_labels, hour_plot.month_label_points2d) ] all_labels.append(label1 + label2) # increment the base point so that the next chart doesn't overlap this one try: next_tstep = _data[i + 1].header.analysis_period.timestep except IndexError: next_tstep = 1 increment = 24 * next_tstep * _y_dim_ * 1.5 _base_pt_ = Point3D(_base_pt_.x, _base_pt_.y - increment, _base_pt_.z) # convert nexted lists into data trees legend = list_to_data_tree(all_legends) borders = list_to_data_tree(all_borders) labels = list_to_data_tree(all_labels)
# NOTE(review): fragment of a straight-skeleton component; the statements at
# the top run inside a per-boundary loop whose header is above this fragment.
# Line breaks restored from a collapsed paste. The inner loop variable was
# renamed from `holes` to `hole_group` so it no longer shadows the outer
# `holes` list (behavior unchanged).
holes.append(h_poly)
boundaries.append(boundary)
hole_polygons.append(holes)
# compute the skeleton and convert to line segments
skel_lines = skeleton_as_edge_list(boundary, holes, tolerance)
skel_lines_rh = [
    from_linesegment2d(LineSegment2D.from_array(line), z_height)
    for line in skel_lines
]
polyskel.append(skel_lines_rh)

# try to compute core/perimeter polygons if an offset_ is input
if offset_:
    perim_poly, core_poly = [], []
    for bound, hole_group in zip(boundaries, hole_polygons):
        try:
            perim, core = perimeter_core_subpolygons(
                bound, offset_, hole_group, tolerance)
            perim_poly.append(
                [polygon_to_brep(p, z_height) for p in perim])
            core_poly.append([polygon_to_brep(p, z_height) for p in core])
        except RuntimeError as e:
            # best-effort: report the failure and keep place-holders so that
            # output branches stay aligned with the input boundaries
            print(e)
            perim_poly.append(None)
            core_poly.append(None)

# convert outputs to data trees
polyskel = list_to_data_tree(polyskel)
perim_poly = list_to_data_tree(perim_poly)
core_poly = list_to_data_tree(core_poly)
# NOTE(review): fragment of a wind-data component; the appends at the top run
# inside a per-rose loop whose header is above this fragment. Line breaks
# restored from a collapsed paste. The loop variable `bin` was renamed to
# `speed_bin` so it no longer shadows the builtin (behavior unchanged).
all_mesh.append(mesh)
all_compass.append(compass)
all_orient_line.append(orient_line)
all_freq_line.append(freq_line)
all_legends.append(legend)
all_title.append(title)
# compute the average values per direction bin (0 for empty bins)
wind_avg_val = []
for speed_bin in windrose.histogram_data:
    try:
        wind_avg_val.append(sum(speed_bin) / len(speed_bin))
    except ZeroDivisionError:
        wind_avg_val.append(0)
all_wind_avg_val.append(wind_avg_val)
all_wind_frequency.append(
    [len(speed_bin) for speed_bin in windrose.histogram_data])

# convert nested lists into data trees
mesh = list_to_data_tree(all_mesh)
compass = list_to_data_tree(all_compass)
orient_line = list_to_data_tree(all_orient_line)
freq_line = list_to_data_tree(all_freq_line)
legend = list_to_data_tree(all_legends)
title = list_to_data_tree(all_title)
avg_val = list_to_data_tree(all_wind_avg_val)
frequency = list_to_data_tree(all_wind_frequency)

# output prevailing direction and processed data
prevailing = windrose.prevailing_direction
data = _data
for pt_res in results: values = [float(r) for r in pt_res.split()] total_val = sum(values) irradiance.append(total_val / len(values)) radiation.append(total_val / timestep) else: for pt_res in results: values = [ float(r) for r, is_hoy in zip(pt_res.split(), occ_pattern) if is_hoy ] total_val = sum(values) irradiance.append(total_val / len(values)) radiation.append(total_val / timestep) return irradiance, radiation if all_required_inputs(ghenv.Component): # process the sun-up hours and parse timestep timestep_ = 1 if timestep_ is None else timestep_ occ_pattern = parse_sun_up_hours(_results, hoys_, timestep_) # compute the annual metrics irradiance, radiation = [], [] for ill_file in _results: irr, rad = cumulative_radiation(ill_file, occ_pattern, timestep_) irradiance.append(irr) radiation.append(rad) irradiance = list_to_data_tree(irradiance) radiation = list_to_data_tree(radiation)
# NOTE(review): fragment of a surface-ray-tracing component; line breaks
# restored from a collapsed paste.
cutoff_ang = math.pi / 2
rtrace_geo = [rtrace_brep]
rays, int_pts = [], []
for ray, pt, norm in zip(start_rays, source_points, study_mesh.face_normals):
    # only trace rays for faces oriented toward the source
    if norm.angle(neg_lb_vec) < cutoff_ang:
        pl_pts = trace_ray(ray, rtrace_geo, _bounce_count_ + 1)
        # if the intersection was successful, create a polyline representing the ray
        if pl_pts:
            # gather all of the intersection points
            all_pts = [pt]
            for i_pt in pl_pts:
                all_pts.append(to_point3d(i_pt))
            # compute the last point by reflecting the final segment off the
            # surface normal when the ray did not use up all of its bounces
            if len(pl_pts) < _bounce_count_ + 2:
                int_norm = normal_at_point(rtrace_brep, pl_pts[-1])
                int_norm = to_vector3d(int_norm)
                last_vec = all_pts[-2] - all_pts[-1]
                last_vec = last_vec.normalize()
                final_vec = last_vec.reflect(int_norm).reverse()
                final_pt = all_pts[-1] + (final_vec * _last_length_)
                all_pts.append(final_pt)
            # create a Polyline3D from the points
            lb_ray_line = Polyline3D(all_pts)
            rays.append(from_polyline3d(lb_ray_line))
            int_pts.append([from_point3d(p) for p in all_pts])

# convert the intersection points to a data tree
int_pts = list_to_data_tree(int_pts)
hide_output(ghenv.Component, 1)
# NOTE(review): tail of a dragonfly Room2D-attribute-coloring component;
# line breaks restored from a collapsed paste.
try:  # import the ladybug_rhino dependencies
    from ladybug_rhino.grasshopper import all_required_inputs, list_to_data_tree
except ImportError as e:
    raise ImportError('\nFailed to import ladybug_rhino:\n\t{}'.format(e))

if all_required_inputs(ghenv.Component):
    # extract any Room2Ds from the input dragonfly objects
    in_rooms = []
    for df_obj in _df_obj:
        if isinstance(df_obj, Model):
            in_rooms.extend(df_obj.room_2ds)
        elif isinstance(df_obj, Building):
            in_rooms.extend(df_obj.unique_room_2ds)
        elif isinstance(df_obj, Story):
            in_rooms.extend(df_obj.room_2ds)
        elif isinstance(df_obj, Room2D):
            in_rooms.append(df_obj)

    # use the ColorRoom2D object to get a set of attributes assigned to the rooms
    color_obj = ColorRoom2D(in_rooms, _attribute)
    values = color_obj.attributes_unique

    # group the Room2Ds by their unique attribute value
    room2ds = [[] for val in values]
    for atr, room in zip(color_obj.attributes, in_rooms):
        atr_i = values.index(atr)
        room2ds[atr_i].append(room)
    room2ds = list_to_data_tree(room2ds)
all_analemma.append(analemma_i) all_daily.append(daily_i) all_compass.append(compass_i) # produce a visualization of colored points cols = [color_to_color(col) for col in graphic.value_colors] col_pts = [] for pt, col in zip(sun_pts_init, cols): col_pt = ColoredPoint(pt) col_pt.color = col col_pts.append(col_pt) all_sun_pts.append(sun_pts_init) all_col_pts.append(col_pts) # convert all nested lists to data trees sun_pts = list_to_data_tree(all_sun_pts) analemma = list_to_data_tree(all_analemma) daily = list_to_data_tree(all_daily) compass = list_to_data_tree(all_compass) legend = list_to_data_tree(all_legends) # do some acrobatics to get the colored points to display # CWM: I don't know why we have to re-schedule the solution but this is the # only way I found to get the colored points to appear (redraw did not work). color_pts = list_to_data_tree(all_col_pts) hide_output(ghenv.Component, 5) schedule_solution(ghenv.Component, 2) else: # no data connected; just output one sunpath sun_pts = draw_sun_positions(suns, radius, center_pt3d) analemma, daily = draw_analemma_and_arcs(sp, datetimes, radius, center_pt3d)
# @license GPL-3.0+ <http://spdx.org/licenses/GPL-3.0+>

"""
Deconstruct a Ladybug Matrix object into a Grasshopper Data Tree of values.
-

    Args:
        _matrix: A Ladybug Matrix object such as the intersection matrices
            output from any of the ray-tracing components (eg. "LB Direct
            Sun Hours").

    Returns:
        values: The numerical values of the matrix as a Grasshopper Data Tree.
"""

ghenv.Component.Name = "LB Deconstruct Matrix"
ghenv.Component.NickName = 'XMatrix'
ghenv.Component.Message = '1.2.0'
ghenv.Component.Category = 'Ladybug'
ghenv.Component.SubCategory = '4 :: Extra'
ghenv.Component.AdditionalHelpFromDocStrings = '0'

try:
    from ladybug_rhino.grasshopper import all_required_inputs, de_objectify_output, \
        list_to_data_tree
except ImportError as e:
    raise ImportError('\nFailed to import ladybug_rhino:\n\t{}'.format(e))

if all_required_inputs(ghenv.Component):
    # unpack the matrix object and convert it straight to a data tree
    values = list_to_data_tree(de_objectify_output(_matrix))
windrose.histogram_data) all_mesh.append(mesh) all_compass.append(compass) all_orient_line.append(orient_line) all_freq_line.append(freq_line) all_windrose_lines.append(windrose_lines) all_legends.append(legend) all_title.append(title) calm = windrose.zero_count if isinstance(speed_data.header.data_type, Speed) else None all_calm_hours.append(calm) all_histograms.append(hist_mtx) # convert nested lists into data trees mesh = list_to_data_tree(all_mesh) compass = list_to_data_tree(all_compass) orient_line = list_to_data_tree(all_orient_line) freq_line = list_to_data_tree(all_freq_line) windrose_line = list_to_data_tree(all_windrose_lines) legend = list_to_data_tree(all_legends) title = list_to_data_tree(all_title) calm_hours = list_to_data_tree(all_calm_hours) # Compute direction angles theta = 360.0 / windrose._number_of_directions theta /= 2.0 angles = [(angle + theta) % 360.0 for angle in windrose.angles[:-1]] # output prevailing direction and processed data prevailing = windrose.prevailing_direction
try: from honeybee.config import folders except ImportError as e: raise ImportError('\nFailed to import honeybee:\n\t{}'.format(e)) try: from ladybug_rhino.grasshopper import all_required_inputs, list_to_data_tree except ImportError as e: raise ImportError('\nFailed to import ladybug_rhino:\n\t{}'.format(e)) if all_required_inputs(ghenv.Component): if os.name == 'nt': # we are on windows; use IronPython like usual sql_obj = SQLiteResult(_sql) # create the SQL result parsing object results = sql_obj.tabular_data_by_name(_table_name) values = list_to_data_tree(list(results.values())) row_names = list(results.keys()) col_names = sql_obj.tabular_column_names(_table_name) else: # we are on Mac; sqlite3 module doesn't work in Mac IronPython # Execute the honybee CLI to obtain the results via CPython cmds = [ folders.python_exe_path, '-m', 'honeybee_energy', 'result', 'tabular-data', _sql, _table_name ] process = subprocess.Popen(cmds, stdout=subprocess.PIPE) stdout = process.communicate() results = json.loads(stdout[0]) values = list_to_data_tree(results) cmds = [ folders.python_exe_path, '-m', 'honeybee_energy', 'result',
# NOTE(review): fragment of an HVAC-sizing-results component; presumably runs
# inside a non-Windows branch whose header is above this fragment. Line breaks
# restored from a collapsed paste.
# Execute the honeybee CLI to obtain the component sizes via CPython
cmds = [folders.python_exe_path, '-m', 'honeybee_energy', 'result',
        'component-sizes', _sql]
if comp_type_ is not None:
    comp_types = comp_type_
    cmds.extend(['--component-type', comp_type_])
process = subprocess.Popen(cmds, stdout=subprocess.PIPE)
stdout = process.communicate()
comp_size_dicts = json.loads(stdout[0])
component_sizes = [ComponentSize.from_dict(cs) for cs in comp_size_dicts]
if comp_type_ is None:  # get a set of all unique component types
    # NOTE(review): comp_types is a str when comp_type_ was supplied but a
    # list here — confirm downstream consumers accept both
    _comp_types = set()
    for comp in component_sizes:
        _comp_types.add(comp.component_type)
    comp_types = list(_comp_types)

# get the peak zone heating and cooling from the ZoneSize objects
for zone_size in zone_cooling_sizes:
    zone_names.append(zone_size.zone_name)
    zone_peak_cool.append(zone_size.calculated_design_load)
for zone_size in zone_heating_sizes:
    zone_peak_heat.append(zone_size.calculated_design_load)
# get the HVAC component sizes from the ComponentSize objects
for comp_size in component_sizes:
    comp_properties_mtx.append(comp_size.descriptions)
    comp_values_mtx.append(comp_size.values)
# convert HVAC components to data trees
comp_properties = list_to_data_tree(comp_properties_mtx)
comp_values = list_to_data_tree(comp_values_mtx)
# NOTE(review): fragment of an annual-daylight-metrics component (UDI-split
# variant); line breaks restored from a collapsed paste.
# process the schedule and sun-up hours
if _occ_sch_ is None:
    schedule = generate_default_schedule()
elif isinstance(_occ_sch_, BaseCollection):
    schedule = _occ_sch_.values
elif isinstance(_occ_sch_, str):
    if schedule_by_identifier is not None:
        schedule = schedule_by_identifier(_occ_sch_).values()
    else:
        raise ValueError('honeybee-energy must be installed to reference '
                         'occupancy schedules by identifier.')
else:  # assume that it is a honeybee schedule object
    schedule = _occ_sch_.values()
total_occupied_hours = sum(schedule)
occ_pattern = parse_sun_up_hours(_results, schedule)

# compute the annual metrics
DA, UDI, UDI_low, UDI_up = [], [], [], []
for ill_file in _results:
    da, udi, udi_low, udi_up = \
        annual_metrics(ill_file, occ_pattern, total_occupied_hours,
                       _threshold_, min_t, max_t)
    DA.append(da)
    UDI.append(udi)
    UDI_low.append(udi_low)
    UDI_up.append(udi_up)
DA = list_to_data_tree(DA)
UDI = list_to_data_tree(UDI)
UDI_low = list_to_data_tree(UDI_low)
UDI_up = list_to_data_tree(UDI_up)