# NOTE(review): whitespace-mangled paste — the original newlines were stripped, so
# this physical line is not runnable Python as-is. It begins mid-call (the tail of
# an unseen intersect_mesh_rays(...)-style call ending at `cpu_count=workers)`),
# so it cannot be reformatted safely without the preceding, unseen lines.
# Visible logic, for reference: each intersection value is weighted by the cosine
# of its angle, summed against per-patch radiation values (all_rad), a total is
# accumulated over mesh face areas (converted to meters with
# conversion_to_meters()**2), and GraphicContainer/legend/title outputs are built.
# Presumably a Ladybug "Incident Radiation" component body — TODO confirm and
# restore the original line breaks from the upstream component source.
points, all_vecs, normals, cpu_count=workers) # compute the results results = [] int_matrix = [] for int_vals, angles in zip(int_matrix_init, angles): pt_rel = [ival * math.cos(ang) for ival, ang in zip(int_vals, angles)] int_matrix.append(pt_rel) rad_result = sum(r * w for r, w in zip(pt_rel, all_rad)) results.append(rad_result) # output the intersection matrix and compute total radiation int_mtx = objectify_output('Geometry/Sky Intersection Matrix', int_matrix) unit_conv = conversion_to_meters()**2 total = 0 for rad, area in zip(results, study_mesh.face_areas): total += rad * area * unit_conv # create the mesh and legend outputs graphic = GraphicContainer(results, study_mesh.min, study_mesh.max, legend_par_) graphic.legend_parameters.title = 'kWh/m2' title = text_objects('Incident Radiation', graphic.lower_title_location, graphic.legend_parameters.text_height * 1.5, graphic.legend_parameters.font) # create all of the visual outputs study_mesh.colors = graphic.value_colors
# NOTE(review): whitespace-mangled paste — newlines stripped; not runnable as-is.
# The fragment ends on a dangling `else:` (its body is on an unseen following
# line), so it cannot be reformatted safely here.
# Visible logic, for reference: rays are intersected against shade_mesh (with
# surface normals only for the 'Sky View' case), then results are computed as a
# percentage of vec_count; 'Sky View' weights each intersection by
# 2 * cos(angle) and patch_wghts, the other branch weights by patch_wghts alone
# when they are provided.
# NOTE(review): `for int_vals, angles in zip(int_matrix, angles)` rebinds the
# loop variable `angles` over the iterable of the same name — it works once but
# clobbers `angles` for any later use; verify against the upstream source.
# intersect the rays with the mesh if vt_str == 'Sky View': # account for the normals of the surface normals = [from_vector3d(vec) for vec in study_mesh.face_normals] int_matrix, angles = intersect_mesh_rays(shade_mesh, points, view_vecs, normals, parallel=parallel_) else: int_matrix, angles = intersect_mesh_rays(shade_mesh, points, view_vecs, parallel=parallel_) # compute the results int_mtx = objectify_output('View Intersection Matrix', int_matrix) vec_count = len(view_vecs) results = [] if vt_str == 'Sky View': # weight intersections by angle before output for int_vals, angles in zip(int_matrix, angles): w_res = (ival * 2 * math.cos(ang) for ival, ang in zip(int_vals, angles)) weight_result = sum(r * w for r, w in zip(w_res, patch_wghts)) results.append(weight_result * 100 / vec_count) else: if patch_wghts: for int_list in int_matrix: weight_result = sum(r * w for r, w in zip(int_list, patch_wghts)) results.append(weight_result * 100 / vec_count) else:
# NOTE(review): whitespace-mangled paste — newlines stripped; not runnable as-is.
# It begins mid-list-comprehension (`for seg in windrose.orientation_lines]` is
# the tail of an unseen `orient_line = [...]` statement), so it cannot be
# reformatted safely without the preceding, unseen lines.
# Visible logic, for reference: per-windrose geometry (frequency/windrose
# polygons), an offset center point, appends into the all_* collector lists,
# calm-hour counting (only for Speed data types), conversion of the collectors
# to Grasshopper data trees, and prevailing-direction angles.
# Presumably the tail of a per-data-collection loop in a Wind Rose component —
# TODO confirm and restore the original line breaks from the upstream source.
for seg in windrose.orientation_lines] freq_line = [from_polygon2d(poly, _center_pt_.z) for poly in windrose.frequency_lines] windrose_lines = [from_polygon2d(poly, _center_pt_.z) for poly in windrose.windrose_lines] fac = (i + 1) * windrose.compass_radius * 3 center_pt_2d = Point2D(_center_pt_.x + fac, _center_pt_.y) # collect everything to be output mesh.append(msh) all_compass.append(compass) all_orient_line.append(orient_line) all_freq_line.append(freq_line) all_windrose_lines.append(windrose_lines) all_legends.append(legend) title.append(titl) calm = windrose.zero_count if isinstance(speed_data.header.data_type, Speed) else None calm_hours.append(calm) histogram.append(objectify_output('WindRose {}'.format(i), windrose.histogram_data)) # convert nested lists into data trees compass = list_to_data_tree(all_compass) orient_line = list_to_data_tree(all_orient_line) freq_line = list_to_data_tree(all_freq_line) windrose_line = list_to_data_tree(all_windrose_lines) legend = list_to_data_tree(all_legends) hide_output(ghenv.Component, 5) # keep the default visual simple # compute direction angles and prevailing direction theta = 180.0 / windrose._number_of_directions angles = [(angle + theta) % 360.0 for angle in windrose.angles[:-1]] prevailing = windrose.prevailing_direction
# NOTE(review): whitespace-mangled paste — newlines stripped; not runnable as-is.
# It begins mid-call (the tail of an unseen call ending at
# `windrose.legend_parameters.font)`), so it cannot be reformatted safely
# without the preceding, unseen lines.
# Visible logic, for reference: a variant of the wind-rose collection code on
# the previous fragment — compass/orientation/frequency geometry is built, the
# center point is offset per index `i`, the histogram is wrapped via
# objectify_output, everything is appended to all_* collectors, and the mesh
# collector is converted to a data tree.  Note this variant calls
# from_polygon2d(poly) WITHOUT the _center_pt_.z argument used in the sibling
# fragment — presumably an older/newer revision of the same component; confirm
# which is canonical before reconciling.
windrose.legend_parameters.text_height, windrose.legend_parameters.font) compass = compass_objects(windrose.compass, _center_pt_.z, None) orient_line = [ from_linesegment2d(seg, _center_pt_.z) for seg in windrose.orientation_lines ] freq_line = [from_polygon2d(poly) for poly in windrose.frequency_lines] windrose_lines = [ from_polygon2d(poly) for poly in windrose.windrose_lines ] fac = (i + 1) * windrose.compass_radius * 3 center_pt_2d = Point2D(_center_pt_.x + fac, _center_pt_.y) # Add histogram hist_mtx = objectify_output('WindRose {}'.format(i), windrose.histogram_data) all_mesh.append(mesh) all_compass.append(compass) all_orient_line.append(orient_line) all_freq_line.append(freq_line) all_windrose_lines.append(windrose_lines) all_legends.append(legend) all_title.append(title) calm = windrose.zero_count if isinstance(speed_data.header.data_type, Speed) else None all_calm_hours.append(calm) all_histograms.append(hist_mtx) # convert nested lists into data trees mesh = list_to_data_tree(all_mesh)
# Reconstructed from a whitespace-mangled paste: the statements below were on a
# single physical line with the newlines stripped.  Statement boundaries are
# unambiguous from the syntax, so the code is restored to runnable form here.
# Logic: run the Radiance `gendaymtx` executable twice over a Wea file (once
# for direct sun patches via -d, once for diffuse sky patches via -s), parse
# both outputs into value matrices, gather sky metadata, and wrap it all into
# a single matrix object for output from the component.
# NOTE: gendaymtx_exe, density, wea_file, wea_duration, parse_mtx_data and the
# Grasshopper inputs (north_, ground_r, _hoys_, wea, _direct_rad) are defined
# on earlier, unseen lines of this script.

# shell=True is needed for subprocess on Windows to resolve the executable
use_shell = os.name == 'nt'

# command for direct patches (-d): capture stdout as raw matrix text
cmds = [gendaymtx_exe, '-m', str(density), '-d', '-O1', '-A', wea_file]
process = subprocess.Popen(cmds, stdout=subprocess.PIPE, shell=use_shell)
stdout = process.communicate()  # (stdout_data, stderr_data) tuple
dir_data_str = stdout[0]

# command for diffuse patches (-s)
cmds = [gendaymtx_exe, '-m', str(density), '-s', '-O1', '-A', wea_file]
process = subprocess.Popen(cmds, stdout=subprocess.PIPE, shell=use_shell)
stdout = process.communicate()
diff_data_str = stdout[0]

# parse the data into a single matrix
dir_vals = parse_mtx_data(dir_data_str, wea_duration, density)
diff_vals = parse_mtx_data(diff_data_str, wea_duration, density)

# collect sky metadata like the north, which will be used by other components
metadata = [north_, ground_r]
if _hoys_:
    # record the first and last hour of the analysis as DateTimes
    metadata.extend(
        [DateTime.from_hoy(h) for h in (_hoys_[0], _hoys_[-1])])
else:
    metadata.extend(
        [wea.analysis_period.st_time, wea.analysis_period.end_time])
for key, val in _direct_rad.header.metadata.items():
    metadata.append('{} : {}'.format(key, val))

# wrap everything together into an object to output from the component
mtx_data = (metadata, dir_vals, diff_vals)
sky_mtx = objectify_output('Cumulative Sky Matrix', mtx_data)
# NOTE(review): whitespace-mangled paste — newlines stripped; not runnable as-is.
# It begins mid-statement (a stray `]` closing an unseen list), so it cannot be
# reformatted safely without the preceding, unseen lines.
# Visible logic, for reference: geometry and context are joined into a shade
# mesh (context only, when _geo_block_ is False), sight lines are intersected
# against it up to max_dist_, and visibility results are expressed as a
# percentage of the _view_points count — optionally weighted by normalized
# pt_weights_ — before GraphicContainer outputs are built with a '%' legend.
# Note the stray `\` after `if _geo_block_` is a line-continuation remnant from
# the original formatting.
] hide_output(ghenv.Component, 1) # mesh the geometry and context shade_mesh = join_geometry_to_mesh(_geometry + context_) if _geo_block_ \ or _geo_block_ is None else join_geometry_to_mesh(context_) # intersect the lines with the mesh int_matrix = intersect_mesh_lines(shade_mesh, points, _view_points, max_dist_, parallel=parallel_) # compute the results int_mtx = objectify_output('Visibility Intersection Matrix', int_matrix) vec_count = len(_view_points) if pt_weights_: # weight intersections by the input point weights tot_wght = sum(pt_weights_) / vec_count adj_weights = [wght / tot_wght for wght in pt_weights_] results = [] for int_vals in int_matrix: w_res = [ival * wght for ival, wght in zip(int_vals, adj_weights)] results.append(sum(w_res) * 100 / vec_count) else: # no need to weight results results = [sum(int_list) * 100 / vec_count for int_list in int_matrix] # create the mesh and legend outputs graphic = GraphicContainer(results, study_mesh.min, study_mesh.max, legend_par_) graphic.legend_parameters.title = '%'
# NOTE(review): whitespace-mangled paste — newlines stripped; not runnable as-is.
# The first statement reads `json_file`, a handle bound by an unseen enclosing
# `with open(...)` on a previous line, so it cannot be reformatted safely here.
# Visible logic, for reference: a results Header is loaded from JSON; the
# collection is treated as hourly-continuous only when the analysis period spans
# hours 0-23; grids_info.json fixes the order of per-grid CSV files; each CSV
# row becomes one Hourly(Dis)ContinuousCollection tagged with its grid name and
# sensor index; the nested matrix is wrapped via objectify_output so it does not
# slow the Grasshopper UI.
# NOTE(review): `grid['full_id'] if 'full_id' in grid else 'id'` falls back to
# the LITERAL string 'id' — this looks like it was meant to be grid['id'];
# verify against the upstream component before changing.
data_header = Header.from_dict(json.load(json_file)) a_per = data_header.analysis_period continuous = True if a_per.st_hour == 0 and a_per.end_hour == 23 else False if not continuous: dates = a_per.datetimes # parse the grids_info.json with the correct order of the grid files with open(os.path.join(_comf_result, 'grids_info.json')) as json_file: grid_list = json.load(json_file) # loop through the grid CSV files, parse their results, and build data collections comf_matrix = [] for grid in grid_list: grid_name = grid['full_id'] if 'full_id' in grid else 'id' metadata = {'grid': grid_name} grid_file = os.path.join(_comf_result, '{}.csv'.format(grid_name)) data_matrix = csv_to_num_matrix(grid_file) grid_data = [] for i, row in enumerate(data_matrix): header = data_header.duplicate() header.metadata = metadata.copy() header.metadata['sensor_index'] = i data = HourlyContinuousCollection(header, row) if continuous else \ HourlyDiscontinuousCollection(header, row, dates) grid_data.append(data) comf_matrix.append(grid_data) # wrap the matrix into an object so that it does not slow the Grasshopper UI comf_mtx = objectify_output('{} Matrix'.format(data_header.data_type.name), comf_matrix)
# Reconstructed from a whitespace-mangled paste: the statements below were on a
# single physical line with the newlines stripped; statement boundaries are
# unambiguous from the syntax.  Also fixed here: the validator's range
# parameters were named `max`/`min`, shadowing the builtins — renamed to
# max_val/min_val (all call sites below are positional, so behavior and the
# call interface are unchanged) — and the range test now uses a chained
# comparison.

try:
    from ladybug_rhino.grasshopper import give_warning, objectify_output
except ImportError as e:
    raise ImportError('\nFailed to import ladybug_rhino:\n\t{}'.format(e))


def check_strategy(value, name, default, max_val, min_val):
    """Check a strategy parameter to ensure it is correct.

    Appends `value` to the module-level strategy_par list when it lies within
    [min_val, max_val]; otherwise appends `default` and, for an out-of-range
    value, prints a warning and flags it on the component.
    """
    if value is None:
        strategy_par.append(default)
    elif min_val <= value <= max_val:
        strategy_par.append(value)
    else:
        strategy_par.append(default)
        msg = '"{}" must be between {} and {}. Got {}.\nReverting to default ' \
            'value of {}'.format(name, min_val, max_val, value, default)
        print(msg)
        give_warning(ghenv.Component, msg)


# check and add each of the strategies
strategy_par = []
check_strategy(_day_above_comf_, '_day_above_comf_', 12.0, 30.0, 0.0)
check_strategy(_night_below_comf_, '_night_below_comf_', 3.0, 15.0, 0.0)
check_strategy(_fan_air_speed_, '_fan_air_speed_', 1.0, 10.0, 0.1)
check_strategy(_balance_temp_, '_balance_temp_', 12.8, 20.0, 5.0)
check_strategy(_solar_heat_cap_, '_solar_heat_cap_', 50.0, 1000.0, 1.0)
check_strategy(_time_constant_, '_time_constant_', 8, 48, 1)

# wrap the validated parameters into a single object for output
strategy_par = objectify_output('Passive Strategy Parameters', strategy_par)
# along with Ladybug; If not, see <http://www.gnu.org/licenses/>.
#
# @license GPL-3.0+ <http://spdx.org/licenses/GPL-3.0+>

# NOTE(review): reconstructed from a whitespace-mangled paste — the script was
# collapsed onto one physical line; the layout below restores the standard
# Ladybug component-script structure (docstring, component attributes, guarded
# imports, main body).

"""
Construct a Ladybug Matrix object from a Grasshopper Data Tree of values.
-

    Args:
        _values: A Grasshopper Data Tree of values to be merged into a matrix object.

    Returns:
        matrix: A Ladybug Matrix object encapsulating all of the input values.
"""

ghenv.Component.Name = "LB Construct Matrix"
ghenv.Component.NickName = '+Matrix'
ghenv.Component.Message = '1.2.0'
ghenv.Component.Category = 'Ladybug'
ghenv.Component.SubCategory = '4 :: Extra'
ghenv.Component.AdditionalHelpFromDocStrings = '0'

try:
    from ladybug_rhino.grasshopper import all_required_inputs, objectify_output, \
        data_tree_to_list
except ImportError as e:
    raise ImportError('\nFailed to import ladybug_rhino:\n\t{}'.format(e))


if all_required_inputs(ghenv.Component):
    # data_tree_to_list yields (path, values) pairs; keep only the value lists
    python_mtx = [row[1] for row in data_tree_to_list(_values)]
    # wrap the nested lists so they pass through Grasshopper as one object
    matrix = objectify_output('Matrix', python_mtx)