# Face-attribute tags to extract (alternative moment tags kept commented out).
tag = "n_1_k"
tag_2 = "n_2_k"
# tag = "m_1_k"
# tag_2 = "m_2_k"

x_lim = -10.0  # faces stay if x coord of their centroid is larger than x_lim
y_lim = -10.0  # faces stay if y coord of their centroid is larger than y_lim

# =============================================================================
# Import mesh
# =============================================================================

# NOTE(review): splitting on "/" assumes POSIX-style paths in HERE;
# os.path.basename would be portable — confirm HERE's format.
name = HERE.split("/").pop()

mesh = Mesh.from_json(HERE + ".json")
mesh_unify_cycles(mesh)

# ==========================================================================
# Store subset attributes
# ==========================================================================

# Map the geometric key of each face centroid back to its face key, and
# cache the two per-face attribute vectors for later lookup.
centroids = {}
vectors = {}
vectors_2 = {}
for fkey in mesh.faces():
    centroids[geometric_key(mesh.face_centroid(fkey))] = fkey
    vectors[fkey] = mesh.face_attribute(fkey, tag)
    vectors_2[fkey] = mesh.face_attribute(fkey, tag_2)
# Location of this script and of the input block data.
HERE = os.path.dirname(__file__)
FILE_I = os.path.join(HERE, 'data', 'block_cut1.json')

# ==============================================================================
# Parameters
# ==============================================================================

# Machine/work-envelope dimensions — presumably millimeters; TODO confirm units.
WIRE = 1600
TABLE = 1500
HEIGHT = 1000

# ==============================================================================
# Block and Blank
# ==============================================================================

# The blank geometry is stored on the mesh's attribute dict.
block = Mesh.from_json(FILE_I)
blank = block.attributes['blank']

# ==============================================================================
# Cut data
# ==============================================================================

# Side polylines and first-cut polylines, also read from mesh attributes.
left = block.attributes['sides']['left']
right = block.attributes['sides']['right']

left_poly = block.attributes['cut1']['left']
right_poly = block.attributes['cut1']['right']

# ==============================================================================
# Blank polylines
# ==============================================================================
import os

from compas.datastructures import Mesh
from compas.datastructures import mesh_quads_to_triangles
from compas_plotters import MeshPlotter
from compas.utilities import i_to_rgb

import compas_libigl as igl

# ==============================================================================
# Input geometry
# ==============================================================================

HERE = os.path.dirname(__file__)
FILE = os.path.join(HERE, '..', 'data', 'tubemesh.json')

# libigl expects a triangle mesh, so quad faces are split first.
mesh = Mesh.from_json(FILE)
mesh_quads_to_triangles(mesh)

# ==============================================================================
# Isolines
# ==============================================================================

# Convert the mesh to plain, contiguously indexed vertex/face arrays.
key_index = mesh.key_index()
V = mesh.vertices_attributes('xyz')
F = [[key_index[key] for key in mesh.face_vertices(fkey)] for fkey in mesh.faces()]

# Scalar field = vertex heights; sample N isoline levels from it.
S = mesh.vertices_attribute('z')
N = 50

vertices, levels = igl.trimesh_isolines(V, F, S, N)
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from compas.utilities import pairwise
from compas.datastructures import Mesh
from compas.datastructures import mesh_flip_cycles
from compas.geometry import offset_polygon
from compas.utilities import i_to_rgb
from compas_rhino.artists import MeshArtist

mesh = Mesh.from_json('../cablenet.json')

# ==============================================================================
# Make the concrete extrados
# ==============================================================================

# make a copy of the mesh to create the extrados of the concrete layer
edos = mesh.copy()

# offset the edos compared to the cablenet to the height of the extrados
# (0.02 + 0.06 — presumably cover + concrete layer thickness in meters; confirm)
for key, attr in edos.vertices(True):
    # Use the normal of the ORIGINAL cablenet so all offsets share one basis.
    nx, ny, nz = mesh.vertex_normal(key)
    attr['x'] += (0.02 + 0.06) * nx
    attr['y'] += (0.02 + 0.06) * ny
    attr['z'] += (0.02 + 0.06) * nz

# ==============================================================================
# Make the blocks
    -------
    bool
        ``True`` if plane1 intersects with plane2.
        ``False`` otherwise.

    """
    # check for parallelity of planes
    # Planes are treated as non-intersecting when their normals are (anti)parallel
    # within tolerance `tol`.
    if abs(dot_vectors(plane1[1], plane2[1])) > 1 - tol:
        return False
    return True


# ==============================================================================
# Main
# ==============================================================================

if __name__ == "__main__":

    import os
    import compas
    from compas.datastructures import Mesh

    mesh = Mesh.from_json(os.path.join(compas.TEMP, 'm11.json'))

    xyz = mesh.get_vertices_attributes('xyz')
    # NOTE(review): the coordinates read from the mesh above are immediately
    # overwritten by this hard-coded test set — the mesh load is effectively dead.
    xyz = [[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [1.0, 1.0, 0.0], [0.0, 1.0, 0.0], [3.0, 1.0, 0.5]]

    print(is_coplanar(xyz))
# FIX: `compas_fea` is referenced below (compas_fea.get, compas_fea.TEMP) but was
# never imported in this block; importing it here is harmless if it is also
# imported elsewhere in the file.
import compas_fea

from compas.datastructures import Mesh

from compas_fea.structure import Structure
from compas_fea.structure import FixedDisplacement
from compas_fea.structure import ElasticIsotropic
from compas_fea.structure import ShellSection
from compas_fea.structure import ElementProperties
from compas_fea.structure import GravityLoad
from compas_fea.structure import GeneralStep

# Author(s): Tomás Méndez Echenagucia (github.com/tmsmendez)

# get mesh from json file ------------------------------------------------------

mesh = Mesh.from_json(compas_fea.get('flat20x20.json'))

# add shell elements from mesh -------------------------------------------------

name = 'shell_example'
s = Structure(name=name, path=compas_fea.TEMP)
shell_keys = s.add_nodes_elements_from_mesh(mesh, element_type='ShellElement')
s.add_set('shell', 'element', shell_keys)

# add supports -----------------------------------------------------------------

# Fix all nodes on the (first) boundary loop of the mesh.
nkeys = mesh.vertices_on_boundaries()[0]
s.add_set(name='support_nodes', type='NODE', selection=nkeys)
supports = FixedDisplacement(name='supports', nodes='support_nodes')  # was misspelled `supppots`
s.add_displacement(supports)
from compas.datastructures import Mesh

from compas_vibro.viewers import PressureFieldViewer
from compas_vibro.structure import Structure

# Crude console "clear": push previous output off screen.
for i in range(50):
    print('')

filepath = os.path.join(compas_vibro.DATA, 'clt_1_remeshed_radiation.obj')
s = Structure.from_obj(filepath)

frequencies = range(20, 500, 10)
waves = generate_uniform_waves_numpy()
fields = compute_pressure_fields_structure(waves, s, frequencies, center=True)

v = PressureFieldViewer(fields, structure=s)
v.real = True
v.show()

# NOTE(review): everything below recomputes pressure fields for a mesh model
# and opens a second viewer — looks like leftover from an earlier version of
# the script; confirm whether both passes are intended.
num_waves = 500
model = 'flat_mesh_20x20.json'
# model = 'clt_2.json'
mesh = Mesh.from_json(compas_vibro.get(model))
# waves = generate_random_waves_numpy(num_waves)
waves = generate_uniform_waves_numpy()
frequencies = range(20, 500, 10)
c = 340.0  # presumably the speed of sound in air [m/s] — confirm
fields = compute_pressure_fields_mesh(waves, mesh, frequencies, c, center=True)

v = PressureFieldViewer(fields, mesh=mesh)
v.real = True
v.show()
from compas_vibro.viewers import HarmonicViewer

__author__ = ["Tomas Mendez Echenagucia"]
__copyright__ = "Copyright 2020, Design Machine Group - University of Washington"
__license__ = "MIT License"
__email__ = "*****@*****.**"
__version__ = "0.1.0"

# Crude console "clear" before printing new results.
for i in range(60):
    print()

path = compas_vibro.TEMP
geometry = 'mesh_flat_20x20'
name = 'opensees_{0}_harmonic'.format(geometry)

mesh = Mesh.from_json(compas_vibro.get('{0}.json'.format(geometry)))

# make an instance of the structure object - - - - - - - - - - - - - - - - - - -
s = Structure(path, name)

# add nodes and elements from mesh - - - - - - - - - - - - - - - - - - - - - - -
s.add_nodes_elements_from_mesh(mesh, 'ShellElement', elset='shell')

# add displacements - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
d = FixedDisplacement('boundary', mesh.vertices_on_boundary())
s.add(d)

# add loads - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
load = PointLoad(name='pload', nodes=[10, 15], x=0, y=0, z=1, xx=0, yy=0, zz=0)
s.add(load)
from compas.datastructures import mesh_flip_cycles
from compas_rhino.artists import MeshArtist
from compas.geometry import add_vectors
from compas.geometry import scale_vector
from compas.geometry import intersection_line_plane

# ==============================================================================
# Initialise
# ==============================================================================

HERE = os.path.dirname(__file__)
DATA = os.path.abspath(os.path.join(HERE, '..', 'data'))
FILE_I1 = os.path.join(DATA, 'data.json')
FILE_I2 = os.path.join(DATA, 'fabric.json')

SHELL = Mesh.from_json(FILE_I1)
FABRIC = Mesh.from_json(FILE_I2)

SHELL.name = 'Shell'
FABRIC.name = 'Fabric'

# Reverse the face winding (and thus the normals) of the fabric mesh.
mesh_flip_cycles(FABRIC)

THICKNESS = 0.04  # offset thickness — presumably meters; confirm units

# ==============================================================================
# Offsets
# ==============================================================================

# Extrados and intrados both start as copies of the fabric mesh.
EDOS = FABRIC.copy()
IDOS = FABRIC.copy()
self.file.write(self.vertex_tpl.format(x, y, z)) def write_faces(self): key_index = self.mesh.key_index() for fkey in self.mesh.faces(): vertices = self.mesh.face_vertices(fkey) v = len(vertices) self.file.write("{0} {1}\n".format( v, " ".join([str(key_index[key]) for key in vertices]))) # ============================================================================== # Main # ============================================================================== if __name__ == "__main__": import os from compas.datastructures import Mesh FILE = os.path.join(compas.DATA, 'tubemesh.ply') mesh = Mesh.from_json(compas.get('tubemesh.json')) mesh.to_ply(FILE, author="Tom Van Mele") ply = PLY(FILE) print(len(ply.reader.vertices) == ply.reader.number_of_vertices) print(len(ply.reader.faces) == ply.reader.number_of_faces) print(len(ply.reader.faces)) print(ply.reader.number_of_faces)
EXPORT_PNG = False

# Output path template for GIF frames (user-specific absolute path).
THERE = '/Users/arpj/code/libraries/streamlines/examples/gif_{0}_{1}/kmeans_{0}_{1}_'
THERE = THERE.format(NUM, ITERS)

# Face-attribute tags holding principal-stress direction vectors.
vector_tag_1 = 'ps_1_top'  # ps_1_top
vector_tag_2 = 'ps_2_top'  # ps_1_top
vector_tag = 'ps_12_top'  # ps_1_top
# NOTE(review): the assignment above is dead — vector_tag is immediately
# overwritten with vector_tag_1 on the next line.
vector_tag = vector_tag_1
smooth_iters = 0

# ==========================================================================
# Import mesh
# ==========================================================================

mesh = Mesh.from_json(HERE)
# mesh_unify_cycles(mesh)

# ==========================================================================
# rebuild mesh
# ==========================================================================

new_mesh = Mesh()
all_vertices = set()

for idx, tup in enumerate(mesh.faces(True)):
    fkey, attr = tup
    # 4.5 x 6.0 m rectangle
    # if mesh.face_centroid(fkey)[0] < -0.05:  # x - mesh deleter by symmetry
    #     continue
def main():
    """Run the curved-slicing pipeline: region split, slicing, print organization.

    Each stage is gated by a module-level flag (REGION_SPLIT, SLICER,
    PRINT_ORGANIZER) and communicates with the next through json files in
    OUTPUT_PATH, so stages can be re-run independently.
    """
    start_time = time.time()

    avg_layer_height = 4.0
    parameters = {
        'avg_layer_height': avg_layer_height,  # controls number of curves that will be generated
        'min_layer_height': 0.2,
        'max_layer_height': 4.0,
        'uneven_upper_targets_offset': 0,
        'target_HIGH_smooth_union': [True, [25.0]],  # on/off, blend radius
    }

    # --- Load initial_mesh
    mesh = Mesh.from_obj(os.path.join(DATA_PATH, OBJ_INPUT_NAME))

    # --- Load targets (boundaries)
    low_boundary_vs = utils.load_from_json(DATA_PATH, 'boundaryLOW.json')
    high_boundary_vs = utils.load_from_json(DATA_PATH, 'boundaryHIGH.json')
    create_mesh_boundary_attributes(mesh, low_boundary_vs, high_boundary_vs)

    # --- Create pre-processor
    preprocessor = InterpolationSlicingPreprocessor(mesh, parameters, DATA_PATH)
    preprocessor.create_compound_targets()
    preprocessor.targets_laplacian_smoothing(iterations=4, strength=0.05)

    #########################################
    # --- region split
    if REGION_SPLIT:
        # --- ADVANCED slicing with region split
        g_eval = preprocessor.create_gradient_evaluation(
            target_1=preprocessor.target_LOW,
            target_2=preprocessor.target_HIGH,
            save_output=True)
        preprocessor.find_critical_points(
            g_eval,
            output_filenames=['minima.json', 'maxima.json', 'saddles.json'])
        preprocessor.region_split(
            save_split_meshes=True)  # split mesh regions on saddle points
        # utils.interrupt()

    #########################################
    # --- slicing
    if SLICER:
        slicers = []
        filenames = utils.get_all_files_with_name('split_mesh_', '.json', OUTPUT_PATH)
        split_meshes = [
            Mesh.from_json(os.path.join(OUTPUT_PATH, filename))
            for filename in filenames
        ]

        # Each split mesh gets its own preprocessor and slicer.
        for i, split_mesh in enumerate(split_meshes):
            preprocessor_split = InterpolationSlicingPreprocessor(
                split_mesh, parameters, DATA_PATH)
            preprocessor_split.create_compound_targets()
            preprocessor_split.create_gradient_evaluation(
                norm_filename='gradient_norm_%d.json' % i,
                g_filename='gradient_%d.json' % i,
                target_1=preprocessor_split.target_LOW,
                target_2=preprocessor_split.target_HIGH)

            slicer = InterpolationSlicer(split_mesh, preprocessor_split, parameters)
            # Model-specific tweak: different layer-count multiplier for piece 3.
            if i == 3:
                slicer.n_multiplier = 0.85
            slicer.slice_model()
            # Brim only on the first (bottom) piece.
            if i == 0:
                generate_brim(slicer, layer_width=3.0, number_of_brim_offsets=5)
            seams_smooth(slicer, smooth_distance=0.1)
            simplify_paths_rdp_igl(slicer, threshold=0.25)
            utils.save_to_json(slicer.to_data(), OUTPUT_PATH,
                               'curved_slicer_%d.json' % i)
            slicers.append(slicer)
        # utils.interrupt()

    #########################################
    # --- print organization
    if PRINT_ORGANIZER:
        filenames = utils.get_all_files_with_name('curved_slicer_', '.json', OUTPUT_PATH)
        slicers = [
            InterpolationSlicer.from_data(
                utils.load_from_json(OUTPUT_PATH, filename))
            for filename in filenames
        ]

        for i, slicer in enumerate(slicers):
            print_organizer = InterpolationPrintOrganizer(
                slicer, parameters, DATA_PATH)
            print_organizer.create_printpoints()

            set_extruder_toggle(print_organizer, slicer)
            set_blend_radius(print_organizer)
            add_safety_printpoints(print_organizer, z_hop=10.0)
            smooth_printpoints_up_vectors(print_organizer, strength=0.5, iterations=10)
            set_wait_time_on_sharp_corners(print_organizer,
                                           threshold=0.5 * math.pi,
                                           wait_time=0.2)

            # --- Save printpoints dictionary to json file
            printpoints_data = print_organizer.output_printpoints_dict()
            utils.save_to_json(printpoints_data, OUTPUT_PATH,
                               'out_printpoints_%d.json' % i)

    end_time = time.time()
    print("Total elapsed time", round(end_time - start_time, 2), "seconds")
def region_split(self, cut_mesh=True, separate_neighborhoods=True, topological_sorting=True, save_split_meshes=True): """ Splits the mesh on the saddle points. This process can take a long time. It consists of four parts: 1) Create cuts on the mesh so that they intersect the saddle points and follow the get_distance function iso-contour 2) Separate mesh neighborhoods from cuts 3) Topological sorting of split meshes to determine their connectivity and sequence. 4) Finally resulting meshes are saved to json. The intermediary outputs are saved to json, so if you don'weight want to be recomputing the entire thing every time, you can turn the respective processes to false. """ print("") logging.info("--- Mesh region splitting") if cut_mesh: # (1) self.mesh.update_default_vertex_attributes({'cut': 0}) mesh_splitter = rs.MeshSplitter(self.mesh, self.target_LOW, self.target_HIGH, self.DATA_PATH) mesh_splitter.run() self.mesh = mesh_splitter.mesh logger.info('Completed Region splitting') logger.info("Region split cut indices: " + str(mesh_splitter.cut_indices)) # save results to json self.mesh.to_obj( os.path.join(self.OUTPUT_PATH, 'mesh_with_cuts.obj')) self.mesh.to_json( os.path.join(self.OUTPUT_PATH, 'mesh_with_cuts.json')) logger.info("Saving to Obj and Json: " + os.path.join(self.OUTPUT_PATH, 'mesh_with_cuts.json')) if separate_neighborhoods: # (2) print("") logger.info("--- Separating mesh disconnected components") self.mesh = Mesh.from_json( os.path.join(self.OUTPUT_PATH, 'mesh_with_cuts.json')) region_split_cut_indices = get_existing_cut_indices(self.mesh) # save results to json utils.save_to_json( get_vertices_that_belong_to_cuts(self.mesh, region_split_cut_indices), self.OUTPUT_PATH, "vertices_on_cuts.json") self.split_meshes = rs.separate_disconnected_components( self.mesh, attr='cut', values=region_split_cut_indices, OUTPUT_PATH=self.OUTPUT_PATH) logger.info('Created %d split meshes.' 
% len(self.split_meshes)) if topological_sorting: # (3) print("") logger.info( "--- Topological sort of meshes directed graph to determine print order" ) graph = topo_sort.MeshDirectedGraph(self.split_meshes, self.DATA_PATH) all_orders = graph.get_all_topological_orders() selected_order = all_orders[0] logger.info('selected_order : ' + str(selected_order) ) # TODO: improve the way an order is selected self.cleanup_mesh_attributes_based_on_selected_order( selected_order, graph) # reorder split_meshes based on selected order self.split_meshes = [self.split_meshes[i] for i in selected_order] # --- save split meshes if save_split_meshes: # (4) print("") logger.info("--- Saving resulting split meshes") for i, m in enumerate(self.split_meshes): m.to_obj( os.path.join(self.OUTPUT_PATH, 'split_mesh_' + str(i) + '.obj')) m.to_json( os.path.join(self.OUTPUT_PATH, 'split_mesh_' + str(i) + '.json')) logger.info('Saving to Obj and Json: ' + os.path.join(self.OUTPUT_PATH, 'split_mesh_%.obj')) logger.info("Saved %d split_meshes" % len(self.split_meshes)) print('')
# All known face-attribute tags carried by the input meshes.
tags = [
    'n_1', 'n_2', 'm_1', 'm_2',
    'ps_1_top', 'ps_1_bot', 'ps_1_mid',
    'ps_2_top', 'ps_2_bot', 'ps_2_mid',
    'custom_1', 'custom_2'
]

vector_tag_1 = 'ps_1_top'  # ps_1_top
vector_tag_2 = 'ps_2_top'  # ps_1_top
vector_tag = 'ps_12_top'  # ps_1_top
smooth_iters = 0

# ==========================================================================
# Import mesh
# ==========================================================================

# FIX: `Mesh.from_json` is a constructor that RETURNS the loaded mesh; the
# original `mesh = Mesh(); mesh.from_json(HERE)` discarded that return value
# and left `mesh` empty. Bind the returned mesh instead (this matches how
# the sibling scripts load their meshes).
mesh = Mesh.from_json(HERE)
mesh_unify_cycles(mesh)

# ==========================================================================
# 45 degrees field
# ==========================================================================

# Build a new per-face vector field rotated 45 degrees from vector_tag_1:
# combine the stored vector with an in-plane perpendicular (w.r.t. global Z),
# scaled by 1/tan(45deg), and renormalize the sum.
for fkey, attr in mesh.faces(True):
    vec_1 = attr[vector_tag_1]

    y = 1.0 / math.tan(math.radians(45.0))

    x_vec = vec_1
    y_vec = cross_vectors(x_vec, [0.0, 0.0, 1.0])  # global Z
    y_vec = scale_vector(y_vec, y)

    vec_3 = normalize_vector(add_vectors(x_vec, y_vec))
    mesh.set_face_attribute(fkey, name=vector_tag, value=vec_3)