def mesh_to_collada(mesh):
    ''' Supports per-vertex color, but nothing else. '''
    import numpy as np
    try:
        from collada import Collada, scene
    except ImportError:
        raise ImportError(
            "lace.serialization.dae.mesh_to_collada requires package pycollada.")

    def create_material(dae):
        from collada import material, scene
        effect = material.Effect("effect0", [], "phong",
                                 diffuse=(1, 1, 1), specular=(0, 0, 0),
                                 double_sided=True)
        mat = material.Material("material0", "mymaterial", effect)
        dae.effects.append(effect)
        dae.materials.append(mat)
        return scene.MaterialNode("materialref", mat, inputs=[])

    def geometry_from_mesh(dae, mesh):
        from collada import source, geometry
        srcs = []
        # v
        srcs.append(source.FloatSource("verts-array", mesh.v, ('X', 'Y', 'Z')))
        input_list = source.InputList()
        input_list.addInput(0, 'VERTEX', "#verts-array")
        # vc
        if mesh.vc is not None:
            input_list.addInput(len(srcs), 'COLOR', "#color-array")
            srcs.append(
                source.FloatSource("color-array", mesh.vc[mesh.f.ravel()],
                                   ('X', 'Y', 'Z')))
        # f
        geom = geometry.Geometry(dae, "geometry0", "mymesh", srcs)
        indices = np.dstack([mesh.f for _ in srcs]).ravel()
        triset = geom.createTriangleSet(indices, input_list, "materialref")
        geom.primitives.append(triset)
        # e
        if mesh.e is not None:
            indices = np.dstack([mesh.e for _ in srcs]).ravel()
            lineset = geom.createLineSet(indices, input_list, "materialref")
            geom.primitives.append(lineset)
        dae.geometries.append(geom)
        return geom

    dae = Collada()
    geom = geometry_from_mesh(dae, mesh)
    node = scene.Node(
        "node0", children=[scene.GeometryNode(geom, [create_material(dae)])])
    myscene = scene.Scene("myscene", [node])
    dae.scenes.append(myscene)
    dae.scene = myscene
    return dae
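# A minimal usage sketch for mesh_to_collada above. The mesh argument is
# assumed to expose .v (vertices), .f (triangle faces), .vc (per-vertex
# colors) and .e (edges), as the function expects; the stub class here is
# purely illustrative, not part of any library.
import numpy as np

class _StubMesh(object):
    v = np.array([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0]])
    f = np.array([[0, 1, 2]])
    vc = np.array([[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]])
    e = None

dae = mesh_to_collada(_StubMesh())
dae.write('single_triangle.dae')  # Collada.write accepts a path or file object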
def main():
    """ Generate .dae file. """
    # Imports needed by this snippet (pycollada and numpy).
    import numpy
    from collada import Collada, material, source, geometry, scene

    mesh = Collada()
    effect = material.Effect('effect0', [], 'phong',
                             diffuse=(1, 0, 0), specular=(0, 1, 0))
    mat = material.Material('material0', 'mymaterial', effect)
    mesh.effects.append(effect)
    mesh.materials.append(mat)
    vert_floats = numpy.array([
        -50, 50, 50, 50, 50, 50, -50, -50, 50, 50, -50, 50,
        -50, 50, -50, 50, 50, -50, -50, -50, -50, 50, -50, -50
    ])
    normal_floats = numpy.array([
        0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1,
        0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0,
        0, -1, 0, 0, -1, 0, 0, -1, 0, 0, -1, 0,
        -1, 0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0,
        1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0,
        0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0, -1
    ])
    components = ('X', 'Y', 'Z')
    vert_src = source.FloatSource('cubeverts-array', vert_floats, components)
    normal_src = source.FloatSource('cubenormals-array', normal_floats, components)
    input_list = source.InputList()
    input_list.addInput(0, 'VERTEX', '#cubeverts-array')
    input_list.addInput(1, 'NORMAL', '#cubenormals-array')
    indices = numpy.array([
        0, 0, 2, 1, 3, 2, 0, 0, 3, 2, 1, 3,
        0, 4, 1, 5, 5, 6, 0, 4, 5, 6, 4, 7,
        6, 8, 7, 9, 3, 10, 6, 8, 3, 10, 2, 11,
        0, 12, 4, 13, 6, 14, 0, 12, 6, 14, 2, 15,
        3, 16, 7, 17, 5, 18, 3, 16, 5, 18, 1, 19,
        5, 20, 7, 21, 6, 22, 5, 20, 6, 22, 4, 23
    ])
    geom = geometry.Geometry(mesh, 'geometry0', 'mycube', [vert_src, normal_src])
    triset = geom.createTriangleSet(indices, input_list, 'materialref')
    geom.primitives.append(triset)
    mesh.geometries.append(geom)
    matnode = scene.MaterialNode('materialref', mat, inputs=[])
    geomnode = scene.GeometryNode(geom, [matnode])
    node = scene.Node('node0', children=[geomnode])
    myscene = scene.Scene('myscene', [node])
    mesh.scenes.append(myscene)
    mesh.scene = myscene
    mesh.write('collada_sample.dae')
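# A quick round-trip check for main() above, assuming it has been run in the
# current directory: the generated file can be read back with pycollada and
# the cube's triangle set inspected.
from collada import Collada

main()
doc = Collada('collada_sample.dae')
print(len(doc.geometries))                          # 1
print(doc.geometries[0].primitives[0].ntriangles)   # 12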
def display_collada(dae_file):
    """ Display the DAE mesh. Requires pycollada. """
    print("Displaying %s" % dae_file)
    from collada import Collada, DaeUnsupportedError, DaeBrokenRefError
    from mayavi import mlab  # assumed: mlab here is mayavi's mlab
    mesh = Collada(dae_file, ignore=[DaeUnsupportedError, DaeBrokenRefError])
    for geometry in mesh.scene.objects('geometry'):
        for prim in geometry.primitives():
            # use primitive-specific ways to get triangles
            prim_type = type(prim).__name__
            if prim_type == 'BoundTriangleSet':
                triangles = prim
            elif prim_type == 'BoundPolylist':
                triangles = prim.triangleset()
            else:
                # Unsupported primitive type
                triangles = None
            if triangles is not None:
                x = triangles.vertex[:, 0]
                y = triangles.vertex[:, 1]
                z = triangles.vertex[:, 2]
                mlab.triangular_mesh(x, y, z, triangles.vertex_index,
                                     color=(0, 0, 1))
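# A hedged invocation of display_collada above, assuming mayavi is installed
# and 'model.dae' is a placeholder path to an existing COLLADA file; the
# scene is shown once all geometries have been added.
from mayavi import mlab

display_collada('model.dae')
mlab.show()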
def load(op, ctx, filepath=None, **kwargs): c = Collada(filepath, ignore=[DaeBrokenRefError]) impclass = get_import(c) imp = impclass(ctx, c, os.path.dirname(filepath), **kwargs) tf = kwargs['transformation'] print('Begin Load') with prevented_updates(ctx): if tf in ('MUL', 'APPLY'): for i, obj in enumerate(c.scene.objects('geometry')): b_geoms = imp.geometry(obj) if tf == 'MUL': tf_mat = Matrix(obj.matrix) for b_obj in b_geoms: b_obj.matrix_world = tf_mat elif tf == 'PARENT': _dfs(c.scene, imp.node) for i, obj in enumerate(c.scene.objects('light')): imp.light(obj, i) for obj in c.scene.objects('camera'): imp.camera(obj) print('Finish Load') return {'FINISHED'}
def read_collada_surfaces(session, path, name=None, color=(200, 200, 200, 255), **kw): '''Open a collada file.''' from collada import Collada c = Collada(path) if name is None: from os.path import basename name = basename(path) from chimerax.geometry import Place splist = surfaces_from_nodes(c.scene.nodes, color, Place(), {}, session) if len(splist) > 1: from chimerax.core.models import Model s = Model(name, session) s.add(splist) elif len(splist) == 1: s = splist[0] s.name = name else: from chimerax.core.errors import UserError raise UserError('Collada file has no TriangleSets: %s' % name) set_instance_positions_and_colors(s.all_drawings()) ai = c.assetInfo if ai: s.collada_unit_name = ai.unitname s.collada_contributors = ai.contributors return [s], ('Opened collada file %s' % name)
def build_scene(env, outfilename):
    # architecture is :
    # - env.name
    # -- exterior
    # ---- ingredients
    # -- compartments
    # ---- surface
    # ------ ingredients
    # ---- interior
    # ------ ingredients
    # create the document and a node for rootenv
    collada_xml = Collada()
    collada_xml.assetInfo.unitname = "centimeter"
    collada_xml.assetInfo.unitmeter = 0.01
    collada_xml.assetInfo.upaxis = "Y_UP"
    root_env = scene.Node(env.name)
    myscene = scene.Scene(env.name + "_Scene", [root_env])
    collada_xml.scenes.append(myscene)
    collada_xml.scene = myscene
    bbsurface = None  # surface mask; referenced below, so it must be defined
    r = env.exteriorRecipe
    if r:
        collada_xml, root_env = buildRecipe(r, r.name, collada_xml, root_env)
    for o in env.compartments:
        rs = o.surfaceRecipe
        if rs:
            # p, s, bb = up(767.0)  # used for lipids
            # pp, ss, bbsurface = up(700.0)
            collada_xml, root_env = buildRecipe(rs, str(o.name) + "_surface",
                                                collada_xml, root_env,
                                                mask=bbsurface)
        ri = o.innerRecipe
        if ri:
            collada_xml, root_env = buildRecipe(ri, str(o.name) + "_interior",
                                                collada_xml, root_env,
                                                mask=bbsurface)
    collada_xml, root_env = buildCompartmentsGeom(env.compartments[0],
                                                  collada_xml, root_env)
    collada_xml.write(outfilename)
    return collada_xml
def __init__(self, directory, export_as='dae_only'): self._dir = directory self._export_as = export_as self._geometries = {} self._materials = {} self._collada = Collada() self._scene = Scene('main', []) self._collada.scenes.append(self._scene) self._collada.scene = self._scene
def export_dae(vertices, indices, fname): mesh = Collada() effect = material.Effect("effect0", [], "phong", diffuse=(1, 0, 0), specular=(0, 1, 0)) mat = material.Material("material0", "mymaterial", effect) mesh.effects.append(effect) mesh.materials.append(mat) vert_src = source.FloatSource('verts-array', numpy.array(vertices), ('X', 'Y', 'Z')) inlist = source.InputList() inlist.addInput(0, 'VERTEX', '#verts-array') indices = numpy.array(indices) geom = geometry.Geometry(mesh, 'geometry0', 'linac', [vert_src]) triset = geom.createTriangleSet(indices, inlist, "materialref") geom.primitives.append(triset) mesh.geometries.append(geom) matnode = scene.MaterialNode("materialref", mat, inputs=[]) geomnode = scene.GeometryNode(geom, [matnode]) node = scene.Node("node0", children=[geomnode]) myscene = scene.Scene("myscene", [node]) mesh.scenes.append(myscene) mesh.scene = myscene mesh.write(fname)
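# A small, hedged example of calling export_dae above: one triangle, with the
# vertex data given as flattened XYZ floats and the indices referencing those
# three vertices.
vertices = [0.0, 0.0, 0.0,
            1.0, 0.0, 0.0,
            0.0, 1.0, 0.0]
indices = [0, 1, 2]
export_dae(vertices, indices, 'triangle.dae')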
def collada_from_scene(scene, name="triagain"): collada = Collada() geometry_nodes = ([ geometry_node_from_mesh( collada=collada, mesh=child.mesh, name=f"mesh_geometry_{i}", ) for i, child in enumerate(child for child in scene.children if isinstance(child, InternalMesh)) ] + [ geometry_node_from_polyline( collada, child.polyline, name=f"polyline_geometry_{i}", color=normalize_color(child.color), ) for i, child in enumerate(child for child in scene.children if isinstance(child, InternalLine)) ] + [ geometry_node_from_points( collada, points=np.array([point.point for point in points]), radius=scene.point_radius, color=normalize_color(color), name=f"point_geometry_{i}", ) for i, (color, points) in enumerate( groupby( lambda point: point.color, [ child for child in scene.children if isinstance(child, InternalPoint) ], ).items()) ]) scene = Scene(name, [Node("root", children=geometry_nodes)]) collada.scenes.append(scene) collada.scene = scene return collada
def _save_dae(self, name): """\ Save the model as a collada file. """ mesh = Collada() effect = material.Effect("effect0", [], "phong", diffuse=(0.9,0.9,0.9), \ specular=(0.1,0.1,0.1)) mat = material.Material("material0", "mymaterial", effect) mesh.effects.append(effect) mesh.materials.append(mat) vert_floats = [] norm_floats = [] for vertex in self.vertices: vert_floats.append(vertex.x) vert_floats.append(vertex.y) vert_floats.append(vertex.z) for normal in self.normals: norm_floats.append(normal.x) norm_floats.append(normal.y) norm_floats.append(normal.z) indices = array(self.collada_indices) vert_src = source.FloatSource("vert-array", array(vert_floats), \ ('X', 'Y', 'Z')) norm_src = source.FloatSource("norm-array", array(norm_floats), \ ('X', 'Y', 'Z')) geom = geometry.Geometry(mesh, "geometry0", "solid", [vert_src, norm_src]) input_list = source.InputList() input_list.addInput(0, 'VERTEX', "#vert-array") input_list.addInput(1, 'NORMAL', "#norm-array") triset = geom.createTriangleSet(indices, input_list, "materialref") geom.primitives.append(triset) mesh.geometries.append(geom) matnode = scene.MaterialNode("materialref", mat, inputs=[]) geomnode = scene.GeometryNode(geom, [matnode]) node = scene.Node("node0", children=[geomnode]) myscene = scene.Scene("myscene", [node]) mesh.scenes.append(myscene) mesh.scene = myscene mesh.write(name+'.dae')
def load(file):
    mesh = Collada(file)
    index_count = 0
    all_indices = []
    all_vertices = []
    for geometry in mesh.geometries:
        for primitive in geometry.primitives:
            indices = []
            for tri in primitive.indices:
                indices.extend(tri[:, 0] + index_count)
            index_count += len(primitive.vertex)
            vertices = primitive.vertex
            all_vertices.extend(vertices)
            all_indices.extend(indices)
    return np.array(all_vertices).reshape(len(all_vertices), 3), np.array(all_indices)
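# Usage sketch for load() above, assuming 'model.dae' is a readable COLLADA
# file: it returns an (N, 3) vertex array plus a flat triangle index array.
verts, idx = load('model.dae')
print(verts.shape, idx.shape)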
def _import_dae(self, file): """\ Import a collada object. """ solid = Collada(file) self._triangles = [] for geometry in solid.geometries: for triangle_set in geometry.primitives: for triangle in triangle_set: v = triangle.vertices try: points = [Point(v[0][0], v[0][1], v[0][2]), \ Point(v[1][0], v[1][1], v[1][2]), \ Point(v[2][0], v[2][1], v[2][2])] self._triangles.append(Triangle(*points)) except IndexError: pass
def load(op, ctx, filepath=None, **kwargs): c = Collada(filepath, ignore=[DaeBrokenRefError]) impclass = get_import(c) imp = impclass(ctx, c, os.path.dirname(filepath), **kwargs) tf = kwargs['transformation'] allvertices = np.vstack([ i.vertex for j in c.scene.objects('geometry') for i in j.primitives() if isinstance(i, BoundTriangleSet) ]) scale = np.max(allvertices.max(axis=0) - allvertices.min(axis=0)) / 2 mean = (allvertices.min(axis=0) + allvertices.max(axis=0)) * 0.5 for i, obj in enumerate(c.scene.objects('geometry')): b_geoms = imp.geometry(obj, mean, scale) return {'FINISHED'}
def read_collada_surfaces(path, session, color=(178, 178, 178, 255)): from os.path import basename from ..models import Model s = Model(basename(path)) from collada import Collada c = Collada(path) from ..geometry.place import Place splist = surface_pieces_from_nodes(c.scene.nodes, s, color, Place(), {}) set_positions_and_colors(splist) ai = c.assetInfo if ai: s.collada_unit_name = ai.unitname s.collada_contributors = ai.contributors return s
def __init__(self, collada_file_path): model = Collada(collada_file_path) self.vao = [] self.inverse_transform_matrices = [ value for _, value in model.controllers[0].joint_matrices.items() ] self.joints_order = { "Armature_" + joint_name: index for index, joint_name in enumerate( np.squeeze(model.controllers[0].weight_joints.data)) } self.joint_count = len(self.joints_order) for node in model.scenes[0].nodes: if node.id == 'Armature': self.root_joint = Joint( node.children[0].id, self.inverse_transform_matrices[self.joints_order.get( node.children[0].id)]) self.root_joint.children.extend( self.__load_armature(node.children[0])) del self.inverse_transform_matrices if node.id == "Cube": self.__load_mesh_data(node.children[0]) self.render_static_matrices = [ np.identity(4) for _ in range(len(self.joints_order)) ] self.render_animation_matrices = [ i for i in range(len(self.joints_order)) ] anims = et.parse(collada_file_path).getroot().find( "{http://www.collada.org/2005/11/COLLADASchema}library_animations" ).findall("{http://www.collada.org/2005/11/COLLADASchema}animation") self.__load_keyframes(anims) self.prev_iter = 0
def __init__(self, collada_file_path): model = Collada(collada_file_path) self.vao = [] self.inverse_transform_matrices = [ value for _, value in model.controllers[0].joint_matrices.items() ] self.joints_order = { "Armature_" + joint_name: index for index, joint_name in enumerate( np.squeeze(model.controllers[0].weight_joints.data)) } self.joint_count = len(self.joints_order) for node in model.scenes[0].nodes: if node.id == 'Armature': self.root_joint = Joint( node.children[0].id, self.inverse_transform_matrices[self.joints_order.get( node.children[0].id)]) self.root_joint.children.extend( self.__load_armature(node.children[0])) del self.inverse_transform_matrices if node.id == "Cube": self.__load_mesh_data(node.children[0]) self.render_static_matrices = [ np.identity(4) for _ in range(len(self.joints_order)) ] self.render_animation_matrices = [ i for i in range(len(self.joints_order)) ] self.__load_keyframes(model.animations) self.doing_animation = False self.frame_start_time = None self.animation_keyframe_pointer = 0
def import_Collada(filename):
    '''
    Returns a list of objects represented as a list of triangles.
    A triangle is a list of vertices. A vertex is a list of coordinates.
    '''
    from collada import Collada
    import sys, inspect
    # This allows it to accept bad Collada files by ignoring all errors.
    clsmembers = inspect.getmembers(sys.modules['collada.common'],
                                    inspect.isclass)
    ignore_types = [cls[1] for cls in clsmembers]
    mesh = Collada(filename, ignore=ignore_types)
    geoms = mesh.geometries  # This is the list of geometries
    geom_tris = []
    for geom in geoms:
        for primitive in geom.primitives:
            triangles = list(primitive)
            # Append each triangle set, with each triangle being just a list of vertices
            geom_tris.append([tri.vertices for tri in triangles])
    return geom_tris
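# Illustrative call to import_Collada above: each returned item is one
# geometry's list of triangles, and each triangle is a small array of vertex
# coordinates. 'model.dae' is just a placeholder path.
for tris in import_Collada('model.dae'):
    print(len(tris), 'triangles')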
def instancesToCollada(self,parent_object,collada_xml=None,instance_node=True,**kw): try : from upy.transformation import decompose_matrix from collada import Collada from collada import material from collada import source from collada import geometry from collada import scene except : return inst_parent=parent_object#self.getCurrentSelection()[0] ch=self.getChilds(inst_parent) transpose=True if "transpose" in kw : transpose = kw["transpose"] #instance master if "mesh" in kw and kw["mesh"] is not None: inst_master = kw["mesh"] f,v,vn = self.DecomposeMesh(kw["mesh"],edit=False,copy=False,tri=True, transform=False) else : inst_master = self.getMasterInstance(ch[0]) print "master is ",inst_master #grabb v,f,n of inst_master f,v,vn = self.DecomposeMesh(inst_master,edit=False,copy=False,tri=True, transform=False) #special case when come from x-z swap # v=[[vv[2],vv[1],vv[0]] for vv in v] # go back to regular #-90degree rotation onY mry90 = self.rotation_matrix(-math.pi/2.0, [0.0,1.0,0.0])#? v=self.ApplyMatrix(v,mry90)#same for the normal? vn=self.ApplyMatrix(vn,mry90)#same for the normal? iname = self.getName( inst_master ) pname = self.getName( inst_parent ) if collada_xml is None: collada_xml = Collada() collada_xml.assetInfo.unitname="centimeter" collada_xml.assetInfo.unitmeter=0.01 mat = self.getMaterialObject(inst_master) if len(mat) : mat = mat[0] props = self.getMaterialProperty(mat,color=1)#,specular_color=1) print "colors is ",props effect = material.Effect("effect"+iname, [], "phong", diffuse=[props["color"][0],props["color"][1],props["color"][2],1.0]) # specular = props["specular_color"]) mat = material.Material("material"+iname, iname+"material", effect) collada_xml.effects.append(effect) collada_xml.materials.append(mat) matnode = scene.MaterialNode(iname+"material"+"ref", mat, inputs=[]) #the geom #invert Z ? for C4D? vertzyx = numpy.array(v)#* numpy.array([1,1,-1]) z,y,x=vertzyx.transpose() vertxyz = numpy.vstack([x,y,z]).transpose()#* numpy.array([1,1,-1]) vert_src = source.FloatSource(iname+"_verts-array", vertxyz.flatten(), ('X', 'Y', 'Z')) norzyx=numpy.array(vn) nz,ny,nx=norzyx.transpose() norxyz = numpy.vstack([nx,ny,nz]).transpose()* numpy.array([1,1,-1]) normal_src = source.FloatSource(iname+"_normals-array", norxyz.flatten(), ('X', 'Y', 'Z')) geom = geometry.Geometry(collada_xml, "geometry"+iname, iname, [vert_src,normal_src])# normal_src]) input_list = source.InputList() input_list.addInput(0, 'VERTEX', "#"+iname+"_verts-array") input_list.addInput(0, 'NORMAL', "#"+iname+"_normals-array") #invert all the face fi=numpy.array(f,int)#[:,::-1] triset = geom.createTriangleSet(fi.flatten(), input_list, iname+"materialref") geom.primitives.append(triset) collada_xml.geometries.append(geom) #the noe #instance here ? #creae the instance maser node : if instance_node: master_geomnode = scene.GeometryNode(geom, [matnode])#doesn work ? 
master_node = scene.Node("node_"+iname, children=[master_geomnode,])#,transforms=[tr,rz,ry,rx,s]) g=[] for c in ch : #collada.scene.NodeNode if instance_node: geomnode = scene.NodeNode(master_node) else : geomnode = scene.GeometryNode(geom, [matnode]) matrix = self.ToMat(self.getTransformation(c))#.transpose()#.flatten() if transpose: matrix = numpy.array(matrix).transpose() scale, shear, euler, translate, perspective=decompose_matrix(matrix) scale = self.getScale(c) p=translate#matrix[3,:3]/100.0#unit problem tr=scene.TranslateTransform(p[0],p[1],p[2]) rx=scene.RotateTransform(1,0,0,numpy.degrees(euler[0])) ry=scene.RotateTransform(0,1,0,numpy.degrees(euler[1])) rz=scene.RotateTransform(0,0,1,numpy.degrees(euler[2])) # rx=scene.RotateTransform(-1,0,0,numpy.degrees(euler[0])) # ry=scene.RotateTransform(0,-1,0,numpy.degrees(euler[1])) # rz=scene.RotateTransform(0,0,1,numpy.degrees(euler[2])) s=scene.ScaleTransform(scale[0],scale[1],scale[2]) #n = scene.NodeNode(master_node,transforms=[tr,rz,ry,rx,s]) # gnode = scene.Node(self.getName(c)+"_inst", children=[geomnode,]) n = scene.Node(self.getName(c), children=[geomnode,],transforms=[tr,rz,ry,rx,s]) #scene.MatrixTransform(matrix)[scene.MatrixTransform(numpy.array(matrix).reshape(16,))] # n = scene.Node(self.getName(c), children=[geomnode,], # transforms=[scene.MatrixTransform(numpy.array(matrix).reshape(16,))]) #scene.MatrixTransform(matrix)[scene.MatrixTransform(numpy.array(matrix).reshape(16,))] g.append(n) node = scene.Node(pname, children=g)#,transforms=[scene.RotateTransform(0,1,0,90.0)]) if "parent_node" in kw : kw["parent_node"].children.append(node) node = kw["parent_node"] if not len(collada_xml.scenes) : myscene = scene.Scene("myscene", [node]) collada_xml.scenes.append(myscene) collada_xml.scene = myscene else : if "parent_node" not in kw : collada_xml.scene.nodes.append(node) if instance_node: collada_xml.nodes.append(master_node) return collada_xml
def __init__(self, objects, filepath, directory, kwargs): self._is_zae = kwargs["export_as"] == "zae" self._export_textures = kwargs["export_textures"] self._add_blender_extensions = kwargs["add_blender_extensions"] self._filepath = filepath self._dir = directory self._ext_files_map = {} self._ext_files_revmap = {} if self._is_zae: self._zip = zipfile.ZipFile(self._filepath, "w") class ZipAttr: compress_type = zipfile.ZIP_DEFLATED file_attr = 0o100644 << 16 date_time = time.gmtime()[:6] scene_name = "scene.dae" @classmethod def new_item(celf, filename): item = zipfile.ZipInfo() item.compress_type = celf.compress_type item.external_attr = celf.file_attr item.date_time = celf.date_time item.filename = filename return item #end new_item #end ZipAttr self._zipattr = ZipAttr # First item in archive is uncompressed and named “mimetype”, and # contents is MIME type for archive. This way it ends up at a fixed # offset (filename at 30 bytes from start, contents at 38 bytes) for # easy detection by format-sniffing tools. This convention is used # by ODF and other formats similarly based on Zip archives. mimetype = zipfile.ZipInfo() mimetype.filename = "mimetype" mimetype.compress_type = zipfile.ZIP_STORED mimetype.external_attr = ZipAttr.file_attr mimetype.date_time = (2020, 8, 23, 1, 33, 52) # about when I started getting .zae export working self._zip.writestr(mimetype, b"model/vnd.collada+xml+zip") # extrapolating from the fact that the official type for .dae files # is “model/vnd.collada+xml” else: self._zip = None self._zipattr = None #end if self._up_axis = kwargs["up_axis"] if self._up_axis == "Z_UP": self._orient = Matrix.Identity(4) elif self._up_axis == "X_UP": self._orient = Matrix.Rotation(-120 * DEG, 4, Vector(1, -1, 1)) else: # "Y_UP" or unspecified self._orient = Matrix.Rotation(-90 * DEG, 4, "X") #end if obj_children = {} for obj in objects: parent = obj.parent if parent == None: parentname = None else: parentname = parent.name #end if if parentname not in obj_children: obj_children[parentname] = set() #end if obj_children[parentname].add(obj.name) #end for self._obj_children = obj_children self._selected_only = kwargs["use_selection"] self._geometries = {} self._materials = {} self._collada = Collada() self._collada.xmlnode.getroot().set("version", kwargs["collada_version"]) self._collada.assetInfo.unitmeter = 1 self._collada.assetInfo.unitname = "metre" self._collada.assetInfo.upaxis = self._up_axis self._collada.assetInfo.save() root_technique = self.blender_technique( True, self._collada.xmlnode.getroot()) if root_technique != None: prefixes = E.id_prefixes() for k in sorted(DATABLOCK.__members__.keys()): v = DATABLOCK[k] if not v.internal_only: prefix = E.prefix(name=k, value=v.nameid("")) prefixes.append(prefix) #end if #end for root_technique.append(prefixes) #end if self._scene = Scene(DATABLOCK.SCENE.nameid("main"), []) self._collada.scenes.append(self._scene) self._collada.scene = self._scene self._id_seq = 0
def getColladaMesh(filename, node, link): """Read collada file.""" if not colladaIsAvailable: sys.stderr.write('Collada module not found, please install it with:\n') sys.stderr.write(' python -m pip install pycollada\n') sys.stderr.write('Skipping "%s"\n' % filename) return print('Parsing Mesh: ' + filename) colladaMesh = Collada(filename) index = -1 if hasattr(node, 'material') and node.material: for geometry in list(colladaMesh.scene.objects('geometry')): for data in list(geometry.primitives()): visual = Visual() index += 1 visual.position = node.position visual.rotation = node.rotation visual.material.diffuse.red = node.material.diffuse.red visual.material.diffuse.green = node.material.diffuse.green visual.material.diffuse.blue = node.material.diffuse.blue visual.material.diffuse.alpha = node.material.diffuse.alpha visual.material.texture = node.material.texture name = '%s_%d' % (os.path.splitext( os.path.basename(filename))[0], index) if type(data.original) is lineset.LineSet: visual.geometry.lineset = True if name in Geometry.reference: visual.geometry = Geometry.reference[name] else: Geometry.reference[name] = visual.geometry visual.geometry.name = name visual.geometry.scale = node.geometry.scale for val in data.vertex: visual.geometry.trimesh.coord.append(numpy.array(val)) for val in data.vertex_index: visual.geometry.trimesh.coordIndex.append(val) if data.texcoordset: # non-empty for val in data.texcoordset[0]: visual.geometry.trimesh.texCoord.append(val) if data.texcoord_indexset: # non-empty for val in data.texcoord_indexset[0]: visual.geometry.trimesh.texCoordIndex.append(val) if hasattr( data, '_normal' ) and data._normal is not None and data._normal.size > 0: for val in data._normal: visual.geometry.trimesh.normal.append( numpy.array(val)) if hasattr( data, '_normal_index' ) and data._normal_index is not None and data._normal_index.size > 0: for val in data._normal_index: visual.geometry.trimesh.normalIndex.append(val) if data.material and data.material.effect: if data.material.effect.emission and isinstance( data.material.effect.emission, tuple): visual.material.emission = colorVector2Instance( data.material.effect.emission) if data.material.effect.ambient and isinstance( data.material.effect.ambient, tuple): visual.material.ambient = colorVector2Instance( data.material.effect.ambient) if data.material.effect.specular and isinstance( data.material.effect.specular, tuple): visual.material.specular = colorVector2Instance( data.material.effect.specular) if data.material.effect.shininess: visual.material.shininess = data.material.effect.shininess if data.material.effect.index_of_refraction: visual.material.index_of_refraction = data.material.effect.index_of_refraction if data.material.effect.diffuse: if numpy.size(data.material.effect.diffuse) > 1\ and all([isinstance(x, numbers.Number) for x in data.material.effect.diffuse]): # diffuse is defined by values visual.material.diffuse = colorVector2Instance( data.material.effect.diffuse) else: # diffuse is defined by *.tif files visual.material.texture = 'textures/' + \ data.material.effect.diffuse.sampler.surface.image.path.split('/')[-1] txt = os.path.splitext(visual.material.texture)[1] if txt == '.tiff' or txt == '.tif': for dirname, dirnames, filenames in os.walk( '.'): for file in filenames: if file == str( visual.material.texture.split( '/')[-1]): try: tifImage = Image.open( os.path.join( dirname, file)) img = './' + robotName + '_textures' tifImage.save( os.path.splitext( os.path.join( img, file))[0] + '.png') 
visual.material.texture = ( robotName + '_textures/' + os.path.splitext(file)[0] + '.png') print('translated image ' + visual.material.texture) except IOError: visual.material.texture = "" print('failed to open ' + os.path.join( dirname, file)) else: visual.material.diffuse = colorVector2Instance( [1.0, 1.0, 1.0, 1.0]) link.visual.append(visual) else: for geometry in list(colladaMesh.scene.objects('geometry')): for data in list(geometry.primitives()): collision = Collision() collision.position = node.position collision.rotation = node.rotation collision.geometry.scale = node.geometry.scale for value in data.vertex: collision.geometry.trimesh.coord.append(numpy.array(value)) for value in data.vertex_index: collision.geometry.trimesh.coordIndex.append(value) link.collision.append(collision)
def read_collada_file(model_path: Union[Path, str]) -> RawModelData: """Reads the model and returns it's data in the desirable format.""" if isinstance(model_path, Path): model_path = str(model_path) collada_obj = Collada(model_path) # assumes that there is a single controller if len(collada_obj.controllers) != 1: raise RuntimeError( f'there should be exactly 1 controller, got {len(collada_obj.controllers)}' ) controller: Skin = collada_obj.controllers[0] triangles: TriangleSet = controller.geometry.primitives[0] # get vertices vertices = triangles.vertex n_vertices = vertices.shape[0] # apply the bind_shape_matrix to all of the vertices t_bind_shape = Transformation(controller.bind_shape_matrix) vertices = t_bind_shape.apply(vertices) # get faces faces = triangles.vertex_index n_faces = triangles.ntriangles _check_all_vertices_in_faces(n_vertices, faces) # get texture vertex_texture_coords = _get_vertex_texture_coords( n_vertices, faces, triangles.texcoord_indexset, triangles.texcoordset) # get normals vertex_normals = _get_vertex_normals(n_vertices, faces, triangles.normal_index, triangles.normal) # get joint data root_joint, joint_list, n_bound_joints, n_total_joints = _get_joints_data( collada_obj.scene.nodes, controller) # get weight matrix weight_matrix = _get_weight_matrix(n_vertices, n_bound_joints, controller.weights.data.flatten(), controller.weight_index, controller.joint_index) # read animation data animations = collada_obj.animations n_keyframes = 0 if len(animations) > 0: n_keyframes = _read_animations(animations, root_joint) return RawModelData( n_vertices=n_vertices, n_faces=n_faces, n_bound_joints=n_bound_joints, n_total_joints=n_total_joints, vertices=vertices, faces=faces, vertex_normals=vertex_normals, vertex_texture_coords=vertex_texture_coords, weight_matrix=weight_matrix, joint_list=joint_list, root_joint=root_joint, n_keyframes=n_keyframes, )
def export(file): # Load scene from file collada_file = Collada(file) scene = collada_file.scene # Create folder to store resources folder_path = resources_path / Path(file.name).stem if not folder_path.exists(): os.mkdir(folder_path) # Export textures mat_map = {} model_folder_path = Path(file.name).parent for material in collada_file.materials: if hasattr(material.effect.diffuse, "sampler"): mat_path = model_folder_path / Path( material.effect.diffuse.sampler.surface.image.path) shutil.copy(mat_path, folder_path / mat_path.name) mat_map[material.effect.id] = Path( file.name).stem + "/" + mat_path.name # Process skinned meshes skin_data = {} node_list = [] bind_mats = [] for controller in collada_file.controllers: joints = list(controller.sourcebyid[controller.joint_source]) joint_list = [] weight_list = [] for i in range(len(controller.joint_index)): joint_indices = controller.joint_index[i] vertex_joints = [] for joint_index in joint_indices: joint_node = joints[joint_index] if joint_node not in node_list: node_list.append(joint_node) vertex_joints.append(node_list.index(joint_node)) joint_list.append(vertex_joints) weight_indices = controller.weight_index[i] vertex_weights = [] for weight_index in weight_indices: vertex_weights.append( list(controller.weights)[weight_index][0].item()) weight_list.append(vertex_weights) skin_data[controller.geometry.id] = { "joints": joint_list, "weights": weight_list } # Export meshes for geometry in collada_file.geometries: # Create new mesh new_mesh = {} vertex_list = [] texcoords_list = [] normals_list = [] joints_list = [] weights_list = [] for primitive in geometry.primitives: # Set mesh's texture if primitive.material in mat_map: new_mesh["texture"] = mat_map[primitive.material] for tri in list(primitive): # Go over vertices, texture coordinates, and normals for vertex in tri.vertices: for coord in vertex: vertex_list.append(coord.item()) for texcoord in tri.texcoords: for coord in texcoord: texcoords_list.append(coord[0].item()) texcoords_list.append(coord[1].item()) for normal in tri.normals: for coord in normal: normals_list.append(coord.item()) # Go over weights if geometry.id in skin_data: for index in tri.indices: joints_list.append( skin_data[geometry.id]["joints"][index]) weights_list.append( skin_data[geometry.id]["weights"][index]) new_mesh["joints"] = joints_list new_mesh["weights"] = weights_list new_mesh["vertices"] = vertex_list new_mesh["texcoords"] = texcoords_list new_mesh["normals"] = normals_list # Add metadata new_mesh["metadata"] = {"version": "0.2", "type": "mesh"} # Export JSON files with open(folder_path / f"{geometry.name}.json", "w") as mesh_file: json.dump(new_mesh, mesh_file) # Export sub-tree joint_node_dict = {} root_node = ColladaImporter.process_node(scene, np.identity(4), joint_node_dict, node_list, Path(file.name).stem) node_dict = GraphNode.scene_graph_to_dict(root_node) with open(folder_path / f"{Path(file.name).stem}.json", "w") as scene_file: json.dump(node_dict, scene_file)
class ColladaExport(object): def __init__(self, directory, export_as='dae_only'): self._dir = directory self._export_as = export_as self._geometries = {} self._materials = {} self._collada = Collada() self._scene = Scene('main', []) self._collada.scenes.append(self._scene) self._collada.scene = self._scene def save(self, fp): self._collada.write(fp) def object(self, b_obj, parent=None, children=True): b_matrix = b_obj.matrix_world if parent: if children: b_matrix = b_obj.matrix_local else: b_matrix = Matrix() node = self.node(b_obj.name, b_matrix) if any(b_obj.children) and children: self.object(b_obj, parent=node, children=False) for child in b_obj.children: self.object(child, parent=node) if parent: parent.children.append(node) else: self._scene.nodes.append(node) inode_meth = getattr(self, 'obj_' + b_obj.type, None) if inode_meth: node.children.extend(inode_meth(b_obj)) def node(self, b_name, b_matrix=None): tf = [] if b_matrix: tf.append(self.matrix(b_matrix)) node = Node(b_name, transforms=tf) node.save() return node def obj_MESH(self, b_obj): geom = self._geometries.get(b_obj.data.name, None) if not geom: geom = self.mesh(b_obj.data) self._geometries[b_obj.data.name] = geom matnodes = [] for slot in b_obj.material_slots: sname = slot.material.name if sname not in self._materials: self._materials[sname] = self.material(slot.material) matnodes.append(MaterialNode('none', self._materials[sname], inputs=[])) return [GeometryNode(geom, matnodes)] def mesh(self, b_mesh): vert_srcid = b_mesh.name + '-vertary' vert_f = [c for v in b_mesh.vertices for c in v.co] vert_src = FloatSource(vert_srcid, np.array(vert_f), ('X', 'Y', 'Z')) sources = [vert_src] smooth = list(filter(lambda f: f.use_smooth, b_mesh.faces)) if any(smooth): vnorm_srcid = b_mesh.name + '-vnormary' norm_f = [c for v in b_mesh.vertices for c in v.normal] norm_src = FloatSource(vnorm_srcid, np.array(norm_f), ('X', 'Y', 'Z')) sources.append(norm_src) flat = list(filter(lambda f: not f.use_smooth, b_mesh.faces)) if any(flat): fnorm_srcid = b_mesh.name + '-fnormary' norm_f = [c for f in flat for c in f.normal] norm_src = FloatSource(fnorm_srcid, np.array(norm_f), ('X', 'Y', 'Z')) sources.append(norm_src) name = b_mesh.name + '-geom' geom = Geometry(self._collada, name, name, sources) if any(smooth): ilist = InputList() ilist.addInput(0, 'VERTEX', _url(vert_srcid)) ilist.addInput(1, 'NORMAL', _url(vnorm_srcid)) # per vertex normals indices = np.array([ i for v in [ (v, v) for f in smooth for v in f.vertices ] for i in v]) if _is_trimesh(smooth): p = geom.createTriangleSet(indices, ilist, 'none') else: vcount = [len(f.vertices) for f in smooth] p = geom.createPolylist(indices, vcount, ilist, 'none') geom.primitives.append(p) if any(flat): ilist = InputList() ilist.addInput(0, 'VERTEX', _url(vert_srcid)) ilist.addInput(1, 'NORMAL', _url(fnorm_srcid)) indices = [] # per face normals for i, f in enumerate(flat): for v in f.vertices: indices.extend([v, i]) indices = np.array(indices) if _is_trimesh(flat): p = geom.createTriangleSet(indices, ilist, 'none') else: vcount = [len(f.vertices) for f in flat] p = geom.createPolylist(indices, vcount, ilist, 'none') geom.primitives.append(p) self._collada.geometries.append(geom) return geom def material(self, b_mat): shader = 'lambert' if b_mat.specular_shader == 'PHONG': shader = 'phong' elif b_mat.specular_shader == 'BLINN': shader = 'blinn' if b_mat.use_shadeless: shader = 'constant' child = { 'ambient': (b_mat.ambient,) * 3, 'emission': (b_mat.emit,) * 3, 'diffuse': 
tuple(b_mat.diffuse_color), } if b_mat.use_transparency: child.update({ 'transparent': (0.,0.,0.), 'transparency': b_mat.alpha, }) if b_mat.raytrace_mirror.use: child.update({ 'reflective': tuple(b_mat.mirror_color), 'reflectivity': b_mat.raytrace_mirror.reflect_factor, }) effect = Effect(b_mat.name + '-fx', [], shader, **child) mat = Material(b_mat.name, b_mat.name, effect) self._collada.effects.append(effect) self._collada.materials.append(mat) return mat def matrix(self, b_matrix): f = tuple(map(tuple, b_matrix.transposed())) return MatrixTransform(np.array( [e for r in f for e in r], dtype=np.float32))
def instancesToCollada(self, parent_object, collada_xml=None, instance_node=True, **kw): try: from upy.transformation import decompose_matrix from collada import Collada from collada import material from collada import source from collada import geometry from collada import scene except: return inst_parent = parent_object #self.getCurrentSelection()[0] ch = self.getChilds(inst_parent) transpose = True if "transpose" in kw: transpose = kw["transpose"] #instance master if "mesh" in kw and kw["mesh"] is not None: inst_master = kw["mesh"] f, v, vn = self.DecomposeMesh(kw["mesh"], edit=False, copy=False, tri=True, transform=False) else: inst_master = self.getMasterInstance(ch[0]) print "master is ", inst_master #grabb v,f,n of inst_master f, v, vn = self.DecomposeMesh(inst_master, edit=False, copy=False, tri=True, transform=False) #special case when come from x-z swap # v=[[vv[2],vv[1],vv[0]] for vv in v] # go back to regular #-90degree rotation onY mry90 = self.rotation_matrix(-math.pi / 2.0, [0.0, 1.0, 0.0]) #? v = self.ApplyMatrix(v, mry90) #same for the normal? vn = self.ApplyMatrix(vn, mry90) #same for the normal? iname = self.getName(inst_master) pname = self.getName(inst_parent) if collada_xml is None: collada_xml = Collada() collada_xml.assetInfo.unitname = "centimeter" collada_xml.assetInfo.unitmeter = 0.01 mat = self.getMaterialObject(inst_master) if len(mat): mat = mat[0] props = self.getMaterialProperty(mat, color=1) #,specular_color=1) print "colors is ", props effect = material.Effect( "effect" + iname, [], "phong", diffuse=[props["color"][0], props["color"][1], props["color"][2], 1.0]) # specular = props["specular_color"]) mat = material.Material("material" + iname, iname + "material", effect) collada_xml.effects.append(effect) collada_xml.materials.append(mat) matnode = scene.MaterialNode(iname + "material" + "ref", mat, inputs=[]) #the geom #invert Z ? for C4D? vertzyx = numpy.array(v) #* numpy.array([1,1,-1]) z, y, x = vertzyx.transpose() vertxyz = numpy.vstack([x, y, z]).transpose() #* numpy.array([1,1,-1]) vert_src = source.FloatSource(iname + "_verts-array", vertxyz.flatten(), ('X', 'Y', 'Z')) norzyx = numpy.array(vn) nz, ny, nx = norzyx.transpose() norxyz = numpy.vstack([nx, ny, nz]).transpose() * numpy.array([1, 1, -1]) normal_src = source.FloatSource(iname + "_normals-array", norxyz.flatten(), ('X', 'Y', 'Z')) geom = geometry.Geometry(collada_xml, "geometry" + iname, iname, [vert_src, normal_src]) # normal_src]) input_list = source.InputList() input_list.addInput(0, 'VERTEX', "#" + iname + "_verts-array") input_list.addInput(0, 'NORMAL', "#" + iname + "_normals-array") #invert all the face fi = numpy.array(f, int) #[:,::-1] triset = geom.createTriangleSet(fi.flatten(), input_list, iname + "materialref") geom.primitives.append(triset) collada_xml.geometries.append(geom) #the noe #instance here ? #creae the instance maser node : if instance_node: master_geomnode = scene.GeometryNode(geom, [matnode]) #doesn work ? 
master_node = scene.Node("node_" + iname, children=[ master_geomnode, ]) #,transforms=[tr,rz,ry,rx,s]) g = [] for c in ch: #collada.scene.NodeNode if instance_node: geomnode = scene.NodeNode(master_node) else: geomnode = scene.GeometryNode(geom, [matnode]) matrix = self.ToMat( self.getTransformation(c)) #.transpose()#.flatten() if transpose: matrix = numpy.array(matrix).transpose() scale, shear, euler, translate, perspective = decompose_matrix(matrix) scale = self.getScale(c) p = translate #matrix[3,:3]/100.0#unit problem tr = scene.TranslateTransform(p[0], p[1], p[2]) rx = scene.RotateTransform(1, 0, 0, numpy.degrees(euler[0])) ry = scene.RotateTransform(0, 1, 0, numpy.degrees(euler[1])) rz = scene.RotateTransform(0, 0, 1, numpy.degrees(euler[2])) # rx=scene.RotateTransform(-1,0,0,numpy.degrees(euler[0])) # ry=scene.RotateTransform(0,-1,0,numpy.degrees(euler[1])) # rz=scene.RotateTransform(0,0,1,numpy.degrees(euler[2])) s = scene.ScaleTransform(scale[0], scale[1], scale[2]) #n = scene.NodeNode(master_node,transforms=[tr,rz,ry,rx,s]) # gnode = scene.Node(self.getName(c)+"_inst", children=[geomnode,]) n = scene.Node( self.getName(c), children=[ geomnode, ], transforms=[tr, rz, ry, rx, s] ) #scene.MatrixTransform(matrix)[scene.MatrixTransform(numpy.array(matrix).reshape(16,))] # n = scene.Node(self.getName(c), children=[geomnode,], # transforms=[scene.MatrixTransform(numpy.array(matrix).reshape(16,))]) #scene.MatrixTransform(matrix)[scene.MatrixTransform(numpy.array(matrix).reshape(16,))] g.append(n) node = scene.Node( pname, children=g) #,transforms=[scene.RotateTransform(0,1,0,90.0)]) if "parent_node" in kw: kw["parent_node"].children.append(node) node = kw["parent_node"] if not len(collada_xml.scenes): myscene = scene.Scene("myscene", [node]) collada_xml.scenes.append(myscene) collada_xml.scene = myscene else: if "parent_node" not in kw: collada_xml.scene.nodes.append(node) if instance_node: collada_xml.nodes.append(master_node) return collada_xml
def getColladaMesh(filename, node, link): """Read collada file. Args: filename (str): path to the Collada (.dae) file. node (DOM): XML node. link (Link): link instance. """ colladaMesh = Collada(filename) if hasattr(node, 'material') and node.material: for geometry in list(colladaMesh.scene.objects('geometry')): for data in list(geometry.primitives()): visual = Visual() visual.position = node.position visual.rotation = node.rotation visual.material.diffuse.red = node.material.diffuse.red visual.material.diffuse.green = node.material.diffuse.green visual.material.diffuse.blue = node.material.diffuse.blue visual.material.diffuse.alpha = node.material.diffuse.alpha visual.material.texture = node.material.texture visual.geometry.scale = node.geometry.scale for val in data.vertex: visual.geometry.trimesh.coord.append(numpy.array(val)) for val in data.vertex_index: visual.geometry.trimesh.coordIndex.append(val) if data.texcoordset: # non-empty for val in data.texcoordset[0]: visual.geometry.trimesh.texCoord.append(val) if data.texcoord_indexset: # non-empty for val in data.texcoord_indexset[0]: visual.geometry.trimesh.texCoordIndex.append(val) if hasattr( data, '_normal' ) and data._normal is not None and data._normal.size > 0: for val in data._normal: visual.geometry.trimesh.normal.append(numpy.array(val)) if hasattr( data, '_normal_index' ) and data._normal_index is not None and data._normal_index.size > 0: for val in data._normal_index: visual.geometry.trimesh.normalIndex.append(val) if data.material and data.material.effect: if data.material.effect.emission: visual.material.emission = colorVector2Instance( data.material.effect.emission) if data.material.effect.ambient: visual.material.ambient = colorVector2Instance( data.material.effect.ambient) if data.material.effect.specular: visual.material.specular = colorVector2Instance( data.material.effect.specular) if data.material.effect.shininess: visual.material.shininess = data.material.effect.shininess if data.material.effect.index_of_refraction: visual.material.index_of_refraction = data.material.effect.index_of_refraction if data.material.effect.diffuse: if numpy.size(data.material.effect.diffuse) > 1\ and all([isinstance(x, numbers.Number) for x in data.material.effect.diffuse]): # diffuse is defined by values visual.material.diffuse = colorVector2Instance( data.material.effect.diffuse) else: # diffuse is defined by *.tif files visual.material.texture = 'textures/' + data.material.effect.diffuse.sampler.surface.image.path.split( '/')[-1] txt = os.path.splitext(visual.material.texture)[1] if txt == '.tiff' or txt == '.tif': for dirname, dirnames, filenames in os.walk( '.'): for file in filenames: if file == str( visual.material.texture.split( '/')[-1]): try: tifImage = Image.open( os.path.join( dirname, file)) img = './' + robotName + '_textures' tifImage.save( os.path.splitext( os.path.join( img, file))[0] + '.png') visual.material.texture = robotName + '_textures/' + os.path.splitext( file)[0] + '.png' print('translated image ' + visual.material.texture) except IOError: visual.material.texture = "" print('failed to open ' + os.path.join( dirname, file)) link.visual.append(visual) else: for geometry in list(colladaMesh.scene.objects('geometry')): for data in list(geometry.primitives()): collision = Collision() collision.position = node.position collision.rotation = node.rotation collision.geometry.scale = node.geometry.scale for value in data.vertex: collision.geometry.trimesh.coord.append(numpy.array(value)) for value in data.vertex_index: 
collision.geometry.trimesh.coordIndex.append(value) link.collision.append(collision)
def generate_collada(coords, relative_texture_path): """generate the pycollada mesh out of the coordinates array Arguments: coords {[[[]]]} -- 2d array of 3d coordinates relative_texture_path {str} -- relative path to the texture, relative to the generated collada file Returns: Collada -- final collada mesh """ # create the mesh mesh = Collada() # create source arrays vert_src = source.FloatSource('verts-array', generate_vertex_array(coords), ('X', 'Y', 'Z')) normal_src = source.FloatSource('normals-array', generate_normal_array(coords), ('X', 'Y', 'Z')) uv_src = source.FloatSource('uv-array', generate_uv_array(coords), ('S', 'T')) # create geometry and add the sources geom = geometry.Geometry(mesh, 'geometry', 'terrain', [vert_src, normal_src, uv_src]) # define inputs to triangle set input_list = source.InputList() input_list.addInput(0, 'VERTEX', '#verts-array') input_list.addInput(1, 'NORMAL', '#normals-array') input_list.addInput(2, 'TEXCOORD', '#uv-array', set='0') # create index array indices = generate_index_array(coords) # repeat each of the entries for vertex, normal, uv indices = np.repeat(indices, 3) # create triangle set, add it to list of geometries in the mesh triset = geom.createTriangleSet(indices, input_list, 'material') geom.primitives.append(triset) mesh.geometries.append(geom) # add a material image = material.CImage('material-image', relative_texture_path) surface = material.Surface('material-image-surface', image) sampler2d = material.Sampler2D('material-image-sampler', surface) material_map = material.Map(sampler2d, 'UVSET0') effect = material.Effect('material-effect', [surface, sampler2d], 'lambert', emission=(0.0, 0.0, 0.0, 1), ambient=(0.0, 0.0, 0.0, 1), diffuse=material_map, transparent=material_map, transparency=0.0, double_sided=True) mat = material.Material('materialID', 'material', effect) mesh.effects.append(effect) mesh.materials.append(mat) mesh.images.append(image) # instantiate geometry into a scene node matnode = scene.MaterialNode('material', mat, inputs=[]) geomnode = scene.GeometryNode(geom, [matnode]) node = scene.Node('model', children=[geomnode]) # create scene myscene = scene.Scene('scene', [node]) mesh.scenes.append(myscene) mesh.scene = myscene return mesh
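# Hedged usage sketch for generate_collada above: coords is assumed to be a
# 2-D grid of [x, y, z] points, and the generate_*_array helpers referenced
# inside the function are assumed to be defined in the same module. The
# texture path is relative to where the .dae file will be written.
coords = [[[0.0, 0.0, 0.0], [1.0, 0.0, 0.0]],
          [[0.0, 1.0, 0.0], [1.0, 1.0, 0.0]]]
mesh = generate_collada(coords, 'terrain_texture.png')
mesh.write('terrain.dae')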
class ColladaExport: def __init__(self, objects, filepath, directory, kwargs): self._is_zae = kwargs["export_as"] == "zae" self._export_textures = kwargs["export_textures"] self._add_blender_extensions = kwargs["add_blender_extensions"] self._filepath = filepath self._dir = directory self._ext_files_map = {} self._ext_files_revmap = {} if self._is_zae: self._zip = zipfile.ZipFile(self._filepath, "w") class ZipAttr: compress_type = zipfile.ZIP_DEFLATED file_attr = 0o100644 << 16 date_time = time.gmtime()[:6] scene_name = "scene.dae" @classmethod def new_item(celf, filename): item = zipfile.ZipInfo() item.compress_type = celf.compress_type item.external_attr = celf.file_attr item.date_time = celf.date_time item.filename = filename return item #end new_item #end ZipAttr self._zipattr = ZipAttr # First item in archive is uncompressed and named “mimetype”, and # contents is MIME type for archive. This way it ends up at a fixed # offset (filename at 30 bytes from start, contents at 38 bytes) for # easy detection by format-sniffing tools. This convention is used # by ODF and other formats similarly based on Zip archives. mimetype = zipfile.ZipInfo() mimetype.filename = "mimetype" mimetype.compress_type = zipfile.ZIP_STORED mimetype.external_attr = ZipAttr.file_attr mimetype.date_time = (2020, 8, 23, 1, 33, 52) # about when I started getting .zae export working self._zip.writestr(mimetype, b"model/vnd.collada+xml+zip") # extrapolating from the fact that the official type for .dae files # is “model/vnd.collada+xml” else: self._zip = None self._zipattr = None #end if self._up_axis = kwargs["up_axis"] if self._up_axis == "Z_UP": self._orient = Matrix.Identity(4) elif self._up_axis == "X_UP": self._orient = Matrix.Rotation(-120 * DEG, 4, Vector(1, -1, 1)) else: # "Y_UP" or unspecified self._orient = Matrix.Rotation(-90 * DEG, 4, "X") #end if obj_children = {} for obj in objects: parent = obj.parent if parent == None: parentname = None else: parentname = parent.name #end if if parentname not in obj_children: obj_children[parentname] = set() #end if obj_children[parentname].add(obj.name) #end for self._obj_children = obj_children self._selected_only = kwargs["use_selection"] self._geometries = {} self._materials = {} self._collada = Collada() self._collada.xmlnode.getroot().set("version", kwargs["collada_version"]) self._collada.assetInfo.unitmeter = 1 self._collada.assetInfo.unitname = "metre" self._collada.assetInfo.upaxis = self._up_axis self._collada.assetInfo.save() root_technique = self.blender_technique( True, self._collada.xmlnode.getroot()) if root_technique != None: prefixes = E.id_prefixes() for k in sorted(DATABLOCK.__members__.keys()): v = DATABLOCK[k] if not v.internal_only: prefix = E.prefix(name=k, value=v.nameid("")) prefixes.append(prefix) #end if #end for root_technique.append(prefixes) #end if self._scene = Scene(DATABLOCK.SCENE.nameid("main"), []) self._collada.scenes.append(self._scene) self._collada.scene = self._scene self._id_seq = 0 #end __init__ def write_ext_file(self, category, obj_name, filename, contents): if category not in self._ext_files_map: self._ext_files_map[category] = {} self._ext_files_revmap[category] = {} #end if ext_files_map = self._ext_files_map[category] ext_files_revmap = self._ext_files_revmap[category] if obj_name in ext_files_map: # already encountered this external file out_filename = ext_files_map[obj_name] else: if not self._is_zae: outdir = os.path.join(self._dir, category.subdir) os.makedirs(outdir, exist_ok=True) #end if base_out_filename = 
os.path.join(category.subdir, filename) out_filename = base_out_filename seq = 0 while out_filename in ext_files_revmap: if seq == 0: base_parts = os.path.splitext(base_out_filename) #end if seq += 1 assert seq < 1000000 # impose some ridiculous but finite upper limit out_filename = "%s-%0.3d%s" % (base_parts[0], seq, base_parts[1]) #end while ext_files_map[obj_name] = out_filename ext_files_revmap[out_filename] = obj_name if self._is_zae: item = self._zipattr.new_item(out_filename) self._zip.writestr(item, contents) else: out = open(os.path.join(self._dir, out_filename), "wb") out.write(contents) out.close() #end if #end if return out_filename #end write_ext_file def save(self): if self._is_zae: item = self._zipattr.new_item(self._zipattr.scene_name) dae = io.BytesIO() self._collada.write(dae) self._zip.writestr(item, dae.getvalue()) manifest = ElementTree.Element("dae_root") manifest.text = self._zipattr.scene_name item = self._zipattr.new_item("manifest.xml") self._zip.writestr(item, ElementTree.tostring(manifest)) # all done self._zip.close() else: self._collada.write(self._filepath) #end if #end save def blender_technique(self, as_extra, obj): # experimental: add Blender-specific attributes via a custom <technique>. if self._add_blender_extensions: if isinstance(obj, DaeObject): obj = obj.xmlnode #end if blendstuff = E.technique(profile="BLENDER028") if as_extra: parent = E.extra() else: parent = obj #end if parent.append(blendstuff) if as_extra: obj.append(parent) #end if else: blendstuff = None #end if return blendstuff #end blender_technique def obj_blender_technique(self, as_extra, obj, b_data, attribs): # save any custom technique settings for this object. blendstuff = self.blender_technique(as_extra, obj) if blendstuff != None: for tagname, attrname in attribs: if hasattr(b_data, attrname): subtag = getattr(E, tagname)(str(getattr(b_data, attrname))) blendstuff.append(subtag) #end if #end for #end if #end obj_blender_technique def next_internal_id(self): self._id_seq += 1 return DATABLOCK.INTERNAL_ID.nameid("%0.5d" % self._id_seq) #end next_internal_id def node(self, b_matrix=None): node = Node(id=None, xmlnode=E.node()) # construct my own xmlnode to avoid setting an id or name # (should be optional according to Collada spec) if b_matrix != None: node.transforms.append(self.matrix(b_matrix)) #end if node.save() return node #end node def obj_camera(self, b_obj): result = [] b_cam = b_obj.data if b_cam.type == "PERSP": cam_class = PerspectiveCamera args = \ { "xfov" : b_cam.angle_x / DEG, "yfov" : b_cam.angle_y / DEG, } elif b_cam.type == "ORTHO": cam_class = OrthographicCamera args = \ { "xmag" : b_cam.ortho_scale, "ymag" : b_cam.ortho_scale, } else: cam_class = None #end if if cam_class != None: # todo: shared datablock cam = cam_class \ ( id = DATABLOCK.CAMERA.nameid(b_obj.name), znear = b_cam.clip_start, zfar = b_cam.clip_end, **args ) self._collada.cameras.append(cam) result.append(CameraNode(cam)) #end if return result #end obj_camera def obj_light(self, b_obj): result = [] b_light = b_obj.data light_type = tuple \ ( t for t in ( ("POINT", PointLight), ("SPOT", SpotLight), ("SUN", DirectionalLight), ) if b_light.type == t[0] ) if len(light_type) != 0: light_type = light_type[0][1] else: light_type = None #end if if light_type != None: # todo: shared datablock light = light_type \ ( DATABLOCK.LAMP.nameid(b_obj.name), color = tuple(b_light.color) + (1,) ) for attr, battr, conv in \ ( # conversions are inverses of those done in importer ("falloff_ang", "spot_size", lambda ang : 
                    ang / DEG),
                ("falloff_exp", "spot_blend", lambda blend : 1 / max(blend, 0.00001) - 1),
                  # some very small-magnitude positive value to avoid division by zero
            ) \
            :
                if hasattr(b_light, battr) and hasattr(light, attr):
                    setattr(light, attr, conv(getattr(b_light, battr)))
                #end if
            #end for
            if b_light.use_nodes:
                node_graph = b_light.node_tree
                the_node = list(n for n in node_graph.nodes if n.type == "OUTPUT_LIGHT")[0]
                trace_path = iter \
                  (
                    (
                        ("Surface", "EMISSION"),
                        ("Strength", "LIGHT_FALLOFF"),
                    )
                  )
                found = False
                while True:
                    trace = next(trace_path, None)
                    if trace == None:
                        if not the_node.inputs["Strength"].is_linked:
                            found = True
                        #end if
                        break
                    #end if
                    input_name, want_shader_type = trace
                    input = the_node.inputs[input_name]
                    if not input.is_linked:
                        break
                    links = input.links
                      # note docs say this takes O(N) in total nr links in node graph to compute
                    if len(links) == 0:
                        break
                    the_node = links[0].from_node
                    if the_node.type != want_shader_type:
                        break
                    output_name = links[0].from_socket.name
                #end while
                if found:
                    strength = the_node.inputs["Strength"].default_value
                    if strength != 0:
                        atten = \
                            {
                                "Constant" : "constant_att",
                                "Linear" : "linear_att",
                                "Quadratic" : "quad_att",
                            }.get(output_name)
                        if atten != None and hasattr(light, atten):
                            setattr(light, atten, 1 / strength)
                        #end if
                    #end if
                #end if
            #end if
            self.obj_blender_technique \
              (
                True, light, b_light,
                [
                    ("angle", "angle"),
                    ("power", "energy"),
                    ("shadow_soft_size", "shadow_soft_size"),
                    ("spot_blend", "spot_blend"),
                    ("spot_size", "spot_size"),
                ]
              )
            self._collada.lights.append(light)
            result.append(LightNode(light))
        #end if
        return result
    #end obj_light

    def obj_empty(self, b_obj):
        result = Node(id=DATABLOCK.EMPTY.nameid(b_obj.name))
        return [result]
    #end obj_empty

    def obj_mesh(self, b_obj):

        def make_slotname(slotindex):
            # Blender doesn’t name material slots, but Collada does
            return "slot%.3d" % slotindex
        #end make_slotname

        def encode_mesh(b_mesh, b_mesh_name, b_material_slots):

            def is_trimesh(faces):
                return all([len(f.verts) == 3 for f in faces])
            #end is_trimesh

        #begin encode_mesh
            mesh_name = DATABLOCK.MESH.nameid(b_mesh_name)
            sources = []
            vert_srcid = self.next_internal_id()
            sources.append \
              (
                FloatSource
                  (
                    id = vert_srcid,
                    data = np.array([c for v in b_mesh.verts for c in v.co]),
                    components = ("X", "Y", "Z")
                  )
              )
            vnorm_srcid = self.next_internal_id()
            sources.append \
              (
                FloatSource
                  (
                    id = vnorm_srcid,
                    data = np.array([c for v in b_mesh.verts for c in v.normal]),
                    components = ("X", "Y", "Z")
                  )
              )
              # todo: face normal might be different for flat shading
            uv_ids = []
            if b_mesh.loops.layers.uv.active != None:
                active_uv_name = b_mesh.loops.layers.uv.active.name
            else:
                active_uv_name = None
            #end if
            for i, (b_uvname, uvlayer) in enumerate(b_mesh.loops.layers.uv.items()):
                uv_name = self.next_internal_id()
                uv_ids.append((uv_name, b_uvname))
                sources.append \
                  (
                    FloatSource
                      (
                        id = uv_name,
                        data = np.array
                          (
                            [
                                x
                                for f in b_mesh.faces
                                for l in f.loops
                                for x in l[uvlayer].uv
                            ]
                          ),
                        components = ("S", "T")
                      )
                  )
            #end for
            geom = Geometry(self._collada, mesh_name, mesh_name, sources)
            blendstuff = self.blender_technique(True, geom)
            if blendstuff != None:
                names = E.layer_names()
                for u in uv_ids:
                    names.append(E.name(name=u[1], refid=u[0], type="UV"))
                #end for
                blendstuff.append(names)
            #end if
            for slotindex in range(max(len(b_material_slots), 1)):
                slotname = make_slotname(slotindex)
                assigned = \
                    [f for f in b_mesh.faces if f.material_index == slotindex]
                if any(assigned):
                    ilist = InputList()
                    ilist.addInput(0, "VERTEX", idurl(vert_srcid))
                    ilist.addInput(0, "NORMAL", idurl(vnorm_srcid))
                    setnr = 0
                    for u in uv_ids:
                        setnr += 1
                        ilist.addInput(1, "TEXCOORD", idurl(u[0]),
                            (setnr, 0)[u[1] == active_uv_name])
                              # always assign set 0 to active UV layer
                    #end for
                    indices = []
                    for face in b_mesh.faces:
                        for face_loop in face.loops:
                            this_face = [face_loop.vert.index, face_loop.index]
                            indices.extend(this_face)
                        #end for
                    #end for
                    indices = np.array(indices)
                    if is_trimesh(assigned):
                        p = geom.createTriangleSet(indices, ilist, slotname)
                    else:
                        vcounts = [len(f.verts) for f in assigned]
                        p = geom.createPolylist(indices, vcounts, ilist, slotname)
                    #end if
                    geom.primitives.append(p)
                #end if
            #end for
            self._collada.geometries.append(geom)
            return geom
        #end encode_mesh

    #begin obj_mesh
        b_mesh_name = b_obj.data.name
        b_material_slots = b_obj.material_slots
        b_mesh = bmesh.new()
        geom = self._geometries.get(b_mesh_name, None)
        if not geom:
            b_mesh.from_mesh(b_obj.data)
            geom = encode_mesh(b_mesh, b_mesh_name, b_material_slots)
            self._geometries[b_mesh_name] = geom
        #end if
        matnodes = []
        for slotindex, slot in enumerate(b_material_slots):
            sname = slot.material.name
            if sname not in self._materials:
                self._materials[sname] = self.material(slot.material)
            #end if
            matnodes.append \
              (
                MaterialNode
                  (
                    make_slotname(slotindex),
                    self._materials[sname],
                    inputs = [("ACTIVE_UV", "TEXCOORD", "0")]
                      # always assign set 0 to active UV layer
                  )
              )
        #end for
        b_mesh.free()
        return [GeometryNode(geom, matnodes)]
    #end obj_mesh

    obj_type_handlers = \
        {
            "CAMERA" : (obj_camera, DATABLOCK.CAMERA),
            "EMPTY" : (obj_empty, DATABLOCK.EMPTY),
            "LIGHT" : (obj_light, DATABLOCK.LAMP),
            "MESH" : (obj_mesh, DATABLOCK.MESH),
        }

    def object(self, b_obj, parent=None):
        handle_type = self.obj_type_handlers.get(b_obj.type)
        if handle_type != None:
            if parent != None:
                b_matrix = b_obj.matrix_local
            else:
                b_matrix = self._orient @ b_obj.matrix_world
            #end if
            obj = handle_type[0](self, b_obj)
            is_node = len(obj) == 1 and isinstance(obj[0], Node)
            if is_node:
                obj = obj[0]
                assert b_matrix != None
                obj.transforms.append(self.matrix(b_matrix))
                node = obj
            else:
                node = self.node(b_matrix)
            #end if
            children = self._obj_children.get(b_obj.name)
            if children != None:
                for childname in children:
                    self.object(bpy.data.objects[childname], parent=node)
                #end for
            #end if
            if parent != None:
                parent.children.append(node)
            else:
                self._scene.nodes.append(node)
            #end if
            if not is_node:
                node.children.extend(obj)
            #end if
        #end if
    #end object

    def material(self, b_mat):
        shader = "lambert"
        effect_kwargs = \
            {
                "diffuse" : tuple(b_mat.diffuse_color[:3]),
                "double_sided" : not b_mat.use_backface_culling,
            }
        effect_params = []
        if b_mat.diffuse_color[3] != 1.0:
            effect_kwargs["transparency"] = b_mat.diffuse_color[3]
        #end if
        if b_mat.use_nodes:
            b_shader = list(n for n in b_mat.node_tree.nodes if n.type == "BSDF_PRINCIPLED")
            if len(b_shader) == 1:
                # assume node setup somewhat resembles what importer creates
                b_shader = b_shader[0]
            else:
                b_shader = None
            #end if
            if b_shader != None:

                def get_input(name):
                    input = b_shader.inputs[name]
                    if not input.is_linked:
                        value = input.default_value
                    else:
                        value = None
                    #end if
                    return value
                #end get_input

                def get_input_map(name):
                    input = b_shader.inputs[name]
                    map = None # to begin with
                    if input.is_linked:
                        links = input.links
                          # note docs say this takes O(N) in total nr links in node graph to compute
                        teximage = list \
                          (
                            l.from_node for l in links
                            if isinstance(l.from_node, bpy.types.ShaderNodeTexImage)
                            and l.from_socket.name == "Color"
                          )
                        if len(teximage) != 0:
                            teximage = teximage[0].image
                            if teximage.packed_file != None:
                                contents = teximage.packed_file.data
                            else:
                                contents = open(bpy.path.abspath(teximage.filepath), "rb").read()
                            #end if
                            out_filepath = self.write_ext_file \
                              (
                                category = EXT_FILE.TEXTURE,
                                obj_name = teximage.name,
                                filename = os.path.basename(teximage.filepath),
                                contents = contents
                              )
                            image = CImage(id=self.next_internal_id(), path=out_filepath)
                            surface = Surface(id=self.next_internal_id(), img=image)
                            sampler = Sampler2D(id=self.next_internal_id(), surface=surface)
                            map = Map(sampler=sampler, texcoord="ACTIVE_UV")
                            effect_params.extend([image, surface, sampler])
                        #end if
                    #end if
                    return map
                #end get_input_map

                value = get_input("Base Color")
                if value != None:
                    effect_kwargs["diffuse"] = value[:3]
                elif self._export_textures:
                    map = get_input_map("Base Color")
                    if map != None:
                        effect_kwargs["diffuse"] = map
                    #end if
                #end if
                # todo: support maps for more inputs
                value = get_input("Metallic")
                metallic = True
                if value == None or value == 0:
                    value = get_input("Specular")
                    metallic = False
                #end if
                if value != None and value != 0:
                    shader = "phong" # do I care about “blinn”?
                    if metallic:
                        effect_kwargs["reflective"] = effect_kwargs["diffuse"]
                    else:
                        effect_kwargs["reflective"] = (1, 1, 1)
                    #end if
                    effect_kwargs["reflectivity"] = value
                #end if
                value = get_input("Alpha")
                if value != None and value != 1.0:
                    effect_kwargs["transparency"] = value
                      # overridden by Transmission (below) if any
                #end if
                value = get_input("Transmission")
                if value != None and value != 0:
                    effect_kwargs["transparency"] = value
                    effect_kwargs["transparent"] = effect_kwargs["diffuse"]
                    value = get_input("IOR")
                    if value != None:
                        effect_kwargs["index_of_refraction"] = value
                    #end if
                #end if
            else:
                pass # give up for now
            #end if
        else:
            # quick fudge based only on Viewport Display settings
            if b_mat.metallic > 0 or b_mat.roughness < 1:
                shader = "phong" # do I care about “blinn”?
                try:
                    shininess = 1 / b_mat.roughness - 1 # inverse of formula used in importer
                except ZeroDivisionError:
                    shininess = math.inf
                #end try
                shininess = min(shininess, 10000) # just some arbitrary finite upper limit
                effect_kwargs["reflectivity"] = b_mat.specular_intensity
                effect_kwargs["shininess"] = shininess
                if b_mat.metallic > 0:
                    # not paying attention to actual value of b_mat.metallic!
                    effect_kwargs["reflective"] = b_mat.specular_color[:3]
                else:
                    effect_kwargs["reflective"] = (1, 1, 1)
                #end if
            #end if
        #end if
        effect = Effect(self.next_internal_id(), effect_params, shader, **effect_kwargs)
        mat = Material(DATABLOCK.MATERIAL.nameid(b_mat.name), b_mat.name, effect)
        self._collada.effects.append(effect)
        self._collada.materials.append(mat)
        return mat
    #end material

    @staticmethod
    def matrix(b_matrix):
        return \
            MatrixTransform \
              (
                np.array
                  (
                    [e for r in tuple(map(tuple, b_matrix)) for e in r],
                    dtype = np.float32
                  )
              )
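
# Standalone sketch (not part of the exporter above) of the pycollada pattern the
# material-slot loop relies on: an InputList with VERTEX at offset 0 and TEXCOORD at
# offset 1 bound to set 0, plus a MaterialNode whose inputs entry maps the effect's
# "ACTIVE_UV" texcoord semantic onto that set. The "slot000" symbol follows the
# exporter's naming convention; all other identifiers here are illustrative only.
import numpy as np
from collada import Collada, source, geometry, material, scene

dae = Collada()
verts = source.FloatSource("verts-array",
    np.array([0.0, 0.0, 0.0,  1.0, 0.0, 0.0,  0.0, 1.0, 0.0]), ("X", "Y", "Z"))
uvs = source.FloatSource("uv-array",
    np.array([0.0, 0.0,  1.0, 0.0,  0.0, 1.0]), ("S", "T"))
geom = geometry.Geometry(dae, "geometry0", "tri", [verts, uvs])

ilist = source.InputList()
ilist.addInput(0, "VERTEX", "#verts-array")
ilist.addInput(1, "TEXCOORD", "#uv-array", "0")  # UV set 0, like the active layer above

# two indices per corner: vertex index (offset 0) followed by uv index (offset 1)
indices = np.array([0, 0, 1, 1, 2, 2])
geom.primitives.append(geom.createTriangleSet(indices, ilist, "slot000"))
dae.geometries.append(geom)

effect = material.Effect("effect0", [], "lambert", diffuse=(1, 1, 1))
mat = material.Material("material0", "mat0", effect)
dae.effects.append(effect)
dae.materials.append(mat)

# bind the geometry's "slot000" symbol to the material; the inputs triple ties the
# effect's ACTIVE_UV texcoord semantic to TEXCOORD set 0, as obj_mesh() does above
matnode = scene.MaterialNode("slot000", mat, inputs=[("ACTIVE_UV", "TEXCOORD", "0")])
node = scene.Node("node0", children=[scene.GeometryNode(geom, [matnode])])
dae.scenes.append(scene.Scene("scene0", [node]))
dae.scene = dae.scenes[0]
dae.write("uv_binding_sketch.dae")
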
def __init__(self, filename, scale=1.0):
    Group.__init__(self)
    self._dae = Collada(filename)
    self._load_mesh(self._dae, scale=scale)
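
# The loader above delegates to a _load_mesh() helper that is not shown. A minimal
# sketch of what such a method might do, assuming it only needs scaled vertex and
# index arrays from each bound triangle set; the batch storage and method name are
# hypothetical, only the pycollada calls are real.
def _load_mesh(self, dae, scale=1.0):
    self.batches = []
    for geom in dae.scene.objects('geometry'):
        for prim in geom.primitives():
            # polylists can be converted; triangle sets are used as-is
            triangles = prim.triangleset() if hasattr(prim, 'triangleset') else prim
            verts = triangles.vertex * scale          # apply the uniform scale factor
            indices = triangles.vertex_index.ravel()  # flat triangle index list
            self.batches.append((verts, indices))
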
    ndata = isocontour.newDatasetRegFloat3D(newgrid3D, origin, stepsize)
    # print "pfff"
    isoc = isocontour.getContour3d(ndata, 0, 0, isovalue, isocontour.NO_COLOR_VARIABLE)
    vert = np.zeros((isoc.nvert, 3)).astype('f')
    norm = np.zeros((isoc.nvert, 3)).astype('f')
    col = np.zeros((isoc.nvert)).astype('f')
    tri = np.zeros((isoc.ntri, 3)).astype('i')
    isocontour.getContour3dData(isoc, vert, norm, col, tri, 0)
    # print vert
    if maskGrid.crystal:
        vert = maskGrid.crystal.toCartesian(vert)
    return vert, norm, tri


collada_xml = Collada()
collada_xml.assetInfo.unitname = "centimeter"
collada_xml.assetInfo.unitmeter = 0.01
collada_xml.assetInfo.upaxis = "Y_UP"
root_env = scene.Node(env.name)
myscene = scene.Scene(env.name + "_Scene", [root_env])
collada_xml.scenes.append(myscene)
collada_xml.scene = myscene
name = "myMolecule"
matnode = oneMaterial(name, collada_xml)
master_node = buildMeshGeom(name, v, f, collada_xml, matnode)
collada_xml.nodes.append(master_node)
collada_xml.write("test.dae")
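
# Note: the excerpt above only stores master_node in the node library
# (collada_xml.nodes), while the visual scene contains just the empty root_env node,
# so many viewers will display nothing. A minimal sketch, assuming buildMeshGeom
# returns a collada.scene.Node, that instances the library node under root_env so
# the geometry actually appears in the exported scene:
from collada import scene

root_env.children.append(scene.NodeNode(master_node))  # <instance_node> reference
collada_xml.write("test.dae")                          # rewrite with the instance in place
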
def PyFactoryCreateFile(self, uclass: Class, parent: Object, name: str, filename: str) -> Object:
    # load the collada file
    dae = Collada(filename)
    ue.log_warning(dae)
    self.do_import = False
    self.open_collada_wizard()
    if not self.do_import:
        return None
    # create a new UStaticMesh with the specified name and parent
    static_mesh = StaticMesh(name, parent)
    # prepare a new model with the specified build settings
    source_model = StaticMeshSourceModel(
        BuildSettings=MeshBuildSettings(bRecomputeNormals=False,
                                        bRecomputeTangents=True,
                                        bUseMikkTSpace=True,
                                        bBuildAdjacencyBuffer=True,
                                        bRemoveDegenerates=True))
    # extract vertices, uvs and normals from the dae file
    # (numpy.ravel will flatten the arrays to a simple array of floats)
    triset = dae.geometries[0].primitives[0]
    self.vertices = numpy.ravel(triset.vertex[triset.vertex_index])
    # take the first uv channel (there could be multiple channels, like the one for lightmapping)
    self.uvs = numpy.ravel(triset.texcoordset[0][triset.texcoord_indexset[0]])
    self.normals = numpy.ravel(triset.normal[triset.normal_index])
    # fix mesh data
    self.FixMeshData()
    # create a new mesh; FRawMesh is an optimized wrapper exposed by the python plugin
    # (read: no reflection involved)
    mesh = FRawMesh()
    # assign vertices
    mesh.set_vertex_positions(self.vertices)
    # uvs are required
    mesh.set_wedge_tex_coords(self.uvs)
    # normals are optional
    mesh.set_wedge_tangent_z(self.normals)
    # assign indices (not optimized, just a running index over number of triangles * 3)
    mesh.set_wedge_indices(numpy.arange(0, len(triset) * 3))
    # assign the FRawMesh to the LOD0 (the model we created before)
    mesh.save_to_static_mesh_source_model(source_model)
    # assign LOD0 to the StaticMesh and build it
    static_mesh.SourceModels = [source_model]
    static_mesh.static_mesh_build()
    static_mesh.static_mesh_create_body_setup()
    static_mesh.StaticMaterials = [
        StaticMaterial(
            MaterialInterface=self.ImportOptions.DefaultMaterial,
            MaterialSlotName='Main')
    ]
    return static_mesh
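
# FixMeshData() is called above but not shown. A hedged sketch of what such a method
# might contain, assuming the COLLADA source is Y-up while Unreal expects Z-up; the
# axis swap and the winding note are assumptions for illustration, not the original
# importer's logic.
import numpy

def FixMeshData(self):
    verts = self.vertices.reshape((-1, 3)).copy()
    verts[:, [1, 2]] = verts[:, [2, 1]]   # swap Y and Z: Y-up -> Z-up (assumed)
    self.vertices = numpy.ravel(verts)

    norms = self.normals.reshape((-1, 3)).copy()
    norms[:, [1, 2]] = norms[:, [2, 1]]
    self.normals = numpy.ravel(norms)
    # a handedness flip would additionally require reversing triangle winding,
    # i.e. swapping two of the three wedge indices per triangle before
    # set_wedge_indices() is called
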