def fix_vertex(vtx):
    """Convert a single position vector from y-up to z-up."""
    return mathutils.Vector((vtx.x, vtx.z, -vtx.y))
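# Illustrative usage sketch (assumes "import mathutils" at module top; the
# sample coordinates are hypothetical, not from the original module):
#
#   v_yup = mathutils.Vector((1.0, 2.0, 3.0))
#   v_zup = fix_vertex(v_yup)   # -> Vector((1.0, 3.0, -2.0))
#
# i.e. (x, y, z) in y-up space maps to (x, z, -y) in Blender's z-up space.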
def approx_normals(self, context):
    obj_dst = context.active_object
    if context.selected_objects[0].name != obj_dst.name:
        obj_src = context.selected_objects[0]
    else:
        obj_src = context.selected_objects[1]

    mesh_src = obj_src.data
    mesh_dst = obj_dst.data

    src_loops_normals, src_vert_to_loops_map = load_loops_normals(obj_src)

    dst_vert_list = mesh_dst.vertices
    src_vert_list = mesh_src.vertices

    matr_src = obj_src.matrix_world
    matr_dst = obj_dst.matrix_world

    # remember old vertices positions
    old_dst_co_list = [0] * 3 * len(dst_vert_list)
    old_src_co_list = [0] * 3 * len(src_vert_list)
    for i in range(len(dst_vert_list)):
        old_dst_co_list[i * 3] = dst_vert_list[i].co[0]
        old_dst_co_list[i * 3 + 1] = dst_vert_list[i].co[1]
        old_dst_co_list[i * 3 + 2] = dst_vert_list[i].co[2]
    for i in range(len(src_vert_list)):
        old_src_co_list[i * 3] = src_vert_list[i].co[0]
        old_src_co_list[i * 3 + 1] = src_vert_list[i].co[1]
        old_src_co_list[i * 3 + 2] = src_vert_list[i].co[2]

    # transform vertices to world space
    for vert_dst in dst_vert_list:
        vert_dst.co = matr_dst * vert_dst.co
    for vert_src in src_vert_list:
        vert_src.co = matr_src * vert_src.co

    # approximate normals
    verts_is_selected = False
    for v in dst_vert_list:
        if v.select:
            verts_is_selected = True
            break

    for vert_dst in dst_vert_list:
        if not verts_is_selected or vert_dst.select:
            min_distance = 1E10
            min_index = -1
            for vert_src in src_vert_list:
                distance = sqrt(pow(vert_dst.co[0] - vert_src.co[0], 2)
                        + pow(vert_dst.co[1] - vert_src.co[1], 2)
                        + pow(vert_dst.co[2] - vert_src.co[2], 2))
                if distance < min_distance:
                    if vert_src.index in src_vert_to_loops_map:
                        # vertex must be connected
                        min_distance = distance
                        min_index = vert_src.index

            n = mathutils.Vector()
            for l in src_vert_to_loops_map[min_index]:
                n = n + mathutils.Vector(src_loops_normals[l])
            n = n / len(src_vert_to_loops_map[min_index])
            n = (matr_dst.to_quaternion().inverted()
                    * matr_src.to_quaternion()
                    * n)
            set_vertex_normal(vert_dst.index, (n.x, n.y, n.z))

    # reset destination mesh's vertices positions
    for vert_dst in dst_vert_list:
        vert_dst.co[0] = old_dst_co_list[vert_dst.index * 3]
        vert_dst.co[1] = old_dst_co_list[vert_dst.index * 3 + 1]
        vert_dst.co[2] = old_dst_co_list[vert_dst.index * 3 + 2]

    # reset source mesh's vertices positions
    for vert_src in src_vert_list:
        vert_src.co[0] = old_src_co_list[vert_src.index * 3]
        vert_src.co[1] = old_src_co_list[vert_src.index * 3 + 1]
        vert_src.co[2] = old_src_co_list[vert_src.index * 3 + 2]

    obj_dst.data.normals_split_custom_set(b4w_loops_normals)
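# Illustrative sketch, not part of the add-on: the nearest-source-vertex
# search above is brute force (O(n*m)); mathutils ships a KD-tree that can
# answer the same query faster. The helper name below is hypothetical.
import mathutils

def build_vertex_kdtree(vertices):
    """Index vertex positions for nearest-neighbour queries."""
    kd = mathutils.kdtree.KDTree(len(vertices))
    for i, v in enumerate(vertices):
        kd.insert(v.co, i)
    kd.balance()
    return kd

# kd.find(co) returns (position, index, distance) of the closest vertex:
#   co, index, dist = build_vertex_kdtree(mesh_src.vertices).find(vert_dst.co)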
def execute(self, context):
    # main rotation logic
    global rotation_helper_params

    # calculate a vector that points at the eye and depends on the center of rotation
    eye_n = mathutils.Vector(
            context.space_data.region_3d.view_matrix[2][:3])
    eye_co = (context.space_data.region_3d.view_location
            + eye_n * context.space_data.region_3d.view_distance)
    obimat = context.active_object.matrix_world.inverted()
    c_local = obimat * rotation_helper_params.c_world
    eye_co_local = obimat * eye_co
    n = eye_co_local - c_local
    n.normalize()

    if rotation_helper_params.degrees:
        if rotation_helper_params.constraint:
            matrix = mathutils.Quaternion(
                    bpy.context.active_object.matrix_world.inverted()
                    * mathutils.Vector(rotation_helper_params.constraint),
                    -radians(rotation_helper_params.degrees)).to_matrix()
        else:
            matrix = mathutils.Quaternion(
                    n, -radians(rotation_helper_params.degrees)).to_matrix()
    else:
        rotation_helper_params.mouse_world = self.calc_mouse_world(context)
        if not rotation_helper_params.mouse_world:
            return
        rotation_helper_params.mouse_local = (
                context.active_object.matrix_world.inverted()
                * rotation_helper_params.mouse_world)
        if not self.mouse_local_old:
            self.mouse_local_old = rotation_helper_params.mouse_local

        # calculate the projection of c_local onto the view plane
        c_pr = c_local + n * (
                n * (rotation_helper_params.mouse_local - c_local))

        # calculate the main vectors
        r_old = self.mouse_local_old - c_pr
        r = rotation_helper_params.mouse_local - c_pr

        # project the main vectors onto the plane orthogonal to the view vector
        r_pr = r - n * r * n
        r_old_pr = r_old - n * r_old * n
        r_pr.normalize()
        r_old_pr.normalize()

        # calculate the main rotation matrix
        matrix = r_old_pr.rotation_difference(r_pr).to_matrix()

        # correct the rotation matrix according to the constraint
        if rotation_helper_params.constraint:
            constraint = (context.active_object.matrix_world.inverted()
                    * mathutils.Vector(rotation_helper_params.constraint))
            if n * constraint < 0:
                n = -n
            matrix = n.rotation_difference(constraint).to_matrix() * matrix

    if context.window_manager.b4w_split:
        update_custom_normal(self, context, matrix, "rotate",
                self.init_loops_normals)
    else:
        update_custom_normal(self, context, matrix, "rotate",
                self.init_normals)
OutputFPS = 30                          # Specify desired output frame rate
FrameRatio = round(OutputFPS / FPS)     # Ratio of output to input frame rates
head = bpy.data.objects["HeaDRig"]      # Get handle for macaque avatar head
head2 = bpy.context.object
#bpy.ops.object.mode_set(mode='POSE')   # Enter pose mode

#============== Load reference movie to 3D plane
if LoadOrigMovie == 1:
    Scene = bpy.data.scenes['Scene']
    # Quick and dirty render!
    Scene.render.resolution_percentage = 50
    bpy.context.scene.cycles.samples = 10
    HeadAzimuths = [0]
    bpy.data.objects['Camera'].location = mu.Vector((-0.1, -1, 0))

    MovieFile = MovieDir + f[0:-3] + 'mpg'
    print('Loading ' + MovieFile)
    mc = bpy.data.movieclips.load(MovieFile)
    bpy.ops.import_image.to_plane(
            files=[{'name': os.path.basename(MovieFile)}],
            directory=os.path.dirname(MovieFile))
    bpy.data.objects[DataFile].rotation_euler = mu.Vector(
            (math.radians(90), 0, 0))
    bpy.data.objects[DataFile].location = mu.Vector((-0.2, 0, 0))
    bpy.data.objects[DataFile].scale = (0.15, 0.15, 0.15)
else:
def dot_mesh(ob,
             path,
             force_name=None,
             ignore_shape_animation=False,
             normals=True,
             isLOD=False,
             **kwargs):
    """
    Export the vertices of an object into a .mesh file.

    ob: the blender object
    path: the path to save the .mesh file to. path MUST exist
    force_name: force a different name for this .mesh
    kwargs:
      * material_prefix - string. (optional)
      * overwrite - bool. (optional) default False
    """
    obj_name = force_name or ob.data.name
    obj_name = clean_object_name(obj_name)
    target_file = os.path.join(path, '%s.mesh.xml' % obj_name)

    material_prefix = kwargs.get('material_prefix', '')
    overwrite = kwargs.get('overwrite', False)

    if os.path.isfile(target_file) and not overwrite:
        return []

    if not os.path.isdir(path):
        os.makedirs(path)

    start = time.time()

    # Blender per default does not calculate these. When querying the quads/tris
    # of the object, Blender would crash if calc_tessface was not updated.
    ob.data.update(calc_tessface=True)

    Report.meshes.append(obj_name)
    Report.faces += len(ob.data.tessfaces)
    Report.orig_vertices += len(ob.data.vertices)

    cleanup = False
    if ob.modifiers:
        cleanup = True
        copy = ob.copy()
        #bpy.context.scene.objects.link(copy)
        rem = []
        for mod in copy.modifiers:
            # remove armature and array modifiers before collapse
            if mod.type in 'ARMATURE ARRAY'.split():
                rem.append(mod)
        for mod in rem:
            copy.modifiers.remove(mod)
        # bake mesh
        mesh = copy.to_mesh(bpy.context.scene, True, "PREVIEW")  # collapse
    else:
        copy = ob
        mesh = ob.data

    if logging:
        print(' - Generating:', '%s.mesh.xml' % obj_name)

    try:
        with open(target_file, 'w') as f:
            f.flush()
    except Exception as e:
        show_dialog("Invalid mesh object name: " + obj_name)
        return

    with open(target_file, 'w') as f:
        doc = SimpleSaxWriter(f, 'mesh', {})

        # Very ugly, have to replace number of vertices later
        doc.start_tag('sharedgeometry',
                      {'vertexcount': '__TO_BE_REPLACED_VERTEX_COUNT__'})

        if logging:
            print(' - Writing shared geometry')

        doc.start_tag(
            'vertexbuffer', {
                'positions': 'true',
                'normals': 'true',
                'colours_diffuse': str(bool(mesh.vertex_colors)),
                'texture_coords': '%s' % len(mesh.uv_textures)
                                  if mesh.uv_textures.active else '0'
            })

        # Vertex colors
        vertex_color_lookup = VertexColorLookup(mesh)

        # Materials
        # saves tuples of material name and material obj (or None)
        materials = []
        # a material named 'vertex.color.<yourname>' will overwrite
        # the diffuse color in the mesh file!
        for mat in ob.data.materials:
            mat_name = "_missing_material_"
            if mat is not None:
                mat_name = mat.name
            mat_name = material_name(mat_name, prefix=material_prefix)
            extern = False
            if mat_name.startswith("extern."):
                mat_name = mat_name[len("extern."):]
                extern = True
            if mat:
                materials.append((mat_name, extern, mat))
            else:
                print('[WARNING:] Bad material data in', ob)
                materials.append(('_missing_material_', True, None))  # fixed dec22, keep proper index
        if not materials:
            materials.append(('_missing_material_', True, None))

        vertex_groups = {}
        material_faces = []
        for matidx, mat in enumerate(materials):
            material_faces.append([])

        # Textures
        dotextures = False
        uvcache = []  # should get a little speed boost by this cache
        if mesh.tessface_uv_textures.active:
            dotextures = True
            for layer in mesh.tessface_uv_textures:
                uvs = []
                uvcache.append(uvs)
                # layer contains: name, active, data
                for uvface in layer.data:
                    uvs.append((uvface.uv1, uvface.uv2, uvface.uv3, uvface.uv4))

        shared_vertices = {}
        _remap_verts_ = []
        _remap_normals_ = []
        _face_indices_ = []
        numverts = 0

        bm = None
        if mesh.has_custom_normals:
            mesh.calc_normals_split()
            # Create bmesh to help obtain custom vertex normals
            bm = bmesh.new()
            bm.from_mesh(mesh)
            bm.verts.ensure_lookup_table()

        for F in mesh.tessfaces:
            smooth = F.use_smooth
            faces = material_faces[F.material_index]
            # Ogre only supports triangles
            tris = []
            tris.append((F.vertices[0], F.vertices[1], F.vertices[2]))
            if len(F.vertices) >= 4:
                tris.append((F.vertices[0], F.vertices[2], F.vertices[3]))
            if dotextures:
                a = []
                b = []
                uvtris = [a, b]
                for layer in uvcache:
                    uv1, uv2, uv3, uv4 = layer[F.index]
                    a.append((uv1, uv2, uv3))
                    b.append((uv1, uv3, uv4))

            for tidx, tri in enumerate(tris):
                face = []
                for vidx, idx in enumerate(tri):
                    v = mesh.vertices[idx]

                    if smooth:
                        if mesh.has_custom_normals:
                            n = mathutils.Vector()
                            for loop in bm.verts[idx].link_loops:
                                n += mesh.loops[loop.index].normal
                            n.normalize()
                            nx, ny, nz = swap(n)
                        else:
                            nx, ny, nz = swap(v.normal)  # fixed june 17th 2011
                            n = mathutils.Vector([nx, ny, nz])
                    else:
                        nx, ny, nz = swap(F.normal)
                        n = mathutils.Vector([nx, ny, nz])

                    r, g, b, ra = vertex_color_lookup.get(F, idx)

                    # Texture maps
                    vert_uvs = []
                    if dotextures:
                        for layer in uvtris[tidx]:
                            vert_uvs.append(layer[vidx])
                    '''
                    Check if we already exported that vertex with same normal, do not export in that case,
                    (flat shading in blender seems to work with face normals, so we copy each flat face'
                    vertices, if this vertex with same normals was already exported,
                    todo: maybe not best solution, check other ways (let blender do all the work, or only
                    support smooth shading, what about seems, smoothing groups, materials, ...)
                    '''
                    vert = VertexNoPos(numverts, nx, ny, nz, r, g, b, ra, vert_uvs)
                    alreadyExported = False
                    if idx in shared_vertices:
                        for vert2 in shared_vertices[idx]:
                            # does not compare ogre_vidx (and position at the moment)
                            if vert == vert2:
                                face.append(vert2.ogre_vidx)
                                alreadyExported = True
                                #print(idx, numverts, nx, ny, nz, r, g, b, ra, vert_uvs, "already exported")
                                break
                        if not alreadyExported:
                            face.append(vert.ogre_vidx)
                            shared_vertices[idx].append(vert)
                            #print(numverts, nx, ny, nz, r, g, b, ra, vert_uvs, "appended")
                    else:
                        face.append(vert.ogre_vidx)
                        shared_vertices[idx] = [vert]
                        #print(idx, numverts, nx, ny, nz, r, g, b, ra, vert_uvs, "created")
                    if alreadyExported:
                        continue
                    numverts += 1
                    _remap_verts_.append(v)
                    _remap_normals_.append(n)
                    _face_indices_.append(F.index)

                    x, y, z = swap(v.co)  # xz-y is correct!
                    doc.start_tag('vertex', {})
                    doc.leaf_tag('position', {
                        'x': '%6f' % x,
                        'y': '%6f' % y,
                        'z': '%6f' % z
                    })
                    doc.leaf_tag('normal', {
                        'x': '%6f' % nx,
                        'y': '%6f' % ny,
                        'z': '%6f' % nz
                    })
                    if vertex_color_lookup.has_vcolors:
                        doc.leaf_tag(
                            'colour_diffuse',
                            {'value': '%6f %6f %6f %6f' % (r, g, b, ra)})

                    # Texture maps
                    if dotextures:
                        for uv in vert_uvs:
                            doc.leaf_tag('texcoord', {
                                'u': '%6f' % uv[0],
                                'v': '%6f' % (1.0 - uv[1])
                            })

                    doc.end_tag('vertex')

                append_triangle_in_vertex_group(mesh, ob, vertex_groups, face, tri)
                faces.append((face[0], face[1], face[2]))

        Report.vertices += numverts

        doc.end_tag('vertexbuffer')
        doc.end_tag('sharedgeometry')

        if logging:
            print(' Done at', timer_diff_str(start), "seconds")
            print(' - Writing submeshes')

        doc.start_tag('submeshes', {})
        for matidx, (mat_name, extern, mat) in enumerate(materials):
            if not len(material_faces[matidx]):
                Report.warnings.append(
                    'BAD SUBMESH "%s": material %r, has not been applied to any faces - not exporting as submesh.'
                    % (obj_name, mat_name))
                continue  # fixes corrupt unused materials

            submesh_attributes = {
                'usesharedvertices': 'true',
                # Maybe better look at index of all faces, if one over 65535 set to true;
                # Problem: we know it too late, postprocessing of file needed
                "use32bitindexes": str(bool(numverts > 65535)),
                "operationtype": "triangle_list"
            }
            if mat_name != "_missing_material_":
                submesh_attributes['material'] = mat_name

            doc.start_tag('submesh', submesh_attributes)
            doc.start_tag('faces', {'count': str(len(material_faces[matidx]))})
            for fidx, (v1, v2, v3) in enumerate(material_faces[matidx]):
                doc.leaf_tag('face', {
                    'v1': str(v1),
                    'v2': str(v2),
                    'v3': str(v3)
                })
            doc.end_tag('faces')
            doc.end_tag('submesh')
            Report.triangles += len(material_faces[matidx])

        for name, ogre_indices in vertex_groups.items():
            if len(ogre_indices) <= 0:
                continue
            submesh_attributes = {
                'usesharedvertices': 'true',
                "use32bitindexes": str(bool(numverts > 65535)),
                "operationtype": "triangle_list",
                "material": "none",
            }
            doc.start_tag('submesh', submesh_attributes)
            doc.start_tag('faces', {'count': len(ogre_indices)})
            for (v1, v2, v3) in ogre_indices:
                doc.leaf_tag('face', {
                    'v1': str(v1),
                    'v2': str(v2),
                    'v3': str(v3)
                })
            doc.end_tag('faces')
            doc.end_tag('submesh')

        del material_faces
        del shared_vertices
        doc.end_tag('submeshes')

        # Submesh names
        # todo: why is the submesh name taken from the material
        # when we have the blender object name available?
        doc.start_tag('submeshnames', {})
        for matidx, (mat_name, extern, mat) in enumerate(materials):
            doc.leaf_tag('submesh', {'name': mat_name, 'index': str(matidx)})
        idx = len(materials)
        for name in vertex_groups.keys():
            name = name[len('ogre.vertex.group.'):]
            doc.leaf_tag('submesh', {'name': name, 'index': idx})
            idx += 1
        doc.end_tag('submeshnames')

        if logging:
            print(' Done at', timer_diff_str(start), "seconds")

        # Generate lod levels
        if isLOD == False and ob.type == 'MESH' and config.get('lodLevels') > 0:
            lod_levels = config.get('lodLevels')
            lod_distance = config.get('lodDistance')
            lod_ratio = config.get('lodPercent') / 100.0
            lod_pre_mesh_count = len(bpy.data.meshes)

            # Cap lod levels to something sensible (what is it?)
            if lod_levels > 10:
                lod_levels = 10

            def activate_object(obj):
                bpy.ops.object.select_all(action='DESELECT')
                bpy.context.scene.objects.active = obj
                obj.select = True

            def duplicate_object(scene, name, copyobj):
                # Create new mesh
                mesh = bpy.data.meshes.new(name)
                # Create new object associated with the mesh
                ob_new = bpy.data.objects.new(name, mesh)
                # Copy data block from the old object into the new object
                ob_new.data = copyobj.data.copy()
                ob_new.location = copyobj.location
                ob_new.rotation_euler = copyobj.rotation_euler
                ob_new.scale = copyobj.scale
                # Link new object to the given scene and select it
                scene.objects.link(ob_new)
                ob_new.select = True
                return ob_new, mesh

            def delete_object(obj):
                activate_object(obj)
                bpy.ops.object.delete()

            # todo: Potential infinite recursion if creation fails?
            def get_or_create_modifier(obj, modifier_name):
                if obj.type != 'MESH':
                    return None
                # Find modifier
                for mod_iter in obj.modifiers:
                    if mod_iter.type == modifier_name:
                        return mod_iter
                # Not found? Create it and recurse
                activate_object(obj)
                bpy.ops.object.modifier_add(type=modifier_name)
                return get_or_create_modifier(obj, modifier_name)

            # Create a temporary duplicate
            ob_copy, ob_copy_mesh = duplicate_object(
                bpy.context.scene, obj_name + "_LOD_TEMP_COPY", ob)
            ob_copy_meshes = [ob_copy.data, ob_copy_mesh]

            # Activate clone for modifier manipulation
            decimate = get_or_create_modifier(ob_copy, 'DECIMATE')
            if decimate is not None:
                decimate.decimate_type = 'COLLAPSE'
                decimate.show_viewport = True
                decimate.show_render = True

                lod_generated = []
                lod_ratio_multiplier = 1.0 - lod_ratio
                lod_current_ratio = 1.0 * lod_ratio_multiplier
                lod_current_distance = lod_distance
                lod_current_vertice_count = len(mesh.vertices)
                lod_min_vertice_count = 12

                for level in range(lod_levels + 1)[1:]:
                    decimate.ratio = lod_current_ratio
                    lod_mesh = ob_copy.to_mesh(scene=bpy.context.scene,
                                               apply_modifiers=True,
                                               settings='PREVIEW')
                    ob_copy_meshes.append(lod_mesh)

                    # Check min vertice count and that the vertice count got reduced from last iteration
                    lod_mesh_vertices = len(lod_mesh.vertices)
                    if lod_mesh_vertices < lod_min_vertice_count:
                        print(' - LOD', level, 'vertice count',
                              lod_mesh_vertices, 'too small. Ignoring LOD.')
                        break
                    if lod_mesh_vertices >= lod_current_vertice_count:
                        print(' - LOD', level - 1, 'vertice count',
                              lod_mesh_vertices,
                              'cannot be decimated any longer. Ignoring LOD.')
                        break
                    # todo: should we check if the ratio gets too small? although its up to the user to configure from the export panel

                    lod_generated.append({
                        'level': level,
                        'distance': lod_current_distance,
                        'ratio': lod_current_ratio,
                        'mesh': lod_mesh
                    })
                    lod_current_distance += lod_distance
                    lod_current_vertice_count = lod_mesh_vertices
                    lod_current_ratio *= lod_ratio_multiplier

                # Create lod .mesh files and generate LOD XML to the original .mesh.xml
                if len(lod_generated) > 0:
                    # 'manual' means if the geometry gets loaded from a
                    # different file that this LOD list references
                    # NOTE: This is the approach at the moment. Another option would be to
                    # reference the same vertex indexes in the shared geometry. But the
                    # decimate approach wont work with this as it generates a fresh geometry.
                    doc.start_tag(
                        'levelofdetail',
                        {
                            'strategy': 'default',
                            # The main mesh is + 1 (kind of weird Ogre logic)
                            'numlevels': str(len(lod_generated) + 1),
                            'manual': "true"
                        })

                    print(' - Generating', len(lod_generated),
                          'LOD meshes. Original: vertices',
                          len(mesh.vertices), "faces", len(mesh.tessfaces))
                    for lod in lod_generated:
                        ratio_percent = round(lod['ratio'] * 100.0, 0)
                        print('   > Writing LOD', lod['level'],
                              'for distance', lod['distance'], 'and ratio',
                              str(ratio_percent) + "%", 'with',
                              len(lod['mesh'].vertices), 'vertices',
                              len(lod['mesh'].tessfaces), 'faces')

                        lod_ob_temp = bpy.data.objects.new(obj_name, lod['mesh'])
                        lod_ob_temp.data.name = obj_name + '_LOD_' + str(lod['level'])
                        dot_mesh(lod_ob_temp,
                                 path,
                                 lod_ob_temp.data.name,
                                 ignore_shape_animation,
                                 normals,
                                 isLOD=True)

                        # 'value' is the distance this LOD kicks in for the 'Distance' strategy.
                        doc.leaf_tag(
                            'lodmanual', {
                                'value': str(lod['distance']),
                                'meshname': lod_ob_temp.data.name + ".mesh"
                            })

                        # Delete temporary LOD object.
                        # The clone meshes will be deleted later.
                        lod_ob_temp.user_clear()
                        delete_object(lod_ob_temp)
                        del lod_ob_temp

                    doc.end_tag('levelofdetail')

            # Delete temporary LOD object
            delete_object(ob_copy)
            del ob_copy

            # Delete temporary data/mesh objects
            for mesh_iter in ob_copy_meshes:
                mesh_iter.user_clear()
                bpy.data.meshes.remove(mesh_iter)
                del mesh_iter
            ob_copy_meshes = []

            if lod_pre_mesh_count != len(bpy.data.meshes):
                print(' - WARNING: After LOD generation, cleanup failed to erase all temporary data!')

        arm = ob.find_armature()
        if arm:
            doc.leaf_tag('skeletonlink', {'name': '%s.skeleton' % obj_name})
            doc.start_tag('boneassignments', {})
            boneOutputEnableFromName = {}
            boneIndexFromName = {}
            for bone in arm.pose.bones:
                boneOutputEnableFromName[bone.name] = True
                if config.get('ONLY_DEFORMABLE_BONES'):
                    # if we found a deformable bone,
                    if bone.bone.use_deform:
                        # visit all ancestor bones and mark them "output enabled"
                        parBone = bone.parent
                        while parBone:
                            boneOutputEnableFromName[parBone.name] = True
                            parBone = parBone.parent
                    else:
                        # non-deformable bone, no output
                        boneOutputEnableFromName[bone.name] = False
            boneIndex = 0
            for bone in arm.pose.bones:
                boneIndexFromName[bone.name] = boneIndex
                if boneOutputEnableFromName[bone.name]:
                    boneIndex += 1

            badverts = 0
            for vidx, v in enumerate(_remap_verts_):
                check = 0
                for vgroup in v.groups:
                    if vgroup.weight > config.get('TRIM_BONE_WEIGHTS'):
                        groupIndex = vgroup.group
                        if groupIndex < len(copy.vertex_groups):
                            vg = copy.vertex_groups[groupIndex]
                            if vg.name in boneIndexFromName:  # allows other vertex groups, not just armature vertex groups
                                bnidx = boneIndexFromName[vg.name]  # find_bone_index(copy,arm,vgroup.group)
                                doc.leaf_tag(
                                    'vertexboneassignment', {
                                        'vertexindex': str(vidx),
                                        'boneindex': str(bnidx),
                                        'weight': '%6f' % vgroup.weight
                                    })
                                check += 1
                        else:
                            print('WARNING: object vertex groups not in sync with armature',
                                  copy, arm, groupIndex)
                if check > 4:
                    badverts += 1
                    print('WARNING: vertex %s is in more than 4 vertex groups (bone weights)\n(this maybe Ogre incompatible)' % vidx)
            if badverts:
                Report.warnings.append(
                    '%s has %s vertices weighted to too many bones (Ogre limits a vertex to 4 bones)\n[try increasing the Trim-Weights threshold option]'
                    % (mesh.name, badverts))
            doc.end_tag('boneassignments')

        # Updated June3 2011 - shape animation works
        if config.get('SHAPE_ANIM') and ob.data.shape_keys and len(ob.data.shape_keys.key_blocks):
            print(' - Writing shape keys')

            doc.start_tag('poses', {})
            for sidx, skey in enumerate(ob.data.shape_keys.key_blocks):
                if sidx == 0:
                    continue
                if len(skey.data) != len(mesh.vertices):
                    failure = 'FAILED to save shape animation - you can not use a modifier that changes the vertex count! '
                    failure += '[ mesh : %s ]' % mesh.name
                    Report.warnings.append(failure)
                    print(failure)
                    break

                doc.start_tag(
                    'pose',
                    {
                        'name': skey.name,
                        # If target is 'mesh', no index needed, if target is submesh then submesh identified by 'index'
                        #'index' : str(sidx-1),
                        #'index' : '0',
                        'target': 'mesh'
                    })

                snormals = None
                if config.get('SHAPE_NORMALS'):
                    if smooth:
                        snormals = skey.normals_vertex_get()
                    else:
                        snormals = skey.normals_polygon_get()

                for vidx, v in enumerate(_remap_verts_):
                    pv = skey.data[v.index]
                    x, y, z = swap(pv.co - v.co)

                    if config.get('SHAPE_NORMALS'):
                        n = _remap_normals_[vidx]
                        if smooth:
                            pn = mathutils.Vector([
                                snormals[v.index * 3],
                                snormals[v.index * 3 + 1],
                                snormals[v.index * 3 + 2]
                            ])
                        else:
                            vindex = _face_indices_[vidx]
                            pn = mathutils.Vector([
                                snormals[vindex * 3],
                                snormals[vindex * 3 + 1],
                                snormals[vindex * 3 + 2]
                            ])
                        nx, ny, nz = swap(pn - n)

                    #for i, p in enumerate(skey.data):
                    #    x, y, z = p.co - ob.data.vertices[i].co
                    #    x, y, z = swap(ob.data.vertices[i].co - p.co)
                    #    if x == .0 and y == .0 and z == .0:
                    #        continue  # the older exporter optimized this way, is it safe?
                    if config.get('SHAPE_NORMALS'):
                        doc.leaf_tag(
                            'poseoffset',
                            {
                                'x': '%6f' % x,
                                'y': '%6f' % y,
                                'z': '%6f' % z,
                                'nx': '%6f' % nx,
                                'ny': '%6f' % ny,
                                'nz': '%6f' % nz,
                                'index': str(vidx)  # is this required?
                            })
                    else:
                        doc.leaf_tag(
                            'poseoffset',
                            {
                                'x': '%6f' % x,
                                'y': '%6f' % y,
                                'z': '%6f' % z,
                                'index': str(vidx)  # is this required?
                            })
                doc.end_tag('pose')
            doc.end_tag('poses')

            if logging:
                print(' Done at', timer_diff_str(start), "seconds")

            if ob.data.shape_keys.animation_data and len(ob.data.shape_keys.animation_data.nla_tracks):
                print(' - Writing shape animations')
                doc.start_tag('animations', {})
                _fps = float(bpy.context.scene.render.fps)
                for nla in ob.data.shape_keys.animation_data.nla_tracks:
                    for idx, strip in enumerate(nla.strips):
                        doc.start_tag(
                            'animation', {
                                'name': strip.name,
                                'length': str((strip.frame_end - strip.frame_start) / _fps)
                            })
                        doc.start_tag('tracks', {})
                        doc.start_tag(
                            'track',
                            {
                                'type': 'pose',
                                'target': 'mesh'
                                # If target is 'mesh', no index needed, if target is submesh then submesh identified by 'index'
                                #'index' : str(idx)
                                #'index' : '0'
                            })
                        doc.start_tag('keyframes', {})
                        for frame in range(int(strip.frame_start),
                                           int(strip.frame_end) + 1,
                                           bpy.context.scene.frame_step):  # thanks to Vesa
                            bpy.context.scene.frame_set(frame)
                            doc.start_tag(
                                'keyframe',
                                {'time': str((frame - strip.frame_start) / _fps)})
                            for sidx, skey in enumerate(ob.data.shape_keys.key_blocks):
                                if sidx == 0:
                                    continue
                                doc.leaf_tag(
                                    'poseref', {
                                        'poseindex': str(sidx - 1),
                                        'influence': str(skey.value)
                                    })
                            doc.end_tag('keyframe')
                        doc.end_tag('keyframes')
                        doc.end_tag('track')
                        doc.end_tag('tracks')
                        doc.end_tag('animation')
                doc.end_tag('animations')
                print(' Done at', timer_diff_str(start), "seconds")

        ## Clean up and save
        #bpy.context.scene.meshes.unlink(mesh)
        if cleanup:
            #bpy.context.scene.objects.unlink(copy)
            copy.user_clear()
            bpy.data.objects.remove(copy)
            mesh.user_clear()
            bpy.data.meshes.remove(mesh)
            del copy
            del mesh
        del _remap_verts_
        del _remap_normals_
        del _face_indices_
        del uvcache

        doc.close()  # reported by Reyn
        f.close()

    if logging:
        print(' - Created .mesh.xml at', timer_diff_str(start), "seconds")

    # todo: Very ugly, find better way
    def replaceInplace(f, searchExp, replaceExp):
        import fileinput
        for line in fileinput.input(f, inplace=1):
            if searchExp in line:
                line = line.replace(searchExp, replaceExp)
            sys.stdout.write(line)
        fileinput.close()  # reported by jakob

    replaceInplace(target_file, '__TO_BE_REPLACED_VERTEX_COUNT__' + '"',
                   str(numverts) + '"')  #+ ' ' * (ls - lr))
    del replaceInplace

    # Start .mesh.xml to .mesh conversion tool
    util.xml_convert(target_file, has_uvs=dotextures)

    # Note that exporting the skeleton does not happen here anymore;
    # it moved to the function dot_skeleton in its own module.

    mats = []
    for mat_name, extern, mat in materials:
        # _missing_material_ is marked as extern
        if not extern:
            mats.append(mat_name)
        else:
            print("extern material", mat_name)

    logging.info(' - Created .mesh in total time %s seconds',
                 timer_diff_str(start))

    return mats
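# Illustrative usage sketch (not part of the exporter itself): export the
# active object to a hypothetical, pre-existing directory. The guard keeps
# this from running on module import.
if __name__ == "__main__":
    import bpy
    exported_materials = dot_mesh(bpy.context.active_object,
                                  '/tmp/ogre_export', overwrite=True)
    print('materials used:', exported_materials)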
def get_view_direction_by_rot_matrix(view_rotation):
    direction = view_rotation @ mathutils.Vector((0, 0, -1))
    return direction.normalized()
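# Illustrative usage sketch (not from the original file): region_3d is
# assumed to be a 3D viewport's RegionView3D; its view_rotation quaternion
# can be passed directly, since mathutils supports Quaternion @ Vector.
#
#   region_3d = bpy.context.space_data.region_3d
#   view_dir = get_view_direction_by_rot_matrix(region_3d.view_rotation)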
def parse_brush(model, bsp, brush_id, import_settings):
    brush = bsp.lumps["brushes"].data[brush_id]
    shader = bsp.lumps["shaders"].data[brush.texture].name + ".brush"
    if shader not in model.material_names:
        model.material_names.append(shader)
    brush_shader_id = model.material_names.index(shader)

    planes = []
    brush_materials = []
    for side in range(brush.n_brushsides):
        brushside = bsp.lumps["brushsides"].data[brush.brushside + side]
        plane = bsp.lumps["planes"].data[brushside.plane]
        normal = mathutils.Vector(plane.normal)
        position = normal * plane.distance

        shader = bsp.lumps["shaders"].data[brushside.texture].name + ".brush"
        if shader not in model.material_names:
            model.material_names.append(shader)
        if shader not in brush_materials:
            brush_materials.append(shader)
        mat_id = brush_materials.index(shader)

        planes.append([position, normal, mat_id])

    me = bpy.data.meshes.new("Brush " + str(brush_id).zfill(4))
    for texture_instance in brush_materials:
        mat = bpy.data.materials.get(texture_instance)
        if mat is None:
            mat = bpy.data.materials.new(name=texture_instance)
        me.materials.append(mat)

    bm = bmesh.new()
    bm.from_mesh(me)
    bmesh.ops.create_cube(bm, size=65536, calc_uvs=True)
    uv_layer = bm.loops.layers.uv.verify()

    # bmesh bisect
    # face from bisect + assign shader
    for plane in planes:
        geom = bm.verts[:] + bm.edges[:] + bm.faces[:]
        vert_dict = bmesh.ops.bisect_plane(bm,
                                           geom=geom,
                                           dist=0.1,
                                           plane_co=plane[0],
                                           plane_no=plane[1],
                                           clear_outer=True)
        bm_faces = bmesh.ops.contextual_create(bm,
                                               geom=vert_dict["geom_cut"],
                                               mat_nr=plane[2],
                                               use_smooth=False)["faces"]
        # if mat_nr actually worked, this wouldn't be needed :/
        for f in bm_faces:
            f.material_index = plane[2]

    bm.verts.ensure_lookup_table()
    bm.verts.index_update()
    bm.verts.sort()
    bm.faces.ensure_lookup_table()
    bm.faces.index_update()
    bm.faces.sort()
    bm.to_mesh(me)

    if import_settings.preset == "BRUSHES":
        collection = bpy.data.collections.get("Brushes")
        if collection is None:
            collection = bpy.data.collections.new("Brushes")
            bpy.context.scene.collection.children.link(collection)
        obj = bpy.data.objects.new("Brush " + str(brush_id).zfill(4), me)
        obj.cycles_visibility.camera = False
        #obj.cycles_visibility.diffuse = False
        obj.cycles_visibility.glossy = False
        obj.cycles_visibility.transmission = False
        obj.cycles_visibility.scatter = False
        bpy.data.collections["Brushes"].objects.link(obj)
        return

    vert_mapping = [-2 for i in range(len(bm.verts))]
    for vert in bm.verts:
        vert_mapping[vert.index] = model.current_index
        model.vertices.append(vert.co.copy())
        model.normals.append(vert.normal.copy())
        model.vertex_bsp_indices.append(-999)
        model.current_index += 1

    for face in bm.faces:
        indices = []
        tcs = []
        lmtcs = []
        colors = []
        for vert, loop in zip(face.verts, face.loops):
            indices.append(vert_mapping[vert.index])
            tcs.append(loop[uv_layer].uv.copy())
            lmtcs.append([0.0, 0.0])
            colors.append([0.4, 0.4, 0.4, 1.0])

        material_index = brush_shader_id
        model.face_materials.append(material_index)
        model.face_vertices.append(indices)
        model.face_tcs.append(tcs)
        model.face_lm1_tcs.append(lmtcs)
        model.face_vert_color.append(colors)
        model.face_vert_alpha.append(colors)
        if model.lightmaps > 1:
            model.face_lm2_tcs.append(lmtcs)
            model.face_lm3_tcs.append(lmtcs)
            model.face_lm4_tcs.append(lmtcs)
            model.face_vert_color2.append(colors)
            model.face_vert_color3.append(colors)
            model.face_vert_color4.append(colors)

    bm.free()
def bmEdgeSeqToTupleList(edgeSeq):
    return [
        (edge.index,
         reduce(lambda acc, vert: acc + vert.co, edge.verts,
                mathutils.Vector((0.0, 0.0, 0.0))) / len(edge.verts))
        for edge in edgeSeq
    ]
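# Illustrative usage sketch (not part of the original module): the helper
# returns (edge index, edge midpoint) pairs, since averaging an edge's two
# vertex coordinates yields its midpoint. Requires "from functools import
# reduce" and "import mathutils" at module top.
if __name__ == "__main__":
    import bmesh
    bm = bmesh.new()
    bmesh.ops.create_cube(bm, size=2.0)
    for index, midpoint in bmEdgeSeqToTupleList(bm.edges):
        print(index, midpoint)
    bm.free()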
def process_leaf(obj, l_id):
    bm = bmesh.new()
    bm.from_mesh(obj.data)

    step = 0.0
    num_seg = 256
    grow_v = mathutils.Vector((0, 0, 1))
    lat_v = mathutils.Vector((0, 1, 0))
    n = lat_v.cross(grow_v).normalized()

    bm.verts.ensure_lookup_table()
    for v in bm.verts:
        proj = v.co.dot(grow_v)
        if proj > step:
            step = proj
    step = step / (num_seg - 1)

    seg_vals = [[0] * 2 for _ in range(num_seg)]
    bm.edges.ensure_lookup_table()

    def calculate_seg_vals(v0, v1):
        v0_proj_len = v0.dot(grow_v)
        v1_proj_len = v1.dot(grow_v)
        if v0_proj_len > v1_proj_len:
            v0, v1 = v1, v0
            v0_proj_len, v1_proj_len = v1_proj_len, v0_proj_len
        on_left = n0.dot(n) < 0
        v0_seg_idx = math.ceil(v0_proj_len / step)
        v1_seg_idx = math.ceil(v1_proj_len / step)
        if v1_seg_idx == (num_seg - 1):
            v1_seg_idx += 1
        if v0_seg_idx == v1_seg_idx:
            return
        for idx in range(v0_seg_idx, v1_seg_idx):
            if v1_proj_len - v0_proj_len != 0:
                vt_lon_len = idx * step
                vt_lon = grow_v * vt_lon_len
                t = (vt_lon_len - v0_proj_len) / (v1_proj_len - v0_proj_len)
                vt = (1.0 - t) * v0 + t * v1
                lat_vt = vt - vt_lon
                if on_left:
                    if seg_vals[idx][1] > -lat_vt.length:
                        seg_vals[idx][1] = -lat_vt.length
                else:
                    if seg_vals[idx][0] < lat_vt.length:
                        seg_vals[idx][0] = lat_vt.length

    for e in bm.edges:
        v0 = e.verts[0].co
        v1 = e.verts[1].co
        n0 = v0.cross(grow_v).normalized()
        n1 = v1.cross(grow_v).normalized()
        if n0.dot(n1) < 0:
            v0_lon = grow_v * v0.dot(grow_v)
            v1_lon = grow_v * v1.dot(grow_v)
            v0_lat = v0 - v0_lon
            v1_lat = v1 - v1_lon
            t = v0_lat.length / (v0_lat.length + v1_lat.length)
            v3 = (1.0 - t) * v0 + t * v1
            calculate_seg_vals(v0, v3)
            calculate_seg_vals(v1, v3)
        else:
            calculate_seg_vals(v0, v1)

    #generate_envelope(obj, step, num_seg, seg_vals)

    ids_cl = bm.loops.layers.color["veg_ids"]
    lon_w_cl = bm.loops.layers.color["veg_lon_weights"]
    lat_w_cl = bm.loops.layers.color["veg_lat_weights"]

    max_lon = 0.0
    max_lat = 0.0
    for val in seg_vals:
        if max_lat < abs(val[0]):
            max_lat = abs(val[0])
        if max_lat < abs(val[1]):
            max_lat = abs(val[1])
    for f in bm.faces:
        for l in f.loops:
            lv = l.vert.co
            lv_lon_len = lv.dot(grow_v)
            if lv_lon_len > max_lon:
                max_lon = lv_lon_len

    lat_threshold = max_lat * 0.1
    max_lat *= 0.5
    front_max_lon = max_lon - max_lat

    bm.faces.ensure_lookup_table()
    for f in bm.faces:
        for l in f.loops:
            lv = l.vert.co
            on_left = lv.cross(grow_v).normalized().dot(n) < 0
            lv_lon_len = lv.dot(grow_v)
            lv_lon = grow_v * lv.dot(grow_v)
            t = lv_lon_len / step
            i = math.floor(t)
            t -= i
            max_lat_r = 0.0
            max_lat_l = 0.0
            if i == num_seg - 1:
                max_lat_r = (1.0 - t) * seg_vals[i][0]
                max_lat_l = (1.0 - t) * seg_vals[i][1]
            else:
                max_lat_r = (1.0 - t) * seg_vals[i][0] + t * seg_vals[i + 1][0]
                max_lat_l = (1.0 - t) * seg_vals[i][1] + t * seg_vals[i + 1][1]
            lv_lat = lv - lv_lon
            lat_val = lv_lat.length
            if lat_val < lat_threshold:
                lat_val = 0.0
            else:
                if on_left:
                    lat_val = -lat_val
                lat_val = abs(((lat_val - max_lat_l) / (max_lat_r - max_lat_l)) * 2.0 - 1.0)
            if lv_lon_len > front_max_lon:
                lat_val2 = (lv_lon_len - front_max_lon) / max_lat
                lat_val = max(lat_val, lat_val2)
            l[ids_cl] = (l[ids_cl][0], l[ids_cl][1], l_id / 255.0)
            l[lon_w_cl] = (l[lon_w_cl][0], l[lon_w_cl][1], lv_lon_len / max_lon)
            l[lat_w_cl] = (l[lat_w_cl][0], l[lat_w_cl][1], lat_val)

    bm.to_mesh(obj.data)
    bm.free()
def save(operator,
         context,
         filepath='',
         author_name="HENDRIX",
         export_materials=True,
         create_lods=False,
         fix_root_bones=False,
         numlods=1,
         rate=1):
    if create_lods:
        print('Adding LODs...')
        from . import batch_bfb
        batch_bfb.add_lods(numlods, rate)
    print('Exporting', filepath, '...')

    global errors
    errors = []

    # just in case - probably not needed
    # make sure there is an active object - is it needed?
    try:
        bpy.ops.object.mode_set(mode="OBJECT")
    except:
        bpy.context.scene.objects.active = bpy.context.scene.objects[0]
        bpy.ops.object.mode_set(mode="OBJECT")

    global write_materials
    write_materials = export_materials
    global dirname
    dirname = os.path.dirname(filepath)

    armature_bytes = b""
    # if one model uses an armature, all have to. If they don't, they can't be exported.
    has_armature = False
    global ob_2_block
    ob_2_block = {}
    global ob_2_weight_bytes
    ob_2_weight_bytes = {}
    global ob_2_fx_wind
    ob_2_fx_wind = {}
    global ob_2_id
    ob_2_id = {}
    ob_2_meshOffset = {}
    BFRVertex_2_meshData = {}
    global stream
    stream = b''
    starttime = time.clock()

    print('Generating block IDs for objects in scene...')
    # keep track of objects without a parent; if there is more than one, add an Auto Root
    roots = []
    for ob in bpy.context.scene.objects:
        if type(ob.data) in (type(None), bpy.types.Armature, bpy.types.Mesh):
            if ob.parent == None:
                roots.append(ob)
    if len(roots) == 1:
        root = roots[0]
        if root.name.startswith("lodgroup"):
            print('Warning! Lodgroup must not be root! Created an Auto Root empty!')
            root = create_empty(None, 'Auto Root', mathutils.Matrix())
            roots[0].parent = root
    else:
        print('Warning! Found more than one root object! Created an Auto Root empty!')
        root = create_empty(None, 'Auto Root', mathutils.Matrix())
        for ob in roots:
            ob.parent = root

    identity = mathutils.Matrix()
    blockcount = 0
    ID = 1
    for ob in bpy.context.scene.objects:
        ob_2_id[ob] = ID
        ID += 1
        if type(ob.data) == bpy.types.Mesh:
            # note that this is not the final blockcount, as every mesh data also gets counted
            blockcount += 1
            if ob.find_armature():
                apply_transform(ob)
                has_armature = True
            # fix meshes parented to a bone by adding vgroups
            if ob.parent_type == "BONE" and not ob.name.startswith('capsule'):
                log_error(ob.name + " was parented to a bone, which is not supported by BFBs. This has been fixed for you.")
                bonename = ob.parent_bone
                ob.vertex_groups.new(bonename)
                try:
                    ob.data.transform(ob.parent.data.bones[bonename].matrix_local)
                except:
                    pass
                ob.vertex_groups[bonename].add(range(len(ob.data.vertices)), 1.0, 'REPLACE')
                ob.parent_type = "OBJECT"
                bpy.context.scene.update()
                # apply again just to be sure
                apply_transform(ob)

    print('Gathering mesh data...')
    # get all objects, meshData, meshes + skeletons and collisions
    for ob in bpy.context.scene.objects:
        if type(ob.data) == bpy.types.Mesh:
            if ob.name.startswith('capsule'):
                stream += export_capsule(ob, 88 + len(stream))
            elif ob.name.startswith('sphere'):
                stream += export_sphere(ob, 88 + len(stream))
            elif ob.name.startswith('orientedbox'):
                stream += export_bounding_box(ob, 88 + len(stream))
            else:
                # export the armature if not already done for a previous mesh
                armature = ob.find_armature()
                # we have an armature on one mesh, so we can't export meshes without an armature
                if has_armature and not armature:
                    log_error(ob.name + " is not exported because it does not use an armature while other models do.")
                    continue
                if has_armature and not armature_bytes:
                    for pbone in armature.pose.bones:
                        pbone.matrix_basis = mathutils.Matrix()
                    bones = armature.data.bones.values()
                    # todo: calculate this value properly, refer values from other objects
                    lodgroup = -1
                    root_bones = [bone for bone in bones if not bone.parent]
                    # fatal
                    if len(root_bones) > 1:
                        if fix_root_bones:
                            # determine the proper root
                            root_bone = root_bones[0]
                            for bone in root_bones:
                                if bone.name == "Bip01":
                                    root_bone = bone
                                    break
                            bpy.context.scene.objects.active = armature
                            bpy.ops.object.mode_set(mode='EDIT')
                            # delete the other root bones
                            for bone in root_bones:
                                if bone != root_bone:
                                    e_bone = armature.data.edit_bones[bone.name]
                                    armature.data.edit_bones.remove(e_bone)
                                    print("Removed", bone.name, "because it is a superfluous root bone")
                            bpy.ops.object.mode_set(mode='OBJECT')
                            # update the bones list
                            bones = armature.data.bones.values()
                        else:
                            log_error(armature.name + " has more than one root bone. Remove all other root bones so that only Bip01 remains. This usually means: Bake and export your animations and then remove all control bones before you export the model.")
                            return errors
                    # locate rest scale action
                    if "!scale!" in bpy.data.actions:
                        rest_scale = bpy.data.actions["!scale!"]
                        # we have to apply the scale dummy action
                        armature.animation_data.action = rest_scale
                        bpy.context.scene.frame_set(0)
                    else:
                        log_error("Rest scale action is missing, assuming rest scale of 1.0 for all bones!")
                        rest_scale = None
                    # export bones
                    for bone in bones:
                        boneid = bones.index(bone) + 1
                        if bone.parent:
                            parentid = bones.index(bone.parent) + 1
                        else:
                            parentid = 0
                        # new rest scale support
                        try:
                            group = rest_scale.groups[bone.name]
                            scales = [fcurve for fcurve in group.channels
                                      if fcurve.data_path.endswith("scale")]
                            scale = scales[0].keyframe_points[0].co[1]
                        except:
                            scale = 1.0
                        mat = mathutils.Matrix.Scale(scale, 4) * get_bfb_matrix(bone)
                        armature_bytes += pack(
                            '<bbb 64s 16f', boneid, parentid, lodgroup,
                            blendername_to_bfbname(bone.name).lower().encode('utf-8'),
                            *flatten(mat))
                # remove unneeded modifiers
                for mod in ob.modifiers:
                    if mod.type in ('TRIANGULATE', ):
                        ob.modifiers.remove(mod)
                ob.modifiers.new('Triangulate', 'TRIANGULATE')
                # make a copy with all modifiers applied - I think there was another way to do it too
                me = ob.to_mesh(bpy.context.scene, True, "PREVIEW", calc_tessface=False)
                if len(me.vertices) == 0:
                    log_error(ob.name + " has no vertices. Delete the object and export again.")
                    return errors
                # need this?
                me.calc_normals_split()
                mesh_vertices = []
                mesh_triangles = []
                # used to ignore the normals for checking equality
                dummy_vertices = []
                weights_bytes = b''
                bfb_col = b''
                if 'fx_wind' in ob.vertex_groups:
                    weight_group_index = ob.vertex_groups['fx_wind'].index
                    ob_2_fx_wind[ob] = "_wind"
                    # this is for some shaders to make sure the decal set uses the UV1
                    if len(me.uv_layers) > 1:
                        ob_2_fx_wind[ob] += "_uv11"
                # use this to look up the index of the uv layer
                # this is a little faster than
                BFRVertex = 'PN'
                if me.vertex_colors:
                    if len(me.vertex_colors) == 1:
                        log_error('Mesh ' + me.name + ' has 1 vertex color layer, must be either 0 or 2 (RGB and AAA)')
                        return errors
                    BFRVertex += 'D'
                for i in range(0, len(me.uv_layers)):
                    if 'fx_wind' in ob.vertex_groups:
                        BFRVertex += 'T3' + str(i)
                    else:
                        BFRVertex += 'T' + str(i)
                # select all verts without weights
                unweighted_vertices = []
                for polygon in me.polygons:
                    tri = []
                    for loop_index in polygon.loop_indices:
                        vertex_index = me.loops[loop_index].vertex_index
                        co = me.vertices[vertex_index].co
                        no = me.loops[loop_index].normal
                        bfb_vertex = pack('<3f', co.x, co.y, co.z)
                        bfb_normal = pack('<3f', no.x, no.y, no.z)
                        if me.vertex_colors:
                            bfb_col = pack(
                                '<4B',
                                int(me.vertex_colors[0].data[loop_index].color.b * 255),
                                int(me.vertex_colors[0].data[loop_index].color.g * 255),
                                int(me.vertex_colors[0].data[loop_index].color.r * 255),
                                int(me.vertex_colors[1].data[loop_index].color.b * 255))
                        bfb_uv = b''
                        if 'T3' in BFRVertex:
                            try:
                                weight = me.vertices[vertex_index].groups[weight_group_index].weight
                            except:
                                weight = 0
                        for uv_layer in me.uv_layers:
                            if 'T3' in BFRVertex:
                                bfb_uv += pack('<3f',
                                               uv_layer.data[loop_index].uv.x,
                                               1 - uv_layer.data[loop_index].uv.y,
                                               weight)
                            else:
                                bfb_uv += pack('<2f',
                                               uv_layer.data[loop_index].uv.x,
                                               1 - uv_layer.data[loop_index].uv.y)
                        # we have to add new verts also if the UV is different!
                        if bfb_vertex + bfb_uv not in dummy_vertices:
                            dummy_vertices.append(bfb_vertex + bfb_uv)
                            mesh_vertices.append(bfb_vertex + bfb_normal + bfb_col + bfb_uv)
                            if armature_bytes:
                                w = []
                                bones = armature.data.bones.keys()
                                for vertex_group in me.vertices[vertex_index].groups:
                                    # dummy vertex groups without corresponding bones
                                    try:
                                        w.append((bones.index(ob.vertex_groups[vertex_group.group].name),
                                                  vertex_group.weight))
                                    except:
                                        pass
                                w_s = sorted(w, key=lambda x: x[1], reverse=True)[0:4]
                                # pad the weight list to 4 bones, ie. add empty bones if missing
                                for i in range(0, 4 - len(w_s)):
                                    w_s.append((-1, 0))
                                sw = w_s[0][1] + w_s[1][1] + w_s[2][1] + w_s[3][1]
                                if sw > 0.0:
                                    weights_bytes += pack(
                                        '<4b 3f', w_s[0][0], w_s[1][0], w_s[2][0], w_s[3][0],
                                        w_s[0][1] / sw, w_s[1][1] / sw, w_s[2][1] / sw)
                                elif vertex_index not in unweighted_vertices:
                                    unweighted_vertices.append(vertex_index)
                        tri.append(dummy_vertices.index(bfb_vertex + bfb_uv))
                    mesh_triangles.append(pack('<3H', *tri))
                if armature_bytes:
                    ob_2_weight_bytes[ob] = weights_bytes
                if unweighted_vertices:
                    log_error('Found ' + str(len(unweighted_vertices)) + ' unweighted vertices in ' + ob.name + '! Add them to vertex groups!')
                    return errors
                # does a mesh of this type already exist?
                if BFRVertex not in BFRVertex_2_meshData:
                    BFRVertex_2_meshData[BFRVertex] = ([], [], [])
                BFRVertex_2_meshData[BFRVertex][0].append(ob)
                BFRVertex_2_meshData[BFRVertex][1].append(mesh_vertices)
                BFRVertex_2_meshData[BFRVertex][2].append(mesh_triangles)

    # 1) create a meshData block for every vertex type we have
    # 2) merge all meshes that use the same vertex type
    # 3) get the counts for creating separate mesh blocks in the next loop
    # 4) increment blockcount + ID for each meshData
    for BFRVertex, (obs, vertex_lists, triangle_lists) in BFRVertex_2_meshData.items():
        ID += 1
        blockcount += 1
        print('Assigned meshID', ID, 'to BFRVertex' + BFRVertex)
        num_all_vertices = 0
        num_all_triangles = 0
        bytes_vertices = b''
        bytes_triangles = b''
        for ob, vertex_list, triangle_list in zip(obs, vertex_lists, triangle_lists):
            num_vertices = len(vertex_list)
            num_triangles = len(triangle_list)
            bytes_vertices += b''.join(vertex_list)
            bytes_triangles += b''.join(triangle_list)
            ob_2_meshOffset[ob] = (ID, num_all_vertices, num_vertices,
                                   num_all_triangles, num_triangles)
            num_all_vertices += num_vertices
            num_all_triangles += num_triangles
            len_vert = len(vertex_list[0])
        # write the meshData block
        stream += pack(
            '<i 2h i 64s B 64s 2i', ID, 6, -32768,
            len(stream) + 242 + num_all_vertices * len_vert + num_all_triangles * 6,
            b'meshData', 8, ('BFRVertex' + BFRVertex).encode('utf-8'),
            len_vert, num_all_vertices) + bytes_vertices + pack(
                '<B i', 2, num_all_triangles * 3) + bytes_triangles

    # write the mesh blocks
    for ob in ob_2_meshOffset:
        meshDataID, start_vertices, num_vertices, start_triangles, num_triangles = ob_2_meshOffset[ob]
        me = ob.data
        center = mathutils.Vector()
        for v in me.vertices:
            center += v.co
        center /= len(me.vertices)
        radius = max([(v.co - center).length for v in me.vertices])
        if ob in ob_2_weight_bytes:
            weights_bytes = ob_2_weight_bytes[ob]
            typeid = 8
            len_weights = len(armature_bytes) + len(weights_bytes) + 8
        else:
            typeid = 5
            len_weights = 0
        stream += pack('<i 2h i 64s B 7i 4f', ob_2_id[ob], typeid, -32768,
                       len(stream) + 209 + len_weights, b'mesh', 0,
                       meshDataID, 1, start_triangles * 3, num_triangles * 3,
                       start_vertices, num_vertices, num_triangles, *center,
                       radius)
        if ob in ob_2_weight_bytes:
            stream += pack('<2i',
                           len(armature_bytes) // 131,
                           len(weights_bytes) // 16) + armature_bytes + weights_bytes

    stream = pack('<8s l l 64s i i', b'BFB!*000', 131073, 1,
                  author_name.encode('utf-8'), blockcount, blockcount) + stream
    stream += write_linked_list(root, len(stream))

    if not os.path.exists(dirname):
        os.makedirs(dirname)
    f = open(filepath, 'wb')
    f.write(stream)
    f.close()
    print('Finished BFB Export in %.2f seconds' % (time.clock() - starttime))
    return errors
import bge
import mathutils

cont = bge.logic.getCurrentController()
velocityVect = cont.owner
scene = bge.logic.getCurrentScene()
fighter = scene.objects['FighterJet']

# camera position
velocityVect.worldPosition = fighter.worldPosition
v = mathutils.Vector((fighter['vx'], fighter['vy'], fighter['vz']))
velocityVect.alignAxisToVect(v, 0, 1.0)
velocityVect.localScale = [fighter['speed'] / 100, fighter['speed'] / 100, 1]
def read_mesh(mesh_template, materials, armature):
    name = mesh_template.getBlock(0).getDataString(0)
    material_id = mesh_template.getBlock(1).getDataInt(0)
    # channel_id = mesh_template.getBlock(2).getDataInt(0)
    try:
        mtl_textures = materials[material_id]
    except:
        mtl_textures = None
        if material_id != -1:
            warn("Unable to find material for mesh %s: mtl_id %d" % (name, material_id))
    mesh_block_array = mesh_template.getBlock(3)
    ani_time_array = [
        mesh_template.getBlock(4).getDataInt(i)
        for i in range(mesh_template.getBlock(4).getElementNumber())
    ]
    ani_matrix_array = [
        mesh_template.getBlock(5).getDataFloat(i)
        for i in range(mesh_template.getBlock(5).getElementNumber())
    ]
    visi_time_array = [
        mesh_template.getBlock(6).getDataInt(i)
        for i in range(mesh_template.getBlock(6).getElementNumber())
    ]
    visi_value_array = [
        mesh_template.getBlock(7).getDataFloat(i)
        for i in range(mesh_template.getBlock(7).getElementNumber())
    ]
    mesh_children_array = [
        mesh_template.getBlock(9).getBlock(i)
        for i in range(mesh_template.getBlock(9).getElementNumber())
    ]

    mesh = bpy.data.meshes.new(name)
    mesh_object = bpy.data.objects.new(name, mesh)
    bpy.context.scene.objects.link(mesh_object)
    mesh_object.parent = armature
    armature_modifier = mesh_object.modifiers.new(armature.name, 'ARMATURE')
    armature_modifier.object = armature

    # mesh data handling
    bm = bmesh.new()
    bones_weight = []
    mesh_blocks = [
        mesh_block_array.getBlock(i)
        for i in range(mesh_block_array.getElementNumber())
    ]
    for i, mesh_block in enumerate(mesh_blocks):
        bones_weight.append(read_mesh_block(mesh_block, mesh_object, bm, mtl_textures))
    bm.to_mesh(mesh)
    bm.free()
    del bm

    # bones weights handling
    bpy.ops.object.mode_set(mode='EDIT')
    for bone_weights in bones_weight:
        for bone_name, weight_infos in bone_weights:
            if bone_name not in list(armature.data.edit_bones.keys()):
                bone = armature.data.edit_bones.new(bone_name)
                bone.head = (0, 0, 0)
                bone.tail = (0, 1, 0)
            if bone_name not in list(mesh_object.vertex_groups.keys()):
                vertex_group = mesh_object.vertex_groups.new(bone_name)
            else:
                vertex_group = mesh_object.vertex_groups[bone_name]
            for vertex_index, weight in weight_infos:
                vertex_group.add([vertex_index], weight, 'ADD')
    armature.data.update_tag()
    bpy.ops.object.mode_set(mode='OBJECT')

    # animation handling
    if len(ani_time_array) > 0:
        mesh_object.rotation_mode = 'QUATERNION'
        anim_data = mesh_object.animation_data_create()
        anim_data.action = bpy.data.actions.new(name + "_matrix_anim")
        anim_action = anim_data.action
        location_keyframes = [None] * 3
        location_keyframes[0] = create_anim_fcurve(anim_action, "location", 0, len(ani_time_array))
        location_keyframes[1] = create_anim_fcurve(anim_action, "location", 1, len(ani_time_array))
        location_keyframes[2] = create_anim_fcurve(anim_action, "location", 2, len(ani_time_array))
        rotation_keyframes = [None] * 4
        rotation_keyframes[0] = create_anim_fcurve(anim_action, "rotation_quaternion", 0, len(ani_time_array))
        rotation_keyframes[1] = create_anim_fcurve(anim_action, "rotation_quaternion", 1, len(ani_time_array))
        rotation_keyframes[2] = create_anim_fcurve(anim_action, "rotation_quaternion", 2, len(ani_time_array))
        rotation_keyframes[3] = create_anim_fcurve(anim_action, "rotation_quaternion", 3, len(ani_time_array))
        scale_keyframes = [None] * 3
        scale_keyframes[0] = create_anim_fcurve(anim_action, "scale", 0, len(ani_time_array))
        scale_keyframes[1] = create_anim_fcurve(anim_action, "scale", 1, len(ani_time_array))
        scale_keyframes[2] = create_anim_fcurve(anim_action, "scale", 2, len(ani_time_array))
        for i, time in enumerate(ani_time_array):
            frame_id = time_to_frame(time)
            location = mathutils.Vector((ani_matrix_array[i * 12 + 9],
                                         ani_matrix_array[i * 12 + 10],
                                         ani_matrix_array[i * 12 + 11]))
            rot_matrix = mathutils.Matrix(
                ((ani_matrix_array[i * 12 + 0], ani_matrix_array[i * 12 + 3], ani_matrix_array[i * 12 + 6]),
                 (ani_matrix_array[i * 12 + 1], ani_matrix_array[i * 12 + 4], ani_matrix_array[i * 12 + 7]),
                 (ani_matrix_array[i * 12 + 2], ani_matrix_array[i * 12 + 5], ani_matrix_array[i * 12 + 8])))
            quaternion = rot_matrix.to_quaternion()
            scale = rot_matrix.to_scale()
            location_keyframes[0][i].co = frame_id, location[0]
            location_keyframes[1][i].co = frame_id, location[1]
            location_keyframes[2][i].co = frame_id, location[2]
            rotation_keyframes[0][i].co = frame_id, quaternion[0]
            rotation_keyframes[1][i].co = frame_id, quaternion[1]
            rotation_keyframes[2][i].co = frame_id, quaternion[2]
            rotation_keyframes[3][i].co = frame_id, quaternion[3]
            scale_keyframes[0][i].co = frame_id, scale[0]
            scale_keyframes[1][i].co = frame_id, scale[1]
            scale_keyframes[2][i].co = frame_id, scale[2]
        bpy.context.scene.frame_end = time_to_frame(ani_time_array[-1])

    # visibility handling (object transparency)
    if len(visi_time_array) > 0:
        visibility_keyframes = [None] * len(mesh_object.data.materials)
        for i, material in enumerate(mesh_object.data.materials):
            material.use_transparency = True
            material.transparency_method = 'Z_TRANSPARENCY'
            anim_data = material.animation_data_create()
            anim_data.action = bpy.data.actions.new(name + "_visibility_anim")
            anim_action = anim_data.action
            visibility_keyframes[i] = create_anim_fcurve(anim_action, "alpha", 0, len(visi_time_array))
        for i, time in enumerate(visi_time_array):
            frame_id = time_to_frame(time)
            for visibility_keyframes_per_mtl in visibility_keyframes:
                visibility_keyframes_per_mtl[i].co = frame_id, visi_value_array[i]
        bpy.context.scene.frame_end = max(bpy.context.scene.frame_end,
                                          time_to_frame(visi_time_array[-1]))

    # children handling (recursive)
    for children in mesh_children_array:
        read_mesh(children, materials, armature).parent = mesh_object

    return mesh_object
def execute(self, context):
    import bmesh, mathutils
    ob = context.active_object
    me = ob.data

    pre_mode = ob.mode
    bpy.ops.object.mode_set(mode='OBJECT')

    bm = bmesh.new()
    bm.from_mesh(me)
    edge_lengths = [e.calc_length() for e in bm.edges]
    bm.free()

    edge_lengths.sort()
    average_edge_length = sum(edge_lengths) / len(edge_lengths)
    center_index = int((len(edge_lengths) - 1) / 2.0)
    average_edge_length = (average_edge_length + edge_lengths[center_index]) / 2

    radius = average_edge_length * self.radius

    context.window_manager.progress_begin(0, len(me.vertices))
    progress_reduce = len(me.vertices) // 200 + 1
    near_vert_data = []
    kd = mathutils.kdtree.KDTree(len(me.vertices))
    for vert in me.vertices:
        kd.insert(vert.co.copy(), vert.index)
    kd.balance()
    for vert in me.vertices:
        near_vert_data.append([])
        near_vert_data_append = near_vert_data[-1].append
        for co, index, dist in kd.find_range(vert.co, radius):
            multi = (radius - dist) / radius
            if self.blend == 'SMOOTH1':
                multi = common.in_out_quad_blend(multi)
            elif self.blend == 'SMOOTH2':
                multi = common.bezier_blend(multi)
            near_vert_data_append((index, multi))
        if vert.index % progress_reduce == 0:
            context.window_manager.progress_update(vert.index)
    context.window_manager.progress_end()

    target_shape_keys = []
    if self.target == 'ACTIVE':
        target_shape_keys.append(ob.active_shape_key)
    elif self.target == 'UP':
        for index, shape_key in enumerate(me.shape_keys.key_blocks):
            if index <= ob.active_shape_key_index:
                target_shape_keys.append(shape_key)
    elif self.target == 'DOWN':
        for index, shape_key in enumerate(me.shape_keys.key_blocks):
            if ob.active_shape_key_index <= index:
                target_shape_keys.append(shape_key)
    elif self.target == 'ALL':
        for index, shape_key in enumerate(me.shape_keys.key_blocks):
            target_shape_keys.append(shape_key)

    progress_total = len(target_shape_keys) * self.strength * len(me.vertices)
    context.window_manager.progress_begin(0, progress_total)
    progress_reduce = progress_total // 200 + 1
    progress_count = 0
    for strength_count in range(self.strength):
        for shape_key in target_shape_keys:
            shapes = []
            shapes_append = shapes.append
            for index, vert in enumerate(me.vertices):
                co = shape_key.data[index].co - vert.co
                shapes_append(co)

            for vert in me.vertices:
                target_shape = shapes[vert.index]
                total_shape = mathutils.Vector()
                total_multi = 0.0
                for index, multi in near_vert_data[vert.index]:
                    co = shapes[index]
                    if self.effect == 'ADD':
                        if target_shape.length <= co.length:
                            total_shape += co * multi
                            total_multi += multi
                    elif self.effect == 'SUB':
                        if co.length <= target_shape.length:
                            total_shape += co * multi
                            total_multi += multi
                    else:
                        total_shape += co * multi
                        total_multi += multi

                if 0 < total_multi:
                    average_shape = total_shape / total_multi
                else:
                    average_shape = mathutils.Vector()

                shape_key.data[vert.index].co = vert.co + average_shape

                progress_count += 1
                if progress_count % progress_reduce == 0:
                    context.window_manager.progress_update(progress_count)
    context.window_manager.progress_end()

    bpy.ops.object.mode_set(mode=pre_mode)
    return {'FINISHED'}
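# The falloff helpers referenced from "common" above are not shown in this
# file. A minimal sketch of easing curves matching those names, assuming a
# normalized input t in [0, 1] (an assumption, not the module's actual code):
def in_out_quad_blend(t):
    # quadratic ease-in for the first half, ease-out for the second
    if t <= 0.5:
        return 2.0 * t * t
    t -= 0.5
    return 2.0 * t * (1.0 - t) + 0.5

def bezier_blend(t):
    # cubic smoothstep: 3t^2 - 2t^3
    return t * t * (3.0 - 2.0 * t)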
def execute(self, context):
    import mathutils, time
    start_time = time.time()

    target_ob = context.active_object
    target_me = target_ob.data

    pre_mode = target_ob.mode
    bpy.ops.object.mode_set(mode='OBJECT')

    for ob in context.selected_objects:
        if ob.name != target_ob.name:
            source_original_ob = ob
            break
    source_ob = source_original_ob.copy()
    source_me = source_original_ob.data.copy()
    source_ob.data = source_me
    context.scene.objects.link(source_ob)
    context.scene.objects.active = source_ob

    bpy.ops.object.mode_set(mode='EDIT')
    bpy.ops.mesh.reveal()
    bpy.ops.mesh.select_all(action='SELECT')
    bpy.ops.mesh.subdivide(number_cuts=self.subdivide_number,
                           smoothness=0.0,
                           quadtri=False,
                           quadcorner='STRAIGHT_CUT',
                           fractal=0.0,
                           fractal_along_normal=0.0,
                           seed=0)
    source_ob.active_shape_key_index = 0
    bpy.ops.object.mode_set(mode='OBJECT')

    if self.is_first_remove_all:
        try:
            target_ob.active_shape_key_index = 1
            bpy.ops.object.shape_key_remove(all=True)
        except:
            pass

    kd = mathutils.kdtree.KDTree(len(source_me.vertices))
    for vert in source_me.vertices:
        co = source_ob.matrix_world * vert.co
        kd.insert(co, vert.index)
    kd.balance()

    context.window_manager.progress_begin(0, len(target_me.vertices))
    progress_reduce = len(target_me.vertices) // 200 + 1
    near_vert_data = []
    near_vert_multi_total = []
    near_vert_multi_total_append = near_vert_multi_total.append
    for vert in target_me.vertices:
        near_vert_data.append([])
        near_vert_data_append = near_vert_data[-1].append

        target_co = target_ob.matrix_world * vert.co
        mini_co, mini_index, mini_dist = kd.find(target_co)

        radius = mini_dist * self.extend_range
        diff_radius = radius - mini_dist

        multi_total = 0.0
        for co, index, dist in kd.find_range(target_co, radius):
            if 0 < diff_radius:
                multi = (diff_radius - (dist - mini_dist)) / diff_radius
            else:
                multi = 1.0
            near_vert_data_append((index, multi))
            multi_total += multi
        near_vert_multi_total_append(multi_total)

        if vert.index % progress_reduce == 0:
            context.window_manager.progress_update(vert.index)
    context.window_manager.progress_end()

    is_shapeds = {}
    relative_keys = []
    context.window_manager.progress_begin(0, len(source_me.shape_keys.key_blocks))
    context.window_manager.progress_update(0)
    for source_shape_key_index, source_shape_key in enumerate(source_me.shape_keys.key_blocks):
        if target_me.shape_keys:
            if source_shape_key.name in target_me.shape_keys.key_blocks:
                target_shape_key = target_me.shape_keys.key_blocks[source_shape_key.name]
            else:
                target_shape_key = target_ob.shape_key_add(name=source_shape_key.name, from_mix=False)
        else:
            target_shape_key = target_ob.shape_key_add(name=source_shape_key.name, from_mix=False)

        relative_key_name = source_shape_key.relative_key.name
        if relative_key_name not in relative_keys:
            relative_keys.append(relative_key_name)
        is_shapeds[source_shape_key.name] = False

        try:
            target_shape_key.relative_key = target_me.shape_keys.key_blocks[relative_key_name]
        except:
            pass

        mat1, mat2 = source_ob.matrix_world, target_ob.matrix_world
        source_shape_keys = [
            (mat1 * source_shape_key.data[v.index].co * mat2)
            - (mat1 * source_me.vertices[v.index].co * mat2)
            for v in source_me.vertices
        ]

        for target_vert in target_me.vertices:
            if 0 < near_vert_multi_total[target_vert.index]:
                total_diff_co = mathutils.Vector((0, 0, 0))
                for near_index, near_multi in near_vert_data[target_vert.index]:
                    total_diff_co += source_shape_keys[near_index] * near_multi
                average_diff_co = total_diff_co / near_vert_multi_total[target_vert.index]
            else:
                average_diff_co = mathutils.Vector((0, 0, 0))

            target_shape_key.data[target_vert.index].co = target_me.vertices[target_vert.index].co + average_diff_co
            if 0.01 < average_diff_co.length:
                is_shapeds[source_shape_key.name] = True

        context.window_manager.progress_update(source_shape_key_index)
    context.window_manager.progress_end()

    if self.is_remove_empty:
        for source_shape_key_name, is_shaped in is_shapeds.items():
            if source_shape_key_name not in relative_keys and not is_shaped:
                target_shape_key = target_me.shape_keys.key_blocks[source_shape_key_name]
                target_ob.shape_key_remove(target_shape_key)

    target_ob.active_shape_key_index = 0
    common.remove_data([source_ob, source_me])
    context.scene.objects.active = target_ob
    bpy.ops.object.mode_set(mode=pre_mode)

    diff_time = time.time() - start_time
    self.report(type={'INFO'}, message=str(round(diff_time, 1)) + " Seconds")
    return {'FINISHED'}
def import_palette(parms):
    try:
        palette_file = open(parms.filepath, "r")
    except IOError as why:
        raise Failure(str(why))
    #end try
    if palette_file.readline().strip() != "GIMP Palette":
        raise Failure("doesn't look like a GIMP palette file")
    #end if
    name = "Untitled"
    while True:
        line = palette_file.readline()
        if len(line) == 0:
            raise Failure("palette file seems to be empty")
        #end if
        line = line.rstrip("\n")
        if line.startswith("Name: "):
            name = line[6:].strip()
        #end if
        if line.startswith("#"):
            break
        #end if
    #end while
    colours = []
    while True:
        line = palette_file.readline()
        if len(line) == 0:
            break
        if not line.startswith("#"):
            line = line.rstrip("\n")
            components = line.split("\t", 1)
            if len(components) == 1:
                components.append("") # empty name
            #end if
            try:
                colour = tuple(int(i.strip()) / 255.0 for i in components[0].split(None, 2))
            except ValueError:
                raise Failure("bad colour on line %s" % repr(line))
            #end try
            colours.append((colour, components[1]))
        #end if
    #end while
    # all successfully loaded
    prev_scene = bpy.context.scene
    bpy.ops.object.select_all(action="DESELECT")
    bpy.ops.scene.new(type="NEW")
    the_scene = bpy.context.scene
    the_scene.name = parms.scene_name
    the_scene.world = prev_scene.world
    the_scene.render.engine = "CYCLES"
    if parms.base_object != no_object and parms.base_object in bpy.data.objects:
        swatch_object = bpy.data.objects[parms.base_object]
    else:
        swatch_object = None
    #end if
    if swatch_object != None:
        swatch_material = bpy.data.materials[parms.base_material]
        x_offset, y_offset = tuple(x * 1.1 for x in tuple(swatch_object.dimensions.xy))
    else:
        swatch_material = None
        x_offset, y_offset = 2.2, 2.2 # nice margins assuming default mesh size of 2x2 units
    #end if
    # Create material with a node group containing a single diffuse shader
    # and an input RGB colour. That way colour can be varied across each
    # swatch, while the contents of the common node group can easily be
    # changed by the user into something more elaborate.
    common_group = bpy.data.node_groups.new("palette material common", "ShaderNodeTree")
    group_inputs = common_group.nodes.new("NodeGroupInput")
    group_inputs.location = (-300, 0)
    common_group.inputs.new("NodeSocketColor", "Colour")
    shader = common_group.nodes.new("ShaderNodeBsdfDiffuse")
    shader.location = (0, 0)
    common_group.links.new(group_inputs.outputs[0], shader.inputs[0])
    # group will contain material output directly
    material_output = common_group.nodes.new("ShaderNodeOutputMaterial")
    material_output.location = (300, 0)
    common_group.links.new(shader.outputs[0], material_output.inputs[0])
    per_row = math.ceil(math.sqrt(len(colours)))
    row = 0
    col = 0
    layers = (True,) + 19 * (False,)
    for colour in colours:
        bpy.ops.object.select_all(action="DESELECT")
          # ensure materials get added to right objects
        location = mathutils.Vector((row * x_offset, col * y_offset, 0.0))
        if swatch_object != None:
            swatch = swatch_object.copy()
            swatch.data = swatch.data.copy() # ensure material slots are not shared
            the_scene.objects.link(swatch)
            swatch.layers = layers
            swatch.location = location
        else:
            bpy.ops.mesh.primitive_plane_add(layers=layers, location=location)
            swatch = bpy.context.selected_objects[0]
        #end if
        col += 1
        if col == per_row:
            col = 0
            row += 1
        #end if
        material_name = "%s_%s" % (name, colour[1])
        the_material = bpy.data.materials.new(material_name)
          # TODO: option to reuse existing material?
        the_material.use_nodes = True
        material_tree = the_material.node_tree
        for node in list(material_tree.nodes):
            # clear out default nodes
            material_tree.nodes.remove(node)
        #end for
        the_material.diffuse_color = colour[0] # used in viewport
        group_node = material_tree.nodes.new("ShaderNodeGroup")
        group_node.node_tree = common_group
        group_node.location = (0, 0)
        in_colour = material_tree.nodes.new("ShaderNodeRGB")
        in_colour.location = (-300, 0)
        in_colour.outputs[0].default_value = colour[0] + (1,)
        material_tree.links.new(in_colour.outputs[0], group_node.inputs[0])
        if swatch_material != None:
            # replace existing material slot
            for i in range(0, len(swatch.data.materials)):
                if swatch.data.materials[i] == swatch_material:
                    swatch.data.materials[i] = the_material
                #end if
            #end for
        else:
            swatch.data.materials.append(the_material)
        #end if
    #end for
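# Illustrative check of the palette line format parsed above (not from the
# original add-on): colour components are whitespace-separated 0..255 ints,
# optionally followed by a tab and a swatch name.
sample = "255 128 0\tOrange"
components = sample.split("\t", 1)
colour = tuple(int(i.strip()) / 255.0 for i in components[0].split(None, 2))
# colour == (1.0, 128 / 255.0, 0.0) and components[1] == "Orange"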
def offset(self):
    '''(Vector 3D) Fine tune of the texture mapping X, Y and Z locations'''
    return mathutils.Vector()
def get_view_direction(context):
    view_rot = get_view_rotation(context)
    direction = view_rot @ mathutils.Vector((0, 0, -1))
    return direction.normalized()
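# Companion sketch (an assumption, not part of the original helper): inside a
# 3D View context the matching view origin can be recovered from the same
# region data, using only documented RegionView3D attributes.
def get_view_origin(context):
    rv3d = context.region_data
    return rv3d.view_matrix.inverted().translation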
def scale(self):
    '''(Vector 3D) Set scaling for the texture's X, Y and Z sizes'''
    return mathutils.Vector()
def pack_lightgrid(bsp):
    world_mins = bsp.lumps["models"].data[0].mins
    world_maxs = bsp.lumps["models"].data[0].maxs

    lightgrid_origin = [
        bsp.lightgrid_size[0] * ceil(world_mins[0] / bsp.lightgrid_size[0]),
        bsp.lightgrid_size[1] * ceil(world_mins[1] / bsp.lightgrid_size[1]),
        bsp.lightgrid_size[2] * ceil(world_mins[2] / bsp.lightgrid_size[2])
    ]
    bsp.lightgrid_origin = lightgrid_origin

    maxs = [
        bsp.lightgrid_size[0] * floor(world_maxs[0] / bsp.lightgrid_size[0]),
        bsp.lightgrid_size[1] * floor(world_maxs[1] / bsp.lightgrid_size[1]),
        bsp.lightgrid_size[2] * floor(world_maxs[2] / bsp.lightgrid_size[2])
    ]

    lightgrid_dimensions = [
        (maxs[0] - lightgrid_origin[0]) / bsp.lightgrid_size[0] + 1,
        (maxs[1] - lightgrid_origin[1]) / bsp.lightgrid_size[1] + 1,
        (maxs[2] - lightgrid_origin[2]) / bsp.lightgrid_size[2] + 1
    ]

    bsp.lightgrid_inverse_dim = [
        1.0 / lightgrid_dimensions[0],
        1.0 / (lightgrid_dimensions[1] * lightgrid_dimensions[2]),
        1.0 / lightgrid_dimensions[2]
    ]
    bsp.lightgrid_z_step = 1.0 / lightgrid_dimensions[2]
    bsp.lightgrid_dim = lightgrid_dimensions

    a1_pixels = []
    a2_pixels = []
    a3_pixels = []
    a4_pixels = []
    d1_pixels = []
    d2_pixels = []
    d3_pixels = []
    d4_pixels = []
    l_pixels = []

    num_elements = int(lightgrid_dimensions[0] *
                       lightgrid_dimensions[1] *
                       lightgrid_dimensions[2])
    num_elements_bsp = (bsp.lumps["lightgridarray"].count
                        if bsp.use_lightgridarray
                        else bsp.lumps["lightgrid"].count)

    if num_elements == num_elements_bsp:
        for pixel in range(num_elements):
            if bsp.use_lightgridarray:
                index = bsp.lumps["lightgridarray"].data[pixel].data
            else:
                index = pixel

            ambient1 = mathutils.Vector((0, 0, 0))
            ambient2 = mathutils.Vector((0, 0, 0))
            ambient3 = mathutils.Vector((0, 0, 0))
            ambient4 = mathutils.Vector((0, 0, 0))
            direct1 = mathutils.Vector((0, 0, 0))
            direct2 = mathutils.Vector((0, 0, 0))
            direct3 = mathutils.Vector((0, 0, 0))
            direct4 = mathutils.Vector((0, 0, 0))
            l = mathutils.Vector((0, 0, 0))

            ambient1 = bsp.lumps["lightgrid"].data[index].ambient1
            direct1 = bsp.lumps["lightgrid"].data[index].direct1
            if bsp.lightmaps > 1:
                ambient2 = bsp.lumps["lightgrid"].data[index].ambient2
                ambient3 = bsp.lumps["lightgrid"].data[index].ambient3
                ambient4 = bsp.lumps["lightgrid"].data[index].ambient4
                direct2 = bsp.lumps["lightgrid"].data[index].direct2
                direct3 = bsp.lumps["lightgrid"].data[index].direct3
                direct4 = bsp.lumps["lightgrid"].data[index].direct4

            # decode the stored latitude/longitude bytes into a light vector
            lat = (bsp.lumps["lightgrid"].data[index].lat_long[0] / 255.0) * 2.0 * pi
            long = (bsp.lumps["lightgrid"].data[index].lat_long[1] / 255.0) * 2.0 * pi
            slat = sin(lat)
            clat = cos(lat)
            slong = sin(long)
            clong = cos(long)
            l = mathutils.Vector((clat * slong, slat * slong, clong)).normalized()

            color_scale = 1.0 / 255.0
            append_byte_to_color_list(ambient1, a1_pixels, color_scale)
            append_byte_to_color_list(direct1, d1_pixels, color_scale)
            if bsp.lightmaps > 1:
                append_byte_to_color_list(ambient2, a2_pixels, color_scale)
                append_byte_to_color_list(ambient3, a3_pixels, color_scale)
                append_byte_to_color_list(ambient4, a4_pixels, color_scale)
                append_byte_to_color_list(direct2, d2_pixels, color_scale)
                append_byte_to_color_list(direct3, d3_pixels, color_scale)
                append_byte_to_color_list(direct4, d4_pixels, color_scale)
            append_byte_to_color_list(l, l_pixels, 1.0)
    else:
        a1_pixels = [0.0 for i in range(num_elements * 4)]
        a2_pixels = [0.0 for i in range(num_elements * 4)]
        a3_pixels = [0.0 for i in range(num_elements * 4)]
        a4_pixels = [0.0 for i in range(num_elements * 4)]
        d1_pixels = [0.0 for i in range(num_elements * 4)]
        d2_pixels = [0.0 for i in range(num_elements * 4)]
        d3_pixels = [0.0 for i in range(num_elements * 4)]
        d4_pixels = [0.0 for i in range(num_elements * 4)]
        l_pixels = [0.0 for i in range(num_elements * 4)]
        print("Lightgridarray mismatch!")
        print(str(num_elements) + " != " + str(num_elements_bsp))

    ambient1 = create_new_image("$lightgrid_ambient1",
                                lightgrid_dimensions[0],
                                lightgrid_dimensions[1] * lightgrid_dimensions[2])
    ambient1.pixels = a1_pixels
    ambient1.pack()
    direct1 = create_new_image("$lightgrid_direct1",
                               lightgrid_dimensions[0],
                               lightgrid_dimensions[1] * lightgrid_dimensions[2])
    direct1.pixels = d1_pixels
    direct1.pack()
    if bsp.lightmaps > 1:
        ambient2 = create_new_image("$lightgrid_ambient2",
                                    lightgrid_dimensions[0],
                                    lightgrid_dimensions[1] * lightgrid_dimensions[2])
        ambient3 = create_new_image("$lightgrid_ambient3",
                                    lightgrid_dimensions[0],
                                    lightgrid_dimensions[1] * lightgrid_dimensions[2])
        ambient4 = create_new_image("$lightgrid_ambient4",
                                    lightgrid_dimensions[0],
                                    lightgrid_dimensions[1] * lightgrid_dimensions[2])
        ambient2.pixels = a2_pixels
        ambient3.pixels = a3_pixels
        ambient4.pixels = a4_pixels
        ambient2.pack()
        ambient3.pack()
        ambient4.pack()
        direct2 = create_new_image("$lightgrid_direct2",
                                   lightgrid_dimensions[0],
                                   lightgrid_dimensions[1] * lightgrid_dimensions[2])
        direct3 = create_new_image("$lightgrid_direct3",
                                   lightgrid_dimensions[0],
                                   lightgrid_dimensions[1] * lightgrid_dimensions[2])
        direct4 = create_new_image("$lightgrid_direct4",
                                   lightgrid_dimensions[0],
                                   lightgrid_dimensions[1] * lightgrid_dimensions[2])
        direct2.pixels = d2_pixels
        direct3.pixels = d3_pixels
        direct4.pixels = d4_pixels
        direct2.pack()
        direct3.pack()
        direct4.pack()
    lightvec = create_new_image("$lightgrid_vector",
                                lightgrid_dimensions[0],
                                lightgrid_dimensions[1] * lightgrid_dimensions[2],
                                True)
    lightvec.colorspace_settings.name = "Non-Color"
    lightvec.pixels = l_pixels
    lightvec.pack()
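# Hedged sketch of how this packed layout is addressed later: the 3D grid is
# flattened into a 2D image of width dim[0] and height dim[1] * dim[2], so
# cell (x, y, z) lands on image row y + z * dim[1]. lightgrid_inverse_dim and
# lightgrid_z_step above are the normalized strides of exactly this layout.
# The helper name and the half-texel centring are illustrative assumptions.
def lightgrid_cell_uv(cell, dim):
    x, y, z = cell
    u = (x + 0.5) / dim[0]
    v = (y + z * dim[1] + 0.5) / (dim[1] * dim[2])
    return u, v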
def color(self):
    '''(Vector 3D) Default color for textures that don't return RGB
    or when RGB to intensity is enabled'''
    return mathutils.Vector()
bm.faces.new([bottom, prev_ring[s + 1], prev_ring[s]])

bm.verts.ensure_lookup_table()
bm.verts.index_update()

# equirectangular UVs: u from the longitude, v from the latitude, with a
# seam fix so no face spans the u = 1.0 -> 0.0 wrap-around
uv_layer = bm.loops.layers.uv.new()
for face in bm.faces:
    safe = False
    for l in range(len(face.loops)):
        loop = face.loops[l]
        x, y, z = loop.vert.co
        u = math.atan2(y, x) / (2 * math.pi)
        if u < 0.0:
            u += 1.0
        v = -math.acos(z) / math.pi
        if l == 0 and u == 0:
            safe = True
        if u == 0.0 and not safe:
            u = 1.0
        loop[uv_layer].uv = mathutils.Vector((u, v))

for v in bm.verts:
    v.co *= size

bm.to_mesh(mesh)
bm.free()
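# A minimal standalone form of the mapping used above for a point on a unit
# sphere; the clamp on z is an added safety assumption against floating-point
# drift, not something the snippet above needs for its exact coordinates.
def sphere_uv(x, y, z):
    u = math.atan2(y, x) / (2 * math.pi)
    if u < 0.0:
        u += 1.0
    v = -math.acos(max(-1.0, min(1.0, z))) / math.pi
    return u, v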
def create_vector(x, y, z):
    # mathutils.Vector takes a single sequence of components,
    # not three separate arguments
    vector = mathutils.Vector((x, y, z))
    return vector, vector.length
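# Example: a 1-2-2 vector has length exactly 3, which makes the helper easy
# to sanity-check.
v, length = create_vector(1.0, 2.0, 2.0)
assert abs(length - 3.0) < 1e-6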
def floor(self, context, event, ray_max=100000.0):
    """Run this function on left mouse, execute the ray cast"""
    # get the context arguments
    scene = context.scene
    region = context.region
    rv3d = context.region_data
    coord = event.mouse_region_x, event.mouse_region_y

    # get the ray from the viewport and mouse
    view_vector = view3d_utils.region_2d_to_vector_3d(region, rv3d, coord)
    ray_origin = view3d_utils.region_2d_to_origin_3d(region, rv3d, coord)
    ray_target = ray_origin + (view_vector * ray_max)

    def obj_ray_cast(obj, matrix):
        """Wrapper for ray casting that moves the ray into object space"""
        # get the ray relative to the object
        matrix_inv = matrix.inverted()
        ray_origin_obj = matrix_inv * ray_origin
        ray_target_obj = matrix_inv * ray_target
        # cast the ray
        hit, normal, face_index = obj.ray_cast(ray_origin_obj, ray_target_obj)
        if face_index != -1:
            return hit, normal, face_index
        else:
            return None, None, None

    # cast rays and find the closest object
    best_length_squared = ray_max * ray_max
    best_obj = None
    obj = bpy.context.active_object
    matrix = obj.matrix_world.copy()
    if obj.type == 'MESH':
        hit, normal, face_index = obj_ray_cast(obj, matrix)
        print(hit)
        if hit is not None:
            hit_world = matrix * hit
            scene.cursor_location = hit_world
            self.hits.append(hit)
            print(len(self.hits))
            if len(self.hits) >= 3:
                # average the normals of consecutive hit-point triplets;
                # the accumulator must start at zero
                n = mathutils.Vector((0, 0, 0))
                for a in range(0, len(self.hits) - 2):
                    v1 = matrix * self.hits[a]
                    v2 = matrix * self.hits[a + 1]
                    v3 = matrix * self.hits[a + 2]
                    ntri = mathutils.geometry.normal(v1, v2, v3)
                    n += ntri
                up = mathutils.Vector((0, 0, 1))
                r = n.rotation_difference(up)
                print(n, r)
                print(obj.rotation_quaternion)
                print(matrix)
                # move the centroid of the first three hits to the origin,
                # then rotate the averaged normal onto +Z
                v = matrix * ((self.hits[0] + self.hits[1] + self.hits[2]) / 3)
                obj.location -= v
                m = obj.rotation_mode
                bpy.ops.object.transform_apply(location=True, rotation=False, scale=True)
                obj.rotation_mode = 'QUATERNION'
                obj.rotation_quaternion.rotate(r)
                obj.rotation_mode = m
                matrix = obj.matrix_world.copy()
                print(matrix)
                bpy.ops.object.transform_apply(location=False, rotation=True, scale=False)
                return True
    return False
def create_vector_from_two_points(point_a, point_b):
    # components must be wrapped in a single sequence for mathutils.Vector
    vector = mathutils.Vector((point_b.x - point_a.x,
                               point_b.y - point_a.y,
                               point_b.z - point_a.z))
    return vector, vector.length
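# mathutils overloads subtraction, so (assuming point_a and point_b are
# mathutils.Vector instances) the helper above reduces to one line:
def create_vector_from_two_points_short(point_a, point_b):
    vector = point_b - point_a
    return vector, vector.length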
def invoke(self, context, event):
    # initialization
    self.number_arr.clear()
    if not context.active_object.data.use_auto_smooth:
        return {'CANCELLED'}
    global b4w_loops_normals
    global b4w_vertex_to_loops_map
    global rotation_helper_params

    # toggle mode to update the selection
    bpy.ops.object.mode_set(mode="OBJECT")
    bpy.ops.object.mode_set(mode="EDIT")
    load_loops_normals_into_global_cache(context.active_object)
    rotation_helper_params.is_active = True
    self.mouse_x = event.mouse_x
    self.mouse_y = event.mouse_y

    # make a copy of all normals to serve as the starting data
    self.init_loops_normals = copy.copy(b4w_loops_normals)
    if not context.window_manager.b4w_split:
        self.init_normals = []
        for i, v in enumerate(context.active_object.data.vertices):
            vert = mathutils.Vector()
            # floating vertices have no loops; their normal stays zero
            if i in b4w_vertex_to_loops_map:
                for j in b4w_vertex_to_loops_map[i]:
                    vert = vert + mathutils.Vector(b4w_loops_normals[j])
                # average over the loop count, not the last loop index
                vert = vert / len(b4w_vertex_to_loops_map[i])
            self.init_normals.append(vert)

    # calculate rotation center
    rotation_helper_params.c_world = mathutils.Vector((0, 0, 0))
    n = 0
    for v in context.active_object.data.vertices:
        if v.select:
            rotation_helper_params.c_world = rotation_helper_params.c_world + v.co
            n = n + 1
            if context.window_manager.b4w_split:
                break
    if n > 0:
        rotation_helper_params.c_world = (context.active_object.matrix_world
                                          * (rotation_helper_params.c_world / n))
    rotation_helper_params.constraint = None
    mouse_world = self.calc_mouse_world(context)
    if mouse_world:
        self.mouse_local_old = context.active_object.matrix_world.inverted() * mouse_world
    else:
        self.mouse_local_old = None
    self.execute(context)
    context.window_manager.modal_handler_add(self)
    return {'RUNNING_MODAL'}
def M_CrossVecs(self, v1, v2):
    if 'cross' in dir(mathutils.Vector()):
        return v1.cross(v2)  # for 2.49 and up
    else:
        return mathutils.CrossVecs(v1, v2)  # for pre-2.49
def apply(self, idx, offset):
    x, y, z = b4w_loops_normals[idx]
    n = mathutils.Vector((x + self.sign * offset.x,
                          y + self.sign * offset.y,
                          z + self.sign * offset.z)).normalized()
    b4w_loops_normals[idx] = (n.x, n.y, n.z)
def M_DotVecs(self, v1, v2):
    if 'dot' in dir(mathutils.Vector()):
        return v1.dot(v2)  # for 2.49 and up
    else:
        return mathutils.DotVecs(v1, v2)  # for pre-2.49
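# Usage sketch for the two compatibility wrappers above; on 2.49 and later
# they reduce to the Vector methods shown here (self is unused, so any
# instance works as the first argument):
a = mathutils.Vector((1.0, 0.0, 0.0))
b = mathutils.Vector((0.0, 1.0, 0.0))
cross = a.cross(b)  # Vector((0.0, 0.0, 1.0))
dot = a.dot(b)      # 0.0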
def create_dictionaries():
    """Creation of a list of all the robots and components in the scene.
    Uses the properties of the objects to determine what they are."""

    # Create a dictionary that stores initial positions of all objects
    # in the simulation, used to reset the simulation.
    persistantstorage.blender_objects = {}

    # Create a dictionary of the components in the scene
    persistantstorage.componentDict = {}

    # Create a dictionary of the robots in the scene
    persistantstorage.robotDict = {}

    # Create a dictionary of the external robots in the scene
    # Used for the multi-node simulation
    persistantstorage.externalRobotDict = {}

    # Create a dictionary with the passive, but interactive (i.e., with an
    # 'Object' property) objects in the scene.
    persistantstorage.passiveObjectsDict = {}

    # Create a dictionary with the modifiers
    persistantstorage.modifierDict = {}

    # Create a dictionary with the datastream interfaces used
    persistantstorage.datastreamDict = {}

    # This dictionary stores, for each component, the direction and the
    # configured datastream interfaces. Direction is 'IN' for streams
    # that are read by MORSE (typically, for actuators), and 'OUT'
    # for streams published by MORSE (typically, for sensors).
    persistantstorage.datastreams = {}

    # Create a dictionary with the overlays used
    persistantstorage.overlayDict = {}

    # Create the 'request managers' manager
    persistantstorage.morse_services = MorseServices()

    scene = morse.core.blenderapi.scene()

    # Store the position and orientation of all objects
    import mathutils
    for obj in scene.objects:
        if obj.parent is None:
            pos = mathutils.Vector(obj.worldPosition)
            ori = mathutils.Matrix(obj.worldOrientation)
            persistantstorage.blender_objects[obj] = [pos, ori]

    # Get the list of passive interactive objects.
    # These objects have an 'Object' property set to true
    # (plus several other optional properties).
    # See the documentation for the up-to-date list
    # (doc/morse/user/others/passive_objects.rst) -- or read the code below :-)
    for obj in scene.objects:
        # Check the object has an 'Object' property set to true
        if 'Object' in obj and obj['Object']:
            details = {
                'label': obj['Label'] if 'Label' in obj else str(obj),
                'description': obj['Description'] if 'Description' in obj else "",
                'type': obj['Type'] if 'Type' in obj else "Object",
                'graspable': obj['Graspable'] if 'Graspable' in obj else False
            }
            persistantstorage.passiveObjectsDict[obj] = details
            logger.info("Added {name} as a {graspable}active object".format(
                name=details['label'],
                graspable="graspable " if details['graspable'] else ""))

    if not persistantstorage.passiveObjectsDict:
        logger.info("No passive objects in the scene.")

    # Get the robots
    for obj in scene.objects:
        if 'Robot_Tag' in obj or 'External_Robot_Tag' in obj:
            if not 'classpath' in obj:
                logger.error("No 'classpath' in %s\n Please make sure you are "
                             "using the new builder classes" % str(obj.name))
                return False
            # Create an object instance and store it
            instance = create_instance_level(obj['classpath'],
                                             obj.get('abstraction_level'),
                                             obj)
            if not instance:
                logger.error("Could not create %s" % str(obj['classpath']))
                return False
            # Store the instance in the persistant storage dictionary
            if 'Robot_Tag' in obj:
                persistantstorage.robotDict[obj] = instance
            else:
                persistantstorage.externalRobotDict[obj] = instance

    if not (persistantstorage.robotDict or
            persistantstorage.externalRobotDict):  # No robot!
        logger.error("INITIALIZATION ERROR: no robot in your simulation! "
                     "Do not forget that components _must_ belong to a "
                     "robot (you can not have free objects)")
        return False

    # Get the robot and its instance
    for obj, robot_instance in persistantstorage.robotDict.items():
        if not _associate_child_to_robot(obj, robot_instance, False):
            return False

    # Get the external robot and its instance
    for obj, robot_instance in persistantstorage.externalRobotDict.items():
        if not _associate_child_to_robot(obj, robot_instance, True):
            return False

    # Check we have no 'free' component (they all must belong to a robot)
    for obj in scene.objects:
        try:
            obj['Component_Tag']
            if obj.name not in persistantstorage.componentDict.keys():
                logger.error("INITIALIZATION ERROR: the component '%s' "
                             "does not belong to any robot: you need to fix "
                             "that by parenting it to a robot." % obj.name)
                return False
        except KeyError as detail:
            pass

    # Will return True always (for the moment)
    return True
def visualize(coords):
    '''Create 3D renderings of the objects at the given coords
    coords: a dictionary of final object locations
    output: PNG file of the 3D scene in the images folder'''

    def generateFileName(filename):
        '''recursive function to auto-generate output filename and avoid overwriting'''
        if os.path.exists(filename):
            num = str(int(os.path.splitext(filename)[-2][-4:]) + 1)
            return generateFileName(
                ''.join(os.path.splitext(filename)[:-1])[:-4] +
                '0' * (4 - len(num)) + num + '.png')
        return filename

    def selectObj(objs):
        '''helper function to select a non-dummy object to be the active object'''
        index = 0
        active = objs[index]
        while active.name[0] == '$':
            index += 1
            active = objs[index]
        return active

    # initialize parameters
    scene = bpy.context.scene
    camera = bpy.data.objects['Camera']
    sizes = loadObjectSizes()
    obj3d = [f for f in os.listdir('3d') if os.path.splitext(f)[1] == '.3DS']

    # place camera
    c_loc = coords['CAMERA']
    camera.location = c_loc
    look_at(camera, mathutils.Vector((0, 0, 0)))

    # variables used in the loop below
    lc = 0         # location counter for text
    filepath = ''  # object names will be added to create image filename
    maxSize = max([sizes[obj] for obj, _ in coords.items()
                   if obj != 'CAMERA' and obj != 'SCALE'])
    avgLoc = [sum(l) / len(l) for l in list(zip(*[xyz for obj, xyz in coords.items()
              if obj != 'CAMERA' and obj != 'SCALE' and obj != 'floor']))]  # x,y of floor
    floor = 0      # keeps track of lowest point to place floor (z of floor)
    newDims = {}   # will store dims to update after the loop: workaround for Blender bug

    for obj, xyz in coords.items():
        if obj == 'CAMERA' or obj == 'SCALE':
            continue
        filepath += obj + '-'
        if obj == 'lamp' or obj == 'light':
            bpy.ops.object.select_all(action='DESELECT')
            bpy.data.objects["Lamp"].location = xyz
            if obj == 'light':
                continue
        # if no 3d data exists for object, use spheres:
        if obj not in ['_'.join(f.split('_')[:-1]) for f in obj3d]:
            if obj == 'floor':
                bpy.ops.object.select_all(action='DESELECT')
                bpy.ops.mesh.primitive_plane_add()
                ob = bpy.context.selected_objects[0]
                ob.name = 'floor'
                ob.scale[0] = ob.scale[1] = sizes[obj] / maxSize * coords['SCALE'] / 4
            else:
                # add/setup text to identify the object
                bpy.ops.object.select_all(action='DESELECT')
                bpy.ops.object.text_add(radius=0.4,
                                        location=(xyz[0], xyz[1], xyz[2] + 1.3 + lc))
                lc += 0.4
                text = bpy.context.object
                text.data.body = obj
                look_at(text, mathutils.Vector((0, 0, 0)))
                # add/setup sphere to represent the object
                bpy.ops.object.select_all(action='DESELECT')
                bpy.ops.mesh.primitive_uv_sphere_add(
                    segments=32,
                    size=sizes[obj] / maxSize * coords['SCALE'] / 2,
                    location=xyz)
                ob = bpy.context.active_object
                ob.name = obj
                # create material to color the sphere
                mat = bpy.data.materials.new(name=obj + "-mat")
                ob.data.materials.append(mat)
                text.data.materials.append(mat)
                color = [random.random() for i in range(3)]
                mat.diffuse_color = color
                floor = min(floor, ob.location[2] - (ob.dimensions[2] / 2))
        else:
            # if 3d data exists for object, use this data: select a random 3d
            # file for the current object type and import it into the scene
            fname = random.choice(
                [f for f in obj3d if '_'.join(f.split('_')[:-1]) == obj])
            bpy.ops.object.select_all(action='DESELECT')
            bpy.ops.import_scene.autodesk_3ds(filepath=os.path.join('3d', fname),
                                              axis_forward='-Y', axis_up='Z',
                                              constrain_size=0,
                                              use_image_search=False)
            # clear object location and scale
            bpy.ops.object.location_clear()
            bpy.ops.object.scale_clear()
            bpy.ops.object.rotation_clear()
            bpy.context.scene.objects.active = selectObj(bpy.context.selected_objects)
            bpy.ops.object.join()
            # selected_objects should be length 1 unless there is a dummy object in index 0
            ob = selectObj(bpy.context.selected_objects)  # select the non-dummy object
            ob.name = obj
            ob.location = xyz
            maxDim = max(ob.dimensions)
            newDims[obj] = [x / maxDim for x in ob.dimensions]  # normalize to 1
            scale = [sizes[obj] / maxSize * coords['SCALE'] / 2 for r in range(0, 3)]
            ob.scale = scale
            floor = min(floor, ob.location[2] - (newDims[obj][2] / 2))

    if 'floor' in [n.name for n in bpy.context.scene.objects]:
        bpy.ops.object.select_all(action='DESELECT')
        bpy.data.objects['floor'].location = avgLoc[0], avgLoc[1], floor
        bpy.context.scene.objects['floor'].dimensions = (6, 6, 0)
        bpy.context.scene.update()

    bpy.ops.object.select_all(action='DESELECT')
    for o in bpy.context.scene.objects:
        if (o.type == 'MESH' and
                o.name in ['_'.join(f.split('_')[:-1]) for f in obj3d]):
            o.select = True
            newDim = newDims[o.name]
            o.dimensions = newDim
            bpy.context.scene.update()

    # render/save image
    bpy.data.cameras['Camera'].type = 'ORTHO'
    bpy.ops.view3d.camera_to_view_selected()
    filepath = generateFileName('images/' + filepath.rstrip('-') + '_0001.png')
    bpy.data.scenes['Scene'].render.filepath = filepath
    bpy.ops.render.render(write_still=True)
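# visualize() calls a look_at() helper defined elsewhere in the script. A
# minimal sketch of one common implementation, assuming the object should aim
# its -Z axis at the target with +Y up (Blender's camera convention);
# to_track_quat is the real mathutils.Vector API:
def look_at_sketch(ob, target):
    direction = target - ob.location
    ob.rotation_euler = direction.to_track_quat('-Z', 'Y').to_euler()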