def main(): # Gets the current scene, there can be many scenes in 1 blend file. sce = bpy.data.scenes.active # Get the active object, there can only ever be 1 # and the active object is always the editmode object. #ob_act = sce.objects.active ob_act = sce.objects.active if not ob_act or ob_act.type != 'Mesh': BPyMessages.Error_NoMeshActive() return # Saves the editmode state and go's out of # editmode if its enabled, we cant make # changes to the mesh data while in editmode. is_editmode = Window.EditMode() if is_editmode: Window.EditMode(0) Window.WaitCursor(1) me = ob_act.getData(mesh=1) # old NMesh api is default t = sys.time() # Restore editmode if it was enabled if is_editmode: Window.EditMode(1) print 'ActionScript 3.0 Exporter Script finished in %.2f seconds' % ( sys.time() - t) Window.WaitCursor(0)
def main(): # Gets the current scene, there can be many scenes in 1 blend file. sce = Scene.GetCurrent() # Get the active object, there can only ever be 1 # and the active object is always the editmode object. ob_act = sce.objects.active me = ob_act.getData(mesh=1) if not ob_act or ob_act.type != 'Mesh' or not me.faceUV: BPyMessages.Error_NoMeshUvActive() return # Saves the editmode state and go's out of # editmode if its enabled, we cant make # changes to the mesh data while in editmode. is_editmode = Window.EditMode() if is_editmode: Window.EditMode(0) Window.WaitCursor(1) t = sys.time() # Run the mesh editing function seams_from_islands(me) if is_editmode: Window.EditMode(1) # Timing the script is a good way to be aware on any speed hits when scripting print 'UV Seams from Islands finished in %.2f seconds' % (sys.time()-t) Window.WaitCursor(0)
def main(): # Gets the current scene, there can be many scenes in 1 blend file. sce = bpy.data.scenes.active # Get the active object, there can only ever be 1 # and the active object is always the editmode object. ob_act = sce.objects.active if not ob_act or ob_act.type != 'Mesh': BPyMessages.Error_NoMeshActive() return # Saves the editmode state and go's out of # editmode if its enabled, we cant make # changes to the mesh data while in editmode. is_editmode = Window.EditMode() Window.EditMode(0) Window.WaitCursor(1) me = ob_act.getData(mesh=1) # old NMesh api is default t = sys.time() # Run the mesh editing function vgroup_invert(ob_act, me) # Timing the script is a good way to be aware on any speed hits when scripting print 'Invert VGroup in %.2f seconds' % (sys.time()-t) Window.WaitCursor(0) if is_editmode: Window.EditMode(1)
def main(): # Gets the current scene, there can be many scenes in 1 blend file. sce = Scene.GetCurrent() # Get the active object, there can only ever be 1 # and the active object is always the editmode object. ob_act = sce.objects.active me = ob_act.getData(mesh=1) if not ob_act or ob_act.type != 'Mesh' or not me.faceUV: BPyMessages.Error_NoMeshUvActive() return # Saves the editmode state and go's out of # editmode if its enabled, we cant make # changes to the mesh data while in editmode. is_editmode = Window.EditMode() if is_editmode: Window.EditMode(0) Window.WaitCursor(1) t = sys.time() # Run the mesh editing function seams_from_islands(me) if is_editmode: Window.EditMode(1) # Timing the script is a good way to be aware on any speed hits when scripting print 'UV Seams from Islands finished in %.2f seconds' % (sys.time() - t) Window.WaitCursor(0)
def main():
    """Prompt for a weight cutoff / max bone count, then squash vertex
    weights on every selected mesh object via weight_squash().

    Reports the number of vertices removed from groups and the elapsed
    time; preserves the user's editmode state.
    """
    # BUG FIX: the original read ``self.context.selected_objects`` but this
    # is a module-level function with no ``self`` in scope (NameError), and
    # compared against 'MESH' while the 2.4x API reports 'Mesh'. Use the
    # selection API exactly as the sibling weight-squash script does.
    obs = [ob for ob in Blender.Object.GetSelected() if ob.type == 'Mesh']
    # ask for weights to delete
    PREF_CUTOFF = Blender.Draw.Create(0.02)
    PREF_NBONES = Blender.Draw.Create(4)
    pup_block = [
        ('Weight Cutoff', PREF_CUTOFF, 0.001, 0.499, 'Vertices with weight less than this number will be deleted from the vertex group.'),
        ('Max Bones', PREF_NBONES, 1, 10, 'Also remove weakest influences so total number of bone influences is never larger than this number.'),
    ]
    if not Blender.Draw.PupBlock('Vertex Squash', pup_block):
        return
    # saves editmode state and exit editmode if it is enabled
    # (cannot make changes mesh data in editmode)
    is_editmode = Window.EditMode()
    Window.EditMode(0)
    Window.WaitCursor(1)
    t = sys.time()
    # run script
    num_affected = 0
    for ob in obs:
        me = ob.getData(mesh=1)  # get Mesh, not NMesh
        num_affected += weight_squash(me, cutoff=PREF_CUTOFF.val,
                                      nbones=PREF_NBONES.val)
    print('Weight Squash finished in %.2f seconds' % (sys.time() - t))
    print('%i vertices removed from groups' % num_affected)
    Window.WaitCursor(0)
    if is_editmode:
        Window.EditMode(1)
def write(filename, objects): def veckey3d(v): return round(v.x, 6), round(v.y, 6), round(v.z, 6) def veckey2d(v): return round(v.x, 6), round(v.y, 6) print 'OBJ Export path: "%s"' % filename temp_mesh_name = '~tmp-mesh' time1 = sys.time() scn = Scene.GetCurrent() file = open(filename, "w") # Write Header file.write('# Blender3D v%s EOL Armature File: %s\n' % (Blender.Get('version'), Blender.Get('filename').split('/')[-1].split('\\')[-1])) file.write('# www.blender3d.org\n') # Get all armatures for ob_main in objects: for ob, ob_mat in BPyObject.getDerivedObjects(ob_main): write_armature(file, ob) write_poses(file, ob) file.close() print "Armature Export time: %.2f" % (sys.time() - time1)
def main(): # Gets the current scene, there can be many scenes in 1 blend file. sce = bpy.data.scenes.active # Get the active object, there can only ever be 1 # and the active object is always the editmode object. #ob_act = sce.objects.active ob_act = sce.objects.active if not ob_act or ob_act.type != 'Mesh': BPyMessages.Error_NoMeshActive() return # Saves the editmode state and go's out of # editmode if its enabled, we cant make # changes to the mesh data while in editmode. is_editmode = Window.EditMode() if is_editmode: Window.EditMode(0) Window.WaitCursor(1) me = ob_act.getData(mesh=1) # old NMesh api is default t = sys.time() # Restore editmode if it was enabled if is_editmode: Window.EditMode(1) print 'ActionScript 3.0 Exporter Script finished in %.2f seconds' % (sys.time()-t) Window.WaitCursor(0)
def main(): # Gets the current scene, there can be many scenes in 1 blend file. sce = bpy.data.scenes.active # Get the active object, there can only ever be 1 # and the active object is always the editmode object. ob_act = sce.objects.active if not ob_act or ob_act.type != 'Mesh': BPyMessages.Error_NoMeshActive() return # Saves the editmode state and go's out of # editmode if its enabled, we cant make # changes to the mesh data while in editmode. is_editmode = Window.EditMode() Window.EditMode(0) PREF_ALL_VGROUPS = Draw.PupMenu("All Groups?%t|All Groups%x1|Active Group Only%x0") if PREF_ALL_VGROUPS==-1: return Window.WaitCursor(1) me = ob_act.getData(mesh=1) # old NMesh api is default t = sys.time() # Run the mesh editing function vgroup_average(ob_act, me, sce, PREF_ALL_VGROUPS) # Timing the script is a good way to be aware on any speed hits when scripting print 'Average VGroups in %.2f seconds' % (sys.time()-t) Window.WaitCursor(0) if is_editmode: Window.EditMode(1)
def main(): # Gets the current scene, there can be many scenes in 1 blend file. sce = bpy.data.scenes.active # Get the active object, there can only ever be 1 # and the active object is always the editmode object. ob_act = sce.objects.active if not ob_act or ob_act.type != 'Mesh': BPyMessages.Error_NoMeshActive() return is_editmode = Window.EditMode() if is_editmode: Window.EditMode(0) Window.WaitCursor(1) me = ob_act.getData(mesh=1) # old NMesh api is default t = sys.time() # Run the mesh editing function my_mesh_util(me) # Restore editmode if it was enabled if is_editmode: Window.EditMode(1) # Timing the script is a good way to be aware on any speed hits when scripting print 'My Script finished in %.2f seconds' % (sys.time() - t) Window.WaitCursor(0)
def main(): # Gets the current scene, there can be many scenes in 1 blend file. sce = bpy.data.scenes.active # Get the active object, there can only ever be 1 # and the active object is always the editmode object. ob_act = sce.objects.active if not ob_act or ob_act.type != "Mesh": BPyMessages.Error_NoMeshActive() return # Saves the editmode state and go's out of # editmode if its enabled, we cant make # changes to the mesh data while in editmode. is_editmode = Window.EditMode() if is_editmode: Window.EditMode(0) Window.WaitCursor(1) me = ob_act.getData(mesh=1) # old NMesh api is default t = sys.time() # Run the mesh editing function my_mesh_util(me) # Restore editmode if it was enabled if is_editmode: Window.EditMode(1) # Timing the script is a good way to be aware on any speed hits when scripting print "My Script finished in %.2f seconds" % (sys.time() - t) Window.WaitCursor(0)
def write(filename, objects): def veckey3d(v): return round(v.x, 6), round(v.y, 6), round(v.z, 6) def veckey2d(v): return round(v.x, 6), round(v.y, 6) print 'OBJ Export path: "%s"' % filename temp_mesh_name = '~tmp-mesh' time1 = sys.time() scn = Scene.GetCurrent() file = open(filename, "w") # Write Header file.write('# Blender3D v%s EOL Armature File: %s\n' % (Blender.Get('version'), Blender.Get('filename').split('/')[-1].split('\\')[-1] )) file.write('# www.blender3d.org\n') # Get all armatures for ob_main in objects: for ob, ob_mat in BPyObject.getDerivedObjects(ob_main): write_armature(file,ob) write_poses(file,ob) file.close() print "Armature Export time: %.2f" % (sys.time() - time1)
def main():
    """Ask for a weight cutoff and a bone limit, then squash weights on
    every selected mesh with weight_squash(), reporting totals."""
    # Selected mesh objects only.
    meshes = [ob for ob in Blender.Object.GetSelected() if ob.type == 'Mesh']
    # Popup: which weights to delete.
    PREF_CUTOFF = Blender.Draw.Create(0.02)
    PREF_NBONES = Blender.Draw.Create(4)
    popup = [
        ('Weight Cutoff', PREF_CUTOFF, 0.001, 0.499, 'Vertices with weight less than this number will be deleted from the vertex group.'),
        ('Max Bones', PREF_NBONES, 1, 10, 'Also remove weakest influences so total number of bone influences is never larger than this number.'),
    ]
    if not Blender.Draw.PupBlock('Vertex Squash', popup):
        return
    # Editmode locks mesh data; remember and disable it.
    editing = Window.EditMode()
    Window.EditMode(0)
    Window.WaitCursor(1)
    started = sys.time()
    # Process each selected mesh, accumulating the removal count.
    total = 0
    for mesh_ob in meshes:
        mesh = mesh_ob.getData(mesh=1)  # get Mesh, not NMesh
        total += weight_squash(mesh, cutoff=PREF_CUTOFF.val, nbones=PREF_NBONES.val)
    print('Weight Squash finished in %.2f seconds' % (sys.time() - started))
    print('%i vertices removed from groups' % total)
    Window.WaitCursor(0)
    if editing:
        Window.EditMode(1)
def main(arg): # get selected meshes obs = [ob for ob in Blender.Object.GetSelected() if ob.type == 'Mesh'] # saves editmode state and exit editmode if it is enabled # (cannot make changes mesh data in editmode) is_editmode = Window.EditMode() Window.EditMode(0) Window.WaitCursor(1) t = sys.time() # run script num_affected = 0 for ob in obs: me = ob.getData(mesh=1) # get Mesh, not NMesh # are any vertices selected? selected_only = is_editmode and (1 in ( vert.sel for vert in me.verts )) # create mesh by requested type if arg == 'box': hull_box(ob, me, selected_only) elif arg == 'sphere': hull_sphere(ob, me, selected_only) elif arg == 'convex': PREF_PRECISION = Blender.Draw.Create(0.1) pup_block = [ ('Precision', PREF_PRECISION, 0.001, 2.0, 'Maximum distance by which a vertex may fall outside the hull: larger values yield simpler hulls at the expense of missing more vertices.') ] if not Blender.Draw.PupBlock('Convex Hull', pup_block): return hull_convex(ob, me, selected_only, precision = PREF_PRECISION.val) print 'Hull finished in %.2f seconds' % (sys.time()-t) Window.WaitCursor(0) if is_editmode: Window.EditMode(1)
def main(arg):
    """Serialise the pose of every selected pose-bone of the first
    selected armature into a Blender text buffer.

    Each line written has the form ``bonename/r0c0,r0c1,...;r1c0,...``:
    a 4x4 matrix built from the bone's rotation quaternion with the
    translation written into the fourth row.
    """
    # get selected bones (only the first selected armature is considered)
    obs = [ob for ob in Blender.Object.GetSelected() if ob.type == 'Armature']
    if obs:
        boneitems = [(bonename, bone) for (bonename, bone) in obs[0].getPose().bones.items() if bone.sel]
    else:
        boneitems = []
    # exit if no bones selected
    if not boneitems:
        print("no bones selected in pose mode")
        Blender.Draw.PupMenu('ERROR%t|no bones selected in pose mode')
        return
    # ask for the target text buffer name
    PREF_BUFFER = Blender.Draw.Create("BonePose")
    pup_block = [
        ('Text Buffer', PREF_BUFFER, 0, 20, 'The text buffer where to store the bone poses.'),
    ]
    if not Blender.Draw.PupBlock('Save Bone Pose', pup_block):
        return
    # saves editmode state and exit editmode if it is enabled
    # (cannot make changes mesh data in editmode)
    is_editmode = Window.EditMode()
    Window.EditMode(0)
    Window.WaitCursor(1)
    t = sys.time()
    # run script
    # open/clear text buffer; Text.Get raises NameError in the 2.4x API
    # when the buffer does not exist yet, in which case we create it.
    try:
        posetxt = Blender.Text.Get(PREF_BUFFER.val)
    except NameError:
        posetxt = Blender.Text.New(PREF_BUFFER.val)
    posetxt.clear()
    for bonename, bone in boneitems:
        print("saving pose of bone %s to %s" % (bonename, PREF_BUFFER.val))
        # Rotation as a 4x4 matrix, translation injected into row 3.
        matrix = bone.quat.toMatrix()
        matrix.resize4x4()
        matrix[3][0] = bone.loc[0]
        matrix[3][1] = bone.loc[1]
        matrix[3][2] = bone.loc[2]
        # Flatten row by row: ';' separates rows, ',' separates cells.
        matrixtxt = ''
        for row in matrix:
            matrixtxt = '%s;%s,%s,%s,%s' % (matrixtxt, row[0], row[1], row[2], row[3])
        # matrixtxt[1:] discards the first semi-colon
        posetxt.write("%s/%s\n" % (bonename, matrixtxt[1:]))
    # report finish and timing
    print 'Save bone pose finished in %.2f seconds' % (sys.time()-t)
    Window.WaitCursor(0)
    if is_editmode:
        Window.EditMode(1)
def do_export(sel_group, filepath):
    """Export the objects in *sel_group* (meshes and curves) to a DXF
    Drawing saved at *filepath*.

    Honours the module-level FLATTEN and SCALE_FACTOR options: FLATTEN
    projects geometry through the current view matrix with its Z axis
    zeroed; SCALE_FACTOR uniformly scales each object's matrix.
    """
    Window.WaitCursor(1)
    t = sys.time()
    #init Drawing ---------------------
    d=Drawing()
    #add Tables -----------------
    #d.blocks.append(b)                    #table blocks
    d.styles.append(Style())            #table styles
    d.views.append(View('Normal'))        #table view
    d.views.append(ViewByWindow('Window',leftBottom=(1,0),rightTop=(2,1))) #idem
    #add Entities --------------------
    # Tracks whether at least one entity was produced; nothing is saved
    # otherwise.
    something_ready = False
    #ViewVector = Mathutils.Vector(Window.GetViewVector())
    #print 'deb: ViewVector=', ViewVector #------------------
    mw0 = Window.GetViewMatrix()
    #mw0 = Window.GetPerspMatrix() #TODO: how get it working?
    mw = mw0.copy()
    if FLATTEN:
        # Zero the Z column so the view matrix projects onto a plane.
        m0 = Mathutils.Matrix()
        m0[2][2]=0.0
        mw *= m0 #flatten ViewMatrix
    for ob in sel_group:
        entities = []
        mx = ob.matrix.copy()
        mb = mx.copy()
        #print 'deb: mb =\n', mb #---------
        #print 'deb: mw0 =\n', mw0 #---------
        mx_n = mx.rotationPart() * mw0.rotationPart() #trans-matrix for normal_vectors
        if SCALE_FACTOR!=1.0: mx *= SCALE_FACTOR
        if FLATTEN: mx *= mw
        #mx_inv = mx.copy().invert()
        #print 'deb: mx =\n', mx #---------
        #print 'deb: mx_inv=\n', mx_inv #---------
        # Dispatch on object type; anything else contributes no entities.
        if (ob.type == 'Mesh'):
            entities = exportMesh(ob, mx, mx_n)
        elif (ob.type == 'Curve'):
            entities = exportCurve(ob, mx)
        for e in entities:
            d.append(e)
            something_ready = True
    if something_ready:
        d.saveas(filepath)
        Window.WaitCursor(0)
        #Draw.PupMenu('DXF Exporter: job finished')
        print 'exported to %s' % filepath
        print 'finished in %.2f seconds' % (sys.time()-t)
    else:
        # Nothing matched the export options: report and save nothing.
        Window.WaitCursor(0)
        print "Abort: selected objects dont mach choosen export option, nothing exported!"
        Draw.PupMenu('DXF Exporter: nothing exported!|selected objects dont mach choosen export option!')
def filesel_callback(filename):
    """File-selector callback: import the AC3D model(s) found in
    *filename*, reporting progress and elapsed time via inform()."""
    inform("\nTrying to import AC3D model(s) from:\n%s ..." % filename)
    # Busy cursor while the importer runs.
    Window.WaitCursor(1)
    t0 = bsys.time()
    importer = AC3DImport(filename)
    Window.WaitCursor(0)
    elapsed = bsys.time() - t0
    inform('Done! Data imported in %.3f seconds.\n' % elapsed)
def fs_callback(filename): t = sys.time() import time import datetime print "====== IMPORTING UNREAL SKELETAL ANIMATION FORMAT========\r\n" ImportPSA(filename) print 'Import PSA Script finished in %.2f seconds' % (sys.time() - t) t = datetime.datetime.now() EpochSeconds = time.mktime(t.timetuple()) print datetime.datetime.fromtimestamp(EpochSeconds)
def fs_callback(filename): t = sys.time() import time import datetime print "====== IMPORTING UNREAL SKELETAL MESH FORMAT========\r\n" ImportPSK(filename) print 'Import PSK Script finished in %.2f seconds' % (sys.time()-t) t = datetime.datetime.now() EpochSeconds = time.mktime(t.timetuple()) print datetime.datetime.fromtimestamp(EpochSeconds)
def main(arg):
    """Attach a ``priority:<n>`` NULL constraint to every selected pose
    bone of the first selected armature, prompting for the priority value.

    Reuses an existing ``priority:`` constraint on a bone if one is
    already present, otherwise appends a new NULL constraint.
    """
    # BUG FIX: the original referenced ``self.context.selected_objects``
    # (no ``self`` exists in this module-level function -> NameError) and
    # compared against 'ARMATURE'; the sibling script uses
    # Blender.Object.GetSelected() and the 2.4x type string 'Armature'.
    obs = [ob for ob in Blender.Object.GetSelected() if ob.type == 'Armature']
    if obs:
        boneitems = [(bonename, bone)
                     for (bonename, bone) in list(obs[0].getPose().bones.items())
                     if bone.sel]
    else:
        boneitems = []
    # exit if no bones selected
    if not boneitems:
        print("no bones selected in pose mode")
        Blender.Draw.PupMenu('ERROR%t|no bones selected in pose mode')
        return
    # ask for the priority value
    PREF_PRIORITY = Blender.Draw.Create(30)
    pup_block = [
        ('Priority', PREF_PRIORITY, 0, 200, 'Bone priority.'),
    ]
    if not Blender.Draw.PupBlock('Set Bone Priority', pup_block):
        return
    # saves editmode state and exit editmode if it is enabled
    # (cannot make changes mesh data in editmode)
    is_editmode = Window.EditMode()
    Window.EditMode(0)
    Window.WaitCursor(1)
    t = sys.time()
    # run script
    for bonename, bone in boneitems:
        # get priorty null constraint
        print(("setting priority %i on %s" % (PREF_PRIORITY.val, bonename)))
        priorityconstr = None
        for constr in bone.constraints:
            if constr.type == Blender.Constraint.Type.NULL \
               and constr.name[:9] == "priority:":
                priorityconstr = constr
                break
        if not priorityconstr:
            priorityconstr = bone.constraints.append(
                Blender.Constraint.Type.NULL)
        priorityconstr.name = "priority:%i" % PREF_PRIORITY.val
    print('Set bone priority finished in %.2f seconds' % (sys.time() - t))
    Window.WaitCursor(0)
    if is_editmode:
        Window.EditMode(1)
def main():
    """Turn the active mesh's edges into solid wireframe geometry via
    solid_wire(), after asking for thickness/style options.

    Every early-return path restores the user's editmode state.
    """
    # Gets the current scene, there can be many scenes in 1 blend file.
    sce = bpy.data.scenes.active
    # Get the active object, there can only ever be 1
    # and the active object is always the editmode object.
    ob_act = sce.objects.active
    if not ob_act or ob_act.type != 'Mesh':
        BPyMessages.Error_NoMeshActive()
        return
    # Saves the editmode state and go's out of
    # editmode if its enabled, we cant make
    # changes to the mesh data while in editmode.
    is_editmode = Window.EditMode()
    Window.EditMode(0)
    me = ob_act.getData(mesh=1)  # old NMesh api is default
    # A wireframe needs faces to work from; bail out (restoring
    # editmode) on an empty mesh.
    if len(me.faces)==0:
        BPyMessages.Error_NoMeshFaces()
        if is_editmode: Window.EditMode(1)
        return
    # Create the variables.
    PREF_THICK = Blender.Draw.Create(0.005)
    PREF_SOLID = Blender.Draw.Create(1)
    PREF_SHARP = Blender.Draw.Create(1)
    PREF_XSHARP = Blender.Draw.Create(0)
    pup_block = [
        ('Thick:', PREF_THICK, 0.0001, 2.0, 'Skin thickness in mesh space.'),
        ('Solid Wire', PREF_SOLID, 'If Disabled, will use 6 sided wire segments'),
        ('Sharp Wire', PREF_SHARP, 'Use the original mesh topology for more accurate sharp wire.'),
        ('Extra Sharp', PREF_XSHARP, 'Use less geometry to create a sharper looking wire'),
    ]
    # User cancelled the options popup: restore editmode and abort.
    if not Blender.Draw.PupBlock('Solid Wireframe', pup_block):
        if is_editmode: Window.EditMode(1)
        return
    Window.WaitCursor(1)
    t = sys.time()
    # Run the mesh editing function
    solid_wire(ob_act, me, sce, PREF_THICK.val, PREF_SOLID.val, PREF_SHARP.val, PREF_XSHARP.val)
    # Timing the script is a good way to be aware on any speed hits when scripting
    print 'Solid Wireframe finished in %.2f seconds' % (sys.time()-t)
    Window.WaitCursor(0)
    if is_editmode: Window.EditMode(1)
def main(arg):
    """Attach a ``priority:<n>`` NULL constraint to every selected pose
    bone of the first selected armature.

    Prompts for the priority value; reuses an existing ``priority:``
    NULL constraint on a bone when present, otherwise appends one.
    """
    # get selected bones (only the first selected armature is considered)
    obs = [ob for ob in Blender.Object.GetSelected() if ob.type == 'Armature']
    if obs:
        boneitems = [(bonename, bone) for (bonename, bone) in list(obs[0].getPose().bones.items()) if bone.sel]
    else:
        boneitems = []
    # exit if no bones selected
    if not boneitems:
        print("no bones selected in pose mode")
        Blender.Draw.PupMenu('ERROR%t|no bones selected in pose mode')
        return
    # ask for the priority value to set
    PREF_PRIORITY = Blender.Draw.Create(30)
    pup_block = [
        ('Priority', PREF_PRIORITY, 0, 200, 'Bone priority.'),
    ]
    if not Blender.Draw.PupBlock('Set Bone Priority', pup_block):
        return
    # saves editmode state and exit editmode if it is enabled
    # (cannot make changes mesh data in editmode)
    is_editmode = Window.EditMode()
    Window.EditMode(0)
    Window.WaitCursor(1)
    t = sys.time()
    # run script
    for bonename, bone in boneitems:
        # get priorty null constraint
        print(("setting priority %i on %s" % (PREF_PRIORITY.val, bonename)))
        # Look for an existing NULL constraint named "priority:*".
        priorityconstr = None
        for constr in bone.constraints:
            if constr.type == Blender.Constraint.Type.NULL \
               and constr.name[:9] == "priority:":
                priorityconstr = constr
                break
        # None found: append a fresh NULL constraint to carry the tag.
        if not priorityconstr:
            priorityconstr = bone.constraints.append(
                Blender.Constraint.Type.NULL)
        priorityconstr.name = "priority:%i" % PREF_PRIORITY.val
    print('Set bone priority finished in %.2f seconds' % (sys.time()-t))
    Window.WaitCursor(0)
    if is_editmode:
        Window.EditMode(1)
def main(): # Gets the current scene, there can be many scenes in 1 blend file. sce = bpy.data.scenes.active Window.WaitCursor(1) t = sys.time() # Run the object editing function export_to_lua(sce) # Timing the script is a good way to be aware on any speed hits when scripting print 'My Script finished in %.2f seconds' % (sys.time()-t) Window.WaitCursor(0)
def fs_callback(filename):
    """File-selector callback for the AC3D exporter: normalise the
    extension, confirm overwrite, remember the export directory and open
    the output file (reporting failures via a popup).

    NOTE(review): the visible chunk ends after the open(); the rest of
    the export presumably follows in the full file.
    """
    global EXPORT_DIR, OBJS, CONFIRM_OVERWRITE, VERBOSE
    # Force the .ac extension.
    if not filename.endswith('.ac'): filename = '%s.ac' % filename
    # Ask before clobbering an existing file (when confirmation is on).
    if bsys.exists(filename) and CONFIRM_OVERWRITE:
        if Blender.Draw.PupMenu('OVERWRITE?%t|File exists') != 1:
            return
    Blender.Window.WaitCursor(1)
    starttime = bsys.time()
    # Persist the last-used export directory in the registry.
    export_dir = bsys.dirname(filename)
    if export_dir != EXPORT_DIR:
        EXPORT_DIR = export_dir
        update_RegistryInfo()
    # Python 2 exception-tuple syntax: unpacks IOError's (errno, strerror).
    try:
        file = open(filename, 'w')
    except IOError, (errno, strerror):
        error = "IOError #%s: %s" % (errno, strerror)
        REPORT_DATA['errors'].append("Saving failed - %s." % error)
        error_msg = "Couldn't save file!%%t|%s" % error
        Blender.Draw.PupMenu(error_msg)
        return
def main():
    """Find vertices not assigned to any vertex group on the active mesh
    (via find_unassigned_verts) and show the report in a popup."""
    scn = bpy.data.scenes.active
    ob = scn.objects.active
    # Need an active mesh object to inspect.
    if not ob or ob.type != 'Mesh':
        BPyMessages.Error_NoMeshActive()
        return
    # Leave editmode while inspecting; remember it for later.
    editing = Window.EditMode()
    if editing:
        Window.EditMode(0)
    Window.WaitCursor(1)
    started = sys.time()
    # Run the mesh editing function.
    findings = find_unassigned_verts(ob)
    # Back into editmode if that is where the user was.
    if editing:
        Window.EditMode(1)
    # Non-empty report: append the hint and show the popup.
    if findings != "":
        findings = findings + "!!! I have selected all affected verts. Please lookup your mesh in edit mode !!!"
        popup_text = "Report: %t|" + findings + "|OK"
        Draw.PupMenu(popup_text)
    Window.WaitCursor(0)
def main():
    """Bake fake ambient occlusion into the active mesh's vertex colors
    via vertexFakeAO(), after collecting blur/clamp options in a popup.
    """
    sce= bpy.data.scenes.active
    ob= sce.objects.active
    if not ob or ob.type != 'Mesh':
        Draw.PupMenu('Error, no active mesh object, aborting.')
        return
    me= ob.getData(mesh=1)
    # Popup option holders.
    PREF_BLUR_ITERATIONS= Draw.Create(1)
    PREF_BLUR_RADIUS= Draw.Create(0.05)
    PREF_MIN_EDLEN= Draw.Create(0.01)
    PREF_CLAMP_CONCAVE= Draw.Create(90)
    PREF_CLAMP_CONVEX= Draw.Create(20)
    PREF_SHADOW_ONLY= Draw.Create(0)
    PREF_SEL_ONLY= Draw.Create(0)
    pup_block= [
        'Post AO Blur',
        (' Iterations:', PREF_BLUR_ITERATIONS, 0, 40, 'Number times to blur the colors. (higher blurs more)'),
        (' Blur Radius:', PREF_BLUR_RADIUS, 0.01, 40.0, 'How much distance effects blur transfur (higher blurs more).'),
        (' Min EdgeLen:', PREF_MIN_EDLEN, 0.00001, 1.0, 'Minimim edge length to blur (very low values can cause errors).'),
        'Angle Clipping',
        (' Highlight Angle:', PREF_CLAMP_CONVEX, 0, 180, 'Less then 180 limits the angle used in the tonal range.'),
        (' Shadow Angle:', PREF_CLAMP_CONCAVE, 0, 180, 'Less then 180 limits the angle used in the tonal range.'),
        ('Shadow Only', PREF_SHADOW_ONLY, 'Dont calculate highlights for convex areas.'),
        ('Sel Faces Only', PREF_SEL_ONLY, 'Only apply to UV/Face selected faces (mix vpain/uvface select).'),
    ]
    if not Draw.PupBlock('SelfShadow...', pup_block):
        return
    # Unwrap the Draw.Create button holders into their plain values.
    PREF_BLUR_ITERATIONS= PREF_BLUR_ITERATIONS.val
    PREF_BLUR_RADIUS= PREF_BLUR_RADIUS.val
    PREF_MIN_EDLEN= PREF_MIN_EDLEN.val
    PREF_CLAMP_CONCAVE= PREF_CLAMP_CONCAVE.val
    PREF_CLAMP_CONVEX= PREF_CLAMP_CONVEX.val
    PREF_SHADOW_ONLY= PREF_SHADOW_ONLY.val
    PREF_SEL_ONLY= PREF_SEL_ONLY.val
    # Vertex colors are required for the bake; enable them if missing.
    if not me.vertexColors:
        me.vertexColors= 1
    t= sys.time()
    vertexFakeAO(me, PREF_BLUR_ITERATIONS, PREF_BLUR_RADIUS, PREF_MIN_EDLEN, PREF_CLAMP_CONCAVE, PREF_CLAMP_CONVEX, PREF_SHADOW_ONLY, PREF_SEL_ONLY)
    print 'done in %.6f' % (sys.time()-t)
def main():
    """Bake fake ambient occlusion into the active mesh's vertex colors
    via vertexFakeAO(), after collecting blur/clamp options in a popup.
    """
    sce = bpy.data.scenes.active
    ob = sce.objects.active
    if not ob or ob.type != 'Mesh':
        Draw.PupMenu('Error, no active mesh object, aborting.')
        return
    me = ob.getData(mesh=1)
    # Popup option holders.
    PREF_BLUR_ITERATIONS = Draw.Create(1)
    PREF_BLUR_STRENGTH = Draw.Create(0.5)
    PREF_CLAMP_CONCAVE = Draw.Create(90)
    PREF_CLAMP_CONVEX = Draw.Create(20)
    PREF_SHADOW_ONLY = Draw.Create(0)
    PREF_SEL_ONLY = Draw.Create(0)
    pup_block= [
        'Post AO Blur',
        ('Strength:', PREF_BLUR_STRENGTH, 0, 1, 'Blur strength per iteration'),
        ('Iterations:', PREF_BLUR_ITERATIONS, 0, 40, 'Number times to blur the colors. (higher blurs more)'),
        'Angle Clipping',
        ('Highlight Angle:', PREF_CLAMP_CONVEX, 0, 180, 'Less then 180 limits the angle used in the tonal range.'),
        ('Shadow Angle:', PREF_CLAMP_CONCAVE, 0, 180, 'Less then 180 limits the angle used in the tonal range.'),
        ('Shadow Only', PREF_SHADOW_ONLY, 'Dont calculate highlights for convex areas.'),
        ('Sel Faces Only', PREF_SEL_ONLY, 'Only apply to UV/Face selected faces (mix vpain/uvface select).'),
    ]
    if not Draw.PupBlock('SelfShadow...', pup_block):
        return
    # Vertex colors are required for the bake; enable them if missing.
    if not me.vertexColors:
        me.vertexColors = 1
    t = sys.time()
    vertexFakeAO(me, PREF_BLUR_ITERATIONS.val, \
        PREF_BLUR_STRENGTH.val, \
        PREF_CLAMP_CONCAVE.val, \
        PREF_CLAMP_CONVEX.val, \
        PREF_SHADOW_ONLY.val, \
        PREF_SEL_ONLY.val)
    # With modifiers present, push the changed data through an update so
    # the display reflects the new vertex colors.
    if ob.modifiers:
        me.update()
    print 'done in %.6f' % (sys.time() - t)
def main():
    """Bake fake ambient occlusion into the active mesh's vertex colors
    via vertexFakeAO() (duplicate of the sibling entry point, differing
    only in spacing conventions)."""
    sce= bpy.data.scenes.active
    ob= sce.objects.active
    if not ob or ob.type != 'Mesh':
        Draw.PupMenu('Error, no active mesh object, aborting.')
        return
    me= ob.getData(mesh=1)
    # Popup option holders.
    PREF_BLUR_ITERATIONS= Draw.Create(1)
    PREF_BLUR_STRENGTH= Draw.Create(0.5)
    PREF_CLAMP_CONCAVE= Draw.Create(90)
    PREF_CLAMP_CONVEX= Draw.Create(20)
    PREF_SHADOW_ONLY= Draw.Create(0)
    PREF_SEL_ONLY= Draw.Create(0)
    pup_block= [
        'Post AO Blur',
        ('Strength:', PREF_BLUR_STRENGTH, 0, 1, 'Blur strength per iteration'),
        ('Iterations:', PREF_BLUR_ITERATIONS, 0, 40, 'Number times to blur the colors. (higher blurs more)'),
        'Angle Clipping',
        ('Highlight Angle:', PREF_CLAMP_CONVEX, 0, 180, 'Less then 180 limits the angle used in the tonal range.'),
        ('Shadow Angle:', PREF_CLAMP_CONCAVE, 0, 180, 'Less then 180 limits the angle used in the tonal range.'),
        ('Shadow Only', PREF_SHADOW_ONLY, 'Dont calculate highlights for convex areas.'),
        ('Sel Faces Only', PREF_SEL_ONLY, 'Only apply to UV/Face selected faces (mix vpain/uvface select).'),
    ]
    if not Draw.PupBlock('SelfShadow...', pup_block):
        return
    # Vertex colors are required for the bake; enable them if missing.
    if not me.vertexColors:
        me.vertexColors= 1
    t= sys.time()
    vertexFakeAO(me, PREF_BLUR_ITERATIONS.val, \
        PREF_BLUR_STRENGTH.val, \
        PREF_CLAMP_CONCAVE.val, \
        PREF_CLAMP_CONVEX.val, \
        PREF_SHADOW_ONLY.val, \
        PREF_SEL_ONLY.val)
    # With modifiers present, push the changed data through an update.
    if ob.modifiers:
        me.update()
    print 'done in %.6f' % (sys.time()-t)
def main(): THICK = Draw.Create(0.02) if not Draw.PupBlock('Cube wireframe', [\ ('Thick:', THICK, 0.0001, 10, 'Thickness of the skinned edges'),\ ]): return # Gets the current scene, there can be many scenes in 1 blend file. sce = bpy.data.scenes.active # Get the active object, there can only ever be 1 # and the active object is always the editmode object. ob_act = sce.objects.active if not ob_act or ob_act.type != 'Mesh': BPyMessages.Error_NoMeshActive() return # Saves the editmode state and go's out of # editmode if its enabled, we cant make # changes to the mesh data while in editmode. is_editmode = Window.EditMode() if is_editmode: Window.EditMode(0) Window.WaitCursor(1) me = ob_act.getData(mesh=1) # old NMesh api is default t = sys.time() # Run the mesh editing function create_wired_mesh(me, THICK.val/2.0) ob_act.select(False) # Restore editmode if it was enabled if is_editmode: Window.EditMode(1) # Timing the script is a good way to be aware on any speed hits when scripting print 'My Script finished in %.2f seconds' % (sys.time()-t) Window.WaitCursor(0)
def __init__(self, filename):
    """Initialise the importer state for *filename*: remember its
    directory and base name, reset all geometry/material slots, and
    start the timer."""
    # find where the directory path ends
    # (both un*x and win accounted for)
    self.path, simpleFile = os.path.split(sys.expandpath(filename))
    # Object name = file name without its extension.
    self.objName = os.path.splitext(simpleFile)[0]
    # Geometry accumulators, filled during parsing.
    self.faces = []
    self.verts = []
    self.uvs = []
    self.faceuvs = []
    # material values (to be checked later)
    self.alpha =\
    self.rgbCol =\
    self.amb =\
    self.emit =\
    self.colorTexture =\
    self.specTexture =\
    self.spec =\
    self.specCol = None
    # finally, start chronometer
    # NOTE(review): return value is discarded — presumably Blender.sys.time()
    # is called for a side effect or this was meant to be stored; confirm.
    sys.time()
def get_modules(since=1):
    """Returns the set of built-in modules and any modules that have been
    imported into the system upto 'since' seconds ago.

    since -- minimum age (seconds) of the cached snapshot before it is
             refreshed from sys.modules.
    Returns the keys (module names) of the module cache.
    """
    global _modules, _modules_updated
    t = time()
    # Refresh the cached snapshot only when it is older than 'since'
    # seconds, to avoid scanning sys.modules on every call.
    if _modules_updated < t - since:
        _modules.update(sys.modules)
        _modules_updated = t
    return _modules.keys()
def fs_callback(filename):
    """File-selector callback: import a .fef (Face and Hair Exchange)
    file, validating its header, loading its data and building the
    Blender-side result; reports errors via popups."""
    global IMPORT_DIR, EXPORT_DIR, import_dir, tipo
    tipo = ''
    # Force the .fef extension.
    if not filename.endswith('.fef'): filename = '%s.fef' % filename
    Blender.Window.WaitCursor(1)
    starttime = bsys.time()
    # Remember the last-used import directory in the registry.
    import_dir = bsys.dirname(filename)
    if import_dir != IMPORT_DIR:
        IMPORT_DIR = import_dir
        update_RegistryInfo()
    file = open(filename, "r")
    # The first line must be the exact magic header for this format.
    header = file.readline()
    if header != '//Face and Hair Exchange File (c)2010 by Skunk\n':
        print "Wrong file!"
        Blender.Draw.PupMenu('Wrong File!')
        file.close()
        return
    # Parse the file contents, then construct the Blender data; each
    # stage closes the file and aborts with a popup on failure.
    if not load_data(file):
        Blender.Draw.PupMenu('Error in input file!')
        file.close()
        return
    if not import2blender():
        Blender.Draw.PupMenu('Error in input file!')
        file.close()
        return
    file.close()
    endtime = bsys.time() - starttime
    print "Data imported in %.3f seconds." % endtime
    Blender.Window.WaitCursor(0)
    return
def get_cached_descriptor(txt, force_parse=0):
    """Returns the cached ScriptDesc for the specified Text object 'txt'.
    If the script has not been parsed in the last 'period' seconds it will
    be reparsed to obtain this descriptor. Specifying TP_AUTO for the
    period (default) will choose a period based on the size of the Text
    object. Larger texts are parsed less often.

    txt         -- the Text object to describe.
    force_parse -- when true, always re-parse instead of using the cache.
    """
    global _parse_cache
    parse = True
    key = hash(txt)
    # IDIOM FIX: dict.has_key() is deprecated (removed in Python 3);
    # the ``in`` operator is the equivalent membership test.
    if not force_parse and key in _parse_cache:
        desc = _parse_cache[key]
        if desc.parse_due > time():
            # Cache entry still fresh: only re-parse if the previous
            # parse was flagged incomplete.
            parse = desc.incomplete
    if parse:
        desc = parse_text(txt)
    return desc
def packIslands(islandList):
    """Pack the given UV islands into the unit UV square.

    Optionally merges islands first (USER_FILL_HOLES), pads each island
    by USER_ISLAND_MARGIN, box-packs their bounding boxes with
    Geometry.BoxPack2D and finally rewrites every face's UVs into its
    packed position, scaled by USER_STRETCH_ASPECT policy.
    Modifies islandList and the face UVs in place.
    """
    if USER_FILL_HOLES:
        Window.DrawProgressBar(0.1, 'Merging Islands (Ctrl: skip merge)...')
        mergeUvIslands(islandList) # Modify in place
    # Now we have UV islands, we need to pack them.
    # Make a synchronised list with the islands
    # so we can box pak the islands.
    packBoxes = []
    # Keep a list of X/Y offset so we can save time by writing the
    # uv's and packed data in one pass.
    islandOffsetList = []
    islandIdx = 0
    while islandIdx < len(islandList):
        minx, miny, maxx, maxy = boundsIsland(islandList[islandIdx])
        w, h = maxx - minx, maxy - miny
        if USER_ISLAND_MARGIN:
            # Pad the island's bounds on every side by the margin.
            minx -= USER_ISLAND_MARGIN # *w
            miny -= USER_ISLAND_MARGIN # *h
            maxx += USER_ISLAND_MARGIN # *w
            maxy += USER_ISLAND_MARGIN # *h
            # recalc width and height
            w, h = maxx - minx, maxy - miny
        # Degenerate (near zero-area) islands are dropped entirely.
        if w < 0.00001 or h < 0.00001:
            del islandList[islandIdx]
            islandIdx -= 1
            continue
        '''Save the offset to be applied later,
        we could apply to the UVs now and allign them to the bottom left hand area
        of the UV coords like the box packer imagines they are
        but, its quicker just to remember their offset and
        apply the packing and offset in 1 pass '''
        islandOffsetList.append((minx, miny))
        # Add to boxList. use the island idx for the BOX id.
        packBoxes.append([0, 0, w, h])
        islandIdx += 1
    # Now we have a list of boxes to pack that syncs
    # with the islands.
    #print '\tPacking UV Islands...'
    Window.DrawProgressBar(0.7, 'Packing %i UV Islands...' % len(packBoxes))
    time1 = sys.time()
    # BoxPack2D mutates packBoxes with each box's packed X/Y position and
    # returns the total packed width/height.
    packWidth, packHeight = Geometry.BoxPack2D(packBoxes)
    # print 'Box Packing Time:', sys.time() - time1
    #if len(pa ckedLs) != len(islandList):
    #	raise "Error packed boxes differes from original length"
    #print '\tWriting Packed Data to faces'
    Window.DrawProgressBar(0.8, 'Writing Packed Data to faces')
    # Sort by ID, so there in sync again
    islandIdx = len(islandList)
    # Having these here avoids devide by 0
    if islandIdx:
        if USER_STRETCH_ASPECT:
            # Maximize to uv area?? Will write a normalize function.
            xfactor = 1.0 / packWidth
            yfactor = 1.0 / packHeight
        else:
            # Keep proportions.
            xfactor = yfactor = 1.0 / max(packWidth, packHeight)
    # Walk the islands back-to-front, translating each face's UVs by the
    # packed position minus the original offset, then scaling into [0,1].
    while islandIdx:
        islandIdx -= 1
        # Write the packed values to the UV's
        xoffset = packBoxes[islandIdx][0] - islandOffsetList[islandIdx][0]
        yoffset = packBoxes[islandIdx][1] - islandOffsetList[islandIdx][1]
        for f in islandList[islandIdx]: # Offsetting the UV's so they fit in there packed box
            for uv in f.uv:
                uv.x = (uv.x + xoffset) * xfactor
                uv.y = (uv.y + yoffset) * yfactor
def main():
    """Smart UV Projection unwrapper entry point.

    Pops up an options block, then for every selected mesh object:
    groups faces into projection groups by normal similarity (within the
    user's angle limit), projects each group's UVs, splits the result into
    UV islands and box-packs them — either per object, or collected into
    one shared texture space when 'Share Tex Space' is enabled.
    """
    global USER_FILL_HOLES
    global USER_FILL_HOLES_QUALITY
    global USER_STRETCH_ASPECT
    global USER_ISLAND_MARGIN

    objects = bpy.data.scenes.active.objects

    # we can will tag them later.
    obList = [ob for ob in objects.context if ob.type == 'Mesh']

    # Face select object may not be selected.
    ob = objects.active
    if ob and ob.sel == 0 and ob.type == 'Mesh':
        # Add to the list
        obList = [ob]
    del objects

    if not obList:
        Draw.PupMenu('error, no selected mesh objects')
        return

    # Create the variables.
    USER_PROJECTION_LIMIT = Draw.Create(66)
    USER_ONLY_SELECTED_FACES = Draw.Create(1)
    USER_SHARE_SPACE = Draw.Create(1) # Only for hole filling.
    USER_STRETCH_ASPECT = Draw.Create(1) # Only for hole filling.
    USER_ISLAND_MARGIN = Draw.Create(0.0) # Only for hole filling.
    USER_FILL_HOLES = Draw.Create(0)
    USER_FILL_HOLES_QUALITY = Draw.Create(50) # Only for hole filling.
    USER_VIEW_INIT = Draw.Create(0) # Only for hole filling.
    USER_AREA_WEIGHT = Draw.Create(1) # Only for hole filling.

    pup_block = [\
    'Projection',\
    ('Angle Limit:', USER_PROJECTION_LIMIT, 1, 89, 'lower for more projection groups, higher for less distortion.'),\
    ('Selected Faces Only', USER_ONLY_SELECTED_FACES, 'Use only selected faces from all selected meshes.'),\
    ('Init from view', USER_VIEW_INIT, 'The first projection will be from the view vector.'),\
    ('Area Weight', USER_AREA_WEIGHT, 'Weight projections vector by face area.'),\
    '',\
    '',\
    '',\
    'UV Layout',\
    ('Share Tex Space', USER_SHARE_SPACE, 'Objects Share texture space, map all objects into 1 uvmap.'),\
    ('Stretch to bounds', USER_STRETCH_ASPECT, 'Stretch the final output to texture bounds.'),\
    ('Island Margin:', USER_ISLAND_MARGIN, 0.0, 0.5, 'Margin to reduce bleed from adjacent islands.'),\
    'Fill in empty areas',\
    ('Fill Holes', USER_FILL_HOLES, 'Fill in empty areas reduced texture waistage (slow).'),\
    ('Fill Quality:', USER_FILL_HOLES_QUALITY, 1, 100, 'Depends on fill holes, how tightly to fill UV holes, (higher is slower)'),\
    ]

    # Reuse variable
    if len(obList) == 1:
        ob = "Unwrap %i Selected Mesh"
    else:
        ob = "Unwrap %i Selected Meshes"

    # HACK, loop until mouse is lifted.
    '''
    while Window.GetMouseButtons() != 0:
        sys.sleep(10)
    '''

    if not Draw.PupBlock(ob % len(obList), pup_block):
        return
    del ob

    # Convert from being button types
    USER_PROJECTION_LIMIT = USER_PROJECTION_LIMIT.val
    USER_ONLY_SELECTED_FACES = USER_ONLY_SELECTED_FACES.val
    USER_SHARE_SPACE = USER_SHARE_SPACE.val
    USER_STRETCH_ASPECT = USER_STRETCH_ASPECT.val
    USER_ISLAND_MARGIN = USER_ISLAND_MARGIN.val
    USER_FILL_HOLES = USER_FILL_HOLES.val
    USER_FILL_HOLES_QUALITY = USER_FILL_HOLES_QUALITY.val
    USER_VIEW_INIT = USER_VIEW_INIT.val
    USER_AREA_WEIGHT = USER_AREA_WEIGHT.val

    # Angle limits are compared as dot products (cosines), not degrees.
    USER_PROJECTION_LIMIT_CONVERTED = cos(USER_PROJECTION_LIMIT * DEG_TO_RAD)
    USER_PROJECTION_LIMIT_HALF_CONVERTED = cos((USER_PROJECTION_LIMIT / 2) * DEG_TO_RAD)

    # Toggle Edit mode
    is_editmode = Window.EditMode()
    if is_editmode:
        Window.EditMode(0)
    # Assume face select mode! an annoying hack to toggle face select mode because Mesh dosent like faceSelectMode.

    if USER_SHARE_SPACE:
        # Sort by data name so we get consistant results
        # (key= is tried first; the cmp fallback is for older Pythons).
        try:
            obList.sort(key=lambda ob: ob.getData(name_only=1))
        except:
            obList.sort(lambda ob1, ob2: cmp(ob1.getData(name_only=1), ob2.getData(name_only=1)))

    collected_islandList = []

    Window.WaitCursor(1)

    time1 = sys.time()

    # Tag as False se we dont operate on teh same mesh twice.
    bpy.data.meshes.tag = False

    for ob in obList:
        me = ob.getData(mesh=1)

        # Skip meshes already processed (shared data) and library-linked ones.
        if me.tag or me.lib:
            continue

        # Tag as used
        me.tag = True

        if not me.faceUV: # Mesh has no UV Coords, dont bother.
            me.faceUV = True

        if USER_ONLY_SELECTED_FACES:
            meshFaces = [thickface(f) for f in me.faces if f.sel]
        else:
            meshFaces = map(thickface, me.faces)

        if not meshFaces:
            continue

        Window.DrawProgressBar(0.1, 'SmartProj UV Unwrapper, mapping "%s", %i faces.' % (me.name, len(meshFaces)))

        # =======
        # Generate a projection list from face normals, this is ment to be smart :)

        # make a list of face props that are in sync with meshFaces
        # Make a Face List that is sorted by area.
        # meshFaces = []

        # meshFaces.sort( lambda a, b: cmp(b.area , a.area) ) # Biggest first.
        try:
            meshFaces.sort(key=lambda a: -a.area)
        except:
            meshFaces.sort(lambda a, b: cmp(b.area, a.area))

        # remove all zero area faces
        while meshFaces and meshFaces[-1].area <= SMALL_NUM:
            # Set their UV's to 0,0
            for uv in meshFaces[-1].uv:
                uv.zero()
            meshFaces.pop()

        # Smallest first is slightly more efficient, but if the user cancels early then its better we work on the larger data.

        # Generate Projection Vecs
        # 0d is 1.0
        # 180 IS -0.59846

        # Initialize projectVecs
        if USER_VIEW_INIT:
            # Generate Projection
            # View vector is transformed into the object's local space.
            projectVecs = [Vector(Window.GetViewVector()) * ob.matrixWorld.copy().invert().rotationPart()] # We add to this allong the way
        else:
            projectVecs = []

        newProjectVec = meshFaces[0].no
        newProjectMeshFaces = [] # Popping stuffs it up.

        # Predent that the most unique angke is ages away to start the loop off
        mostUniqueAngle = -1.0

        # This is popped
        tempMeshFaces = meshFaces[:]

        # This while only gathers projection vecs, faces are assigned later on.
        while 1:
            # If theres none there then start with the largest face

            # add all the faces that are close.
            for fIdx in xrange(len(tempMeshFaces) - 1, -1, -1):
                # Use half the angle limit so we dont overweight faces towards this
                # normal and hog all the faces.
                if newProjectVec.dot(tempMeshFaces[fIdx].no) > USER_PROJECTION_LIMIT_HALF_CONVERTED:
                    newProjectMeshFaces.append(tempMeshFaces.pop(fIdx))

            # Add the average of all these faces normals as a projectionVec
            averageVec = Vector(0, 0, 0)
            if USER_AREA_WEIGHT:
                for fprop in newProjectMeshFaces:
                    averageVec += (fprop.no * fprop.area)
            else:
                for fprop in newProjectMeshFaces:
                    averageVec += fprop.no

            if averageVec.x != 0 or averageVec.y != 0 or averageVec.z != 0: # Avoid NAN
                projectVecs.append(averageVec.normalize())

            # Get the next vec!
            # Pick the face thats most different to all existing angles :)
            mostUniqueAngle = 1.0 # 1.0 is 0d. no difference.
            mostUniqueIndex = 0 # dummy

            for fIdx in xrange(len(tempMeshFaces) - 1, -1, -1):
                angleDifference = -1.0 # 180d difference.

                # Get the closest vec angle we are to.
                for p in projectVecs:
                    temp_angle_diff = p.dot(tempMeshFaces[fIdx].no)

                    if angleDifference < temp_angle_diff:
                        angleDifference = temp_angle_diff

                if angleDifference < mostUniqueAngle:
                    # We have a new most different angle
                    mostUniqueIndex = fIdx
                    mostUniqueAngle = angleDifference

            if mostUniqueAngle < USER_PROJECTION_LIMIT_CONVERTED:
                #print 'adding', mostUniqueAngle, USER_PROJECTION_LIMIT, len(newProjectMeshFaces)
                # Now weight the vector to all its faces, will give a more direct projection
                # if the face its self was not representive of the normal from surrounding faces.
                newProjectVec = tempMeshFaces[mostUniqueIndex].no
                newProjectMeshFaces = [tempMeshFaces.pop(mostUniqueIndex)]
            else:
                if len(projectVecs) >= 1: # Must have at least 2 projections
                    break

        # If there are only zero area faces then its possible
        # there are no projectionVecs
        if not len(projectVecs):
            Draw.PupMenu('error, no projection vecs where generated, 0 area faces can cause this.')
            return

        faceProjectionGroupList = [[] for i in xrange(len(projectVecs))]

        # MAP and Arrange # We know there are 3 or 4 faces here

        for fIdx in xrange(len(meshFaces) - 1, -1, -1):
            fvec = meshFaces[fIdx].no
            i = len(projectVecs)

            # Initialize first
            bestAng = fvec.dot(projectVecs[0])
            bestAngIdx = 0

            # Cycle through the remaining, first alredy done
            while i - 1:
                i -= 1

                newAng = fvec.dot(projectVecs[i])
                if newAng > bestAng: # Reverse logic for dotvecs
                    bestAng = newAng
                    bestAngIdx = i

            # Store the area for later use.
            faceProjectionGroupList[bestAngIdx].append(meshFaces[fIdx])

        # Cull faceProjectionGroupList,

        # Now faceProjectionGroupList is full of faces that face match the project Vecs list
        for i in xrange(len(projectVecs)):
            # Account for projectVecs having no faces.
            if not faceProjectionGroupList[i]:
                continue

            # Make a projection matrix from a unit length vector.
            MatProj = VectoMat(projectVecs[i])

            # Get the faces UV's from the projected vertex.
            for f in faceProjectionGroupList[i]:
                f_uv = f.uv
                for j, v in enumerate(f.v):
                    # Project the vertex and keep only the 2D part as the UV.
                    f_uv[j][:] = (MatProj * v.co)[:2]

        if USER_SHARE_SPACE:
            # Should we collect and pack later?
            islandList = getUvIslands(faceProjectionGroupList, me)
            collected_islandList.extend(islandList)
        else:
            # Should we pack the islands for this 1 object?
            islandList = getUvIslands(faceProjectionGroupList, me)
            packIslands(islandList)

        # update the mesh here if we need to.

    # We want to pack all in 1 go, so pack now
    if USER_SHARE_SPACE:
        Window.DrawProgressBar(0.9, "Box Packing for all objects...")
        packIslands(collected_islandList)

    print "Smart Projection time: %.2f" % (sys.time() - time1)
    # Window.DrawProgressBar(0.9, "Smart Projections done, time: %.2f sec." % (sys.time() - time1))

    # Restore the UI state we changed at the start.
    if is_editmode:
        Window.EditMode(1)

    Window.DrawProgressBar(1.0, "")
    Window.WaitCursor(0)
    Window.RedrawAll()
def main(arg): # get armature and its bones obs = [ob for ob in Blender.Object.GetSelected() if ob.type == "Armature"] if obs: boneitems = [(bonename, bone) for (bonename, bone) in obs[0].getPose().bones.items()] else: boneitems = [] # exit if no bones selected if not boneitems: print ("no armature selected") Blender.Draw.PupMenu("ERROR%t|no armature selected") return # ask for weights to delete PREF_BUFFER = Blender.Draw.Create("BonePose") pup_block = [("Text Buffer: ", PREF_BUFFER, 0, 20, "The text buffer to load the bone poses from.")] if not Blender.Draw.PupBlock("Load Bone Pose", pup_block): return # saves editmode state and exit editmode if it is enabled # (cannot make changes mesh data in editmode) is_editmode = Window.EditMode() Window.EditMode(0) Window.WaitCursor(1) t = sys.time() # run script # open text buffer try: posetxt = Blender.Text.Get(PREF_BUFFER.val) except NameError: Blender.Draw.PupMenu("ERROR%t|text buffer does not exist") return # reconstruct poses for matrixtxt in posetxt.asLines(): # skip empty lines if not matrixtxt: continue # reconstruct matrix from text bonename, matrixstr = matrixtxt.split("/") print ("loading pose of bone %s from %s" % (bonename, PREF_BUFFER.val)) try: matrix = Blender.Mathutils.Matrix(*[[float(f) for f in row.split(",")] for row in matrixstr.split(";")]) except: Blender.Draw.PupMenu("ERROR%t|syntax error in buffer") return # save pose matrix for bonename2, bone in boneitems: if bonenamematch(bonename, bonename2): bone.quat = matrix.rotationPart().toQuat() bone.loc = matrix.translationPart() break else: print ("WARNING: bone %s not found in armature" % bonename) # display the result obs[0].getPose().update() # report finish and timing print "Load bone pose finished in %.2f seconds" % (sys.time() - t) Window.WaitCursor(0) if is_editmode: Window.EditMode(1)
def packIslands(islandList):
    """Pack the given UV islands into the 0..1 UV square.

    Variant of the packer with the progress-bar calls commented out.
    islandList is a list of islands (lists of faces); each face's ``f.uv``
    is modified in place.  Controlled by the module-level user settings
    USER_FILL_HOLES, USER_ISLAND_MARGIN and USER_STRETCH_ASPECT.
    Degenerate (near zero width/height) islands are removed from islandList.
    """
    if USER_FILL_HOLES:
        # Window.DrawProgressBar(0.1, 'Merging Islands (Ctrl: skip merge)...')
        mergeUvIslands(islandList) # Modify in place

    # Now we have UV islands, we need to pack them.

    # Make a synchronised list with the islands
    # so we can box pak the islands.
    packBoxes = []

    # Keep a list of X/Y offset so we can save time by writing the
    # uv's and packed data in one pass.
    islandOffsetList = []

    islandIdx = 0

    while islandIdx < len(islandList):
        minx, miny, maxx, maxy = boundsIsland(islandList[islandIdx])

        w, h = maxx - minx, maxy - miny

        if USER_ISLAND_MARGIN:
            # Grow the island bounds on every side so packed islands
            # end up separated by the margin.
            minx -= USER_ISLAND_MARGIN  # *w
            miny -= USER_ISLAND_MARGIN  # *h
            maxx += USER_ISLAND_MARGIN  # *w
            maxy += USER_ISLAND_MARGIN  # *h

            # recalc width and height
            w, h = maxx - minx, maxy - miny

        if w < 0.00001 or h < 0.00001:
            # Degenerate island: drop it and revisit this index,
            # since the list just shifted down by one.
            del islandList[islandIdx]
            islandIdx -= 1
            continue

        """Save the offset to be applied later,
        we could apply to the UVs now and allign them to the bottom left hand area
        of the UV coords like the box packer imagines they are
        but, its quicker just to remember their offset and
        apply the packing and offset in 1 pass """
        islandOffsetList.append((minx, miny))

        # Add to boxList. use the island idx for the BOX id.
        packBoxes.append([0, 0, w, h])
        islandIdx += 1

    # Now we have a list of boxes to pack that syncs
    # with the islands.

    # print '\tPacking UV Islands...'
    # Window.DrawProgressBar(0.7, 'Packing %i UV Islands...' % len(packBoxes) )

    time1 = sys.time()
    # Geometry.BoxPack2D mutates packBoxes with packed x/y and returns the
    # total bounds of the packed layout.
    packWidth, packHeight = Geometry.BoxPack2D(packBoxes)

    # print 'Box Packing Time:', sys.time() - time1

    # if len(pa ckedLs) != len(islandList):
    #   raise "Error packed boxes differes from original length"

    # print '\tWriting Packed Data to faces'
    # Window.DrawProgressBar(0.8, 'Writing Packed Data to faces')

    # Sort by ID, so there in sync again
    islandIdx = len(islandList)
    # Having these here avoids devide by 0
    if islandIdx:

        if USER_STRETCH_ASPECT:
            # Maximize to uv area?? Will write a normalize function.
            xfactor = 1.0 / packWidth
            yfactor = 1.0 / packHeight
        else:
            # Keep proportions.
            xfactor = yfactor = 1.0 / max(packWidth, packHeight)

    while islandIdx:
        islandIdx -= 1
        # Write the packed values to the UV's

        # Translation that moves this island from its original bounds
        # to its packed box position.
        xoffset = packBoxes[islandIdx][0] - islandOffsetList[islandIdx][0]
        yoffset = packBoxes[islandIdx][1] - islandOffsetList[islandIdx][1]

        for f in islandList[islandIdx]:  # Offsetting the UV's so they fit in there packed box
            for uv in f.uv:
                uv.x = (uv.x + xoffset) * xfactor
                uv.y = (uv.y + yoffset) * yfactor
def write(filename, objects,\
EXPORT_NORMALS_HQ=False,\
EXPORT_MTL=True, EXPORT_COPY_IMAGES=False,\
EXPORT_APPLY_MODIFIERS=True, EXPORT_BLEN_OBS=True,\
EXPORT_GROUP_BY_OB=False):
    '''
    Basic write function. The context and options must be already set.
    Writes the given objects to *filename* as an XML <OPEN_TRACK> document
    (vertices, texcoords, normals, triangles and loose edges per object).
    This can be accessed externally, eg.
    write( 'c:\\test\\foobar.obj', Blender.Object.GetSelected() ) # Using default options.
    '''
    def veckey3d(v):
        # Quantise a vector to 6 decimals so it can be used as a dict key.
        return round(v.x, 6), round(v.y, 6), round(v.z, 6)

    def veckey2d(v):
        # Same as veckey3d but for 2D UV coords.
        return round(v.x, 6), round(v.y, 6)

    print 'WTF Export path: "%s"' % filename
    temp_mesh_name = '~tmp-mesh'

    time1 = sys.time()
    scn = Scene.GetCurrent()

    file = open(filename, "w")
    file.write('<?xml version="1.0"?>\n')
    file.write('<OPEN_TRACK>\n')

    # Write Header
    # file.write('\n<!--\n'
    #            + '  Blender3D v%s WTF File: %s\n' % (Blender.Get('version'), Blender.Get('filename').split('/')[-1].split('\\')[-1] )
    #            + '  www.blender3d.org\n'
    #            + '-->\n\n')

    # Get the container mesh. - used for applying modifiers and non mesh objects.
    containerMesh = meshName = tempMesh = None
    for meshName in Blender.NMesh.GetNames():
        if meshName.startswith(temp_mesh_name):
            tempMesh = Mesh.Get(meshName)
            if not tempMesh.users:
                containerMesh = tempMesh
    if not containerMesh:
        containerMesh = Mesh.New(temp_mesh_name)

    del meshName
    del tempMesh

    # Initialize totals, these are updated each object
    totverts = totuvco = totno = 0

    face_vert_index = 0

    # Maps quantised normal -> global normal index, shared across objects.
    globalNormals = {}

    file.write('\n<library_objects>\n')
    # Get all meshs
    for ob_main in objects:
        obnamestring = fixName(ob_main.name)
        file.write('\t<object id="%s">\n' % obnamestring) # Write Object name

        for ob, ob_mat in BPyObject.getDerivedObjects(ob_main):
            # Will work for non meshes now! :)
            # getMeshFromObject(ob, container_mesh=None, apply_modifiers=True, vgroups=True, scn=None)
            me = BPyMesh.getMeshFromObject(ob, containerMesh, EXPORT_APPLY_MODIFIERS, False, scn)
            if not me:
                # Not convertible to a mesh: just write its transform.
                file.write('\t\t<loc>%.6f %.6f %.6f</loc>\n' % tuple(ob_main.loc)) # Write Object location
                file.write('\t\t<rot>%.6f %.6f %.6f</rot>\n' % tuple(ob_main.rot)) # Write Object rotation
                continue

            faceuv = me.faceUV

            # We have a valid mesh
            if me.faces:
                # Add a dummy object to it.
                has_quads = False
                for f in me.faces:
                    if len(f) == 4:
                        has_quads = True
                        break

                if has_quads:
                    # Quads must be triangulated; quadToTriangle needs the
                    # mesh linked to an object, so link a temp one.
                    oldmode = Mesh.Mode()
                    Mesh.Mode(Mesh.SelectModes['FACE'])

                    me.sel = True
                    tempob = scn.objects.new(me)
                    me.quadToTriangle(0) # more=0 shortest length
                    oldmode = Mesh.Mode(oldmode)
                    scn.objects.unlink(tempob)

                    Mesh.Mode(oldmode)

            # Make our own list so it can be sorted to reduce context switching
            faces = [f for f in me.faces]
            edges = me.edges

            if not (len(faces) + len(edges) + len(me.verts)): # Make sure there is somthing to write
                continue # dont bother with this mesh.

            me.transform(ob_mat)

            # High Quality Normals
            if faces:
                if EXPORT_NORMALS_HQ:
                    BPyMesh.meshCalcNormals(me)
                else:
                    # transforming normals is incorrect
                    # when the matrix is scaled,
                    # better to recalculate them
                    me.calcNormals()

            # # Crash Blender
            #materials = me.getMaterials(1) # 1 == will return None in the list.
            materials = me.materials

            materialNames = []
            materialItems = materials[:]
            if materials:
                for mat in materials:
                    if mat: # !=None
                        materialNames.append(mat.name)
                    else:
                        materialNames.append(None)
                # Cant use LC because some materials are None.
                # materialNames = map(lambda mat: mat.name, materials)

            # Bug Blender, dosent account for null materials, still broken.
            # Possible there null materials, will mess up indicies
            # but at least it will export, wait until Blender gets fixed.
            materialNames.extend((16 - len(materialNames)) * [None])
            materialItems.extend((16 - len(materialItems)) * [None])

            # Sort by Material, then images
            # so we dont over context switch in the obj file.
            # (key= is tried first; the cmp fallback is for older Pythons.)
            if faceuv:
                try:
                    faces.sort(key=lambda a: (a.mat, a.image, a.smooth))
                except:
                    faces.sort(lambda a, b: cmp((a.mat, a.image, a.smooth), (b.mat, b.image, b.smooth)))
            elif len(materials) > 1:
                try:
                    faces.sort(key=lambda a: (a.mat, a.smooth))
                except:
                    faces.sort(lambda a, b: cmp((a.mat, a.smooth), (b.mat, b.smooth)))
            else:
                # no materials
                try:
                    faces.sort(key=lambda a: a.smooth)
                except:
                    faces.sort(lambda a, b: cmp(a.smooth, b.smooth))

            # Set the default mat to no material and no image.
            contextMat = (0, 0) # Can never be this, so we will label a new material teh first chance we get.
            contextSmooth = None # Will either be true or false, set bad to force initialization switch.

            if len(faces) > 0:
                file.write('\t\t<mesh>\n')
            else:
                file.write('\t\t<curve>\n')

            vertname = "%s-Vertices" % obnamestring
            vertarrayname = "%s-Array" % vertname
            normname = "%s-Normals" % obnamestring
            normarrayname = "%s-Array" % normname
            texname = "%s-TexCoord" % obnamestring
            texarrayname = "%s-Array" % texname

            # Vert
            file.write('\t\t\t<float_array count="%d" id="%s">' % (len(me.verts), vertarrayname))
            for v in me.verts:
                file.write(' %.6f %.6f %.6f' % tuple(v.co))
            file.write('</float_array>\n')
            file.write('\t\t\t<vertices id="%s" source="#%s" />\n' % (vertname, vertarrayname))

            # UV
            if faceuv:
                file.write('\t\t\t<float_array id="%s">' % texarrayname)
                uv_face_mapping = [[0, 0, 0, 0] for f in faces] # a bit of a waste for tri's :/

                uv_dict = {} # could use a set() here
                for f_index, f in enumerate(faces):
                    for uv_index, uv in enumerate(f.uv):
                        uvkey = veckey2d(uv)
                        try:
                            uv_face_mapping[f_index][uv_index] = uv_dict[uvkey]
                        except:
                            # First time we see this UV: assign it the next
                            # index and write it out once.
                            uv_face_mapping[f_index][uv_index] = uv_dict[uvkey] = len(uv_dict)
                            file.write(' %.6f %.6f' % tuple(uv))

                uv_unique_count = len(uv_dict)
                del uv, uvkey, uv_dict, f_index, uv_index
                # Only need uv_unique_count and uv_face_mapping

                file.write('</float_array>\n')
                file.write('\t\t\t<texcoords id="%s" source="#%s" />\n' % (texname, texarrayname))

            # NORMAL, Smooth/Non smoothed.
            if len(faces) > 0:
                file.write('\t\t\t<float_array id="%s">' % normarrayname)
                for f in faces:
                    if f.smooth:
                        # Smooth face: one normal per vertex.
                        for v in f:
                            noKey = veckey3d(v.no)
                            if not globalNormals.has_key(noKey):
                                globalNormals[noKey] = totno
                                totno += 1
                                file.write(' %.6f %.6f %.6f' % noKey)
                    else:
                        # Hard, 1 normal from the face.
                        noKey = veckey3d(f.no)
                        if not globalNormals.has_key(noKey):
                            globalNormals[noKey] = totno
                            totno += 1
                            file.write(' %.6f %.6f %.6f' % noKey)
                file.write('</float_array>\n')
                file.write('\t\t\t<normals id="%s" source="#%s" />\n' % (normname, normarrayname))

            if not faceuv:
                f_image = None
            in_triangles = False

            for f_index, f in enumerate(faces):
                f_v = f.v
                f_smooth = f.smooth
                f_mat = min(f.mat, len(materialNames) - 1)
                if faceuv:
                    f_image = f.image
                    f_uv = f.uv

                # MAKE KEY
                if faceuv and f_image: # Object is always true.
                    key = materialNames[f_mat], f_image.name
                else:
                    key = materialNames[f_mat], None # No image, use None instead.

                # CHECK FOR CONTEXT SWITCH
                if key == contextMat:
                    pass # Context alredy switched, dont do anythoing
                else:
                    if key[0] == None and key[1] == None:
                        # Write a null material, since we know the context has changed.
                        if in_triangles:
                            file.write('</p>\n')
                            file.write('\t\t\t</triangles>\n')
                        file.write('\t\t\t<triangles id="%s_%s">\n' % (fixName(ob.name), fixName(ob.getData(1))))
                        in_triangles = True
                    else:
                        mat_data = MTL_DICT.get(key)
                        if not mat_data:
                            # First add to global dict so we can export to mtl
                            # Then write mtl

                            # Make a new names from the mat and image name,
                            # converting any spaces to underscores with fixName.

                            # If none image dont bother adding it to the name
                            if key[1] == None:
                                mat_data = MTL_DICT[key] = ('%s' % fixName(key[0])), materialItems[f_mat], f_image
                            else:
                                mat_data = MTL_DICT[key] = ('%s_%s' % (fixName(key[0]), fixName(key[1]))), materialItems[f_mat], f_image

                        if in_triangles:
                            file.write('</p>\n')
                            file.write('\t\t\t</triangles>\n')
                        file.write('\t\t\t<triangles id="%s_%s_%s" material="#%s">\n' % (fixName(ob.name), fixName(ob.getData(1)), mat_data[0], mat_data[0]) )
                        in_triangles = True

                    file.write('\t\t\t\t<input offset="0" semantic="VERTEX" source="#%s" />\n' % vertname)
                    file.write('\t\t\t\t<input offset="1" semantic="NORMAL" source="#%s" />\n' % normname)
                    if faceuv:
                        file.write('\t\t\t\t<input offset="2" semantic="TEXCOORD" source="#%s" />\n' % texname)
                    file.write('\t\t\t\t<p>')

                contextMat = key

                if f_smooth != contextSmooth:
                    if f_smooth: # on now off
                        # file.write('s 1\n')
                        contextSmooth = f_smooth
                    else: # was off now on
                        # file.write('s off\n')
                        contextSmooth = f_smooth

                if faceuv:
                    if f_smooth: # Smoothed, use vertex normals
                        for vi, v in enumerate(f_v):
                            file.write( ' %d %d %d' % (\
                              v.index + totverts,\
                              totuvco + uv_face_mapping[f_index][vi],\
                              globalNormals[ veckey3d(v.no) ])) # vert, uv, normal
                    else: # No smoothing, face normals
                        no = globalNormals[ veckey3d(f.no) ]
                        for vi, v in enumerate(f_v):
                            file.write( ' %d %d %d' % (\
                              v.index + totverts,\
                              totuvco + uv_face_mapping[f_index][vi],\
                              no)) # vert, uv, normal

                    face_vert_index += len(f_v)

                else: # No UV's
                    if f_smooth: # Smoothed, use vertex normals
                        for v in f_v:
                            file.write( ' %d %d' % (\
                              v.index + totverts,\
                              globalNormals[ veckey3d(v.no) ]))
                    else: # No smoothing, face normals
                        no = globalNormals[ veckey3d(f.no) ]
                        for v in f_v:
                            file.write( ' %d %d' % (\
                              v.index + totverts,\
                              no))

            if in_triangles:
                file.write('</p>\n')
                file.write('\t\t\t</triangles>\n')

            # Write edges.
            LOOSE = Mesh.EdgeFlags.LOOSE
            has_edge = False
            for ed in edges:
                if ed.flag & LOOSE:
                    has_edge = True
            if has_edge:
                file.write('\t\t\t<edges>\n')
                file.write('\t\t\t\t<input offset="0" semantic="VERTEX" source="#%s" />\n' % vertname)
                file.write('\t\t\t\t<p>')
                for ed in edges:
                    if ed.flag & LOOSE:
                        file.write(' %d %d' % (ed.v1.index + totverts, ed.v2.index + totverts))
                file.write('</p>\n')
                file.write('\t\t\t</edges>\n')

            # Make the indicies global rather then per mesh
            # totverts += len(me.verts)
            # if faceuv:
            #     totuvco += uv_unique_count
            me.verts = None
            if len(faces) > 0:
                file.write('\t\t</mesh>\n')
            else:
                file.write('\t\t</curve>\n')
        file.write('\t</object>\n')
    file.write('</library_objects>\n\n')

    # Now we have all our materials, save them
    if EXPORT_MTL:
        write_library_materials(file)

    # Save the groups
    write_library_groups(file)

    file.write('</OPEN_TRACK>\n')
    file.close()

    if EXPORT_COPY_IMAGES:
        dest_dir = filename
        # Remove chars until we are just the path.
        while dest_dir and dest_dir[-1] not in '\\/':
            dest_dir = dest_dir[:-1]
        if dest_dir:
            copy_images(dest_dir)
        else:
            print '\tError: "%s" could not be used as a base for an image path.' % filename

    print "WTF Export time: %.2f" % (sys.time() - time1)
def parse_text(txt):
    """Parses an entire script's text and returns a ScriptDesc instance
    containing information about the script.

    If the text is not a valid Python script (for example if brackets are left
    open), parsing may fail to complete. However, if this occurs, no exception
    is thrown. Instead the returned ScriptDesc instance will have its
    incomplete flag set and information processed up to this point will still
    be accessible.

    The parser is a set of small token-driven state machines (imports,
    classes, defs, variables) fed by the tokenize stream of *txt*.

    Fixes over the previous revision:
    - 'var2_step == 0' (a no-op comparison) is now the intended assignment
      'var2_step = 0', so 'global' parsing resets correctly.
    - 'var_step = 0' (a write to a never-read name) is now 'var1_step = 0',
      so the 'self.attr' machine resets after seeing an already-known
      attribute.
    """
    start_time = time()
    txt.reset()
    tokens = generate_tokens(txt.readline) # Throws TokenError

    curl, cursor = txt.getCursorPos()
    linen = curl + 1 # Token line numbers are one-based

    imports = dict()
    imp_step = 0

    classes = dict()
    cls_step = 0

    defs = dict()
    def_step = 0

    vars = dict()
    var1_step = 0
    var2_step = 0
    var3_step = 0
    var_accum = dict()
    var_forflag = False

    indent = 0
    prev_type = -1
    prev_text = ''
    incomplete = False

    while True:
        try:
            type, text, start, end, line = tokens.next()
        except StopIteration:
            break
        except (TokenError, IndentationError):
            # Source is not (yet) valid Python; report what we have so far.
            incomplete = True
            break

        # Skip all comments and line joining characters
        if type == COMMENT or type == NL:
            continue

        #################
        ## Indentation ##
        #################

        if type == INDENT:
            indent += 1
        elif type == DEDENT:
            indent -= 1

        #########################
        ## Module importing... ##
        #########################

        imp_store = False

        # Default, look for 'from' or 'import' to start
        if imp_step == 0:
            if text == 'from':
                imp_tmp = []
                imp_step = 1
            elif text == 'import':
                imp_from = None
                imp_tmp = []
                imp_step = 2

        # Found a 'from', create imp_from in form '???.???...'
        elif imp_step == 1:
            if text == 'import':
                imp_from = '.'.join(imp_tmp)
                imp_tmp = []
                imp_step = 2
            elif type == NAME:
                imp_tmp.append(text)
            elif text != '.':
                imp_step = 0 # Invalid syntax

        # Found 'import', imp_from is populated or None, create imp_name
        elif imp_step == 2:
            if text == 'as':
                imp_name = '.'.join(imp_tmp)
                imp_step = 3
            elif type == NAME or text == '*':
                imp_tmp.append(text)
            elif text != '.':
                imp_name = '.'.join(imp_tmp)
                imp_symb = imp_name
                imp_store = True

        # Found 'as', change imp_symb to this value and go back to step 2
        elif imp_step == 3:
            if type == NAME:
                imp_symb = text
            else:
                imp_store = True

        # Both imp_name and imp_symb have now been populated so we can import
        if imp_store:
            # Handle special case of 'import *'
            if imp_name == '*':
                parent = get_module(imp_from)
                imports.update(parent.__dict__)
            else:
                # Try importing the name as a module
                try:
                    if imp_from:
                        module = get_module(imp_from + '.' + imp_name)
                    else:
                        module = get_module(imp_name)
                except (ImportError, ValueError, AttributeError, TypeError):
                    # Try importing name as an attribute of the parent
                    try:
                        module = __import__(imp_from, globals(), locals(), [imp_name])
                        imports[imp_symb] = getattr(module, imp_name)
                    except (ImportError, ValueError, AttributeError, TypeError):
                        pass
                else:
                    imports[imp_symb] = module

            # More to import from the same module?
            if text == ',':
                imp_tmp = []
                imp_step = 2
            else:
                imp_step = 0

        ###################
        ## Class parsing ##
        ###################

        # If we are inside a class then def and variable parsing should be done
        # for the class. Otherwise the definitions are considered global

        # Look for 'class'
        if cls_step == 0:
            if text == 'class':
                cls_name = None
                cls_lineno = start[0]
                cls_indent = indent
                cls_step = 1

        # Found 'class', look for cls_name followed by '(' parents ')'
        elif cls_step == 1:
            if not cls_name:
                if type == NAME:
                    cls_name = text
                    cls_sline = False
                    cls_parents = dict()
                    cls_defs = dict()
                    cls_vars = dict()
            elif type == NAME:
                # Known parent class: inherit its recorded defs and vars.
                if classes.has_key(text):
                    parent = classes[text]
                    cls_parents[text] = parent
                    cls_defs.update(parent.defs)
                    cls_vars.update(parent.vars)
            elif text == ':':
                cls_step = 2

        # Found 'class' name ... ':', now check if it's a single line statement
        elif cls_step == 2:
            if type == NEWLINE:
                cls_sline = False
            else:
                cls_sline = True
            cls_doc = ''
            cls_step = 3

        elif cls_step == 3:
            if not cls_doc and type == STRING:
                cls_doc = _trim_doc(text)
            if cls_sline:
                if type == NEWLINE:
                    classes[cls_name] = ClassDesc(cls_name, cls_parents, cls_defs, cls_vars, cls_lineno, cls_doc)
                    cls_step = 0
            else:
                if type == DEDENT and indent <= cls_indent:
                    classes[cls_name] = ClassDesc(cls_name, cls_parents, cls_defs, cls_vars, cls_lineno, cls_doc)
                    cls_step = 0

        #################
        ## Def parsing ##
        #################

        # Look for 'def'
        if def_step == 0:
            if text == 'def':
                def_name = None
                def_lineno = start[0]
                def_step = 1

        # Found 'def', look for def_name followed by '('
        elif def_step == 1:
            if type == NAME:
                def_name = text
                def_params = []
            elif def_name and text == '(':
                def_step = 2

        # Found 'def' name '(', now identify the parameters upto ')'
        # TODO: Handle ellipsis '...'
        elif def_step == 2:
            if type == NAME:
                def_params.append(text)
            elif text == ':':
                def_step = 3

        # Found 'def' ... ':', now check if it's a single line statement
        elif def_step == 3:
            if type == NEWLINE:
                def_sline = False
            else:
                def_sline = True
            def_doc = ''
            def_step = 4

        elif def_step == 4:
            if type == STRING:
                def_doc = _trim_doc(text)
            newdef = None
            if def_sline:
                if type == NEWLINE:
                    newdef = FunctionDesc(def_name, def_params, def_lineno, def_doc)
            else:
                if type == NAME:
                    newdef = FunctionDesc(def_name, def_params, def_lineno, def_doc)
            if newdef:
                if cls_step > 0: # Parsing a class
                    cls_defs[def_name] = newdef
                else:
                    defs[def_name] = newdef
                def_step = 0

        ##########################
        ## Variable assignation ##
        ##########################

        if cls_step > 0: # Parsing a class
            # Look for 'self.???'
            if var1_step == 0:
                if text == 'self':
                    var1_step = 1
            elif var1_step == 1:
                if text == '.':
                    var_name = None
                    var1_step = 2
                else:
                    var1_step = 0
            elif var1_step == 2:
                if type == NAME:
                    var_name = text
                    if cls_vars.has_key(var_name):
                        # Attribute already recorded: restart the machine.
                        # FIX: was 'var_step = 0', which wrote a never-read
                        # name and left this machine stuck in step 2.
                        var1_step = 0
                    else:
                        var1_step = 3
            elif var1_step == 3:
                if text == '=':
                    var1_step = 4
                elif text != ',':
                    var1_step = 0
            elif var1_step == 4:
                # Guess the attribute's type from the first token of the
                # assigned value; 'close' is the column just past the literal.
                var_type = None
                if type == NUMBER:
                    close = end[1]
                    if text.find('.') != -1:
                        var_type = float
                    else:
                        var_type = int
                elif type == STRING:
                    close = end[1]
                    var_type = str
                elif text == '[':
                    close = line.find(']', end[1])
                    var_type = list
                elif text == '(':
                    close = line.find(')', end[1])
                    var_type = tuple
                elif text == '{':
                    close = line.find('}', end[1])
                    var_type = dict
                elif text == 'dict':
                    close = line.find(')', end[1])
                    var_type = dict
                if var_type and close + 1 < len(line):
                    # A non-space after the literal means the value is a more
                    # complex expression; abandon the type guess.
                    if line[close + 1] != ' ' and line[close + 1] != '\t':
                        var_type = None
                cls_vars[var_name] = VarDesc(var_name, var_type, start[0])
                var1_step = 0

        elif def_step > 0: # Parsing a def
            # Look for 'global ???[,???]'
            if var2_step == 0:
                if text == 'global':
                    var2_step = 1
            elif var2_step == 1:
                if type == NAME:
                    if not vars.has_key(text):
                        vars[text] = VarDesc(text, None, start[0])
                elif text != ',' and type != NL:
                    # FIX: was 'var2_step == 0' (a no-op comparison), so the
                    # machine never reset and swallowed every later NAME in
                    # the def as a global.
                    var2_step = 0

        else: # In global scope
            if var3_step == 0:
                # Look for names
                if text == 'for':
                    var_accum = dict()
                    var_forflag = True
                elif text == '=' or (var_forflag and text == 'in'):
                    var_forflag = False
                    var3_step = 1
                elif type == NAME:
                    if prev_text != '.' and not vars.has_key(text):
                        var_accum[text] = VarDesc(text, None, start[0])
                elif not text in [',', '(', ')', '[', ']']:
                    var_accum = dict()
                    var_forflag = False
            elif var3_step == 1:
                if len(var_accum) != 1:
                    # Multiple targets: record them all, with no type guess.
                    var_type = None
                    vars.update(var_accum)
                else:
                    # Single target: guess its type from the value's first token.
                    var_name = var_accum.keys()[0]
                    var_type = None
                    if type == NUMBER:
                        if text.find('.') != -1:
                            var_type = float
                        else:
                            var_type = int
                    elif type == STRING:
                        var_type = str
                    elif text == '[':
                        var_type = list
                    elif text == '(':
                        var_type = tuple
                    elif text == '{':
                        var_type = dict
                    vars[var_name] = VarDesc(var_name, var_type, start[0])
                var3_step = 0

        #######################
        ## General utilities ##
        #######################

        prev_type = type
        prev_text = text

    desc = ScriptDesc(txt.name, imports, classes, defs, vars, incomplete)
    desc.set_delay(10 * (time() - start_time) + 0.05)

    global _parse_cache
    _parse_cache[hash(txt)] = desc
    return desc
file = open(filename, 'w') except IOError, (errno, strerror): error = "IOError #%s: %s" % (errno, strerror) REPORT_DATA['errors'].append("Saving failed - %s." % error) error_msg = "Couldn't save file!%%t|%s" % error Blender.Draw.PupMenu(error_msg) return try: test = AC3DExport(OBJS, file) except: file.close() raise else: file.close() endtime = bsys.time() - starttime REPORT_DATA['main'].append("Done. Saved to: %s" % filename) REPORT_DATA['main'].append("Data exported in %.3f seconds." % endtime) if VERBOSE: report_data() Blender.Window.WaitCursor(0) # -- End of definitions scn = Blender.Scene.GetCurrent() if ONLY_SELECTED: OBJS = list(scn.objects.context) else: OBJS = list(scn.objects)
def main(): scn = Scene.GetCurrent() act_ob= scn.getActiveObject() if act_ob.getType()!='Mesh': act_ob= None sel= [ob for ob in Object.GetSelected() if ob.getType()=='Mesh' if ob != act_ob] if not sel and not act_ob: Draw.PupMenu('Error, select a mesh as your active object') return # Defaults PREF_EDITMESH_ONLY= Draw.Create(1) PREF_MIRROR_LOCATION= Draw.Create(1) PREF_XMID_SNAP= Draw.Create(1) PREF_MAX_DIST= Draw.Create(0.02) PREF_XZERO_THRESH= Draw.Create(0.002) #PREF_MODE= Draw.Create(0) # THIS IS TOOO CONFUSING, HAVE 2 BUTTONS AND MAKE THE MODE FROM THEM. PREF_MODE_L2R= Draw.Create(1) PREF_MODE_R2L= Draw.Create(0) PREF_SEL_ONLY= Draw.Create(1) PREF_EDGE_USERS= Draw.Create(0) # Weights PREF_MIRROR_WEIGHTS= Draw.Create(0) PREF_FLIP_NAMES= Draw.Create(1) PREF_CREATE_FLIP_NAMES= Draw.Create(1) pup_block = [\ ('EditMesh Only', PREF_EDITMESH_ONLY, 'If disabled, will mirror all selected meshes.'),\ 'Left (-), Right (+)',\ ('Left > Right', PREF_MODE_L2R, 'Copy from the Left to Right of the mesh. Enable Both for a mid loc/weight.'),\ ('Right > Left', PREF_MODE_R2L, 'Copy from the Right to Left of the mesh. Enable Both for a mid loc/weight.'),\ '',\ ('MaxDist:', PREF_MAX_DIST, 0.0, 1.0, 'Generate interpolated verts so closer vert weights can be copied.'),\ ('XZero limit:', PREF_XZERO_THRESH, 0.0, 1.0, 'Mirror verts above this distance from the middle, else lock to X/zero.'),\ ('Sel Verts Only', PREF_SEL_ONLY, 'Only mirror selected verts. 
Else try and mirror all'),\ ('Edge Users', PREF_EDGE_USERS, 'Only match up verts that have the same number of edge users.'),\ 'Location Prefs',\ ('Mirror Location', PREF_MIRROR_LOCATION, 'Mirror vertex locations.'),\ ('XMidSnap Verts', PREF_XMID_SNAP, 'Snap middle verts to X Zero (uses XZero limit)'),\ 'Weight Prefs',\ ('Mirror Weights', PREF_MIRROR_WEIGHTS, 'Mirror vertex locations.'),\ ('Flip Groups', PREF_FLIP_NAMES, 'Mirror flip names.'),\ ('New Flip Groups', PREF_CREATE_FLIP_NAMES, 'Make new groups for flipped names.'),\ ] if not Draw.PupBlock("X Mirror mesh tool", pup_block): return # WORK OUT THE MODE 0 # PREF_MODE, 0:middle, 1: Left. 2:Right. PREF_MODE_R2L= PREF_MODE_R2L.val PREF_MODE_L2R= PREF_MODE_L2R.val if PREF_MODE_R2L and PREF_MODE_L2R: PREF_MODE= 0 # Middle elif not PREF_MODE_R2L and PREF_MODE_L2R: PREF_MODE= 1 # Left to Right elif PREF_MODE_R2L and not PREF_MODE_L2R: PREF_MODE= 2 # Right to Left else: # Neither Selected. Do middle anyway PREF_MODE= 0 PREF_EDITMESH_ONLY= PREF_EDITMESH_ONLY.val PREF_MIRROR_LOCATION= PREF_MIRROR_LOCATION.val PREF_XMID_SNAP= PREF_XMID_SNAP.val PREF_MAX_DIST= PREF_MAX_DIST.val PREF_XZERO_THRESH= PREF_XZERO_THRESH.val PREF_SEL_ONLY= PREF_SEL_ONLY.val PREF_EDGE_USERS= PREF_EDGE_USERS.val # weights PREF_MIRROR_WEIGHTS= PREF_MIRROR_WEIGHTS.val PREF_FLIP_NAMES= PREF_FLIP_NAMES.val PREF_CREATE_FLIP_NAMES= PREF_CREATE_FLIP_NAMES.val t= sys.time() is_editmode = Window.EditMode() # Exit Editmode. 
if is_editmode: Window.EditMode(0) Mesh.Mode(Mesh.SelectModes['VERTEX']) Window.WaitCursor(1) if act_ob: mesh_mirror(act_ob.getData(mesh=1), PREF_MIRROR_LOCATION, PREF_XMID_SNAP, PREF_MAX_DIST, PREF_XZERO_THRESH, PREF_MODE, PREF_SEL_ONLY, PREF_EDGE_USERS, PREF_MIRROR_WEIGHTS, PREF_FLIP_NAMES, PREF_CREATE_FLIP_NAMES) if (not PREF_EDITMESH_ONLY) and sel: for ob in sel: mesh_mirror(ob.getData(mesh=1), PREF_MIRROR_LOCATION, PREF_XMID_SNAP, PREF_MAX_DIST, PREF_XZERO_THRESH, PREF_MODE, PREF_SEL_ONLY, PREF_EDGE_USERS, PREF_MIRROR_WEIGHTS, PREF_FLIP_NAMES, PREF_CREATE_FLIP_NAMES) if is_editmode: Window.EditMode(1) Window.WaitCursor(0) Window.DrawProgressBar(1.0, '') Window.RedrawAll() print 'Mirror done in %.6f sec.' % (sys.time()-t)
def set_delay(self, delay):
    """Schedule the next re-parse of this script: due `delay` seconds from now."""
    self.parse_due = delay + time()
def parse_text(txt):
    """Parses an entire script's text and returns a ScriptDesc instance
    containing information about the script.

    If the text is not a valid Python script (for example if brackets are
    left open), parsing may fail to complete. However, if this occurs, no
    exception is thrown. Instead the returned ScriptDesc instance will have
    its incomplete flag set and information processed up to this point will
    still be accessible.

    Implementation: a single pass over the token stream driving several
    small step-counter state machines in parallel (imports, class defs,
    function defs, variable assignments).  The result is also stored in
    the module-level _parse_cache keyed by hash(txt).
    """
    start_time = time()
    txt.reset()
    tokens = generate_tokens(txt.readline)  # Throws TokenError

    curl, cursor = txt.getCursorPos()
    linen = curl + 1  # Token line numbers are one-based
    # NOTE(review): curl/cursor/linen appear unused below -- kept for
    # compatibility in case getCursorPos() matters; confirm and remove.

    imports = dict()
    imp_step = 0        # import-statement state machine

    classes = dict()
    cls_step = 0        # class-definition state machine

    defs = dict()
    def_step = 0        # def-statement state machine

    vars = dict()
    var1_step = 0       # 'self.attr = ...' inside a class
    var2_step = 0       # 'global name[, name]' inside a def
    var3_step = 0       # global-scope assignment / for-loop targets
    var_accum = dict()
    var_forflag = False

    indent = 0
    prev_type = -1
    prev_text = ''
    incomplete = False

    while True:
        try:
            type, text, start, end, line = tokens.next()
        except StopIteration:
            break
        except (TokenError, IndentationError):
            # Broken source (e.g. unclosed bracket): keep what we have.
            incomplete = True
            break

        # Skip all comments and line joining characters
        if type == COMMENT or type == NL:
            continue

        #################
        ## Indentation ##
        #################

        if type == INDENT:
            indent += 1
        elif type == DEDENT:
            indent -= 1

        #########################
        ## Module importing... ##
        #########################

        imp_store = False

        # Default, look for 'from' or 'import' to start
        if imp_step == 0:
            if text == 'from':
                imp_tmp = []
                imp_step = 1
            elif text == 'import':
                imp_from = None
                imp_tmp = []
                imp_step = 2

        # Found a 'from', create imp_from in form '???.???...'
        elif imp_step == 1:
            if text == 'import':
                imp_from = '.'.join(imp_tmp)
                imp_tmp = []
                imp_step = 2
            elif type == NAME:
                imp_tmp.append(text)
            elif text != '.':
                imp_step = 0  # Invalid syntax

        # Found 'import', imp_from is populated or None, create imp_name
        elif imp_step == 2:
            if text == 'as':
                imp_name = '.'.join(imp_tmp)
                imp_step = 3
            elif type == NAME or text == '*':
                imp_tmp.append(text)
            elif text != '.':
                imp_name = '.'.join(imp_tmp)
                imp_symb = imp_name
                imp_store = True

        # Found 'as', change imp_symb to this value and go back to step 2
        elif imp_step == 3:
            if type == NAME:
                imp_symb = text
            else:
                imp_store = True

        # Both imp_name and imp_symb have now been populated so we can import
        if imp_store:
            # Handle special case of 'import *'
            if imp_name == '*':
                parent = get_module(imp_from)
                imports.update(parent.__dict__)
            else:
                # Try importing the name as a module
                try:
                    if imp_from:
                        module = get_module(imp_from + '.' + imp_name)
                    else:
                        module = get_module(imp_name)
                except (ImportError, ValueError, AttributeError, TypeError):
                    # Try importing name as an attribute of the parent
                    try:
                        module = __import__(imp_from, globals(), locals(), [imp_name])
                        imports[imp_symb] = getattr(module, imp_name)
                    except (ImportError, ValueError, AttributeError, TypeError):
                        pass
                else:
                    imports[imp_symb] = module

            # More to import from the same module?
            if text == ',':
                imp_tmp = []
                imp_step = 2
            else:
                imp_step = 0

        ###################
        ## Class parsing ##
        ###################

        # If we are inside a class then def and variable parsing should be done
        # for the class. Otherwise the definitions are considered global

        # Look for 'class'
        if cls_step == 0:
            if text == 'class':
                cls_name = None
                cls_lineno = start[0]
                cls_indent = indent
                cls_step = 1

        # Found 'class', look for cls_name followed by '(' parents ')'
        elif cls_step == 1:
            if not cls_name:
                if type == NAME:
                    cls_name = text
                    cls_sline = False
                    cls_parents = dict()
                    cls_defs = dict()
                    cls_vars = dict()
            elif type == NAME:
                # Inherit known parents' defs/vars.
                if classes.has_key(text):
                    parent = classes[text]
                    cls_parents[text] = parent
                    cls_defs.update(parent.defs)
                    cls_vars.update(parent.vars)
            elif text == ':':
                cls_step = 2

        # Found 'class' name ... ':', now check if it's a single line statement
        elif cls_step == 2:
            if type == NEWLINE:
                cls_sline = False
            else:
                cls_sline = True
            cls_doc = ''
            cls_step = 3

        elif cls_step == 3:
            if not cls_doc and type == STRING:
                cls_doc = _trim_doc(text)
            if cls_sline:
                if type == NEWLINE:
                    classes[cls_name] = ClassDesc(cls_name, cls_parents, cls_defs, cls_vars, cls_lineno, cls_doc)
                    cls_step = 0
            else:
                if type == DEDENT and indent <= cls_indent:
                    classes[cls_name] = ClassDesc(cls_name, cls_parents, cls_defs, cls_vars, cls_lineno, cls_doc)
                    cls_step = 0

        #################
        ## Def parsing ##
        #################

        # Look for 'def'
        if def_step == 0:
            if text == 'def':
                def_name = None
                def_lineno = start[0]
                def_step = 1

        # Found 'def', look for def_name followed by '('
        elif def_step == 1:
            if type == NAME:
                def_name = text
                def_params = []
            elif def_name and text == '(':
                def_step = 2

        # Found 'def' name '(', now identify the parameters upto ')'
        # TODO: Handle ellipsis '...'
        elif def_step == 2:
            if type == NAME:
                def_params.append(text)
            elif text == ':':
                def_step = 3

        # Found 'def' ... ':', now check if it's a single line statement
        elif def_step == 3:
            if type == NEWLINE:
                def_sline = False
            else:
                def_sline = True
            def_doc = ''
            def_step = 4

        elif def_step == 4:
            if type == STRING:
                def_doc = _trim_doc(text)
            newdef = None
            if def_sline:
                if type == NEWLINE:
                    newdef = FunctionDesc(def_name, def_params, def_lineno, def_doc)
            else:
                if type == NAME:
                    newdef = FunctionDesc(def_name, def_params, def_lineno, def_doc)
            if newdef:
                if cls_step > 0:  # Parsing a class
                    cls_defs[def_name] = newdef
                else:
                    defs[def_name] = newdef
                def_step = 0

        ##########################
        ## Variable assignation ##
        ##########################

        if cls_step > 0:  # Parsing a class
            # Look for 'self.???'
            if var1_step == 0:
                if text == 'self':
                    var1_step = 1
            elif var1_step == 1:
                if text == '.':
                    var_name = None
                    var1_step = 2
                else:
                    var1_step = 0
            elif var1_step == 2:
                if type == NAME:
                    var_name = text
                    if cls_vars.has_key(var_name):
                        # BUGFIX: was 'var_step = 0', which assigned an unused
                        # name and left this state machine stuck in step 2.
                        var1_step = 0
                    else:
                        var1_step = 3
            elif var1_step == 3:
                if text == '=':
                    var1_step = 4
                elif text != ',':
                    var1_step = 0
            elif var1_step == 4:
                # Guess the attribute's type from the first token of the value.
                var_type = None
                if type == NUMBER:
                    close = end[1]
                    if text.find('.') != -1:
                        var_type = float
                    else:
                        var_type = int
                elif type == STRING:
                    close = end[1]
                    var_type = str
                elif text == '[':
                    close = line.find(']', end[1])
                    var_type = list
                elif text == '(':
                    close = line.find(')', end[1])
                    var_type = tuple
                elif text == '{':
                    close = line.find('}', end[1])
                    var_type = dict
                elif text == 'dict':
                    close = line.find(')', end[1])
                    var_type = dict
                # If the literal is immediately followed by another operator,
                # the guessed type is unreliable -- drop it.
                if var_type and close + 1 < len(line):
                    if line[close + 1] != ' ' and line[close + 1] != '\t':
                        var_type = None
                cls_vars[var_name] = VarDesc(var_name, var_type, start[0])
                var1_step = 0

        elif def_step > 0:  # Parsing a def
            # Look for 'global ???[,???]'
            if var2_step == 0:
                if text == 'global':
                    var2_step = 1
            elif var2_step == 1:
                if type == NAME:
                    if not vars.has_key(text):
                        vars[text] = VarDesc(text, None, start[0])
                elif text != ',' and type != NL:
                    # BUGFIX: was 'var2_step == 0' (a no-op comparison), so the
                    # machine never reset after the global statement ended.
                    var2_step = 0

        else:  # In global scope
            if var3_step == 0:
                # Look for names
                if text == 'for':
                    var_accum = dict()
                    var_forflag = True
                elif text == '=' or (var_forflag and text == 'in'):
                    var_forflag = False
                    var3_step = 1
                elif type == NAME:
                    if prev_text != '.' and not vars.has_key(text):
                        var_accum[text] = VarDesc(text, None, start[0])
                elif not text in [',', '(', ')', '[', ']']:
                    var_accum = dict()
                    var_forflag = False
            elif var3_step == 1:
                if len(var_accum) != 1:
                    # Multiple targets (tuple unpacking): types unknown.
                    var_type = None
                    vars.update(var_accum)
                else:
                    var_name = var_accum.keys()[0]
                    var_type = None
                    if type == NUMBER:
                        if text.find('.') != -1:
                            var_type = float
                        else:
                            var_type = int
                    elif type == STRING:
                        var_type = str
                    elif text == '[':
                        var_type = list
                    elif text == '(':
                        var_type = tuple
                    elif text == '{':
                        var_type = dict
                    vars[var_name] = VarDesc(var_name, var_type, start[0])
                var3_step = 0

        #######################
        ## General utilities ##
        #######################

        prev_type = type
        prev_text = text

    desc = ScriptDesc(txt.name, imports, classes, defs, vars, incomplete)
    # Re-parse delay proportional to how long this parse took.
    desc.set_delay(10 * (time() - start_time) + 0.05)

    global _parse_cache
    _parse_cache[hash(txt)] = desc
    return desc
def write(filename, objects,\
EXPORT_TRI=False, EXPORT_EDGES=False, EXPORT_NORMALS=False, EXPORT_NORMALS_HQ=False,\
EXPORT_UV=True, EXPORT_MTL=True, EXPORT_COPY_IMAGES=False,\
EXPORT_APPLY_MODIFIERS=True, EXPORT_ROTX90=True, EXPORT_BLEN_OBS=True,\
EXPORT_GROUP_BY_OB=False, EXPORT_GROUP_BY_MAT=False, EXPORT_MORPH_TARGET=False, EXPORT_ARMATURE=False):
    '''
    Basic write function. The context and options must be alredy set.
    This can be accessed externaly, eg.
    write( 'c:\\test\\foobar.obj', Blender.Object.GetSelected() ) # Using default options.

    Writes `objects` to the OBJ file `filename` (plus a .mtl file when
    EXPORT_MTL is set).  Vertex/uv/normal indices are global across all
    exported meshes, as the OBJ format requires.  Prints progress/timing.
    '''
    def veckey3d(v):
        # Rounded 3D vector key -- used to deduplicate normals.
        return round(v.x, 6), round(v.y, 6), round(v.z, 6)

    def veckey2d(v):
        # Rounded 2D vector key -- used to deduplicate UV coords.
        return round(v.x, 6), round(v.y, 6)

    print 'OBJ Export path: "%s"' % filename
    temp_mesh_name = '~tmp-mesh'

    time1 = sys.time()
    scn = Scene.GetCurrent()

    file = open(filename, "w")

    # Write Header
    file.write('# Blender3D v%s OBJ File: %s\n' % (Blender.Get('version'), Blender.Get('filename').split('/')[-1].split('\\')[-1] ))
    file.write('# www.blender3d.org\n')

    # Tell the obj file what material file to use.
    if EXPORT_MTL:
        mtlfilename = '%s.mtl' % '.'.join(filename.split('.')[:-1])
        file.write('mtllib %s\n' % ( mtlfilename.split('\\')[-1].split('/')[-1] ))

    # Get the container mesh. - used for applying modifiers and non mesh objects.
    # Reuse an existing unused '~tmp-mesh*' datablock if one exists.
    containerMesh = meshName = tempMesh = None
    for meshName in Blender.NMesh.GetNames():
        if meshName.startswith(temp_mesh_name):
            tempMesh = Mesh.Get(meshName)
            if not tempMesh.users:
                containerMesh = tempMesh
    if not containerMesh:
        containerMesh = Mesh.New(temp_mesh_name)

    if EXPORT_ROTX90:
        mat_xrot90= Blender.Mathutils.RotationMatrix(-90, 4, 'x')

    del meshName
    del tempMesh

    # Initialize totals, these are updated each object
    totverts = totuvco = totno = 1

    face_vert_index = 1

    globalNormals = {}

    # Get all meshs
    for ob_main in objects:
        for ob, ob_mat in BPyObject.getDerivedObjects(ob_main):
            # Will work for non meshes now! :)
            # getMeshFromObject(ob, container_mesh=None, apply_modifiers=True, vgroups=True, scn=None)
            if EXPORT_ARMATURE:
                write_armature(file,ob)
                write_poses(file,ob)
            me= BPyMesh.getMeshFromObject(ob, containerMesh, EXPORT_APPLY_MODIFIERS, False, scn)
            if not me:
                continue

            if EXPORT_UV:
                faceuv= me.faceUV
            else:
                faceuv = False

            # We have a valid mesh
            if EXPORT_TRI and me.faces:
                # Add a dummy object to it.
                has_quads = False
                for f in me.faces:
                    if len(f) == 4:
                        has_quads = True
                        break

                if has_quads:
                    # quadToTriangle() needs the mesh linked to an object and
                    # the faces selected; restore modes afterwards.
                    oldmode = Mesh.Mode()
                    Mesh.Mode(Mesh.SelectModes['FACE'])

                    me.sel = True
                    tempob = scn.objects.new(me)
                    me.quadToTriangle(0) # more=0 shortest length
                    oldmode = Mesh.Mode(oldmode)
                    scn.objects.unlink(tempob)

                    Mesh.Mode(oldmode)

            faces = [ f for f in me.faces ]
            if EXPORT_EDGES:
                edges = me.edges
            else:
                edges = []

            if not (len(faces)+len(edges)+len(me.verts)): # Make sure there is somthing to write
                continue # dont bother with this mesh.

            # Bake the object matrix (and optional -90deg X rotation) into
            # the vertex data.
            if EXPORT_ROTX90:
                me.transform(ob_mat*mat_xrot90)
            else:
                me.transform(ob_mat)

            # High Quality Normals
            if EXPORT_NORMALS and faces:
                if EXPORT_NORMALS_HQ:
                    BPyMesh.meshCalcNormals(me)
                else:
                    # transforming normals is incorrect
                    # when the matrix is scaled,
                    # better to recalculate them
                    me.calcNormals()

            # # Crash Blender
            #materials = me.getMaterials(1) # 1 == will return None in the list.
            materials = me.materials

            materialNames = []
            materialItems = materials[:]
            if materials:
                for mat in materials:
                    if mat: # !=None
                        materialNames.append(mat.name)
                    else:
                        materialNames.append(None)
                # Cant use LC because some materials are None.
                # materialNames = map(lambda mat: mat.name, materials) # Bug Blender, dosent account for null materials, still broken.

            # Possible there null materials, will mess up indicies
            # but at least it will export, wait until Blender gets fixed.
            materialNames.extend((16-len(materialNames)) * [None])
            materialItems.extend((16-len(materialItems)) * [None])

            # Sort by Material, then images
            # so we dont over context switch in the obj file.
            if EXPORT_MORPH_TARGET:
                pass
            elif faceuv:
                # key= form needs a newer Python; fall back to cmp sorting.
                try: faces.sort(key = lambda a: (a.mat, a.image, a.smooth))
                except: faces.sort(lambda a,b: cmp((a.mat, a.image, a.smooth), (b.mat, b.image, b.smooth)))
            elif len(materials) > 1:
                try: faces.sort(key = lambda a: (a.mat, a.smooth))
                except: faces.sort(lambda a,b: cmp((a.mat, a.smooth), (b.mat, b.smooth)))
            else:
                # no materials
                try: faces.sort(key = lambda a: a.smooth)
                except: faces.sort(lambda a,b: cmp(a.smooth, b.smooth))

            # Set the default mat to no material and no image.
            contextMat = (0, 0) # Can never be this, so we will label a new material teh first chance we get.
            contextSmooth = None # Will either be true or false, set bad to force initialization switch.

            if EXPORT_BLEN_OBS or EXPORT_GROUP_BY_OB:
                name1 = ob.name
                name2 = ob.getData(1)
                if name1 == name2:
                    obnamestring = fixName(name1)
                else:
                    obnamestring = '%s_%s' % (fixName(name1), fixName(name2))

                if EXPORT_BLEN_OBS:
                    file.write('o %s\n' % obnamestring) # Write Object name
                else: # if EXPORT_GROUP_BY_OB:
                    file.write('g %s\n' % obnamestring)

            # Vert
            # NOTE(review): these 'obm:'/'vgroup:'/'influence:' records are
            # non-standard OBJ extensions emitted by this exporter.
            mesh = ob.getData()
            objmat = ob.getMatrix()
            for i in objmat:
                file.write('obm: %.6f %.6f %.6f %.6f\n' % tuple(i))
            vgrouplist = mesh.getVertGroupNames()
            file.write('vgroupcount: %i\n' % len(vgrouplist))
            for vgname in vgrouplist:
                file.write('vgroup: %s\n' % vgname)
            for v in mesh.verts:
                file.write('v %.6f %.6f %.6f\n' % tuple(v.co))
                influences = mesh.getVertexInfluences(v.index)
                file.write('influence: %i\n' % len(influences))
                for name,weight in influences:
                    file.write('GroupName: %s\n' % name)
                    file.write('Weight: %f\n' % weight)

            # UV
            if faceuv:
                uv_face_mapping = [[0,0,0,0] for f in faces] # a bit of a waste for tri's :/

                uv_dict = {} # could use a set() here
                for f_index, f in enumerate(faces):
                    for uv_index, uv in enumerate(f.uv):
                        uvkey = veckey2d(uv)
                        try:
                            uv_face_mapping[f_index][uv_index] = uv_dict[uvkey]
                        except:
                            # First time this UV is seen: assign it the next
                            # index and write it out.
                            uv_face_mapping[f_index][uv_index] = uv_dict[uvkey] = len(uv_dict)
                            file.write('vt %.6f %.6f\n' % tuple(uv))

                uv_unique_count = len(uv_dict)
                del uv, uvkey, uv_dict, f_index, uv_index
                # Only need uv_unique_count and uv_face_mapping

            # NORMAL, Smooth/Non smoothed.
            if EXPORT_NORMALS:
                for f in faces:
                    if f.smooth:
                        for v in f:
                            noKey = veckey3d(v.no)
                            if not globalNormals.has_key( noKey ):
                                globalNormals[noKey] = totno
                                totno +=1
                                file.write('vn %.6f %.6f %.6f\n' % noKey)
                    else:
                        # Hard, 1 normal from the face.
                        noKey = veckey3d(f.no)
                        if not globalNormals.has_key( noKey ):
                            globalNormals[noKey] = totno
                            totno +=1
                            file.write('vn %.6f %.6f %.6f\n' % noKey)

            if not faceuv:
                f_image = None

            for f_index, f in enumerate(faces):
                f_v= f.v
                f_smooth= f.smooth
                f_mat = min(f.mat, len(materialNames)-1)
                if faceuv:
                    f_image = f.image
                    f_uv= f.uv

                # MAKE KEY
                if faceuv and f_image: # Object is always true.
                    key = materialNames[f_mat], f_image.name
                else:
                    key = materialNames[f_mat], None # No image, use None instead.

                # CHECK FOR CONTEXT SWITCH
                if key == contextMat:
                    pass # Context alredy switched, dont do anythoing
                else:
                    if key[0] == None and key[1] == None:
                        # Write a null material, since we know the context has changed.
                        if EXPORT_GROUP_BY_MAT:
                            file.write('g %s_%s\n' % (fixName(ob.name), fixName(ob.getData(1))) ) # can be mat_image or (null)
                        file.write('usemtl (null)\n') # mat, image
                    else:
                        mat_data= MTL_DICT.get(key)
                        if not mat_data:
                            # First add to global dict so we can export to mtl
                            # Then write mtl

                            # Make a new names from the mat and image name,
                            # converting any spaces to underscores with fixName.

                            # If none image dont bother adding it to the name
                            if key[1] == None:
                                mat_data = MTL_DICT[key] = ('%s'%fixName(key[0])), materialItems[f_mat], f_image
                            else:
                                mat_data = MTL_DICT[key] = ('%s_%s' % (fixName(key[0]), fixName(key[1]))), materialItems[f_mat], f_image

                        if EXPORT_GROUP_BY_MAT:
                            file.write('g %s_%s_%s\n' % (fixName(ob.name), fixName(ob.getData(1)), mat_data[0]) ) # can be mat_image or (null)

                        file.write('usemtl %s\n' % mat_data[0]) # can be mat_image or (null)

                    contextMat = key

                if f_smooth != contextSmooth:
                    if f_smooth: # on now off
                        file.write('s 1\n')
                        contextSmooth = f_smooth
                    else: # was off now on
                        file.write('s off\n')
                        contextSmooth = f_smooth

                file.write('f')
                if faceuv:
                    if EXPORT_NORMALS:
                        if f_smooth: # Smoothed, use vertex normals
                            for vi, v in enumerate(f_v):
                                file.write( ' %d/%d/%d' % (\
                                  v.index+totverts,\
                                  totuvco + uv_face_mapping[f_index][vi],\
                                  globalNormals[ veckey3d(v.no) ])) # vert, uv, normal
                        else: # No smoothing, face normals
                            no = globalNormals[ veckey3d(f.no) ]
                            for vi, v in enumerate(f_v):
                                file.write( ' %d/%d/%d' % (\
                                  v.index+totverts,\
                                  totuvco + uv_face_mapping[f_index][vi],\
                                  no)) # vert, uv, normal
                    else: # No Normals
                        for vi, v in enumerate(f_v):
                            file.write( ' %d/%d' % (\
                              v.index+totverts,\
                              totuvco + uv_face_mapping[f_index][vi])) # vert, uv

                    face_vert_index += len(f_v)

                else: # No UV's
                    if EXPORT_NORMALS:
                        if f_smooth: # Smoothed, use vertex normals
                            for v in f_v:
                                file.write( ' %d//%d' % (\
                                  v.index+totverts,\
                                  globalNormals[ veckey3d(v.no) ]))
                        else: # No smoothing, face normals
                            no = globalNormals[ veckey3d(f.no) ]
                            for v in f_v:
                                file.write( ' %d//%d' % (\
                                  v.index+totverts,\
                                  no))
                    else: # No Normals
                        for v in f_v:
                            file.write( ' %d' % (\
                              v.index+totverts))

                file.write('\n')

            # Write edges.
            if EXPORT_EDGES:
                LOOSE= Mesh.EdgeFlags.LOOSE
                for ed in edges:
                    if ed.flag & LOOSE:
                        file.write('f %d %d\n' % (ed.v1.index+totverts, ed.v2.index+totverts))

            # Make the indicies global rather then per mesh
            totverts += len(me.verts)
            if faceuv:
                totuvco += uv_unique_count
            # Free the temp mesh data for this derived object.
            # NOTE(review): loop level of this statement was reconstructed
            # from collapsed source -- confirm against the original script.
            me.verts= None
    file.close()

    # Now we have all our materials, save them
    if EXPORT_MTL:
        write_mtl(mtlfilename)
    if EXPORT_COPY_IMAGES:
        dest_dir = filename
        # Remove chars until we are just the path.
        while dest_dir and dest_dir[-1] not in '\\/':
            dest_dir = dest_dir[:-1]
        if dest_dir:
            copy_images(dest_dir)
        else:
            print '\tError: "%s" could not be used as a base for an image path.' % filename

    print "OBJ Export time: %.2f" % (sys.time() - time1)
def main():
    """Entry point for the Poly Reducer tool.

    Validates the active mesh object, asks for reduction options via a
    popup block, then runs BPyMesh.redux() on the active mesh (and
    optionally on the other selected meshes).  Saves/restores editmode
    around the edits and prints timing.
    """
    scn = Scene.GetCurrent()
    act_ob= scn.objects.active
    if not act_ob or act_ob.type != 'Mesh':
        BPyMessages.Error_NoMeshActive()
        return

    act_me= act_ob.getData(mesh=1)

    # Multires meshes cannot be edited this way.
    if act_me.multires:
        BPyMessages.Error_NoMeshMultiresEdit()
        return

    # Pre-fill the vgroup name button with the active group, if any.
    act_group= act_me.activeGroup
    if not act_group: act_group= ''

    # Defaults
    PREF_REDUX= Draw.Create(0.5)
    PREF_BOUNDRY_WEIGHT= Draw.Create(5.0)
    PREF_REM_DOUBLES= Draw.Create(1)
    PREF_FACE_AREA_WEIGHT= Draw.Create(1.0)
    PREF_FACE_TRIANGULATE= Draw.Create(1)

    VGROUP_INF_ENABLE= Draw.Create(0)
    VGROUP_INF_REDUX= Draw.Create(act_group)
    VGROUP_INF_WEIGHT= Draw.Create(10.0)

    PREF_DO_UV= Draw.Create(1)
    PREF_DO_VCOL= Draw.Create(1)
    PREF_DO_WEIGHTS= Draw.Create(1)
    PREF_OTHER_SEL_OBS= Draw.Create(0)

    pup_block = [\
    ('Poly Reduce:', PREF_REDUX, 0.05, 0.95, 'Scale the meshes poly count by this value.'),\
    ('Boundry Weight:', PREF_BOUNDRY_WEIGHT, 0.0, 20.0, 'Weight boundry verts by this scale, 0.0 for no boundry weighting.'),\
    ('Area Weight:', PREF_FACE_AREA_WEIGHT, 0.0, 20.0, 'Collapse edges effecting lower area faces first.'),\
    ('Triangulate', PREF_FACE_TRIANGULATE, 'Convert quads to tris before reduction, for more choices of edges to collapse.'),\
    '',\
    ('VGroup Weighting', VGROUP_INF_ENABLE, 'Use a vertex group to influence the reduction, higher weights for higher quality '),\
    ('vgroup name: ', VGROUP_INF_REDUX, 0, 32, 'The name of the vertex group to use for the weight map'),\
    ('vgroup mult: ', VGROUP_INF_WEIGHT, 0.0, 100.0, 'How much to make the weight effect the reduction'),\
    ('Other Selected Obs', PREF_OTHER_SEL_OBS, 'reduce other selected objects.'),\
    '',\
    '',\
    '',\
    ('UV Coords', PREF_DO_UV, 'Interpolate UV Coords.'),\
    ('Vert Colors', PREF_DO_VCOL, 'Interpolate Vertex Colors'),\
    ('Vert Weights', PREF_DO_WEIGHTS, 'Interpolate Vertex Weights'),\
    ('Remove Doubles', PREF_REM_DOUBLES, 'Remove doubles before reducing to avoid boundry tearing.'),\
    ]

    if not Draw.PupBlock("Poly Reducer", pup_block):
        return

    # Unwrap the button objects into plain values.
    PREF_REDUX= PREF_REDUX.val
    PREF_BOUNDRY_WEIGHT= PREF_BOUNDRY_WEIGHT.val
    PREF_REM_DOUBLES= PREF_REM_DOUBLES.val
    PREF_FACE_AREA_WEIGHT= PREF_FACE_AREA_WEIGHT.val
    PREF_FACE_TRIANGULATE= PREF_FACE_TRIANGULATE.val
    VGROUP_INF_ENABLE= VGROUP_INF_ENABLE.val
    VGROUP_INF_WEIGHT= VGROUP_INF_WEIGHT.val
    # Vgroup weighting only applies when both enabled and non-zero.
    if VGROUP_INF_ENABLE and VGROUP_INF_WEIGHT:
        VGROUP_INF_REDUX= VGROUP_INF_REDUX.val
    else:
        VGROUP_INF_WEIGHT= 0.0
        VGROUP_INF_REDUX= None

    PREF_DO_UV= PREF_DO_UV.val
    PREF_DO_VCOL= PREF_DO_VCOL.val
    PREF_DO_WEIGHTS= PREF_DO_WEIGHTS.val
    PREF_OTHER_SEL_OBS= PREF_OTHER_SEL_OBS.val

    t= sys.time()

    is_editmode = Window.EditMode() # Exit Editmode.
    if is_editmode: Window.EditMode(0)
    Window.WaitCursor(1)

    print 'reducing:', act_ob.name, act_ob.getData(1)
    BPyMesh.redux(act_ob, PREF_REDUX, PREF_BOUNDRY_WEIGHT, PREF_REM_DOUBLES, PREF_FACE_AREA_WEIGHT, PREF_FACE_TRIANGULATE, PREF_DO_UV, PREF_DO_VCOL, PREF_DO_WEIGHTS, VGROUP_INF_REDUX, VGROUP_INF_WEIGHT)

    if PREF_OTHER_SEL_OBS:
        for ob in scn.objects.context:
            if ob.type == 'Mesh' and ob != act_ob:
                print 'reducing:', ob.name, ob.getData(1)
                BPyMesh.redux(ob, PREF_REDUX, PREF_BOUNDRY_WEIGHT, PREF_REM_DOUBLES, PREF_FACE_AREA_WEIGHT, PREF_FACE_TRIANGULATE, PREF_DO_UV, PREF_DO_VCOL, PREF_DO_WEIGHTS, VGROUP_INF_REDUX, VGROUP_INF_WEIGHT)
                # Show progress after each reduced object.
                Window.RedrawAll()

    if is_editmode: Window.EditMode(1)
    Window.WaitCursor(0)
    Window.RedrawAll()

    print 'Reduction done in %.6f sec.' % (sys.time()-t)
def load_obj(filepath, CLAMP_SIZE=0.0, CREATE_FGONS=True, CREATE_SMOOTH_GROUPS=True, CREATE_EDGES=True, SPLIT_OBJECTS=True, SPLIT_GROUPS=True, SPLIT_MATERIALS=True, ROTATE_X90=True, IMAGE_SEARCH=True, POLYGROUPS=False):
    '''
    Called by the user interface or another script.
    load_obj(path) - should give acceptable results.
    This function passes the file and sends the data off
        to be split into objects and then converted into mesh objects.

    Parses the OBJ file line by line (verts, uvs, faces, lines, materials,
    smooth groups, basic nurbs), then builds Blender meshes/curves from the
    collected data.  Faces that continue over multiple physical lines (a
    trailing backslash) are handled via the context_multi_line flag.
    '''
    print '\nimporting obj "%s"' % filepath

    # Polygroups are incompatible with splitting -- splitting wins.
    if SPLIT_OBJECTS or SPLIT_GROUPS or SPLIT_MATERIALS:
        POLYGROUPS = False

    time_main = sys.time()

    verts_loc = []
    verts_tex = []
    faces = [] # tuples of the faces
    material_libs = [] # filanems to material libs this uses
    vertex_groups = {} # when POLYGROUPS is true

    # Get the string to float conversion func for this file- is 'float' for almost all files.
    float_func = get_float_func(filepath)

    # Context variables
    context_material = None
    context_smooth_group = None
    context_object = None
    context_vgroup = None

    # Nurbs
    context_nurbs = {}
    nurbs = []
    context_parm = '' # used by nurbs too but could be used elsewhere

    has_ngons = False
    # has_smoothgroups= False - is explicit with len(unique_smooth_groups) being > 0

    # Until we can use sets
    unique_materials = {}
    unique_material_images = {}
    unique_smooth_groups = {}
    # unique_obects= {} - no use for this variable since the objects are stored in the face.

    # when there are faces that end with \
    # it means they are multiline-
    # since we use xreadline we cant skip to the next line
    # so we need to know weather
    context_multi_line = ''

    print '\tparsing obj file "%s"...' % filepath,
    time_sub = sys.time()

    file = open(filepath, 'rU')
    for line in file: #.xreadlines():
        line = line.lstrip() # rare cases there is white space at the start of the line

        if line.startswith('v '):
            line_split = line.split()
            # rotate X90: (x,-z,y)
            verts_loc.append( (float_func(line_split[1]), -float_func(line_split[3]), float_func(line_split[2])) )

        elif line.startswith('vn '):
            # Normals are ignored; Blender recalculates them.
            pass

        elif line.startswith('vt '):
            line_split = line.split()
            verts_tex.append( (float_func(line_split[1]), float_func(line_split[2])) )

        # Handel faces lines (as faces) and the second+ lines of fa multiline face here
        # use 'f' not 'f ' because some objs (very rare have 'fo ' for faces)
        elif line.startswith('f') or context_multi_line == 'f':

            if context_multi_line:
                # use face_vert_loc_indicies and face_vert_tex_indicies previously defined and used the obj_face
                line_split = line.split()

            else:
                line_split = line[2:].split()
                face_vert_loc_indicies = []
                face_vert_tex_indicies = []

                # Instance a face
                faces.append((\
                face_vert_loc_indicies,\
                face_vert_tex_indicies,\
                context_material,\
                context_smooth_group,\
                context_object\
                ))

            if strip_slash(line_split):
                context_multi_line = 'f'
            else:
                context_multi_line = ''

            for v in line_split:
                obj_vert = v.split('/')

                vert_loc_index = int(obj_vert[0]) - 1
                # Add the vertex to the current group
                # *warning*, this wont work for files that have groups defined around verts
                if POLYGROUPS and context_vgroup:
                    vertex_groups[context_vgroup].append(vert_loc_index)

                # Make relative negative vert indicies absolute
                if vert_loc_index < 0:
                    vert_loc_index = len(verts_loc) + vert_loc_index + 1

                face_vert_loc_indicies.append(vert_loc_index)

                if len(obj_vert) > 1 and obj_vert[1]:
                    # formatting for faces with normals and textures us
                    # loc_index/tex_index/nor_index

                    vert_tex_index = int(obj_vert[1]) - 1
                    # Make relative negative vert indicies absolute
                    if vert_tex_index < 0:
                        vert_tex_index = len(verts_tex) + vert_tex_index + 1

                    face_vert_tex_indicies.append(vert_tex_index)
                else:
                    # dummy
                    face_vert_tex_indicies.append(0)

            if len(face_vert_loc_indicies) > 4:
                has_ngons = True

        elif CREATE_EDGES and (line.startswith('l ') or context_multi_line == 'l'):
            # very similar to the face load function above with some parts removed

            if context_multi_line:
                # use face_vert_loc_indicies and face_vert_tex_indicies previously defined and used the obj_face
                line_split = line.split()

            else:
                line_split = line[2:].split()
                face_vert_loc_indicies = []
                face_vert_tex_indicies = []

                # Instance a face
                faces.append((\
                face_vert_loc_indicies,\
                face_vert_tex_indicies,\
                context_material,\
                context_smooth_group,\
                context_object\
                ))

            if strip_slash(line_split):
                context_multi_line = 'l'
            else:
                context_multi_line = ''

            isline = line.startswith('l')

            for v in line_split:
                vert_loc_index = int(v) - 1

                # Make relative negative vert indicies absolute
                if vert_loc_index < 0:
                    vert_loc_index = len(verts_loc) + vert_loc_index + 1

                face_vert_loc_indicies.append(vert_loc_index)

        elif line.startswith('s'):
            if CREATE_SMOOTH_GROUPS:
                context_smooth_group = line_value(line.split())
                if context_smooth_group == 'off':
                    context_smooth_group = None
                elif context_smooth_group: # is not None
                    unique_smooth_groups[context_smooth_group] = None

        elif line.startswith('o'):
            if SPLIT_OBJECTS:
                context_object = line_value(line.split())
                # unique_obects[context_object]= None

        elif line.startswith('g'):
            if SPLIT_GROUPS:
                context_object = line_value(line.split())
                # print 'context_object', context_object
                # unique_obects[context_object]= None
            elif POLYGROUPS:
                context_vgroup = line_value(line.split())
                if context_vgroup and context_vgroup != '(null)':
                    vertex_groups.setdefault(context_vgroup, [])
                else:
                    context_vgroup = None # dont assign a vgroup

        elif line.startswith('usemtl'):
            context_material = line_value(line.split())
            unique_materials[context_material] = None

        elif line.startswith('mtllib'): # usemap or usemat
            material_libs.extend( line.split()[1:] ) # can have multiple mtllib filenames per line

        # Nurbs support
        elif line.startswith('cstype '):
            context_nurbs['cstype'] = line_value(line.split()) # 'rat bspline' / 'bspline'

        elif line.startswith('curv ') or context_multi_line == 'curv':
            line_split = line.split()

            curv_idx = context_nurbs['curv_idx'] = context_nurbs.get('curv_idx', []) # incase were multiline

            if not context_multi_line:
                context_nurbs['curv_range'] = float_func(line_split[1]), float_func(line_split[2])
                line_split[0:3] = [] # remove first 3 items

            if strip_slash(line_split):
                context_multi_line = 'curv'
            else:
                context_multi_line = ''

            for i in line_split:
                vert_loc_index = int(i) - 1

                if vert_loc_index < 0:
                    vert_loc_index = len(verts_loc) + vert_loc_index + 1

                curv_idx.append(vert_loc_index)

        elif line.startswith('parm') or context_multi_line == 'parm':
            line_split = line.split()

            if context_multi_line:
                context_multi_line = ''
            else:
                context_parm = line_split[1]
                line_split[0:2] = [] # remove first 2

            if strip_slash(line_split):
                context_multi_line = 'parm'
            else:
                context_multi_line = ''

            if context_parm.lower() == 'u':
                context_nurbs.setdefault('parm_u', []).extend( [float_func(f) for f in line_split] )
            elif context_parm.lower() == 'v': # surfaces not suported yet
                context_nurbs.setdefault('parm_v', []).extend( [float_func(f) for f in line_split] )
            # else: # may want to support other parm's ?

        elif line.startswith('deg '):
            context_nurbs['deg'] = [int(i) for i in line.split()[1:]]
        elif line.startswith('end'):
            # Add the nurbs curve
            if context_object:
                context_nurbs['name'] = context_object
            nurbs.append(context_nurbs)
            context_nurbs = {}
            context_parm = ''

        ''' # How to use usemap? depricated?
        elif line.startswith('usema'): # usemap or usemat
            context_image= line_value(line.split())
        '''

    file.close()
    time_new = sys.time()
    print '%.4f sec' % (time_new - time_sub)
    time_sub = time_new

    print '\tloading materials and images...',
    create_materials(filepath, material_libs, unique_materials, unique_material_images, IMAGE_SEARCH)

    time_new = sys.time()
    print '%.4f sec' % (time_new - time_sub)
    time_sub = time_new

    # Verts were stored pre-rotated (x,-z,y); undo that when the user
    # did not ask for the X90 rotation.
    if not ROTATE_X90:
        verts_loc[:] = [(v[0], v[2], -v[1]) for v in verts_loc]

    # deselect all
    scn = bpy.data.scenes.active
    scn.objects.selected = []
    new_objects = [] # put new objects here

    print '\tbuilding geometry...\n\tverts:%i faces:%i materials: %i smoothgroups:%i ...' % ( len(verts_loc), len(faces), len(unique_materials), len(unique_smooth_groups) ),

    # Split the mesh by objects/materials, may
    if SPLIT_OBJECTS or SPLIT_GROUPS:
        SPLIT_OB_OR_GROUP = True
    else:
        SPLIT_OB_OR_GROUP = False

    for verts_loc_split, faces_split, unique_materials_split, dataname in split_mesh(verts_loc, faces, unique_materials, filepath, SPLIT_OB_OR_GROUP, SPLIT_MATERIALS):
        # Create meshes from the data, warning 'vertex_groups' wont support splitting
        create_mesh(scn, new_objects, has_ngons, CREATE_FGONS, CREATE_EDGES, verts_loc_split, verts_tex, faces_split, unique_materials_split, unique_material_images, unique_smooth_groups, vertex_groups, dataname)

    # nurbs support
    for context_nurbs in nurbs:
        create_nurbs(scn, context_nurbs, verts_loc, new_objects)

    axis_min = [1000000000] * 3
    axis_max = [-1000000000] * 3

    if CLAMP_SIZE:
        # Get all object bounds
        for ob in new_objects:
            for v in ob.getBoundBox():
                for axis, value in enumerate(v):
                    if axis_min[axis] > value:
                        axis_min[axis] = value
                    if axis_max[axis] < value:
                        axis_max[axis] = value

        # Scale objects so the largest bound fits within CLAMP_SIZE.
        max_axis = max(axis_max[0] - axis_min[0], axis_max[1] - axis_min[1], axis_max[2] - axis_min[2])
        scale = 1.0

        while CLAMP_SIZE < max_axis * scale:
            scale = scale / 10.0

        for ob in new_objects:
            ob.setSize(scale, scale, scale)

    # Better rotate the vert locations
    #if not ROTATE_X90:
    #	for ob in new_objects:
    #		ob.RotX = -1.570796326794896558

    time_new = sys.time()

    print '%.4f sec' % (time_new - time_sub)
    print 'finished importing: "%s" in %.4f sec.' % (filepath, (time_new - time_main))
def fs_callback(filename):
    """File-selector callback: export the current scene to Unreal PSK
    (static/skeletal mesh) and PSA (animation) files.

    filename -- path chosen in the file selector; '.psk' / '.psa'
                extensions are (re)applied via make_filename_ext.

    Runs three phases in a fixed order -- meshes, then armature, then
    animation -- because the armature parse consumes vertex-group data
    produced by the mesh parse, and the animation parse looks up bones
    produced by the armature parse.  On any failure the current frame is
    restored before the exception is re-raised.
    """
    t = sys.time()  # wall-clock start, reported at the end
    import time
    import datetime
    print "======EXPORTING TO UNREAL SKELETAL MESH FORMATS========\r\n"
    # Accumulator objects the parse_* helpers fill in.
    psk = PSKFile()
    psa = PSAFile()
    # Sanity check - this should already have the extension, but just in
    # case, we'll give it one if it doesn't.
    psk_filename = make_filename_ext(filename, '.psk')
    # Make the psa filename from the same base path.
    psa_filename = make_filename_ext(filename, '.psa')
    #print 'PSK File: ' + psk_filename
    #print 'PSA File: ' + psa_filename
    blender_meshes = []
    blender_armature = []
    current_scene = Blender.Scene.GetCurrent()
    current_scene.makeCurrent()
    # Store the current frame before we start walking frames during the
    # animation parse, so it can be restored afterwards (or on error).
    cur_frame = Blender.Get('curframe')
    objects = current_scene.getChildren()
    # Partition scene objects by type for the two parse passes.
    blender_meshes = get_blender_objects(objects, 'Mesh')
    blender_armature = get_blender_objects(objects, 'Armature')
    try:
        #######################
        # STEP 1: MESH DUMP
        # We build the vertexes, wedges, and faces in here, as well as a
        # vertexgroup lookup table for the armature parse.
        parse_meshes(blender_meshes, psk)
    except:
        Blender.Set('curframe', cur_frame)  # set frame back to original frame
        print "Exception during Mesh Parse"
        raise
    try:
        #######################
        # STEP 2: ARMATURE DUMP
        # IMPORTANT: do this AFTER parsing meshes - we need to use the
        # vertex group data from the mesh parse in here to generate bone
        # influences.
        parse_armature(blender_armature, psk, psa)
    except:
        Blender.Set('curframe', cur_frame)  # set frame back to original frame
        print "Exception during Armature Parse"
        raise
    try:
        #######################
        # STEP 3: ANIMATION DUMP
        # IMPORTANT: do AFTER parsing bones - we need to do bone lookups
        # in here during animation frames.
        parse_animation(current_scene, psa)
    except:
        Blender.Set('curframe', cur_frame)  # set frame back to original frame
        print "Exception during Animation Parse"
        raise
    # Reset current frame now that the frame-walking phases are done.
    Blender.Set('curframe', cur_frame)

    ##########################
    # FILE WRITE

    # RG - dump psk file (PrintOut logs a summary, dump() returns the
    # serialized bytes -- see PSKFile elsewhere in this file).
    psk.PrintOut()
    file = open(psk_filename, "wb")
    file.write(psk.dump())
    file.close()
    print 'Successfully Exported File: ' + psk_filename
    # RG - dump psa file, but only when at least one animation was found.
    if not psa.IsEmpty():
        psa.PrintOut()
        file = open(psa_filename, "wb")
        file.write(psa.dump())
        file.close()
        print 'Successfully Exported File: ' + psa_filename
    else:
        print 'No Animations to Export'
    print 'My Export PSK/PSA Script finished in %.2f seconds' % (sys.time()-t)
    # NOTE: 't' is reused here for a datetime stamp after the timing line
    # above has already been printed.
    t = datetime.datetime.now()
    EpochSeconds = time.mktime(t.timetuple())
    print datetime.datetime.fromtimestamp(EpochSeconds)
    textstring = 'Export Complete!'
    #Blender.Draw.PupStrInput("Name:", "untitled", 25)
    Draw.PupMenu(textstring)
def main(arg):
    """Load bone poses for the active armature from a Blender text buffer.

    The buffer (default name "BonePose") is expected to contain one line
    per bone in the form ``bonename/r00,r01,..;r10,..;..`` -- a '/'
    separated bone name and ';'/',' separated matrix rows, as parsed
    below.  Matching bones get their pose quat/loc set from the matrix.

    arg -- unused here (registration-callback signature).
    """
    # get armature and its bones
    # NOTE(review): 'self' is not defined in this function's scope --
    # this looks like code pasted from a Blender 2.5+ operator method;
    # confirm the intended selection source before relying on it.
    obs = [ob for ob in self.context.selected_objects if ob.type == 'ARMATURE']
    if obs:
        # Only the first selected armature is used.
        boneitems = [(bonename, bone) for (bonename, bone) in list(obs[0].getPose().bones.items())]
    else:
        boneitems = []

    # exit if no bones selected
    if not boneitems:
        print("no armature selected")
        Blender.Draw.PupMenu('ERROR%t|no armature selected')
        return

    # ask for the text buffer to read the poses from
    PREF_BUFFER = Blender.Draw.Create("BonePose")
    pup_block = [\
    ('Text Buffer: ', PREF_BUFFER, 0, 20, 'The text buffer to load the bone poses from.'),\
    ]
    if not Blender.Draw.PupBlock('Load Bone Pose', pup_block):
        return

    # saves editmode state and exit editmode if it is enabled
    # (cannot make changes mesh data in editmode)
    is_editmode = Window.EditMode()
    Window.EditMode(0)
    Window.WaitCursor(1)
    t = sys.time()

    # run script
    # open text buffer; Text.Get raises NameError for a missing buffer
    try:
        posetxt = Blender.Text.Get(PREF_BUFFER.val)
    except NameError:
        Blender.Draw.PupMenu('ERROR%t|text buffer does not exist')
        return
    # reconstruct poses, one bone per non-empty line
    for matrixtxt in posetxt.asLines():
        # skip empty lines
        if not matrixtxt:
            continue
        # reconstruct matrix from text: "bonename/row;row;..." with
        # comma-separated floats inside each row
        bonename, matrixstr = matrixtxt.split('/')
        print(f"loading pose of bone {bonename:s} from {PREF_BUFFER.val:s}")
        try:
            matrix = mathutils.Matrix(
                *[[float(f) for f in row.split(',')]
                  for row in matrixstr.split(';')])
        except:
            Blender.Draw.PupMenu('ERROR%t|syntax error in buffer')
            return
        # save pose matrix on the first bone whose name matches
        for bonename2, bone in boneitems:
            if bonenamematch(bonename, bonename2):
                # decompose: rotation as quaternion, translation as loc
                bone.quat = matrix.rotationPart().toQuat()
                bone.loc = matrix.translationPart()
                break
        else:
            # for/else: no matching bone found in the armature
            print(f"WARNING: bone {bonename:s} not found in armature")

    # display the result
    obs[0].getPose().update()

    # report finish and timing
    print(f'Load bone pose finished in {(sys.time()-t):.2f} seconds')
    Window.WaitCursor(0)
    if is_editmode:
        Window.EditMode(1)
def main():
    """Interactive entry point for the poly-reducer script.

    Validates the active mesh object, pops up a settings dialog, then
    runs BPyMesh.redux on the active object (and optionally on every
    other selected mesh object) with the chosen parameters.
    """
    scn = Scene.GetCurrent()
    act_ob = scn.objects.active
    # Must have an active mesh object to operate on.
    if not act_ob or act_ob.type != 'Mesh':
        BPyMessages.Error_NoMeshActive()
        return

    act_me = act_ob.getData(mesh=1)

    # Reduction would destroy multires levels, so refuse.
    if act_me.multires:
        BPyMessages.Error_NoMeshMultiresEdit()
        return

    # Pre-fill the vgroup-name field with the mesh's active group, if any.
    act_group = act_me.activeGroup
    if not act_group:
        act_group = ''

    # Defaults for the popup controls.
    PREF_REDUX = Draw.Create(0.5)
    PREF_BOUNDRY_WEIGHT = Draw.Create(5.0)
    PREF_REM_DOUBLES = Draw.Create(1)
    PREF_FACE_AREA_WEIGHT = Draw.Create(1.0)
    PREF_FACE_TRIANGULATE = Draw.Create(1)
    VGROUP_INF_ENABLE = Draw.Create(0)
    VGROUP_INF_REDUX = Draw.Create(act_group)
    VGROUP_INF_WEIGHT = Draw.Create(10.0)
    PREF_DO_UV = Draw.Create(1)
    PREF_DO_VCOL = Draw.Create(1)
    PREF_DO_WEIGHTS = Draw.Create(1)
    PREF_OTHER_SEL_OBS = Draw.Create(0)

    # Layout for Draw.PupBlock; '' entries are separators.
    pup_block = [\
    ('Poly Reduce:', PREF_REDUX, 0.05, 0.95, 'Scale the meshes poly count by this value.'),\
    ('Boundry Weight:', PREF_BOUNDRY_WEIGHT, 0.0, 20.0, 'Weight boundry verts by this scale, 0.0 for no boundry weighting.'),\
    ('Area Weight:', PREF_FACE_AREA_WEIGHT, 0.0, 20.0, 'Collapse edges effecting lower area faces first.'),\
    ('Triangulate', PREF_FACE_TRIANGULATE, 'Convert quads to tris before reduction, for more choices of edges to collapse.'),\
    '',\
    ('VGroup Weighting', VGROUP_INF_ENABLE, 'Use a vertex group to influence the reduction, higher weights for higher quality '),\
    ('vgroup name: ', VGROUP_INF_REDUX, 0, 32, 'The name of the vertex group to use for the weight map'),\
    ('vgroup mult: ', VGROUP_INF_WEIGHT, 0.0, 100.0, 'How much to make the weight effect the reduction'),\
    ('Other Selected Obs', PREF_OTHER_SEL_OBS, 'reduce other selected objects.'),\
    '',\
    '',\
    '',\
    ('UV Coords', PREF_DO_UV, 'Interpolate UV Coords.'),\
    ('Vert Colors', PREF_DO_VCOL, 'Interpolate Vertex Colors'),\
    ('Vert Weights', PREF_DO_WEIGHTS, 'Interpolate Vertex Weights'),\
    ('Remove Doubles', PREF_REM_DOUBLES, 'Remove doubles before reducing to avoid boundry tearing.'),\
    ]

    # Abort silently if the user cancels the popup.
    if not Draw.PupBlock("Poly Reducer", pup_block):
        return

    # Unwrap the Draw button objects into plain values.
    PREF_REDUX = PREF_REDUX.val
    PREF_BOUNDRY_WEIGHT = PREF_BOUNDRY_WEIGHT.val
    PREF_REM_DOUBLES = PREF_REM_DOUBLES.val
    PREF_FACE_AREA_WEIGHT = PREF_FACE_AREA_WEIGHT.val
    PREF_FACE_TRIANGULATE = PREF_FACE_TRIANGULATE.val
    VGROUP_INF_ENABLE = VGROUP_INF_ENABLE.val
    VGROUP_INF_WEIGHT = VGROUP_INF_WEIGHT.val

    # Vgroup weighting is only honoured when enabled AND the multiplier
    # is non-zero; otherwise disable it by passing None/0.0 to redux.
    if VGROUP_INF_ENABLE and VGROUP_INF_WEIGHT:
        VGROUP_INF_REDUX = VGROUP_INF_REDUX.val
    else:
        VGROUP_INF_WEIGHT = 0.0
        VGROUP_INF_REDUX = None

    PREF_DO_UV = PREF_DO_UV.val
    PREF_DO_VCOL = PREF_DO_VCOL.val
    PREF_DO_WEIGHTS = PREF_DO_WEIGHTS.val
    PREF_OTHER_SEL_OBS = PREF_OTHER_SEL_OBS.val

    t = sys.time()

    # Exit Editmode -- mesh data cannot be changed while editing; the
    # previous state is restored once the reduction is done.
    is_editmode = Window.EditMode()
    if is_editmode:
        Window.EditMode(0)
    Window.WaitCursor(1)

    print 'reducing:', act_ob.name, act_ob.getData(1)
    BPyMesh.redux(act_ob, PREF_REDUX, PREF_BOUNDRY_WEIGHT, PREF_REM_DOUBLES, PREF_FACE_AREA_WEIGHT, PREF_FACE_TRIANGULATE, PREF_DO_UV, PREF_DO_VCOL, PREF_DO_WEIGHTS, VGROUP_INF_REDUX, VGROUP_INF_WEIGHT)

    # Optionally reduce every other selected mesh object with the same
    # settings, redrawing after each so progress is visible.
    if PREF_OTHER_SEL_OBS:
        for ob in scn.objects.context:
            if ob.type == 'Mesh' and ob != act_ob:
                print 'reducing:', ob.name, ob.getData(1)
                BPyMesh.redux(ob, PREF_REDUX, PREF_BOUNDRY_WEIGHT, PREF_REM_DOUBLES, PREF_FACE_AREA_WEIGHT, PREF_FACE_TRIANGULATE, PREF_DO_UV, PREF_DO_VCOL, PREF_DO_WEIGHTS, VGROUP_INF_REDUX, VGROUP_INF_WEIGHT)
                Window.RedrawAll()

    # Restore editmode state and UI.
    if is_editmode:
        Window.EditMode(1)
    Window.WaitCursor(0)
    Window.RedrawAll()

    print 'Reduction done in %.6f sec.' % (sys.time() - t)
def write(filename, objects,\
EXPORT_TRI=False, EXPORT_EDGES=False, EXPORT_NORMALS=False, EXPORT_NORMALS_HQ=False,\
EXPORT_UV=True, EXPORT_MTL=True, EXPORT_COPY_IMAGES=False,\
EXPORT_APPLY_MODIFIERS=True, EXPORT_ROTX90=True, EXPORT_BLEN_OBS=True,\
EXPORT_GROUP_BY_OB=False, EXPORT_GROUP_BY_MAT=False, EXPORT_KEEP_VERT_ORDER=False,\
EXPORT_POLYGROUPS=False, EXPORT_CURVE_AS_NURBS=True):
    '''
    Basic write function. The context and options must be alredy set.
    This can be accessed externaly, eg.
    write( 'c:\\test\\foobar.obj', Blender.Object.GetSelected() ) # Using default options.

    Writes the given Blender objects to a Wavefront OBJ file at
    `filename`, optionally alongside an .mtl material library.  The
    EXPORT_* flags control triangulation, edges, normals, UVs,
    materials, image copying, modifier application, axis rotation,
    object/material grouping, vertex ordering, polygroups and NURBS.
    '''
    def veckey3d(v):
        # Quantize a 3d vector to 6 decimals so equal-enough normals
        # share one 'vn' entry via dict lookup.
        return round(v.x, 6), round(v.y, 6), round(v.z, 6)

    def veckey2d(v):
        # Same quantization for 2d UV coords ('vt' dedup key).
        return round(v.x, 6), round(v.y, 6)

    def findVertexGroupName(face, vWeightMap):
        """
        Searches the vertexDict to see what groups is assigned to a given face.
        We use a frequency system in order to sort out the name because a given vetex can
        belong to two or more groups at the same time. To find the right name for the face
        we list all the possible vertex group names with their frequency and then sort by
        frequency in descend order. The top element is the one shared by the highest number
        of vertices is the face's group
        """
        weightDict = {}
        for vert in face:
            vWeights = vWeightMap[vert.index]
            for vGroupName, weight in vWeights:
                weightDict[vGroupName] = weightDict.get(vGroupName, 0) + weight

        if weightDict:
            alist = [(weight,vGroupName) for vGroupName, weight in weightDict.iteritems()] # sort least to greatest amount of weight
            alist.sort()
            return(alist[-1][1]) # highest value last
        else:
            return '(null)'

    print 'OBJ Export path: "%s"' % filename
    temp_mesh_name = '~tmp-mesh'

    time1 = sys.time()
    scn = Scene.GetCurrent()

    file = open(filename, "w")

    # Write Header
    file.write('# Blender3D v%s OBJ File: %s\n' % (Blender.Get('version'), Blender.Get('filename').split('/')[-1].split('\\')[-1] ))
    file.write('# www.blender3d.org\n')

    # Tell the obj file what material file to use.
    if EXPORT_MTL:
        mtlfilename = '%s.mtl' % '.'.join(filename.split('.')[:-1])
        file.write('mtllib %s\n' % ( mtlfilename.split('\\')[-1].split('/')[-1] ))

    # Get the container mesh. - used for applying modifiers and non mesh objects.
    # Reuse an unused '~tmp-mesh*' datablock if one exists, else make one.
    containerMesh = meshName = tempMesh = None
    for meshName in Blender.NMesh.GetNames():
        if meshName.startswith(temp_mesh_name):
            tempMesh = Mesh.Get(meshName)
            if not tempMesh.users:
                containerMesh = tempMesh
    if not containerMesh:
        containerMesh = Mesh.New(temp_mesh_name)

    if EXPORT_ROTX90:
        # -90deg about X: converts Blender's Z-up to the OBJ consumer's Y-up.
        mat_xrot90= Blender.Mathutils.RotationMatrix(-90, 4, 'x')

    del meshName
    del tempMesh

    # Initialize totals, these are updated each object.
    # OBJ indices are global across the file and 1-based, hence = 1.
    totverts = totuvco = totno = 1

    face_vert_index = 1

    globalNormals = {}  # quantized normal -> global 'vn' index

    # Get all meshes
    for ob_main in objects:
        for ob, ob_mat in BPyObject.getDerivedObjects(ob_main):

            # Nurbs curve support: written directly, skipping mesh path.
            if EXPORT_CURVE_AS_NURBS and test_nurbs_compat(ob):
                if EXPORT_ROTX90:
                    ob_mat = ob_mat * mat_xrot90
                totverts += write_nurb(file, ob, ob_mat)
                continue
            # end nurbs

            # Will work for non meshes now! :)
            # getMeshFromObject(ob, container_mesh=None, apply_modifiers=True, vgroups=True, scn=None)
            me= BPyMesh.getMeshFromObject(ob, containerMesh, EXPORT_APPLY_MODIFIERS, EXPORT_POLYGROUPS, scn)
            if not me:
                continue

            if EXPORT_UV:
                faceuv= me.faceUV
            else:
                faceuv = False

            # We have a valid mesh
            if EXPORT_TRI and me.faces:
                # Add a dummy object to it.
                has_quads = False
                for f in me.faces:
                    if len(f) == 4:
                        has_quads = True
                        break

                if has_quads:
                    # quadToTriangle needs the mesh linked to an object,
                    # so link a temp object, convert, then unlink.
                    oldmode = Mesh.Mode()
                    Mesh.Mode(Mesh.SelectModes['FACE'])

                    me.sel = True
                    tempob = scn.objects.new(me)
                    me.quadToTriangle(0) # more=0 shortest length
                    oldmode = Mesh.Mode(oldmode)
                    scn.objects.unlink(tempob)

                    Mesh.Mode(oldmode)

            # Make our own list so it can be sorted to reduce context switching
            faces = [ f for f in me.faces ]

            if EXPORT_EDGES:
                edges = me.edges
            else:
                edges = []

            if not (len(faces)+len(edges)+len(me.verts)): # Make sure there is somthing to write
                continue # dont bother with this mesh.

            if EXPORT_ROTX90:
                me.transform(ob_mat*mat_xrot90)
            else:
                me.transform(ob_mat)

            # High Quality Normals
            if EXPORT_NORMALS and faces:
                if EXPORT_NORMALS_HQ:
                    BPyMesh.meshCalcNormals(me)
                else:
                    # transforming normals is incorrect
                    # when the matrix is scaled,
                    # better to recalculate them
                    me.calcNormals()

            # # Crash Blender
            #materials = me.getMaterials(1) # 1 == will return None in the list.
            materials = me.materials

            materialNames = []
            materialItems = materials[:]
            if materials:
                for mat in materials:
                    if mat: # !=None
                        materialNames.append(mat.name)
                    else:
                        materialNames.append(None)
                # Cant use LC because some materials are None.
                # materialNames = map(lambda mat: mat.name, materials) # Bug Blender, dosent account for null materials, still broken.

            # Possible there null materials, will mess up indicies
            # but at least it will export, wait until Blender gets fixed.
            # Pad both lists to 16 (Blender's material-slot maximum).
            materialNames.extend((16-len(materialNames)) * [None])
            materialItems.extend((16-len(materialItems)) * [None])

            # Sort by Material, then images
            # so we dont over context switch in the obj file.
            # try/except: .sort(key=...) needs newer Python, fall back to cmp.
            if EXPORT_KEEP_VERT_ORDER:
                pass
            elif faceuv:
                try:
                    faces.sort(key = lambda a: (a.mat, a.image, a.smooth))
                except:
                    faces.sort(lambda a,b: cmp((a.mat, a.image, a.smooth), (b.mat, b.image, b.smooth)))
            elif len(materials) > 1:
                try:
                    faces.sort(key = lambda a: (a.mat, a.smooth))
                except:
                    faces.sort(lambda a,b: cmp((a.mat, a.smooth), (b.mat, b.smooth)))
            else:
                # no materials
                try:
                    faces.sort(key = lambda a: a.smooth)
                except:
                    faces.sort(lambda a,b: cmp(a.smooth, b.smooth))

            # Set the default mat to no material and no image.
            contextMat = (0, 0) # Can never be this, so we will label a new material teh first chance we get.
            contextSmooth = None # Will either be true or false,  set bad to force initialization switch.

            if EXPORT_BLEN_OBS or EXPORT_GROUP_BY_OB:
                name1 = ob.name
                name2 = ob.getData(1)
                if name1 == name2:
                    obnamestring = fixName(name1)
                else:
                    obnamestring = '%s_%s' % (fixName(name1), fixName(name2))

                if EXPORT_BLEN_OBS:
                    file.write('o %s\n' % obnamestring) # Write Object name
                else: # if EXPORT_GROUP_BY_OB:
                    file.write('g %s\n' % obnamestring)

            # Vert
            for v in me.verts:
                file.write('v %.6f %.6f %.6f\n' % tuple(v.co))

            # UV
            if faceuv:
                uv_face_mapping = [[0,0,0,0] for f in faces] # a bit of a waste for tri's :/

                uv_dict = {} # could use a set() here
                for f_index, f in enumerate(faces):
                    for uv_index, uv in enumerate(f.uv):
                        uvkey = veckey2d(uv)
                        try:
                            # seen before: reuse its global 'vt' index
                            uv_face_mapping[f_index][uv_index] = uv_dict[uvkey]
                        except:
                            # new UV coord: assign next index and write it
                            uv_face_mapping[f_index][uv_index] = uv_dict[uvkey] = len(uv_dict)
                            file.write('vt %.6f %.6f\n' % tuple(uv))

                uv_unique_count = len(uv_dict)
                del uv, uvkey, uv_dict, f_index, uv_index
                # Only need uv_unique_count and uv_face_mapping

            # NORMAL, Smooth/Non smoothed.
            if EXPORT_NORMALS:
                for f in faces:
                    if f.smooth:
                        # Smooth face: one normal per vertex.
                        for v in f:
                            noKey = veckey3d(v.no)
                            if not globalNormals.has_key( noKey ):
                                globalNormals[noKey] = totno
                                totno +=1
                                file.write('vn %.6f %.6f %.6f\n' % noKey)
                    else:
                        # Hard, 1 normal from the face.
                        noKey = veckey3d(f.no)
                        if not globalNormals.has_key( noKey ):
                            globalNormals[noKey] = totno
                            totno +=1
                            file.write('vn %.6f %.6f %.6f\n' % noKey)

            if not faceuv:
                f_image = None

            if EXPORT_POLYGROUPS:
                # Retrieve the list of vertex groups
                vertGroupNames = me.getVertGroupNames()

                currentVGroup = ''
                # Create a dictionary keyed by face id and listing, for each vertex, the vertex groups it belongs to
                vgroupsMap = [[] for _i in xrange(len(me.verts))]
                for vertexGroupName in vertGroupNames:
                    for vIdx, vWeight in me.getVertsFromGroup(vertexGroupName, 1):
                        vgroupsMap[vIdx].append((vertexGroupName, vWeight))

            for f_index, f in enumerate(faces):
                f_v= f.v
                f_smooth= f.smooth
                f_mat = min(f.mat, len(materialNames)-1)
                if faceuv:
                    f_image = f.image
                    f_uv= f.uv

                # MAKE KEY: (material name, image name) identifies the
                # material context for this face.
                if faceuv and f_image: # Object is always true.
                    key = materialNames[f_mat],  f_image.name
                else:
                    key = materialNames[f_mat],  None # No image, use None instead.

                # Write the vertex group
                if EXPORT_POLYGROUPS:
                    if vertGroupNames:
                        # find what vertext group the face belongs to
                        theVGroup = findVertexGroupName(f,vgroupsMap)
                        if theVGroup != currentVGroup:
                            currentVGroup = theVGroup
                            file.write('g %s\n' % theVGroup)

                # CHECK FOR CONTEXT SWITCH -- only emit 'usemtl'/'g' when
                # the (material, image) pair changed from the last face.
                if key == contextMat:
                    pass # Context alredy switched, dont do anything
                else:
                    if key[0] == None and key[1] == None:
                        # Write a null material, since we know the context has changed.
                        if EXPORT_GROUP_BY_MAT:
                            file.write('g %s_%s\n' % (fixName(ob.name), fixName(ob.getData(1))) ) # can be mat_image or (null)
                        file.write('usemtl (null)\n') # mat, image

                    else:
                        mat_data= MTL_DICT.get(key)
                        if not mat_data:
                            # First add to global dict so we can export to mtl
                            # Then write mtl

                            # Make a new names from the mat and image name,
                            # converting any spaces to underscores with fixName.

                            # If none image dont bother adding it to the name
                            if key[1] == None:
                                mat_data = MTL_DICT[key] = ('%s'%fixName(key[0])), materialItems[f_mat], f_image
                            else:
                                mat_data = MTL_DICT[key] = ('%s_%s' % (fixName(key[0]), fixName(key[1]))), materialItems[f_mat], f_image

                        if EXPORT_GROUP_BY_MAT:
                            file.write('g %s_%s_%s\n' % (fixName(ob.name), fixName(ob.getData(1)), mat_data[0]) ) # can be mat_image or (null)

                        file.write('usemtl %s\n' % mat_data[0]) # can be mat_image or (null)

                    contextMat = key

                # Emit smoothing-group switch only on change.
                if f_smooth != contextSmooth:
                    if f_smooth: # on now off
                        file.write('s 1\n')
                        contextSmooth = f_smooth
                    else: # was off now on
                        file.write('s off\n')
                        contextSmooth = f_smooth

                # Face record: index layout is v/vt/vn, v/vt, v//vn or v
                # depending on which attributes are exported.
                file.write('f')
                if faceuv:
                    if EXPORT_NORMALS:
                        if f_smooth: # Smoothed, use vertex normals
                            for vi, v in enumerate(f_v):
                                file.write( ' %d/%d/%d' % (\
                                  v.index+totverts,\
                                  totuvco + uv_face_mapping[f_index][vi],\
                                  globalNormals[ veckey3d(v.no) ])) # vert, uv, normal

                        else: # No smoothing, face normals
                            no = globalNormals[ veckey3d(f.no) ]
                            for vi, v in enumerate(f_v):
                                file.write( ' %d/%d/%d' % (\
                                  v.index+totverts,\
                                  totuvco + uv_face_mapping[f_index][vi],\
                                  no)) # vert, uv, normal

                    else: # No Normals
                        for vi, v in enumerate(f_v):
                            file.write( ' %d/%d' % (\
                              v.index+totverts,\
                              totuvco + uv_face_mapping[f_index][vi])) # vert, uv

                    face_vert_index += len(f_v)

                else: # No UV's
                    if EXPORT_NORMALS:
                        if f_smooth: # Smoothed, use vertex normals
                            for v in f_v:
                                file.write( ' %d//%d' % (\
                                  v.index+totverts,\
                                  globalNormals[ veckey3d(v.no) ]))
                        else: # No smoothing, face normals
                            no = globalNormals[ veckey3d(f.no) ]
                            for v in f_v:
                                file.write( ' %d//%d' % (\
                                  v.index+totverts,\
                                  no))
                    else: # No Normals
                        for v in f_v:
                            file.write( ' %d' % (\
                              v.index+totverts))

                file.write('\n')

            # Write edges. Only LOOSE edges (not part of any face) are
            # written, as degenerate 2-vert faces.
            if EXPORT_EDGES:
                LOOSE= Mesh.EdgeFlags.LOOSE
                for ed in edges:
                    if ed.flag & LOOSE:
                        file.write('f %d %d\n' % (ed.v1.index+totverts, ed.v2.index+totverts))

            # Make the indicies global rather then per mesh
            totverts += len(me.verts)
            if faceuv:
                totuvco += uv_unique_count
            me.verts= None
    file.close()

    # Now we have all our materials, save them
    if EXPORT_MTL:
        write_mtl(mtlfilename)
    if EXPORT_COPY_IMAGES:
        dest_dir = filename
        # Remove chars until we are just the path.
        while dest_dir and dest_dir[-1] not in '\\/':
            dest_dir = dest_dir[:-1]
        if dest_dir:
            copy_images(dest_dir)
        else:
            print '\tError: "%s" could not be used as a base for an image path.' % filename

    print "OBJ Export time: %.2f" % (sys.time() - time1)
def uvcalc_main(obList):
    """Smart-projection UV unwrap for a list of mesh objects.

    Clusters faces by normal direction into projection groups (angle
    limit controls cluster width), projects each group's faces onto a
    plane built from the group's averaged normal, then splits the result
    into UV islands and packs them -- either per object, or all together
    when texture space is shared.

    obList -- mesh objects to unwrap; linked-library and already-tagged
              meshes are skipped.
    """
    global USER_FILL_HOLES
    global USER_FILL_HOLES_QUALITY
    global USER_STRETCH_ASPECT
    global USER_ISLAND_MARGIN

    # objects= bpy.data.scenes.active.objects
    # we can will tag them later.
    # obList = [ob for ob in objects.context if ob.type == 'Mesh']
    # Face select object may not be selected.
    # ob = objects.active
    # if ob and ob.sel == 0 and ob.type == 'Mesh':
    #     # Add to the list
    #     obList =[ob]
    # del objects

    if not obList:
        Draw.PupMenu("error, no selected mesh objects")
        return

    # Create the variables (Draw button wrappers for the popup).
    USER_PROJECTION_LIMIT = Draw.Create(66)
    USER_ONLY_SELECTED_FACES = Draw.Create(1)
    USER_SHARE_SPACE = Draw.Create(1) # Only for hole filling.
    USER_STRETCH_ASPECT = Draw.Create(1) # Only for hole filling.
    USER_ISLAND_MARGIN = Draw.Create(0.0) # Only for hole filling.
    USER_FILL_HOLES = Draw.Create(0)
    USER_FILL_HOLES_QUALITY = Draw.Create(50) # Only for hole filling.
    USER_VIEW_INIT = Draw.Create(0) # Only for hole filling.
    USER_AREA_WEIGHT = Draw.Create(1) # Only for hole filling.

    pup_block = [
        "Projection",
        ("Angle Limit:", USER_PROJECTION_LIMIT, 1, 89, "lower for more projection groups, higher for less distortion."),
        ("Selected Faces Only", USER_ONLY_SELECTED_FACES, "Use only selected faces from all selected meshes."),
        ("Init from view", USER_VIEW_INIT, "The first projection will be from the view vector."),
        ("Area Weight", USER_AREA_WEIGHT, "Weight projections vector by face area."),
        "",
        "",
        "",
        "UV Layout",
        ("Share Tex Space", USER_SHARE_SPACE, "Objects Share texture space, map all objects into 1 uvmap."),
        ("Stretch to bounds", USER_STRETCH_ASPECT, "Stretch the final output to texture bounds."),
        ("Island Margin:", USER_ISLAND_MARGIN, 0.0, 0.5, "Margin to reduce bleed from adjacent islands."),
        "Fill in empty areas",
        ("Fill Holes", USER_FILL_HOLES, "Fill in empty areas reduced texture waistage (slow)."),
        (
            "Fill Quality:",
            USER_FILL_HOLES_QUALITY,
            1,
            100,
            "Depends on fill holes, how tightly to fill UV holes, (higher is slower)",
        ),
    ]

    # Reuse variable
    if len(obList) == 1:
        ob = "Unwrap %i Selected Mesh"
    else:
        ob = "Unwrap %i Selected Meshes"

    # NOTE(review): the confirmation popup is commented out, so the
    # defaults above are always used -- confirm this is intentional.
    # if not Draw.PupBlock(ob % len(obList), pup_block):
    #     return
    # del ob

    # Convert from being button types to plain values.
    USER_PROJECTION_LIMIT = USER_PROJECTION_LIMIT.val
    USER_ONLY_SELECTED_FACES = USER_ONLY_SELECTED_FACES.val
    USER_SHARE_SPACE = USER_SHARE_SPACE.val
    USER_STRETCH_ASPECT = USER_STRETCH_ASPECT.val
    USER_ISLAND_MARGIN = USER_ISLAND_MARGIN.val
    USER_FILL_HOLES = USER_FILL_HOLES.val
    USER_FILL_HOLES_QUALITY = USER_FILL_HOLES_QUALITY.val
    USER_VIEW_INIT = USER_VIEW_INIT.val
    USER_AREA_WEIGHT = USER_AREA_WEIGHT.val

    # Precompute cosines: comparisons below are done on dot products
    # (cos of angle) instead of angles.
    USER_PROJECTION_LIMIT_CONVERTED = cos(USER_PROJECTION_LIMIT * DEG_TO_RAD)
    USER_PROJECTION_LIMIT_HALF_CONVERTED = cos((USER_PROJECTION_LIMIT / 2) * DEG_TO_RAD)

    # Toggle Edit mode
    # is_editmode = Window.EditMode()
    # if is_editmode:
    #     Window.EditMode(0)
    # Assume face select mode! an annoying hack to toggle face select mode because Mesh dosent like faceSelectMode.

    if USER_SHARE_SPACE:
        # Sort by data name so we get consistant results
        try:
            obList.sort(key=lambda ob: ob.getData(name_only=1))
        except:
            obList.sort(lambda ob1, ob2: cmp(ob1.getData(name_only=1), ob2.getData(name_only=1)))
    collected_islandList = []

    # Window.WaitCursor(1)
    time1 = sys.time()

    # Tag as False se we dont operate on teh same mesh twice.
    bpy.data.meshes.tag = False

    for ob in obList:
        me = ob.getData(mesh=1)

        # Skip meshes already processed (shared data) or from libraries.
        if me.tag or me.lib:
            continue

        # Tag as used
        me.tag = True

        if not me.faceUV: # Mesh has no UV Coords, dont bother.
            me.faceUV = True

        if USER_ONLY_SELECTED_FACES:
            meshFaces = [thickface(f) for f in me.faces if f.sel]
        else:
            meshFaces = map(thickface, me.faces)

        if not meshFaces:
            continue

        # Window.DrawProgressBar(0.1, 'SmartProj UV Unwrapper, mapping "%s", %i faces.' % (me.name, len(meshFaces)))

        # =======
        # Generate a projection list from face normals, this is ment to be smart :)

        # Make a Face List that is sorted by area, biggest first
        # (try/except: .sort(key=...) fallback to cmp on old Python).
        try:
            meshFaces.sort(key=lambda a: -a.area)
        except:
            meshFaces.sort(lambda a, b: cmp(b.area, a.area))

        # remove all zero area faces
        while meshFaces and meshFaces[-1].area <= SMALL_NUM:
            # Set their UV's to 0,0
            for uv in meshFaces[-1].uv:
                uv.zero()
            meshFaces.pop()

        # Smallest first is slightly more efficient, but if the user cancels early then its better we work on the larger data.

        # Generate Projection Vecs
        # 0d is 1.0
        # 180 IS -0.59846

        # Initialize projectVecs
        if USER_VIEW_INIT:
            # Generate Projection
            projectVecs = [ Vector(Window.GetViewVector()) * ob.matrixWorld.copy().invert().rotationPart() ] # We add to this allong the way
        else:
            projectVecs = []

        newProjectVec = meshFaces[0].no
        newProjectMeshFaces = [] # Popping stuffs it up.

        # Predent that the most unique angke is ages away to start the loop off
        mostUniqueAngle = -1.0

        # This is popped
        tempMeshFaces = meshFaces[:]

        # This while only gathers projection vecs, faces are assigned later on.
        while 1:
            # If theres none there then start with the largest face
            # add all the faces that are close.
            # Iterate back-to-front so pop(fIdx) doesn't shift unvisited items.
            for fIdx in xrange(len(tempMeshFaces) - 1, -1, -1):
                # Use half the angle limit so we dont overweight faces towards this
                # normal and hog all the faces.
                if newProjectVec.dot(tempMeshFaces[fIdx].no) > USER_PROJECTION_LIMIT_HALF_CONVERTED:
                    newProjectMeshFaces.append(tempMeshFaces.pop(fIdx))

            # Add the average of all these faces normals as a projectionVec
            averageVec = Vector(0, 0, 0)
            if USER_AREA_WEIGHT:
                for fprop in newProjectMeshFaces:
                    averageVec += fprop.no * fprop.area
            else:
                for fprop in newProjectMeshFaces:
                    averageVec += fprop.no

            if averageVec.x != 0 or averageVec.y != 0 or averageVec.z != 0: # Avoid NAN
                projectVecs.append(averageVec.normalize())

            # Get the next vec!
            # Pick the face thats most different to all existing angles :)
            mostUniqueAngle = 1.0 # 1.0 is 0d. no difference.
            mostUniqueIndex = 0 # dummy

            for fIdx in xrange(len(tempMeshFaces) - 1, -1, -1):
                angleDifference = -1.0 # 180d difference.

                # Get the closest vec angle we are to.
                for p in projectVecs:
                    temp_angle_diff = p.dot(tempMeshFaces[fIdx].no)

                    if angleDifference < temp_angle_diff:
                        angleDifference = temp_angle_diff

                if angleDifference < mostUniqueAngle:
                    # We have a new most different angle
                    mostUniqueIndex = fIdx
                    mostUniqueAngle = angleDifference

            if mostUniqueAngle < USER_PROJECTION_LIMIT_CONVERTED:
                # print 'adding', mostUniqueAngle, USER_PROJECTION_LIMIT, len(newProjectMeshFaces)
                # Now weight the vector to all its faces, will give a more direct projection
                # if the face its self was not representive of the normal from surrounding faces.
                newProjectVec = tempMeshFaces[mostUniqueIndex].no
                newProjectMeshFaces = [tempMeshFaces.pop(mostUniqueIndex)]
            else:
                if len(projectVecs) >= 1: # Must have at least 2 projections
                    break

        # If there are only zero area faces then its possible
        # there are no projectionVecs
        if not len(projectVecs):
            Draw.PupMenu("error, no projection vecs where generated, 0 area faces can cause this.")
            return

        faceProjectionGroupList = [[] for i in xrange(len(projectVecs))]

        # MAP and Arrange # We know there are 3 or 4 faces here
        # Assign every face to the projection vec its normal best matches.
        for fIdx in xrange(len(meshFaces) - 1, -1, -1):
            fvec = meshFaces[fIdx].no
            i = len(projectVecs)

            # Initialize first
            bestAng = fvec.dot(projectVecs[0])
            bestAngIdx = 0

            # Cycle through the remaining, first alredy done
            while i - 1:
                i -= 1

                newAng = fvec.dot(projectVecs[i])
                if newAng > bestAng: # Reverse logic for dotvecs
                    bestAng = newAng
                    bestAngIdx = i

            # Store the area for later use.
            faceProjectionGroupList[bestAngIdx].append(meshFaces[fIdx])

        # Cull faceProjectionGroupList,

        # Now faceProjectionGroupList is full of faces that face match the project Vecs list
        for i in xrange(len(projectVecs)):
            # Account for projectVecs having no faces.
            if not faceProjectionGroupList[i]:
                continue

            # Make a projection matrix from a unit length vector.
            MatProj = VectoMat(projectVecs[i])

            # Get the faces UV's from the projected vertex.
            for f in faceProjectionGroupList[i]:
                f_uv = f.uv
                for j, v in enumerate(f.v):
                    # Project vertex into the plane; keep x,y as UV.
                    f_uv[j][:] = (MatProj * v.co)[:2]

        if USER_SHARE_SPACE:
            # Should we collect and pack later?
            islandList = getUvIslands(faceProjectionGroupList, me)
            collected_islandList.extend(islandList)
        else:
            # Should we pack the islands for this 1 object?
            islandList = getUvIslands(faceProjectionGroupList, me)
            packIslands(islandList)

        # update the mesh here if we need to.

    # We want to pack all in 1 go, so pack now
    if USER_SHARE_SPACE:
        # Window.DrawProgressBar(0.9, "Box Packing for all objects...")
        packIslands(collected_islandList)

    print "Smart Projection time: %.2f" % (sys.time() - time1)
def write(filename, objects,
          EXPORT_NORMALS_HQ=False,
          EXPORT_MTL=True, EXPORT_COPY_IMAGES=False,
          EXPORT_APPLY_MODIFIERS=True, EXPORT_BLEN_OBS=True,
          EXPORT_GROUP_BY_OB=False):
    '''
    Basic write function. The context and options must already be set.
    This can be accessed externally, e.g.:

        write( 'c:\\test\\foobar.obj', Blender.Object.GetSelected() ) # Using default options.

    filename               -- output path; an XML-ish <OPEN_TRACK> document is written here.
    objects                -- iterable of Blender objects to export.
    EXPORT_NORMALS_HQ      -- recalculate normals with BPyMesh.meshCalcNormals() instead of me.calcNormals().
    EXPORT_MTL             -- also write the collected materials via write_library_materials().
    EXPORT_COPY_IMAGES     -- copy texture images into the export directory afterwards.
    EXPORT_APPLY_MODIFIERS -- bake modifiers into the exported mesh data.
    EXPORT_BLEN_OBS, EXPORT_GROUP_BY_OB -- accepted but not referenced anywhere in this
        function body; NOTE(review): presumably kept for option-dict compatibility — confirm.
    '''

    def veckey3d(v):
        # Quantize a 3D vector to 6 decimals so near-identical normals share
        # one dict key (used for normal de-duplication below).
        return round(v.x, 6), round(v.y, 6), round(v.z, 6)

    def veckey2d(v):
        # Same quantization for 2D UV coordinates.
        return round(v.x, 6), round(v.y, 6)

    print 'WTF Export path: "%s"' % filename
    temp_mesh_name = '~tmp-mesh'

    time1 = sys.time()  # wall-clock start; elapsed time is printed at the end
    scn = Scene.GetCurrent()

    file = open(filename, "w")
    file.write('<?xml version="1.0"?>\n')
    file.write('<OPEN_TRACK>\n')

    # Write Header (disabled)
    # file.write('\n<!--\n'
    #	+ ' Blender3D v%s WTF File: %s\n' % (Blender.Get('version'), Blender.Get('filename').split('/')[-1].split('\\')[-1] )
    #	+ ' www.blender3d.org\n'
    #	+ '-->\n\n')

    # Get the container mesh - used for applying modifiers and non mesh objects.
    # Reuse an existing, user-free '~tmp-mesh*' datablock if one is found,
    # otherwise create a fresh one.
    containerMesh = meshName = tempMesh = None
    for meshName in Blender.NMesh.GetNames():
        if meshName.startswith(temp_mesh_name):
            tempMesh = Mesh.Get(meshName)
            if not tempMesh.users:
                containerMesh = tempMesh
    if not containerMesh:
        containerMesh = Mesh.New(temp_mesh_name)

    del meshName
    del tempMesh

    # Initialize totals, these are updated each object.
    # NOTE(review): the per-mesh increments near the bottom are commented out,
    # so totverts/totuvco stay 0 and every mesh's indices are written relative
    # to its own arrays — confirm this is what the format expects.
    totverts = totuvco = totno = 0

    face_vert_index = 0  # NOTE(review): accumulated below but never read afterwards

    # (x, y, z) veckey3d -> running normal index; shared across ALL exported meshes.
    globalNormals = {}

    file.write('\n<library_objects>\n')
    # Get all meshes
    for ob_main in objects:
        obnamestring = fixName(ob_main.name)
        file.write('\t<object id="%s">\n' % obnamestring)  # Write Object name

        # Expand dupli/derived objects so instanced geometry is exported too.
        for ob, ob_mat in BPyObject.getDerivedObjects(ob_main):
            # Will work for non meshes now! :)
            # getMeshFromObject(ob, container_mesh=None, apply_modifiers=True, vgroups=True, scn=None)
            me = BPyMesh.getMeshFromObject(ob, containerMesh, EXPORT_APPLY_MODIFIERS, False, scn)
            if not me:
                # Object is not convertible to a mesh: export only its placement.
                file.write('\t\t<loc>%.6f %.6f %.6f</loc>\n' % tuple(ob_main.loc))  # Write Object location
                file.write('\t\t<rot>%.6f %.6f %.6f</rot>\n' % tuple(ob_main.rot))  # Write Object rotation
                continue

            faceuv = me.faceUV  # does this mesh carry per-face UVs?

            # We have a valid mesh.
            # Triangulate any quads, since only <triangles> are emitted below.
            if me.faces:
                has_quads = False
                for f in me.faces:
                    if len(f) == 4:
                        has_quads = True
                        break

                if has_quads:
                    oldmode = Mesh.Mode()
                    Mesh.Mode(Mesh.SelectModes['FACE'])

                    # quadToTriangle() needs the mesh linked to an object, so
                    # add a dummy object to the scene temporarily.
                    me.sel = True
                    tempob = scn.objects.new(me)
                    me.quadToTriangle(0)  # more=0 shortest length
                    oldmode = Mesh.Mode(oldmode)
                    scn.objects.unlink(tempob)

                    Mesh.Mode(oldmode)

            # Make our own list so it can be sorted to reduce context switching
            faces = [f for f in me.faces]
            edges = me.edges

            if not (len(faces) + len(edges) + len(me.verts)):  # Make sure there is something to write
                continue  # don't bother with this mesh.

            me.transform(ob_mat)  # bake the object's world matrix into the verts

            # High Quality Normals
            if faces:
                if EXPORT_NORMALS_HQ:
                    BPyMesh.meshCalcNormals(me)
                else:
                    # transforming normals is incorrect
                    # when the matrix is scaled,
                    # better to recalculate them
                    me.calcNormals()

            # # Crash Blender
            #materials = me.getMaterials(1) # 1 == will return None in the list.
            materials = me.materials

            materialNames = []
            materialItems = materials[:]
            if materials:
                for mat in materials:
                    if mat:  # != None
                        materialNames.append(mat.name)
                    else:
                        materialNames.append(None)
                # Can't use a list comprehension because some materials are None.
                # materialNames = map(lambda mat: mat.name, materials) # Bug Blender, doesn't account for null materials, still broken.

            # Possibly there are null materials; that will mess up indices
            # but at least it will export — wait until Blender gets fixed.
            # Pad both lists to Blender's 16 material slots so f.mat can never
            # index past the end.
            materialNames.extend((16 - len(materialNames)) * [None])
            materialItems.extend((16 - len(materialItems)) * [None])

            # Sort by Material, then images
            # so we don't over context switch in the output file.
            # (key= is preferred; the cmp fallback covers older Python builds.)
            if faceuv:
                try:
                    faces.sort(key=lambda a: (a.mat, a.image, a.smooth))
                except:
                    faces.sort(lambda a, b: cmp((a.mat, a.image, a.smooth), (b.mat, b.image, b.smooth)))
            elif len(materials) > 1:
                try:
                    faces.sort(key=lambda a: (a.mat, a.smooth))
                except:
                    faces.sort(lambda a, b: cmp((a.mat, a.smooth), (b.mat, b.smooth)))
            else:
                # no materials
                try:
                    faces.sort(key=lambda a: a.smooth)
                except:
                    faces.sort(lambda a, b: cmp(a.smooth, b.smooth))

            # Set the default mat to no material and no image.
            contextMat = (0, 0)  # Can never be this, so we will label a new material the first chance we get.
            contextSmooth = None  # Will either be true or false, set bad to force initialization switch.

            # A mesh with no faces is written as a <curve> instead.
            if len(faces) > 0:
                file.write('\t\t<mesh>\n')
            else:
                file.write('\t\t<curve>\n')

            # Element ids for this object's data arrays.
            vertname = "%s-Vertices" % obnamestring
            vertarrayname = "%s-Array" % vertname
            normname = "%s-Normals" % obnamestring
            normarrayname = "%s-Array" % normname
            texname = "%s-TexCoord" % obnamestring
            texarrayname = "%s-Array" % texname

            # Vert
            file.write('\t\t\t<float_array count="%d" id="%s">' % (len(me.verts), vertarrayname))
            for v in me.verts:
                file.write(' %.6f %.6f %.6f' % tuple(v.co))
            file.write('</float_array>\n')
            file.write('\t\t\t<vertices id="%s" source="#%s" />\n' % (vertname, vertarrayname))

            # UV
            if faceuv:
                file.write('\t\t\t<float_array id="%s">' % texarrayname)
                uv_face_mapping = [[0, 0, 0, 0] for f in faces]  # a bit of a waste for tris :/

                uv_dict = {}  # could use a set() here
                for f_index, f in enumerate(faces):
                    for uv_index, uv in enumerate(f.uv):
                        uvkey = veckey2d(uv)
                        try:
                            # Seen before: reuse the existing unique-UV index.
                            uv_face_mapping[f_index][uv_index] = uv_dict[uvkey]
                        except:
                            # First occurrence: assign the next index and
                            # write the coordinate out.
                            uv_face_mapping[f_index][uv_index] = uv_dict[uvkey] = len(uv_dict)
                            file.write(' %.6f %.6f' % tuple(uv))

                uv_unique_count = len(uv_dict)
                # NOTE(review): this del raises NameError when `faces` is empty
                # (the loop variables are never bound) — confirm whether a
                # faceUV mesh can reach this point with no faces.
                del uv, uvkey, uv_dict, f_index, uv_index
                # Only need uv_unique_count and uv_face_mapping

                file.write('</float_array>\n')
                file.write('\t\t\t<texcoords id="%s" source="#%s" />\n' % (texname, texarrayname))

            # NORMAL, Smooth/Non smoothed.
            if len(faces) > 0:
                file.write('\t\t\t<float_array id="%s">' % normarrayname)
                for f in faces:
                    if f.smooth:
                        # Smooth face: one (deduplicated) normal per vertex.
                        for v in f:
                            noKey = veckey3d(v.no)
                            if not globalNormals.has_key(noKey):
                                globalNormals[noKey] = totno
                                totno += 1
                                file.write(' %.6f %.6f %.6f' % noKey)
                    else:
                        # Hard, 1 normal from the face.
                        noKey = veckey3d(f.no)
                        if not globalNormals.has_key(noKey):
                            globalNormals[noKey] = totno
                            totno += 1
                            file.write(' %.6f %.6f %.6f' % noKey)
                file.write('</float_array>\n')
                file.write('\t\t\t<normals id="%s" source="#%s" />\n' % (normname, normarrayname))

            if not faceuv:
                f_image = None

            in_triangles = False  # are we currently inside an open <triangles><p> element?

            for f_index, f in enumerate(faces):
                f_v = f.v
                f_smooth = f.smooth
                f_mat = min(f.mat, len(materialNames) - 1)
                if faceuv:
                    f_image = f.image
                    f_uv = f.uv  # NOTE(review): assigned but never used below

                # MAKE KEY
                if faceuv and f_image:  # Object is always true.
                    key = materialNames[f_mat], f_image.name
                else:
                    key = materialNames[f_mat], None  # No image, use None instead.

                # CHECK FOR CONTEXT SWITCH
                if key == contextMat:
                    pass  # Context already switched, don't do anything
                else:
                    if key[0] == None and key[1] == None:
                        # Write a null material, since we know the context has changed.
                        # Close the previous triangle stream first.
                        if in_triangles:
                            file.write('</p>\n')
                            file.write('\t\t\t</triangles>\n')
                        file.write('\t\t\t<triangles id="%s_%s">\n' % (fixName(ob.name), fixName(ob.getData(1))))
                        in_triangles = True
                    else:
                        mat_data = MTL_DICT.get(key)
                        if not mat_data:
                            # First add to global dict so we can export to mtl
                            # Then write mtl

                            # Make a new name from the mat and image name,
                            # converting any spaces to underscores with fixName.

                            # If there is no image don't bother adding it to the name
                            if key[1] == None:
                                mat_data = MTL_DICT[key] = ('%s' % fixName(key[0])), materialItems[f_mat], f_image
                            else:
                                mat_data = MTL_DICT[key] = ('%s_%s' % (fixName(key[0]), fixName(key[1]))), materialItems[f_mat], f_image

                        if in_triangles:
                            file.write('</p>\n')
                            file.write('\t\t\t</triangles>\n')
                        file.write('\t\t\t<triangles id="%s_%s_%s" material="#%s">\n'
                                   % (fixName(ob.name), fixName(ob.getData(1)), mat_data[0], mat_data[0]))
                        in_triangles = True

                    # Declare the index layout for the <p> stream that follows:
                    # vertex, normal, and (if present) texcoord offsets.
                    file.write('\t\t\t\t<input offset="0" semantic="VERTEX" source="#%s" />\n' % vertname)
                    file.write('\t\t\t\t<input offset="1" semantic="NORMAL" source="#%s" />\n' % normname)
                    if faceuv:
                        file.write('\t\t\t\t<input offset="2" semantic="TEXCOORD" source="#%s" />\n' % texname)
                    file.write('\t\t\t\t<p>')

                contextMat = key

                # Smoothing-group switch: the actual writes are disabled
                # (left over from the OBJ-style exporter this derives from).
                if f_smooth != contextSmooth:
                    if f_smooth:  # on now off
                        # file.write('s 1\n')
                        contextSmooth = f_smooth
                    else:  # was off now on
                        # file.write('s off\n')
                        contextSmooth = f_smooth

                if faceuv:
                    if f_smooth:  # Smoothed, use vertex normals
                        for vi, v in enumerate(f_v):
                            file.write(' %d %d %d' % (
                                v.index + totverts,
                                totuvco + uv_face_mapping[f_index][vi],
                                globalNormals[veckey3d(v.no)]))  # vert, uv, normal
                    else:  # No smoothing, face normals
                        no = globalNormals[veckey3d(f.no)]
                        for vi, v in enumerate(f_v):
                            file.write(' %d %d %d' % (
                                v.index + totverts,
                                totuvco + uv_face_mapping[f_index][vi],
                                no))  # vert, uv, normal

                    face_vert_index += len(f_v)

                else:  # No UV's
                    if f_smooth:  # Smoothed, use vertex normals
                        for v in f_v:
                            file.write(' %d %d' % (
                                v.index + totverts,
                                globalNormals[veckey3d(v.no)]))
                    else:  # No smoothing, face normals
                        no = globalNormals[veckey3d(f.no)]
                        for v in f_v:
                            file.write(' %d %d' % (
                                v.index + totverts,
                                no))

            # Close the last open triangle stream, if any.
            if in_triangles:
                file.write('</p>\n')
                file.write('\t\t\t</triangles>\n')

            # Write edges — only LOOSE edges (edges not belonging to any face).
            LOOSE = Mesh.EdgeFlags.LOOSE
            has_edge = False
            for ed in edges:
                if ed.flag & LOOSE:
                    has_edge = True

            if has_edge:
                file.write('\t\t\t<edges>\n')
                file.write('\t\t\t\t<input offset="0" semantic="VERTEX" source="#%s" />\n' % vertname)
                file.write('\t\t\t\t<p>')
                for ed in edges:
                    if ed.flag & LOOSE:
                        file.write(' %d %d' % (ed.v1.index + totverts, ed.v2.index + totverts))
                file.write('</p>\n')
                file.write('\t\t\t</edges>\n')

            # Make the indices global rather than per mesh (disabled):
            # totverts += len(me.verts)
            # if faceuv:
            #     totuvco += uv_unique_count
            me.verts = None  # release the container mesh's vertex data
            if len(faces) > 0:
                file.write('\t\t</mesh>\n')
            else:
                file.write('\t\t</curve>\n')
        file.write('\t</object>\n')
    file.write('</library_objects>\n\n')

    # Now we have all our materials, save them
    if EXPORT_MTL:
        write_library_materials(file)

    # Save the groups
    write_library_groups(file)
    file.write('</OPEN_TRACK>\n')
    file.close()

    if EXPORT_COPY_IMAGES:
        dest_dir = filename
        # Remove chars until we are just the path.
        while dest_dir and dest_dir[-1] not in '\\/':
            dest_dir = dest_dir[:-1]
        if dest_dir:
            copy_images(dest_dir)
        else:
            print '\tError: "%s" could not be used as a base for an image path.' % filename

    print "WTF Export time: %.2f" % (sys.time() - time1)