def combine(self):
    """Combine every visible object in the scene into one mesh.

    Selects everything, unites it (merging UV sets via ``muv=1``) and then
    deletes all construction history on the result.
    """
    # Combines all cylinders and delete all history
    pm.select(all=True)
    pm.polyUnite(muv=1)
    pm.delete(ch=True)
def on_click_run(slider_scale, slider_angle, slider_level, snow_win):
    """Build a six-fold snowflake mesh from the UI slider values.

    Grows 1/6 of the flake by repeatedly duplicating, rotating and scaling
    the seed object, unites that wedge, duplicates it around Y in 60-degree
    steps, unites the whole flake, then closes the menu window.

    NOTE(review): relies on module globals `snow_obj` (seed mesh) and
    `to_point_index` (reference vertex index) being defined elsewhere —
    confirm before reuse.
    """
    ### get values from sliders
    snow_scale = slider_scale.getValue()
    snow_angle = slider_angle.getValue()
    snow_level = slider_level.getValue()
    ### list for 1/6 snow piece
    snow_list = []
    snow_list.append(snow_obj)
    ### list for 6 parts of the snow piece
    snow_joint_list = []
    ### rename the origin object
    pm.rename(snow_obj, 's0')
    ### draw 1/6 snow piece
    for i in range(1, snow_level):
        tmp = pm.duplicate(snow_list[i - 1])
        snow_list.append(tmp[0])
        pm.rotate(snow_list[i], 0, snow_angle, 0, r=True)
        pm.scale(snow_list[i], snow_scale, 1, snow_scale, r=True)
        # re-anchor the new segment so its reference vertex stays on Y=0
        pos = pm.xform('%s.vtx[%d]' % (snow_list[i], to_point_index), q=1, t=1, ws=1)
        pm.xform(snow_list[i], t=(pos[0], 0, pos[2]), ws=1)
    ### unite the 1/6 snow piece
    tmpunit = snow_list[0]
    for i in range(1, snow_level):
        print tmpunit, i
        tmpunit = pm.polyUnite(tmpunit, snow_list[i])
    ### draw all 6 parts
    snow_joint_list.append(tmpunit[0])
    for i in range(1, 6):
        tmp = pm.duplicate(tmpunit[0])
        snow_joint_list.append(tmp)
        pm.rotate(tmp, 0, 60 * i, 0, r=True)
    ### unite the whole snow piece
    snow_final = snow_joint_list[0]
    for i in range(1, 6):
        snow_final = pm.polyUnite(snow_final, snow_joint_list[i])
    ### delete the menu window
    snow_win.delete()
    return
def make_bake_mesh(meshes, name=''):
    """
    Turn one or more meshes into a single mesh for export to xNormal.

    Args:
        meshes: iterable of mesh transforms; duplicates are removed.
        name: name for the merged mesh (defaults to 'bake_mesh').

    Returns:
        The merged, triangulated mesh, or False when `meshes` is empty.
    """
    # drop duplicate entries (order is irrelevant for polyUnite)
    meshes = list(set(meshes))
    pmc.select(clear=True)
    if not meshes:
        return False
    if len(meshes) > 1:
        if name == '':
            name = 'bake_mesh'
        merged = pmc.polyUnite(meshes, ch=False, name=name)
    else:
        merged = meshes[0]
    # xNormal expects triangulated geometry; the command's return value
    # is not needed (original bound it to an unused local)
    pmc.polyTriangulate(merged, ch=False)
    return merged
def mergeMeshes(self):
    """Unite all meshes named in self.mesh_names into one 'Base' mesh.

    Transforms are looked up by their FBX-escaped shape names
    ('FBXASC046' is the FBX escape sequence for '.'), united with UV sets
    merged (``muv=1``), and the resulting transform is stored on
    ``self._mesh``.

    NOTE(review): history is deleted on ``self.mesh`` while the merge
    result is assigned to ``self._mesh`` — confirm ``self.mesh`` resolves
    to the same node (e.g. via a property) or this targets a different
    object.
    """
    self._mesh = pm.polyUnite(pm.ls(
        (mesh_name + 'FBXASC046Shape' for mesh_name in self.mesh_names),
        type='transform'), n='Base', muv=1)[0]
    pm.delete(self.mesh, ch=True)
def merge_surfacing_object_meshes(surfacing_object):
    """ Merge all the meshs assigned to a surfacing Object.
    Args:
        surfacing_object (PyNode): surfacing object
    Raises:
        BaseException. Could not merge member meshes.
    Returns:
        The merged (or single, renamed) mesh transform, or False on failure.
    """
    try:
        members = surfacing_object.members()
        logger.info("Merging members: %s" % members)
        geo_name = "%s_geo" % str(surfacing_object)
        if len(members) > 1:
            # polyUnite returns [transform, polyUnite node]; hand back the transform
            geo = pm.polyUnite(*members, n=geo_name)
            return geo[0]
        else:
            logger.info("single object found, skipping merge: %s" % members[0])
            members[0].rename(geo_name)
            pm.parent(members[0], world=True)
            return members[0]
    except BaseException:
        # best-effort: log and signal failure rather than propagating
        logger.error("Could not merge members of: %s" % surfacing_object)
        return False
def make_joints_for_cards(cards=None, head_jnt=None, numCVs=5, w=2):
    """
    Create a joint chain at the averaged position of each ring for the
    given hair cards. This joint chain can then be driven by FK controls
    which can themselves blend into riding along an nHair via motionPath.

    FOR NOW, JUST DO JOINTS << MAKE JOINTS DYNAMIC THOUGH. TO TEST HOW IT LOOKS.
    Maybe rotate since that goes better with FK controls which are
    probably better for hair.

    cards: list of card meshes; defaults to the selection, whose LAST item
           is popped and used as head_jnt when head_jnt is not given.
    numCVs: number of rings (joints) per card.
    w: verts per ring row.
    """
    if not cards:
        cards = pmc.selected()
    if not head_jnt:
        head_jnt = cards.pop()
    # max influences = 1, to only head_jnt (not children)
    merged_cards = pmc.polyUnite(cards, muv=True)[0]
    sc = pmc.skinCluster(head_jnt, merged_cards, mi=1, tsb=True)
    j = head_jnt
    pmc.select(cl=True)
    for n in range(numCVs):
        # starting point is length step by width steps, step is total verts per card
        if n:
            pmc.skinCluster(sc, e=True, addInfluence=j, wt=0)
        # every card contributes the same ring because polyUnite concatenates
        # vertex indices; stride numCVs*w walks card-by-card
        verts = [
            v for i in range(w)
            for v in merged_cards.vtx[n * w + i::numCVs * w]
        ]
        pmc.skinPercent(sc, verts, transformValue=(j, 1.0))
        avg_pos = sum(v.getPosition() for v in verts) / len(verts)
        j = pmc.joint(p=avg_pos, n="hairCurve{:02d}_bind".format(n + 1))
        j.radius.set(.05)
    # merging objects & skinclusters is be one easy step
    return merged_cards
def get_combined_mesh_from_set(_set, midfix='shaded'):
    """Combine all member meshes of an object set into one mesh.

    The result is named after the set, with '_geo_' swapped for the midfix
    and '_set' swapped for '_combined'. Returns None when the set holds no
    meshes; otherwise returns the combined (or duplicated single) mesh.
    """
    # gather all non-intermediate mesh shapes from the set's members
    meshes = [
        shape for transform in _set.dsm.inputs()
        for shape in transform.getShapes(ni=True, type='mesh')
    ]
    if not meshes:
        return
    pc.select(meshes)
    meshName = _set.name().replace('_geo_', '_' + midfix + '_').replace(
        '_set', '_combined')
    if len(meshes) == 1:
        # single mesh: duplicate it instead of uniting, mark the original
        # shape as intermediate, and strip any extra children off the copy
        pc.mel.DeleteHistory()
        mesh = pc.duplicate(ic=True, name=meshName)[0]
        pc.parent(mesh, w=True)
        meshes[0].io.set(True)
        trash = [
            child for child in mesh.getChildren()
            if child != mesh.getShape(type='mesh', ni=True)
        ]
        pc.delete(trash)
    else:
        # operates on the current selection made above
        mesh = pc.polyUnite(ch=1, mergeUVSets=1, name=meshName)[0]
    try:
        pc.delete(_set)
    except:
        # set may already be gone (e.g. consumed by polyUnite history)
        pass
    return mesh
def create_collider(flaps, radius):
    '''
    Create collision geometry for combined flaps.

    :param flaps: Combined flaps geometry
    :param radius: Radius of inner wheel allows us to estimate collider geo
    :return: the combined collider mesh transform
    '''
    # placement offsets derived from the flaps' bounds; the 0.505 / 1.08 /
    # 1.15 factors are empirical fudge values for the wheel fit
    tx = flaps.boundingBox().width() * 0.5
    ty = flaps.boundingBox().height() * 0.505 - radius
    tz = radius * 1.08
    tz2 = radius * 1.15
    cubea, cubea_shape = pm.polyCube(width=0.2, height=0.05, depth=0.05)
    cubeb, cubeb_shape = pm.polyCube(width=0.2, height=0.05, depth=0.05)
    cubec, cubec_shape = pm.polyCube(width=0.2, height=0.05, depth=0.05)
    cubed, cubed_shape = pm.polyCube(width=0.2, height=0.05, depth=0.05)
    # four corner blocks: two upper-front, two lower-back
    cubea.setTranslation([tx, ty, tz])
    cubeb.setTranslation([-tx, ty, tz])
    cubec.setTranslation([tx, -ty, -tz2])
    cubed.setTranslation([-tx, -ty, -tz2])
    # collapse cube verts into wedges (vertex indices per Maya's polyCube layout)
    merge_verts(cubea, 7, 5)
    merge_verts(cubea, 6, 4)
    merge_verts(cubeb, 7, 5)
    merge_verts(cubeb, 6, 4)
    merge_verts(cubec, 3, 1)
    merge_verts(cubec, 2, 0)
    merge_verts(cubed, 3, 1)
    merge_verts(cubed, 2, 0)
    collider = pm.polyUnite(
        [cubea, cubeb, cubec, cubed],
        ch=False,
        mergeUVSets=True,
        name=flaps.replace('geo', 'collider_geo')
    )[0]
    return collider
def marge_run(self): objects = common.search_polygon_mesh(self.objects, serchChildeNode=True, fullPath=True) #print('marge target :', objects) if len(objects) < 2: self.marged_mesh = objects return True skined_list = [] no_skin_list = [] parent_list = [cmds.listRelatives(obj, p=True, f=True) for obj in objects] for obj in objects: skin = cmds.ls(cmds.listHistory(obj), type='skinCluster') if skin: skined_list.append(obj) else: no_skin_list.append(obj) if no_skin_list and skined_list: skined_mesh = skined_list[0] for no_skin_mesh in no_skin_list: weight.transfer_weight(skined_mesh, no_skin_mesh, transferWeight=False, returnInfluences=False, logTransfer=False) if skined_list: marged_mesh = pm.polyUniteSkinned(objects)[0] pm.polyMergeVertex(marged_mesh, d=0.001) target_mesh = pm.duplicate(marged_mesh)[0] weight.transfer_weight(str(marged_mesh), str(target_mesh), transferWeight=True, returnInfluences=False, logTransfer=False) else: marged_mesh = pm.polyUnite(objects, o=True)[0] pm.polyMergeVertex(marged_mesh, d=0.001) target_mesh = pm.duplicate(marged_mesh)[0] #pm.delete(objects) for obj in objects: if pm.ls(obj): pm.delete(obj) pm.delete(marged_mesh) all_attr_list = [['.sx', '.sy', '.sz'], ['.rx', '.ry', '.rz'], ['.tx', '.ty', '.tz']] for p_node in parent_list: if cmds.ls(p_node, l=True): all_lock_list = [] for attr_list in all_attr_list: lock_list = [] for attr in attr_list: lock_list.append(pm.getAttr(target_mesh+attr, lock=True)) pm.setAttr(target_mesh+attr, lock=False) all_lock_list.append(lock_list) pm.parent(target_mesh, p_node[0]) for lock_list, attr_list in zip(all_lock_list, all_attr_list): for lock, attr in zip(lock_list, attr_list): #continue #print('lock attr :', lock, target_mesh, attr) pm.setAttr(target_mesh+attr, lock=lock) break pm.rename(target_mesh, objects[0]) pm.select(target_mesh) self.marged_mesh = str(target_mesh) return True
def main():
    """Unite the selected objects and parent the result under the scene's geo group.

    The group name is derived from the scene file name's first underscore
    token ('grp_geo_<scene>'); falls back to 'scene' for unsaved files.
    """
    sceneName = pm.sceneName().basename()
    part = sceneName.split("_")
    if part[0].endswith('.ma') or part[0].endswith('.mb'):
        # scene name had no underscore: strip the extension
        scene = part[0][:-3]
    elif part[0] == '':
        # unsaved scene has an empty name
        scene = 'scene'
    else:
        scene = part[0]
    sels = pm.selected()
    sel = tuple(sels)
    pm.polyUnite(sel, n=sels[0])
    pm.delete(ch=True)
    # polyUnite leaves the new mesh selected
    grpIN = pm.selected()
    if pm.objExists('_'.join(['grp', 'geo', scene])):
        pm.parent(grpIN, '_'.join(['grp', 'geo', scene]))
def shadowMesh(triCount=10000):
    """Build a reduced-poly shadow mesh from the selected character meshes.

    Duplicates and unites the selection into 'ShadowGeo', parents it under
    'CharaA', binds it to that hierarchy's joints, copies skin weights from
    the originals, runs a MEL polyCleanup, then polyReduces to roughly
    `triCount` triangles and bakes the deformer history.

    triCount: target triangle budget for the reduction.
    """
    meshArg = pm.ls(sl=True)
    meshList = pm.duplicate(meshArg, rr=True)
    shadowGeo = pm.polyUnite(meshList, ch=False, name='ShadowGeo')[0]
    pm.delete(meshList)
    #deleting leftover garbage transform nodes
    pm.parent(shadowGeo, 'CharaA')
    newSkinClust = pm.skinCluster(
        shadowGeo, pm.listRelatives('CharaA', ad=True, type='joint'))
    #skinning begins
    pm.select(meshArg)
    pm.select(shadowGeo, add=True)
    pm.copySkinWeights(
        noMirror=True,
        surfaceAssociation='closestPoint',
        influenceAssociation='closestJoint')
    #copying skin weights
    pm.selectMode(o=True)
    #shadowmesh starts here
    pm.select(shadowGeo)
    mel.eval(
        'polyCleanupArgList 3 { "0","1","1","0","0","0","0","0","0","1e-005","0","1e-005","0","1e-005","0","1","0" };'
    )
    #clean up before reduction
    pm.selectMode(o=True)
    #reduce polycount to fall under budget
    pm.polyReduce(shadowGeo, ver=1, trm=2, triangleCount=triCount,
                  sharpness=0, keepBorder=1, keepColorBorder=1,
                  keepFaceGroupBorder=1, keepHardEdge=1, keepCreaseEdge=1,
                  keepBorderWeight=0.5, keepMapBorderWeight=0,
                  keepColorBorderWeight=0, keepFaceGroupBorderWeight=0,
                  keepHardEdgeWeight=0.25, keepCreaseEdgeWeight=0,
                  useVirtualSymmetry=0, symmetryTolerance=0.01, sx=0, sy=1,
                  sz=0, sw=0, preserveTopology=1, keepQuadsWeight=1,
                  vertexMapName="", replaceOriginal=1, cachingReduce=1,
                  constructionHistory=1)
    pm.select(deselect=True)
    # keep the skinCluster live but bake the reduction history
    pm.bakePartialHistory(shadowGeo, prePostDeformers=True, preDeformers=True)
    pm.select(shadowGeo)
    print('EKKO shadowMesh successful!\nShadow Mesh specs:')
    print(pm.polyEvaluate(shadowGeo, fmt=True))
def controlShapeAdaptive(controlList, geoList, ctrlSmooth=6, scaleConstant=1.5, rebuildCV=32):
    """Reshape curve controls so they hug the given geometry.

    For each control: duplicate its curve, sweep a small circle along it,
    convert the sweep to polys, shrink-wrap that onto the combined geo,
    wrap the curve to the result, scale it up by `scaleConstant` and copy
    the shape back onto the original control. All temp nodes live under a
    build group that is deleted at the end.

    NOTE(review): the build group name 'daptiveShapeBuild_GRP' looks like a
    typo for 'adaptiveShapeBuild_GRP' — left as-is since it is a runtime
    string.
    """
    adaptiveShapeBuildGrp = pm.group(n='daptiveShapeBuild_GRP', em=True)
    geoList = pm.ls(geoList)
    dupliGeo = pm.duplicate(geoList)
    geoCombined = pm.polyUnite(dupliGeo, ch=False, name='tmpAdaptiveRef_GEO')[0]
    pm.parent(geoCombined, adaptiveShapeBuildGrp)
    ctrlList = pm.ls(controlList)
    for ctrl in ctrlList:
        ctrlShapeBuildGrp = pm.group(n=ctrl.name() + '_GRP', em=True,
                                     p=adaptiveShapeBuildGrp)
        dupliCtrl = pm.duplicate(ctrl, n='tmpCtrl')[0]
        # strip any child transforms off the duplicate, keep the curve only
        pm.delete(pm.ls(dupliCtrl, dagObjects=True, exactType='transform')[1:])
        pm.rebuildCurve(dupliCtrl, ch=False, s=rebuildCV)
        pm.parent(dupliCtrl, ctrlShapeBuildGrp)
        # extrusion: ride a circle along the curve via a motion path to
        # orient it, then detach the path before extruding
        extrudeCircle = pm.circle(r=0.1, ch=0)[0]
        pm.parent(extrudeCircle, ctrlShapeBuildGrp)
        motionPathNode = \
            pm.ls(pm.pathAnimation(extrudeCircle, curve=dupliCtrl,
                                   fractionMode=True, follow=True,
                                   followAxis='z', upAxis='y',
                                   worldUpType='vector',
                                   worldUpVector=[0, 1, 0],
                                   inverseUp=False, inverseFront=False,
                                   bank=False))[0]
        pm.disconnectAttr(extrudeCircle.tx)
        pm.disconnectAttr(extrudeCircle.ty)
        pm.disconnectAttr(extrudeCircle.tz)
        pm.disconnectAttr(extrudeCircle.rx)
        pm.disconnectAttr(extrudeCircle.ry)
        pm.disconnectAttr(extrudeCircle.rz)
        pm.disconnectAttr(motionPathNode.u)
        pm.delete(motionPathNode)
        extrudedSurface = \
            pm.extrude(extrudeCircle, dupliCtrl, ch=False, rn=False, po=0,
                       et=2, ucp=0, fpt=1, upn=0, rotation=0, scale=1,
                       rsp=1)[0]
        pm.parent(extrudedSurface, ctrlShapeBuildGrp)
        nurbsToPoly = pm.nurbsToPoly(extrudedSurface, ch=False,
                                     polygonType=1, chr=0.9)
        pm.parent(nurbsToPoly, ctrlShapeBuildGrp)
        # add deformer: curve follows the poly tube, tube wraps to the geo
        wrapNode = deform.wrapDeformer(dupliCtrl, nurbsToPoly)
        shrinkWrapNode = deform.shrinkWrapDeformer(nurbsToPoly, geoCombined)
        shrinkWrapNode.projection.set(4)
        shrinkWrapNode.targetSmoothLevel.set(ctrlSmooth)
        # delete history so the deformed CVs become the curve's rest shape
        common.deleteHistory(nurbsToPoly)
        common.deleteHistory(dupliCtrl)
        pm.scale(dupliCtrl.cv[:], [scaleConstant, scaleConstant, scaleConstant])
        copyShape(dupliCtrl, ctrl)
    pm.delete(adaptiveShapeBuildGrp)
def cleanCombine(sel=None):
    """Combine the given (or currently selected) meshes without history.

    Unites `sel`, deletes the surviving originals, and renames the result
    after the first input when polyUnite returned a single node.

    sel: list of transforms to combine; defaults to the current selection.

    Fix: the original signature used ``sel=pm.ls(sl=True)``, which is
    evaluated ONCE at import time, so the default was the selection at
    module load, not at call time. The default is now resolved per call.
    """
    if sel is None:
        sel = pm.ls(sl=True)
    bakName = sel[0].nodeName()
    res = pm.polyUnite(sel, ch=False)
    # delete any source transforms that survived the unite
    for s in sel:
        if s:
            pm.delete(s)
    if len(res) == 1:
        pm.rename(res, bakName)
def merge_and_weld(cls, polyList=None, *args, **kwargs):
    '''
    merge and weld items from a list, or if empty selection
    '''
    items = polyList if polyList else pm.ls(sl=True)
    # need at least two objects to have anything to unite
    if len(items) <= 1:
        lcUtility.Utility.lc_print('Select 2 or more polygon objects', mode='warning')
        return
    united = pm.PyNode(pm.polyUnite(items)[0])
    pm.polyMergeVertex(united, distance=0.001)
    pm.delete(united, constructionHistory=True)
def createWriteNodes(shadingMixNode, shadingGroupNode, blendReturned):
    """Insert aiWrite* AOV nodes between blend outputs and the shading group.

    For each node in `blendReturned`, creates an aiWriteColor (float3
    output) or aiWriteFloat node, names its AOV after the node's second
    underscore token, and splices it in front of the shading group's
    surface shader. Then selects the group's assigned transforms and, if
    more than one, unites them under the mix node's name (baking step is
    commented out).

    NOTE(review): relies on module global `folder_to_save_base`; `typeAttr`
    and `folder_to_save` are computed but only used by the commented-out
    bake call.
    """
    for i in blendReturned:
        attr = i + '.output'
        if pm.getAttr(attr, type=True) == 'float3':
            # VECTOR
            typeAttr = 'vector'
            nodeToConnect = pm.createNode('aiWriteColor', n='aiWriteColor_AOV')
        else:
            # FLOAT
            typeAttr = 'float'
            nodeToConnect = pm.createNode('aiWriteFloat', n='aiWriteFloat_AOV')
        AOV_name = i.name().split('_')[1]
        attrToConnect = nodeToConnect.listAttr(st='input')
        attrName = nodeToConnect.listAttr(st='aovName')
        pm.setAttr(attrName[0], AOV_name)
        pm.connectAttr(attr, attrToConnect[0])
        # current shader feeding the group becomes the write node's beauty input
        inConnect = shadingGroupNode.listConnections(
            d=False, t=['alLayer', 'aiWriteColor', 'aiWriteFloat'], p=True)[0]
        pm.connectAttr(inConnect, nodeToConnect.listAttr(st='beauty')[0])
        pm.connectAttr(nodeToConnect.listAttr(st='outColor')[0],
                       shadingGroupNode.listAttr(st='surfaceShader')[0], f=True)
    # select every transform assigned to the shading group
    selectionList = []
    for i in shadingGroupNode.listConnections():
        if i.nodeType() == "transform":
            selectionList.append(i)
    pm.select(selectionList)
    pm.showHidden()
    folder_to_save = os.path.join(folder_to_save_base, shadingMixNode.name())
    s = pm.ls(sl=True)
    nameTr = shadingMixNode.name()
    if (len(s) != 1):
        pm.polyUnite(s, n=nameTr)
    #cmds.arnoldRenderToTexture(folder=folder_to_save, enable_aovs=True, resolution=2048, all_udims=True)
    pm.select(cl=True)
def _assign_instance_material(self, instance_name):
    """Assign stem/spike materials to the plant parts and unite them.

    Requires the 'mtl' prerequisites for `instance_name` to be complete.
    Assigns the stem material to 'pole' and the spikes material (with
    linear-subdiv displacement) to all 'cone_*' meshes, unites everything
    into a 'plant' mesh stored on the instance record, and marks the
    'mtl' stage done.
    """
    self._assert_prequisites_completed(instance_name, "mtl")
    assign_mtl_from_resources(["pole"], STEM_MTL_NAME)
    assign_mtl_from_resources(
        ["cone_*"], SPIKES_MTL_NAME,
        displacement_kw={'subdiv_type': SUBDIV_LINEAR})
    plant = pm.polyUnite("cone_*", "pole", n="plant")
    self.instances[instance_name]['mesh'] = plant
    pm.delete(plant, constructionHistory=True)
    self._mark_stage_completed(instance_name, "mtl")
def intersections(self, a, b):
    """Select the vertices of `a` that lie inside (or near) mesh `b`.

    Marks inside-ness by vertex color: an inflated/deflated pair of copies
    of `b` is colored red (inside shell) and black (outside shell), united,
    and the colors transferred onto a copy of `a`. Vertices whose red
    channel stays below .51 are treated as 'inside'. Ends with those
    vertices deselected from the full vertex set of `a`.
    """
    bDoutside = pm.duplicate(b, n="tmpBDoutside")[0]
    bDinside = pm.duplicate(b, n="tmpBDinside")[0]
    aCopyDupe = pm.duplicate(a, n="aCopyDupe")[0]
    dupes = [
        bDoutside,
        bDinside,
        aCopyDupe,
    ]
    for dupe in dupes:
        dupe.setParent(w=True)
    # shrink the inner copy slightly so the shells don't coincide
    self.polygonVolume(bDinside, -.005)
    pm.polyColorPerVertex(bDinside, r=1, g=0, b=0, a=1, cdo=1)
    pm.polyColorPerVertex(bDoutside, r=0, g=0, b=0, a=1, cdo=1)
    bColoured = pm.polyUnite(bDoutside, bDinside, ch=False)[0]
    bColoured.setParent(w=True)
    pm.delete(bColoured, ch=True)
    pm.transferAttributes(bColoured, aCopyDupe,
                          sourceColorSet="colorSet1",
                          targetColorSet="colorSet1",
                          transferColors=True, sampleSpace=0,
                          colorBorders=1)
    pm.delete(aCopyDupe, ch=True)
    pm.delete(bColoured)
    # map aCopyDupe's low-red vertices back onto a's vertex indices
    aVerts = [
        str(a) + ".vtx[" + x.split("[")[-1].split("]")[0] + "]"
        for x in cmds.ls(str(aCopyDupe) + ".vtx[*]", fl=1)
        if cmds.polyColorPerVertex(x, q=1, rgb=1)[0] < .51
    ]
    #bVerts = [str(b)+".vtx["+x.split("[")[-1].split("]")[0]+"]" for x in cmds.ls(str(bCopyDupe)+".vtx[*]",fl=1) if cmds.polyColorPerVertex(x,q=1,rgb=1)[0] < .51]
    pm.delete(aCopyDupe)
    sel = pm.select(aVerts)
    selVerts = pm.ls(sl=True)
    pm.select(a)
    Verts = pm.select(pm.polyListComponentConversion(tv=True))
    allVerts = pm.ls(sl=True)
    pm.select(allVerts)
    pm.select(selVerts, d=True)
def compeleteProfile(self):
    """Finalize the profile meshes for every cached selection.

    For each object in self.sel_list, unites its generated profile meshes
    into a single '<name>_profile' mesh (or renames the lone mesh),
    parents it to the world, then resets the tool's cached state.

    (Method name typo 'compelete' is kept for interface compatibility.)

    Fix: the unite condition was ``len(mesh_list) > 2``, which silently
    discarded the second mesh whenever exactly two were present; it now
    unites for any count greater than one.
    """
    for sel in self.sel_list:
        name = "%s_profile" % sel
        mesh_list = self.crv_dict[sel]["mesh"]
        if len(mesh_list) > 1:
            mesh = pm.polyUnite(mesh_list, n=name, ch=0)
        else:
            mesh = mesh_list[0]
            mesh.rename(name)
        # pm.delete(mesh,ch=1)
        pm.parent(mesh, w=1)
    self.resetValue()
def cleanGroupCombineCmd( group_list ):
    """Collapse each group of meshes into a single mesh named after the group.

    Groups with multiple meshes are united; groups with exactly one mesh
    have that mesh promoted in the group's place; empty groups only warn.
    The result keeps the group's parent and name. Returns the list of
    resulting objects.
    """
    combined_list = []
    for grp in group_list:
        grp_parent = grp.getParent()
        print "grp_parent:", grp_parent
        # delete history on the current selection/scene state
        pm.delete( ch=1 )
        grp_str = grp.nodeName()
        mesh_children = pm.ls( grp, dag=1, type="mesh" )
        if len( mesh_children ) > 1:
            # un-parent first so the united result lands at world level
            if grp_parent is not None:
                pm.parent( grp, world=1 )
            obj = pm.polyUnite( grp, ch=1 )[0]
            pm.delete( obj, ch=1 )
            try:
                pm.delete( grp )
            except TypeError:
                # group may already have been consumed by polyUnite
                pass
            if grp_parent is not None:
                pm.parent( obj, grp_parent )
            obj_str = obj.rename( grp_str )
            print '// Result: Combined group as', obj_str
        elif len( mesh_children ) == 1:
            # single mesh: promote it and drop the now-empty group
            obj = mesh_children[0].getParent()
            if grp_parent is not None:
                pm.parent( obj, grp_parent )
            pm.delete( grp )
            obj_str = obj.rename( grp_str )
            print '// Result: Promoted mesh as', obj_str
        else:
            obj = None
            mel.warning( "No meshes found under group." )
        if not obj is None:
            combined_list.append( obj )
    return combined_list
def createShape(self, i_isGroup, i_mesh):
    """create voxels

    Keyword arguments:
    i_isGroup: if the result is one object or each voxel seperated
    i_mesh: the voxel
    """
    # farthest a voxel center can be from the surface: cube's space diagonal
    distanceLimit = math.sqrt(self._voxelSize*self._voxelSize*3)
    for p in self._voxelsPosList:
        r = self._oriMdl.getClosestPointAndNormal((p[0], p[1], p[2]))
        closestPoint = r[0] + self._oriMdl.getTranslation()
        # when the cube is far from the surface
        if distanceLimit < getLength((p[0]-closestPoint[0], p[1]-closestPoint[1], p[2]-closestPoint[2])):
            continue
        dp = dotProduct(normalize([p[0]-closestPoint[0], p[1]-closestPoint[1], p[2]-closestPoint[2]]), normalize(r[1]))
        # doc product > 0 means two vectors angle is from 0~90
        if dp < 0:
            # voxel is on the inside of the surface: keep it
            mesh = pm.duplicate(i_mesh, name='Voxel1')
            pm.move(p[0], p[1], p[2], mesh, ws=True)
            self._voxelsList.append(mesh)
            # print "Create Voxel @ "+str(p[0])+","+str(p[1])+","+str(p[2])+" "+str(mesh)
    voxelGrp = pm.group(self._voxelsList, name='VoxelGrp1')
    if i_isGroup ==True:
        if len(self._voxelsList)>1:
            # merge into one object and clean coincident verts/lamina faces
            pm.polyUnite(voxelGrp, name='V1')
            pm.polyMergeVertex(distance=0.0)
            pm.delete(pm.polyInfo(laminaFaces=True))
    pm.delete(self._oriMdl)
def ec_face_mirror(self, **kwargs):
    """Mirror the selected faces across an axis and weld the result.

    kwargs:
        axis: scale attribute to negate on the separated copy ('sx' default).
        obj: target object name when not running from the GUI text field.

    Chips the selected faces off, separates, negates the copy's scale axis,
    re-unites under the original name, and merges the seam vertices.
    """
    attr = kwargs.setdefault('axis', 'sx')
    if self.gui is True:
        obj = py.textField('nameText', q=True, tx=True)
    else:
        obj = kwargs.setdefault('obj')
    sel = py.ls(sl=True)
    py.polyChipOff(sel, dup=True)
    temp_obj = py.polySeparate(obj, n='tempObj')
    # negate the chosen scale axis on the chipped-off piece to mirror it
    py.setAttr('%s.%s' % (temp_obj[1], attr), -1)
    py.select(temp_obj)
    temp_obj[0] = py.polyUnite(temp_obj, ch=False)
    py.rename(temp_obj[0], obj)
    mel.eval('ConvertSelectionToVertices;')
    py.polyMergeVertex()
def mirror_transfer_skin_weights():
    """
    Helper function to mirror weights from two pieces of geo that are separate.

    Selection order: This -> That

    Works by combining duplicates of the '_L' source and its '_R'
    counterpart into one temporary mesh, mirroring weights across YZ on
    that mesh, then copying the mirrored weights back onto the real
    right-side geo. Temp geometry is deleted and the user's selection
    restored.
    """
    user_sel = pm.ls(sl=True)
    # TODO: More robust way of finding from and to objects.
    from_object = user_sel[0]
    to_object = pm.PyNode(from_object.replace('_L', '_R'))
    # If the to object has no skin cluster, add one best we can.
    bind_mirror_weights(from_object, to_object)
    # Duplicate and combine the pieces
    duplicate_pieces = pm.duplicate([from_object, to_object])
    combined_geo, _ = pm.polyUnite(*duplicate_pieces, ch=1, mergeUVSets=1,
                                   centerPivot=True,
                                   name='TEMP_combined_mirror_geo')
    # Delete history
    pm.delete(combined_geo, constructionHistory=True)
    skin_cluster = copy_skin_influence(from_object, combined_geo)
    # Mirroring the skin cluster
    pm.copySkinWeights(ss=skin_cluster, ds=skin_cluster, mirrorMode='YZ',
                       surfaceAssociation='closestPoint',
                       influenceAssociation='closestJoint')
    # Final copy skin weights to target geo
    pm.copySkinWeights(combined_geo, to_object, noMirror=True,
                       surfaceAssociation='closestPoint',
                       influenceAssociation='closestJoint')
    # Cleanup
    pm.delete(combined_geo)
    pm.delete(duplicate_pieces)
    pm.select(user_sel, r=True)
    print('Copying Complete!')
def rig_chain(self, chain):
    """
    takes chain and adds joints and a smooth bind to rig it
    chain: list of objects
    """
    pm.select(deselect=True)
    joint_chain = []
    # one joint per object, placed at the object's local translation
    for index, node in enumerate(chain, start=1):
        position = (node.translateX.get(),
                    node.translateY.get(),
                    node.translateZ.get())
        new_joint = pm.joint(p=position)
        pm.rename(new_joint, self.name + "_joint%d" % index)
        joint_chain.append(new_joint)
    combined = pm.polyUnite(chain, ch=0)[0]
    self.name = pm.rename(combined, self.name)
    pm.skinCluster(combined, joint_chain[0])
    return joint_chain
def combineGeo(objArray, centerPivot=1):
    """Combine objects into one mesh named '<first>Comb', keeping the parent.

    A temporary locator keeps the original parent group alive while
    polyUnite consumes its children; the result is re-parented there and
    the locator removed. Optionally centers the pivot.
    """
    p = pm.listRelatives(objArray[0], p=1)
    if len(p) == 1:
        # placeholder child so the parent group isn't deleted by polyUnite
        loc = pm.spaceLocator()
        pm.parent(loc, p[0])
    newObj = pm.polyUnite(objArray, ch=0, mergeUVSets=1)
    if len(p) == 1:
        pm.parent(newObj, p[0])
        pm.delete(loc)
    pm.select(newObj)
    pm.rename(newObj, '%sComb' % objArray[0].nodeName())
    if centerPivot == 1:
        pm.xform(newObj, cp=1)
    return newObj
def buildJacket():
    """Build the jacket geometry from three planes (back, left, right).

    NOTE(review): depends on module globals `jacketBack`, `paperLength`,
    `paperWidth`, `paperSubdivisionsX/Y`, `jointNumber`, `UVnormalized` —
    confirm they are defined before calling.
    Returns the united 'jacket_geo' mesh.
    """
    planeBack = pm.polyPlane(
        width=jacketBack,
        height=paperLength,
        subdivisionsX=paperSubdivisionsX,
        subdivisionsY=paperSubdivisionsY,
        axis=(0, 1, 0),
        createUVs=UVnormalized,
        ch=1,
        name="planeBack"
    )
    planeLeft = pm.polyPlane(
        width=paperWidth,
        height=paperLength,
        subdivisionsX=jointNumber,
        subdivisionsY=paperSubdivisionsY,
        axis=(1, 0, 0),
        createUVs=UVnormalized,
        ch=1,
        name="planeLeft"
    )
    # side planes hang off either edge of the back plane
    pm.move(planeLeft, (((jacketBack/2)*-1), paperLength/2, 0))
    planeRight = pm.polyPlane(
        width=paperWidth,
        height=paperLength,
        subdivisionsX=jointNumber,
        subdivisionsY=paperSubdivisionsY,
        axis=(1, 0, 0),
        createUVs=UVnormalized,
        ch=1,
        name="planeRight"
    )
    pm.move(planeRight, (((jacketBack/2)), paperLength/2, 0))
    jacket = pm.polyUnite(planeBack, planeRight, planeLeft, ch=0,
                          name="jacket_geo")
    pm.select(cl=1)
    return jacket
def combineClean(instanceGroup, meshName, duplicateFaces=False):
    """Combine a group of instances into one welded mesh and delete the group.

    instanceGroup: group node whose children are united.
    meshName: name for the combined mesh.
    duplicateFaces: when True, also run Maya's polyCleanup to remove
        lamina/duplicate faces left by overlapping instances.
    Returns the polyUnite result.
    """
    print("Combining mesh")
    mesh = pm.polyUnite(instanceGroup, name=meshName, constructionHistory=False)
    #print( "Merging %i" % len( mesh[ 0 ].vtx ) + " verticies" )
    pm.polyMergeVertex(mesh[0].vtx, distance=0.1)
    #print( "Reduced to %i" % mesh[ 0 ].numVertices() + " verticies" )
    if duplicateFaces:
        print("Cleaning up faces")
        pm.select(mesh[0])
        pm.selectType(polymeshFace=True)
        # constrain selection to lamina faces (type 0x0008, topology 2)
        pm.polySelectConstraint(mode=3, type=0x0008, topology=2)
        # Don't ask me how I did this
        mel.eval(
            'polyCleanupArgList 3 { "0","2","0","0","0","0","0","0","0","1e-005","0","1e-005","1","0.3","0","-1","1" };'
        )
        pm.delete()
        pm.polySelectConstraint(mode=0, topology=0)
        pm.selectType(polymeshFace=False)
        pm.selectMode(object=True)
        print("Faces reduced")
    if pm.PyNode(instanceGroup).exists():
        pm.delete(instanceGroup)
    pm.delete(constructionHistory=True)
    pm.select(clear=True)
    print("Cleaning up complete")
    return mesh
def combineClean( instanceGroup, meshName, duplicateFaces = False ):
    """Combine a group of instances into one welded mesh and delete the group.

    NOTE(review): duplicate of the other combineClean in this file, differing
    only in whitespace style — consider deleting one of the two.
    """
    print( "Combining mesh" )
    mesh = pm.polyUnite( instanceGroup, name = meshName, constructionHistory = False )
    #print( "Merging %i" % len( mesh[ 0 ].vtx ) + " verticies" )
    pm.polyMergeVertex( mesh[ 0 ].vtx, distance = 0.1 )
    #print( "Reduced to %i" % mesh[ 0 ].numVertices() + " verticies" )
    if duplicateFaces:
        print( "Cleaning up faces" )
        pm.select( mesh[ 0 ] )
        pm.selectType( polymeshFace = True )
        # constrain selection to lamina faces (type 0x0008, topology 2)
        pm.polySelectConstraint( mode = 3, type = 0x0008, topology = 2 )
        # Don't ask me how I did this
        mel.eval('polyCleanupArgList 3 { "0","2","0","0","0","0","0","0","0","1e-005","0","1e-005","1","0.3","0","-1","1" };')
        pm.delete()
        pm.polySelectConstraint( mode = 0, topology = 0 )
        pm.selectType( polymeshFace = False )
        pm.selectMode( object = True )
        print( "Faces reduced" )
    if pm.PyNode( instanceGroup ).exists():
        pm.delete( instanceGroup )
    pm.delete( constructionHistory = True )
    pm.select( clear = True )
    print( "Cleaning up complete" )
    return mesh
def _renameShapes(figure_name):
    """Rename imported figure morph shapes and merge eyelash counterparts.

    For every '<figure>__<name>' transform: rename it 'BODY_<name>'; if a
    matching '<figure>Eyelashes__<name>' exists, merge the two and rename
    the result by morph type — 'POS_' for eCTRL morphs, 'JCM_' for joint
    corrective (pJCM) morphs, 'SHP_' otherwise.
    """
    for shp in pm.ls(figure_name+'__*', type='transform'):
        base_name = shp.name().split('__')[1]
        pm.rename(shp, 'BODY_'+base_name)
        if pm.ls(figure_name+'Eyelashes__'+base_name):
            pm.rename(figure_name+'Eyelashes__' + base_name,
                      'LASHES_'+base_name)
            newmesh = pm.polyUnite(shp, 'LASHES_'+base_name,
                                   n='MERGED_'+base_name, muv=1)[0]
            pm.delete(newmesh, ch=True)
            if 'eCTRL' in newmesh.name():
                pm.rename(newmesh,
                          newmesh.name().replace('MERGED_eCTRL', 'POS_'))
            # elif 'CTRL' in newmesh.name():
            #     pm.rename(newmesh, newmesh.name().replace('MERGED_CTRL', 'SHP_'))
            elif 'pJCM' in newmesh.name():
                pm.rename(newmesh, newmesh.name().replace(
                    'pJCM', '').replace('MERGED_', 'JCM_'))
            else:
                pm.rename(newmesh,
                          newmesh.name().replace('MERGED_CTRL', 'SHP_').replace('MERGED_', 'SHP_'))
def combine_meshes(self):
    """Unite all bound geos into one mesh and re-apply per-joint rigid weights.

    Because polyUnite concatenates vertex indices in input order, each
    source geo's vertex range is re-assigned (weight 1.0) to its original
    joint on the new skin cluster created by self.bind_geo().
    Returns the combined mesh.
    """
    x = 0
    verts = []
    self.joints = []
    bound_objects = []
    for bound_geo in self.bound_geo_instances:
        geo = bound_geo.get_bound_geo()
        joint = bound_geo.get_joint()
        vert = bound_geo.get_verts()
        bound_objects.append(geo)
        self.joints.append(joint)
        verts.append(vert)
    self.combined_object = pm.polyUnite(bound_objects,
                                        name='%s_mesh' % (self.name),
                                        ch=False)[0]
    self.bind_geo()
    #pm.skinPercent( 'skinCluster1', vert, transformValue=['joint1', 1])
    print verts, self.joints
    while x < len(verts):
        #self.number = None
        if x > 0:
            # subsequent geos: their verts start where the previous range ended
            pm.skinPercent('%s' % (self.skin_cluster),
                           '%s.vtx[%s:%s]' % (self.combined_object,
                                              self.number,
                                              self.number + verts[x]),
                           transformValue=['%s' % (self.joints[x]), 1])
            self.number += verts[x]
            print verts[x], self.number
        if x == 0:
            # first geo owns vertex range [0:count]
            pm.skinPercent('%s' % (self.skin_cluster),
                           '%s.vtx[0:%s]' % (self.combined_object, verts[x]),
                           transformValue=['%s' % (self.joints[x]), 1])
            self.number = verts[x]
            print self.number, 'first'
        x += 1
    return self.combined_object
def axisWidget(parentUnder=BLANK):
    '''
    Makes colored object represent the 3 axes (as a child of the selection
    if possible).

    Can take an object to be the parent, or None for no parent.
    Builds one colored cylinder per axis (red X, yellow Y, blue Z), unites
    them, and zeroes the result's transform after parenting.
    '''
    sel = selected()
    # axis direction and vertex color per cylinder
    info = {
        'x': [[1, 0, 0], [1, 0, 0, 1]],
        'y': [[0, 1, 0], [1, 1, 0, 1]],
        'z': [[0, 0, 1], [0, 0, 1, 1]],
    }
    cyls = []
    for name, (axis, color) in info.items():
        cyl = polyCylinder(radius=.1, axis=axis)[0]
        cyl.t.set(axis)
        polyColorPerVertex(cyl, r=color[0], g=color[1], b=color[2])
        cyl.displayColors.set(True)
        cyls.append(cyl)
    obj = polyUnite(cyls, ch=False)[0]
    if parentUnder is not None:
        try:
            if parentUnder is BLANK:
                # sentinel default: fall back to the current selection
                if sel:
                    obj.setParent(sel[0])
            else:
                obj.setParent(parentUnder)
            obj.t.set(0, 0, 0)
            obj.r.set(0, 0, 0)
        except Exception:
            # best-effort parenting; leave the widget at world otherwise
            pass
    return obj
def combine_meshes(self):
    """Unite all bound geos into one mesh and re-apply per-joint rigid weights.

    NOTE(review): duplicate of the other combine_meshes in this file,
    differing only in whitespace — consider removing one copy.
    """
    x = 0
    verts = []
    self.joints = []
    bound_objects = []
    for bound_geo in self.bound_geo_instances:
        geo = bound_geo.get_bound_geo()
        joint = bound_geo.get_joint()
        vert = bound_geo.get_verts()
        bound_objects.append(geo)
        self.joints.append(joint)
        verts.append(vert)
    self.combined_object = pm.polyUnite(bound_objects, name= '%s_mesh' % (self.name), ch= False)[0]
    self.bind_geo()
    #pm.skinPercent( 'skinCluster1', vert, transformValue=['joint1', 1])
    print verts, self.joints
    while x < len(verts):
        #self.number = None
        if x > 0:
            # subsequent geos: their verts start where the previous range ended
            pm.skinPercent( '%s' % (self.skin_cluster), '%s.vtx[%s:%s]' % (self.combined_object, self.number, self.number+verts[x]), transformValue=['%s' % (self.joints[x]), 1])
            self.number += verts[x]
            print verts[x], self.number
        if x == 0:
            # first geo owns vertex range [0:count]
            pm.skinPercent( '%s' % (self.skin_cluster), '%s.vtx[0:%s]' % (self.combined_object, verts[x]), transformValue=['%s' % (self.joints[x]), 1])
            self.number = verts[x]
            print self.number, 'first'
        x += 1
    return self.combined_object
def _deformPlanes(self, autoGrp, baseGrp=None):
    """
    Create a plane and copy the deforms from base mesh, then constraint the
    autoGrp and baseGrp to the plane
    :param autoGrp: autoGrp, generally contains a controller as child
    :param baseGrp: to control a baseShape like wire deformer baseCure
    :return: planes
    """
    # create a small plane per point, then combine them
    planes = []
    for ctr in autoGrp:
        plane = pm.polyPlane(h=0.01, w=0.01, sh=1, sw=1, ch=False)[0]
        plane.setTranslation(ctr.getTranslation("world"), "world")
        planes.append(plane)
    # combine planes
    if len(planes) > 1:
        # len 1 gives an error with polyUnite
        planes = pm.polyUnite(planes, ch=False, mergeUVSets=True)[0]
    else:
        planes = planes[0]
    pm.makeIdentity(planes, a=True, r=True, s=True, t=True)
    planes.setPivots([0, 0, 0])
    planes.rename("%s_planes" % self._baseName)  # rename
    pm.polyAutoProjection(planes, ch=False, lm=0, pb=0, ibd=1, cm=0, l=2,
                          sc=1, o=1, p=6, ps=0.2, ws=0)  # uvs
    if self._MESH_SHAPE:
        # if skin sample, copy skin weights to planes
        # find skin node
        skinNode = pm.listHistory(self._MESH_SHAPE, type='skinCluster')[0]
        # joint list
        jointList = skinNode.influenceObjects()
        # create skinCluster
        copySkinCluster = pm.skinCluster(planes, jointList, mi=3)
        # copy skin weigths
        pm.copySkinWeights(ss=skinNode, ds=copySkinCluster, noMirror=True,
                           surfaceAssociation='closestPoint',
                           influenceAssociation=('closestJoint',
                                                 'closestJoint'))
    # connect each auto grp to each poly face
    numFaces = planes.getShape().numFaces()
    logger.debug("num Faces: %s" % numFaces)
    for i in range(numFaces):
        pm.select(planes.f[i], r=True)
        pm.select(autoGrp[i], add=True)
        pm.pointOnPolyConstraint(maintainOffset=True)
        pm.select(cl=True)
        if baseGrp:
            pm.select(planes.f[i], r=True)
            pm.select(baseGrp[i], add=True)
            pm.pointOnPolyConstraint(maintainOffset=True)
            pm.select(cl=True)
        #hammer weights
        try:
            #TODO: bad aproximation
            vertex = pm.modeling.polyListComponentConversion(planes.f[i],
                                                             tv=True)
            pm.select(vertex, r=True)
            logger.debug("vertices %s:" % vertex)
            mel.eval("weightHammerVerts;")
            pm.select(cl=True)
        except:
            # weight hammer can fail on degenerate faces; just clear selection
            pm.select(cl=True)
    # parent planes to nonXform grp
    self._noXformGrp.addChild(planes)  # parent to noXform
    return planes
def main(size=0.5):
    """Generate outline (profile) meshes for the selected objects via pfxToon.

    For each selected mesh: assign a pfxToon outline driven by the active
    camera, convert the profile strokes to curves, inflate each curve along
    the mesh and loft base/inflated pairs into polygon strips, then unite
    the strips into '<name>_profile'.

    size: inflation offset for the outline thickness.

    Fix: the unite condition was ``len(mesh_list) > 2``, which left exactly
    two lofted strips uncombined; it now unites whenever more than one
    strip exists.
    """
    # force NURBS->poly conversion to quads with one span per U/V
    pm.nurbsToPolygonsPref(polyType=1)
    pm.nurbsToPolygonsPref(format=2)
    pm.nurbsToPolygonsPref(uType=3)
    pm.nurbsToPolygonsPref(uNumber=1)
    pm.nurbsToPolygonsPref(vType=3)
    pm.nurbsToPolygonsPref(vNumber=1)
    mel.eval(""" source "assignPfxToon.mel"; """)
    sel_list = pm.ls(sl=1, ni=1)
    cam = getActiveCamera()
    for sel in sel_list:
        # NOTE create the outline through a Toon node
        mel.eval('assignPfxToon "" 0;')
        # NOTE grab the freshly created Toon node
        profile_node = pm.ls(sl=1)[0]
        # NOTE drive the silhouette from the current camera position
        cam.t.connect(profile_node.cameraPoint, f=1)
        # NOTE generate the outline curves
        base_crv_list = generateProfileCurve(profile_node)
        mesh_list = []
        inflate_crv_list = []
        # NOTE loft each curve pair into an outline strip
        for base in base_crv_list:
            pm.rebuildCurve(base, ch=0, rpo=1, rt=0, end=1, kr=1, kcp=0,
                            kep=1, kt=0, s=0, d=3, tol=0.01)
            inflate = inflateCurveOnMesh(base, sel, scale=size)
            inflate_crv_list.append(inflate)
            mesh = pm.loft(base, inflate, ch=0, u=1, c=0, ar=1, d=3, ss=1,
                           rn=0, po=1, rsn=1)
            mesh_list.append(mesh)
        base_grp = pm.group(base_crv_list, n="base_grp")
        inflate_grp = pm.group(inflate_crv_list, n="inflate_grp")
        if len(mesh_list) > 1:
            pm.polyUnite(mesh_list, n="%s_profile" % sel, ch=0)
        # pm.delete(base_grp,inflate_grp)
        pm.delete(profile_node.getParent())
def import_model(body_index, settings, loading_box):
    """Import an LBA2 body: mesh, spheres, lines, optional rig and animations.

    body_index: index into the module-global `body_file` list.
    settings: import options (use_palette/use_rigging/use_animation).
    loading_box: progressWindow handle updated through the stages.

    NOTE(review): relies on module globals `body_file`, `palette`,
    `resources` — `palette` is declared global but not used in this body.
    """
    global body_file
    global palette
    global resources
    lba_model = read_lba2_model(body_file[body_index])
    materials = []
    if settings.use_palette:
        pm.progressWindow(loading_box, edit=True,
                          status="Generating Palette...", progress=5)
        # get list with all used palette values
        for i in range(len(lba_model.polygons)):
            materials.append(lba_model.polygons[i].colour)
        for i in range(len(lba_model.spheres)):
            materials.append(lba_model.spheres[i].colour)
        for i in range(len(lba_model.lines)):
            materials.append(lba_model.lines[i].colour)
        # dict.fromkeys preserves order while removing duplicates
        materials = list(dict.fromkeys(materials))
        create_materials(materials)
    bones = None
    if settings.use_rigging:
        pm.progressWindow(loading_box, edit=True,
                          status="Generating Bones...", progress=10)
        bones = bone_generator(lba_model.bones, lba_model.vertices)
    # generate the main mesh
    pm.progressWindow(loading_box, edit=True,
                      status="Generating Mesh...", progress=15)
    model = mesh_generator(lba_model.vertices, lba_model.polygons,
                           lba_model.normals, materials, lba_model.bones,
                           bones, settings)
    # generate the spheres
    pm.progressWindow(loading_box, edit=True,
                      status="Generating Spheres...", progress=20)
    spheres = sphere_generator(lba_model.spheres, lba_model.vertices,
                               bones, settings)
    # generate the lines
    pm.progressWindow(loading_box, edit=True,
                      status="Generating Lines...", progress=25)
    lines = line_generator(lba_model.lines, lba_model.vertices, bones,
                           settings)
    # unite all the rigged meshes
    pm.progressWindow(loading_box, edit=True, status="Unifying...",
                      progress=40)
    pm.select(clear=True)
    pm.select(model, add=True)
    pm.polyAutoProjection()
    pm.select(clear=True)
    pm.select(model, add=True)
    pm.select(spheres, add=True)
    pm.select(lines, add=True)
    unified_mesh = None
    if len(lba_model.spheres) > 0 or len(lba_model.lines) > 0:
        # skinned geometry must be united with polyUniteSkinned to keep weights
        unified_mesh = pm.polyUniteSkinned(
        ) if settings.use_rigging else pm.polyUnite()
    if settings.use_rigging:
        if unified_mesh is None:
            pm.select(model, r=True)
        else:
            pm.select(unified_mesh, r=True)
        pm.select(bones[0], add=True)
        pm.group()
    # ## Load Animations ## #
    if settings.use_animation:
        pm.progressWindow(loading_box, edit=True,
                          status="Loading Animations...", progress=45)
        for resource in resources:
            for body in resource.bodies:
                if body.realIndex == body_index:
                    if len(resource.animations) > 0:
                        pm.progressWindow(
                            loading_box, edit=True,
                            status="Generating Animations...", progress=50)
                        anim_importer(bones, resource.animations,
                                      loading_box)
                    pm.progressWindow(loading_box, endProgress=1)
                    return
    else:
        pm.progressWindow(loading_box, endProgress=1)
def combine(cls): getNodes = pm.ls(sl=True,type='transform') pm.polyUnite(getNodes,ch=False,n="%s" % getNodes[0])
def oneSkin(arg = 2):
    """Merge the selected skinned meshes into one skinned mesh ('CharaGeo').

    :param arg: merge strategy —
        0: unite with polyUniteSkinned, duplicate, rebind and copy weights
           (does not retain old data);
        1: duplicate each mesh, bind and copy weights per piece, then
           unite the duplicates (retains data);
        2: (default) plain polyUnite of duplicates, bind to every joint in
           the scene and copy weights from the originals — fallback for
           unclean meshes that crash the other paths.
    :return: the resulting 'CharaGeo' transform (or None if nothing to do
        or ``arg`` is not 0/1/2).
    """
    meshArg = pm.ls(sl = True)
    # A single selected mesh is already "one skin" — nothing to merge.
    if len(meshArg) == 1:
        pm.confirmDialog(title = 'Error', message = 'Mesh is already one skin')
        return
    if arg == 0:#arg = 0 don't retain old data.
        weightGeo = pm.polyUniteSkinned(meshArg, ch = True, mergeUVSets = True) #[0] is the mesh, [1] is the skindata
        charaGeo = pm.duplicate(weightGeo[0], name = 'CharaGeo')
        # Rebind the duplicate to the same influences as the united skin
        newSkinClust = pm.skinCluster(charaGeo, pm.skinCluster(weightGeo[1], q = True, influence = True)) #skin object
        pm.select(weightGeo)
        pm.select(charaGeo, add = True)
        pm.copySkinWeights(noMirror = True, surfaceAssociation = 'closestComponent', influenceAssociation = 'oneToOne') #copying skin weights
        pm.delete(meshArg, weightGeo)
        pm.parent(charaGeo, world = True)
        print('EKKO oneSkin successful!')
        return charaGeo
    elif arg == 1: #retain data
        meshDupe = pm.duplicate(meshArg, rr = True)
        # Map each original mesh to its duplicate so weights can be
        # transferred piece by piece before uniting.
        skinDictionary = {}
        counter = 0
        for i in meshArg:
            skinDictionary[i] = meshDupe[counter]
            counter += 1
        for i in meshArg:
            pm.skinCluster(skinDictionary[i], pm.skinCluster(i, q = True, influence = True)) #copy skin first
        for i in meshArg:
            pm.select(i)
            pm.select(skinDictionary[i], add = True)
            pm.copySkinWeights(noMirror = True, surfaceAssociation = 'closestComponent', influenceAssociation = 'closestJoint') #copying skin weights
        charaGeo = pm.polyUniteSkinned(meshDupe, ch = False) #charaGeo[0] is the mesh/transform, [1] is the skin node
        pm.parent(charaGeo[0], world = True)
        renamedGeo = pm.rename(charaGeo[0], 'CharaGeo')
        pm.delete(meshDupe) #deleting garbage data
        print('EKKO oneSkin successful!')
        return renamedGeo
    elif arg == 2: #original method, if mesh is unclean and keeps crashing
        meshList = pm.duplicate(meshArg, rr = True)
        charaGeo = pm.polyUnite(meshList, ch = False, name = 'CharaGeo')[0]
        pm.delete(meshList) #deleting leftover garbage transform nodes
        pm.parent(charaGeo, world = True)
        # NOTE(review): binds to EVERY joint in the scene — confirm that is intended
        newSkinClust = pm.skinCluster(charaGeo, pm.ls(type = 'joint')) #skinning begins
        pm.select(meshArg)
        pm.select(charaGeo, add = True)
        pm.copySkinWeights(noMirror = True, surfaceAssociation = 'closestPoint', influenceAssociation = 'closestJoint') #copying skin weights
        pm.selectMode(o = True)
        pm.select(deselect = True)
        pm.select(charaGeo)
        print('EKKO oneSkin successful!')
        return charaGeo
    # Reached only when arg is not 0, 1 or 2; nothing was merged.
    print('EKKO oneSkin successful!')
def main(attemptsLeft, outputDir, typeName):
    """Export every 'tile_*' transform in the scene as an OBJ file.

    Each tile is moved to the origin, merged with any extra child meshes,
    exported as '<tileset><tile suffix>', then restored to its original
    transform. If an error occurs, the whole export retries by recursing
    with one fewer attempt. Afterwards the exported file names are checked
    against the expected adjacency codes and missing ones are reported.

    :param attemptsLeft: remaining retry attempts; 0 aborts immediately
    :param outputDir: output directory; empty string prompts the user
    :param typeName: tileset name prefix; empty string prompts the user
    """
    sel = pm.ls(o=True)
    if attemptsLeft == 0:
        return
    numExported = 0
    # Expected tile adjacency codes, used to report missing variants.
    tilesExpected = [
        "1000_0000", "1000_1000", "1100_0000", "1100_0100", "1100_1000",
        "1100_1100", "1101_1100", "1110_0000", "1110_0010", "1110_0100",
        "1110_1000", "1110_1010", "1110_1100", "1110_1110", "1111_0000",
        "1111_1000", "1111_1010", "1111_1100", "1111_1110", "1010_1000",
        "1010_1010"
    ]
    filesExported = []
    missingTiles = []
    execute = False
    for x in sel:
        if x.lower().startswith("tile_") and x.type() == "transform":
            execute = True
            break
    if not execute:
        pm.error("Can't find meshes that start with 'tile_', so not exporting")
    else:
        if outputDir == "":
            result = pm.promptDialog(title='Export Options',
                                     message='Output Directory',
                                     button=['OK', 'Cancel'],
                                     cancelButton='Cancel',
                                     dismissString='Cancel')
            out_dir = pm.promptDialog(q=True, text=True)
        else:
            result = 'OK'
            out_dir = outputDir
        if result == 'OK':
            # Make sure the directory ends with a path separator.
            if out_dir[-1] != "\\" and out_dir[-1] != "/":
                out_dir += "\\"
            if typeName == "":
                result = pm.promptDialog(title='Export Options',
                                         message='Tileset',
                                         button=['OK', 'Cancel'],
                                         cancelButton='Cancel',
                                         dismissString='Cancel')
                tile_type = pm.promptDialog(q=True, text=True)
            else:
                # BUG FIX: this line was `result == 'OK'` — a no-op
                # comparison instead of an assignment.
                result = 'OK'
                tile_type = typeName
            if result == 'OK':
                try:
                    for obj in sel:
                        pm.select(obj)
                        if obj.lower().startswith("tile_"):
                            if obj.type() != "transform":
                                continue
                            children = pm.listRelatives(obj, c=True)
                            name = obj
                            # Remember the transform to restore after export.
                            pos = pm.xform(obj, q=True, t=True)
                            rot = pm.xform(obj, q=True, ro=True)
                            pm.xform(obj, a=1, ws=1, t=(0, 0, 0), ro=(0, 0, 0))
                            if len(children) > 1:
                                # Merge extra child meshes into the tile first.
                                toMerge = children[1:]
                                toMerge.append(obj)
                                merged = pm.polyUnite(toMerge)
                                pm.delete(merged, ch=True)
                                pm.rename(pm.ls(os=True)[0], str(name))
                                obj = name
                            bbox = pm.objectCenter(obj)
                            dst = math.sqrt(bbox[0] * bbox[0] + bbox[1] * bbox[1] + bbox[2] * bbox[2])
                            if dst > 1:
                                pm.warning(
                                    "Center point of " + obj + " is more than 1 (" +
                                    str(dst) +
                                    ") units away from origin. Is it's pivot in the correct place?"
                                )
                            # 'tile_<code>' -> '<tileset>_<code>'
                            name = tile_type + obj[4:]
                            print(out_dir + name)
                            cmds.file(
                                out_dir + name,
                                pr=1,
                                typ="OBJexport",
                                es=1,
                                f=1,
                                op="groups=1; ptgroups=1; materials=1; smoothing=1; normals=1;"
                            )
                            numExported += 1
                            filesExported.append(name)
                            pm.xform(obj, a=1, t=pos, ro=rot)
                    print(str(numExported) + " tiles created")
                except Exception:
                    # Narrowed from a bare `except:`; retry the whole export.
                    main(attemptsLeft - 1, out_dir, tile_type)
                for t in tilesExpected:
                    found = False
                    for f in filesExported:
                        # Adjacency code sits right after '<tileset>_'.
                        code = f[len(tile_type) + 1:len(tile_type) + 10]
                        if code == t:
                            found = True
                            break
                    if not found:
                        missingTiles.append(t)
                if len(missingTiles) > 0:
                    pm.warning("Less than " + str(len(tilesExpected)) +
                               " tile types created. Missing: " + str(missingTiles))
            else:
                pm.warning("Export cancelled!")
        else:
            pm.warning("Export cancelled!")
def create(cls, base_flaps, num_images, rows, columns, radius, layout_index=0):
    '''
    Build one split-flap unit: flap and cloth geometry arranged radially,
    combined, grouped, and tagged with message/layout attributes.

    :param base_flaps: Base flaps to choose from
    :param num_images: Number of images in sequence
    :param rows: Number of rows in layout
    :param columns: Number of columns in layout
    :param radius: Radius
    :param layout_index: Index in row column layout
    :return: a new instance of ``cls`` wrapping the top group
    '''
    ProgressBar.setup(
        title='Creating Split Flaps',
        text='...',
        maximum=100,
        parent=utils.get_maya_window()
    )
    flaps = utils.create_flaps(num_images, base_flaps, layout_index, rows, columns)
    ProgressBar.set(10, 'Creating cloth flaps...')
    # One cloth flap per image: build the first, duplicate the rest from it.
    cloth_flaps = [utils.create_cloth_flap(flaps[0])]
    cloth_flaps.extend([cloth_flaps[0].duplicate(rc=True)[0]
                        for i in xrange(num_images - 1)])
    ProgressBar.set(20, 'Radially arranging flaps...')
    utils.radial_arrangement(flaps, radius)
    ProgressBar.set(30, 'Radially arranging cloth flaps')
    utils.radial_arrangement(cloth_flaps, radius)
    # Derive the 'RRCC' suffix used in all node names from the layout index.
    r, c = utils.get_row_col(layout_index, None, columns)
    rowcol = '{:02d}{:02d}'.format(int(r), int(c))
    cloth_name = 'cloth_flap_{}'.format(rowcol)
    flaps_name = 'flaps_{}'.format(rowcol)
    ProgressBar.set(50, 'Combining cloth geo...')
    cloth = pm.polyUnite(
        cloth_flaps,
        ch=False,
        mergeUVSets=True,
        name=cloth_name + '_geo',
    )[0]
    ProgressBar.set(60, 'Combining flap geo...')
    # Note: rebinds `flaps` from the list of pieces to the combined mesh.
    flaps = pm.polyUnite(
        flaps,
        ch=False,
        mergeUVSets=True,
        name=flaps_name + '_geo',
    )[0]
    pm.hide(cloth)
    # Create colliders
    ProgressBar.set(70, 'Creating Collider...')
    collider = utils.create_collider(flaps, radius)
    ProgressBar.set(80, 'Grouping geometry...')
    flaps_grp = pm.group([flaps, collider], name='flaps_{}_geo_grp'.format(rowcol))
    rotate_grp = pm.group(cloth, name='rotate_{}_grp'.format(rowcol))
    copy_grp = pm.group(em=True, name='copy_{}_grp'.format(rowcol))
    split_flap = pm.group(
        [copy_grp, rotate_grp, flaps_grp],
        name=flaps_name + '_grp'
    )
    ProgressBar.set(90, 'Adding attributes...')
    # Layout metadata + message attributes used to find the parts later.
    split_flap.addAttr('split_flap', at='bool', dv=True)
    split_flap.addAttr('layout_index', at='long', dv=layout_index)
    split_flap.addAttr('layout_row', at='long', dv=r)
    split_flap.addAttr('layout_column', at='long', dv=c)
    split_flap.addAttr('number_of_rows', at='long', dv=rows)
    split_flap.addAttr('number_of_columns', at='long', dv=columns)
    split_flap.addAttr('flaps', at='message')
    split_flap.addAttr('cloth', at='message')
    split_flap.addAttr('collider', at='message')
    split_flap.addAttr('rotate_grp', at='message')
    split_flap.addAttr('copy_grp', at='message')
    copy_grp.message.connect(split_flap.copy_grp)
    rotate_grp.message.connect(split_flap.rotate_grp)
    flaps.message.connect(split_flap.flaps)
    cloth.message.connect(split_flap.cloth)
    collider.message.connect(split_flap.collider)
    # Rename hierarchy
    ProgressBar.set(95, 'Renaming hierarchy')
    # Replace the numeric suffix with 'BASE' so wall duplicates can re-index.
    utils.replace_in_hierarchy(split_flap, r'\d+', 'BASE')
    ProgressBar.set(100, 'Done!')
    ProgressBar.hide()
    return cls(split_flap)
def mergePieces(objs=None, keepSource=False): ''' Combine the skinned objects into one nicely skinned object with clean history. ''' dups = [] merged = None try: if not objs: objs = selected() weight_data = { 'weights': [], 'joints': {}, 'jointNames': [] } for obj in objs: if not findRelatedSkinCluster(obj): warning("{0} wasn't bound, all objects must be bound to merge".format(obj)) return dups.append( duplicate(obj)[0].name(long=True) ) temp = get(obj) # Possibly add new jointNames, and then update the index references in the weighting newJointIndex = {} for i, jName in enumerate(temp['jointNames']): try: newIndex = weight_data['jointNames'].index(jName) except ValueError: newIndex = len(weight_data['jointNames']) weight_data['jointNames'].append(jName) newJointIndex[i] = newIndex altered = [[(newJointIndex[oldIndex], val) for oldIndex, val in w] for w in temp['weights']] weight_data['weights'] += altered #weight_data['joints'].update( temp['joints'] ) # Cheesily assume all joints are required weight_data['required'] = list(range(len(weight_data['jointNames']))) merged = polyUnite( dups )[0] delete( merged, ch=True ) for dup in dups: if objExists(dup): delete(dup) apply( merged, weight_data ) if not keepSource: delete(objs) return merged except Exception: print( traceback.format_exc() ) warning('An error occurred trying to merge the skinned objects') if merged: delete(merged) if dups: delete(dups) raise
def LeaveFace(uvls=None,**op): if uvls is None: uvls=[] if op.get('tg'): tgMesh=op.get('tg') else: tgMesh='Null' if tgMesh=='Null' or len(uvls)==0: return None pm.select( pm.polyListComponentConversion(tgMesh, tf=1) ) tgAllFace=pm.ls(sl=1,fl=1) pm.select( pm.polyListComponentConversion(uvls, tf=1) ) tgSlFace=pm.ls(sl=1,fl=1) delFaceLs=list( set(tgAllFace)-set(tgSlFace) ) pm.delete(delFaceLs) slMesh=pm.ls(sl=1)[0] uvShellLs=UVShell(slMesh) copyMeshLs=[] for i in xrange(len(uvShellLs)): copyMeshLs.append( pm.duplicate(slMesh, n='%sCopy%d_geo'% (slMesh.name(),i+1) )[0] ) slTgUVLs=ChangeUVListTarget(uvShellLs[i],tg=copyMeshLs[i]) LeaveFace(slTgUVLs, tg=copyMeshLs[i]) if len(copyMeshLs)>1: dvdMesh=pm.polyUnite(copyMeshLs,ch=0,muv=1, cp=0,n='%sDvd_geo'% slMesh.name() )[0] else: dvdMesh=copyMeshLs[0].rename('%sDvd_geo'% slMesh.name()) unfoldMesh=pm.duplicate(dvdMesh, n='%sUnfold_geo'% slMesh.name() )[0] pm.select( pm.polyListComponentConversion(unfoldMesh, tuv=1) ) unfoldMeshUVLs=pm.ls(sl=1,fl=1) for ufuv in unfoldMeshUVLs: uvCoor=pm.polyEditUV(ufuv,q=1,u=1) tgVtx=pm.polyListComponentConversion(ufuv, tv=1)[0] pm.move(tgVtx,[ uvCoor[0]*5, 0.0, uvCoor[1]*-5 ],ws=1)
def makeWord(self, in_word):
    """ create particle word

    Builds text curves for ``in_word``, converts them to one polygon
    mesh, then emits particles from its surface that are goaled back to
    the mesh, colored by position, keyed, disturbed by a turbulence
    field, and given a semi-transparent glowing lambert shader.

    Appends the mesh to ``self.word``, the particle shape to
    ``self.wordPar`` and the shader to ``self.wordTexture``.
    """
    # in_word='maya'
    # font = 'Arial'
    # font = self.fontChoose.currentFont()
    # print self.font[-1]
    tCrvs = pm.textCurves(t=in_word, f=self.font, ch=0)
    tCrvs = pm.PyNode(tCrvs[0])
    letterNum = tCrvs.numChildren()
    letter = []
    grpWord = pm.group(em=True)
    # Planar-surface each letter's curves into polygon geometry.
    for n in range(0, letterNum):
        letterShape = pm.listRelatives(tCrvs.getChildren()[n],
                                       type='nurbsCurve', ad=True, path=True)
        letter.append(
            pm.planarSrf(letterShape, ch=1, tol=0.01, o=1, po=1)[0])
    pm.parent(letter, grpWord)
    # pm.select(grpWord)
    # polyUnite with no args works on the current selection here.
    wordshape = pm.polyUnite(ch=1, muv=1)[0]
    mc.DeleteHistory()
    wordshape = pm.PyNode(wordshape)
    self.word.append(wordshape)
    # see(wordshape)
    pm.setAttr(tCrvs + ".visibility", 0)
    wordshape.centerPivots()
    # pm.move(-8,0,0)
    pm.makeIdentity(apply=True, t=1, r=1, s=1, n=0, pn=1)
    wordshape.makeLive()
    wordshape.select()
    # Surface emitter on the word mesh; speed 0 so particles stay put.
    pm.emitter(type='surface', r=1000, spd=0)
    wordEmitter = wordshape.getChildren()[1]
    wordEmitter = pm.PyNode(wordEmitter)
    wordEmitter.cycleEmission.set(1)
    wordEmitter.maxDistance.set(5)
    # see(wordEmitter)
    # wordEmitter.select()
    wordParticle = pm.particle()[0]
    wordParticle = pm.PyNode(wordParticle)
    wordPaShape = wordParticle.getShape()
    self.wordPar.append(wordPaShape)
    pm.connectDynamic(wordParticle, em=wordEmitter)
    # Emit at rate 200 until frame 100, then stop.
    mc.setKeyframe([wordEmitter + ".rate"], v=200, t=100)
    mc.setKeyframe([wordEmitter + ".rate"], v=0, t=101)
    wordPaShape.lifespanMode.set(2)
    wordPaShape.attr("lifespan").set(5)
    wordPaShape.lifespanRandom.set(3)
    wordPaShape.particleRenderType.set(0)
    # Per-particle render attributes (multi-point display).
    wordPaShape.addAttr('colorAccum', dv=True, at='bool', internalSet=True,
                        keyable=True)
    wordPaShape.addAttr('useLighting', dv=False, at='bool',
                        internalSet=True)
    wordPaShape.addAttr('multiCount', at='long', min=1, max=60, dv=2,
                        internalSet=True)
    wordPaShape.addAttr('multiRadius', at='float', min=1, max=60, dv=0.3,
                        internalSet=True)
    wordPaShape.addAttr('normalDir', min=1, max=3, at='long',
                        internalSet=True, dv=2)
    wordPaShape.addAttr('pointSize', min=1, max=60, at='long',
                        internalSet=True, dv=2)
    wordPaShape.colorAccum.set(1)
    wordPaShape.multiCount.set(7)
    wordPaShape.pointSize.set(1)
    # Random goal UVs so particles spread over the whole surface.
    wordPaShape.addAttr('goalU', dt='doubleArray', keyable=True)
    wordPaShape.addAttr('goalV', dt='doubleArray', keyable=True)
    pm.dynExpression(wordPaShape, s='goalU=rand(0,1);\ngoalV=rand(0,1);',
                     c=1)
    wordPaShape.addAttr('rgbPP', dt='vectorArray', keyable=True)
    pm.dynExpression(wordPaShape, s='rgbPP=position;', rbd=1)
    pm.goal(wordParticle, g=wordshape, w=1, utr=0)
    # Particles hold the word shape until frame 90, then release by 100.
    pm.setKeyframe(wordParticle, attribute='goalWeight[0]', v=1, t=90)
    pm.setKeyframe(wordParticle, attribute='goalWeight[0]', v=0, t=100)
    pm.setAttr(wordshape + ".visibility", 0)
    # Turbulence field sweeps in to scatter the released particles.
    field = pm.turbulence(pos=(0, 0, 2), m=10)
    pm.connectDynamic(wordParticle, f=field)
    pm.setKeyframe(field, attribute='tx', v=12, t=100)
    pm.setKeyframe(field, attribute='tx', v=0, t=110)
    pm.parent(field, wordshape)
    # Semi-transparent glowing lambert assigned to the particles.
    lambert = pm.shadingNode('lambert', asShader=1)
    lambertSG = pm.sets(renderable=True, empty=1, noSurfaceShader=True,
                        name=lambert + "SG")
    pm.connectAttr(lambert + ".outColor", lambert + "SG.surfaceShader", f=1)
    # pm.sets(wordParticle,forceElement='lambert6SG',e=True)
    wordParticle.select()
    pm.hyperShade(a=lambertSG)
    self.wordTexture.append(lambert)
    pm.setAttr(lambert + ".transparency", 0.7, 0.7, 0.7, type='double3')
    # NOTE(review): incandescence is set twice; the 0.5 value wins.
    pm.setAttr(lambert + ".incandescence", 0.6, 0.6, 0.6, type='double3')
    pm.setAttr(lambert + ".incandescence", 0.5, 0.5, 0.5, type='double3')
    pm.setAttr(lambert + ".glowIntensity", 0.6)
    wordshape.select()
def checkAiShaderSurface(shader_node, shadingGroupNode): for i in shadingGroupNode.listConnections(): if i.nodeType() == "transform": selectionList.append(i) if selectionList: # DISABLE ALL AOVS for aov in listOfAovsEnabled: pm.setAttr(aov + ".enabled", 0) listConnected = pm.listConnections(shader_node, p=True, d=False) # CREATE FOLDER TO SAVE (AOV) folder_to_save = os.path.join(folder_to_save_base, shader_node.name()) if not os.path.exists(folder_to_save): os.makedirs(folder_to_save) # add .txt file with information of parametrs in shader fileName = os.path.join(folder_to_save, shader_node.name() + '.txt') preferencesFile = open(fileName, 'w+') for i in pm.listAttr(shader_node): value = pm.getAttr(shader_node.name() + '.' + i) try: preferencesFile.write(i) preferencesFile.write(" " + str(value)) except: pass preferencesFile.write("\n") preferencesFile.close() aovExistsV = shader_node.listConnections(s=False, t='aiWriteColor') aovExistsF = shader_node.listConnections(s=False, t='aiWriteFloat') if (len(aovExistsV)==0 and len(aovExistsF)==0): # DID NOT CREATE AOVS NODE for plug in listConnected: if "float3" in plug.type(): # VECTOR typeAttr = 'vector' nodeToConnect = pm.createNode( 'aiWriteColor' , n='aiWriteColor_AOV') else: # FLOAT typeAttr = 'float' nodeToConnect = pm.createNode( 'aiWriteFloat' , n='aiWriteFloat_AOV') attrToConnect = nodeToConnect.listAttr(st='input') attrName = nodeToConnect.listAttr(st='aovName') AOV_name = plug.outputs(p=True)[0].name().split('.')[1] pm.setAttr(attrName[0], AOV_name) if (typeAttr, AOV_name) not in listofAOVS: listofAOVS.append((typeAttr, AOV_name)) if (AOV_name != "inputs[7]"): if (AOV_name != "layer2a"): pm.setAttr("aiAOV_" + AOV_name + ".enabled", 1) else: pass pm.connectAttr(plug, attrToConnect[0]) inConnect = shadingGroupNode.listConnections(d=False, t=['aiStandardSurface', 'aiWriteColor', 'aiWriteFloat'], p=True)[0] pm.connectAttr(inConnect, nodeToConnect.listAttr(st='beauty')[0]) 
pm.connectAttr(nodeToConnect.listAttr(st='outColor')[0], shadingGroupNode.listAttr(st='surfaceShader')[0], f=True) pm.select(selectionList) pm.showHidden() s = pm.ls(sl=True) nameTr = shader_node.name() if shader_node.name() != "krissVector_black_part26_nl_shd_aiss": if (len(s) != 1): pm.polyUnite(s, n=nameTr) cmds.arnoldRenderToTexture(folder=folder_to_save, enable_aovs=True, resolution=2048, all_udims=True) pm.select(cl=True) else: # ALREADY CREATED AOVS NODE print "BAKED WAS ALREADY DONE TO %s shader" % shader_node.name() else: print "NO OBJECTS ASSIGN TO %s shader" % shader_node.name()
def create(cls, split_flap, padding=(0.2, 0)):
    '''
    Build a wall of split flaps by duplicating a base SplitFlap into a
    rows x columns grid, wiring each copy into SOuP copier arrays and an
    animation hierarchy, and combining the flap geometry.

    :param split_flap: SplitFlap object used as the template; its
        number_of_rows / number_of_columns attributes drive the layout
    :param padding: (x, y) padding in cm between SplitFlaps
    :return: a new instance of ``cls`` wrapping the wall group
    '''
    ProgressBar.setup(
        title='Creating Split Flap Wall',
        text='Duplicating base split flap...',
        maximum=100,
        parent=utils.get_maya_window()
    )
    rows = split_flap.number_of_rows.get()
    columns = split_flap.number_of_columns.get()
    bounds = split_flap.flaps.boundingBox()
    x_step = bounds.width() + padding[0]
    y_step = bounds.height() + padding[1]
    x_offset = x_step * (columns - 1) * 0.5
    y_offset = y_step * (rows - 1)
    # BUG FIX: force float division — under Python 2, `1 / columns`
    # truncates to 0 for columns > 1, so the per-flap UV shift was lost.
    u_step = 1.0 / columns
    v_step = 1.0 / rows
    uvs = utils.get_uvs(split_flap.flaps)
    uvids = utils.get_uvs_in_range(uvs, 0, 0, 9999, 9999)
    world_grp = pm.group(name='world_grp', em=True)
    anim_grp = pm.group(name='anim_grp', em=True)

    # Create soup copiers
    arrays = []
    xforms = []
    ProgressBar.set(10, 'Copying rotate geo...')
    rotators = list(split_flap.rotators)
    # First rotator creates the shared transform array; the rest feed it.
    rot_copier = utils.create_copier(
        [rotators.pop()], name=str(rotators) + '_cp')
    rot_xform, rot_copier, rot_array = rot_copier
    arrays.append(rot_array)
    xforms.append(rot_xform)
    while rotators:
        r = rotators.pop()
        copier = utils.create_copier(
            [r],
            name=str(r) + '_cp',
            in_array=rot_array
        )
        xforms.append(copier[0])
    ProgressBar.set(20, 'Copying translate geo...')
    copies = list(split_flap.copies)
    while copies:
        copy = copies.pop()
        static_copier = utils.create_copier(
            [copy],
            name=str(copy) + '_cp',
            in_array=rot_array,
            rotate=False
        )
        xforms.append(static_copier[0])
    ProgressBar.set(30, 'Copying cloth geo...')
    cloth_copier = utils.create_copier(
        [split_flap.cloth],
        name='ncloth_cp',
        in_array=rot_array
    )
    cloth_xform, cloth_copier, cloth_array = cloth_copier
    cloth_xform.hide()
    ProgressBar.set(40, 'Copying collider geo...')
    cldr_copier = utils.create_copier(
        [split_flap.collider], 'nrigid_cp', rotate=False, in_array=rot_array)
    cldr_xform, cldr_copier, cldr_array = cldr_copier
    cldr_xform.hide()
    dyn_grp = pm.group([cldr_xform, cloth_xform], name='dynamics_grp')

    split_flaps = []
    # BUG FIX: progress step per flap — the old `30 / rows * columns`
    # computed (30/rows)*columns due to precedence, and truncated under
    # Python 2 integer division.
    step = 30.0 / (rows * columns)
    i = 0
    for r in xrange(rows):
        for c in xrange(columns):
            ProgressBar.set(
                40 + i * step,
                'Creating Split Flap {}'.format(i + 1)
            )
            index_name = '{:02d}{:02d}'.format(r, c)
            name = str(split_flap.flaps).replace('BASE', index_name)
            new_flap = split_flap.flaps.duplicate(
                name=name, un=True, rc=False)[0]
            translate = (c * x_step - x_offset, r * -y_step + y_offset, 0)
            # Translate flaps
            new_flap.setTranslation(translate)
            # Create animation hierarchy
            loc = pm.spaceLocator(name='world_{}_xform'.format(index_name))
            loc.hide()
            loc.setTranslation(translate)
            jnt_name = 'anim_{}_xform'.format(index_name)
            anim_jnt = utils.create_joint(jnt_name)
            anim_jnt.setTranslation(translate)
            anim_jnt.rotateX.setKey(v=0, t=1)
            anim_jnt.rotateX.setKey(v=90, t=24)
            parent = anim_jnt
            # BUG FIX: this nesting loop used `i` as its variable, which
            # clobbered the outer flap counter — progress and flap labels
            # froze and the counter jumped by 7 per flap.
            for depth in range(6):
                rot_name = 'rot_{}_{:02d}'.format(index_name, depth)
                rot_grp = pm.group(em=True, name=rot_name)
                pm.parent(rot_grp, parent, relative=True)
                parent = rot_grp
            pm.parent(anim_jnt, anim_grp)
            pm.parent(loc, world_grp)
            pm.parentConstraint(parent, loc)
            # Shift uvs
            mesh_uvs = list(uvs)
            utils.shift_uvs(mesh_uvs, uvids, c * u_step, r * -v_step)
            utils.set_uvs(new_flap, mesh_uvs)
            split_flaps.append(new_flap)
            i += 1
    ProgressBar.set(75, 'Connecting xforms to copier arrays...')
    for i, l in enumerate(world_grp.getChildren()):
        l.rotateOrder.connect(rot_array.inTransforms[i].inRotateOrder)
        l.worldMatrix[0].connect(rot_array.inTransforms[i].inMatrix)
    ProgressBar.set(80, 'Combining flap geometry...takes awhile')
    flaps_geo = pm.polyUnite(
        split_flaps,
        ch=False,
        mergeUVSets=True,
        name='flaps_geo'
    )[0]
    ProgressBar.set(95, 'Grouping and adding attributes...')
    split_flap.pynode.hide()
    grp = pm.group(
        [flaps_geo, xforms, world_grp, anim_grp, dyn_grp],
        name='wall_grp')
    # Message attributes expose the wall's parts for later lookup.
    grp.addAttr('world_grp', at='message')
    grp.addAttr('flaps', at='message')
    grp.addAttr('cloth', at='message')
    grp.addAttr('collider', at='message')
    grp.addAttr('anim_grp', at='message')
    grp.addAttr('dyn_grp', at='message')
    world_grp.message.connect(grp.world_grp)
    flaps_geo.message.connect(grp.flaps)
    anim_grp.message.connect(grp.anim_grp)
    cloth_xform.message.connect(grp.cloth)
    cldr_xform.message.connect(grp.collider)
    dyn_grp.message.connect(grp.dyn_grp)
    ProgressBar.set(100, 'Done!')
    ProgressBar.hide()
    return cls(grp)