def doIt( self, argList ):
    """Creates forest instances from a scene object.

    Expected MEL-style argument list (via argList):
        0: SpeedTreeForest node name
        1: particle cloud shape name
        2: tree object (transform) to instance, or '' to skip
        3: forestObjects[] slot index on the forest node
        4: (optional) prefix for the instancer node name; defaults to
           partCloudShape with its last 5 characters stripped
           # NOTE(review): the [:-5] slice presumably strips a 'Shape'
           # suffix -- confirm against callers.
    """
    argsLen = argList.length()
    if argsLen >= 4:
        SpeedTreeForest = argList.asString(0)
        partCloudShape = argList.asString(1)
        tree = argList.asString(2)
        setIndex = argList.asInt(3)
        if argsLen == 5:
            instancerPrefix = argList.asString(4)
        else:
            instancerPrefix = partCloudShape[:-5]
        instancerName = instancerPrefix + '_Instancer'
        treeShape = ''
        partInstancer = ''
        SpeedTreeAsset = ''
        # Source plug currently feeding this forestObjects[] slot ('' if none).
        treePlug = mc.connectionInfo( SpeedTreeForest + '.forestObjects[' + str( setIndex ) + ']', sfd=1 )
        if tree != '':
            if mc.nodeType( tree ) != None:
                # Resolve the first shape under the tree transform, if any.
                treeShapes = mc.listRelatives( tree, s=1 )
                if treeShapes != None and len( treeShapes ) > 0:
                    treeShape = treeShapes[0]
                supportedTreeTypes = [ 'transform', 'mesh', 'nurbsSurface', 'nurbsCurve', 'subdiv' ]
                assignForestObject = False
                #remove existing object if there is one
                cons = mc.listConnections( partCloudShape )
                if ( cons != None and len( cons ) > 0 and mc.objExists( cons[0] ) and mc.nodeType( cons[0] ) == 'instancer' and mc.nodeType( treeShape ) != 'particle' ):
                    # An instancer already exists: swap its instanced object.
                    if treePlug != '':
                        # Drop the previously instanced object (index 0) first.
                        oldObj = mel.eval( "instancer -index 0 -q -obj " + cons[0] )
                        if oldObj != '':
                            mc.particleInstancer( partCloudShape, n=cons[0], edit=True, obj=oldObj, rm=True )
                    partInstancer = mc.particleInstancer( partCloudShape, n=cons[0], edit=True, obj=tree, a=True )
                    assignForestObject = True
                elif mc.nodeType( tree ) in supportedTreeTypes and mc.nodeType( treeShape ) != 'particle':
                    # No usable instancer yet: create one driven by the
                    # particle cloud's per-particle position/rotation/scale.
                    partInstancer = mc.particleInstancer( partCloudShape, n=instancerName, obj=tree, a=True, lod='geometry', p='position', r='rotationPP', sc='UserVector1PP' )
                    # Mirror the forest's bounding-box display preference.
                    useBBs = mc.getAttr( SpeedTreeForest + ".bb" )
                    mc.setAttr( partInstancer + '.lod', useBBs )
                    assignForestObject = True
                if assignForestObject:
                    if treePlug != '':
                        # Detach the old forest object and make it visible again.
                        mc.disconnectAttr( treePlug, SpeedTreeForest + '.forestObjects[' + str( setIndex ) + ']' )
                        mc.setAttr( treePlug.split( '.' )[0] + '.visibility', 1 )
                    mc.connectAttr( tree + '.message', SpeedTreeForest + '.forestObjects[' + str( setIndex ) + ']' )
                    #create asset
                    # Containers (assets) only exist from Maya 2009 onwards.
                    if MayaVersion >= 2009:
                        SpeedTreeAsset = mc.container( q=True, findContainer=[ partCloudShape ] )
                        if partInstancer != '' and SpeedTreeAsset != None and SpeedTreeAsset != '':
                            mc.container( SpeedTreeAsset, edit=True, addNode=partInstancer, force=True )
def instanceThis():
    """Attach the last-selected object to the 'instancePositions' instancer.

    Does nothing when the selection is empty or when no node called
    'instancePositions' exists in the scene.
    """
    # BUGFIX: the original called cmds.ls(sl=True).pop(), which raises
    # IndexError when nothing is selected; guard first, then take the
    # last selected item (same element .pop() returned).
    selected = cmds.ls(sl=True)
    if not selected:
        return
    objname = selected[-1]
    #if there are particles
    if cmds.objExists('instancePositions'):
        #make an instancer with the particles with object selected.
        cmds.particleInstancer('instancePositionsShape', addObject=True, object=objname)
def createInstancer(self,insterName,emitSurfPtcsShape): '''create the instancer node based on the dress type''' print "creating the instancer" cmds.particleInstancer(emitSurfPtcsShape, name=insterName, cycle = 'None', cycleStep= 1, cycleStepUnits ='Frames',levelOfDetail='Geometry', rotationUnits ='Radians', rotationOrder= 'XYZ', position= 'worldPosition',age='age') cmds.particleInstancer(emitSurfPtcsShape,name=insterName, edit = True,objectIndex = 'indexPP') cmds.particleInstancer(emitSurfPtcsShape,name=insterName, edit = True,scale = 'scalePP') cmds.particleInstancer(emitSurfPtcsShape,name=insterName, edit = True,aimDirection = 'goalWorldNormal0PP',aimUpAxis = 'surfRotPP') dressType = cmds.getAttr(emitSurfPtcsShape+'.dressType') if dressType == 'all': for geo in cmds.listRelatives('dressExampleGrp', c = True, f = True): cmds.particleInstancer(emitSurfPtcsShape,name=insterName, edit = True,addObject=True, object=geo) return
def w06_MainCmd(self, *args):
    """Collect the bake options from the w06 UI widgets and run the bake."""
    parName = cmds.textField('w06_uiMessage', q=True, text=True)
    # Guard clause: the field still shows the placeholder prompt,
    # so no particle object has been picked up yet.
    if parName == "Select a particle object and click Get":
        return
    parNode = self.w06_getParShape(parName)[1]
    startFrame = int(cmds.intField('w06_startField', q=True, v=True))
    endFrame = int(cmds.intField('w06_endField', q=True, v=True))
    # Any channel whose checkbox is off is excluded from the bake.
    exclusiveAttrs = []
    channel_boxes = (('w06_uiBakePosition', 'position'),
                     ('w06_uiBakeRotate', 'rotation'),
                     ('w06_uiBakeScale', 'scale'))
    for box, attr in channel_boxes:
        if not cmds.checkBox(box, q=True, v=True):
            exclusiveAttrs.append(attr)
    # Extra per-particle attributes come from the scroll list when enabled.
    if cmds.checkBox('w06_uiAttrListSwitch', q=True, v=True):
        otherParAttrs = cmds.textScrollList("w06_uiAttrList", q=True, si=True)
    else:
        otherParAttrs = []
    #print 'w06_doBake', parNode, startFrame, endFrame, exclusiveAttrs, otherParAttr
    # Option menus are 1-based; the instancer list query is 0-based.
    insIndex = cmds.optionMenu('w06_uiInstancers', q=True, select=True)
    instancerNode = cmds.particleInstancer(parNode, q=True, name=True)[insIndex - 1]
    self.w06_doBake(parNode=parNode,
                    instancerNode=instancerNode,
                    startFrame=startFrame,
                    endFrame=endFrame,
                    exclusiveAttrs=exclusiveAttrs,
                    otherParAttr=otherParAttrs,
                    renameChild=cmds.checkBox('w06_uiRenameChildren', q=True, v=True),
                    dup_connect=cmds.checkBox('w06_uiInputCon', q=True, v=True),
                    dup_instance=cmds.checkBox('w06_uiInstanceLeaf', q=True, v=True),
                    )
def w06_setPartileName(self, *args):
    """Show the picked particle shape in the UI text field and repopulate
    the instancer option menu with the instancers attached to it.

    Falls back to the placeholder prompt when no particle shape is found.
    """
    parNode = self.w06_getParShape()
    fieldStr = parNode[0] if parNode else "Select a particle object and click Get"
    cmds.textField("w06_uiMessage", e=True, text=fieldStr)
    if not parNode:
        return
    # Clear menu items left over from a previous selection.
    ins = cmds.optionMenu('w06_uiInstancers', q=True, itemListLong=True)
    if ins:
        cmds.deleteUI(ins)
    insNodes = cmds.particleInstancer(parNode[1], q=True, name=True)
    # FIX: removed leftover debug statement (print 'b', insNodes).
    if insNodes:
        for ins in insNodes:
            cmds.menuItem(label=ins, p='w06_uiInstancers')
def miSequence(start=None, end=None): '''{'path':'Rendering/Render/miSequence()ONLYSE', 'icon' : ':/menuIconRender.png', 'tip' : '将物体转为mentalray代理的粒子替代', 'usage':'$fun( start=0, end=100)', } ''' projDir = cmds.workspace(q=1, rootDirectory=1) proxyFolder = os.path.join(projDir, 'data/mrProxyFiles') if os.path.exists(proxyFolder) == False: os.makedirs(proxyFolder) exportObjs = cmds.ls(sl=True) if start == None: start = cmds.playbackOptions(q=True, min=True) if end == None: end = cmds.playbackOptions(q=True, max=True) end = end + 1 pad = 5 padStr = '%0' + '%sd' % (pad) #print padStr subFolder = exportObjs[0] finalFolder = proxyFolder + '/' + subFolder if os.path.exists(finalFolder) == False: os.makedirs(finalFolder) import mentalray.renderProxyUtils proxyObjs = [] for i in range(start, end): cmds.currentTime(i) exec('frameStr = padStr%(i)') miName = '%s_s.%s.mi' % (exportObjs[0], frameStr) miFilePath = finalFolder + '/' + miName print miFilePath #miFilePath = 'F:/t.mi' cmds.select(exportObjs, r=True) mel.eval( 'Mayatomr -mi -exportFilter 721600 -active -binary -fe -fem -fma -fis -fcd -pcm -as -asn "%s" -xp "3313333333" -file "%s";' % (exportObjs[0] + "_s", miFilePath)) proxyName = miName.replace('.', '-') proxyObj = cmds.polyCube(ch=False, name=proxyName)[0] proxyObjs.append(proxyObj) boxShape = cmds.listRelatives(proxyObj, shapes=True)[0] cmds.setAttr(proxyObj + '.miProxyFile', miFilePath, type='string') mentalray.renderProxyUtils.resizeToBoundingBox(boxShape) cmds.setAttr(proxyObj + ".t", lock=True) cmds.setAttr(proxyObj + ".r", lock=True) cmds.setAttr(proxyObj + ".s", lock=True) cmds.currentTime(start) nPar, parShape = cmds.nParticle() cmds.setAttr(parShape + '.collide', 0) cmds.setAttr(parShape + ".conserve", 0) cmds.setAttr(parShape + ".dynamicsWeight", 0) cmds.setAttr(parShape + ".particleRenderType", 2) #cmds.setAttr( parShape+".selectedOnly", 1) cmds.addAttr(parShape, ln="cus_index0", dt="doubleArray") cmds.addAttr(parShape, ln="cus_index", dt="doubleArray") 
mel.eval('emit -object nParticle1 -pos 0 0 0') expString = "cus_index = clamp(0, %i, frame-%i);" % (end - start - 1, start) cmds.dynExpression(parShape, s=expString, c=True) cmds.dynExpression(parShape, s=expString, rbd=True) instancerObj = cmds.particleInstancer(parShape, addObject=True, object=proxyObjs, cycle='None', cycleStep=1, cycleStepUnits='Frames', levelOfDetail='Geometry', rotationUnits='Degrees', rotationOrder='XYZ', position='worldPosition', age='age') cmds.particleInstancer(parShape, e=True, name=instancerObj, objectIndex='cus_index') cmds.saveInitialState(parShape) miGrp = cmds.group(proxyObjs, n=exportObjs[0] + '_s_mi') cmds.setAttr(miGrp + '.v', False) cmds.setAttr(miGrp + '.v', lock=True) papaGrp = cmds.group([nPar, miGrp, instancerObj], n=exportObjs[0] + '_mi_group') for obj in (nPar, miGrp, instancerObj, papaGrp): cmds.setAttr(obj + ".t", lock=True) cmds.setAttr(obj + ".r", lock=True) cmds.setAttr(obj + ".s", lock=True)
def __publish_maya_nParticle_export(self, item, output, work_template, primary_publish_path, sg_task, comment, thumbnail_path, progress_cb):
    """
    Publish a Maya export selected for the selected nParticle systems and publish it to Shotgun.

    Builds a selection of the nParticle systems plus everything they need
    (instancers, instanced geometry, cache nodes, nucleus nodes, display
    layers), copies particle caches to the publish area, deletes everything
    else from the scene, exports the remainder, reopens the original file
    and registers the publish with Shotgun.

    :param item: The item to publish
    :param output: The output definition to publish with
    :param work_template: The work template for the current scene
    :param primary_publish_path: The path to the primary published file
    :param sg_task: The Shotgun task we are publishing for
    :param comment: The publish comment/description
    :param thumbnail_path: The path to the publish thumbnail
    :param progress_cb: A callback that can be used to report progress
    """
    # determine the publish info to use
    #
    print("Running maya export selected command...")
    progress_cb(10, "Determining publish details")
    # get the current scene path and extract fields from it
    # using the work template:
    scene_path = os.path.abspath(cmds.file(query=True, sn=True))
    fields = work_template.get_fields(scene_path)
    publish_version = fields["version"]
    tank_type = output["tank_type"]
    # "grp_name" is the top-level DAG group the item lives under.
    fields["grp_name"] = str(item['name']).split("|")[1]
    print("Got fields...")
    print("Applying fields...")
    # create the publish path by applying the fields
    # with the publish template:
    publish_template = output["publish_template"]
    publish_path = publish_template.apply_fields(fields)
    # publish name, NO VERSION NUMBERS HERE
    # This should be a name that's the same accross all publishes so the Loader app can group them to create a version history
    # @TODO: Set up a template or something to handle this nicely, ideally this would be: basename.(publish_path) - {version} and {extension}
    publish_name = "nParticle_" + fields["grp_name"] + "_" + fields["name"]
    #publish_name = os.path.basename(publish_path)
    # Also get the cache template to copy over the particle cache as well
    # @TODO: Read this from *_step.yml, for now I'm just applying a naming convention
    cache_template = sgtk.platform.current_engine().get_template_by_name(output["publish_template"].name.replace("nparticle","nparticle_cache"))
    print("Applied fields to templates...")
    print(publish_path)
    # Find additional info from the scene:
    #
    progress_cb(10, "Analysing scene")
    # The main body of this export:
    # Select each node in the hierarchy above a particle system and a bunch of related nodes, such as instancers, cache nodes and the nucleus node
    selection = []
    for nParticle_object in cmds.ls(item["name"], dag=True, type="nParticle", long=True):
        # get the name of the particle system
        split_path = str(nParticle_object).split("|")[1:]
        print(split_path)
        # get all instancers connected to the particle system and append it to the selection list
        con = cmds.listConnections(split_path[len(split_path)-1:], type="instancer")
        if con:
            for i in set(con):
                selection.extend(cmds.ls(i, long=True))
            print(con)
        # get all instances connected to the instancer and append it to the selection list
        if con:
            inst = cmds.particleInstancer(split_path[len(split_path)-1:], q=True, object=True)
            if inst:
                for i in set(inst):
                    selection.extend(cmds.ls(i, long=True))
                print(inst)
        # get the cache node, append it to the selection list and copy the cache to the publishing directory
        cache = cmds.listConnections(split_path[len(split_path)-1:], type="cacheFile")
        print(cache)
        if cache:
            for i in set(cache):
                print("copy cache loop!")
                # append cache node to selection
                selection.extend(cmds.ls(i, long=True))
                # modify a copy of "fields" to add the node name, the "grp_name" field is used for this
                # This is done to make sure all caches stay unique and with the correct node
                # NOTE(review): this is an alias, not a copy -- mutations
                # would leak into "fields". Harmless while the mutation
                # below stays commented out, but confirm before re-enabling.
                temp_fields = fields
                #temp_fields["grp_name"] = "{0}".format(cmds.getAttr('{0}.cacheName'.format(i)))
                cache_path = cache_template.apply_fields(temp_fields).replace("\\", "/")
                print("start copy!")
                progress_cb(30, "Copying Cache")
                # Copy the cache files to the publishing folder, this takes a while
                # @TODO: More interactive feedback for the user
                # @TODO2: Optimize, run this in background so the user can continue working and
                # make sure the files aren't copied locally first if they're moving from 1 location on the server
                # to another location on the server
                dirutil.copy_tree(cmds.getAttr('{0}.cachePath'.format(i)), cache_path)
                print("copy done!")
                progress_cb(50, "Analyzing scene")
                # Set the path of the cache node to the new location,
                # for some reason the setAttr MEL command for this is more reliable than the python setAttr command...
                mel.eval('setAttr -type "string" {0}.cachePath "{1}"'.format(i, cache_path))
                print("cache path set!")
        # Get all nucleus nodes and append them to the selection list
        nucleus = cmds.listConnections(split_path[len(split_path)-1:], type="nucleus")
        if nucleus:
            for i in set(nucleus):
                selection.extend(cmds.ls(i, long=True))
            print(nucleus)
        # Get all display layers and appends them to the selection list
        #for i in cmds.ls(type="displayLayer"):
        #    selection.extend(cmds.ls(i, long=True))
        selection.append(nParticle_object)
        # Remove all expressions from the particle systems in the selection as these can cause issues
        cmds.dynExpression(nParticle_object, c=True, s="")
        cmds.dynExpression(nParticle_object, rbd=True, s="")
        cmds.dynExpression(nParticle_object, rad=True, s="")
    # Get display layers the object is in and appends them to the selection list
    print(selection)
    # NOTE(review): the inner loop reuses the index variable "i" and the
    # list is extended while iterating over its original length; layers
    # appended here are not themselves scanned for further connections.
    for i in range(len(selection)):
        split_sel = selection[i].split("|")
        print split_sel
        if len(split_sel) > 1:
            dispLayer = cmds.listConnections(list(split_sel[1:]), type="displayLayer")
        else:
            dispLayer = cmds.listConnections(str(split_sel[0]), type="displayLayer")
        print(dispLayer)
        if dispLayer:
            for i in set(dispLayer):
                selection.extend(cmds.ls(i, long=True))
    # Make a set of the selection list to get rid of duplicate entries
    selection = set(selection)
    print("test")
    print(selection)
    # Because we want to delete everything except what is in the selection list
    # we select everything and then deselect the things in the selection list
    cmds.select(all=True, hi=True)
    for i in selection:
        split_sel = i.split("|")
        if len(split_sel) > 1:
            cmds.select(split_sel[1:], deselect=True)
        else:
            cmds.select(split_sel[0], deselect=True)
    print("test2")
    self.parent.log_debug("Executing delete, export selection and reopen")
    # delete everything that is selected now
    cmds.delete()
    print("test3")
    # clear the selection, just in case things didn't get deleted for some reason (ie, maya system nodes, reference nodes, etc.)
    # then select everything to be exported again
    cmds.select(clear=True)
    for i in selection:
        split_sel = i.split("|")
        if len(split_sel) > 1:
            cmds.select(split_sel[1:], add=True)
        else:
            cmds.select(split_sel[0], add=True)
    progress_cb(60, "Export scene and reopening file")
    # get the current file, so we can reopen it after saving
    old_file = cmds.file(q=True,sceneName=True)
    # rename the file and "export selected"
    cmds.file(rename=publish_path)
    cmds.file(exportSelected=True,type='mayaAscii',force=True)
    # create a new empty file in case it's easier to open from
    cmds.file(new=True, force=True)
    # open the old file from before for the publish registration, otherwise the template won't fit
    # could this cause issues?
    cmds.file(old_file, open=True)
    print("Registering Export Publish")
    # all data to register the publish
    # NOTE!!: publish_path has to have backslashes, forward slashes cause the publish to fail!
    progress_cb(75, "Registering the publish")
    args = {
        "tk": self.parent.tank,
        "context": self.parent.context,
        "comment": comment,
        "path": publish_path,
        "name": publish_name,
        "version_number": publish_version,
        "thumbnail_path": thumbnail_path,
        "task": sg_task,
        "dependency_paths": [primary_publish_path],
        "published_file_type":tank_type
    }
    # register the publish
    tank.util.register_publish(**args)
    print("Finished Registering Export Publish")
def replicate(name, asset_mapping, attribute_mapping=None):
    """Replicate the Houdini asset in Maya for publishing

    As a Houdini asset has each shape separated into its own instancer we
    need to merge the shapes back into one instancer. It is possible that
    the order of the instanced shapes differs from the Houdini asset. This
    will need to be update manually by the artist.

    Args:
        name(str): name of the asset
        asset_mapping(dict): data collection of Houdini Asset and its
            instancers
        attribute_mapping(dict): data to link particle attributes to
            instancer
            Example: {"scale": "radiusPP", "objectIndex": "index"}

    Returns:
        bool
    """
    if not attribute_mapping:
        attribute_mapping = {}

    # Pre-flight check: reuse the first nucleus in the scene, or make one.
    existing_nuclei = cmds.ls(type="nucleus")
    nucleus = existing_nuclei[0] if existing_nuclei else cmds.createNode("nucleus")

    # Flight
    name += "_"  # add underscore as divider
    suffix = "_GRP"
    # suffix is not included in return value of lib.unique_name()
    unique_name = lib.unique_name(name, format="%03d", suffix=suffix) + suffix
    asset_group = cmds.group(empty=True, name=unique_name)

    for index, (asset, mapping) in enumerate(asset_mapping.items()):
        particle_data = mapping.get("particle_system", None)
        if particle_data is None:
            raise RuntimeError("Incomplete mapping of asset '%s',"
                               " missing particle_system" % asset)
        particle_system = particle_data.keys()[0]

        # Get connection to particleArrayData
        source_plugs = cmds.listConnections(
            "{}.cacheArrayData".format(particle_system), plugs=True) or []
        assert len(source_plugs) == 1, "This is a bug"
        source_plug = source_plugs[0]

        # Duplicate the particle system under a unique publish name.
        duplicates = cmds.duplicate(particle_system,
                                    name="{}{:03d}_PART".format(name, index))
        assert len(duplicates) == 1, ("This is a bug, duplicated '%s' "
                                      "nParticle nodes" % len(duplicates))
        duplicate = duplicates[0]

        # Connect particle array data to cache array data
        cmds.connectAttr(source_plug, "{}.cacheArrayData".format(duplicate))

        # Link to nucleus
        cmds.select(clear=True)
        cmds.select(duplicate)
        mel.eval("assignNSolver {}".format(nucleus))
        cmds.parent(duplicate, asset_group)

        # Merge the asset's shapes into one instancer on the duplicate.
        instancer = cmds.particleInstancer(
            duplicate,
            name="{}{:03d}_INST".format(name, index),
            object=mapping.get("hierarchy", []),
            **attribute_mapping)

        # Force all types to True in UI ( no other way )
        if cmds.checkBoxGrp("AEdisplayAllTypes", query=True, exists=True):
            cmds.checkBoxGrp("AEdisplayAllTypes", edit=True, v1=True)

        # Set rotation attributes
        cmds.setAttr("{}.rotationAngleUnits".format(instancer), 1)
        cmds.setAttr("{}.rotationOrder".format(instancer), 0)

        try:
            cmds.parent(instancer, asset_group)
        except RuntimeError:
            pass

    return True
# Scale the 'emi' surface and use it as a surface emitter for the cone particles.
cmds.setAttr("emi.scaleY", 20)
cmds.setAttr("emi.scaleZ", 20)
cmds.select('emi')
main_emitter = cmds.emitter(name='myEmitter', speed=30, speedRandom=10, rate=0.005)
main_particles = cmds.particle(name='myParticle')
cmds.connectDynamic(main_particles[0], em=main_emitter)
cmds.setAttr("myEmitter.emitterType", 2)

# Pull the particles down with a gravity field.
gravity_field = cmds.gravity(name='myGravity')
cmds.connectDynamic(main_particles, f=gravity_field)
cmds.setAttr("myGravity.magnitude", 4)

# Instance the cone on every particle, aimed along its velocity.
cmds.select('pCone1', 'myParticle')
cmds.particleInstancer("myParticle", object='pCone1', aimDirection='velocity')

# Secondary emitter riding on the main particles produces a short-lived trail.
cmds.select('myParticle')
trail_emitter = cmds.emitter(name='trailEmitter', rate=1000, spread=1, speed=4, speedRandom=3)
trail_particles = cmds.particle(name='trailParticle')
cmds.connectDynamic(trail_particles[0], em=trail_emitter)
trail_settings = (
    ("trailEmitter.emitterType", 0),
    ("trailParticleShape.lifespanMode", 2),
    ("trailParticleShape.lifespan", 0.3),
    ("trailParticleShape.lifespanRandom", 0.2),
    ("trailParticleShape.particleRenderType", 8),
)
for attr, value in trail_settings:
    cmds.setAttr(attr, value)