def doTestForKwargPermutation(loaded, unloaded, expected):
    # Exercise listReferences' loaded/unloaded filters for one kwarg combo.
    # NOTE(review): this function uses ``self`` below but takes no ``self``
    # parameter -- it was presumably defined as a closure inside a test
    # method where ``self`` is captured; confirm before relocating it.
    kwargs = {}
    if loaded is not None:
        kwargs['loaded'] = loaded
    if unloaded is not None:
        kwargs['unloaded'] = unloaded
    allRefs = pm.listReferences(recursive=True)
    # 'expected' is a symbolic name resolved against the actual scene refs.
    if expected == 'all':
        expected = set(allRefs)
    elif expected == 'loaded':
        expected = set(x for x in allRefs if x.isLoaded())
    elif expected == 'unloaded':
        expected = set(x for x in allRefs if not x.isLoaded())
    elif expected == 'none':
        expected = set()
    else:
        raise ValueError(expected)
    result = set(pm.listReferences(recursive=True, **kwargs))
    self.assertEqual(result, expected)
    # Non-recursive listing should return only top-level refs (no parent).
    expected = set(x for x in expected if x.parent() is None)
    result = set(pm.listReferences(recursive=False, **kwargs))
    self.assertEqual(result, expected)
def remove_namespace_from_reference():
    """Re-reference the first referenced file without a namespace.

    Removes the scene's first reference and re-creates it with the root
    namespace so its nodes are no longer namespaced. Warns when the scene
    has no references at all.
    """
    refs = pm.listReferences()
    if not refs:
        pm.warning("No namespaces found")
        return
    first_ref = refs[0]
    ref_path = first_ref.path
    first_ref.remove()
    pm.createReference(ref_path, namespace=":", mergeNamespacesOnClash=False)
def _importAllReferences(self):
    """Import the contents of every loaded reference into the scene.

    Repeats until no loaded references remain, because importing a
    reference can expose nested references that then appear at the top
    level. Unloaded references cannot be imported and are skipped.

    Returns:
        bool: always True (kept for API compatibility with callers).
    """
    self.UpdateLog("Importing all references...")
    # Bug fix: the original loop tracked a single ``done`` flag that only
    # reflected the *last* reference visited, so it could terminate while
    # loaded references were still present in the scene.
    while True:
        refs = pm.listReferences()
        loaded_refs = [ref for ref in refs if ref.isLoaded()]
        if not loaded_refs:
            break
        self.UpdateLog("Importing " + str(len(refs)) + " references.")
        for ref in loaded_refs:
            ref.importContents()
    self.UpdateLog("Done importing references...")
    return True
def scan_scene(self):
    """Collect reference, file-texture and scene entries into ``self.refs``.

    Each entry is a dict with keys "node", "type" and "path".
    Returns ``self.refs``.
    """
    # Maya file references; Maya always reports C:/style/paths, so the
    # separators are converted to the current platform's.
    for reference in pm.listReferences():
        self.refs.append({
            "node": reference.refNode.longName(),
            "type": "reference",
            "path": reference.path.replace("/", os.path.sep),
        })

    # File texture nodes that belong to this scene; nodes living inside a
    # reference are already covered by their reference entry above.
    for tex_node in cmds.ls(l=True, type="file"):
        if cmds.referenceQuery(tex_node, isNodeReferenced=True):
            continue
        tex_path = cmds.getAttr("%s.fileTextureName" % tex_node).replace("/", os.path.sep)
        self.refs.append({"node": tex_node, "type": "file", "path": tex_path})

    # Finally, the scene file itself.
    self.refs.append({"node": None, "type": "scene", "path": pm.sceneName()})
    return self.refs
def execute(self, **kwargs):
    """Scan the scene and return its external dependencies.

    Returns a list of dicts with keys "node", "type" and "path", covering
    Maya file references and non-referenced file texture nodes.
    """
    dependencies = []

    # Maya file references; convert Maya's forward-slash paths to the
    # platform separator.
    for reference in pm.listReferences():
        dependencies.append({
            "node": reference.refNode.longName(),
            "type": "reference",
            "path": reference.path.replace("/", os.path.sep),
        })

    # File texture nodes belonging to this scene; nodes embedded in a
    # reference are excluded from the breakdown.
    for tex_node in cmds.ls(l=True, type="file"):
        if cmds.referenceQuery(tex_node, isNodeReferenced=True):
            continue
        tex_path = cmds.getAttr("%s.fileTextureName" % tex_node).replace("/", os.path.sep)
        dependencies.append({"node": tex_node, "type": "file", "path": tex_path})

    return dependencies
def test_listReferences(self):
    """Top-level listing should return exactly the four direct references."""
    expected_namespaces = ['sphere1', 'sphere2', 'cube1', 'cone1']
    expected = set(pm.FileReference(namespace=ns) for ns in expected_namespaces)
    self.assertEqual(set(pm.listReferences()), expected)
def test_failed_ref_edits(self):
    """Deleting a referenced attr that carries animation must produce
    exactly one failed ref edit per sphere reference."""
    # Key the zombieAttr on every mesh transform that has one.
    for transform in [m.getParent() for m in pm.ls(type='mesh')]:
        try:
            zombie = transform.attr('zombieAttr')
        except pm.MayaAttributeError:
            continue
        for frame, value in ((1, 1), (2, 2), (3, 4)):
            zombie.setKey(t=frame, v=value)
    self.masterFile = pm.saveAs(os.path.join(self.temp, 'master.ma'), f=1)

    # Delete the attribute inside the referenced file itself; the anim
    # edits in the master can then no longer apply.
    pm.openFile(self.sphereFile, f=1)
    pm.SCENE.pSphere1.zombieAttr.delete()
    pm.saveFile(f=1)

    # Re-open the master and verify the failed edits are reported by both
    # the pymel and the cmds query paths.
    pm.openFile(self.masterFile, f=1)
    sphere_refs = [r for r in pm.listReferences(recursive=True)
                   if r.path.endswith('sphere.ma')]
    for ref in sphere_refs:
        print("testing failed ref edits on: %s" % ref)
        failed_pm = pm.referenceQuery(ref, successfulEdits=False,
                                      failedEdits=True, es=True)
        self.assertEqual(1, len(failed_pm))
        failed_cmds = cmds.referenceQuery(str(ref.refNode),
                                          successfulEdits=False,
                                          failedEdits=True, es=True)
        self.assertEqual(1, len(failed_cmds))
def check_no_references():
    """Raise PublishError when the scene contains any references.

    Model scenes must be reference-free.
    """
    if pm.listReferences():
        raise PublishError(
            'There should be no <b>References</b> in a <b>Model</b> scene.'
        )
def check_if_previous_version_references():
    """Raise PublishError when the scene references an older version of itself.

    Only first-level references are inspected; a reference counts when it
    resolves to a Version with the same task and take name as the scene's
    current version.
    """
    from anima.env.mayaEnv import Maya
    maya_env = Maya()
    current_version = maya_env.get_current_version()
    if current_version is None:
        return

    offending_refs = []
    for ref in pm.listReferences():  # check only 1st level references
        ref_version = maya_env.get_version_from_full_path(ref.path)
        if not ref_version:
            continue
        same_task = ref_version.task == current_version.task
        same_take = ref_version.take_name == current_version.take_name
        if same_task and same_take:
            offending_refs.append(ref)

    if offending_refs:
        print('The following nodes are references to an older version of this '
              'scene')
        print('\n'.join(map(lambda x: x.refNode.name(), offending_refs)))
        raise PublishError(
            'The current scene contains a <b>reference</b> to a<br>'
            '<b>previous version</b> of itself.<br><br>'
            'Please remove it!!!'
        )
def test_file_reference_creation(self):
    """FileReference must be constructible from its node, path and namespace,
    and each construction must compare equal to the original reference."""
    for ref in pm.listReferences(recursive=True):
        equivalents = [
            pm.FileReference(pm.PyNode(ref.refNode)),
            pm.FileReference(str(ref.refNode)),
            pm.FileReference(pm.Path(ref.withCopyNumber())),
            pm.FileReference(str(ref.withCopyNumber())),
            pm.FileReference(namespace=ref.fullNamespace),
        ]
        for other in equivalents:
            self.assertEqual(ref, other)
def get_referenced_versions(self):
    """Return the Versions referenced in the current scene.

    Each entry is a ``(Version, Reference, full_path)`` tuple, where
    ``full_path`` is the reference path normalized to an absolute path
    (environment variables and ``~`` expanded, backslashes converted).
    The result is sorted by path; identical paths share one Version
    lookup.
    """
    valid_versions = []

    # Pair every first-level reference with its normalized path.
    refs_and_paths = []
    for reference in pm.listReferences():
        full_path = os.path.expandvars(
            os.path.expanduser(
                os.path.normpath(reference.path)
            )
        ).replace("\\", "/")
        refs_and_paths.append((reference, full_path))

    # Sort by path so repeated paths are adjacent and each distinct path
    # is resolved to a Version only once.
    # Fixed: ``sorted(x, None, keyfunc)`` passed None as the Python-2-only
    # positional ``cmp`` argument; use the ``key=`` keyword instead.
    refs_and_paths.sort(key=lambda pair: pair[1])

    prev_version = None
    prev_full_path = ''
    for reference, full_path in refs_and_paths:
        if full_path == prev_full_path:
            # Same file as the previous reference; reuse its Version.
            valid_versions.append((prev_version, reference, prev_full_path))
        else:
            temp_version = self.get_version_from_full_path(full_path)
            if temp_version:
                # TODO: don't use the full_path here, it can be get from
                # version instance itself
                valid_versions.append((temp_version, reference, full_path))
                prev_version = temp_version
                prev_full_path = full_path

    # return a sorted list
    return sorted(valid_versions, key=lambda item: item[2])
def test_failed_ref_edits(self):
    """Each sphere reference should report exactly one failed ref edit
    after createFailedEdits() has run."""
    self.createFailedEdits()
    sphere_refs = [r for r in pm.listReferences(recursive=True)
                   if r.path.endswith('sphere.ma')]
    for ref in sphere_refs:
        print("testing failed ref edits on: %s" % ref)
        failed_pm = pm.referenceQuery(ref, successfulEdits=False,
                                      failedEdits=True, es=True)
        self.assertEqual(1, len(failed_pm))
        failed_cmds = cmds.referenceQuery(str(ref.refNode),
                                          successfulEdits=False,
                                          failedEdits=True, es=True)
        self.assertEqual(1, len(failed_cmds))
def check_representations():
    """checks if the referenced versions are all matching the representation
    type of the current version
    """
    ref_reprs = []
    wrong_reprs = []
    v = staging.get('version')
    if v:
        r = Representation(version=v)
        current_repr = r.repr
        # For **Base** representation
        # allow any type of representation to be present in the scene
        if r.is_base():
            return
        for ref in pm.listReferences():
            ref_repr = ref.repr
            if ref_repr is None:
                # skip this one this is not related to a Stalker Version
                continue
            ref_reprs.append([ref, ref_repr])
            if ref_repr != current_repr:
                wrong_reprs.append(ref)
    else:
        # no current version in staging -> nothing to validate
        return
    if len(wrong_reprs):
        # Build a colored label per reference: green when it matches the
        # current representation, red otherwise.
        ref_repr_labels = []
        for ref_repr in ref_reprs:
            ref = ref_repr[0]
            repr_name = ref_repr[1]
            color = 'red' if current_repr != repr_name else 'green'
            ref_repr_labels.append(
                '<span style="color: %(color)s">%(repr_name)s</span> -> '
                '%(ref)s' %
                {
                    'color': color,
                    'repr_name': repr_name,
                    'ref': ref.refNode.name()
                }
            )
        raise PublishError(
            'You are saving as the <b>%s</b> representation<br>'
            'for the current scene, but the following references<br>'
            'are not <b>%s</b> representations of their versions:<br><br>'
            '%s' % (
                current_repr, current_repr,
                '<br>'.join(ref_repr_labels[:MAX_NODE_DISPLAY])
            )
        )
def check(self):
    """Fail when the scene contains references.

    Stores the offending references on ``self.selection`` and returns a
    ``(passed, message)`` tuple.
    """
    self.selection = pm.listReferences(refNodes=False, references=True)
    if not self.selection:
        return True, self.pass_msg
    ref_names = ", ".join([ref.refNode.name() for ref in self.selection])
    return False, self.fail_msg.format(len(self.selection), ref_names)
def load_referenced_versions(self):
    """Load every first-level reference in the current scene."""
    for reference in pm.listReferences():
        reference.load()
def populate_references(self):
    """Fill the 'references' tree category with one row per scene reference.

    Column 0 shows the reference node name, column 1 its file path.
    """
    self.references_dict = {}
    parent_item = self.category_dict['references']
    for reference in pm.listReferences():
        item = QtGui.QTreeWidgetItem(parent_item)
        item.setText(0, reference.refNode.name())
        item.setText(1, reference.path)
def test_listReferences_recursive(self):
    """Recursive listing must include nested references as well."""
    expected_namespaces = [
        'sphere1', 'sphere2', 'cube1', 'cone1',
        'cube1:sphere', 'cone1:cubeInCone', 'cone1:cubeInCone:sphere',
    ]
    expected = set(pm.FileReference(namespace=ns) for ns in expected_namespaces)
    self.assertEqual(set(pm.listReferences(recursive=True)), expected)
def setRelativeReferences():
    """Repoint every reference whose unresolved path is not already the
    path relative to the scene directory."""
    scene_dir = pm.sceneName().parent
    for ref_node in pm.listReferences():
        relative = scene_dir.relpathto(ref_node.path.realpath())
        if ref_node.unresolvedPath() == relative:
            continue
        ref_node.load(relative)
        print("// set relative (%s)" % relative)
def scan_scene(self):
    """Analyze the current scene and return the items that can be updated.

    Executed once at startup. Returns a list of dicts, one per scene
    dependency, each with three keys:

    - "node": name of the scene node to operate on
    - "type": one of "reference", "file" or "alembic" (passed back to the
      update method so it knows how to handle the object)
    - "path": path on disk to the referenced data

    Toolkit matches these paths against templates to detect newer
    versions, which are then flagged as out of date in the UI.
    """
    breakdown_items = []
    print("scene breakdown!")

    # Maya file references (Maya reports C:/style/paths, so convert the
    # separators to the current platform's).
    for reference in pm.listReferences():
        breakdown_items.append({
            "node": reference.refNode.longName(),
            "type": "reference",
            "path": reference.path.replace("/", os.path.sep),
        })

    # File texture nodes created in this scene; nodes embedded in a
    # reference are already covered by the reference entry above.
    for tex_node in cmds.ls(l=True, type="file"):
        if cmds.referenceQuery(tex_node, isNodeReferenced=True):
            continue
        tex_path = cmds.getAttr("%s.fileTextureName" % tex_node).replace("/", os.path.sep)
        breakdown_items.append({"node": tex_node, "type": "file", "path": tex_path})

    # Alembic cache nodes.
    for abc_node in cmds.ls(type="AlembicNode", long=True):
        abc_path = cmds.getAttr("%s.abc_File" % abc_node).replace("/", os.path.sep)
        print(abc_path)
        breakdown_items.append({"node": abc_node, "type": "alembic", "path": abc_path})

    return breakdown_items
def check_component_edits_on_references(): """check if there are component edits on references """ # skip if this is a representation v = staging.get('version') if v and Representation.repr_separator in v.take_name: return import maya.cmds reference_query = maya.cmds.referenceQuery references_with_component_edits = [] from anima.ui.progress_dialog import ProgressDialogManager pdm = ProgressDialogManager() all_refs = pm.listReferences(recursive=True) ref_count = len(all_refs) if not pm.general.about(batch=1) and ref_count: pdm.use_ui = True caller = pdm.register( ref_count, 'Checking component edits on %i reference nodes' % ref_count ) for ref in all_refs: all_edits = reference_query(ref.refNode.name(), es=True) # joined_edits = '\n'.join(all_edits) # if '.pt[' in joined_edits or '.pnts[' in joined_edits: # references_with_component_edits.append(ref) for edit in all_edits: if '.pt[' in edit or '.pnts[' in edit: references_with_component_edits.append(ref) break caller.step() caller.end_progress() if len(references_with_component_edits): raise PublishError( 'There are <b>component edits</b> on the following References:' '<br><br>%s<br><br>Please remove them!!!' % '<br>'.join( map(lambda x: x.refNode.name(), references_with_component_edits[:MAX_NODE_DISPLAY]) ) )
def loadedRefPermutations(parentReference=None):
    '''Returns dicts mapping from namespaces to whether they are loaded or not

    Returns all possible such dicts, taking into account the fact that if a
    ref is unloaded, all its subrefs will also be unloaded (and, in fact,
    won't even seem to exist)
    '''
    finalPermutations = []
    namespaces = pm.listReferences(parentReference=parentReference,
                                   namespaces=True, references=False)
    # now, get all the possible permutations for each sub-ref, assuming
    # that sub-ref is loaded
    subPermutationsByNS = {}
    for namespace in namespaces:
        # recurse into the children of this namespace's reference
        subsForNS = loadedRefPermutations(
            parentReference=pm.FileReference(namespace=namespace))
        if subsForNS:
            subPermutationsByNS[namespace] = subsForNS
    # Now get all loaded/unloaded permutations for top namespaces...
    for loadedVals in itertools.product((True, False), repeat=len(namespaces)):
        topByNamespace = dict(zip(namespaces, loadedVals))
        # then, find any sub-permutations for refs which are loaded
        # (unloaded refs contribute no sub-permutations at all)
        possibleSubPermutations = []
        for ns, loaded in topByNamespace.iteritems():
            if loaded and ns in subPermutationsByNS:
                possibleSubPermutations.append(subPermutationsByNS[ns])
        # finally, if we iterate over all the products of all possible
        # sub-perms, and combine all the resulting dicts, we should
        # get all the final permutations...
        if possibleSubPermutations:
            for subPermSelections in itertools.product(*possibleSubPermutations):
                topCopy = dict(topByNamespace)
                for subPermItem in subPermSelections:
                    topCopy.update(subPermItem)
                finalPermutations.append(topCopy)
        else:
            # there are no sub-permutations, just append the current
            # permutation for top-level refs
            finalPermutations.append(topByNamespace)
    return finalPermutations
def check_only_published_versions_are_used():
    """Raise PublishError when any referenced Version is not published."""
    unpublished = []
    for ref in pm.listReferences():
        version = ref.version
        if version and not version.is_published:
            unpublished.append(version)
    if unpublished:
        raise PublishError(
            'Please use only <b>published</b> versions for:<br><br>%s' %
            '<br>'.join(
                map(lambda x: x.nice_name,
                    unpublished[:MAX_NODE_DISPLAY])
            )
        )
def execute(self, **kwargs):
    """Scan the scene and return its external dependencies.

    Returns a list of dicts with keys "node", "type" and "path", covering
    Maya file references, non-referenced file texture nodes, and
    non-referenced Exocortex Alembic nodes.
    """
    refs = []

    # Maya file references; convert Maya's forward-slash paths to the
    # platform separator.
    for x in pm.listReferences():
        node_name = x.refNode.longName()
        maya_path = x.path.replace("/", os.path.sep)
        refs.append({"node": node_name, "type": "reference", "path": maya_path})

    # File texture nodes belonging to this scene; nodes embedded in a
    # reference are excluded from the breakdown.
    for file_node in cmds.ls(l=True, type="file"):
        if cmds.referenceQuery(file_node, isNodeReferenced=True):
            continue
        path = cmds.getAttr("%s.fileTextureName" % file_node).replace("/", os.path.sep)
        refs.append({"node": file_node, "type": "file", "path": path})

    # Get a list of non-referenced Alembics, why on earth doesn't Maya have
    # an option to exclude referenced nodes from a list?!
    # Bug fix: the bare ``except:`` also swallowed SystemExit and
    # KeyboardInterrupt; narrowed to Exception while keeping the
    # best-effort behavior (the Exocortex plug-in may not be loaded).
    try:
        set1 = cmds.ls(type="ExocortexAlembicFile", long=True, ro=False)
        set2 = cmds.ls(type="ExocortexAlembicFile", long=True, ro=True)
        alembicnodes = set(set1).difference(set2)
        for alembics in alembicnodes:
            path = cmds.getAttr("%s.fileName" % alembics).replace("/", os.path.sep)
            refs.append({"node": alembics, "type": "alembic", "path": path})
    except Exception:
        pass

    return refs
def test_save_as_references_to_a_version_in_same_workspace_are_replaced_with_abs_path_with_env_variable(self):
    """save_as should rewrite reference paths so they start with the
    repository environment variable when the referenced version lives in
    the same project.
    """
    # Project with one asset and two versions of different types.
    proj1 = Project("Proj1")
    proj1.save()
    proj1.create()

    asset1 = Asset(proj1, "Asset 1")
    asset1.save()

    vers1 = Version(asset1, asset1.code, self.asset_vtypes[0], self.user1)
    vers1.save()
    self.mEnv.save_as(vers1)

    pm.newFile(force=True)

    vers2 = Version(asset1, asset1.code, self.asset_vtypes[1], self.user1)
    vers2.save()

    # Reference the first version into the second, then save.
    self.mEnv.reference(vers1)
    self.mEnv.save_as(vers2)

    # Exactly one reference, and its unresolved path starts with $REPO.
    refs = pm.listReferences()
    self.assertTrue(len(refs) == 1)
    self.assertTrue(
        refs[0].unresolvedPath().startswith("$" + conf.repository_env_key)
    )
def reference_lightcam_rig():
    """Reference the dog light/camera rig into the scene, once.

    Returns:
        bool: True when the rig was referenced, False when it was already
        loaded or the reference could not be created.
    """
    lightcampath = "xrefs/pepper_lightcam/dog_lightscamera.ma"
    projectpath = pm.Workspace().getPath()
    fullpath = os.path.join(projectpath, lightcampath)

    # Skip when any existing reference already points at the rig file
    # (compare with the project path stripped off).
    for r in pm.listReferences():
        filepathwithoutprojectpath = str(r).replace(projectpath, "")
        if lightcampath in filepathwithoutprojectpath:
            print("Lightcam rig already loaded: %s" % (r.refNode))
            return False

    print("Referencing lightcam rig from %s" % (fullpath))
    try:
        pm.createReference(fullpath, groupReference=True,
                           groupName="lightcam",
                           namespace="dog_lightscamera")
        return True
    # Bug fix: the bare ``except:`` also caught SystemExit and
    # KeyboardInterrupt; narrowed to Exception, keeping the user-facing
    # error message.
    except Exception:
        pm.err("Could not reference file %s. Do you have your project set correctly?" % (fullpath))
        return False
def test_reference_creates_references_to_Versions_in_other_workspaces_loaded(self):
    """References to Versions of a different type must be created in the
    loaded state with a repository-variable unresolved path."""
    proj1 = Project("Test Project 1")
    proj1.create()
    proj1.save()

    asset1 = Asset(proj1, "Test Asset 1")
    asset1.save()

    vers1 = Version(asset1, asset1.code, self.asset_vtypes[0], self.user1)
    vers1.save()
    vers2 = Version(asset1, asset1.code, self.asset_vtypes[1], self.user1)
    vers2.save()

    self.mEnv.save_as(vers1)
    pm.newFile(force=True)
    self.mEnv.save_as(vers2)

    # reference vers1 into vers2
    self.mEnv.reference(vers1)

    # exactly one reference, unresolved path starts with $REPO, loaded
    refs = pm.listReferences()
    self.assertEqual(len(refs), 1)
    self.assertTrue(
        refs[0].unresolvedPath().startswith("$" + conf.repository_env_key)
    )
    self.assertTrue(refs[0].isLoaded())
def addAovs(self):
    """Reference user-picked AOV setup files and wire each one's enabled
    attribute into the first render layer's adjustments.

    NOTE(review): ``each`` is a file path string while ``references`` holds
    FileReference objects, so ``each not in references`` looks like it is
    always True -- confirm the duplicate check actually works.
    """
    fileToRef = pm.fileDialog2(dir='/jobs/loca/common/aovs', okc='Reference',
                               fileFilter='*.mb', fileMode=4, ds=2)
    references = pm.listReferences()
    if fileToRef:
        for each in fileToRef:
            if each not in references:
                # namespace is the file's base name (path and .mb stripped)
                namespace = each.split('/')[-1].split('.mb')[0]
                pm.createReference(each, namespace=namespace)
                # start with the referenced AOV set disabled
                pm.setAttr('%s:%s.enabled' % (namespace, namespace), 0)
                # NOTE(review): bare expression below has no effect
                self.renderLayers[0]
                # find the last used adjustments plug index on the layer
                # and connect into the next free one
                adjustmentPlugs = str(pm.listAttr('%s.adjustments' % self.renderLayers[0], m=1)[-3])
                lastAdjsNum = int(adjustmentPlugs.split('[')[-1].split(']')[0])
                newAdjsNum = lastAdjsNum + 1
                pm.connectAttr('%s:%s.enabled' % (namespace, namespace),
                               '%s.adjustments[%d].plug' % (self.renderLayers[0], newAdjsNum),
                               f=1)
                # rebuild the dialog so the new reference shows up
                self.close()
                self.__init__()
                self.show()
            else:
                fileName = each.split('/')[-1].split('.m')[0]
                pm.warning('%s is already referenced so skipped' % fileName)
    else:
        pass
    return 0
def _test_listReferences_options(self, expectedRefs, kwargs):
    """Exhaustively verify listReferences output for every combination of
    the namespaces/refNodes/references flags."""
    flag_values = (True, False)
    for namespaces in flag_values:
        for refNodes in flag_values:
            for references in flag_values:
                expected = set()
                for ref in expectedRefs:
                    fields = []
                    if namespaces:
                        fields.append(ref.fullNamespace)
                    if refNodes:
                        fields.append(ref.refNode)
                    if references:
                        fields.append(ref)
                    # a single requested field is returned bare,
                    # multiple fields come back as a tuple
                    expected.add(fields[0] if len(fields) == 1 else tuple(fields))
                result = pm.listReferences(namespaces=namespaces,
                                           refNodes=refNodes,
                                           references=references,
                                           **kwargs)
                self.assertEqual(set(result), expected)
def set_render_meta_data(vraySettings, tank): vraySettings.imageFormatStr.set("exr (multichannel)") # Setup the meta data dependency dependencies = [] try: for ref in pm.listReferences(): validatedPublish = tank.template_from_path(ref.path) if validatedPublish: if "work" in ref.path and ref.isLoaded(): raise TypeError elif "work" not in ref.path: dependencies.append(ref.path) except TypeError: workFiles = libPySide.QCriticalBox() workFiles.setText("Unable to publish with WIP assets") msg = "Your scene contains Work in progress(WIP) references. Please switch to a published asset or unload it. \n\n Example: \n%s" % ref.path workFiles.setDetailedText(msg) workFiles.setWindowTitle("WIP Assets") workFiles.exec_() shotgunInfo = {"SourceMayaFile": pm.sceneName(), "Dependencies": dependencies} shotgunJson = libFile.json.dumps(shotgunInfo) vraySettings.imgOpt_exr_attributes.set('ShotgunInfo = "%s";' % shotgunJson)
def genereateAnim(self, reopen=True):
    """Bake the scene's animation onto bind joints and export it as FBX.

    The file is written to an ``FBXAnim`` folder next to the scene; when
    ``reopen`` is True the current scene is reopened afterwards to discard
    the destructive edits made during export.
    """
    # export_path,_ = self.getFilename()
    Scene = self.preExport()
    FBXAnim = os.path.join(Scene.dirname(), "FBXAnim")
    os.mkdir(FBXAnim) if not os.path.exists(FBXAnim) else None
    export_path = os.path.join(FBXAnim, "%s.FBX" % Scene.namebase)
    export_path = export_path.replace('\\', '/')

    # NOTE import all references (error message: "animation file contains
    # multiple reference files")
    ref_list = pm.listReferences()
    if len(ref_list) > 1:
        raise RuntimeError("动画文件包含多个引用文件")
    [ref.importContents(True) for ref in pm.listReferences()]

    # NOTE if no root joint is found, auto-generate a root skeleton
    root_list = pm.ls("root", dag=1, type="joint")
    mesh_list = self.getMeshList()
    if not root_list:
        # # NOTE delete non-deformer history
        # pm.bakePartialHistory( mesh_list,prePostDeformers=True )
        jnt_list = self.getJntList(mesh_list)
        pm.select(cl=1)
        root = pm.joint(n="root")
        jnt_parent = self.getRelParent(jnt_list, root)
        anim_parent = {}
        for jnt in jnt_list:
            pm.select(cl=1)
            # one "<name>_bind" joint per source joint, constrained to it
            anim_jnt = pm.joint(n="%s_bind" % jnt)
            pm.parentConstraint(jnt, anim_jnt, mo=0)
            pm.scaleConstraint(jnt, anim_jnt, mo=0)
            parent = jnt_parent[jnt]
            anim_parent[anim_jnt] = "%s_bind" % parent if parent != root else root
        jnt_transform = {}
        for anim_jnt, parent in anim_parent.items():
            anim_jnt.setParent(parent)
            # NOTE remove the scale-compensation group Maya may insert
            transform = anim_jnt.getParent()
            if transform != parent:
                pm.ungroup(transform)
        root_list = anim_parent.keys()

    # NOTE bake keyframes over the playback range
    start_time = pm.playbackOptions(q=1, min=1)
    end_time = pm.playbackOptions(q=1, max=1)
    pm.bakeResults(root_list, simulation=1, t=(start_time, end_time))
    pm.select(root_list)

    # NOTE export the file and reveal it in the file browser
    mel.eval('FBXExport -f "' + export_path + '" -s')
    os.startfile(os.path.dirname(export_path))

    # NOTE reopen the current file to discard destructive edits
    if reopen:
        pm.openFile(pm.sceneName(), f=1)
def genereateRig(self, select=True):
    """Flatten the rig to a clean bind skeleton and export it as FBX.

    Imports all references, (re)builds a ``root`` joint hierarchy when one
    is missing, strips locks/connections/blendshapes from the joints, then
    exports and reopens the scene to discard the destructive edits.

    NOTE(review): reconstructed from a whitespace-mangled source; the exact
    indentation of the tail section (mesh unparenting onward) relative to
    the ``if not root`` block should be confirmed against the original.
    """
    # export_path,_ = self.getFilename()
    # if not os.path.exists(export_path):
    #     return
    Scene = self.preExport()
    export_path = os.path.join(Scene.dirname(), "%s.FBX" % Scene.namebase)
    export_path = export_path.replace('\\', '/')
    mel.eval('FBXExportSkins -v true')
    # NOTE import all references
    [ref.importContents(True) for ref in pm.listReferences()]
    root = pm.ls("root")
    mesh_list = self.getMeshList()
    if not root:
        # # NOTE delete non-deformer history
        # pm.bakePartialHistory( mesh_list,prePostDeformers=True )
        jnt_list = self.getJntList(mesh_list)
        pm.select(cl=1)
        root = pm.joint(n="root")
        jnt_parent = self.getRelParent(jnt_list, root)
        mel.eval('moveJointsMode 1;')
        # # NOTE delete all blendshapes
        # pm.delete(pm.ls(type="ikEffector"))
        pm.delete(pm.ls(type="blendShape"))
        jnt_transform = {}
        for jnt, pos in {
            jnt: pm.xform(jnt, q=1, ws=1, t=1)
            for jnt in jnt_list
        }.iteritems():
            # unlock and re-expose all TRS channels, then sever any
            # incoming connections so the joints can be reparented freely
            jnt.tx.setLocked(0)
            jnt.ty.setLocked(0)
            jnt.tz.setLocked(0)
            jnt.rx.setLocked(0)
            jnt.ry.setLocked(0)
            jnt.rz.setLocked(0)
            jnt.sx.setLocked(0)
            jnt.sy.setLocked(0)
            jnt.sz.setLocked(0)
            jnt.tx.showInChannelBox(1)
            jnt.ty.showInChannelBox(1)
            jnt.tz.showInChannelBox(1)
            jnt.rx.showInChannelBox(1)
            jnt.ry.showInChannelBox(1)
            jnt.rz.showInChannelBox(1)
            jnt.sx.showInChannelBox(1)
            jnt.sy.showInChannelBox(1)
            jnt.sz.showInChannelBox(1)
            mel.eval('CBdeleteConnection %s' % jnt.tx)
            mel.eval('CBdeleteConnection %s' % jnt.ty)
            mel.eval('CBdeleteConnection %s' % jnt.tz)
            mel.eval('CBdeleteConnection %s' % jnt.rx)
            mel.eval('CBdeleteConnection %s' % jnt.ry)
            mel.eval('CBdeleteConnection %s' % jnt.rz)
            mel.eval('CBdeleteConnection %s' % jnt.sx)
            mel.eval('CBdeleteConnection %s' % jnt.sy)
            mel.eval('CBdeleteConnection %s' % jnt.sz)
            jnt.setParent(root)
            jnt.rename("%s_bind" % jnt)
            parent = jnt.getParent()
            if parent.name() == root:
                jnt.t.set(pos)
            else:
                jnt_transform[jnt] = parent
        # NOTE clear jnt transform node
        for jnt, parent in jnt_transform.items():
            pm.xform(parent, piv=pm.xform(jnt, q=1, ws=1, t=1), ws=1)
            #
            jnt.s.set(parent.s.get())
            # parent.s.set(1,1,1)
            pm.ungroup(parent)
        # NOTE delete unrelated node
        [pm.delete(node) for jnt in jnt_list for node in jnt.getChildren()]
        # NOTE reparent hierarchy
        jnt_transform = {}
        for jnt, parent in jnt_parent.items():
            jnt.setParent(parent)
            transform = jnt.getParent()
            if transform != parent:
                jnt_transform[jnt] = transform
        for jnt, parent in jnt_transform.items():
            pm.xform(parent, piv=pm.xform(jnt, q=1, ws=1, t=1), ws=1)
            # NOTE avoid accidental twisting
            jnt.s.set(1, 1, 1)
            parent.s.set(1, 1, 1)
            pm.ungroup(parent)
    # unparent meshes to world, reset the bind pose and export
    [mesh.getParent().setParent(w=1) for mesh in mesh_list]
    pm.select(root, mesh_list)
    pm.delete(pm.ls(type="dagPose"))
    pm.dagPose(bp=1, s=1)
    # mel.eval('moveJointsMode 0;')
    # NOTE export the file and reveal it in the file browser
    mel.eval('FBXExport -f "' + export_path + '" -s')
    os.startfile(os.path.dirname(export_path))
    # NOTE reopen the current file to discard destructive edits
    pm.openFile(pm.sceneName(), f=1)
def loadAssetsNYS(tricode, location, diagnostic=False, clean=True):
    # Get team info from database
    """ Given a specific location (home/away), this function attempts to
    reference in a selected team's assets and attach them to respected 01,
    05 and 06 attachment points."""
    try:
        team = Team(tricode)
    except:
        pm.warning('Build Scene ERROR Could not find team in database.')
        return
    ''' LK SPECIFIC SECTION '''
    # The full path of this scene
    this_scene = pm.sceneName()
    # Split into tokens
    scene_token = this_scene.split('/')
    # 4th from the right is the project name
    this_project = scene_token[len(scene_token) - 1].replace(
        '_SKELETON.mb', '')
    ''' END LK '''
    # Create paths for signs / team logo / region / layout scenes
    logo_path = os.path.join(cfb.TEAMS_ASSET_DIR, team.tricode,
                             (team.tricode + '.mb'))
    if (diagnostic):
        # NOTE(review): ``sign_path`` and ``lgtrig_path`` are never defined
        # in this function -- diagnostic mode will raise NameError here;
        # confirm against the original module.
        print '\n'
        print '{} Team: {}'.format(location, team.tricode)
        print 'Project: {}'.format(this_project)
        print '{} Sign: {}'.format(location, sign_path)
        print '{} Logo: {}'.format(location, logo_path)
        print 'Light Rig: {}'.format(lgtrig_path)
    # Check for missing files and print warnings
    if not os.path.exists(logo_path):
        pm.warning('Build Scene WARNING could not find {0}'.format(logo_path))
        logo_path = None
    if (diagnostic):
        return
    # Generate namespaces
    sign_nspc = '{0}SIGN'.format(location)
    logo_nspc = '{0}LOGO'.format(location)
    # Check for existing references
    sign_ref = None
    logo_ref = None
    # Get those reference nodess
    for ref in pm.listReferences():
        if ref.namespace == logo_nspc:
            logo_ref = ref
    # If there are references missing, force a clean run for simplicity's
    # sake (i implore you)
    if (logo_ref) == None and clean == False:
        pm.warning(
            'Build Scene Existing reference not found. Forcing clean reference.'
        )
        clean = True
    # If the user has asked to do a clean reference of the asset,
    # including attachment
    if (clean):
        # If there's already references in those namespaces, just delete them
        if (logo_ref):
            logo_ref.remove()
        # Reference in the asset to the namespace
        if logo_path:
            asset.reference(logo_path, logo_nspc)
        # Attach them to their parent locators
        attachTeamToSign(location)
    # (If) there are already references in the namespaces, and the user is
    # requesting to replace the reference and maintain reference edits
    # (dirty mode). logo_ref is guaranteed non-None here because a missing
    # reference forces clean=True above.
    elif not (clean):
        # Same thing with school logos this time
        if (team.tricode + '.mb') in logo_ref.path:
            pass
        else:
            logo_ref.replaceWith(logo_path)
    # Cleanup foster parents (best-effort)
    try:
        logo_re = re.compile('{0}RNfosterParent.'.format(logo_nspc))
        pm.delete(pm.ls(regex=logo_re))
    except:
        pass
# distintos querys (texturas y referencias) # info about https://groups.google.com/forum/#!msg/python_inside_maya/KYgnHkc1xvk/ADZ4USknEAAJ # So if you want the texture file nodes in the scene, you can get them from pymel with: import pymel.core as pm file_nodes = pm.ls(type=pm.nt.File) file_paths = [fyle.fileTextureName.get() for fyle in file_nodes] # with cmds: import maya.cmds as cmds file_nodes = cmds.ls(type='file') file_paths = [cmds.getAttr(fyle + '.fileTextureName') for fyle in file_nodes] # Unresolved reference paths can be gotten pretty easy in pymel : unresolved_paths = [ref.unresolvedPath() for ref in pm.listReferences()] # with cmds: [cmds.referenceQuery(pth, unresolvedName=True, filename=True) for pth in cmds.file(q=True, reference=True)] # ejemplo en pymel de fileReference y su uso # info about https://forums.cgsociety.org/t/edit-reference-file-attributes/1702955 # https://help.autodesk.com/cloudhelp/2016/ENU/Maya-Tech-Docs/PyMel/generated/classes/pymel.core.system/pymel.core.system.FileReference.html import pymel.core as pm FR = pm.FileReference(namespace='myNamespace') current_path = FR.path FR.replaceWith(new_path) # bloquear desbloquear nodes en maya import pymel.core as pm
def leer_referencias_escena():
    """Return the scene's references as (namespace, FileReference) pairs.

    Element 0 of each pair is the namespace string and element 1 the
    FileReference object.
    """
    return pm.listReferences(namespaces=True)
def exportAnimation(options=None):
    """Bake the selected character's joints and export them to an FBX file.

    :param options: dict with keys 'FrameRange' (e.g. "1-100"), 'AnimName',
        'AnimDir' and 'Export' (bool). Returns early when falsy.

    NOTE(review): this destructively flattens the scene (imports all
    references, removes the namespace, deletes nodes) and relies on
    scene-specific names ('bsh_all', 'Deformation', 'GEO') and naming
    conventions ('Bind', 'lowPoly', 'Ground', 'End') -- confirm against
    the rig before reuse.
    """
    if not options:
        return
    # build the output .fbx path from the options dict
    frameRangeText = options['FrameRange']
    animName = options['AnimName'] + '_' + frameRangeText
    path = options['AnimDir'] + '/' + animName + '.fbx'
    # "start-end" text -> (start, end) int tuple
    frameRange = [int(frame) for frame in frameRangeText.split('-')]
    frameRangeTuple = (frameRange[0], frameRange[1])
    # abort when nothing is selected
    if not len(mc.ls(sl=True)):
        return
    selection = mc.ls(sl=True)[0]
    namespace = selection.split(':')[0]
    # flatten the scene: import every reference, recursively
    for ref in pymel.listReferences(recursive=True):
        ref.importContents()
    mc.select(selection, hi=True)
    objectsToDelete = mc.ls(sl=True)
    # merge the character's namespace into the root namespace
    mc.namespace(removeNamespace=namespace, mergeNamespaceWithRoot=True)
    # for obj in objectsToDelete:
    #     try:
    #         mc.rename(obj, obj.replace(namespace + ':', ''))
    #     except:
    #         pass
    selection = selection.replace(namespace + ':', '')
    # bake every 'Bind' joint over the frame range
    mc.select([obj for obj in mc.ls(type='joint') if 'Bind' in obj])
    joints = mc.ls(sl=True)
    mc.bakeResults(joints, simulation=True, t=frameRangeTuple, sampleBy=1,
                   disableImplicitControl=True, preserveOutsideKeys=True,
                   sparseAnimCurveBake=False,
                   removeBakedAttributeFromLayer=False,
                   removeBakedAnimFromLayer=False,
                   bakeOnOverrideLayer=False, minimizeRotation=True,
                   controlPoints=False, shape=True)
    # key all keyable blendshape ('bsh_all') attrs at the range start
    for attr in mc.listAttr('bsh_all', keyable=True):
        mc.setKeyframe('bsh_all', attribute=attr, time=frameRangeTuple[0])
    mc.select(joints, d=True)
    mc.select([obj for obj in mc.ls(type='transform') if 'lowPoly' in obj])
    # NOTE(review): lowPolyParent is assigned but never used below
    lowPolyParent = mc.ls(sl=True)[0]
    rootJointParent = next(joint for joint in joints if 'Ground' in joint)
    # delete everything under the character except 'Deformation' and 'GEO'
    selectionChildren = mc.listRelatives(selection, children=True)
    for child in selectionChildren:
        if child not in ['Deformation', 'GEO']:
            mc.delete(child)
    mc.select(rootJointParent, hi=True)
    # drop Deformation children that are not part of the 'Ground' hierarchy
    siblings = [child for child in mc.listRelatives('Deformation', children=True) if 'Ground' not in child]
    mc.delete(siblings)
    allJoints = mc.ls(sl=True)
    # remove the tip ('End') joints from both the scene and the joint list
    uselessJoints = [joint for joint in allJoints if 'End' in joint]
    for joint in uselessJoints:
        allJoints.remove(joint)
        mc.delete(joint)
    # delete incoming connections on scale/visibility of the kept joints
    # NOTE(review): the local 'object' shadows the builtin of the same name
    for joint in allJoints:
        for attr in ['scaleX', 'scaleY', 'scaleZ', 'visibility']:
            object = mc.listConnections(joint + '.' + attr, source=True)
            print 'deleting', object
            mc.delete(object)
    mc.xform('Deformation', scale=(.01, .01, .01))
    mc.select(selection)
    mc.refresh()
    if options['Export']:
        # NOTE(review): bare except hides the actual export error
        try:
            mel.eval('file -force -options "groups=1;ptgroups=1;materials=1;smoothing=1;normals=1" -typ "FBX export" -pr -es "' + path + '"')
            print 'FBX export successful', path
        except:
            print "Export failed!"
def replace_external_paths(self, mode=0):
    """Replaces all the external paths

    replaces:
      references : to a path which starts with $REPO env variable in
        absolute mode and a workspace relative path in relative mode
      file       : to a path which starts with $REPO env variable in
        absolute mode and a workspace relative path in relative mode

    Absolute mode works best for now.

    .. note::
      After v0.2.2 the system doesn't care about the mentalrayTexture
      nodes because the lack of a good environment variable support from
      that node. Use regular maya file nodes with
      mib_texture_filter_lookup nodes to have the same sharp results.

    :param mode: Defines the process mode:
      if mode == 0 : replaces with relative paths
      if mode == 1 : replaces with absolute paths
    """
    # TODO: Also check for image planes and replace the path
    logger.debug("replacing paths with mode: %i" % mode)

    # create a repository
    # NOTE(review): 'repo' is never read below; kept in case Repository()
    # has construction side effects -- confirm before removing
    repo = Repository()
    repo_env_key = "$" + conf.repository_env_key
    workspace_path = pm.workspace.path

    # fix for paths like S:/ (ending with a slash) for $REPO
    server_path = os.environ[conf.repository_env_key]
    if server_path.endswith('/'):
        server_path = server_path[:-1]

    # replace reference paths with $REPO
    for ref in pm.listReferences():
        unresolved_path = ref.unresolvedPath().replace("\\", "/")
        if not unresolved_path.startswith("$" + conf.repository_env_key):
            # make it absolute
            if not os.path.isabs(unresolved_path):
                unresolved_path = os.path.join(workspace_path,
                                               unresolved_path)
            if unresolved_path.startswith(server_path):
                if mode:
                    # convert to absolute path
                    new_ref_path = ref.path.replace(server_path,
                                                    repo_env_key)
                else:
                    # convert to relative path
                    new_ref_path = utils.relpath(workspace_path, ref.path)
                # FIX: these calls used to pass the path as an extra
                # positional argument without a %s placeholder, which
                # makes the logging module raise a formatting error and
                # drop the path from the log output
                logger.info("replacing reference: %s", ref.path)
                logger.info("replacing with: %s", new_ref_path)
                ref.replaceWith(new_ref_path)

    # node type -> attribute that holds the external file path
    types_and_attrs = {
        'aiImage': 'filename',
        'aiStandIn': 'dso',
        'file': 'fileTextureName',
        'imagePlane': 'imageName',
        'audio': 'filename',
        'AlembicNode': 'abc_File',
        'gpuCache': 'cacheFileName',
    }

    # iterate items() directly instead of re-looking up every key
    for node_type, attr_name in types_and_attrs.items():
        for node in pm.ls(type=node_type):
            path = node.getAttr(attr_name)
            if path:
                path = path.replace("\\", "/")
                logger.info("replacing file texture: %s" % path)
                path = os.path.normpath(os.path.expandvars(path))
                if path:
                    path = path.replace("\\", "/")
                    # convert to absolute
                    if not os.path.isabs(path):
                        path = os.path.join(workspace_path,
                                            path).replace("\\", "/")
                    if mode:
                        # convert to absolute
                        new_path = path.replace(
                            server_path, "$%s" % conf.repository_env_key)
                    else:
                        # convert to relative
                        new_path = utils.relpath(workspace_path, path,
                                                 "/", "..")
                    logger.info("with: %s" % new_path)
                    node.setAttr(attr_name, new_path)
import pymel.core as pm

# module-level scene queries, evaluated once at import time
list_of_shots = pm.ls(type="shot")
list_of_references = pm.listReferences()


def reference_node_exists(name, gpu_cache=True):
    """Return True when *name* is found among the scene's reference nodes
    (and, when *gpu_cache* is True, the parents of gpuCache nodes).

    NOTE(review): the inner loop variable below shadows the ``gpu_cache``
    parameter after the flag has been checked.
    """
    result = True
    list_of_asset_nodes = []
    for reference in list_of_references:
        list_of_asset_nodes.append(reference.refNode)
    if gpu_cache:
        list_of_gpu_caches = pm.ls(type="gpuCache")
        for gpu_cache in list_of_gpu_caches:
            list_of_asset_nodes.append(gpu_cache.getParent())
    if name not in list_of_asset_nodes:
        result = False
    return result


def unknown_assets(shot, result):
    """Scan a shot's ';'-separated asset list for entries that have no
    matching reference node.

    NOTE(review): the body looks truncated in this chunk -- shot_name is
    assigned but never used and *result* is never updated; confirm
    against the original source.
    """
    list_of_assets = shot.assets.get().split(";")
    for asset in list_of_assets:
        if not reference_node_exists(asset):
            shot_name = shot.name()
def get_all_reference_file():
    """Return the file paths of all top-level references in the scene.

    :return: list of reference paths; empty when the scene has no
        references.
    """
    # A comprehension over an empty reference list already yields [],
    # so the previous explicit emptiness check was redundant.
    return [ref.path for ref in pm.listReferences()]
def generate_repr_of_all_references(cls, generate_gpu=True, generate_ass=True, generate_rs=True, skip_existing=False):
    """generates all representations of all references of this scene

    :param generate_gpu: also generate the GPU representation
    :param generate_ass: also generate the ASS (Arnold) representation
    :param generate_rs: also generate the RS (Redshift) representation
    :param skip_existing: skip versions that already have the matching
        repr child version
    """
    from anima.ui.progress_dialog import ProgressDialogManager
    from anima.env.mayaEnv import Maya, repr_tools, auxiliary
    reload(auxiliary)
    reload(repr_tools)
    paths_visited = []
    versions_to_visit = []
    versions_cannot_be_published = []
    # generate a sorted version list
    # and visit each reference only once
    from anima.env.mayaEnv import MayaMainProgressBarWrapper
    wrp = MayaMainProgressBarWrapper()
    pdm = ProgressDialogManager(dialog=wrp)
    # only show UI progress when Maya is running interactively
    use_progress_window = False
    if not pm.general.about(batch=1):
        use_progress_window = True
    all_refs = pm.listReferences(recursive=True)
    pdm.use_ui = use_progress_window
    caller = pdm.register(len(all_refs), 'List References')
    # walk the references deepest-last-first, collecting each referenced
    # version only once (deduplicated by path)
    for ref in reversed(all_refs):
        ref_path = str(ref.path)
        caller.step(message=ref_path)
        if ref_path not in paths_visited:
            v = ref.version
            if v is not None:
                paths_visited.append(ref_path)
                versions_to_visit.append(v)
    # let the user confirm before the (potentially long) generation pass
    response = pm.confirmDialog(
        title='Do Create Representations?',
        message='Create all Repr. for all %s FileReferences?'
                % len(versions_to_visit),
        button=['Yes', 'No'],
        defaultButton='No',
        cancelButton='No',
        dismissString='No'
    )
    if response == 'No':
        return
    # register a new caller
    caller = pdm.register(max_iteration=len(versions_to_visit),
                          title='Generate Reprs')
    m_env = Maya()
    source_version = m_env.get_current_version()
    gen = repr_tools.RepresentationGenerator()
    # open each version
    from stalker import Version
    for v in versions_to_visit:
        local_generate_gpu = generate_gpu
        local_generate_ass = generate_ass
        local_generate_rs = generate_rs
        # check if this is a repr
        if '@' in v.take_name:
            # use the parent
            v = v.parent
            if not v:
                continue
        if skip_existing:
            # check if there is a GPU or ASS repr
            # generated from this version
            child_versions = Version.query.filter(
                Version.parent == v).all()
            for cv in child_versions:
                if local_generate_gpu is True and '@GPU' in cv.take_name:
                    local_generate_gpu = False
                if local_generate_ass is True and '@ASS' in cv.take_name:
                    local_generate_ass = False
                if local_generate_rs is True and '@RS' in cv.take_name:
                    local_generate_rs = False
        gen.version = v
        # generate representations; a RuntimeError marks the version as
        # not publishable instead of aborting the whole pass
        if local_generate_gpu:
            try:
                gen.generate_gpu()
            except RuntimeError:
                if v not in versions_cannot_be_published:
                    versions_cannot_be_published.append(v)
        if local_generate_ass:
            try:
                gen.generate_ass()
            except RuntimeError:
                if v not in versions_cannot_be_published:
                    versions_cannot_be_published.append(v)
        if local_generate_rs:
            try:
                gen.generate_rs()
            except RuntimeError:
                if v not in versions_cannot_be_published:
                    versions_cannot_be_published.append(v)
        caller.step()
    # now open the source version again
    m_env.open(source_version, force=True, skip_update_check=True)
    # and generate representation for the source
    gen.version = source_version
    # generate representations
    if not versions_cannot_be_published:
        if generate_gpu:
            gen.generate_gpu()
        if generate_ass:
            gen.generate_ass()
        if generate_rs:
            gen.generate_rs()
    else:
        pm.confirmDialog(
            title='Error',
            message='The following versions can not be published '
                    '(check script editor):\n\n%s' % (
                        '\n'.join(
                            map(lambda x: x.nice_name,
                                versions_cannot_be_published))),
            button=['OK'],
            defaultButton='OK',
            cancelButton='OK',
            dismissString='OK'
        )
        pm.error('\n'.join(
            map(lambda x: x.absolute_full_path,
                versions_cannot_be_published)))
def generate_bbox(self): """generates the BBox representation of the current scene """ # validate the version first self.version = self._validate_version(self.version) self.open_version(self.version) task = self.version.task # check if all references have an BBOX repr first refs_with_no_bbox_repr = [] for ref in pm.listReferences(): if ref.version and not ref.has_repr('BBOX'): refs_with_no_bbox_repr.append(ref) if len(refs_with_no_bbox_repr): raise RuntimeError( 'Please generate the BBOX Representation of the references ' 'first!!!\n%s' % '\n'.join(map(lambda x: str(x.path), refs_with_no_bbox_repr))) # do different things for Vegetation tasks if self.is_vegetation_task(task): # load all references back for ref in pm.listReferences(): ref.load() # find the _pfxPolygons node pfx_polygons_node = pm.PyNode('kks___vegetation_pfxPolygons') all_children = [] for node in pfx_polygons_node.getChildren(): for child_node in node.getChildren(): all_children.append(child_node) auxiliary.replace_with_bbox(all_children) # clean up other nodes pm.delete('kks___vegetation_pfxStrokes') pm.delete('kks___vegetation_paintableGeos') elif self.is_scene_assembly_task(task): # reload all references # replace all root references with their BBOX representation for ref in pm.listReferences(): ref.to_repr('BBOX') else: # find all non referenced root nodes root_nodes = self.get_local_root_nodes() if len(root_nodes): all_children = [] for root_node in root_nodes: for child in root_node.getChildren(): all_children.append(child) auxiliary.replace_with_bbox(all_children) # reload all references # replace all root references with their BBOX representation for ref in pm.listReferences(): ref.to_repr('BBOX') # if this is an Exterior/Interior -> Layout -> Hires task flatten it if self.is_exterior_or_interior_task(task): # and import all of the references all_refs = pm.listReferences() while len(all_refs) != 0: for ref in all_refs: if not ref.isLoaded(): ref.load() ref.importContents() all_refs = 
pm.listReferences() # save the scene as {{original_take}}___BBOX # use maya take_name = '%s%s%s' % (self.base_take_name, Representation.repr_separator, 'BBOX') v = self.get_latest_repr_version(take_name) self.maya_env.save_as(v) # reopen the original version pm.newFile(force=True)
# Temporary "loading" message window shown while pymel is imported
content_main = cmds.columnLayout(adj=True)
cmds.text("", h=5)
cmds.text(" Loading PyMel ", bgc=[.1, .1, .1], fn="boldLabelFont")
cmds.text(" Please Wait ", bgc=[.1, .1, .1], fn="boldLabelFont")
cmds.text(" ", h=5)
# NOTE(review): pyMel_load_message is created earlier in the file -- confirm
cmds.showWindow(pyMel_load_message)
import pymel.core as pm
# pymel import finished; tear the message window down again
if cmds.window("pyMel_load_message", exists=True):
    cmds.deleteUI("pyMel_load_message")

# Store References
refs = pm.listReferences()


# Main Form ============================================================================
def build_gui_replace_reference_paths():
    """Build the 'replace reference paths' window, deleting any previous
    instance of it first.

    NOTE(review): script_name / script_version are defined elsewhere in
    the file, and the function body appears to continue past this chunk.
    """
    window_name = "build_gui_replace_reference_paths"
    if cmds.window(window_name, exists=True):
        cmds.deleteUI(window_name)

    # Main GUI Start Here =================================================================================

    # Build UI
    build_gui_replace_reference_paths = cmds.window(window_name, title=script_name + ' - (v' + script_version + ')',\
                                                    titleBar=True, mnb=False, mxb=False, sizeable=True)

    cmds.window(window_name, e=True, s=True, wh=[1, 1])
def generate_gpu(self):
    """generates the GPU representation of the current scene

    Requires every referenced version to already have a GPU repr;
    raises RuntimeError otherwise. Local geometry is exported to
    gpuCache (.abc) files and replaced with gpuCache nodes, then the
    result is saved as the ``{{original_take}}___GPU`` take.
    """
    # validate the version first
    self.version = self._validate_version(self.version)
    self.open_version(self.version)
    # load necessary plugins
    pm.loadPlugin('gpuCache')
    pm.loadPlugin('AbcExport')
    pm.loadPlugin('AbcImport')
    # check if all references have an GPU repr first
    refs_with_no_gpu_repr = []
    for ref in pm.listReferences():
        if ref.version and not ref.has_repr('GPU'):
            refs_with_no_gpu_repr.append(ref)
    if len(refs_with_no_gpu_repr):
        raise RuntimeError(
            'Please generate the GPU Representation of the references '
            'first!!!\n%s' %
            '\n'.join(map(lambda x: str(x.path), refs_with_no_gpu_repr)))
    # unload all references
    for ref in pm.listReferences():
        ref.unload()
    # for local models generate an ABC file
    output_path = os.path.join(self.version.absolute_path,
                               'Outputs/alembic/').replace('\\', '/')
    # NOTE(review): abc_command is built but never used below
    abc_command = \
        'AbcExport -j "-frameRange %(start_frame)s ' \
        '%(end_frame)s ' \
        '-ro -stripNamespaces ' \
        '-uvWrite ' \
        '-wholeFrameGeo ' \
        '-worldSpace ' \
        '-root |%(node)s -file %(file_path)s";'
    gpu_command = \
        'gpuCache -startTime %(start_frame)s ' \
        '-endTime %(end_frame)s ' \
        '-optimize -optimizationThreshold 40000 ' \
        '-writeMaterials ' \
        '-directory "%(path)s" ' \
        '-fileName "%(filename)s" ' \
        '%(node)s;'
    # single-frame export at the current time
    start_frame = end_frame = int(pm.currentTime(q=1))
    if not self.is_scene_assembly_task(self.version.task):
        if self.is_vegetation_task(self.version.task):
            # in vegetation files, we export the GPU files directly from
            # the Base version, also we use the geometry under
            # "pfxPolygons" and parent the resulting Stand-In nodes to the
            # pfxPolygons
            # load all references
            for ref in pm.listReferences():
                ref.load()
            # find the _pfxPolygons node
            pfx_polygons_node = pm.PyNode('kks___vegetation_pfxPolygons')
            for node in pfx_polygons_node.getChildren():
                for child_node in node.getChildren():
                    child_node_name = child_node.name().split('___')[-1]
                    child_node_shape = child_node.getShape()
                    child_node_shape_name = None
                    if child_node_shape:
                        child_node_shape_name = child_node_shape.name()
                    pm.select(child_node)
                    # export to a temp path first, then move into place
                    temp_output_fullpath = \
                        tempfile.mktemp().replace('\\', '/')
                    temp_output_path, temp_output_filename = \
                        os.path.split(temp_output_fullpath)
                    output_filename = '%s_%s' % (
                        self.version.nice_name,
                        child_node_name.split(':')[-1].replace(
                            ':', '_').replace('|', '_'))
                    # run the mel command
                    # check if file exists
                    pm.mel.eval(
                        gpu_command % {
                            'start_frame': start_frame,
                            'end_frame': end_frame,
                            'node': child_node.fullPath(),
                            'path': temp_output_path,
                            'filename': temp_output_filename
                        })
                    cache_file_full_path = \
                        os.path\
                        .join(output_path, output_filename + '.abc')\
                        .replace('\\', '/')
                    # create the intermediate directories
                    try:
                        os.makedirs(
                            os.path.dirname(cache_file_full_path))
                    except OSError:
                        # directory exists
                        pass
                    # now move in to its place
                    shutil.move(temp_output_fullpath + '.abc',
                                cache_file_full_path)
                    # set rotate and scale pivots
                    rp = pm.xform(child_node, q=1, ws=1, rp=1)
                    sp = pm.xform(child_node, q=1, ws=1, sp=1)
                    #child_node.setRotatePivotTranslation([0, 0, 0])
                    # delete the child and add a GPU node instead
                    pm.delete(child_node)
                    # check if file exists and create nodes
                    if os.path.exists(cache_file_full_path):
                        gpu_node = pm.createNode('gpuCache')
                        gpu_node_tra = gpu_node.getParent()
                        pm.parent(gpu_node_tra, node)
                        gpu_node_tra.rename(child_node_name)
                        if child_node_shape_name is not None:
                            gpu_node.rename(child_node_shape_name)
                        pm.xform(gpu_node_tra, ws=1, rp=rp)
                        pm.xform(gpu_node_tra, ws=1, sp=sp)
                        gpu_node.setAttr('cacheFileName',
                                         cache_file_full_path,
                                         type="string")
                    else:
                        print('File not found!: %s' % cache_file_full_path)
            # clean up other nodes
            pm.delete('kks___vegetation_pfxStrokes')
            pm.delete('kks___vegetation_paintableGeos')
        else:
            root_nodes = self.get_local_root_nodes()
            if len(root_nodes):
                for root_node in root_nodes:
                    # export each child of each root as separate nodes
                    for child_node in root_node.getChildren():
                        # check if it is a transform node
                        if not isinstance(child_node, pm.nt.Transform):
                            continue
                        if not auxiliary.has_shape(child_node):
                            continue
                        child_name = child_node.name()
                        child_shape = child_node.getShape()
                        child_shape_name = None
                        if child_shape:
                            child_shape_name = child_shape.name()
                        child_full_path = \
                            child_node.fullPath()[1:].replace('|', '_')
                        # export to a temp path first, then move into place
                        temp_output_fullpath = \
                            tempfile.mktemp().replace('\\', '/')
                        temp_output_path, temp_output_filename = \
                            os.path.split(temp_output_fullpath)
                        output_filename =\
                            '%s_%s' % (
                                self.version.nice_name,
                                child_full_path
                            )
                        # run the mel command
                        # check if file exists
                        pm.mel.eval(
                            gpu_command % {
                                'start_frame': start_frame,
                                'end_frame': end_frame,
                                'node': child_node.fullPath(),
                                'path': temp_output_path,
                                'filename': temp_output_filename
                            })
                        cache_file_full_path = \
                            os.path\
                            .join(
                                output_path,
                                '%s.abc' % (
                                    output_filename
                                )
                            )\
                            .replace('\\', '/')
                        # create the intermediate directories
                        try:
                            os.makedirs(
                                os.path.dirname(cache_file_full_path))
                        except OSError:
                            # directory exists
                            pass
                        # now move in to its place
                        shutil.move(temp_output_fullpath + '.abc',
                                    cache_file_full_path)
                        # set rotate and scale pivots
                        rp = pm.xform(child_node, q=1, ws=1, rp=1)
                        sp = pm.xform(child_node, q=1, ws=1, sp=1)
                        # rpt = child_node.getRotatePivotTranslation()
                        # delete the child and add a GPU node instead
                        pm.delete(child_node)
                        # check if file exists
                        if os.path.exists(cache_file_full_path):
                            gpu_node = pm.createNode('gpuCache')
                            gpu_node_tra = gpu_node.getParent()
                            pm.parent(gpu_node_tra, root_node)
                            gpu_node_tra.rename(child_name)
                            if child_shape_name is not None:
                                gpu_node.rename(child_shape_name)
                            pm.xform(gpu_node_tra, ws=1, rp=rp)
                            pm.xform(gpu_node_tra, ws=1, sp=sp)
                            # child_node.setRotatePivotTranslation(rpt)
                            gpu_node.setAttr('cacheFileName',
                                             cache_file_full_path,
                                             type="string")
    # load all references again
    # convert all references to GPU
    logger.debug('converting all references to GPU')
    for ref in pm.listReferences():
        # check if this is a Model reference
        ref.to_repr('GPU')
        ref.load()
    # if this is an Exterior/Interior -> Layout -> Hires task flatten it
    task = self.version.task
    is_exterior_or_interior_task = self.is_exterior_or_interior_task(task)
    if is_exterior_or_interior_task:
        logger.debug('importing all references')
        # and import all of the references; importing can expose new
        # (previously nested) references, hence the while loop
        all_refs = pm.listReferences()
        while len(all_refs) != 0:
            for ref in all_refs:
                if not ref.isLoaded():
                    ref.load()
                ref.importContents()
            all_refs = pm.listReferences()
        # assign lambert1 to all GPU nodes
        pm.sets('initialShadingGroup', e=1, fe=auxiliary.get_root_nodes())
    # clean up
    self.clean_up()
    # 6. save the scene as {{original_take}}___GPU
    # use maya
    take_name = '%s%s%s' % (self.base_take_name,
                            Representation.repr_separator, 'GPU')
    v = self.get_latest_repr_version(take_name)
    self.maya_env.save_as(v)
    # export the root nodes under the same file
    if is_exterior_or_interior_task:
        logger.debug('exporting root nodes')
        pm.select(auxiliary.get_root_nodes())
        pm.exportSelected(v.absolute_full_path, type='mayaAscii',
                          force=True)
    logger.debug('renewing scene')
    # clear scene
    pm.newFile(force=True)
def refCleanerUI(self):
    """Build the kRefCleaner window: a scroll list of the scene's
    reference nodes plus checkboxes choosing which kinds of reference
    edits to clean via self.kRefClean."""
    windowName = "kRefCleaner"
    selectionList = mc.ls(sl=True)
    # NOTE(review): selectionLong is assigned but never used below
    selectionLong = len(selectionList)
    # collect the scene references and remember how many there are
    refList = pm.listReferences()
    self.refNumber = len(refList)
    for ref in refList:
        print mc.referenceQuery(ref, referenceNode=True)
    # delete any previous instance of the window
    if mc.window(windowName, q=True, exists=True):
        mc.deleteUI(windowName)
    self.myWindow = mc.window(windowName, toolbox=True, titleBar=True, minimizeButton=False, maximizeButton=False, sizeable=False) #, resizeToFitChildren=True)
    mc.window(self.myWindow, width=210, height=500, edit=True)
    self.fullColumn = mc.columnLayout()
    #mc.rowColumnLayout(numberOfColumns=2)#, columnWidth3=(200, 600, 200))
    #column 1
    mc.frameLayout(label="References :", collapsable=False, collapse=False, labelVisible=1, borderVisible=False, borderStyle="out", marginHeight=2, marginWidth=2) #, width=500, collapseCommand=partial(self.actuUIsize, -64), expandCommand=partial(self.actuUIsize, 64)
    mc.paneLayout(width=210, height=210)
    self.refListScroll = mc.textScrollList(numberOfRows=20, allowMultiSelection=True)
    # fill the scroll list with the reference node names
    if refList:
        for ref in refList:
            mc.textScrollList(self.refListScroll, append=mc.referenceQuery(ref, referenceNode=True), e=True)
        mc.textScrollList(self.refListScroll, selectIndexedItem=1, e=True)
    else:
        print ">> No Ref in the scene"
    mc.setParent('..')
    # All / None / Open buttons under the reference list
    mc.rowLayout(numberOfColumns=3)
    mc.button(label="All", width=40, height=20, command=self.selAllRef)
    mc.button(label="None", width=40, height=20, command=self.selNoneRef)
    mc.button(label="Open", width=40, height=20) #, command=self.selNoneRef)
    mc.setParent('..')
    mc.setParent('..')
    '''#column 2
    mc.textScrollList(self.reListScroll, selectItem=1, q=True)
    mc.frameLayout(label="List Edits :", collapsable=False, collapse=False, labelVisible=1, borderVisible=False)
    self.editListScroll = mc.textScrollList( width=400)
    mc.setParent( '..' ) '''
    #column 3
    # 'addAttr', 'connectAttr', 'deleteAttr', 'disconnectAttr', 'parent', 'setAttr', 'lock', 'unlock'
    mc.frameLayout(label="What do you want to clean :", collapsable=False, collapse=False, labelVisible=1, borderVisible=False, borderStyle="out", marginHeight=2, marginWidth=2)
    mc.columnLayout(adjustableColumn=True)
    # one checkbox per reference-edit category
    self.successfulEditsBTN = mc.checkBox(label='successfulEdits', value=0)
    self.failedEditsBTN = mc.checkBox(label='failedEdits', value=1)
    mc.separator(style="in")
    self.setAttrBTN = mc.checkBox(label='setAttr', value=1)
    self.addAttrBTN = mc.checkBox(label='addAttr', value=1)
    self.connectAttrBTN = mc.checkBox(label='connectAttr', value=0)
    self.deleteAttrBTN = mc.checkBox(label='deleteAttr', value=0)
    self.disconnectAttrBTN = mc.checkBox(label='disconnectAttr', value=0)
    mc.separator(style="in")
    self.parentBTN = mc.checkBox(label='parent', value=0)
    mc.separator(style="in")
    self.lockBTN = mc.checkBox(label='lock', value=0)
    self.unlockBTN = mc.checkBox(label='unlock', value=0)
    # All / None / Default buttons for the checkbox set
    mc.rowLayout(numberOfColumns=3)
    mc.button(label="All", width=40, height=20, command=self.selAllEdit)
    mc.button(label="None", width=40, height=20, command=self.selNoneEdit)
    mc.button(label="Default", width=40, height=20, command=self.selDefaultEdit)
    mc.setParent('..')
    cleanBtn = mc.button(label="Clean Ref Edit", width=210, height=30, command=self.kRefClean)
    mc.setParent('..')
    mc.setParent('..')
    mc.showWindow(self.myWindow)
def check_no_references():
    """there should be no references

    :raises PublishError: when the scene contains any file reference.
    """
    # truthiness check instead of the redundant len(...) comparison
    if pm.listReferences():
        raise PublishError(
            'There should be no <b>References</b> in a <b>Model</b> scene.')
def test_fullNamespace(self):
    """fullNamespace must reflect the full reference hierarchy and must
    stay stable even after the reference *node* is moved into another
    namespace."""
    # first, test that the namespaces are as expected, when all the ref
    # nodes are "normal" / unaltered
    expected = [
        (u'sphere1', pm.FileReference(u'/usr/tmp/referencesTest/sphere.ma', refnode=u'sphere1RN')),
        (u'sphere2', pm.FileReference(u'/usr/tmp/referencesTest/sphere.ma{1}', refnode=u'sphere2RN')),
        (u'cube1', pm.FileReference(u'/usr/tmp/referencesTest/cube.ma', refnode=u'cube1RN')),
        (u'cube1:sphere', pm.FileReference(u'/usr/tmp/referencesTest/sphere.ma{2}', refnode=u'cube1:sphereRN')),
        (u'cone1', pm.FileReference(u'/usr/tmp/referencesTest/cone.ma', refnode=u'cone1RN')),
        (u'cone1:cubeInCone', pm.FileReference(u'/usr/tmp/referencesTest/cube.ma{1}', refnode=u'cone1:cubeInConeRN')),
        (u'cone1:cubeInCone:sphere', pm.FileReference(u'/usr/tmp/referencesTest/sphere.ma{3}', refnode=u'cone1:cubeInCone:sphereRN'))
    ]
    self.assertEqual(pm.listReferences(namespaces=1, recursive=1), expected)
    # top level reference: plain namespace, no prefix on the ref node
    self.assertEqual(self.coneRef1.namespace, 'cone1')
    self.assertEqual(self.coneRef1.fullNamespace, 'cone1')
    self.assertEqual(self.coneRef1.refNode.namespace(), '')
    # one level of nesting
    cubeInConeRef = pm.FileReference(refnode='cone1:cubeInConeRN')
    self.assertEqual(cubeInConeRef.namespace, 'cubeInCone')
    self.assertEqual(cubeInConeRef.fullNamespace, 'cone1:cubeInCone')
    self.assertEqual(cubeInConeRef.refNode.namespace(), 'cone1:')
    # two levels of nesting
    sphereInCubeInConeRef = pm.FileReference(refnode='cone1:cubeInCone:sphereRN')
    self.assertEqual(sphereInCubeInConeRef.namespace, 'sphere')
    self.assertEqual(sphereInCubeInConeRef.fullNamespace, 'cone1:cubeInCone:sphere')
    self.assertEqual(sphereInCubeInConeRef.refNode.namespace(), 'cone1:cubeInCone:')
    # now, try changing the namespace of one of the refnodes...
    pm.Namespace.create('foobar')
    coneRefNode = self.coneRef1.refNode
    # ref nodes are locked by default; unlock to rename, relock after
    coneRefNode.unlock()
    coneRefNode.rename('foobar:%s' % coneRefNode)
    coneRefNode.lock()
    # now, make sure that results are as expected (ie, the namespace of the
    # reference itself should be UNCHANGED, even though the namespace of the
    # reference node has changed...
    self.assertEqual(pm.listReferences(namespaces=1, recursive=1), expected)
    self.assertEqual(self.coneRef1.namespace, 'cone1')
    self.assertEqual(self.coneRef1.fullNamespace, 'cone1')
    self.assertEqual(self.coneRef1.refNode.namespace(), 'foobar:')
    self.assertEqual(cubeInConeRef.namespace, 'cubeInCone')
    self.assertEqual(cubeInConeRef.fullNamespace, 'cone1:cubeInCone')
    self.assertEqual(cubeInConeRef.refNode.namespace(), 'cone1:')
    self.assertEqual(sphereInCubeInConeRef.namespace, 'sphere')
    self.assertEqual(sphereInCubeInConeRef.fullNamespace, 'cone1:cubeInCone:sphere')
    self.assertEqual(sphereInCubeInConeRef.refNode.namespace(), 'cone1:cubeInCone:')
def scan_scene(self):
    """
    The scan scene method is executed once at startup and its purpose is
    to analyze the current scene and return a list of references that
    are to be potentially operated on.

    The return data structure is a list of dictionaries. Each scene
    reference that is returned should be represented by a dictionary
    with three keys:

    - "node": The name of the 'node' that is to be operated on. Most
      DCCs have a concept of a node, path or some other way to address
      a particular object in the scene.
    - "type": The object type that this is. This is later passed to the
      update method so that it knows how to handle the object.
    - "path": Path on disk to the referenced object.

    Toolkit will scan the list of items, see if any of the objects
    matches any templates and try to determine if there is a more
    recent version available. Any such versions are then displayed in
    the UI as out of date.
    """
    refs = []

    # first let's look at maya references
    for x in pm.listReferences():
        node_name = x.refNode.longName()

        # get the path and make it platform dependent
        # (maya uses C:/style/paths)
        maya_path = x.path.replace("/", os.path.sep)
        refs.append({
            "node": node_name,
            "type": "reference",
            "path": maya_path
        })

    # now look at file texture nodes
    for file_node in cmds.ls(l=True, type="file"):
        # ensure this is actually part of this scene and not referenced
        if cmds.referenceQuery(file_node, isNodeReferenced=True):
            # this is embedded in another reference, so don't include it
            # in the breakdown
            continue

        # get path and make it platform dependent
        # (maya uses C:/style/paths)
        path = cmds.getAttr("%s.fileTextureName" % file_node).replace(
            "/", os.path.sep)
        refs.append({"node": file_node, "type": "file", "path": path})

    # now look for maya geo cache nodes
    for cache_node in cmds.ls(l=True, type="cacheFile"):
        # get path and make it platform dependent
        # (maya uses C:/style/paths)
        path = cmds.getAttr("%s.cachePath" % cache_node).replace(
            "/", os.path.sep)
        # strip off the asset specific extension to get down to just
        # the version
        path = os.path.dirname(path)
        refs.append({
            "node": cache_node,
            "type": "cacheFile",
            "path": path
        })

    # Alembic cache nodes!
    alembic_nodes = {}  # Ensure later nodes overwrite the old ones
    alembic_file_nodes = cmds.ls(l=True, type="ExocortexAlembicFile")
    # sort numerically by the digits embedded in the node name
    # NOTE(review): a node name containing no digits would make int('')
    # raise ValueError here -- confirm the naming convention
    alembic_file_nodes.sort(
        key=(lambda node: int(filter(str.isdigit, str(node)))))
    for alembic_node in alembic_file_nodes:
        path = cmds.getAttr("%s.fileName" % alembic_node).replace(
            "/", os.path.sep)
        # Ignore old AlembicFile nodes
        alembic_nodes[os.path.basename(path)] = {
            "node": alembic_node,
            "type": "alembic",
            "path": path
        }
    for ref in alembic_nodes.itervalues():
        refs.append(ref)

    # FIX: removed the leftover debug statement
    # (print "REFFFSSS", refs) that spammed stdout on every scan;
    # the equivalent scan_scene earlier in this file has no such print.
    return refs
def generate_ass(self):
    """generates the ASS representation of the current scene

    For Model Tasks the ASS is generated over the LookDev Task because it
    is not possible to assign a material to an object inside an ASS file.

    Side effects: opens/saves/creates Maya scenes, converts references to
    their ASS representation and writes ``.ass.gz`` files under the
    version's ``Outputs/ass/`` directory.

    :raises RuntimeError: if any referenced version has no ASS
        representation yet.
    """
    # before doing anything, check if this is a look dev task
    # and export the objects from the referenced files with their current
    # shadings, then replace all of the references to ASS repr and than
    # add Stand-in nodes and parent them under the referenced models

    # load necessary plugins
    pm.loadPlugin('mtoa')

    # disable "show plugin shapes" (restored at the end of this method)
    active_panel = auxiliary.Playblaster.get_active_panel()
    show_plugin_shapes = pm.modelEditor(active_panel, q=1, pluginShapes=1)
    pm.modelEditor(active_panel, e=1, pluginShapes=False)

    # validate the version first
    self.version = self._validate_version(self.version)

    self.open_version(self.version)

    task = self.version.task

    # export_command = 'arnoldExportAss -f "%(path)s" -s -mask 24 ' \
    #                  '-lightLinks 0 -compressed -boundingBox ' \
    #                  '-shadowLinks 0 -cam perspShape;'

    # NOTE: the trailing space after "-mask 60" is required; without it
    # the concatenated MEL command contained the malformed token
    # "60-lightLinks" (compare with the older command commented above)
    export_command = 'arnoldExportAss -f "%(path)s" -s -mask 60 ' \
                     '-lightLinks 1 -compressed -boundingBox ' \
                     '-shadowLinks 1 -cam perspShape;'

    # calculate output path
    output_path = \
        os.path.join(self.version.absolute_path, 'Outputs/ass/')\
        .replace('\\', '/')

    # check if all references have an ASS repr first
    refs_with_no_ass_repr = []
    for ref in pm.listReferences():
        if ref.version and not ref.has_repr('ASS'):
            refs_with_no_ass_repr.append(ref)

    if len(refs_with_no_ass_repr):
        raise RuntimeError(
            'Please generate the ASS Representation of the references '
            'first!!!\n%s' %
            '\n'.join(map(lambda x: str(x.path), refs_with_no_ass_repr))
        )

    if self.is_look_dev_task(task):
        # in look dev files, we export the ASS files directly from the Base
        # version and parent the resulting Stand-In node to the parent of
        # the child node

        # load only Model references
        for ref in pm.listReferences():
            v = ref.version
            load_ref = False
            if v:
                ref_task = v.task
                if self.is_model_task(ref_task):
                    load_ref = True

            if load_ref:
                ref.load()
                ref.importContents()

        # Make all texture paths relative
        # replace all "$REPO#" from all texture paths first
        #
        # This is needed to properly render textures with any OS
        types_and_attrs = {
            'aiImage': 'filename',
            'file': 'fileTextureName',
            'imagePlane': 'imageName'
        }

        for node_type, attr_name in types_and_attrs.items():
            for node in pm.ls(type=node_type):
                orig_path = node.getAttr(attr_name).replace("\\", "/")
                path = re.sub(r'(\$REPO[0-9/]+)', '', orig_path)
                tx_path = self.make_tx(path)
                inputs = node.attr(attr_name).inputs(p=1)
                if len(inputs):
                    # the attribute is driven by a connection: set the
                    # source attribute instead
                    for input_node_attr in inputs:
                        input_node_attr.set(tx_path)
                else:
                    node.setAttr(attr_name, tx_path)

        # randomize all render node names
        # This is needed to prevent clashing of materials in a bigger scene
        for node in pm.ls(type=RENDER_RELATED_NODE_TYPES):
            if node.referenceFile() is None and \
               node.name() not in READ_ONLY_NODE_NAMES:
                node.rename('%s_%s' % (node.name(), uuid.uuid4().hex))

        # maps each transform's full path to the ASS file exported for it
        nodes_to_ass_files = {}

        # export all root ass files as they are
        for root_node in auxiliary.get_root_nodes():
            for child_node in root_node.getChildren():
                # check if it is a transform node
                if not isinstance(child_node, pm.nt.Transform):
                    continue

                if not auxiliary.has_shape(child_node):
                    continue

                # randomize child node name
                # TODO: This is not working as intended, node names are
                #       like |NS:node1|NS:node2 resulting a
                #       child_node_name as "node2"
                child_node_name = child_node\
                    .fullPath()\
                    .replace('|', '_')\
                    .split(':')[-1]

                child_node_full_path = child_node.fullPath()

                pm.select(child_node)
                child_node.rename(
                    '%s_%s' % (child_node.name(), uuid.uuid4().hex))

                output_filename = \
                    '%s_%s.ass' % (
                        self.version.nice_name,
                        child_node_name
                    )

                output_full_path = \
                    os.path.join(output_path, output_filename)

                # run the mel command
                pm.mel.eval(
                    export_command % {
                        'path': output_full_path.replace('\\', '/')
                    }
                )

                # arnoldExportAss with -compressed writes a gzipped file
                nodes_to_ass_files[child_node_full_path] = \
                    '%s.gz' % output_full_path

        # reload the scene
        pm.newFile(force=True)
        self.open_version(self.version)

        # convert all references to ASS
        # we are doing it a little bit early here, but we need to
        for ref in pm.listReferences():
            ref.to_repr('ASS')

        # point every stand-in whose transform we exported above at the
        # freshly generated ASS file
        all_stand_ins = pm.ls(type='aiStandIn')
        for ass_node in all_stand_ins:
            ass_tra = ass_node.getParent()
            full_path = ass_tra.fullPath()
            if full_path in nodes_to_ass_files:
                ass_file_path = \
                    Repository.to_os_independent_path(
                        nodes_to_ass_files[full_path]
                    )
                ass_node.setAttr('dso', ass_file_path)

    elif self.is_vegetation_task(task):
        # in vegetation files, we export the ASS files directly from the
        # Base version, also we use the geometry under "pfxPolygons"
        # and parent the resulting Stand-In nodes to the
        # pfxPolygons

        # load all references
        for ref in pm.listReferences():
            ref.load()

        # Make all texture paths relative
        # replace all "$REPO#" from all texture paths first
        #
        # This is needed to properly render textures with any OS
        types_and_attrs = {
            'aiImage': 'filename',
            'file': 'fileTextureName',
            'imagePlane': 'imageName'
        }

        for node_type, attr_name in types_and_attrs.items():
            for node in pm.ls(type=node_type):
                orig_path = node.getAttr(attr_name).replace("\\", "/")
                path = re.sub(r'(\$REPO[0-9/]+)', '', orig_path)
                tx_path = self.make_tx(path)
                inputs = node.attr(attr_name).inputs(p=1)
                if len(inputs):
                    # the attribute is driven by a connection: set the
                    # source attribute instead
                    for input_node_attr in inputs:
                        input_node_attr.set(tx_path)
                else:
                    node.setAttr(attr_name, tx_path)

        # import shaders that are referenced to this scene
        # there is only one reference in the vegetation task and this is
        # the shader scene
        for ref in pm.listReferences():
            ref.importContents()

        # randomize all render node names
        # This is needed to prevent clashing of materials in a bigger scene
        for node in pm.ls(type=RENDER_RELATED_NODE_TYPES):
            if node.referenceFile() is None and \
               node.name() not in READ_ONLY_NODE_NAMES:
                node.rename('%s_%s' % (node.name(), uuid.uuid4().hex))

        # find the _pfxPolygons node
        pfx_polygons_node = pm.PyNode('kks___vegetation_pfxPolygons')

        for node in pfx_polygons_node.getChildren():
            for child_node in node.getChildren():
                child_node_name = child_node.name().split('___')[-1]

                pm.select(child_node)

                output_filename = \
                    '%s_%s.ass' % (
                        self.version.nice_name,
                        child_node_name.replace(':', '_').replace('|', '_')
                    )

                output_full_path = \
                    os.path.join(output_path, output_filename)

                # run the mel command
                pm.mel.eval(
                    export_command % {
                        'path': output_full_path.replace('\\', '/')
                    }
                )

                # generate an aiStandIn node and set the path
                ass_node = auxiliary.create_arnold_stand_in(
                    path='%s.gz' % output_full_path)
                ass_tra = ass_node.getParent()

                # parent the ass node under the current node
                # under pfx_polygons_node
                pm.parent(ass_tra, node)

                # set pivots
                rp = pm.xform(child_node, q=1, ws=1, rp=1)
                sp = pm.xform(child_node, q=1, ws=1, sp=1)
                # rpt = child_node.getRotatePivotTranslation()

                pm.xform(ass_node, ws=1, rp=rp)
                pm.xform(ass_node, ws=1, sp=sp)
                # ass_node.setRotatePivotTranslation(rpt)

                # delete the child_node
                pm.delete(child_node)

                # give it the same name with the original
                ass_tra.rename('%s' % child_node_name)

        # clean up other nodes
        pm.delete('kks___vegetation_pfxStrokes')
        pm.delete('kks___vegetation_paintableGeos')

    elif self.is_model_task(task):
        # convert all children of the root node
        # to an empty aiStandIn node
        # and save it as it is
        root_nodes = self.get_local_root_nodes()

        for root_node in root_nodes:
            for child_node in root_node.getChildren():
                child_node_name = child_node.name()

                rp = pm.xform(child_node, q=1, ws=1, rp=1)
                sp = pm.xform(child_node, q=1, ws=1, sp=1)

                pm.delete(child_node)

                ass_node = auxiliary.create_arnold_stand_in(path='')
                ass_tra = ass_node.getParent()
                pm.parent(ass_tra, root_node)
                ass_tra.rename(child_node_name)

                # set pivots
                pm.xform(ass_tra, ws=1, rp=rp)
                pm.xform(ass_tra, ws=1, sp=sp)

                # because there will be possible material assignments
                # in look dev disable overrideShaders
                ass_node.setAttr('overrideShaders', False)

                # we definitely do not use light linking in our studio,
                # which seems to create more problems then it solves.
                ass_node.setAttr('overrideLightLinking', False)

    # convert all references to ASS
    for ref in pm.listReferences():
        ref.to_repr('ASS')
        ref.load()

    # fix an arnold bug
    for node_name in ['initialShadingGroup', 'initialParticleSE']:
        node = pm.PyNode(node_name)
        node.setAttr("ai_surface_shader", (0, 0, 0), type="float3")
        node.setAttr("ai_volume_shader", (0, 0, 0), type="float3")

    # if this is an Exterior/Interior -> Layout -> Hires task flatten it
    is_exterior_or_interior_task = self.is_exterior_or_interior_task(task)
    if is_exterior_or_interior_task:
        # and import all of the references
        # (importing may reveal nested references, hence the loop)
        all_refs = pm.listReferences()
        while len(all_refs) != 0:
            for ref in all_refs:
                if not ref.isLoaded():
                    ref.load()
                ref.importContents()
            all_refs = pm.listReferences()

        # assign lambert1 to all GPU nodes
        pm.sets('initialShadingGroup', e=1, fe=auxiliary.get_root_nodes())

        # now remove them from the group
        pm.sets('initialShadingGroup', e=1, rm=pm.ls())

        # and to make sure that no override is enabled
        for node in pm.ls(type='aiStandIn'):
            node.setAttr('overrideLightLinking', False)

        # make sure motion blur is disabled
        for node in pm.ls(type='aiStandIn'):
            node.setAttr('motionBlur', False)

    # clean up
    self.clean_up()

    # check if all aiStandIn nodes are included in
    # ArnoldStandInDefaultLightSet set
    try:
        arnold_stand_in_default_light_set = \
            pm.PyNode('ArnoldStandInDefaultLightSet')
    except pm.MayaNodeError:
        # just create it
        arnold_stand_in_default_light_set = \
            pm.createNode(
                'objectSet',
                name='ArnoldStandInDefaultLightSet'
            )

    pm.select(None)
    pm.sets(
        arnold_stand_in_default_light_set,
        fe=pm.ls(type='aiStandIn')
    )

    # save the scene as {{original_take}}___ASS
    # use maya
    take_name = '%s%s%s' % (
        self.base_take_name,
        Representation.repr_separator,
        'ASS'
    )
    v = self.get_latest_repr_version(take_name)
    self.maya_env.save_as(v)

    # export the root nodes under the same file
    if is_exterior_or_interior_task:
        pm.select(auxiliary.get_root_nodes())
        pm.exportSelected(v.absolute_full_path, type='mayaAscii', force=True)

    # new scene
    pm.newFile(force=True)

    # reset show plugin shapes option
    active_panel = auxiliary.Playblaster.get_active_panel()
    pm.modelEditor(active_panel, e=1, pluginShapes=show_plugin_shapes)
def extract_shot(self, shot_node_name):
    """Strip the current scene down to a single shot and publish it.

    Destructive: deletes every other shot node, deletes gpuCache
    transforms and removes top-level references that are not listed in
    the shot's ``assets`` attribute, bakes constraints, trims/moves
    animation to the shot range, then exports/publishes the camera and
    saves/publishes the resulting scene.

    :param shot_node_name: name of a ``shot`` node in the scene.
    """
    shot_node = pm.ls(shot_node_name, type="shot")[0]
    shot_name = shot_node.getShotName()

    # the shot's "assets" attribute holds a ";"-separated list of node
    # names that must survive the stripping below
    split_string = shot_node.assets.get()
    requiered_nodes = []
    if split_string is not None:
        requiered_nodes.extend(split_string.split(";"))

    # collect info from shot node
    camera_shape = pm.PyNode(shot_node.getCurrentCamera())
    camera_reference = \
        pm.referenceQuery(camera_shape, referenceNode=True)

    # getCurrentCamera may return either the camera shape or its
    # transform; normalize to the transform node
    camera_node = None
    if isinstance(camera_shape, pm.nt.Camera):
        camera_node = camera_shape.getParent()
    else:
        camera_node = camera_shape

    # the camera's reference must also be kept
    requiered_nodes.append(camera_reference)

    # snapshot top-level references BEFORE deleting anything
    top_reference_nodes = pm.listReferences(recursive=False)

    # delete all shot nodes except the one being extracted
    for shot_element in pm.ls(type='shot'):
        if shot_element != shot_node:
            pm.delete(shot_element)

    # delete gpuCache transforms that are not required for this shot
    gpu_cache_nodes = pm.ls(type='gpuCache')
    for gpu_cache in gpu_cache_nodes:
        gpu_cache_transform = gpu_cache.getParent()
        if gpu_cache_transform.name() not in requiered_nodes:
            pm.delete(gpu_cache_transform)

    first_frame = shot_node.getStartTime()
    last_frame = shot_node.getEndTime()

    # bake constraints over the shot range before references are removed
    self.bake_constraints(first_frame, last_frame)
    print("finish baking constraints")

    # drop every top-level reference that is not required by this shot
    for reference_node in top_reference_nodes:
        if reference_node.refNode.name() not in requiered_nodes:
            reference_node.remove()

    # trim animation to the shot range and shift it according to the
    # shot's cut data
    self.keyframesManager \
        .limit_animation_curves(first_frame, last_frame)
    self.keyframesManager \
        .move_animation_with_cutItem_data(first_frame, shot_node)

    # export and publish the shot camera
    camera_publish_path = self.get_publish_camera_path(shot_name)
    self.export_camera(camera_node, camera_publish_path,
                       first_frame, last_frame)
    self.publish_camera(shot_name, camera_publish_path)

    # save the stripped scene under the shot work path and publish it
    shot_publish_scene_path = self.get_publish_scene_path(
        shot_name, "maya_shot_work")
    pm.renameFile(shot_publish_scene_path)
    pm.saveFile(force=True, type='mayaAscii')
    self.publish_scene(shot_name)
    self.create_breakdown(shot_name, requiered_nodes)
def init_rows(self):
    """Populate the UI with one row per tracked publish.

    Depending on the current workspace scene rules this builds rows for
    the tracked render camera (Shot scenes), the Alembic cache (Lighting
    scenes, which then return early) and/or the assets linked to the
    current entity in Shotgun. Relies on the module-level ``sg``,
    ``project`` and ``root`` globals.
    """
    references = []

    # NOTE(review): "type" shadows the builtin; here it is the Shotgun
    # entity type ("Shot" or "Asset") derived from the scene rules
    type = "Shot"
    scene_process, entity = pm.workspace.fileRules["scene"].split("/")[1:]
    # presumably "01"/"02" scene processes are asset processes — confirm
    if "01" in scene_process or "02" in scene_process:
        type = "Asset"

    # add render camera for any shot scene process
    if type == "Shot":
        number = 1
        asset_name = "Render Camera"
        publish = sg.find_one(type, [["project", "is", project]],
                              ["sg_tracked_camera"])["sg_tracked_camera"]
        if publish is not None:
            publish = publish["local_path_windows"]
            # version token lives between the first and second "."
            current = publish.split(".")[1]

            # root switching: remap the path onto the configured root
            if root:
                publish = publish.replace("\\", "/").split("04_Maya")
                publish = "".join([root, publish[1]])

            publish = pm.util.common.path(publish)
            # all .ma versions next to the publish, newest first
            files = publish.dirname().files("*.ma")
            items = sorted([f.split(".")[1] for f in files])[::-1]

            # find the reference node currently pointing at this publish
            reference = None
            match = publish.stripext().stripext()
            for ref in pm.listReferences():
                if match in ref.path:
                    reference = ref.refNode.__unicode__()

            references += [[
                number, asset_name, publish, reference, current, items
            ]]

        # add cache to lighting scene process
        if "Lighting" in scene_process:
            number = 1
            asset_name = "Alembic Cache"
            publish = sg.find_one(type, [["project", "is", project]],
                                  ["sg_alembic_cache"])["sg_alembic_cache"]
            if publish is not None:
                publish = publish["local_path_windows"]
                current = publish.split(".")[1]

                # root switching
                if root:
                    publish = publish.replace("\\", "/").split("04_Maya")
                    publish = "".join([root, publish[1]])

                publish = pm.util.common.path(publish)
                files = publish.dirname().files("*.ma")
                items = sorted([f.split(".")[1] for f in files])[::-1]

                reference = None
                match = publish.stripext().stripext()
                for ref in pm.listReferences():
                    if match in ref.path:
                        reference = ref.refNode.__unicode__()

                references += [[
                    number, asset_name, publish, reference, current, items
                ]]

            # CREATE ROWS WITH QUERIED DATA
            # lighting scenes reference no assets, so build the rows now
            # and bail out early
            index = 0  # first row is the header
            for ast in references:
                index += 1  # which row
                ast.append(index)
                row = self.create_row(*ast)
                self.ui.central_vlayout.insertWidget(index, row)
            return

    # add assets to every scene process except lighting
    assets = sg.find_one(
        type,
        [["project", "is", project], ["code", "is", entity]],
        ["assets"])["assets"]
    asset_names = []
    for asset in assets:
        # create row only if published file exists
        publish = None
        try:
            publish = sg.find_one(
                "Asset",
                # every scene process references assets
                [["id", "is", asset["id"]]],
                ["sg_file"])["sg_file"]["local_path_windows"]
        # NOTE(review): bare except deliberately skips assets whose
        # sg_file lookup fails (missing field returns None -> TypeError)
        except:
            continue

        # root switching
        if root:
            publish = publish.replace("\\", "/").split("04_Maya")
            publish = "".join([root, publish[1]])

        # QUERY DATA FOR ROWS
        # TODO: SG SITE "assets" doesn't allow inputting the entities
        #       multiple times, need to find a work around
        asset_name = asset["name"]
        asset_names += [asset_name]
        number = asset_names.count(asset_name)  # times same asset is used

        # combo items
        publish = pm.util.common.path(publish)
        files = publish.dirname().files("*.ma")
        items = sorted([f.split(".")[1] for f in files])[::-1]

        # combo current text
        match = 0
        current = None
        reference = None
        for ref in pm.listReferences(recursive=0):
            # find the reference matching this asset name
            if asset_name == ref.path.dirname().basename():
                match += 1
                current = ref.path.split(".")[1]
                reference = ref.refNode.__unicode__()
            else:
                continue
            # if this asset is referenced multiple times
            # find the reference linked to this specific extra
            if match == number:
                current = ref.path.split(".")[1]
                reference = ref.refNode.__unicode__()
                break

        references += [[
            number, asset_name, publish, reference, current, items
        ]]

    # CREATE ROWS WITH QUERIED DATA
    index = 0  # first row is the header
    for ast in references:
        index += 1  # which row
        ast.append(index)
        row = self.create_row(*ast)
        self.ui.central_vlayout.insertWidget(index, row)
    return
def removeAllReferenceEdits(): for ref in pm.listReferences(): for editType in ['addAttr', 'deleteAttr', 'setAttr', 'disconnectAttr']: ref.removeReferenceEdits(editCommand=editType, force=True)
def test_fullNamespace(self):
    """Check FileReference.namespace / .fullNamespace are stable even
    when the reference node itself is renamed into another namespace.
    """
    # first, test that the namespaces are as expected, when all the ref
    # nodes are "normal" / unaltered
    expected = [
        (u'sphere1',
         pm.FileReference(u'/usr/tmp/referencesTest/sphere.ma',
                          refnode=u'sphere1RN')),
        (u'sphere2',
         pm.FileReference(u'/usr/tmp/referencesTest/sphere.ma{1}',
                          refnode=u'sphere2RN')),
        (u'cube1',
         pm.FileReference(u'/usr/tmp/referencesTest/cube.ma',
                          refnode=u'cube1RN')),
        (u'cube1:sphere',
         pm.FileReference(u'/usr/tmp/referencesTest/sphere.ma{2}',
                          refnode=u'cube1:sphereRN')),
        (u'cone1',
         pm.FileReference(u'/usr/tmp/referencesTest/cone.ma',
                          refnode=u'cone1RN')),
        (u'cone1:cubeInCone',
         pm.FileReference(u'/usr/tmp/referencesTest/cube.ma{1}',
                          refnode=u'cone1:cubeInConeRN')),
        (u'cone1:cubeInCone:sphere',
         pm.FileReference(u'/usr/tmp/referencesTest/sphere.ma{3}',
                          refnode=u'cone1:cubeInCone:sphereRN'))]
    self.assertEqual(pm.listReferences(namespaces=1, recursive=1),
                     expected)

    # top-level reference: namespace and fullNamespace coincide, and its
    # reference node lives in the root namespace
    self.assertEqual(self.coneRef1.namespace, 'cone1')
    self.assertEqual(self.coneRef1.fullNamespace, 'cone1')
    self.assertEqual(self.coneRef1.refNode.namespace(), '')

    # nested reference: fullNamespace is prefixed with the parent's
    cubeInConeRef = pm.FileReference(refnode='cone1:cubeInConeRN')
    self.assertEqual(cubeInConeRef.namespace, 'cubeInCone')
    self.assertEqual(cubeInConeRef.fullNamespace, 'cone1:cubeInCone')
    self.assertEqual(cubeInConeRef.refNode.namespace(), 'cone1:')

    # doubly-nested reference
    sphereInCubeInConeRef = pm.FileReference(
        refnode='cone1:cubeInCone:sphereRN')
    self.assertEqual(sphereInCubeInConeRef.namespace, 'sphere')
    self.assertEqual(sphereInCubeInConeRef.fullNamespace,
                     'cone1:cubeInCone:sphere')
    self.assertEqual(sphereInCubeInConeRef.refNode.namespace(),
                     'cone1:cubeInCone:')

    # now, try changing the namespace of one of the refnodes...
    # (the node must be unlocked before rename and re-locked after)
    pm.Namespace.create('foobar')
    coneRefNode = self.coneRef1.refNode
    coneRefNode.unlock()
    coneRefNode.rename('foobar:%s' % coneRefNode)
    coneRefNode.lock()

    # now, make sure that results are as expected (ie, the namespace of
    # the reference itself should be UNCHANGED, even though the namespace
    # of the reference node has changed...
    self.assertEqual(pm.listReferences(namespaces=1, recursive=1),
                     expected)
    self.assertEqual(self.coneRef1.namespace, 'cone1')
    self.assertEqual(self.coneRef1.fullNamespace, 'cone1')
    # ...only the reference NODE's namespace reflects the rename
    self.assertEqual(self.coneRef1.refNode.namespace(), 'foobar:')
    self.assertEqual(cubeInConeRef.namespace, 'cubeInCone')
    self.assertEqual(cubeInConeRef.fullNamespace, 'cone1:cubeInCone')
    self.assertEqual(cubeInConeRef.refNode.namespace(), 'cone1:')
    self.assertEqual(sphereInCubeInConeRef.namespace, 'sphere')
    self.assertEqual(sphereInCubeInConeRef.fullNamespace,
                     'cone1:cubeInCone:sphere')
    self.assertEqual(sphereInCubeInConeRef.refNode.namespace(),
                     'cone1:cubeInCone:')
def main():
    """Demo of basic PyMEL workflows: node creation, grouping, vector
    math, component editing, scene save/export, references and
    namespace manipulation. Side effects: saves ``pymel_test_main.ma``
    and writes/overwrites ``pymel_test_ref.ma`` next to it.
    """
    # second in list is the history node, if construction history is on
    s = pm.polySphere()[0]
    c = pm.polyCube()[0]
    print(c, s)

    c.setTranslation([0, 2, 0])
    s.setTranslation([1, -2, 0])

    g = pm.group(s, c, n='newGroup')
    print("The children of %s are %s" % (g, g.getChildren()))

    # basic vector operation
    print("difference =", c.translate.get() - s.translate.get())

    s2 = s.duplicate()[0]
    # move the new sphere relatively along the z axis
    s2.setTranslation([0, 0, -2], relative=1)

    # cycle through and move some verts.
    # we're moving each verts a relative amount based on its vertex number
    num = s2.numVertices()
    for i, vert in enumerate(s2.verts):
        pm.move(vert, [i / float(num), 0, 0], r=1)

    # save the current scene
    currScene = pm.saveAs('pymel_test_main.ma')

    # the parent property gives the parent directory of the current scene.
    # the / (slash) operator is an os independent way of joining paths
    # (a shortcut to os.path.join)
    exportScene = currScene.parent / 'pymel_test_ref.ma'

    # if a file already exists where we want to export, delete it first
    if exportScene.exists():
        print("removing existing pymel export scene")
        exportScene.remove()
    print("exporting new scene:", exportScene)
    pm.exportSelected(exportScene, f=1)

    # delete the original group
    pm.delete(g)

    # reference it in a few times
    for i in range(1, 4):
        ref = pm.createReference(exportScene, namespace=('foo%02d' % i))
        # offset each newly created reference:
        # list all the nodes in the new reference; the first in the list
        # will be the 'newGroup' node.
        allRefNodes = ref.nodes()
        print("moving", allRefNodes[0])
        allRefNodes[0].tx.set(2 * i)

    # print out some information about our newly created references
    allRefs = pm.listReferences()
    for r in allRefs:
        print(r.namespace, r.refNode, r.withCopyNumber())

    # the namespace property of the FileReference class can be used to set
    # the namespace as well as to get it.
    allRefs[2].namespace = 'super'

    # but if we have to change the namespace of the objects after they
    # have been imported there is a different, albeit, more complicated way
    ns = allRefs[0].namespace
    allRefs[0].importContents()

    # heres one way to change the namespace
    try:
        pm.namespace(add='bar')
    except Exception:
        # the 'bar' namespace may already exist; best-effort only.
        # (was a bare except, which would also have swallowed
        # KeyboardInterrupt/SystemExit)
        pass
    for node in pm.ls(ns + ':*', type='transform'):
        newname = node.swapNamespace('bar')
        print("renaming %s to %s" % (node, newname))
        node.rename(newname)

    # unload the other one
    allRefs[1].unload()