Code example #1
    def setUpClass(cls):
        standalone.initialize('usd')

        # Stage with simple (non-nested) instancing.
        mayaFile = os.path.abspath('InstancedShading.ma')
        cmds.file(mayaFile, open=True, force=True)

        usdFilePath = os.path.abspath('InstancedShading.usda')
        cmds.loadPlugin('pxrUsd')
        cmds.usdExport(mergeTransformAndShape=True, file=usdFilePath,
                shadingMode='displayColor', exportInstances=True,
                materialsScopeName='Materials',
                exportCollectionBasedBindings=True,
                exportMaterialCollections=True,
                materialCollectionsPath="/World")

        cls._simpleStage = Usd.Stage.Open(usdFilePath)

        # Stage with nested instancing.
        mayaFile = os.path.abspath('NestedInstancedShading.ma')
        cmds.file(mayaFile, open=True, force=True)

        usdFilePath = os.path.abspath('NestedInstancedShading.usda')
        cmds.loadPlugin('pxrUsd')
        cmds.usdExport(mergeTransformAndShape=True, file=usdFilePath,
                shadingMode='displayColor', exportInstances=True,
                materialsScopeName='Materials',
                exportCollectionBasedBindings=True,
                exportMaterialCollections=True,
                materialCollectionsPath="/World")

        cls._nestedStage = Usd.Stage.Open(usdFilePath)
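A minimal sketch of the kind of check a test method might then run against cls._simpleStage or cls._nestedStage (assuming the same pxr Usd bindings used above):

from pxr import Usd

def countInstances(stage):
    """Return how many prims on the stage are flagged as instances."""
    # prims inside instances are only reachable through their prototypes, so
    # counting IsInstance() hits is enough to confirm instancing survived export
    return sum(1 for prim in stage.Traverse() if prim.IsInstance())

# hypothetical usage inside a test of the class above:
# self.assertGreater(countInstances(self._simpleStage), 0)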
Code example #2
File: gui.py  Project: mkolar/Tapp
    def add_pushButton_released(self):

        msg = 'Do you want to add an empty row,'
        msg += ' or open a file to fill in the row?'
        addRow = cmds.confirmDialog(title='Cameras', message=msg,
                                    button=['Empty', 'Open File'])

        if addRow == 'Open File':

            filePath = QtGui.QFileDialog.getOpenFileName(self,
                                                         'Open Maya File',
                                                         '../..',
                                                         'Maya File (*.ma)')
            filePath = filePath[0]
            if filePath:
                utils.SavePrompt()
                cmds.file(filePath, open=True, force=True)

                cameras = []
                for cam in pm.ls(type='camera'):
                    if not cam.orthographic.get():
                        cameras.append(str(cam.getParent()))

                cameras.append('Close')
                msg = 'Select camera to add.'
                camera = cmds.confirmDialog(title='Cameras', message=msg,
                                            button=cameras)

                self.addRow(filePath, camera)
        else:
            self.addRow('', '')
Code example #3
File: reference.py  Project: RiggingDojoAdmin/glTools
def replaceReference(refNode,refPath):
	'''
	Replace the reference file path for a specified reference node.
	@param refNode: Reference node to replace file path for
	@type refNode: str
	@param refPath: New reference file path
	@type refPath: str
	'''
	# Check reference node
	if not isReference(refNode):
		raise Exception('Object "'+refNode+'" is not a valid reference node!')
	
	# Check reference file
	if getReferenceFile(refNode,withoutCopyNumber=True) == refPath:
		print ('Reference "'+refNode+'" already referencing "'+refPath+'"!')
		return
	
	# Get file type
	refType = ''
	if refPath.endswith('.ma'): refType = 'mayaAscii'
	elif refPath.endswith('.mb'): refType = 'mayaBinary'
	else: raise Exception('Invalid file type! ("'+refPath+'")')
	
	# Replace reference
	mc.file(refPath,loadReference=refNode,typ=refType,options='v=0')
	
	# Return result
	return refPath
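A minimal usage sketch for replaceReference; the reference node name and file path below are hypothetical, and the call relies on the module's own isReference/getReferenceFile helpers being available:

import maya.cmds as mc

refNode = 'charA_RN'                        # hypothetical reference node in the scene
newPath = 'D:/assets/charA/charA_v002.mb'   # hypothetical replacement file
replaceReference(refNode, newPath)          # reloads the reference from the new path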
Code example #4
	def __init__(self) :

		# create a promptDialog for the base group name of our mesh; this helps avoid
		# name conflicts. It may be worth extending this at some stage to check whether
		# the mesh already exists and prompt to replace the data / key
		result = cmds.promptDialog(
			title='Name',
			message='Enter Name for import',
			button=['OK', 'Cancel'],
			defaultButton='OK',
			cancelButton='Cancel',
			dismissString='Cancel')

		# if ok was pressed lets process the data
		if result == 'OK':
			# first we get the text entered by the user
			self.m_text = cmds.promptDialog(query=True, text=True)
			# now get the obj file to import (fileDialog2 returns a list of paths)
			self.m_objFileName = cmds.fileDialog2(caption="Please select obj file to import", fileFilter="*.obj", fm=1)

			cmds.file(self.m_objFileName[0], i=True, type="OBJ", ns=self.m_text)
			# now the xml file
			basicFilter = "*.xml"
			self.m_pointBakeFile=cmds.fileDialog2(caption="Please select xml file to import",fileFilter=basicFilter, fm=1)
			# select the object imported
			print self.m_pointBakeFile
			cmds.select("%s:Mesh"%(self.m_text))
			# and pass control back to the parser
			parser = xml.sax.make_parser()
			parser.setContentHandler(ParseHandler("%s:Mesh"%(self.m_text)))
			parser.parse(open(str(self.m_pointBakeFile[0]),"r"))
Code example #5
File: rcFileManager.py  Project: RobRuckus/rcTools
def exportL2F():#EXPORT RL 2 FILES
	#list ON LAYERS
	onlayers=[] 
	for each in getSceneData()['layers']:
		if mc.getAttr(str(each)+'.renderable'):
			if not ':' in each:
				onlayers.append(each)       
	fileNames=[]
	for each in l2fOutputFiles():
		fileNames.append(l2fOutputFolder()+each)
		
	#PROCEDURE
	if mc.getAttr('renderLayerManager.enableSingleFileName')==True:#IF SINGLE FILE OPTION 
		try:
			mc.file(os.path.normpath(fileNames[0]),ea=1,typ='mayaBinary')
		finally:
			print 'OUTPUT FILE:\n'+ os.path.normpath(fileNames[0])
	else:#MULTI FILE OPTION
		progress=1/len(onlayers)
		for each in onlayers: mc.setAttr(str(each)+'.renderable',0) #TURN OFF ON
		#MULTIPLE EXPORT
		mc.progressWindow(t='Saving..',min=0,max=len(onlayers),pr=progress,st='Copying\n'+onlayers[0])
		try:
			for index,each in enumerate(onlayers): #SEQUENTIALLY TURN ON, EXPORT, THEN TURN OFF
				mc.setAttr(str(each)+'.renderable',1)
				mc.file(os.path.normpath(fileNames[index]),ea=1,typ='mayaBinary')
				print 'OUTPUT FILE:'+ os.path.normpath(os.path.join(l2fOutputFolder()+each+'.mb'))
				progress=progress+1
				mc.progressWindow(e=1,pr=progress,st='Save Success \n'+ each)  
				mc.setAttr(str(each)+'.renderable',0)
		finally: mc.progressWindow(ep=1)        
		for each in onlayers: mc.setAttr(str(each)+'.renderable',1)#TURN BACK ON ONLAYERS
Code example #6
File: shaderLibrary.py  Project: skarone/PipeL
	def publish(self, makePreview = False, tags = '', notes = '' ):
		"""export shader to library"""
		shad = mn.Node( self.name )
		mc.select( shad.shader, ne = True )
		exportPath = self.path + self.name + '.ma'
		if self.published: #Make a bakup
			self.saveVersion()
			print 'make a f*****g backup!'
		else:
			os.makedirs( self.path )
		self._savedata( tags, notes, None )
		n = mn.Node( self.name )
		"""
		if not n.a.id.exists:
			n.a.id.add()
			print self.id
			n.a.id.v = self.id
		if not n.a.category.exists:
			n.a.category.add()
			n.a.category = self.category
		if not n.a.path.exists:
			n.a.path.add()
			n.a.path.v = exportPath
		"""
		mc.file( exportPath , force = True, options = "v=0", typ = "mayaAscii", pr = True, es = True )
		self.settexturespath( self.path + 'Textures/', exportPath )
		previewtime = 0
		if makePreview:
			print 'creating preview'
			previewtime = self.preview()
Code example #7
    def _do_maya_post_publish(self, work_template, progress_cb, user_data):
        """
        Do any Maya post-publish work

        :param work_template:   The primary work template used for the publish
        :param progress_cb:     Callback to be used when reporting progress
        :param user_data:       A dictionary containing any data shared by other hooks run prior to
                                this hook. Additional data may be added to this dictionary that will
                                then be accessible from user_data in any hooks run after this one.
        """        
        import maya.cmds as cmds
        
        progress_cb(0, "Versioning up the scene file")
        
        # get the current scene path:
        scene_path = os.path.abspath(cmds.file(query=True, sn=True))
        
        # increment version and construct new file name:
        progress_cb(25, "Finding next version number")
        fields = work_template.get_fields(scene_path)
        next_version = self._get_next_work_file_version(work_template, fields)
        fields["version"] = next_version 
        new_scene_path = work_template.apply_fields(fields)
        
        # log info
        self.parent.log_debug("Version up work file %s --> %s..." % (scene_path, new_scene_path))
        
        # rename and save the file
        progress_cb(50, "Saving the scene file")
        cmds.file(rename=new_scene_path)
        cmds.file(save=True)
        
        progress_cb(100)
Code example #8
File: __init__.py  Project: Italic-/maya-scripts
def purge_data(arg=None):
    """Purge all data. Tag scene as modified."""
    log.debug("Purging global data...")
    pmc.fileInfo("CMan_data", "")
    _CMan.clear_list()
    cmds.file(modified=True)
    log.warning("Purge complete")
Code example #9
 def setUp(self):
     cmds.file(newFile=True,f=True)
     
     self.sphere = cmds.polySphere()
     self.loc1 = cmds.spaceLocator()[0]
     self.loc2 = cmds.spaceLocator()[0]
     self.joint = cmds.joint()        
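A sketch of how a fixture like this is usually driven, assuming the standard unittest module and a batch (maya.standalone) session; the class name is hypothetical:

import unittest
import maya.standalone
import maya.cmds as cmds

class SceneFixtureTest(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        # start a batch Maya session once for the whole test case
        maya.standalone.initialize(name='python')

    def setUp(self):
        # every test starts from a fresh, empty scene
        cmds.file(newFile=True, force=True)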
Code example #10
	def saveToNW(self, *pArgs):
		self.nameOfCurr = self.getNameOfFile()
		self.filepathnew = os.path.dirname(self.nameOfCurr)
		versionslocal = []
		matchObj = re.match( r'.*\\(.+)\..+', self.nameOfCurr, re.M|re.I)
		if matchObj:
			substringForName=matchObj.group(1)[:-5]
		for f in os.listdir(self.filepathnew):
			if substringForName in f:
				addThis = f[-6:-3]
				versionslocal.append(addThis)
		#getting the number part of the string and adding that to versionList
		versionslocal = sorted(versionslocal)
		theversionToOpen = versionslocal[len(versionslocal)-1]
		temp = str((int(theversionToOpen)+1)).zfill(3)
		#incrementing version number and then converting back to a string
		subs = self.nameOfCurr.replace(self.nameOfCurr[-6:-3], temp)
		cmds.file(rename = subs)
		cmds.file(save = True)
		dept = cmds.optionMenu("deptList", query=True, value=True)
		scene = cmds.optionMenu("sceneList", query=True, value=True)
		print dept, scene
		self.comboChBgLoad(dept, scene)
		#to reload all other combo boxes according to the new information
		self.makeSaveVisible()
Code example #11
    def __init__(self, parent=getMayaWindow()):
        super(BasicDialog, self).__init__(parent)

        self.sceneDirectory=(cmds.file(q=1, sn=1)).rsplit('/', 1)[0]
        self.mayaScene=(cmds.file(q=1, sn=1)).rsplit('/', 1)[-1]
        
        self.setWindowTitle("lightSnapshot")
        self.shapeTypeCB=QtGui.QComboBox(parent=self)
        #self.populateCB()
       
        #createWidgets
        self.timelbl=QtGui.QLabel("description", parent=self)                 
        self.scenelbl=QtGui.QLabel("scene", parent=self)
        self.loadBtn=QtGui.QPushButton("Load")
        self.saveBtn=QtGui.QPushButton("Save")

        #Layout Widgets
        actionLayout = QtGui.QBoxLayout(QtGui.QBoxLayout.LeftToRight, self)
        actionLayout.addWidget(self.shapeTypeCB)

        actionLayout.addWidget(self.timelbl)
        actionLayout.addWidget(self.scenelbl)
        actionLayout.addWidget(self.loadBtn)          
        actionLayout.addWidget(self.saveBtn)
        
        self.populateCB()
        self.changeLabels()
          
        #Connecting Signals
        self.connect(self.shapeTypeCB, QtCore.SIGNAL("currentIndexChanged(int)"), self.changeLabels)           
        self.connect(self.saveBtn, QtCore.SIGNAL("clicked()"), self.saveButton)
        self.connect(self.loadBtn, QtCore.SIGNAL("clicked()"), self.loadAttr)
Code example #12
def referenceHeroPages():
	mc.file(
		"X:/Projects/GREY11_ANM71_Rewe_Starzone/GR11A71_Shots/GR11A71_Animatic/Animatic_Maya/scenes/05_Rigging/GR11A71_heroPages_Rigging_v002_tR.mb",
		reference=1,
		r=True,
		namespace="hp"
		)
Code example #13
def publish():
	# Import Modules
	import shutil

	# Get SceneName and Root
	fullName = cmds.file(sceneName=True, q=True)
	paths = fullName.split("/")

	taskName = paths[-2].split("_")[2]
	assetCode = paths[-2].split("_")[1]
	assetName = Assets.getFullName(assetCode)

	outFolder =  "/".join(paths[:-1]) + "/" + assetCode + "_" + taskName + "_OUT"
	outName = assetName + "_" + taskName

	cmds.file( save=True, type='mayaAscii' )					# Save File
	shutil.copy2(fullName, outFolder + "/" + outName + ".ma")	# Copy File to MASTER
	cmds.warning("[Kroentlied Pipeline] Published !")

	# Copy File to BackUp
	oldFolder = outFolder + "/" + assetCode + "_" + taskName + "_OUT_OLD"
	backup = VersionControl.getLatest(oldFolder, 1)

	if not backup:	# No Backup found yet
	    backup = outName + "_BackUp_v001.ma"

	shutil.copy2(fullName, oldFolder + "/" + backup)
	print "[Kroentlied Pipeline] PublishBackup: " + backup
	return
Code example #14
def open_scene(file_path, dir_path, all_process):
    # check if scene need saving
    new_scene = mel.eval('saveChanges("file -f -new")')
    if new_scene:
        # print('Opening: ' + file_path)
        set_workspace(dir_path, all_process)
        cmds.file(file_path, o=True)
Code example #15
 def importAndActivate(self, active=True):
     '''
     If self was instantiated with filepath then this will import that wav
     into Maya and activate it on the timeline. Note that if there is already
     an instance of a sound node in Maya that points to this path then the
     class will bind itself to that node.
     
     :param active: do we set the imported audio to be active on the timerange in Maya
     
     >>> # example of use:
     >>> audio = r9Audio.AudioNode(filepath = 'c:/my_audio.wav')
     >>> audio.importAndActivate()
     '''
     if not self.isLoaded:
         a=cmds.ls(type='audio')
         cmds.file(self.path, i=True, type='audio', options='o=0')
         b=cmds.ls(type='audio')
   
         if not a == b:
             self.audioNode = (list(set(a) ^ set(b))[0])
         else:
             matchingnode = [audio for audio in a if cmds.getAttr('%s.filename' % audio) == self.path]
             if matchingnode:
                 self.audioNode = matchingnode[0]
             else:
                 log.warning("can't find match audioNode for path : %s" % self.path)
                 return
         self.isLoaded=True
     else:
         log.info('given Audio Path is already loaded in the Maya Scene')
     if active:
         self.setActive()
Code example #16
    def presetSave(self):
        currentSelection = cmds.ls(sl=True)
        newPresetName = self.presetName.text()

        if not newPresetName == "":
            print self.allElements.isChecked()
            if self.allElements.isChecked():
                elementsToSave = self.getVrayElements()
            else:
                elementsToSave = mel.eval("treeView -query -selectItem listAdded")

            if elementsToSave:
                cmds.select(elementsToSave)
                print "saving %s " % elementsToSave
                cmds.file("%s%s" % (presetPath, newPresetName), force=True, options="v=0", type="mayaBinary", pr=True, es=True)
                self.findPresets()

            self.presetName.clear()

            if currentSelection:
                cmds.select(currentSelection)
            else:
                cmds.select(d=True)
        else:
            print "please give the preset a name before saving"
Code example #17
    def setUp(self):
        cmds.file(os.path.abspath('UsdExportColorSetsTest.ma'),
                        open=True,
                        force=True)

        self._colorSetSourceMesh = self._GetCubeMayaMesh(
            'ColorSetSourceCubeShape')
Code example #18
File: bedazzle.py  Project: ahamburger/bedazzler
def makeGem(size):
	#import gem
	if not cmds.objExists('gem'):
		cmds.file("gem.ma", i=True)

	cmds.select('gem')
	cmds.xform(s=(size,size,size))
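A minimal usage sketch for makeGem; the size value is arbitrary, and 'gem.ma' must be resolvable from the current project for the import above to succeed:

import maya.cmds as cmds

makeGem(0.5)                        # imports 'gem.ma' on the first call, then scales it
print cmds.getAttr('gem.scale')[0]  # -> (0.5, 0.5, 0.5)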
Code example #19
File: libGeo.py  Project: pritishd/PKD_Tools
    def export_hierarchy_obj(self):
        """Export the individual meshes in the hierarchy"""
        file_info = {}
        # Reverse the geo list so that the deepest geo is deleted first in case there is a geo inside geo
        geo_list = self.geo_list
        geo_list.reverse()
        for self.current_target in geo_list:
            pm.delete(self.current_target, ch=1)
            parent = pm.listRelatives(self.current_target, parent=True)
            pm.parent(self.current_target, w=True)
            pm.select(self.current_target)
            path = libFile.linux_path(libFile.join(self.export_dir, self.current_target + ".obj"))
            # Export the current target as an OBJ (requires the objExport plugin)
            cmds.file(path,
                      pr=1,
                      typ="OBJexport",
                      force=1,
                      options="groups=0;ptgroups=0;materials=0;smoothing=0;normals=0",
                      es=1)
            file_info[self.current_target] = path
            logger.info("Exporting\n%s" % file_info[self.current_target])
            if not self.new_scene and self.cleansing_mode:
                pm.delete(self.current_target)
                pm.refresh()
            else:
                pm.parent(self.current_target, parent)

            self.update_progress()

        # Write the geo file_info
        self.geo_file_info = file_info
Code example #20
File: libGeo.py  Project: pritishd/PKD_Tools
 def import_hierarchy_geo(self):
     """Import all the obj objects"""
     file_info = self.geo_file_info
     for self.current_target in file_info.keys():
         cmds.file(file_info[self.current_target],
                   rpr="PKD_Temp",
                   i=1,
                   type="OBJ",
                   loadReferenceDepth="all",
                   ra=True,
                   mergeNamespacesOnClash=False,
                   options="mo=1")
         # Delete Existing geo if it exists
         if not self.cleansing_mode:
             if pm.objExists(self.current_target):
                 pm.delete(self.current_target)
         logger.info("Importing\n%s" % file_info[self.current_target])
         if self.cleansing_mode:
             os.remove(file_info[self.current_target])
         for top in pm.ls(assemblies=True, ud=True):
             if top.getShape():
                 if top.getShape().type() == "mesh" and top.name() == "PKD_Temp_Mesh":
                     top.rename(self.current_target)
                     pm.select(self.current_target)
                     mel.eval("polySetToFaceNormal")
                     mel.eval("polySoftEdge -a 180 -ch 1 %s" % self.current_target)
                     pm.delete(self.current_target, ch=1)
                     pm.refresh()
         self.update_progress()
Code example #21
File: pk_rig_window.py  Project: PavelCrow/pkRig
	def create_module(self, moduleType):
		# new module dialog
		moduleName, ok = QtGui.QInputDialog().getText(self, 'Add ' + moduleType + ' Module', 'Enter module name:', QtGui.QLineEdit.Normal, moduleType)

		if ok and moduleName != "":

			# If a module with this name already exists
			if cmds.objExists(moduleName+":main"):
				QtGui.QMessageBox.information(self, "Warning", "This module already exists.")
			else:
				# add module to list
				item = QtGui.QListWidgetItem(moduleName)
				self.modules_listWidget.addItem(item)
				self.modules_listWidget.setCurrentItem(item)
				
				# import module  
				cmds.file("G:/Projects New/AnimaCord/pk_rig/%s/%s_rig.mb" %(moduleType,moduleType), r=True, type="mayaBinary", namespace=moduleName)
				cmds.file("G:/Projects New/AnimaCord/pk_rig/%s/%s_rig.mb" %(moduleType,moduleType), importReference=True )
				cmds.parent(moduleName+":main", characterRoot)
				
				# set module name
				cmds.setAttr(moduleName+":main.moduleName", moduleName, type="string")
				
				cmds.hide(moduleName+":controls")
				cmds.select(moduleName+":main_poser")
				
				self.update_modules_list()
Code example #22
    def addCharacter(self, close, *args):
        project = cmds.optionMenu(self.widgets["project"], q=True, value=True)
        selectedCharacter = cmds.textScrollList(self.widgets["characterList"], q=True, si=True)[0]
        rigPath = os.path.join(
            self.mayaToolsDir, "General", "ART", "Projects", project, "AnimRigs", selectedCharacter + ".mb"
        )
        # find existing namespaces in scene
        namespaces = cmds.namespaceInfo(listOnlyNamespaces=True)
        # reference the rig file
        cmds.file(
            rigPath, r=True, type="mayaBinary", loadReferenceDepth="all", namespace=selectedCharacter, options="v=0"
        )
        # clear selection and fit view
        cmds.select(clear=True)
        cmds.viewFit()
        panels = cmds.getPanel(type="modelPanel")
        # turn on smooth shading
        for panel in panels:
            editor = cmds.modelPanel(panel, q=True, modelEditor=True)
            cmds.modelEditor(editor, edit=True, displayAppearance="smoothShaded", displayTextures=True, textures=True)
        # find new namespaces in scene (this is here in case I need to do something later and I need the new name that was created)
        newCharacterName = selectedCharacter
        newNamespaces = cmds.namespaceInfo(listOnlyNamespaces=True)
        for name in newNamespaces:
            if name not in namespaces:
                newCharacterName = name
        # launch UI
        import ART_animationUI

        reload(ART_animationUI)
        ART_animationUI.AnimationUI()
        if close:
            cmds.deleteUI(self.widgets["window"])
Code example #23
    def _do_maya_post_publish(self, work_template, progress_cb):
        """
        Do any Maya post-publish work

        :param work_template:   The primary work template used for the publish
        :param progress_cb:     Callback to be used when reporting progress
        """        
        import maya.cmds as cmds
        
        progress_cb(0, "Versioning up the scene file")
        
        # get the current scene path:
        scene_path = os.path.abspath(cmds.file(query=True, sn=True))
        
        # increment version and construct new file name:
        progress_cb(25, "Finding next version number")
        fields = work_template.get_fields(scene_path)
        next_version = self._get_next_work_file_version(work_template, fields)
        fields["version"] = next_version 
        new_scene_path = work_template.apply_fields(fields)
        
        # log info
        self.parent.log_debug("Version up work file %s --> %s..." % (scene_path, new_scene_path))
        
        # rename and save the file
        progress_cb(50, "Saving the scene file")
#         cmds.file(rename=new_scene_path)
        cmds.file(save=True)
        
        progress_cb(100)
Code example #24
def importCloth(filePath):
    try:
        spaceName = ''
        spaceName = filePath.rsplit('/', 1 )[1].split('.')[0]
        cmds.file ( filePath, i=1, type='mayaAscii', ra=True, namespace=spaceName, options='v=0', pr=1, loadReferenceDepth='all' )
    except:
        pass
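A minimal usage sketch for importCloth; the path is hypothetical, and because the namespace is taken from the file name this import would land under a 'shirt_v003' namespace. The bare except above swallows failures, so checking for the namespace afterwards is a useful sanity check:

import maya.cmds as cmds

importCloth('P:/show/assets/cloth/shirt_v003.ma')   # hypothetical path
if not cmds.namespace(exists='shirt_v003'):
    cmds.warning('cloth import appears to have failed')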
Code example #25
File: examples.py  Project: 2gDigitalPost/tactic_src
    def execute(my):

        # get the sobject passed in
        sobject = my.get_input_value('sobject')
        code = sobject.get('code')
        search_key = my.get_package_value("search_key")

        # get the designated local directory to put temporary files
        tmp_dir = my.get_package_value("local_dir")
        path = "%s/%s.ma" % (tmp_dir, code)

        context = my.get_package_value("asset_context")
        # FIXME: ignore subcontext for now
        #subcontext = my.get_package_value("asset_sub_context")
        #if subcontext:
        #    context = "%s/%s" % (context, subcontext)

        # save out the file
        cmds.file( rename=path)
        cmds.file( save=True, type='mayaAscii')

        # checkin the file that was just saved
        my.server.upload_file(path)
        snapshot = my.server.simple_checkin(search_key, context, path)

        # add a mock dependency
        snapshot_code = snapshot.get("code")
        my.server.add_dependency(snapshot_code, "C:/tt.pdf")
Code example #26
    def testExportWithKindFlag(self):
        """
        Tests exporting a Maya file with no USD_kind custom attributes
        and using the usdExport -kind flag.
        """
        cmds.file(os.path.abspath('KindTest.ma'), open=True, force=True)
        cmds.loadPlugin('pxrUsd')

        usdFilePath = os.path.abspath('KindTest.usda')

        # Check the error mark; this ensures that we actually got a Tf error
        # (that was eventually converted into a Maya error, which Maya raises
        # in Python as a RuntimeError).
        mark = Tf.Error.Mark()
        mark.SetMark()
        with self.assertRaises(RuntimeError):
            cmds.usdExport(mergeTransformAndShape=True,
                           file=usdFilePath,
                           kind='assembly')
        errors = mark.GetErrors()
        self.assertEqual(len(errors), 1)
        self.assertIn(
            "</KindTest> has kind 'assembly', which is derived from 'assembly'",
            str(errors[0]))

        cmds.usdExport(mergeTransformAndShape=True,
            file=usdFilePath,
            kind='fakeKind')
        stage = Usd.Stage.Open(usdFilePath)
        self.assertTrue(stage)

        rootPrim = stage.GetPrimAtPath('/KindTest')
        self.assertTrue(Kind.Registry().IsA(Usd.ModelAPI(rootPrim).GetKind(),
                'fakeKind'))
Code example #27
    def testExportWithKindAttrAndKindFlag(self):
        """
        Tests exporting a Maya file with both USD_kind custom attributes and
        using the usdExport -kind flag; there should be an error if the USD_kind
        is not derived from the kind specified in the -kind flag.
        """
        cmds.file(os.path.abspath('KindTestUsdKindAttr.ma'), open=True, force=True)
        cmds.loadPlugin('pxrUsd')

        usdFilePath = os.path.abspath('KindTestUsdKindAttr.usda')
        with self.assertRaises(RuntimeError):
            cmds.usdExport(mergeTransformAndShape=True,
                           file=usdFilePath,
                           kind='assembly')

        cmds.usdExport(mergeTransformAndShape=True,
            file=usdFilePath,
            kind='model')
        stage = Usd.Stage.Open(usdFilePath)
        self.assertTrue(stage)

        rootPrim = stage.GetPrimAtPath('/KindTest')
        self.assertTrue(Kind.Registry().IsA(Usd.ModelAPI(rootPrim).GetKind(),
                'component'))
        rootPrim2 = stage.GetPrimAtPath('/KindTest2')
        self.assertTrue(Kind.Registry().IsA(Usd.ModelAPI(rootPrim2).GetKind(),
                'assembly'))
Code example #28
File: minime.py  Project: sanfx/pythonScripts
        def openSelectedCallback(self,*args):
                """
                This method is called on double click selected item in textscrollList
                """
                #try block also saves from error caused by directories that we do not have rights to access.
                cMsg="Selected scene file is already open. Do you want to reload it?\n If you reload all changes you made since last save will be lost."
                try:
                        cmds.button('upBtn',edit=True,enable=True)
                        self.selectedItem=str(os.path.join(cmds.textField('location',q=True,tx=True),str(cmds.textScrollList('fileLister',q=True,si=True)[0]).split(" > ")[0])).replace("/","\\")

                        if (self.selectedItem.endswith("ma")) or (self.selectedItem.endswith("mb")):
                                print "selected Item: %s\n scene Open: %s"%(self.selectedItem , cmds.file(q=True,sn=True))
                                if self.selectedItem==cmds.file(q=True,sn=True).replace("/","\\"):
                                        print "Same Scene"
                                        result=cmds.confirmDialog(t='Warning',m=cMsg,button=['Yes','No'], defaultButton='Yes', cancelButton='No', dismissString='No' )
                                        if result== "Yes":
                                                state= cmds.file(self.selectedItem,open=True, f=True)
                                else:
                                        print"Saved scene file %s"%os.path.basename(cmds.file(q=True,sn=True))
##                      if not cmds.file(q=True,save=True):
##                         cmds.file(save=True)
                                        state= cmds.file(self.selectedItem,open=True, f=True)
                        else:
                                if os.path.isdir(self.selectedItem):
                                        cmds.textField('location',edit=True,tx=self.selectedItem)
                                        self.populatescrollList(self.selectedItem.replace("/","\\"))

                except Exception as e:
                        print e
Code example #29
    def testExportWithAssemblyAndMesh(self):
        """
        Tests exporting a Maya file with a root prim containing an assembly
        and a mesh.
        """
        cmds.file(os.path.abspath('KindTestAssemblyAndMesh.ma'), open=True,
                force=True)
        cmds.loadPlugin('pxrUsd')

        # Should fail due to the mesh.
        usdFilePath = os.path.abspath('KindTestAssemblyAndMesh.usda')
        with self.assertRaises(RuntimeError):
            cmds.usdExport(mergeTransformAndShape=True,
                           file=usdFilePath,
                           kind='assembly')

        # Should be 'component' because of the mesh
        usdFilePath = os.path.abspath('KindTestAssemblyAndMesh.usda')
        cmds.usdExport(mergeTransformAndShape=True,
            file=usdFilePath)
        stage = Usd.Stage.Open(usdFilePath)
        self.assertTrue(stage)

        rootPrim = stage.GetPrimAtPath('/KindTest')
        self.assertTrue(Kind.Registry().IsA(Usd.ModelAPI(rootPrim).GetKind(),
                'component'))
Code example #30
 def setup(self):
     cmds.file(new=True,f=True)
     r9Meta.MetaClass(name='MetaClass_Test')
     r9Meta.MetaRig(name='MetaRig_Test')
     r9Meta.MetaRigSupport(name='MetaRigSupport_Test')
     r9Meta.MetaFacialRig(name='MetaFacialRig_Test')
     r9Meta.MetaFacialRigSupport(name='MetaFacialRigSupport_Test')
Code example #31
    def importMap(
        self,
        map_dest,
        xmodel_folder,
        mapname,
        ):
 
        # Load Model Properties
        json_models = open(map_dest + "_xmodels.json") 
        modeldata = json.load(json_models)

        # Model Count
        curAmount = 1
        badModels = []

        # Create groups for the loaded data
        cmds.group(em=True, name='xmodels')
        cmds.group(em=True, name=mapname + '_group')
        cmds.group(em=True, name='mapGeoemtry')
 
        # Import the map .obj from the Husky folder
        cmds.file(map_dest + '.obj', i=True)
 
        # Create a list of all geometry in the scene
        MapList = cmds.ls(geometry=True)
 
        # Parent each geometry into the 'mapGeoemtry' group (no xmodels)
        for m in MapList:
            cmds.parent(m, 'mapGeoemtry')
 
        # Create .txt file with all bad models
        f_badmodels = open(map_dest + "_badModels.txt", "w")
        
        # Read XModels data
        for XModel in modeldata:
            # Load current XModel data from JSON
            if 'Name' in XModel:    
                self.addXModel(xmodel_folder, XModel, 1, badModels)

 
            # Loading progress
            print("loaded " + str(curAmount) + " of " + str(len(modeldata)))
            curAmount += 1
            #reporter = mel.eval('string $tmp = $gCommandReporter;')
            #cmds.cmdScrollFieldReporter(reporter, e=True, clear=True

        # Write all bad imports to file
        for b in badModels:
            f_badmodels.write(b)
        f_badmodels.close()
			
        # Delete all corrupted models & joints
        for o in cmds.ls(mat = False):
            if "|" not in o:
                if "Joints" in o or "LOD" in o:
                    try:
                        cmds.delete(o)
                    except:
                        print('')

        # Rescale mapGeo
        cmds.scale(0.3937007874015748,
                   0.3937007874015748,
                   0.3937007874015748,
                   'mapGeoemtry', absolute=True)
 
        # Group both xmodels & mapGeo to one final group
        cmds.parent('xmodels', mapname + '_group')
        cmds.parent('mapGeoemtry', mapname + '_group')
        cmds.polyColorSet( delete= True, colorSet= 'colorSet1')

        # Rescale the map to 0.01 because it's too damn huge
        cmds.scale(0.01, 0.01, 0.01, mapname + '_group', absolute=True)
 
        # Success
        print('imported %i models' % (len(modeldata)-(len(badModels))))
        print('%i corrupted models' % len(badModels))
Code example #32
File: app.py  Project: all-in-one-of/lsapipeline
    def fetchAllShaders(self, tk):        
        """
        Function to handle fetching the shaders
        """
        inprogressBar = pbui.ProgressBarUI(title = 'Building Shaders for All Assets:')
        inprogressBar.show()
        if self.mcLoaded:
            inprogressBar.updateProgress(percent = 5, doingWhat = 'Processing scene info...')      
            scene_path = '%s' % os.path.abspath(cmds.file(query=True, sn= True))
            debug(self.app, method = 'fetchAllShaders', message = 'scene_path... %s' % scene_path, verbose = False)
            
            ## Build an entity type to get some values from.
            entity = self.app.context.entity                                                                                    ## returns {'type': 'Shot', 'name': 'ep100_sh010', 'id': 1166}
            debug(self.app, method = 'fetchAllShaders', message = 'entity... %s' % entity, verbose = False)
                        
            ## Filter for the matching ID for the shot
            sg_filters = [["id", "is", entity["id"]]]
            debug(self.app, method = 'fetchAllShaders', message = 'sg_filters... %s' % sg_filters, verbose = False)
            
            ## Build an entity type to get some values from.
            sg_entity_type = self.app.context.entity["type"]                                                                   ## returns Shot
            debug(self.app, method = 'fetchAllShaders', message = 'sg_entity_type...\n%s' % sg_entity_type, verbose = False)
            
            ## DATA
            ## NOTES SO HERE WE DON'T NEED TO CALL THE ASSETS FIELD FROM SHOTGUN
            ## WE CAN JUST GRAB THE LATEST PUBLISH FILE FROM EACH OF THE TEMPLATE STEPS
            inprogressBar.updateProgress(percent = 10, doingWhat = 'Processing scene info...')
            shadersTemplate = tk.templates[self.app.get_setting('maya_asset_SHD_XML_template')]
            debug(self.app, method = 'fetchAllShaders', message = 'shadersTemplate...\n%s' % shadersTemplate, verbose = False)
            
            texturesTemplate = tk.templates[self.app.get_setting('maya_asset_textures_template')]
            debug(self.app, method = 'fetchAllShaders', message = 'texturesTemplate...\n%s' % texturesTemplate, verbose = False)

            ## Now get a list of assets in the scene
            inprogressBar.updateProgress(percent = 15, doingWhat = 'Processing assets...')
            inprogressBar.updateProgress(percent = 20, doingWhat = 'Processing UV and SHD XML files...')
            assetDict = {} ## key: shotgunName  var: inSceneName
            dupAssets = {}
            for parentGrp in cmds.ls(assemblies = True, long = True):
                if cmds.ls(parentGrp, dag=True, type="mesh"):
                    for each in cmds.listRelatives(parentGrp, children = True):
                        ## Check for duplicate or base assets
                        if not cmds.objExists('%s.dupAsset' % each):
                            assetDict[each.split('_hrc')[0]] = parentGrp
                            #{assetName: parentGrpName}
                        else: # handle the duplicate naming
                            origAssetName = each.split('_hrc')[0]
                            dupAssets[each] = [origAssetName, parentGrp]
                            #{duplicateGrName : [origAssetName, parentGrpName]}
                            debug(self.app, method = 'fetchAllShaders', message = 'DUPLICATE FOUND... origAssetName: %s' % origAssetName, verbose = False)

            debug(self.app, method = 'fetchAllShaders', message = 'Assets... %s' % assetDict, verbose = False)
            debug(self.app, method = 'fetchAllShaders', message = 'Duplicate Assets... %s' % dupAssets, verbose = False)
                                      
            ## Now process SHD XML
            debug(self.app, method = 'fetchAllShaders', message = 'Processing template... %s' % shadersTemplate, verbose = False)
            self.processSHDTemplate(tk = tk, templateFile = shadersTemplate, assetDict = assetDict, selected = False)
            
            self.finalBuildStuff(False, inprogressBar)
        else:
            inprogressBar.close()
            cmds.warning("NO MENTAL CORE FOUND!")
Code example #33
rootDir = "D:\\assets\\avatars\\morph\\females"
#rootDir = "D:\\assets\\avatars\\morph\\males\\copyOfAllSizes"
objs = os.listdir(rootDir)
for obj in objs:
    objPath = rootDir + "\\" + obj
    if os.path.isdir(objPath):
        dirPath = objPath
        print "dirPath = " + dirPath
        objs2 = os.listdir(dirPath)
        for obj2 in objs2:
            obj2Path = dirPath + "\\" + obj2
            print("obj2Path = " + obj2Path)
            if os.path.isfile(obj2Path):
                if not obj2.endswith(".db"):
                    fileSize = os.path.getsize(obj2Path) * 1e-6
                    cmds.file(obj2Path, o=True, f=True)
                    cmds.select(cmds.listRelatives(cmds.ls(geometry=True,
                                                           visible=True),
                                                   p=True,
                                                   path=True),
                                r=True)
                    triCount = cmds.polyEvaluate(t=True)
                    triCount = round(triCount, 2)
                    print(obj2 + " = " + repr(round(fileSize, 2)) + "mbs")
                    av['name'] = obj2
                    av['fileSize'] = repr(round(fileSize, 2)) + "mbs"
                    av['triCount'] = triCount
            if obj2.endswith(".fbm"):
                print("In .fbm")
                av['texSize'] = getFolderSize(obj2Path)
            if obj2 == "textures":
Code example #34
    def process(self, instance):

        yeti_nodes = cmds.ls(instance, type="pgYetiMaya")
        if not yeti_nodes:
            raise RuntimeError("No pgYetiMaya nodes found in the instance")

        # Define extract output file path
        dirname = self.staging_dir(instance)
        settings_path = os.path.join(dirname, "yeti.rigsettings")

        # Yeti related staging dirs
        maya_path = os.path.join(dirname, "yeti_rig.ma")

        self.log.info("Writing metadata file")

        # Create assumed destination folder for imageSearchPath
        assumed_temp_data = instance.data["assumedTemplateData"]
        template = instance.data["template"]
        template_formatted = template.format(**assumed_temp_data)

        destination_folder = os.path.dirname(template_formatted)

        image_search_path = os.path.join(destination_folder, "resources")
        image_search_path = os.path.normpath(image_search_path)

        settings = instance.data.get("rigsettings", None)
        if settings:
            settings["imageSearchPath"] = image_search_path
            with open(settings_path, "w") as fp:
                json.dump(settings, fp, ensure_ascii=False)

        # Ensure the imageSearchPath is being remapped to the publish folder
        attr_value = {
            "%s.imageSearchPath" % n: str(image_search_path)
            for n in yeti_nodes
        }

        # Get input_SET members
        input_set = next(i for i in instance if i == "input_SET")

        # Get all items
        set_members = cmds.sets(input_set, query=True)
        set_members += cmds.listRelatives(
            set_members, allDescendents=True, fullPath=True) or []
        members = cmds.ls(set_members, long=True)

        nodes = instance.data["setMembers"]
        resources = instance.data.get("resources", {})
        with disconnect_plugs(settings, members):
            with yetigraph_attribute_values(destination_folder, resources):
                with maya.attribute_values(attr_value):
                    cmds.select(nodes, noExpand=True)
                    cmds.file(maya_path,
                              force=True,
                              exportSelected=True,
                              typ="mayaAscii",
                              preserveReferences=False,
                              constructionHistory=True,
                              shader=False)

        # Ensure files can be stored
        if "files" not in instance.data:
            instance.data["files"] = list()

        instance.data["files"].extend(["yeti_rig.ma", "yeti.rigsettings"])

        self.log.info("Extracted {} to {}".format(instance, dirname))

        cmds.select(clear=True)
Code example #35
    def __init__(self, parent=None):
        super(Form, self).__init__(parent)

        self.listItem = QListWidget()
        self.listNodes = [user for user in cm.ls(sl=True)]
        self.listItem.addItems(self.listNodes)
        self.listItem.setSelectionMode(QAbstractItemView.ExtendedSelection)

        self.searchBar = QLineEdit()
        self.searchBar.setPlaceholderText("Search in items")
        self.searchBar.textChanged.connect(self.filterList)

        self.selectFrame = QCheckBox()
        self.selectFrame.setText("Select a frame number: ")
        self.selectFrameLine = QLineEdit()
        self.selectFrameLine.setPlaceholderText("Frame")

        self.selectFrameLine.returnPressed.connect(self.printID)

        self.selectFrame.stateChanged.connect(self.selectFrameOperation)

        self.allFrames = QCheckBox()
        self.allFrames.setText("Current Time Slider (visible time range)")
        self.allFrames.stateChanged.connect(self.allFramesOperation)

        self.frameRange = QCheckBox()
        self.frameRange.setText("Frame range:")
        self.startFrame = QLineEdit()
        self.startFrame.setPlaceholderText("Start Frame")
        self.endFrame = QLineEdit()
        self.endFrame.setPlaceholderText("End Frame")
        self.frameRange.stateChanged.connect(self.frameRangeOperation)

        self.exportButton = QPushButton()
        self.exportButton.setText("Export")

        self.closeButton = QPushButton("Close")
        self.closeButton.clicked.connect(self.close)

        self.infoLabel = QLabel("")
        pathinfo = os.path.dirname(cm.file(
            q=1, sn=1)) + "/" + os.path.basename(cm.file(q=1, sn=1))
        self.infoPathLabel = QLineEdit()

        self.infoPathLabel.setText(str(pathinfo))
        self.itemName = QLabel()

        self.progressBar = QProgressBar()
        self.mainProgressBar = QProgressBar()

        self.mainLayout = QVBoxLayout()

        self.checkLayout = QVBoxLayout()

        self.selectedFrameLayout = QHBoxLayout()

        self.frameRangeLayout = QHBoxLayout()
        self.frameRangeLayout.addWidget(self.frameRange)
        self.frameRangeLayout.addWidget(self.startFrame)
        self.frameRangeLayout.addWidget(self.endFrame)

        self.checkLayout.addWidget(self.allFrames)
        self.mainLayout.addWidget(self.listItem)
        self.mainLayout.addWidget(self.infoPathLabel)
        self.mainLayout.addWidget(self.searchBar)
        self.selectedFrameLayout.addWidget(self.selectFrame)
        self.selectedFrameLayout.addWidget(self.selectFrameLine)
        self.mainLayout.addLayout(self.selectedFrameLayout)

        self.mainLayout.addLayout(self.checkLayout)
        self.mainLayout.addLayout(self.frameRangeLayout)
        self.mainLayout.addWidget(self.infoLabel)
        self.mainLayout.addWidget(self.exportButton)
        self.mainLayout.addWidget(self.closeButton)

        self.setLayout(self.mainLayout)
Code example #36
File: extract_model.py  Project: simonebarbieri/pype
    def process(self, instance):
        """Plugin entry point."""
        ext_mapping = (
            instance.context.data["project_settings"]["maya"]["ext_mapping"])
        if ext_mapping:
            self.log.info("Looking in settings for scene type ...")
            # use extension mapping for first family found
            for family in self.families:
                try:
                    self.scene_type = ext_mapping[family]
                    self.log.info("Using {} as scene type".format(
                        self.scene_type))
                    break
                except KeyError:
                    # no preset found
                    pass
        # Define extract output file path
        stagingdir = self.staging_dir(instance)
        filename = "{0}.{1}".format(instance.name, self.scene_type)
        path = os.path.join(stagingdir, filename)

        # Perform extraction
        self.log.info("Performing extraction ...")

        # Get only the shape contents we need in such a way that we avoid
        # taking along intermediateObjects
        members = instance.data("setMembers")
        members = cmds.ls(members,
                          dag=True,
                          shapes=True,
                          type=("mesh", "nurbsCurve"),
                          noIntermediate=True,
                          long=True)

        with lib.no_display_layers(instance):
            with lib.displaySmoothness(members,
                                       divisionsU=0,
                                       divisionsV=0,
                                       pointsWire=4,
                                       pointsShaded=1,
                                       polygonObject=1):
                with lib.shader(members, shadingEngine="initialShadingGroup"):
                    with avalon.maya.maintained_selection():
                        cmds.select(members, noExpand=True)
                        cmds.file(
                            path,
                            force=True,
                            typ="mayaAscii" if self.scene_type == "ma" else
                            "mayaBinary",  # noqa: E501
                            exportSelected=True,
                            preserveReferences=False,
                            channels=False,
                            constraints=False,
                            expressions=False,
                            constructionHistory=False)

                        # Store reference for integration

        if "representations" not in instance.data:
            instance.data["representations"] = []

        representation = {
            'name': self.scene_type,
            'ext': self.scene_type,
            'files': filename,
            "stagingDir": stagingdir,
        }
        instance.data["representations"].append(representation)

        self.log.info("Extracted instance '%s' to: %s" % (instance.name, path))
Code example #37
    def testExportTexturedMaterialXpPlane1(self):
        '''
        Tests that pPlane1 exported as planned:

        this plane is a basic RGB texture without any customizations.
        '''
        cmds.file(f=True, new=True)

        mayaFile = os.path.join(self._inputPath, 'UsdExportMaterialXTest',
            'StandardSurfaceTextured.ma')
        cmds.file(mayaFile, force=True, open=True)

        # Export to USD.
        usdFilePath = os.path.abspath('UsdExportMaterialXTest.usda')
        cmds.mayaUSDExport(mergeTransformAndShape=True, file=usdFilePath,
            shadingMode='useRegistry', convertMaterialsTo=['MaterialX'],
            materialsScopeName='Materials')

        stage = Usd.Stage.Open(usdFilePath)
        self.assertTrue(stage)

        # Exploring this path:
        base_path = "/pPlane1/Materials/standardSurface2SG"

        mesh_prim = stage.GetPrimAtPath('/pPlane1')
        self.assertTrue(mesh_prim)

        # Validate the Material prim bound to the Mesh prim.
        self.assertTrue(mesh_prim.HasAPI(UsdShade.MaterialBindingAPI))
        mat_binding = UsdShade.MaterialBindingAPI(mesh_prim)
        mat = mat_binding.ComputeBoundMaterial("mtlx")[0]
        self.assertTrue(mat)
        material_path = mat.GetPath().pathString
        self.assertEqual(material_path, base_path)

        # Needs a resolved inputs:file1:varnameStr attribute:
        self.assertEqual(mat.GetInput("file1:varnameStr").GetAttr().Get(), "st")

        # Needs a MaterialX surface source:
        shader = mat.ComputeSurfaceSource("mtlx")[0]
        self.assertTrue(shader)

        # Which is a standard surface:
        self.assertEqual(shader.GetIdAttr().Get(),
                         "ND_standard_surface_surfaceshader")

        # With a connected file texture on base_color going to baseColor on the
        # nodegraph:
        attr = shader.GetInput('base_color')
        self.assertTrue(attr)
        cnxTuple = attr.GetConnectedSource()
        self.assertTrue(cnxTuple)

        ng_path = base_path + '/MayaNG_standardSurface2SG'
        ng = UsdShade.NodeGraph(cnxTuple[0])
        self.assertEqual(ng.GetPath(), ng_path)
        self.assertEqual(cnxTuple[1], "baseColor")

        # Should have an outputs connected to a file node:
        attr = ng.GetOutput('baseColor')
        self.assertTrue(attr)
        cnxTuple = attr.GetConnectedSource()
        self.assertTrue(cnxTuple)

        # Which is a color3 image:
        shader = UsdShade.Shader(cnxTuple[0])
        self.assertEqual(shader.GetIdAttr().Get(), "ND_image_color3")
        self.assertEqual(shader.GetPath(), ng_path + "/file1")

        # Check a few values:
        self.assertTrue(self.compareValue(shader, "uaddressmode", "periodic"))
        self.assertTrue(self.compareValue(shader, "default", (0.5, 0.5, 0.5)))

        # Which is itself connected to a primvar reader:
        attr = shader.GetInput('texcoord')
        self.assertTrue(attr)
        cnxTuple = attr.GetConnectedSource()
        self.assertTrue(cnxTuple)

        # Which is a geompropvalue node:
        shader = UsdShade.Shader(cnxTuple[0])
        self.assertEqual(shader.GetIdAttr().Get(), "ND_geompropvalue_vector2")
        self.assertEqual(shader.GetPath(),
                         ng_path + "/place2dTexture1")

        base_path = "/pPlane{0}/Materials/standardSurface{1}SG/MayaNG_standardSurface{1}SG/{2}"
        to_test = [
            (7, 8, "file7", "ND_image_float"),
            (6, 7, "file6", "ND_image_vector2"),
            (1, 2, "file1", "ND_image_color3"),
            (4, 5, "file4", "ND_image_color4"),

            (1, 2, "place2dTexture1", "ND_geompropvalue_vector2"),

            (4, 5, "MayaSwizzle_file4_rgb", "ND_swizzle_color4_color3"),
            (6, 7, "MayaSwizzle_file6_xxx", "ND_swizzle_vector2_color3"),
            (19, 21, "MayaSwizzle_file20_x", "ND_swizzle_vector2_float"),
            (7, 8, "MayaSwizzle_file7_rrr", "ND_swizzle_float_color3"),
            (8, 9, "MayaSwizzle_file8_r", "ND_swizzle_color4_float"),
            (13, 14, "MayaSwizzle_file13_g", "ND_swizzle_color3_float"),

            (27, 20, "MayaLuminance_file27", "ND_luminance_color3"),
            (12, 13, "MayaLuminance_file12", "ND_luminance_color4"),

            (14, 15, "MayaConvert_file14_color3f_float3", 
             "ND_convert_color3_vector3"),
            (15, 16, "MayaNormalMap_standardSurface16_normalCamera",
             "ND_normalmap"),
        ]

        for prim_idx, sg_idx, node_name, id_attr in to_test:
            prim_path = base_path.format(prim_idx, sg_idx, node_name)

            prim = stage.GetPrimAtPath(prim_path)
            self.assertTrue(prim, prim_path)
            shader = UsdShade.Shader(prim)
            self.assertTrue(shader, prim_path)
            self.assertEqual(shader.GetIdAttr().Get(), id_attr, id_attr)
Code example #38
    def _do_maya_publish(self, task, work_template, comment, thumbnail_path,
                         sg_task, progress_cb):
        """
        Publish the main Maya scene

        :param task:            The primary task to publish
        :param work_template:   The primary work template to use
        :param comment:         The publish description/comment
        :param thumbnail_path:  The path to the thumbnail to associate with the published file
        :param sg_task:         The Shotgun task that this publish should be associated with
        :param progress_cb:     A callback to use when reporting any progress
                                to the UI
        :returns:               The path to the file that has been published        
        """
        import maya.cmds as cmds

        progress_cb(0.0, "Finding scene dependencies", task)
        dependencies = self._maya_find_additional_scene_dependencies()

        # get scene path
        scene_path = os.path.abspath(cmds.file(query=True, sn=True))

        if not work_template.validate(scene_path):
            raise TankError(
                "File '%s' is not a valid work path, unable to publish!" %
                scene_path)

        # use templates to convert to publish path:
        output = task["output"]
        fields = work_template.get_fields(scene_path)
        #    Edited by Vipul Rathod
        fields["Step"] = "Master"
        fields["name"] = fields["Asset"].replace("_", "") + fields["Step"]
        work_path = work_template.apply_fields(fields)
        work_dir = work_path.split(fields["name"])[0]
        work_files = []
        for fl in os.listdir(work_dir):
            if fl.endswith("ma"):
                work_files.append(fl)
        if work_files:
            if os.path.join(work_dir, max(work_files)):
                latest_master_work_file = os.path.join(work_dir,
                                                       max(work_files))
                curr_version = str(
                    latest_master_work_file.split(".ma")[0].split(".v")
                    [1]).lstrip("0")
                fields["version"] = int(curr_version) + 1
                latest_work_path = work_template.apply_fields(fields)
        else:
            fields["version"] = 1
            latest_work_path = work_template.apply_fields(fields)
            print "no work FILES"
        #    End of editing
        fields["TankType"] = output["tank_type"]
        publish_template = output["publish_template"]
        publish_path = publish_template.apply_fields(fields)

        if os.path.exists(publish_path):
            raise TankError("The published file named '%s' already exists!" %
                            publish_path)

        # save the scene:
        progress_cb(10.0, "Saving the scene")
        self.parent.log_debug("Saving the scene...")
        cmds.file(rename=latest_work_path)
        cmds.file(save=True, force=True)

        # copy the file:
        progress_cb(50.0, "Copying the file")
        try:
            publish_folder = os.path.dirname(publish_path)
            self.parent.ensure_folder_exists(publish_folder)
            self.parent.log_debug("Copying %s --> %s..." %
                                  (scene_path, publish_path))
            self.parent.copy_file(scene_path, publish_path, task)
        except Exception, e:
            raise TankError("Failed to copy file from %s to %s - %s" %
                            (scene_path, publish_path, e))
Code example #39
File: han.py  Project: xbxcw/scripts
                           step=1,
                           maxValue=len(edges))
        mc.select(b)
        mc.SewUVs()
        mc.select(nodes)


myFolder = 'D:/myFolder/'
folders = os.listdir(myFolder)
textures = []
for folder in folders:
    i = 0
    transforms = []
    obj = myFolder + folder + '/' + mc.getFileList(folder=myFolder + folder,
                                                   filespec='*.obj')[0]
    mc.file(obj, i=True, ns='wang%02d' % i)
    geometry = mc.ls(geometry=True)
    # build the shader network first, then assign it to the imported geometry
    # (the original order used 'shader' before it was created, which raises a
    # NameError on the first pass through the loop)
    shader = mc.shadingNode('blinn', asShader=True)
    file_node = mc.shadingNode('file', asTexture=True)
    mc.setAttr(file_node + '.fileTextureName',
               'D:\HKW\danrenzuoyi\sourceimages\Hanjiangfu\T_Cabinet01_B.png',
               type="string")
    mc.connectAttr('%s.outColor' % file_node, '%s.color' % shader)
    mc.select(geometry)
    mc.hyperShade(assign=shader)
    mc.FBXExport('-file', 'C:/Users/HYC/Desktop/aa/' + geometry[0])
    mc.file(new=True, force=True)

shader = mc.shadingNode('blinn', asShader=True)
file_node = mc.shadingNode('file', asTexture=True)
mc.setAttr(file_node + '.fileTextureName',
           'D:\HKW\danrenzuoyi\sourceimages\Hanjiangfu\T_Cabinet01_B.png',
Code example #40
File: app.py  Project: all-in-one-of/lsapipeline
    def fetchShadersForSelected(self, tk):
        """
        Function to handle fetching the shaders for selected _hrc groups only.
        """
        inprogressBar = pbui.ProgressBarUI(title = 'Building Shaders For Selected Assets:')
        inprogressBar.show()
        inprogressBar.updateProgress(percent = 5, doingWhat = 'Processing scene info...')      
        if self.mcLoaded:
            ## ASSIGN DEFAULT LAMBERT AND CLEAN THE HYPERSHADE!
            for each in cmds.ls(sl = True):
                try:
                    cmds.sets(each, e = True , forceElement = 'initialShadingGroup')
                except:
                    cmds.warning('FAILED to set initial Shading group for %s' % each)
                    pass
            [cmds.lockNode(cp, lock = True) for cp in cmds.ls(type = 'core_renderpass')] ## Lock all the core_renderpasses before deleting unused to preserve...
            mel.eval("MLdeleteUnused();")
            
            scene_path = '%s' % os.path.abspath(cmds.file(query=True, sn= True))
            debug(self.app, method = 'fetchShadersForSelected', message = 'scene_path... %s' % scene_path, verbose = False)
            
            ## Build an entity type to get some values from.
            entity = self.app.context.entity                                                                                    ## returns {'type': 'Shot', 'name': 'ep100_sh010', 'id': 1166}
            debug(self.app, method = 'fetchShadersForSelected', message = 'entity... %s' % entity, verbose = False)
                        
            ## Filter for the matching ID for the shot
            sg_filters = [["id", "is", entity["id"]]]
            debug(self.app, method = 'fetchShadersForSelected', message = 'sg_filters... %s' % sg_filters, verbose = False)
            
            ## Build an entity type to get some values from.
            sg_entity_type = self.app.context.entity["type"]                                                                   ## returns Shot
            debug(self.app, method = 'fetchShadersForSelected', message = 'sg_entity_type...\n%s' % sg_entity_type, verbose = False)
            
            ## DATA
            ## NOTES SO HERE WE DON'T NEED TO CALL THE ASSETS FIELD FROM SHOTGUN
            ## WE CAN JUST GRAB THE LATEST PUBLISH FILE FROM EACH OF THE TEMPLATE STEPS
            inprogressBar.updateProgress(percent = 10, doingWhat = 'Processing scene info...')
            shadersTemplate = tk.templates[self.app.get_setting('maya_asset_SHD_XML_template')]
            debug(self.app, method = 'fetchShadersForSelected', message = 'shadersTemplate...\n%s' % shadersTemplate, verbose = False)
            
            texturesTemplate = tk.templates[self.app.get_setting('maya_asset_textures_template')]
            debug(self.app, method = 'fetchShadersForSelected', message = 'texturesTemplate...\n%s' % texturesTemplate, verbose = False)
    
            ## Now get a list of assets in the scene
            inprogressBar.updateProgress(percent = 15, doingWhat = 'Processing assets...')
            inprogressBar.updateProgress(percent = 20, doingWhat = 'Processing xml...')
            assetDict = {} ## key: shotgunName  var: inSceneName
            for grp in cmds.ls(sl= True):
                if cmds.ls(grp, dag=True, type="mesh"):
                    getParent = cmds.listRelatives(grp, parent = True)
                    if getParent:
                        assetDict[grp.split('_hrc')[0]] = [cmds.listRelatives(grp, parent = True)[0], grp]
                    else:
                        assetDict[grp.split('_hrc')[0]] = ['', grp] ##make the parentGroup nothing so it paths to a root asset in the scene correctly

            debug(self.app, method = 'fetchShadersForSelected', message = 'Assets... %s' % assetDict, verbose = False)

            ## Now process XML
            debug(self.app, method = 'fetchShadersForSelected', message = 'Processing template... %s' % shadersTemplate, verbose = False)
            self.processSHDTemplate(tk = tk, templateFile = shadersTemplate, assetDict = assetDict, selected = True)
            
            self.finalBuildStuff(True, inprogressBar)

        else:
            inprogressBar.close()
            cmds.warning("NO MENTAL CORE FOUND!")
            pass
コード例 #41
0
 def setUp(self):
     MayaCmds.file(new=True, force=True)
     self.__files = []
コード例 #42
0
ファイル: weight.py プロジェクト: jeanim/SISideBar
    def main(self,
             skinMeshes,
             mode='copy',
             saveName='default',
             method='index',
             weightFile='auto',
             threshold=0.2,
             engine='maya',
             tgt=1,
             path='default'):
        '''
        Save / load function for skin weight data.
        mode -> whether to copy or paste: 'copy' or 'paste'
        saveName -> folder name the weight data is saved under; set it to keep data separate per tool or model.
        method -> how weights are pasted: 'index', 'nearest', 'barycentric' or 'over'.
        The 'index' method maps weights onto the object by vertex index. It is the most convenient method when the target object and the exported data share the same topology.
        The 'nearest' method finds the nearest vertex in the imported data and uses its weight value. It is best for mapping a high-resolution mesh onto a low-resolution mesh.
        The 'barycentric' method is supported on polygon meshes only. It finds the nearest triangle on the target geometry and
        rescales the weights according to the distance between the source point and the vertices. It is usually used for a coarse mesh that is mapped onto a high-resolution mesh.
        The 'over' method is similar to 'index', but the target mesh weights are not cleared before mapping, so weights on unmatched indices are kept as they are.

        nearest and barycentric are currently unusable due to a bug (the process never finishes), as of 2016/11/03.
        -> barycentric and bylinear are available from Maya 2016 Extension 2.

        weightFile -> path to specify the file manually instead of searching by mesh name; meant to be used together with the nearest / barycentric methods.
        -> Note that specifying a file name when copying with the Maya engine prevents saving multiple files.

        threshold -> position search range for nearest / barycentric.
        '''
        self.skinMeshes = skinMeshes
        self.saveName = saveName
        self.method = method
        self.weightFile = weightFile
        self.threshold = threshold
        self.engine = engine
        self.memShapes = {}
        self.target = tgt
        self.pasteMode = {'index': 1, 'nearest': 3}
        # リストタイプじゃなかったらリストに変換する
        if not isinstance(self.skinMeshes, list):
            temp = self.skinMeshes
            self.skinMeshes = []
            self.skinMeshes.append(temp)
        # ファイルパスを生成しておく
        if path == 'default':
            self.filePath = os.getenv(
                'MAYA_APP_DIR') + '\\Scripting_Files\\weight\\' + self.saveName
        elif path == 'project':
            self.scene_path = '/'.join(
                cmds.file(q=True, sceneName=True).split('/')[:-1])
            self.protect_path = os.path.join(self.scene_path,
                                             'weight_protector')
            try:
                if not os.path.exists(self.protect_path):
                    os.makedirs(self.protect_path)
            except Exception as e:
                print e.message
                return
            self.filePath = self.protect_path + '\\' + self.saveName
        self.fileName = os.path.join(self.filePath, self.saveName + '.json')
        self.apiName = os.path.join(self.filePath, self.saveName + '.skn')
        # コピーかペーストをそれぞれ呼び出し
        if mode == 'copy':
            self.weightCopy()
        if mode == 'paste':
            self.weightPaste()
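A minimal usage sketch of the copy/paste round trip described in the docstring above. The class and instance names here are hypothetical; in practice the method lives on the weight tool class defined in weight.py:

from maya import cmds

# Hypothetical instance name; substitute the actual weight tool class from weight.py.
tool = WeightTool()
meshes = cmds.ls(selection=True, long=True)
tool.main(meshes, mode='copy', saveName='bodyRig', method='index')   # save weights to disk
tool.main(meshes, mode='paste', saveName='bodyRig', method='index')  # load them back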
コード例 #43
0

#Vars
#-----------------------------------------
verbose = True
plugin_name = 'ocio_maya.mll'
source_dir = 'C:/symlinks/paper_sketch/OpenColorIO_tools/ocio_maya_build/x64/msvc10/maya2014x64/Release'
target_dir = 'C:/symlinks/maya/maya2014x64_plugins'
texture_name = 'enzo_v0001_tw.png' #base_color_test_image.png
texture_dir = 'C:/symlinks/temp'


#Unload plugin
#-----------------------------------------
#open new file
cmds.file(new = True, f = True)
#unload
pm.unloadPlugin(plugin_name)
if(verbose):
	print('Unloaded plugin: {0}'.format(plugin_name))


#Copy plugin
#-----------------------------------------
try:
	shutil.copy2(source_dir +'/' +plugin_name, target_dir)
	if(verbose): print('Copied plugin from {0} to {1}'.format(source_dir +'/' +plugin_name,
																		target_dir))
except:
	if(verbose): print('Error copying plugin from {0} to {1}'.format(source_dir +'/' +plugin_name,
																		target_dir))
コード例 #44
0
 def setUp(self):
     cmds.file(force=True, new=True)
     cmds.loadPlugin("AL_USDMayaPlugin", quiet=True)
     self.assertTrue(
         cmds.pluginInfo("AL_USDMayaPlugin", query=True, loaded=True))
コード例 #45
0
    def process(self, instance):
        from maya import cmds
        from avalon import maya
        from reveries import utils
        from reveries.maya import lib, capsule, utils as maya_utils

        staging_dir = utils.stage_dir()

        filename = "%s.ma" % instance.data["subset"]
        outpath = "%s/%s" % (staging_dir, filename)

        linkfile = "%s.json" % instance.data["subset"]
        linkpath = "%s/%s" % (staging_dir, linkfile)

        instance.data["repr.LookDev._stage"] = staging_dir
        instance.data["repr.LookDev._files"] = [filename, linkfile]
        instance.data["repr.LookDev.entryFileName"] = filename
        instance.data["repr.LookDev.linkFname"] = linkfile

        # Serialise shaders relationships
        #
        self.log.info("Serialising shaders..")

        shader_by_id = lib.serialise_shaders(instance.data["dagMembers"])
        assert shader_by_id, "The map of shader relationship is empty."

        # Extract shaders
        #
        self.log.info("Extracting shaders..")

        child_instances = instance.data.get("childInstances", [])
        try:
            texture = next(chd for chd in child_instances
                           if chd.data["family"] == "reveries.texture")
        except StopIteration:
            file_node_attrs = dict()
        else:
            file_node_attrs = texture.data.get("fileNodeAttrs", dict())

        with contextlib.nested(
                maya.maintained_selection(),
                capsule.ref_edit_unlock(),
                # (NOTE) Ensure attribute unlock
                capsule.attribute_states(file_node_attrs.keys(), lock=False),
                # Change to published path
                capsule.attribute_values(file_node_attrs),
                capsule.no_refresh(),
        ):
            # Select full shading network
            # If only select shadingGroups, and if there are any node
            # connected to Dag node (i.e. drivenKey), then the command
            # will not only export selected shadingGroups' shading network,
            # but also export other related DAG nodes (i.e. full hierarchy)
            cmds.select(instance, replace=True, noExpand=True)

            cmds.file(
                outpath,
                options="v=0;",
                type="mayaAscii",
                force=True,
                exportSelected=True,
                preserveReferences=False,
                constructionHistory=False,
                channels=True,  # allow animation
                constraints=False,
                shader=True,
                expressions=True)

        # Animatable attrs
        # Custom attributes in assembly node which require to be animated.
        self.log.info("Serialising 'avnlook_' prefixed attributes..")
        avnlook_anim = dict()
        for node in cmds.ls(instance.data["dagMembers"], type="transform"):
            id = maya_utils.get_id(node)
            user_attrs = cmds.listAttr(node, userDefined=True) or []
            for attr in user_attrs:
                if not attr.startswith("avnlook_"):
                    continue
                connected = cmds.listConnections(node + "." + attr,
                                                 source=False,
                                                 destination=True,
                                                 plugs=True)
                if connected:
                    avnlook_anim[id + "." + attr] = connected

        surfaces = cmds.ls(instance.data["dagMembers"],
                           noIntermediate=True,
                           type="surfaceShape")

        # UV Chooser
        uv_chooser = dict()
        for chooser in cmds.ls(instance, type="uvChooser"):
            chooser_id = maya_utils.get_id(chooser)

            for src in cmds.listConnections(chooser + ".uvSets",
                                            source=True,
                                            destination=False,
                                            plugs=True) or []:
                geo, attr = src.split(".", 1)
                geo = cmds.listRelatives(geo, parent=True, path=True)[0]
                geo_attr = maya_utils.get_id(geo) + "." + attr

                if chooser_id not in uv_chooser:
                    uv_chooser[chooser_id] = list()
                if geo_attr not in uv_chooser[chooser_id]:
                    uv_chooser[chooser_id].append(geo_attr)

        # CreaseSet
        crease_sets = dict()
        creases = list()

        for node in surfaces:
            creases += cmds.ls(cmds.listSets(object=node), type="creaseSet")

        creases = list(set(creases))

        for cres in creases:
            # Grouping crease set members with crease level value.
            level = cmds.getAttr(cres + ".creaseLevel")
            if level not in crease_sets:
                crease_sets[level] = list()

            for member in cmds.ls(cmds.sets(cres, query=True), long=True):
                node, edges = member.split(".")
                if node not in instance.data["dagMembers"]:
                    continue
                # We have validated Avalon UUID, so there must be a valid ID.
                id = maya_utils.get_id(node)
                crease_sets[level].append(id + "." + edges)

        # Arnold attributes
        arnold_attrs = dict()

        try:
            # (TODO) This should be improved. see issue #65
            from reveries.maya import arnold
        except RuntimeError as e:
            self.log.debug(e)
        else:
            ai_sets = dict()
            for objset in cmds.ls(type="objectSet"):
                if not lib.hasAttr(objset, "aiOverride"):
                    continue
                if not cmds.getAttr(objset + ".aiOverride"):
                    continue
                # Ignore pyblish family instance
                if (lib.hasAttr(objset, "id")
                        and read(objset + ".id") == "pyblish.avalon.instance"):
                    continue

                ai_sets[objset] = cmds.ls(cmds.sets(objset, query=True),
                                          long=True)

            # (TODO) Validate only transform nodes in ai set
            transforms = cmds.ls(cmds.listRelatives(surfaces, parent=True),
                                 long=True)
            for node in transforms:
                # There must be a valid ID
                id = maya_utils.get_id(node)

                attrs = dict()

                # Collect all `ai*` attributes from shape
                shape = cmds.listRelatives(
                    node, shapes=True, noIntermediate=True,
                    fullPath=True) or [None]
                shape = shape[0]
                if shape is None:
                    continue

                for attr in cmds.listAttr(shape, fromPlugin=True) or []:
                    value = read(shape + "." + attr)
                    if value is not None:
                        attrs[attr] = value

                # Collect all override attributes from objectSet
                for ai_set, member in ai_sets.items():
                    if node not in member:
                        continue

                    for attr in cmds.listAttr(ai_set, userDefined=True) or []:
                        # Collect all user attributes from the objectSet
                        # (NOTE) Some attribute like `castsShadows` does not
                        #        startswith "ai", but also affect rendering in
                        #        Arnold.
                        value = read(node + "." + attr)
                        if value is not None:
                            attrs[attr] = value

                arnold_attrs[id] = attrs

        # VRay Attributes
        vray_attrs = dict()

        try:
            from reveries.maya import vray
        except RuntimeError as e:
            self.log.debug(e)
        else:
            for node in surfaces:
                # - shape
                values = vray.attributes_gather(node)
                if values:
                    vray_attrs[node] = values

                # - transform
                parent = cmds.listRelatives(node, parent=True)
                if parent:
                    values = vray.attributes_gather(parent[0])
                    if values:
                        vray_attrs[parent[0]] = values

        relationships = {
            "shaderById": shader_by_id,
            "avnlookAttrs": avnlook_anim,
            "uvChooser": uv_chooser,
            "creaseSets": crease_sets,
            "arnoldAttrs": arnold_attrs,
            "vrayAttrs": vray_attrs,
        }

        self.log.info("Extracting serialisation..")

        with open(linkpath, "w") as f:
            json.dump(relationships, f)
コード例 #46
0
    def execute(self, **kwargs):
        """
        Main hook entry point
        :returns:       A list of any items that were found to be published.  
                        Each item in the list should be a dictionary containing 
                        the following keys:
                        {
                            type:   String
                                    This should match a scene_item_type defined in
                                    one of the outputs in the configuration and is 
                                    used to determine the outputs that should be 
                                    published for the item
                                    
                            name:   String
                                    Name to use for the item in the UI
                            
                            description:    String
                                            Description of the item to use in the UI
                                            
                            selected:       Bool
                                            Initial selected state of item in the UI.  
                                            Items are selected by default.
                                            
                            required:       Bool
                                            Required state of item in the UI.  If True then
                                            item will not be deselectable.  Items are not
                                            required by default.
                                            
                            other_params:   Dictionary
                                            Optional dictionary that will be passed to the
                                            pre-publish and publish hooks
                        }
        """

        items = []
        # get the main scene:
        scene_name = cmds.file(query=True, sn=True)
        if not scene_name:
            raise TankError("Please Save your file before Publishing")
        scene_path = os.path.abspath(scene_name)
        name = os.path.basename(scene_path)
        # create the primary item - this will match the primary output 'scene_item_type':
        items.append({"type": "work_file", "name": name})

        ### CLEANUP ################################################################################
        ### NOW DO SCENE CRITICAL CHECKS LIKE DUPLICATE OBJECT NAMES ETC AND FAIL HARD IF THESE FAIL!
        ############################################################################################
        #############################
        ## INITIAL HARD FAILS
        ## Do a quick check for geo_hrc and rig_hrc
        ## geo_hrc
        if not cmds.objExists('geo_hrc'):
            raise TankError(
                "Please Group all your geo under a geo_hrc group under the root node."
            )
        ## rig_hrc
        ## UNCOMMENT FOR MDL STEP
        #  if cleanup.rigGroupCheck():
        #      raise TankError('Rig group found!! Please use the RIG menus to publish rigs...')
        ## UNCOMMENT FOR RIG STEP
        if not cleanup.rigGroupCheck():
            raise TankError(
                'No rig group found!! Please make sure your animation controls are under rig_hrc.'
            )
        ## Now check it's the right KIND of asset eg CHAR or PROP
        cleanup.assetCheckAndTag(type='BLD', customTag='animBLD')

        #############################
        ## SECONDARIES FOR PUBLISHING
        ## WE NEED TO FIND THE MAIN GROUP THAT HAS MESHES IN IT NOW AND PUSH THIS INTO THE ITEMS LIST FOR SECONDARY PUBLISHING
        ## Look for root level groups that have meshes as children:
        for grp in cmds.ls(assemblies=True, long=True):
            if cmds.ls(grp, dag=True, type="mesh"):
                # include this group as a 'mesh_group' type
                ### UNCOMMENT FOR PROP CHAR LND ASSETS
                items.append({"type": "mesh_group", "name": grp})
        ### UNCOMMENT FOR BLD MLD ASSET
        #          if cleanup.BLDTransformCheck(grp): ## Check for BLD step only to make sure the transforms are not frozen on the BLD grps
        #              items.append({"type":"mesh_group", "name":grp})
        #              cleanup.assetCheckAndTag(type = 'BLD', customTag = 'staticBLD')

        #############################
        ## HARD FAILS
        ## Duplicate name check
        if not cleanup.duplicateNameCheck():
            raise TankError(
                "Duplicate names found please fix before publishing.\nCheck the outliner for the duplicate name set."
            )
        ## Incorrect Suffix check
        checkSceneGeo = cleanup._geoSuffixCheck(items)
        if not checkSceneGeo:
            raise TankError(
                "Incorrect Suffixes found! Fix suffixes before publishing.\nCheck the outliner for the duplicate name set."
            )
        ## Incorrect root name
        if not utils.checkRoot_hrc_Naming(items):
            assetName = cmds.file(query=True, sn=True).split('/')[4]
            raise TankError(
                "YOUR ASSET IS NAMED INCORRECTLY! Remember it is CASE SENSITIVE!\nIt should be %s_hrc"
                % assetName)
        ##RIG SPECIALS
        ## Set the smooth previews from MDL back on
        cleanup.setRiggedSmoothPreviews()
        #############################
        ## NOW PREP THE GEO FOR EXPORT!!!
        ## THESE CLEANUPS SHOULD NOT FAIL THEY SHOULD JUST BE PERFORMED
        ## UNCOMMENT FOR MDL STEP
        ## PERFORM MDL CLEANUP
        #  cleanup.cleanUp(items = items, checkShapes = True, history = True, pivots = True, freezeXFRM = True, smoothLvl = True, tagSmoothed = True, checkVerts = True,
        #                 renderflags = True, deleteIntermediate = True, turnOffOpposite = True, instanceCheck = True, shaders = True)
        ## UNCOMMENT FOR RIG STEP
        ## PERFORM RIG CLEANUP
        cleanup.cleanUp(items=items,
                        checkShapes=False,
                        history=False,
                        pivots=False,
                        freezeXFRM=False,
                        smoothLvl=True,
                        tagSmoothed=True,
                        checkVerts=False,
                        renderflags=True,
                        deleteIntermediate=False,
                        turnOffOpposite=True,
                        instanceCheck=False,
                        shaders=True)
        ############################################################################################
        ## NOW MOVE ON TO PUBLISHING
        return items
コード例 #47
0
ファイル: commands.py プロジェクト: VSPipe/reveries-config
def get_workfolder():
    return os.path.dirname(cmds.file(query=True, sceneName=True))
コード例 #48
0
ファイル: __init__.py プロジェクト: ireneher/EZLookdevTools
def unsaved_scene():
    """Check for unsaved changes."""
    import maya.cmds as cmds

    return cmds.file(q=True, modified=True)
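A small usage sketch of the helper above, e.g. as a guard before a destructive batch operation (hypothetical call site):

import maya.cmds as cmds

# Warn the user before running something destructive on an unsaved scene.
if unsaved_scene():
    cmds.warning("Scene has unsaved changes - save before continuing.")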
コード例 #49
0
def execute(*args):
    # get list of current ref objs
    currentRefs = ReferenceObject.currentObjList
    for obj in currentRefs:
        path, state, namespace, killstate, loadstate = obj.gather_info()
        if state:
            print "===== AssetManager.execute: processing namespace: {0} ---- path: {1} . . . . . ".format(
                namespace, path)
            if killstate:
                # throw up a confirm dialog?
                print "   --killing reference"
                cmds.file(path, removeReference=True)
            # do other stuff
            else:
                rfn = cmds.file(obj.ref, q=True, referenceNode=True)
                # replace the file
                if obj.vchange:
                    print "    --replacing reference file"
                    cmds.file(path, loadReference=rfn, type="mayaBinary")
                #update the load state
                if loadstate:
                    print "    --loading reference"
                    cmds.file(path, loadReference=rfn, type="mayaBinary")
                if not loadstate:
                    print "    --unloading reference"
                    cmds.file(path, unloadReference=rfn)

    # get list of proposed ref objs
    pendingRefs = ReferenceObject.pendingObjList
    for obj in pendingRefs:
        path, state, namespace, killstate, loadstate = obj.gather_info()
        print "===== AssetManager.execute: creating ref in namespace: {0} ---- path: {1} . . . . . ".format(
            namespace, path)
        print "    --creating and loading reference"
        cmds.file(path, r=True, ns=namespace)

    # clear all the lists and objs
    print "      clearing and reloading UI. . . "
    clear_ref_list()
    clear_all_object_info()
    print "      ref info cleared"
    # refresh the whole
    assetManager()
    print "      ui reloaded"
コード例 #50
0
ファイル: commands.py プロジェクト: VSPipe/reveries-config
def get_workfile():
    path = cmds.file(query=True, sceneName=True) or "untitled"
    return os.path.basename(path)
コード例 #51
0
def buildRigForModel( scene=None, referenceModel=True, deletePlacers=False ):
	'''
	given a model scene whose skeleton is assumed to have been built by the
	skeletonBuilder tool, this function will create a rig scene by referencing
	in said model, creating the rig as best it knows how, saving the scene in
	the appropriate spot etc...
	'''

	#if no scene was passed, assume we're acting on the current scene
	if scene is None:
		scene = filesystem.Path( cmd.file( q=True, sn=True ) )
	#if the scene WAS passed in, open the desired scene if it isn't already open
	else:
		scene = filesystem.Path( scene )
		curScene = filesystem.Path( cmd.file( q=True, sn=True ) )
		if curScene:
			if scene != curScene:
				mel.saveChanges( 'file -f -open "%s"' % scene )
		else: cmd.file( scene, f=True, open=True )

	#if the scene is still none bail...
	if not scene and referenceModel:
		raise SceneNotSavedError( "Uh oh, your scene hasn't been saved - Please save it somewhere on disk so I know where to put the rig.  Thanks!" )

	#backup the current state of the scene, just in case something goes south...
	if scene.exists:
		backupFilename = scene.up() / ('%s_backup.%s' % (scene.name(), scene.getExtension()))
		if backupFilename.exists: backupFilename.delete()
		cmd.file( rename=backupFilename )
		cmd.file( save=True, force=True )
		cmd.file( rename=scene )

	#finalize
	failedParts = finalizeAllParts()
	if failedParts:
		confirmDialog( t='Finalization Failure', m='The following parts failed to finalize properly:\n\n%s' % '\n'.join( map( str, failedParts ) ), b='OK', db='OK' )
		return

	#delete placers if desired - NOTE: this should be done after finalization because placers are often used to define alignment for end joints
	if deletePlacers:
		for part in SkeletonPart.IterAllParts():
			placers = part.getPlacers()
			if placers:
				delete( placers )

	#if desired, create a new scene and reference in the model
	if referenceModel:

		#remove any unknown nodes in the scene - these cause maya to barf when trying to save
		unknownNodes = ls( type='unknown' )
		if unknownNodes:
			delete( unknownNodes )

		#scene.editoradd()
		cmd.file( f=True, save=True )
		cmd.file( f=True, new=True )

		api.referenceFile( scene, 'model' )

		#rename the scene to the rig
		rigSceneName = '%s_rig.ma' % scene.name()
		rigScene = scene.up() / rigSceneName
		cmd.file( rename=rigScene )
		cmd.file( f=True, save=True, typ='mayaAscii' )
	else:
		rigScene = scene

	buildRigForAllParts()
	setupMirroring()

	return rigScene
コード例 #52
0
    def process(self, instance):
        """Plugin entry point."""
        if instance.data.get("exportOnFarm"):
            self.log.info("vrayscenes will be exported on farm.")
            raise NotImplementedError(
                "exporting vrayscenes is not implemented")

        # handle sequence
        if instance.data.get("vraySceneMultipleFiles"):
            self.log.info("vrayscene sequence export is not supported yet.")
            raise NotImplementedError(
                "exporting vrayscene sequences not implemented yet")

        vray_settings = cmds.ls(type="VRaySettingsNode")
        if not vray_settings:
            node = cmds.createNode("VRaySettingsNode")
        else:
            node = vray_settings[0]

        # setMembers on vrayscene_layer should contain layer name.
        layer_name = instance.data.get("layer")

        staging_dir = self.staging_dir(instance)
        self.log.info("staging: {}".format(staging_dir))
        template = cmds.getAttr("{}.vrscene_filename".format(node))
        start_frame = instance.data.get(
            "frameStartHandle") if instance.data.get(
                "vraySceneMultipleFiles") else None
        formatted_name = self.format_vray_output_filename(
            os.path.basename(instance.data.get("source")), layer_name,
            template, start_frame)

        file_path = os.path.join(staging_dir, "vrayscene",
                                 *formatted_name.split("/"))

        # Write out vrscene file
        self.log.info("Writing: '%s'" % file_path)
        with avalon.maya.maintained_selection():
            if "*" not in instance.data["setMembers"]:
                self.log.info("Exporting: {}".format(
                    instance.data["setMembers"]))
                set_members = instance.data["setMembers"]
                cmds.select(set_members, noExpand=True)
            else:
                self.log.info("Exporting all ...")
                set_members = cmds.ls(long=True,
                                      objectsOnly=True,
                                      geometry=True,
                                      lights=True,
                                      cameras=True)
                cmds.select(set_members, noExpand=True)

            self.log.info("Appending layer name {}".format(layer_name))
            set_members.append(layer_name)

            export_in_rs_layer(file_path,
                               set_members,
                               export=lambda: cmds.file(file_path,
                                                        type="V-Ray Scene",
                                                        pr=True,
                                                        es=True,
                                                        force=True))

        if "representations" not in instance.data:
            instance.data["representations"] = []

        files = file_path

        representation = {
            'name': 'vrscene',
            'ext': 'vrscene',
            'files': os.path.basename(files),
            "stagingDir": os.path.dirname(files),
        }
        instance.data["representations"].append(representation)

        self.log.info("Extracted instance '%s' to: %s" %
                      (instance.name, staging_dir))
コード例 #53
0
ファイル: setdress_api.py プロジェクト: tokejepsen/pype
def load_package(filepath, name, namespace=None):
    """Load a package that was gathered elsewhere.

    A package is a group of published instances, possibly with additional data
    in a hierarchy.

    """

    if namespace is None:
        # Define a unique namespace for the package
        namespace = os.path.basename(filepath).split(".")[0]
        unique_namespace(namespace)
    assert isinstance(namespace, basestring)

    # Load the setdress package data
    with open(filepath, "r") as fp:
        data = json.load(fp)

    # Load the setdress alembic hierarchy
    #   We import this into the namespace in which we'll load the package's
    #   instances into afterwards.
    alembic = filepath.replace(".json", ".abc")
    hierarchy = cmds.file(alembic,
                          reference=True,
                          namespace=namespace,
                          returnNewNodes=True,
                          groupReference=True,
                          groupName="{}:{}".format(namespace, name),
                          typ="Alembic")

    # Get the top root node (the reference group)
    root = "{}:{}".format(namespace, name)

    containers = []
    all_loaders = api.discover(api.Loader)
    for representation_id, instances in data.items():

        # Find the compatible loaders
        loaders = api.loaders_from_representation(all_loaders,
                                                  representation_id)

        for instance in instances:
            container = _add(instance=instance,
                             representation_id=representation_id,
                             loaders=loaders,
                             namespace=namespace,
                             root=root)
            containers.append(container)

    # TODO: Do we want to cripple? Or do we want to add a 'parent' parameter?
    # Cripple the original avalon containers so they don't show up in the
    # manager
    # for container in containers:
    #     cmds.setAttr("%s.id" % container,
    #                  "setdress.container",
    #                  type="string")

    # TODO: Lock all loaded nodes
    #   This is to ensure the hierarchy remains unaltered by the artists
    # for node in nodes:
    #      cmds.lockNode(node, lock=True)

    return containers + hierarchy
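A minimal call of the loader above; the package path and name here are hypothetical placeholders, and the matching .abc sidecar is expected to sit next to the .json:

# Hypothetical usage; path and name are placeholders.
containers = load_package("/projects/show/publish/setA/setA_v003.json", name="setA")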
コード例 #54
0
    def addXModel(self, xmodel_folder, XModel, duplicates, badModels):
        good_model = True

        # Define model & fix its name since Maya doesn't like some characters
        modelname = XModel['Name']
        if "/" in modelname:
            modelname = modelname.split("/")[1]
        if "@" in modelname:
            mayamodel = modelname.replace("@","_")
        else:
            mayamodel = modelname

        xmodelPos = [XModel['PosX'],XModel['PosY'],XModel['PosZ']]

        # Check if the model has Rotation values
        if 'RotX' in XModel:
            xmodelRot = [XModel['RotX'],XModel['RotY'],XModel['RotZ']]
        else:
            xmodelRot = 0,0,0
        
        # Check if the model already exists and duplicate it, to avoid importing the same model twice
        if cmds.objExists(mayamodel + '__%i' % duplicates) == True:
            # Go through duplicates until there's an available name
            while cmds.objExists(mayamodel + '__%i'  % duplicates) == True:
                duplicates += 1
            
            # Duplicate last copy of the model
            cmds.duplicate(mayamodel + '__%i' % (duplicates-1))

        # If current XModel doesn't exist in the scene, load it
        else:
            # Import
            try:
                cmds.file(xmodel_folder + '\\' + modelname + '\\'
                            + modelname + '_LOD0.ma', i=True)
            except:
                good_model = False
                e = "import error"

            # Delete Joints
            try:
                # Delete XModel's joints
                cmds.delete('Joints')
            except:
                good_model = False
                e = "joints error"

            # Rename model
            try:
                # Rename model from modelname_LOD0 to modelname_DUPLICATENUMBER
                cmds.rename(modelname + '_LOD0', modelname + '__%i'
                            % duplicates)
            except:
                good_model = False
                e = "rename error"     

            # Parent to 'xmodels' group
            try:
                cmds.parent(modelname + '__%i' % duplicates, 'xmodels')

            except:
                good_model = False
                e = "parent error"        


        # Check if the model was loaded successfully into Maya
        if good_model:
            currentModel = modelname + '__%i' % duplicates

            # Move model
            cmds.move(float(xmodelPos[0]),
                        float(xmodelPos[1]),
                        float(xmodelPos[2]), currentModel,
                        absolute=True)

            # Rotate model
            cmds.rotate(float(xmodelRot[0]),
                        float(xmodelRot[1]),
                        float(xmodelRot[2]), currentModel,
                        absolute=True)
            
            # Scale model
            cmds.scale(float(XModel['Scale']) * 0.3937007874015748,
                        float(XModel['Scale']) * 0.3937007874015748,
                        float(XModel['Scale']) * 0.3937007874015748, currentModel,
                        absolute=True)

        # If the model was not imported successfully, add it to '_badModels.txt' file
        if not good_model:
            if modelname not in badModels:
                badModels.append(modelname)
                badModels.append(e)
                badModels.append("\n")

        return badModels
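A hedged sketch of a single call to the method above, assuming the map data has already been parsed into per-model dictionaries with the keys the code reads; "importer" stands in for whatever instance owns addXModel:

# Hypothetical XModel entry; keys match those read in addXModel().
entry = {'Name': 'props/chair01', 'PosX': 10.0, 'PosY': 0.0, 'PosZ': 4.5,
         'RotX': 0.0, 'RotY': 90.0, 'RotZ': 0.0, 'Scale': 1.0}
badModels = importer.addXModel('D:/exported_xmodels', entry,
                               duplicates=0, badModels=[])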
コード例 #55
0
        cmds.textField(self.expPath,
                       edit=True,
                       width=140,
                       enterCommand=('cmds.setFocus(\"' + self.expPath +
                                     '\")'))
        cmds.button(label='Browse', command='obj.abc()')
        cmds.setParent(tRow)
        cmds.setParent(uRow)
        cmds.text(label=" ")
        sRow = cmds.rowLayout(numberOfColumns=2,
                              cl2=("center", "center"),
                              cw2=(120, 130),
                              ct2=("both", "both"),
                              co2=(10, 5))
        butExp = cmds.button(label='Push Export',
                             command='obj.sortAndExport()')
        cmds.button(label=" Close ", command=("obj.close()"))
        cmds.setParent(sRow)
        cmds.setParent(mainCol)
        #		cmds.text(label="")
        cmds.showWindow(self.win)


obj = maya_Stereo_Fbx()
if not (cmds.file(query=True, shn=True, sn=True)):
    OpenMaya.MGlobal.displayWarning(
        "The scene file provides the data this tool needs. Without a saved scene the UI will be blank."
    )
else:
    obj.nu()
コード例 #56
0
		
def exportAbc(asset, start, end, export):
    '''This function exports the mesh and returns the abc filename and the save path.
    Only exports transforms with visibility == True, so make sure visibility
    animation is done on the ROOT node.'''
    
    panel = 'modelPanel4'
    cmd.modelEditor(panel, e=1, allObjects=0)
    objects = ''
    obs = [x for x in cmd.listRelatives('%s:render_GP' % asset, ad = True, typ = 'mesh')] 
    for one in obs:
        pNode = cmd.listRelatives(one, p = True)[0]
        if gA('%s.v' % pNode) == 1: 
            objects+= '-root %s ' % pNode 
             
    baseName = cmd.file(q = True, sn = True, shn=True)
    filePath = cmd.file(q=True, sn=True)
    outPath = os.path.join(filePath.split(baseName)[0], '%s.ma' % asset)
    renderPath = outPath.replace('animation', 'rendering')
    fileName = baseName.split('.')[0] + '_' + asset + '.abc'
    dirPath = 'X:/cache/alembic/'
    command = '-frameRange %s %s -attr shaderGp -uvWrite -worldSpace -dataFormat ogawa %s-file %s%s'  % (start-3, end+3, objects, dirPath, fileName)
    if export:
        cmd.AbcExport(j = '%s' % command)
    abcPath = '%s%s' % (dirPath, fileName)
    cmd.modelEditor(panel, e=1, allObjects=1)
    print renderPath
    return abcPath, renderPath

def importShaders(shader = 'Boy'):
    log = []
コード例 #57
0
def shaderRecon(assetName=None,fileName="temp_recon.json", shaderNamae = "temp_shaders.mb"):
    
    import json
    import os

    root_grp = mc.ls(sl=True)
    if len(root_grp) < 1:
        mc.warning("please select target.")
        return

    target = root_grp[0]
    print "target: ", target
    
    #get asset name
    if not assetName:
        assetName = getAssName(target)
    print "asset is: " + assetName
    
    #dir setup
    userName = os.getenv("USER")
    showDir = os.getenv("SHOW_DIR")
    shotDir = os.path.join(showDir, "SHOTS", assetName)
    userDir = os.path.join(shotDir, userName)
    mayaDataDir = os.path.join(userDir, "maya/data")
    print "getting data from: " + mayaDataDir
    reconFile = os.path.join(mayaDataDir, fileName)
    shaderFile = os.path.join(mayaDataDir, shaderNamae)
    
    print "recon file:  " + reconFile
    print "shader file: " + shaderFile

    #check if file exists
    if not os.path.isfile(reconFile) or not os.path.isfile(shaderFile):
        print "no lookdev found!!"
        return

    #reconnectin
    #import shaders
    fileImported = mc.file(shaderFile, i=True, mergeNamespacesOnClash=False)
    print "shader imported from: " + shaderFile


    #get all mesh shapes
    all_target_shape = mc.listRelatives(target, ad=True, type = "mesh")
    print all_target_shape

    #json
    try:
        json_data = open(reconFile)
        data = json.load(json_data)
    except:
        print "error loading recon file."
        return

    for name, path in data.iteritems():
        for a in all_target_shape:
            if name in a:
                mc.sets(a,e = 1, forceElement = path)
                print " shader assigned: ", name, " ==>> ", path
  
    #done
    print "shader recon finished."
コード例 #58
0
ファイル: setdress_api.py プロジェクト: tokejepsen/pype
def update_scene(set_container, containers, current_data, new_data, new_file):
    """Updates the hierarchy, assets and their matrix

    Updates the following within the scene:
        * Setdress hierarchy alembic
        * Matrix
        * Parenting
        * Representations

    It removes any assets which are not present in the new build data

    Args:
        set_container (dict): the setdress container of the scene
        containers (list): the list of containers under the setdress container
        current_data (dict): the current build data of the setdress
        new_data (dict): the new build data of the setdress

    Returns:
        processed_containers (list): all new and updated containers

    """

    from pype.maya.lib import DEFAULT_MATRIX, get_container_transforms

    set_namespace = set_container['namespace']

    # Update the setdress hierarchy alembic
    set_root = get_container_transforms(set_container, root=True)
    set_hierarchy_root = cmds.listRelatives(set_root, fullPath=True)[0]
    set_hierarchy_reference = cmds.referenceQuery(set_hierarchy_root,
                                                  referenceNode=True)
    new_alembic = new_file.replace(".json", ".abc")
    assert os.path.exists(new_alembic), "%s does not exist." % new_alembic
    with unlocked(cmds.listRelatives(set_root, ad=True, fullPath=True)):
        cmds.file(new_alembic,
                  loadReference=set_hierarchy_reference,
                  type="Alembic")

    identity = DEFAULT_MATRIX[:]

    processed_namespaces = set()
    processed_containers = list()

    new_lookup = _instances_by_namespace(new_data)
    old_lookup = _instances_by_namespace(current_data)
    for container in containers:
        container_ns = container['namespace']

        # Consider it processed here; even if it fails we want to store that
        # the namespace was already available.
        processed_namespaces.add(container_ns)
        processed_containers.append(container['objectName'])

        if container_ns in new_lookup:
            root = get_container_transforms(container, root=True)
            if not root:
                log.error("Can't find root for %s", container['objectName'])
                continue

            old_instance = old_lookup.get(container_ns, {})
            new_instance = new_lookup[container_ns]

            # Update the matrix
            # check matrix against old_data matrix to find local overrides
            current_matrix = cmds.xform(root,
                                        query=True,
                                        matrix=True,
                                        objectSpace=True)

            original_matrix = old_instance.get("matrix", identity)
            has_matrix_override = not matrix_equals(current_matrix,
                                                    original_matrix)

            if has_matrix_override:
                log.warning("Matrix override preserved on %s", container_ns)
            else:
                new_matrix = new_instance.get("matrix", identity)
                cmds.xform(root, matrix=new_matrix, objectSpace=True)

            # Update the parenting
            if old_instance.get("parent", None) != new_instance["parent"]:

                parent = to_namespace(new_instance['parent'], set_namespace)
                if not cmds.objExists(parent):
                    log.error("Can't find parent %s", parent)
                    continue

                # Set the new parent
                cmds.lockNode(root, lock=False)
                root = cmds.parent(root, parent, relative=True)
                cmds.lockNode(root, lock=True)

            # Update the representation
            representation_current = container['representation']
            representation_old = old_instance['representation']
            representation_new = new_instance['representation']
            has_representation_override = (representation_current !=
                                           representation_old)

            if representation_new != representation_current:

                if has_representation_override:
                    log.warning(
                        "Your scene had local representation "
                        "overrides within the set. New "
                        "representations not loaded for %s.", container_ns)
                    continue

                # We check it against the current 'loader' in the scene instead
                # of the original data of the package that was loaded because
                # an Artist might have made scene local overrides
                if new_instance['loader'] != container['loader']:
                    log.warning(
                        "Loader is switched - local edits will be "
                        "lost. Removing: %s", container_ns)

                    # Remove this from the "has been processed" list so it's
                    # considered as new element and added afterwards.
                    processed_containers.pop()
                    processed_namespaces.remove(container_ns)
                    api.remove(container)
                    continue

                # Check whether the conversion can be done by the Loader.
                # They *must* use the same asset, subset and Loader for
                # `api.update` to make sense.
                old = io.find_one({"_id": io.ObjectId(representation_current)})
                new = io.find_one({"_id": io.ObjectId(representation_new)})
                is_valid = compare_representations(old=old, new=new)
                if not is_valid:
                    log.error("Skipping: %s. See log for details.",
                              container_ns)
                    continue

                new_version = new["context"]["version"]
                api.update(container, version=new_version)

        else:
            # Remove this container because it's not in the new data
            log.warning("Removing content: %s", container_ns)
            api.remove(container)

    # Add new assets
    all_loaders = api.discover(api.Loader)
    for representation_id, instances in new_data.items():

        # Find the compatible loaders
        loaders = api.loaders_from_representation(all_loaders,
                                                  representation_id)
        for instance in instances:

            # Already processed in update functionality
            if instance['namespace'] in processed_namespaces:
                continue

            container = _add(instance=instance,
                             representation_id=representation_id,
                             loaders=loaders,
                             namespace=set_container['namespace'],
                             root=set_root)

            # Add to the setdress container
            cmds.sets(container, addElement=set_container['objectName'])

            processed_containers.append(container)

    return processed_containers
コード例 #59
0
def sandboxLookdevPublish(q=False, ignoreIndex = False, fileName="temp_recon.json", shaderNamae = "temp_shaders.mb"):
    '''
    Write out the shader connections for all child meshes.
    Ignores all namespaces.
    '''
    import json
    import os
    
    #selection
    root_grp = mc.ls(sl=True)
    #check selection
    if len(root_grp) < 1:
        mc.warning("please select source.")
        return

    #user shader recon dir
    userDir = os.getenv("USER_SHOT_DIR")
    mayaDataDir = os.path.join(userDir, "maya/data")
    reconFile = os.path.join(mayaDataDir, fileName)
    shaderFile = os.path.join(mayaDataDir, shaderNamae)
    print "writing recon file " + fileName + " to " + reconFile
    print "writing shader file " + shaderNamae + " to " + shaderFile

    #only one grp for now; add a loop later
    source = root_grp[0]
    print "root selected: ", source

    #get all mesh shapes
    all_source_shape = mc.listRelatives(source, ad=True, type = "mesh")

    #init data
    shaderReconDict = {}
    exportSG = []

    #looping through src shape
    for source_shape in all_source_shape:
        
        #ignore namespaces
        clean_source_name = source_shape.split(":")[-1]
        
        #get connected SG name
        srcSGName = mc.listConnections(source_shape, type = "shadingEngine")[0]
        print clean_source_name + "==>>" + srcSGName

        #add to dict, keyed by the namespace-free shape name
        shaderReconDict[clean_source_name] = srcSGName
        
        #add shading group to export
        if srcSGName not in exportSG:
            exportSG.append(srcSGName)

    #writeout recon file
    print "writing recon file ..."
    data = shaderReconDict
    with open(reconFile, 'w') as outfile:
        json.dump(data, outfile)
    print "writing recon file successful: ", reconFile
    
    #exporting shaders
    print "exporting shaders: ", exportSG
    mc.select(cl=True)
    for sg in exportSG:
        #hack.. select it. todo: find better way
        mc.select(sg, ne=True, add=True)
    print "exporting shaders to: " + shaderFile
    
    #export shaders
    mc.file( shaderFile, op="v=0", typ="mayaBinary", pr=True, es=True)
    print "writing shader file successful: ", shaderFile
    
    #done
    print "lookdev published to sandbox."
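The publish and recon functions above are meant to be used as a pair: run the publish from the lookdev scene with the shaded asset group selected, then run the recon in the target scene with the matching group selected:

#in the lookdev scene, with the shaded asset group selected:
sandboxLookdevPublish()

#later, in the target scene, with the matching asset group selected:
shaderRecon()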
コード例 #60
0
# MAYA SHIT ###################################################################
import os
import glob

import maya.standalone
from maya import cmds
maya.standalone.initialize(name='python')

shot_root = os.environ['IJ_SHOT_PATH']
anim_file_path = glob.glob(shot_root + os.sep + '*_animation_v*.mb')
if len(anim_file_path) != 0:
    anim_file_path.sort()
    anim_file_path = anim_file_path[-1]

print 'Opening fake file'
cmds.file(
    '/mnt/luma_i/assets/chr/rig/ij_chr_alejandro/50/characters_ij_chr_alejandro_50.mb',
    open=True,
    force=True,
    resetError=True)
print 'Opening ', anim_file_path
cmds.file(anim_file_path, open=True, force=True, resetError=True)
cmds.loadPlugin('AbcExport')

# The fun starts here... ######################################################
print '\n' * 5
print '*' * 80
print 'Starting Animation Shot Scene Export... Good luck!'

# OPEN UNDO CHUNK
cmds.undoInfo(openChunk=True)

# PREP ########################################################################