Example #1
2
def lcObj_exportObjs(*args, **kwargs):
  ''' Export .obj files from selected geometry, either as one combined file or as individual files per object.  Will recognize and convert poly smooth preview to geometry for export '''
  global prefix
  # UI fields: destination directory and optional file-name prefix
  path = pm.textField(prefix+'_textField_export_path', query=True, text=True)
  objPrefix = pm.textField(prefix+'_textField_prefix', query=True, text=True)
  if objPrefix:
    objPrefix+='_'

  if path:

    sel = pm.ls(sl=True)

    if sel:
      # reduce selection to exportable geometry
      sel = geometry.filterForGeometry(sel)
      print sel

      #undo is the easiest way to work on geometry temporarily
      pm.undoInfo(openChunk=True)

      if pm.checkBox(prefix+'_checkBox_use_smooth', query=True, v=True):
        for obj in sel:
          pm.select(obj)
          #find the objects currently displayed as smooth and create converted poly copies
          # displaySmoothness polygonObject == 3 means smooth-preview is on
          if pm.displaySmoothness(q=True, polygonObject=True)[0] == 3:
            pm.mel.performSmoothMeshPreviewToPolygon()

      if pm.checkBox(prefix+'_checkBox_export_indi', query=True, v=True):
        #export objects individually, one .obj per object named <prefix>_<obj>.obj
        for obj in sel:
          pm.select(obj)
          name = str(obj)
          exportString = path+'/'+objPrefix+name+'.obj'
          pm.exportSelected(exportString, force=True, options='groups=1;ptgroups=1;materials=0;smoothing=1;normals=1', type='OBJexport', pr=True, es=True)

      else:
        #export as one object
        pm.select(sel)
        name = ''
        # keep prompting until a non-empty name is entered or the user cancels
        while name == '':
          dialog = pm.promptDialog(title='OBJ Name', message='Enter Name:', button=['OK', 'Cancel'], defaultButton='OK', cancelButton='Cancel', dismissString='Cancel')
          if dialog == 'OK':
            name = pm.promptDialog(query=True, text=True)
            if name:
              exportString = path+'/'+objPrefix+name+'.obj'
              pm.exportSelected(exportString, force=True, options='groups=1;ptgroups=1;materials=0;smoothing=1;normals=1', type='OBJexport', pr=True, es=True)
            else:
              pm.warning("You didn't type a name for your obj")
          if dialog == 'Cancel':
            break

      # roll back the temporary smooth-preview conversions made above
      pm.undoInfo(closeChunk=True)
      pm.undo()
      pm.select(clear=True)

  else:
    pm.warning('Did you specify a path?')
def exportSG( filePath):
    """Export the surface shaders assigned to the selected nodes.

    Writes two files next to *filePath* (with any '.abc' extension removed):
    a pickled '.txt' listing [node name, shader name] pairs, and a '.ma'
    containing the shader nodes themselves.
    """
    #filePath = "Y:/2015_R&D_Project/11_Alembic/soo/3D_project/cache/alembic/B_01_SGs.ma"
    
    import pymel.core as pm
    SGs = []
    shaders = []
    connections = []
    for node in pm.ls(sl=True, o=True):
        shape = node.getShape()        
        shadingGrps = shape.outputs( type='shadingEngine' )
        if shadingGrps:
            # only the first shading engine on the shape is considered
            shader = shadingGrps[0].surfaceShader.inputs()[0]       
            SGs.append(shadingGrps[0])
            shaders.append(shader)
            node_name = node.split(":")[-1]
            print node_name
            #connections.append([node_name,shader.name()])
            connections.append([node.name(),shader.name()])

    # strip the '.abc' extension from the target path
    filePathDeletAbc = filePath.replace('.abc','')
    # write the node->shader connection list to a pickled .txt sidecar file
    connectionsTxtFile=open(filePathDeletAbc+'.txt','w')
    import pickle
    pickle.dump(connections,connectionsTxtFile)
    connectionsTxtFile.close()

    # export the collected shader nodes as a Maya scene
    exportLs = shaders
    pm.select(exportLs)
    pm.exportSelected( filePathDeletAbc+'.ma' )
Example #3
0
    def export_as(self, version):
        """Export the current selection to *version* as a Maya ASCII file.

        Raises RuntimeError when nothing is selected. Ensures the target
        folder and workspace exist, writes the file, then records the
        version. Returns True on success.
        """
        # refuse to export an empty selection
        if not pm.ls(sl=True):
            raise RuntimeError("There is nothing selected to export")

        # abort if there are local (un-published) external files
        self.check_external_files()

        # exports are always written as Maya ASCII
        version.extension = '.ma'

        # make sure the destination folder and workspace scaffolding exist
        utils.createFolder(version.path)
        ws_dir = os.path.dirname(version.path)
        self.create_workspace_file(ws_dir)
        self.create_workspace_folders(ws_dir)

        # write the file, then persist the version record
        pm.exportSelected(version.full_path, type='mayaAscii')
        version.save()

        return True
def exportSG( filePath):
    """Export surface, Arnold and displacement shaders for the selected nodes.

    Writes two files next to *filePath* (with any '.abc' extension removed):
    a pickled '.txt' of [node, shader, aiShader, displacementShader] rows,
    and a '.ma' containing all collected shader nodes.
    """
    #filePath = "Y:/2015_R&D_Project/11_Alembic/soo/3D_project/cache/alembic/B_01_SGs.ma"
    
    import pymel.core as pm
    SGs = []
    shaders = []
    connections = []
    aiShaders = []
    disShaders=[]
    for node in pm.ls(sl=True, o=True):
        shape = node.getShape()        
        shadingGrps = shape.outputs( type='shadingEngine' )

        if shadingGrps:
            # check whether a shader is wired into the Arnold aiSurfaceShader
            # slot; fall back to the plain surfaceShader otherwise (the bare
            # except is a deliberate best-effort probe here)
            try:
                print shadingGrps[0].aiSurfaceShader.inputs()[0]
                aiSFShader = shadingGrps[0].aiSurfaceShader.inputs()[0]
            except:
                print "false"
                aiSFShader=shadingGrps[0].surfaceShader.inputs()[0]
            
            # check whether a displacement shader exists; "0" marks "none"
            try:
                print shadingGrps[0].displacementShader.inputs()[0]
                disShader=shadingGrps[0].displacementShader.inputs()[0]
                disShaderName=disShader.name()
                disShaders.append(disShader)
            except:
                disShaderName="0"

            # only the first shading engine on the shape is considered
            shader = shadingGrps[0].surfaceShader.inputs()[0]       
            SGs.append(shadingGrps[0])
            shaders.append(shader)
            aiShaders.append(aiSFShader)
            node_name = node.split(":")[-1]
            print node_name
            #connections.append([node_name,shader.name()])
            connections.append([node.name(),shader.name(),aiSFShader.name(),disShaderName])

    # strip the '.abc' extension from the target path
    filePathDeletAbc = filePath.replace('.abc','')
    # write the connection rows to a pickled .txt sidecar file
    connectionsTxtFile=open(filePathDeletAbc+'.txt','w')
    import pickle
    pickle.dump(connections,connectionsTxtFile)
    connectionsTxtFile.close()

    # export every collected shader node as a Maya scene
    exportLs = shaders+aiShaders+disShaders
    pm.select(exportLs)
    pm.exportSelected( filePathDeletAbc+'.ma' )
Example #5
0
 def _exportShaders(self):
     """Select every material that is attached to a mesh (along with its
     shading group) and export the selection to self.outFile."""
     shaders = pm.ls(materials=1)
     selection = []
     # Select all shaders that are attached to meshes.
     for shd in shaders:
         for grp in shd.shadingGroups():
             for member in grp.members():
                 # crude type test: relies on 'Mesh' appearing in the
                 # member's repr (e.g. MeshFace / nt.Mesh)
                 if "Mesh" in str(repr(member)):
                     selection.append(shd)
                     selection.append(grp)
     pm.select(clear=1)
     # ne=1 selects network (shading-engine) nodes as-is
     pm.select(selection, ne=1)
     pm.exportSelected(self.outFile, shader=1, force=1)
Example #6
0
  def openMesh(self, mesh, *args, **kwargs):
    """ Export the current selection as a temp .obj and open that file in Photoshop.

    kwargs:
        texture (bool): include material assignments in the OBJ when True.
    """
    # stash the flag on the instance, then read it from there below — the
    # original read the bare name 'texture', which raised NameError
    self.texture = kwargs.get('texture', False)

    # write the temp OBJ into the current project root
    path = pm.workspace(q=True,rd=True)
    fileName = 'lcMtPs_temp.obj'
    exportFile = path+fileName

    # materials=1 only when texture info was requested
    if self.texture: pm.exportSelected(exportFile, f=2, pr=0, typ='OBJexport', es=1, op="groups=1;ptgroups=1;materials=1;smoothing=1;normals=1")
    else: pm.exportSelected(exportFile, f=2, pr=0, typ='OBJexport', es=1, op="groups=1;ptgroups=1;materials=0;smoothing=1;normals=1")

    # hand the exported file to Photoshop (Windows 'start' shell command)
    os.system('start "" "photoshop.exe" "'+os.path.normcase(exportFile)+'"')
Example #7
0
def exportObj(center=False):
    """Export the current selection to the module-level ``objPath`` as an OBJ.

    center: when True, each object is exported one at a time after having its
    pivot centered, then moved back.

    NOTE(review): ``objPath``, ``centerPiv`` and ``sys`` must be defined at
    module level — none are visible in this block; confirm against the
    enclosing module.
    """
    sel = pm.selected()
    if len(sel) == 0:
        pm.warning('Nothing is Selected!')
    else:
        if center:
            for obj in sel:
                # remember the pivot, center the object, export, then restore
                oldLoc = obj.getRotatePivot()
                centerPiv(obj)
                # NOTE(review): every iteration overwrites the same objPath file
                pm.exportSelected(objPath, pr=True, typ='OBJexport', es=1, force=True, op="groups=1;ptgroups=1;materials=1;smoothing=1;normals=1")
                sys.stderr.write('NvilClipboard Exported!')
                # presumably centerPiv moved the object to the origin and this
                # restores it by the saved pivot offset — TODO confirm
                obj.translateBy(oldLoc)
        else:
            pm.exportSelected(objPath, pr=True, typ='OBJexport', es=1, force=True, op="groups=1;ptgroups=1;materials=1;smoothing=1;normals=1")
            sys.stderr.write('NvilClipboard Exported!')
Example #8
0
def _exportCurvesFile():
    """Prompt for a file name and export the 'curvesExport' set/group as a
    Maya ASCII file into the module-level ``curvesFolder``, deleting the
    'curvesExport' node afterwards.

    Raises:
        RuntimeError: when the user cancels the name prompt or when
            ``curvesFolder`` does not exist on disk.
    """
    pymelLogger.debug('Starting: _exportCurvesFile()...') 
    result = pm.promptDialog(title='Curves Files Name',message='Enter Name:',button=['OK', 'Cancel'],
                                defaultButton='OK',cancelButton='Cancel',dismissString='Cancel')
    if result == 'OK':
        fileName = pm.promptDialog(query=True, text=True)
    else:
        # raising a plain string is a TypeError on modern Python — raise a
        # real exception instead
        raise RuntimeError('Curves not exported because No name was passed')

    if os.path.exists(curvesFolder):
        pm.select('curvesExport', r=1)
        pm.exportSelected(curvesFolder + fileName, type='mayaAscii', f=1)
        pm.delete('curvesExport')
    else:
        raise RuntimeError('Path to curves folder does not exist!')
    # original logged 'Starting' twice; this is the exit trace
    pymelLogger.debug('Ending: _exportCurvesFile()...')

    
    
Example #9
0
    def export_to_ma(self):
        """Export self.main_group to <current scene dir>/<self.name>.

        Does nothing unless self.export is truthy. Tries a Maya ASCII export
        first and falls back to Maya Binary when that fails.
        """
        # original compared '== True' / '== False' and had a dead 'pass' branch
        if not self.export:
            return

        pm.select(clear= True)
        pm.select(self.main_group)
        current_scene = pm.sceneName()
        dir_name = os.path.dirname(current_scene)
        new_scene_name = os.path.join(dir_name, self.name)
        try:
            pm.exportSelected(new_scene_name, force= True, channels= True,
                          type=  'mayaAscii')
        except Exception:
            # ascii export failed; fall back to binary (original intent,
            # narrowed from a bare except)
            pm.exportSelected(new_scene_name, force= True, channels= True,
                          type=  'mayaBinary')
Example #10
0
 def switchToStandIn(renderGeometry):
     """Replace render geometry with Arnold StandIn (.ass) nodes, group the
     stand-ins, and export the converted asset as an SI file.

     NOTE(review): relies on many module-level names not visible here
     (assVer, sPath, assDir, asset, rndGeo, siVer, SI, mayaSmoothOff,
     copyMTOAAttr) — confirm against the enclosing module.
     """
     rndGeoName = (pm.ls(sl = 1)[0]).split('_Geo')
     rndAssName = str(rndGeoName[0]) + '_Ass'
     assList = []
     # NOTE(review): 'list' here is the builtin type, so this loop raises
     # TypeError — it almost certainly should iterate a geometry list (the
     # unused 'renderGeometry' parameter looks like the intended source)
     for geo in list: #export ass from selected group 
         
         assName = geo + assVer
         assExport = os.path.join(sPath, assDir, assName ).replace('\\' , '/')
         mayaSmoothOff(geo)
         pm.select(geo)
         # assign a neutral shader before export
         pm.hyperShade(assign= 'lambert1' )
         pm.exportSelected(assExport, force = 1)
         pm.importFile(assExport) #import ass and rename
         standIn = pm.PyNode('ArnoldStandIn')
         
         standIn.rename('ASS_' + geo)
         standIn.mode.set(0) #set standIn display mode
         copyMTOAAttr(geo,standIn) # copy mtoa attributes from render geo to standIn
         assList.append(standIn)
         standIn.translate.lock() # lock TRANSFORM for STANDIN
         standIn.rotate.lock()
     standInGRP = pm.group(assList, n = rndAssName)
     standInGRP.translate.lock()
     standInGRP.rotate.lock()
     pm.parent(standInGRP,asset)
     pm.parent(rndGeo, w=1) #Unparent Render geo
     pm.select(asset)
     # confirm before overwriting an existing SI file
     if os.path.exists(SI(asset, siVer)):
         confirm = pm.confirmDialog ( title='File exists!', message = str(SI(asset, siVer)).split('/')[-1], 
                                      button=['OVERWRITE', 'CANCEL'],
                                      defaultButton= 'OVERWRITE',
                                      cancelButton= 'CANCEL',
                                      dismissString= 'CANCEL' )
         if confirm == 'OVERWRITE':
             siExport = pm.exportSelected(SI(asset, siVer), force = 1) #export SI file
             print 'ASSET OVERWRITEN TO: ' + str(siExport)
         else:
             print 'CANCELED!'
             sys.exit()
     else:
         siExport = pm.exportSelected(SI(asset, siVer), force = 1) #export SI file
         print 'ASSET CONVERTED TO: ' + str(siExport)
Example #11
0
def export_layer( layer ):
  """Duplicate the members of a bake layer, merge them into one bake mesh
  and export it as an OBJ under <project>/xn_bake_data/xnezbake/.

  Args:
      layer: bake-layer identifier accepted by get_bake_layer().

  Returns:
      The value returned by pmc.exportSelected(), or False when no bake
      mesh could be built.
  """
  layer_node = get_bake_layer( layer )
  members = get_members( layer_node )
  
  pmc.select( clear = True )
  
  meshes = [ ]
  
  project_dir =  pmc.workspace.getPath( )
  data_dir = project_dir + r'/xn_bake_data'
  
  if not os.path.exists( data_dir ):
    os.mkdir( data_dir )
    
  # duplicate each member without construction history and keep the
  # duplicates that carry a mesh shape
  # (was 'not members == None' — E711, identity comparison is the idiom)
  if members is not None:
    for orig_obj in members:
      
      new_obj = pmc.duplicate( orig_obj )[ 0 ]
      
      pmc.delete( new_obj, constructionHistory = True )
      
      relatives = new_obj.listRelatives( )
      
      for r in relatives:
        if r.nodeType( ) == 'mesh':
          
          meshes.append( new_obj )
          
  bake_mesh = make_bake_mesh( meshes )
  
  if bake_mesh == False:
    return False
  
  if not os.path.exists( data_dir + r'/xnezbake/' ):
    os.mkdir( data_dir + r'/xnezbake/' )
    
  pmc.select( bake_mesh )
  
  # Check that OBJ Export is enabled
  if not pmc.pluginInfo( 'objExport.mll', q = True, loaded = True ):
    pmc.loadPlugin( 'objExport.mll' )
  
  output = pmc.exportSelected( data_dir + r'/xnezbake/' + layer_node.name( ) + '.obj',
                               force = True,
                               options = 'groups=0;ptgroups=0;materials=0;smoothing=1;normals=1',
                               type = 'OBJexport' )
  
  # no-arg delete removes the current selection (the temporary bake mesh)
  pmc.delete( )

  return output
Example #12
0
 def animOut(self):
     """Rewrite 'connectAttr' lines in the list widget to reference copied
     anim curves, export those copies to a .ma file and log the result.

     NOTE(review): assumes each relevant item holds a MEL line of the form
     connectAttr "src.attr" "dst.attr" — confirm with whatever fills
     self.listWg.
     """
     font = QtGui.QFont()
     font.setBold(1)
     font.setPixelSize(12)
     connectItem = []
     getList = []
     inputText = '--- anim OutputFile log ---\n'
     getAnimNode = []
     
     # collect all list-widget items, then keep only 'connectAttr' lines
     for x in xrange(self.listWg.count()):
         getList.append(self.listWg.item(x))
     for y in getList:
         if not str(y.text()).find('connectAttr') == -1:
             connectItem.append(y)
     for z in connectItem:
         z.setFont(font)
         # source node is the first quoted token, destination the second
         nodeName = str(z.text()).split('"')[1].split('.')[0]
         inNodeName = str(z.text()).split('"')[3]
         node = pm.PyNode(nodeName)
         if not str(node.nodeType()).find('animCurve') == -1:
             # redirect the line to the 'copy_' duplicate of the anim curve
             inText ='connectAttr "' + 'copy_' + z.text().split('"')[1].split(':')[-1] + '" "' + str(z.text()).split('"')[3] + '"'
             z.setText(inText)
             getAnimNode.append(node)
             inputText += 'animCurve --- ' + node.name() + '\n'
         elif not str(node.nodeType()).find('animBlendNode') == -1:
             # comment out lines whose source is a blend node
             z.setText('//' + z.text())
             inputText += (inNodeName + '\n')
         else:
             # comment out everything else too
             z.setText('//' + z.text())
             inputText += (inNodeName + '\n')
     
     outNodeList = self.copyAnimCurve(getAnimNode)
     self.allOutFile()
     
     ###
     
     # export the copied curves next to the .mel output path as a .ma
     pm.select(cl=1)
     pm.select(outNodeList, r=1)
     outPath = str(self.outPathLineEditer.text()).replace('.mel','.ma')
     out = pm.exportSelected(outPath, typ='mayaAscii')
     pm.select(cl=1)
     inputText += out + '\n'
     
     ###
     
     self.logLabel.setText(inputText)
Example #13
0
 def export_to_ma(self):
     '''
     Export to a new .ma file next to the current scene when self.export is
     truthy: either self.main_group (self.group truthy) or the bound
     joints + geometry. Tries Maya ASCII first, falls back to Maya Binary.
     '''
     # original compared '== True' / '== False' explicitly
     if not self.export:
         return

     def _export_selection():
         # Export whatever is currently selected to <scene dir>/<self.name>;
         # this try/except pair was duplicated verbatim in both branches.
         current_scene = pm.sceneName()
         dir_name = os.path.dirname(current_scene)
         new_scene_name = os.path.join(dir_name, self.name)
         try:
             pm.exportSelected(new_scene_name, force= True, channels= True,
                           type=  'mayaAscii')
         except Exception:
             # ascii failed; fall back to binary (narrowed from bare except)
             pm.exportSelected(new_scene_name, force= True, channels= True,
                           type=  'mayaBinary')

     if self.group:
         pm.select(clear= True)
         pm.select(self.main_group)
         _export_selection()
     else:
         # gather the joint names and bound geometry from the instances
         joints = ['%s' % (bound_geo.get_joint())
                   for bound_geo in self.bound_geo_instances]
         geometry = [bound_geo.get_bound_geo()
                     for bound_geo in self.bound_geo_instances]

         pm.select(clear= True)
         pm.select(joints, geometry)
         _export_selection()
Example #14
0
    def setAttributes(self):
        attrType = self.type

        if attrType == 'camera':
            for t in [ 'centerOfInterest', 'fStop', 'focalLength', 'focusDistance', 'horizontalFilmAperture', 'lensSqueezeRatio', 'shutterAngle', 'verticalFilmAperture']:
                self.attributeList.append(t)
                setattr(self, t, self.checkAttribute(self.transform.attr(t)))
                
        elif attrType == 'mesh':
            for t in ['file']:
                self.attributeList.append(t)
                #print 'Exporting', self.transform
                pm.select(self.transform)
                setattr(self, t, dict({t:pm.exportSelected('%s/%s' %( os.getenv("HOME"), self.transform.shortName()), constraints=False, force=True, type='FBX export')}))
                
        elif attrType in ['pointLight', 'directionalLight', 'spotLight']:
            #Light2
            for t in [ 'intensity', 'color' ]:
               self.attributes.append(t)
               setattr(self, t, self.checkAttribute(self.transform.attr(t)))
        else:
            print '%s', attrType
            
        '''
        Check if there is any animation, 
        if there is return animation in nuke format back
        else return the value in nuke format
        ['tx', 'ty', 'tz', 'rx', 'ry', 'rz', 'sx', 'sy', 'sz', 'v']
        vector3s needs to go together
        '''
            
        for t in ['translate', 'rotate', 'scale', 'rotateOrder']:
            #print self.transform.attr(t)
            self.attributeList.append(t)
            attributes = self.checkAttribute(self.transform.attr(t))
            #print attributes
            try:
                setattr(self, t, attributes)
            except Exception as e:
                print e
Example #15
0
    def generate_gpu(self):
        """generates the GPU representation of the current scene

        Pipeline: validate/open the version, require every referenced asset
        to already have a GPU repr, bake local geometry to per-node Alembic
        gpuCache files (replacing the geometry with gpuCache nodes), switch
        all references to their GPU repr, then save/export the ___GPU take.
        """
        # validate the version first
        self.version = self._validate_version(self.version)

        self.open_version(self.version)

        # load necessary plugins
        pm.loadPlugin('gpuCache')
        pm.loadPlugin('AbcExport')
        pm.loadPlugin('AbcImport')

        # check if all references have an GPU repr first
        refs_with_no_gpu_repr = []
        for ref in pm.listReferences():
            if ref.version and not ref.has_repr('GPU'):
                refs_with_no_gpu_repr.append(ref)

        if len(refs_with_no_gpu_repr):
            raise RuntimeError(
                'Please generate the GPU Representation of the references '
                'first!!!\n%s' %
                '\n'.join(map(lambda x: str(x.path), refs_with_no_gpu_repr)))

        # unload all references
        for ref in pm.listReferences():
            ref.unload()

        # for local models generate an ABC file
        output_path = os.path.join(self.version.absolute_path,
                                   'Outputs/alembic/').replace('\\', '/')

        # MEL templates filled per node below
        abc_command = \
            'AbcExport -j "-frameRange %(start_frame)s ' \
            '%(end_frame)s ' \
            '-ro -stripNamespaces ' \
            '-uvWrite ' \
            '-wholeFrameGeo ' \
            '-worldSpace ' \
            '-root |%(node)s -file %(file_path)s";'

        gpu_command = \
            'gpuCache -startTime %(start_frame)s ' \
            '-endTime %(end_frame)s ' \
            '-optimize -optimizationThreshold 40000 ' \
            '-writeMaterials ' \
            '-directory "%(path)s" ' \
            '-fileName "%(filename)s" ' \
            '%(node)s;'

        # bake a single frame: the current time
        start_frame = end_frame = int(pm.currentTime(q=1))

        if not self.is_scene_assembly_task(self.version.task):

            if self.is_vegetation_task(self.version.task):
                # in vegetation files, we export the GPU files directly from
                # the Base version, also we use the geometry under
                # "pfxPolygons" and parent the resulting Stand-In nodes to the
                # pfxPolygons
                # load all references
                for ref in pm.listReferences():
                    ref.load()

                # find the _pfxPolygons node
                pfx_polygons_node = pm.PyNode('kks___vegetation_pfxPolygons')

                for node in pfx_polygons_node.getChildren():
                    for child_node in node.getChildren():
                        child_node_name = child_node.name().split('___')[-1]
                        child_node_shape = child_node.getShape()
                        child_node_shape_name = None

                        if child_node_shape:
                            child_node_shape_name = child_node_shape.name()

                        # gpuCache writes to a temp location first, then the
                        # file is moved into the version's alembic output dir
                        pm.select(child_node)
                        temp_output_fullpath = \
                            tempfile.mktemp().replace('\\', '/')
                        temp_output_path, temp_output_filename = \
                            os.path.split(temp_output_fullpath)

                        output_filename = '%s_%s' % (
                            self.version.nice_name,
                            child_node_name.split(':')[-1].replace(
                                ':', '_').replace('|', '_'))

                        # run the mel command
                        # check if file exists
                        pm.mel.eval(
                            gpu_command % {
                                'start_frame': start_frame,
                                'end_frame': end_frame,
                                'node': child_node.fullPath(),
                                'path': temp_output_path,
                                'filename': temp_output_filename
                            })

                        cache_file_full_path = \
                            os.path\
                            .join(output_path, output_filename + '.abc')\
                            .replace('\\', '/')

                        # create the intermediate directories
                        try:
                            os.makedirs(os.path.dirname(cache_file_full_path))
                        except OSError:
                            # directory exists
                            pass

                        # now move in to its place
                        shutil.move(temp_output_fullpath + '.abc',
                                    cache_file_full_path)

                        # set rotate and scale pivots
                        rp = pm.xform(child_node, q=1, ws=1, rp=1)
                        sp = pm.xform(child_node, q=1, ws=1, sp=1)
                        #child_node.setRotatePivotTranslation([0, 0, 0])

                        # delete the child and add a GPU node instead
                        pm.delete(child_node)

                        # check if file exists and create nodes
                        if os.path.exists(cache_file_full_path):
                            gpu_node = pm.createNode('gpuCache')
                            gpu_node_tra = gpu_node.getParent()

                            # reuse the original names so lookups keep working
                            pm.parent(gpu_node_tra, node)
                            gpu_node_tra.rename(child_node_name)

                            if child_node_shape_name is not None:
                                gpu_node.rename(child_node_shape_name)

                            pm.xform(gpu_node_tra, ws=1, rp=rp)
                            pm.xform(gpu_node_tra, ws=1, sp=sp)

                            gpu_node.setAttr('cacheFileName',
                                             cache_file_full_path,
                                             type="string")
                        else:
                            print('File not found!: %s' % cache_file_full_path)

                # clean up other nodes
                pm.delete('kks___vegetation_pfxStrokes')
                pm.delete('kks___vegetation_paintableGeos')

            else:
                root_nodes = self.get_local_root_nodes()
                if len(root_nodes):
                    for root_node in root_nodes:
                        # export each child of each root as separate nodes
                        for child_node in root_node.getChildren():

                            # check if it is a transform node
                            if not isinstance(child_node, pm.nt.Transform):
                                continue

                            if not auxiliary.has_shape(child_node):
                                continue

                            child_name = child_node.name()
                            child_shape = child_node.getShape()
                            child_shape_name = None
                            if child_shape:
                                child_shape_name = child_shape.name()

                            child_full_path = \
                                child_node.fullPath()[1:].replace('|', '_')

                            # same temp-file-then-move dance as above
                            temp_output_fullpath = \
                                tempfile.mktemp().replace('\\', '/')
                            temp_output_path, temp_output_filename = \
                                os.path.split(temp_output_fullpath)

                            output_filename =\
                                '%s_%s' % (
                                    self.version.nice_name,
                                    child_full_path
                                )

                            # run the mel command
                            # check if file exists
                            pm.mel.eval(
                                gpu_command % {
                                    'start_frame': start_frame,
                                    'end_frame': end_frame,
                                    'node': child_node.fullPath(),
                                    'path': temp_output_path,
                                    'filename': temp_output_filename
                                })

                            cache_file_full_path = \
                                os.path\
                                .join(
                                    output_path,
                                    '%s.abc' % (
                                        output_filename
                                    )
                                )\
                                .replace('\\', '/')

                            # create the intermediate directories
                            try:
                                os.makedirs(
                                    os.path.dirname(cache_file_full_path))
                            except OSError:
                                # directory exists
                                pass

                            # now move in to its place
                            shutil.move(temp_output_fullpath + '.abc',
                                        cache_file_full_path)

                            # set rotate and scale pivots
                            rp = pm.xform(child_node, q=1, ws=1, rp=1)
                            sp = pm.xform(child_node, q=1, ws=1, sp=1)
                            # rpt = child_node.getRotatePivotTranslation()

                            # delete the child and add a GPU node instead
                            pm.delete(child_node)

                            # check if file exists
                            if os.path.exists(cache_file_full_path):
                                gpu_node = pm.createNode('gpuCache')
                                gpu_node_tra = gpu_node.getParent()

                                pm.parent(gpu_node_tra, root_node)
                                gpu_node_tra.rename(child_name)

                                if child_shape_name is not None:
                                    gpu_node.rename(child_shape_name)

                                pm.xform(gpu_node_tra, ws=1, rp=rp)
                                pm.xform(gpu_node_tra, ws=1, sp=sp)
                                # child_node.setRotatePivotTranslation(rpt)

                                gpu_node.setAttr('cacheFileName',
                                                 cache_file_full_path,
                                                 type="string")

        # load all references again
        # convert all references to GPU
        logger.debug('converting all references to GPU')
        for ref in pm.listReferences():
            # check if this is a Model reference
            ref.to_repr('GPU')
            ref.load()

        # if this is an Exterior/Interior -> Layout -> Hires task flatten it
        task = self.version.task

        is_exterior_or_interior_task = self.is_exterior_or_interior_task(task)
        if is_exterior_or_interior_task:
            logger.debug('importing all references')
            # and import all of the references
            # (importing can introduce new nested references, hence the loop)
            all_refs = pm.listReferences()
            while len(all_refs) != 0:
                for ref in all_refs:
                    if not ref.isLoaded():
                        ref.load()
                    ref.importContents()
                all_refs = pm.listReferences()

            # assign lambert1 to all GPU nodes
            pm.sets('initialShadingGroup', e=1, fe=auxiliary.get_root_nodes())

            # clean up
            self.clean_up()

        # 6. save the scene as {{original_take}}___GPU
        # use maya
        take_name = '%s%s%s' % (self.base_take_name,
                                Representation.repr_separator, 'GPU')
        v = self.get_latest_repr_version(take_name)
        self.maya_env.save_as(v)

        # export the root nodes under the same file
        if is_exterior_or_interior_task:
            logger.debug('exporting root nodes')
            pm.select(auxiliary.get_root_nodes())
            pm.exportSelected(v.absolute_full_path,
                              type='mayaAscii',
                              force=True)

        logger.debug('renewing scene')
        # clear scene
        pm.newFile(force=True)
Example #16
0
def exportAssetAssembly(name, rigTopNode, meshTopNode, path, postScript=None):
    """Export the asset assembly. Model, rig and connections dict.

    Writes three files into *path*: <name>.jmm (joint connections),
    <name>_rig.ma and <name>_model.ma.

    Args:
        name (str): Name of the asset
        rigTopNode (str): Name of the rig top node
        meshTopNode (str): Name of the model top node
        path (str): Destination directory
        postScript (path, optional): Script to run before export

    Returns:
        None: None
    """
    if pm.ls(rigTopNode):
        rigTopNode = pm.PyNode(rigTopNode)
    else:
        pm.displayError("{} doesn't exist or duplicated. Please check your "
                        "scene".format(rigTopNode))
        return

    if pm.ls(meshTopNode):
        meshTopNode = pm.PyNode(meshTopNode)
    else:
        pm.displayError("{} doesn't exist or duplicated. Please check "
                        "your scene".format(meshTopNode))
        return
    # check the folder and script
    # if the target name exist abort and request another name

    # NOTE(review): rigGroups[3] is assumed to be the deformer-joint set —
    # confirm the rig template guarantees this index
    deformer_jnts = rigTopNode.rigGroups[3].connections()[0].members()
    if not deformer_jnts:
        pm.displayError(
            "{} is empty. The tool can't find any joint".format(meshTopNode))

    # export connections and cut joint connections
    file_path = os.path.join(path, name + ".jmm")
    dm_nodes = exportConnections(source=deformer_jnts,
                                 filePath=file_path,
                                 disc=True)

    # cut al possible remaining connection and adjust hierarchy
    # joint or visibility
    jnt_org = pm.PyNode("jnt_org")
    pm.disconnectAttr(rigTopNode.jnt_vis, jnt_org.visibility)

    # restructure model
    model = pm.createNode("transform", n="model", p=None, ss=True)
    pm.addAttr(model, ln="rigGroups", at='message', m=1)
    pm.parent(meshTopNode, jnt_org, model)

    # disconnect jnt set
    sets = rigTopNode.listConnections(type="objectSet")

    deformersGrp = None
    for oSet in sets:
        if "deformers_grp" in oSet.name():
            deformersGrp = oSet

    # re-home the deformers set message onto the new model node
    if deformersGrp:
        for cnx in deformersGrp.message.listConnections(p=True):
            pm.disconnectAttr(deformersGrp.message, cnx)
        pm.connectAttr(deformersGrp.message, model.attr("rigGroups[0]"))

    # disconnect bindPoses
    dg_poses = rigTopNode.message.listConnections(type="dagPose", p=True)
    for dgp in dg_poses:
        if dgp.node().name().startswith("bindPose"):
            pm.disconnectAttr(rigTopNode.message, dgp)

    # post script
    if postScript:
        # NOTE: execfile is Python 2 only
        try:
            execfile(postScript)
        except Exception as ex:
            template = "An exception of type {0} occured. Arguments:\n{1!r}"
            message = template.format(type(ex).__name__, ex.args)
            pm.displayError(message)
            # give the user a chance to continue despite the script failure
            cont = pm.confirmBox(
                "FAIL: Script Fail", "Do you want to export anyway?" + "\n\n" +
                message + "\n\n" + traceback.format_exc(), "Continue",
                "Cancel")
            if not cont:
                pm.undo()
                return

    # export rig model
    pm.select(dm_nodes, r=True)
    pm.select(rigTopNode, add=True)
    file_path = os.path.join(path, name + "_rig.ma")
    exp = pm.exportSelected(file_path, f=True, type="mayaAscii")
    pm.displayInfo(exp)

    # export mesh and joints
    pm.select(model, r=True)
    file_path = os.path.join(path, name + "_model.ma")
    exp = pm.exportSelected(file_path, f=True, type="mayaAscii")
    pm.displayInfo(exp)
def run():
    """Publish the ``cam_focus_*`` cameras of the current animation scene.

    Steps: clean unknown/shave nodes, copy the rendered focus image
    sequence next to the publish file, export the cameras to a ``.ma``
    file, encode a preview ``.mov`` with ffmpeg, upload it to Shotgun,
    save the scene as the focus working file and open the publish folder.
    """
    # bail out early when there is nothing to export
    if not pm.ls('cam_focus_*'):
        pm.warning(u'没有可以导出的相机')
        return

    # drop nodes that would break the export (unknown plugin nodes and
    # the locked shaveGlobals node)
    if pm.ls(type='unknown'):
        pm.delete(pm.ls(type='unknown'))
    if pm.ls('shaveGlobals'):
        pm.lockNode(pm.ls('shaveGlobals'), l=False)
        pm.delete(pm.ls('shaveGlobals'))

    path_obj = pft.PathDetails.parse_path(pm.sceneName())
    pubfile = path_obj.getNewVersionForTask('cam-focus', publish=True)
    if not path_obj.user:
        pm.warning(u'请检查本场景文件的路径,特别注意下文件名本身')
        return
    # NOTE(review): raises OSError when the directory already exists —
    # confirm the publish path is always fresh
    os.makedirs(os.path.dirname(pubfile))

    # --------------------------------------------------------------------------------------------------------
    # image sequence output
    # projectdir = pm.Workspace.getPath()
    # imagedir = os.path.join(projectdir, 'images/focus')

    images_in_project = pm.workspace(
        en=pm.workspace('images', q=True, fre=True))
    imagedir = os.path.join(images_in_project, 'focus')

    # os.makedirs(os.path.join(os.path.dirname(pubfile), 'images'))
    # pm.sysFile(imagedir, copy=os.path.join(os.path.dirname(pubfile), 'images'))
    shutil.copytree(imagedir,
                    os.path.join(os.path.dirname(pubfile), 'images/focus'))
    # --------------------------------------------------------------------------------------------------------
    # camera export: select the transform of every cam_focus_* camera shape
    cams = map(
        lambda x: x.getParent(),
        filter(lambda x: x.name().startswith('cam_focus_'),
               pm.ls(type='camera')))
    pm.select(cams, r=True)
    camsFile = os.path.splitext(pubfile)[0] + '.ma'
    pm.exportSelected(camsFile, force=True)
    # --------------------------------------------------------------------------------------------------------
    # generate the preview movie
    mmfpeg = 'ffmpeg.exe'  # NOTE(review): variable name is a typo for "ffmpeg"
    seqfile = os.path.normpath(
        os.path.join(os.path.dirname(pubfile), 'images/focus/focus.%04d.exr'))
    movfile = os.path.join(
        os.path.dirname(pubfile),
        '{}.mov'.format(os.path.basename(pubfile).split('.')[0]))
    beginframe, endframe = psg.get_cut_range(
        path_obj.project, '%s_%s' % (path_obj.seq, path_obj.shot))
    # NOTE(review): "+ 2" is one more than the inclusive frame count
    # (endframe - beginframe + 1) — confirm this is deliberate
    framenum = endframe - beginframe + 2
    # there is a bug in ffmpeg here (hence the option combination below)
    convertcmd = '{} -apply_trc iec61966_2_1 -start_number 1001 -f image2 -r 24 -i {} -vcodec h264 -vframes {} -preset veryslow -qp 0 {}'.format(
        mmfpeg, seqfile, framenum, movfile)

    p = subprocess.Popen(convertcmd, shell=True, cwd=os.path.dirname(__file__))
    p.wait()
    # -----------------------------------------------------------------------------------
    # upload to Shotgun
    versionID = psg.addToShotgun(camsFile, '')
    if versionID:
        psg.uploadQuicktime(versionID, movfile)
    # --------------------------------------------------------------------------------------------------------
    # save the scene as the focus working file
    animSourceFile = os.path.join(
        os.path.dirname(pubfile),
        os.path.basename(pm.sceneName()).replace("anim", 'focus'))
    pm.saveAs(animSourceFile)

    # --------------------------------------------------------------------------------------------------------
    # open the publish directory
    pm.warning(u'导出CamFocus相机完成,文件另存为完成,序列帧复制完成')
    os.startfile(os.path.dirname(pubfile))
Example #18
0
 def exportMe(self, outPath):
     """Export every node matching ``*_PLY`` as an individual FBX file.

     Each matching node is selected in turn and written to
     ``outPath + <node>`` with ``pm.exportSelected``.
     """
     print(outPath)
     for part in pm.ls("*_PLY"):
         pm.select(part)
         target = outPath + part
         pm.exportSelected(target, force=True, options="",
                           type="FBX export", pr=True)
Example #19
0
def export_maya_file(**kwargs):
    """Export the current selection to a ``.ma`` file under the configured
    ``mayaFiles`` output directory.

    Keyword Args:
        file_name (str): Base name of the exported file (no extension).
            Defaults to ``'reference_points'``.
    """
    name = kwargs.pop('file_name', 'reference_points')
    target = '{}/mayaFiles/{}.ma'.format(config.output.file_path, name)
    pm.exportSelected(target)
    def __publish_yeti_node(self, item, output, work_template, primary_publish_path,
                                        sg_task, comment, thumbnail_path, progress_cb):
        """
        Publish the selected Yeti node(s) as a Maya ASCII file and publish
        it to Shotgun.
        
        :param item:                    The item to publish
        :param output:                  The output definition to publish with
        :param work_template:           The work template for the current scene
        :param primary_publish_path:    The path to the primary published file
        :param sg_task:                 The Shotgun task we are publishing for
        :param comment:                 The publish comment/description
        :param thumbnail_path:          The path to the publish thumbnail
        :param progress_cb:             A callback that can be used to report progress
        """
        # determine the publish info to use
        #
        progress_cb(10, "Determining publish details")


        # get the current scene path and extract fields from it
        # using the work template:
        scene_path = os.path.abspath(cmds.file(query=True, sn=True))
        fields = work_template.get_fields(scene_path)
        publish_version = fields["version"]
        tank_type = output["tank_type"]

        # create the publish path by applying the fields 
        # with the publish template:
        publish_template = output["publish_template"]
        publish_path = publish_template.apply_fields(fields)
        
        # ensure the publish folder exists:
        publish_folder = os.path.dirname(publish_path)
        self.parent.ensure_folder_exists(publish_folder)

        # determine the publish name:
        # NOTE(review): raises TypeError when "Asset" or "Step" is missing
        # from the template fields — confirm both are always present
        publish_name = fields.get("Asset") + "_" + fields.get("Step")

        # Find additional info from the scene:
        progress_cb(10, "Analysing scene")

        # Added by Chetan Patel
        # May 2016 (KittenWitch Project)
        # ------------------------------------------------
        #    Add attributes to the yeti nodes.
        # ------------------------------------------------

        self.__add_atributes_to_yeti_nodes()

        # Added by Chetan Patel
        # May 2016 (KittenWitch Project)
        # ------------------------------------------------
        #    select and  export the yeti nodes
        # ------------------------------------------------

        cmds.select(cl=True)
        cmds.select(str(item["name"]))

        progress_cb(30, "Exporting Yeti Nodes")
        try:
            self.parent.log_debug("Executing pymel export command:")
            pm.exportSelected(publish_path,
                              constructionHistory=False,
                              constraints=False,
                              expressions=False,
                              shader=True,
                              type="mayaAscii",
                              force=True)

            # close undo chunk if the export is successful and undo attr additions
            # NOTE(review): assumes an undo chunk was opened earlier
            # (presumably inside __add_atributes_to_yeti_nodes) — confirm
            cmds.undoInfo(state=True, closeChunk=True)
            cmds.undo()
        except Exception, e:
            # close undo chunks if the export fails and undo attr additions
            cmds.undoInfo(state=True, closeChunk=True)
            cmds.undo()
            raise TankError("Failed to export Yeti Nodes: %s" % e)
Example #21
0
                                         defaultButton='OK',
                                         cancelButton='Cancel',
                                         dismissString='Cancel')
    if endNameInputWindow == u'\u786e\u8ba4':
        endName = pm.promptDialog(query=True, text=True)
    else:
        endName = ''
    startIndexInputWindow = pm.promptDialog(title='起始值设置',
                                            message='起始值:',
                                            button=['确认', '取消'],
                                            defaultButton='OK',
                                            cancelButton='Cancel',
                                            dismissString='Cancel')
    if startIndexInputWindow == u'\u786e\u8ba4':
        startIndex = pm.promptDialog(query=True, text=True)
    else:
        startIndex = '0'
    for j in range(fileCount):
        pm.select('_Mesh' + str(j))
        mel.eval(
            'polyCleanupArgList 4 { "0","1","1","0","0","0","0","0","0","1e-05","0","1e-05","0","1e-05","0","1","0","1" }'
        )
        pm.select('_Mesh' + str(j))
        pm.polyReduce(ver=1, p=reducePercent, n="_Mesh" + str(j))
        indexName = '%04d' % (j + int(startIndex))
        pm.exportSelected(
            pathOfFiles + '/ReducedFiles/' + startName + indexName + endName +
            '.obj',
            force=1,
            options='groups=0;ptgroups=0;materials=0;smoothing=0;normals=0')
Example #22
0
    def generate_ass(self):
        """generates the ASS representation of the current scene

        For Model Tasks the ASS is generated over the LookDev Task because it
        is not possible to assign a material to an object inside an ASS file.

        Branches on the task type (LookDev / Vegetation / Model), exports
        per-node ``.ass`` files with ``arnoldExportAss``, replaces the scene
        contents with ``aiStandIn`` nodes and saves the result as the
        ``{take}___ASS`` representation version.  Leaves Maya with a fresh
        empty scene when done.
        """
        # before doing anything, check if this is a look dev task
        # and export the objects from the referenced files with their current
        # shadings, then replace all of the references to ASS repr and than
        # add Stand-in nodes and parent them under the referenced models

        # load necessary plugins
        pm.loadPlugin('mtoa')

        # disable "show plugin shapes"
        active_panel = auxiliary.Playblaster.get_active_panel()
        show_plugin_shapes = pm.modelEditor(active_panel, q=1, pluginShapes=1)
        pm.modelEditor(active_panel, e=1, pluginShapes=False)

        # validate the version first
        self.version = self._validate_version(self.version)

        self.open_version(self.version)

        task = self.version.task

        # export_command = 'arnoldExportAss -f "%(path)s" -s -mask 24 ' \
        #                  '-lightLinks 0 -compressed -boundingBox ' \
        #                  '-shadowLinks 0 -cam perspShape;'

        # fix: keep the trailing space after "-mask 60" — without it the
        # concatenated MEL command collapsed to "-mask 60-lightLinks ..."
        export_command = 'arnoldExportAss -f "%(path)s" -s -mask 60 ' \
                         '-lightLinks 1 -compressed -boundingBox ' \
                         '-shadowLinks 1 -cam perspShape;'

        # calculate output path
        output_path = \
            os.path.join(self.version.absolute_path, 'Outputs/ass/')\
            .replace('\\', '/')

        # check if all references have an ASS repr first
        refs_with_no_ass_repr = []
        for ref in pm.listReferences():
            if ref.version and not ref.has_repr('ASS'):
                refs_with_no_ass_repr.append(ref)

        if len(refs_with_no_ass_repr):
            raise RuntimeError(
                'Please generate the ASS Representation of the references '
                'first!!!\n%s' %
                '\n'.join(map(lambda x: str(x.path), refs_with_no_ass_repr))
            )

        if self.is_look_dev_task(task):
            # in look dev files, we export the ASS files directly from the Base
            # version and parent the resulting Stand-In node to the parent of
            # the child node

            # load only Model references
            for ref in pm.listReferences():
                v = ref.version
                load_ref = False
                if v:
                    ref_task = v.task
                    if self.is_model_task(ref_task):
                        load_ref = True

                if load_ref:
                    ref.load()
                    ref.importContents()

            # Make all texture paths relative
            # replace all "$REPO#" from all texture paths first
            #
            # This is needed to properly render textures with any OS
            types_and_attrs = {
                'aiImage': 'filename',
                'file': 'fileTextureName',
                'imagePlane': 'imageName'
            }

            for node_type in types_and_attrs.keys():
                attr_name = types_and_attrs[node_type]
                for node in pm.ls(type=node_type):
                    orig_path = node.getAttr(attr_name).replace("\\", "/")
                    path = re.sub(
                        r'(\$REPO[0-9/]+)',
                        '',
                        orig_path
                    )
                    tx_path = self.make_tx(path)
                    inputs = node.attr(attr_name).inputs(p=1)
                    if len(inputs):
                        # set the input attribute
                        for input_node_attr in inputs:
                            input_node_attr.set(tx_path)
                    else:
                        node.setAttr(attr_name, tx_path)

            # randomize all render node names
            # This is needed to prevent clashing of materials in a bigger scene
            for node in pm.ls(type=RENDER_RELATED_NODE_TYPES):
                if node.referenceFile() is None and \
                   node.name() not in READ_ONLY_NODE_NAMES:
                    node.rename('%s_%s' % (node.name(), uuid.uuid4().hex))

            nodes_to_ass_files = {}

            # export all root ass files as they are
            for root_node in auxiliary.get_root_nodes():
                for child_node in root_node.getChildren():
                    # check if it is a transform node
                    if not isinstance(child_node, pm.nt.Transform):
                        continue

                    if not auxiliary.has_shape(child_node):
                        continue

                    # randomize child node name
                    # TODO: This is not working as intended, node names are like |NS:node1|NS:node2
                    #       resulting a child_node_name as "node2"
                    child_node_name = child_node\
                        .fullPath()\
                        .replace('|', '_')\
                        .split(':')[-1]

                    child_node_full_path = child_node.fullPath()

                    pm.select(child_node)
                    child_node.rename('%s_%s' % (child_node.name(), uuid.uuid4().hex))

                    output_filename =\
                        '%s_%s.ass' % (
                            self.version.nice_name,
                            child_node_name
                        )

                    output_full_path = \
                        os.path.join(output_path, output_filename)

                    # run the mel command
                    pm.mel.eval(
                        export_command % {
                            'path': output_full_path.replace('\\', '/')
                        }
                    )
                    # the compressed flag makes Arnold write a gzipped file
                    nodes_to_ass_files[child_node_full_path] = \
                        '%s.gz' % output_full_path
                    # print('%s -> %s' % (
                    #     child_node_full_path,
                    #     output_full_path)
                    # )

            # reload the scene
            pm.newFile(force=True)
            self.open_version(self.version)

            # convert all references to ASS
            # we are doing it a little bit early here, but we need to
            for ref in pm.listReferences():
                ref.to_repr('ASS')

            all_stand_ins = pm.ls(type='aiStandIn')
            for ass_node in all_stand_ins:
                ass_tra = ass_node.getParent()
                full_path = ass_tra.fullPath()
                if full_path in nodes_to_ass_files:
                    ass_file_path = \
                        Repository.to_os_independent_path(
                            nodes_to_ass_files[full_path]
                        )
                    ass_node.setAttr('dso', ass_file_path)

        elif self.is_vegetation_task(task):
            # in vegetation files, we export the ASS files directly from the
            # Base version, also we use the geometry under "pfxPolygons"
            # and parent the resulting Stand-In nodes to the
            # pfxPolygons
            # load all references
            for ref in pm.listReferences():
                ref.load()

            # Make all texture paths relative
            # replace all "$REPO#" from all texture paths first
            #
            # This is needed to properly render textures with any OS
            types_and_attrs = {
                'aiImage': 'filename',
                'file': 'fileTextureName',
                'imagePlane': 'imageName'
            }

            for node_type in types_and_attrs.keys():
                attr_name = types_and_attrs[node_type]
                for node in pm.ls(type=node_type):
                    orig_path = node.getAttr(attr_name).replace("\\", "/")
                    path = re.sub(
                        r'(\$REPO[0-9/]+)',
                        '',
                        orig_path
                    )
                    tx_path = self.make_tx(path)
                    inputs = node.attr(attr_name).inputs(p=1)
                    if len(inputs):
                        # set the input attribute
                        for input_node_attr in inputs:
                            input_node_attr.set(tx_path)
                    else:
                        node.setAttr(attr_name, tx_path)

            # randomize all render node names
            # This is needed to prevent clashing of materials in a bigger scene
            for node in pm.ls(type=RENDER_RELATED_NODE_TYPES):
                if node.referenceFile() is None and \
                   node.name() not in READ_ONLY_NODE_NAMES:
                    node.rename('%s_%s' % (node.name(), uuid.uuid4().hex))

            # find the _pfxPolygons node
            pfx_polygons_node = pm.PyNode('kks___vegetation_pfxPolygons')

            for node in pfx_polygons_node.getChildren():
                for child_node in node.getChildren():
                    #print('processing %s' % child_node.name())
                    child_node_name = child_node.name().split('___')[-1]

                    pm.select(child_node)
                    output_filename =\
                        '%s_%s.ass' % (
                            self.version.nice_name,
                            child_node_name.replace(':', '_').replace('|', '_')
                        )

                    output_full_path = \
                        os.path.join(output_path, output_filename)

                    # run the mel command
                    pm.mel.eval(
                        export_command % {
                            'path': output_full_path.replace('\\', '/')
                        }
                    )

                    # generate an aiStandIn node and set the path
                    ass_node = auxiliary.create_arnold_stand_in(
                        path='%s.gz' % output_full_path
                    )
                    ass_tra = ass_node.getParent()

                    # parent the ass node under the current node
                    # under pfx_polygons_node
                    pm.parent(ass_tra, node)

                    # set pivots
                    rp = pm.xform(child_node, q=1, ws=1, rp=1)
                    sp = pm.xform(child_node, q=1, ws=1, sp=1)
                    # rpt = child_node.getRotatePivotTranslation()

                    pm.xform(ass_node, ws=1, rp=rp)
                    pm.xform(ass_node, ws=1, sp=sp)
                    # ass_node.setRotatePivotTranslation(rpt)

                    # delete the child_node
                    pm.delete(child_node)

                    # give it the same name with the original
                    ass_tra.rename('%s' % child_node_name)

            # clean up other nodes
            pm.delete('kks___vegetation_pfxStrokes')
            pm.delete('kks___vegetation_paintableGeos')

        elif self.is_model_task(task):
            # convert all children of the root node
            # to an empty aiStandIn node
            # and save it as it is
            root_nodes = self.get_local_root_nodes()

            for root_node in root_nodes:
                for child_node in root_node.getChildren():
                    child_node_name = child_node.name()

                    rp = pm.xform(child_node, q=1, ws=1, rp=1)
                    sp = pm.xform(child_node, q=1, ws=1, sp=1)

                    pm.delete(child_node)

                    ass_node = auxiliary.create_arnold_stand_in(path='')
                    ass_tra = ass_node.getParent()
                    pm.parent(ass_tra, root_node)
                    ass_tra.rename(child_node_name)

                    # set pivots
                    pm.xform(ass_tra, ws=1, rp=rp)
                    pm.xform(ass_tra, ws=1, sp=sp)

                    # because there will be possible material assignments
                    # in look dev disable overrideShaders
                    ass_node.setAttr('overrideShaders', False)

                    # we definitely do not use light linking in our studio,
                    # which seems to create more problems then it solves.
                    ass_node.setAttr('overrideLightLinking', False)

        # convert all references to ASS
        for ref in pm.listReferences():
            ref.to_repr('ASS')
            ref.load()

        # fix an arnold bug
        for node_name in ['initialShadingGroup', 'initialParticleSE']:
            node = pm.PyNode(node_name)
            node.setAttr("ai_surface_shader", (0, 0, 0), type="float3")
            node.setAttr("ai_volume_shader", (0, 0, 0), type="float3")

        # if this is an Exterior/Interior -> Layout -> Hires task flatten it
        is_exterior_or_interior_task = self.is_exterior_or_interior_task(task)
        if is_exterior_or_interior_task:
            # and import all of the references
            all_refs = pm.listReferences()
            while len(all_refs) != 0:
                for ref in all_refs:
                    if not ref.isLoaded():
                        ref.load()
                    ref.importContents()
                all_refs = pm.listReferences()

            # assign lambert1 to all GPU nodes
            pm.sets('initialShadingGroup', e=1, fe=auxiliary.get_root_nodes())

            # now remove them from the group
            pm.sets('initialShadingGroup', e=1, rm=pm.ls())

            # and to make sure that no override is enabled
            [node.setAttr('overrideLightLinking', False)
             for node in pm.ls(type='aiStandIn')]

            # clean up
            self.clean_up()

        # check if all aiStandIn nodes are included in
        # ArnoldStandInDefaultLightSet set
        try:
            arnold_stand_in_default_light_set = \
                pm.PyNode('ArnoldStandInDefaultLightSet')
        except pm.MayaNodeError:
            # just create it
            arnold_stand_in_default_light_set = \
                pm.createNode(
                    'objectSet',
                    name='ArnoldStandInDefaultLightSet'
                )

        pm.select(None)
        pm.sets(
            arnold_stand_in_default_light_set,
            fe=pm.ls(type='aiStandIn')
        )

        # save the scene as {{original_take}}___ASS
        # use maya
        take_name = '%s%s%s' % (
            self.base_take_name, Representation.repr_separator, 'ASS'
        )
        v = self.get_latest_repr_version(take_name)
        self.maya_env.save_as(v)

        # export the root nodes under the same file
        if is_exterior_or_interior_task:
            pm.select(auxiliary.get_root_nodes())
            pm.exportSelected(
                v.absolute_full_path,
                type='mayaAscii',
                force=True
            )

        # new scene
        pm.newFile(force=True)

        # reset show plugin shapes option
        active_panel = auxiliary.Playblaster.get_active_panel()
        pm.modelEditor(active_panel, e=1, pluginShapes=show_plugin_shapes)
Example #23
0
 def write(self, export_path, options):
     """Write the current selection to ``export_path`` as a Wavefront OBJ.

     Args:
         export_path: Destination file path.
         options: OBJexport option string passed straight through to Maya.
     """
     export_args = dict(type='OBJexport', options=options)
     pm.exportSelected(export_path, **export_args)
Example #24
0
# save the current scene
currScene = pm.saveAs( 'pymel_test_main.ma')

# the parent property gives the parent directory of the current scene.
# the / (slash or divide) operator serves as an os independent way of concatenating paths
# it is a shortcut to os.path.join
exportScene = currScene.parent / 'pymel_test_ref.ma'

# if a file already exists where we want to export, delete it first
if exportScene.exists():
	print "removing existing pymel export scene"
	exportScene.remove()

print "exporting new scene:", exportScene
pm.exportSelected( exportScene, f=1 )

# delete the original group
# NOTE(review): `g` is created earlier in this script (outside this excerpt)
pm.delete(g)

# reference it in a few times
for i in range(1,4):
	ref = pm.createReference( exportScene, namespace=('foo%02d' % i) )
	# offset each newly created reference:
	# first we list all the nodes in the new reference, and get the first in the list.
	# this will be the 'newGroup' node.
	allRefNodes = ref.nodes()
	print "moving" , allRefNodes[0]
	allRefNodes[0].tx.set( 2*i )

# print out some information about our newly created references
Example #25
0
def main():
    """Demonstrate a basic pymel workflow: build geometry, save and export
    a scene, then create, rename and manipulate file references."""
    s = pm.polySphere()[
        0]  # second in list is the history node, if construction history is on
    c = pm.polyCube()[0]

    print(c, s)
    c.setTranslation([0, 2, 0])
    s.setTranslation([1, -2, 0])

    g = pm.group(s, c, n='newGroup')

    print("The children of %s are %s" % (g, g.getChildren()))
    # print g.getChildren()[0].getShape()
    print("difference =",
          c.translate.get() - s.translate.get())  # basic vector operation

    s2 = s.duplicate()[0]

    # move the new sphere relatively along the z axis
    s2.setTranslation([0, 0, -2], relative=1)

    # cycle through and move some verts.
    # we're moving each verts a relative amount based on its vertex number
    num = s2.numVertices()
    for i, vert in enumerate(s2.verts):
        pm.move(vert, [i / float(num), 0, 0], r=1)

    # save the current scene
    currScene = pm.saveAs('pymel_test_main.ma')

    # the parent property gives the parent directory of the current scene.
    # the / (slash or divide) operator serves as an os independent way of concatenating paths
    # it is a shortcut to os.path.join
    exportScene = currScene.parent / 'pymel_test_ref.ma'

    # if a file already exists where we want to export, delete it first
    if exportScene.exists():
        print("removing existing pymel export scene")
        exportScene.remove()

    print("exporting new scene:", exportScene)
    pm.exportSelected(exportScene, f=1)

    # delete the original group
    pm.delete(g)

    # reference it in a few times
    for i in range(1, 4):
        ref = pm.createReference(exportScene, namespace=('foo%02d' % i))
        # offset each newly created reference:
        # first we list all the nodes in the new reference, and get the first in the list.
        # this will be the 'newGroup' node.
        allRefNodes = ref.nodes()
        print("moving", allRefNodes[0])
        allRefNodes[0].tx.set(2 * i)

    # print out some information about our newly created references
    allRefs = pm.listReferences()
    for r in allRefs:
        print(r.namespace, r.refNode, r.withCopyNumber())

    # the namespace property of the FileReference class can be used to set the namespace as well as to get it.
    allRefs[2].namespace = 'super'

    # but if we have to change the namespace of the objects after they have been imported
    # there is a different, albeit, more complicated way
    ns = allRefs[0].namespace
    allRefs[0].importContents()

    # heres one way to change the namespace
    # (the namespace may already exist, hence the blanket try/except)
    try:
        pm.namespace(add='bar')
    except:
        pass

    for node in pm.ls(ns + ':*', type='transform'):
        newname = node.swapNamespace('bar')
        print("renaming %s to %s" % (node, newname))
        node.rename(newname)

    # unload the other one
    allRefs[1].unload()
Example #26
0
# save the current scene
currScene = pm.saveAs('pymel_test_main.ma')

# the parent property gives the parent directory of the current scene.
# the / (slash or divide) operator serves as an os independent way of concatenating paths
# it is a shortcut to os.path.join
exportScene = currScene.parent / 'pymel_test_ref.ma'

# if a file already exists where we want to export, delete it first
if exportScene.exists():
    print "removing existing pymel export scene"
    exportScene.remove()

print "exporting new scene:", exportScene
pm.exportSelected(exportScene, f=1)

# delete the original group
# NOTE(review): `g` is created earlier in this script (outside this excerpt)
pm.delete(g)

# reference it in a few times
for i in range(1, 4):
    ref = pm.createReference(exportScene, namespace=('foo%02d' % i))
    # offset each newly created reference:
    # first we list all the nodes in the new reference, and get the first in the list.
    # this will be the 'newGroup' node.
    allRefNodes = ref.nodes()
    print "moving", allRefNodes[0]
    allRefNodes[0].tx.set(2 * i)

# print out some information about our newly created references
Example #27
0
def asset_publish(name, category, type, version, model, lookdev):
    '''
    Publish an asset: stamp publish attributes onto the node, export a
    Maya ASCII file and an Alembic cache into a fresh version directory
    and write a manifest.json describing the publish.

    Note: the ``type`` parameter shadows the builtin of the same name;
    kept for backward compatibility with existing callers.

    :example
        name = 'jasmin'
        category = 'character'
        type = 'model'
        version = '1.0.0'
        model = 'None'
        lookdev = 'None'
        from tools.maya_publish import publish
        asset_publish(name, category, type, version, model, lookdev)
    '''
    # imported locally (presumably so the module can be imported outside
    # a Maya session) — confirm
    from pymel import core
    print '#info asset publish(%s)' % type
    py_node = core.PyNode(name)
    attributes = {
        'name': name,
        'category': category,
        'type': type,
        'model': model,
        'version': version,
        'lookdev': lookdev
    }
    # re-create each publish attribute so stale values never survive
    orders = ['name', 'category', 'type', 'version', 'model', 'lookdev']
    for order in orders:
        current_attribute = '%s.%s' % (py_node.name(), order)
        if core.objExists(current_attribute):
            core.deleteAttr(current_attribute)
        py_node.addAttr(order, dt='string')
        py_node.setAttr(order, attributes[order])
    dirname = os.path.join(
        resources.get_show_path(),
        # '/venture/shows/katana_tutorials/asset',
        category,
        name,
        type,
        version)
    # best-effort wipe of any previous publish of the same version
    if os.path.isdir(dirname):
        try:
            shutil.rmtree(dirname)
        except Exception:
            pass
    if not os.path.isdir(dirname):
        os.makedirs(dirname)
    maya_path = os.path.join(dirname, '%s.ma' % name)
    abc_path = os.path.join(dirname, '%s.abc' % name)
    manifest_path = os.path.join(dirname, 'manifest.json')
    py_node.select(r=True)
    core.exportSelected(maya_path, f=True)
    # forward the publish attributes into the Alembic export
    asset_attributes = '-attr ' + ' -attr '.join(orders)
    command = '-frameRange 1001 1002 %s -uvWrite -attrPrefix xgen -worldSpace -root %s -stripNamespaces -file %s ' % (
        asset_attributes, name, abc_path)
    py_node.select(r=True)
    core.AbcExport(j=command)
    manifest_data = {'pipe': 'asset', 'data': attributes}
    with open(manifest_path, 'w') as manifest:
        manifest.write(json.dumps(manifest_data, indent=4))
    # NOTE(review): dirname is interpolated into a shell command; fine for
    # trusted pipeline paths, not for arbitrary input
    os.system('xdg-open \"%s\"' % dirname)
    print '\t', maya_path
    print '\t', abc_path
    print '\t', manifest_path
Example #28
0
def scene_publish(sequence, shot, scene, version, puppets=None):
    '''
    :example
        sequence = 'sequence_101'
        shot = 'shot_1001'        
        scene = 'animation'
        version = '0.0.0'
        assets = ['batman:batman', 'jasmin:jasmin', 'scene:scene']
        # assets = ['batman:batman', 'jasmin:jasmin', 'scene:scene', 'motorcycle:motorcycle']    
        from tools.maya_publish import publish
        scene_publish(sequence, shot, scene, version)
    '''
    from pymel import core
    print '#info scene publish(%s)' % scene
    model_panels = core.getPanel(type='modelPanel')
    for model_panel in model_panels:
        core.modelEditor(model_panel,
                         edit=True,
                         displayAppearance='boundingBox')
        core.modelEditor(model_panel, edit=True, allObjects=False)
        core.modelEditor(model_panel, edit=True, nurbsCurves=True)
        core.modelEditor(model_panel, edit=True, polymeshes=True)
    if not puppets:
        puppets = get_valid_puppets()
    dirname = os.path.join(
        resources.get_show_path(),
        # '/venture/shows/katana_tutorials/scene',
        sequence,
        shot,
        scene,
        version)
    if os.path.isdir(dirname):
        try:
            shutil.rmtree(dirname)
        except Exception:
            pass
    if not os.path.isdir(dirname):
        os.makedirs(dirname)
    min = int(core.playbackOptions(q=True, min=True))
    max = int(core.playbackOptions(q=True, max=True))
    puppet_attributes = [
        'name', 'category', 'type', 'version', 'model', 'lookdev'
    ]
    amination_attributes = ['min', 'max', 'latest_lookdev']
    puppet_contents = {}
    for puppet in puppets:
        py_node = core.PyNode(puppet)
        for each in amination_attributes:
            attribute = '%s.%s' % (py_node.name(), each)
            if core.objExists(attribute):
                core.deleteAttr(attribute)
            py_node.addAttr(each, dt='string')
        py_node.setAttr('min', str(min))
        py_node.setAttr('max', str(max))
        puppet_name = puppet.split(':')[0]
        for puppet_attribute in puppet_attributes:
            value = py_node.getAttr(puppet_attribute)
            name = puppet.split(':')[0]
            if puppet_name not in puppet_contents:
                puppet_contents.setdefault(puppet_name, {})
            puppet_contents[puppet_name].setdefault(puppet_attribute, value)
        category = py_node.getAttr('category')
        name = py_node.getAttr('name')
        model = py_node.getAttr('model')
        model_depnendency = get_lookdev_model_depnendency(category, name)
        latest_lookdev = 'None'
        if model_depnendency:
            if model_depnendency[model]:
                latest_lookdev = model_depnendency[model][0]
        py_node.setAttr('latest_lookdev', latest_lookdev)
        abc_path = os.path.join(dirname, '%s.abc' % puppet_name)
        ud_attrubutes = []
        for attrubute in py_node.listAttr(ud=True):
            ud_attrubutes.append(attrubute.attrName())
        asset_attributes = '-attr ' + ' -attr '.join(ud_attrubutes)
        command = '-frameRange %s %s %s -uvWrite -attrPrefix xgen -worldSpace -root %s -stripNamespaces -file %s ' % (
            min, max, asset_attributes, puppet, abc_path)
        py_node.select(r=True)
        core.AbcExport(j=command)
        print '\t', abc_path
    core.select(puppets, r=True)
    maya_path = os.path.join(dirname, '%s.ma' % shot)
    manifest_path = os.path.join(dirname, 'manifest.json')
    core.exportSelected(maya_path, preserveReferences=True, f=True)
    manifest_data = {
        'pipe': 'scene',
        'data': {
            'frame_range': [min, max],
            'sequence': sequence,
            'scene': scene,
            'puppets': puppet_contents,
            'version': version
        }
    }
    with open(manifest_path, 'w') as manifest:
        manifest.write(json.dumps(manifest_data, indent=4))
    os.system('xdg-open \"%s\"' % dirname)
    print '\t', manifest_path
    print '\t', maya_path
Example #29
0
def save(filename,
         objs=None,
         forceOverwrite=False,
         forceKeys=False,
         start=None,
         end=None):
    '''
    Export the t/r/s/visibility and user-defined anim curves of the given
    objects (or the current selection) to `filename`.

    :param bool forceOverwrite: passed to exportSelected's ``force`` flag so
        an existing destination file is overwritten without prompting
    :param bool forceKeys: put keys on the objects before collecting curves

    ..  todo::
        * Check if an attribute ISN'T keyed in the source and mark the static
            value somehow.  Specifically, if parent/world stuff isn't present,
            copying animations goes poorly.
        * At some point animation layers need to be addressed properly.
    '''
    global TAGGING_ATTR
    sel = selected()
    if not objs:
        objs = selected()

    # Network node that travels with the export, carrying the frame range
    # and the compressed static values.
    info = createNode('network')
    info.addAttr('start', at='long')
    info.addAttr('end', at='long')
    info.addAttr('staticValues', dt='string')

    # Default the range to the playback range and keep it non-degenerate.
    if start is None:
        start = playbackOptions(q=True, min=True)
    if end is None:
        end = playbackOptions(q=True, max=True)
    if start >= end:
        end = start + 1

    info.start.set(start)
    info.end.set(end)

    transformAttrs = ['%s%s' % (channel, axis)
                      for channel in 'trs'
                      for axis in 'xyz']
    transformAttrs.append('visibility')

    dups = []
    staticValues = {}

    for obj in objs:
        objName = obj.name()
        # The builtin ik translation isn't returned by listAttr, so pick up
        # ikBlend explicitly when present.
        zooHack = ['ikBlend'] if obj.hasAttr('ikBlend') else []

        attrSources = [listAttr(objName, ud=True, k=True)]
        if obj.hasAttr('tx'):
            attrSources.append(transformAttrs)
        attrSources.append(zooHack)

        for attr in chain(*attrSources):
            _processAttr(objName + '.' + attr, dups, forceKeys,
                         staticValues, start, end)

    if not dups:
        warning("Nothing was animated")
        return

    info.staticValues.set(core.text.asciiCompress(json.dumps(staticValues)))

    select(dups, info)
    exportSelected(filename, force=forceOverwrite)
    select(sel)
    delete(dups)
Example #30
0
File: core.py Project: etic/mayabox
	def _exportNodeXnormal(self, node):
		"""Write the current selection to an .obj file for xNormal and keep
		a copy at the history path.

		``node`` is only used to derive the export and history paths; the
		geometry written is whatever is currently selected (exportSelected).
		"""
		# Quiet-load the OBJ exporter plugin (no-op if already loaded).
		pm.loadPlugin('objExport.mll', qt=1)
		path, historyPath = self._getExportPaths(node, 'xnormal', '.obj')
		options = 'groups=1;ptgroups=1;materials=1;smoothing=1;normals=1'
		pm.exportSelected(path, force=1, type='OBJexport', op=options)
		# Duplicate the fresh export to the history location.
		shutil.copyfile(path, historyPath)
Example #31
0
def export(*a):
    """ Parses an asset's custom attributes for information about the name and
        location of the asset in the filesystem.  Makes a backup of the asset
        on export, if necessary.

        Returns False on any validation failure or user cancellation."""
    # Backup logic
    def __incrVersion(backup_path, version, name):
        # Return the first 'name_NNNN.mb' backup file name that does not
        # already exist in backup_path, counting up from `version`.
        # (Iterative form of the previous recursion; same result.)
        version = int(version)
        backup_name = name + '_' + str(version).zfill(4) + '.mb'
        while os.path.exists(backup_path + backup_name):
            version += 1
            backup_name = name + '_' + str(version).zfill(4) + '.mb'
        return backup_name

    ## Selection manipulations
    # The goal here is to sort out the main asset node from any sort groups that are also selected.
    # The main asset then has to be passed down the chain for sanity checking, while the sort nodes
    # are stashed away for later use.

    sel = pm.ls(sl=True)
    xform_ct = 0
    sort_groups = []  # TODO(review): populated nowhere in this function yet
    if len(sel) == 1:
        check = pm.confirmDialog(
            title='No sort sets?',
            message='No sort sets are selected. Export anyway?',
            button=['Yes','Cancel'],
            defaultButton='Yes',
            dismissString='Cancel')

        if check == 'Cancel':
            return False

    valid_node_list = [
        'VRayObjectProperties',
        'VRayRenderElementSet',
        'VRayLightMesh',
        'VRayDisplacement'
        ]

    # Check that only a single transform node and any number of valid nodetypes
    # are selected
    main_node = None
    for obj in sel:
        if obj.nodeType() == 'transform' and xform_ct == 0:
            main_node = obj
            xform_ct = 1
            continue
        if obj.nodeType() == 'transform' and xform_ct == 1:
            pm.warning('More than one top-level transform selected.  Export cancelled.')
            return False
        if obj.nodeType() not in valid_node_list:
            pm.warning('Invalid top-level node selected.  Export cancelled.')
            return False

    # Previously an unbound-name crash: bail out cleanly when the selection
    # contained no transform at all.
    if main_node is None:
        pm.warning('No top-level transform selected.  Export cancelled.')
        return False

    # Bless object
    name, version, path = bless(main_node)

    # Assign an export path (if none)
    if not path:
        # fileDialog2 returns None when the user cancels; guard it.
        dialog_result = pm.fileDialog2(fm=3, dir=cfb.MAIN_ASSET_DIR)
        if not dialog_result:
            return False
        path = dialog_result[0] + '\\' + name + '\\'
        main_node.assetPath.set(path)

    ### Backup & export
    # Build new file name
    export_name = path + name + '.mb'
    # If a file with that name already exists:
    if os.path.exists(export_name):
        ## Backup operation
        backup_path = path + 'backup\\'
        # Make the backup folder if it doesn't exist
        if not os.path.isdir(backup_path):
            os.mkdir(backup_path)

        # Increment the backup file version tag
        backup_name = backup_path + __incrVersion(backup_path, version, name)
        # & Move the current asset into the backup folder, with the incremented file name
        os.rename(export_name, backup_name)
        pm.warning(
            'Successfully backed up asset to: ' + backup_name.replace('/','\\')
            )
        # Update the asset in the scene with the current version
        main_node.assetVersion.set(main_node.assetVersion.get()+1)

    master_save = pm.exportSelected(
        export_name,
        constructionHistory=True,
        channels=True,
        constraints=True,
        expressions=True,
        shader=True,
        preserveReferences=True,
        type='mayaBinary'
        )

    pm.warning('Successfully wrote asset to: ' + master_save.replace('/','\\'))
Example #32
0
    def generate_gpu(self):
        """generates the GPU representation of the current scene

        Workflow, as implemented below:

        1. Validate and open ``self.version``.
        2. Load the gpuCache/AbcExport/AbcImport plugins.
        3. Require every referenced version to already have a GPU repr.
        4. Unload all references, then bake the local geometry to gpuCache
           (.abc) files and replace each exported transform with a gpuCache
           node pointing at the cache file (vegetation tasks use the
           children of ``kks___vegetation_pfxPolygons`` instead of the
           local root nodes).
        5. Switch all references to their GPU repr and load them.
        6. For Exterior/Interior tasks, import all references, assign
           lambert1 to the root nodes and run ``clean_up``.
        7. Save as ``{take}___GPU`` and, for Exterior/Interior tasks,
           export the root nodes over the saved file; finally renew the
           scene with a forced new file.
        """
        # validate the version first
        self.version = self._validate_version(self.version)

        self.open_version(self.version)

        # load necessary plugins
        pm.loadPlugin('gpuCache')
        pm.loadPlugin('AbcExport')
        pm.loadPlugin('AbcImport')

        # check if all references have an GPU repr first
        refs_with_no_gpu_repr = []
        for ref in pm.listReferences():
            if ref.version and not ref.has_repr('GPU'):
                refs_with_no_gpu_repr.append(ref)

        if len(refs_with_no_gpu_repr):
            raise RuntimeError(
                'Please generate the GPU Representation of the references '
                'first!!!\n%s' %
                '\n'.join(map(lambda x: str(x.path), refs_with_no_gpu_repr))
            )

        # unload all references
        for ref in pm.listReferences():
            ref.unload()

        # for local models generate an ABC file
        output_path = os.path.join(
            self.version.absolute_path,
            'Outputs/alembic/'
        ).replace('\\', '/')

        # NOTE(review): abc_command is built here but never used in this
        # method; only gpu_command is evaluated below.
        abc_command = \
            'AbcExport -j "-frameRange %(start_frame)s ' \
            '%(end_frame)s ' \
            '-ro -stripNamespaces ' \
            '-uvWrite ' \
            '-wholeFrameGeo ' \
            '-worldSpace ' \
            '-root |%(node)s -file %(file_path)s";'

        gpu_command = \
            'gpuCache -startTime %(start_frame)s ' \
            '-endTime %(end_frame)s ' \
            '-optimize -optimizationThreshold 40000 ' \
            '-writeMaterials ' \
            '-directory "%(path)s" ' \
            '-fileName "%(filename)s" ' \
            '%(node)s;'

        # caches are written at the current frame only (start == end)
        start_frame = end_frame = int(pm.currentTime(q=1))

        if not self.is_scene_assembly_task(self.version.task):

            if self.is_vegetation_task(self.version.task):
                # in vegetation files, we export the GPU files directly from
                # the Base version, also we use the geometry under
                # "pfxPolygons" and parent the resulting Stand-In nodes to the
                # pfxPolygons
                # load all references
                for ref in pm.listReferences():
                    ref.load()

                # find the _pfxPolygons node
                pfx_polygons_node = pm.PyNode('kks___vegetation_pfxPolygons')

                for node in pfx_polygons_node.getChildren():
                    for child_node in node.getChildren():
                        child_node_name = child_node.name().split('___')[-1]
                        child_node_shape = child_node.getShape()
                        child_node_shape_name = None

                        if child_node_shape:
                            child_node_shape_name = child_node_shape.name()

                        pm.select(child_node)
                        # gpuCache writes next to a temp name first; the .abc
                        # is moved into output_path afterwards.
                        temp_output_fullpath = \
                            tempfile.mktemp().replace('\\', '/')
                        temp_output_path, temp_output_filename = \
                            os.path.split(temp_output_fullpath)

                        output_filename = '%s_%s' % (
                            self.version.nice_name,
                            child_node_name.split(':')[-1]
                            .replace(':', '_')
                            .replace('|', '_')
                        )

                        # run the mel command
                        # check if file exists
                        pm.mel.eval(
                            gpu_command % {
                                'start_frame': start_frame,
                                'end_frame': end_frame,
                                'node': child_node.fullPath(),
                                'path': temp_output_path,
                                'filename': temp_output_filename
                            }
                        )

                        cache_file_full_path = \
                            os.path\
                            .join(output_path, output_filename + '.abc')\
                            .replace('\\', '/')

                        # create the intermediate directories
                        try:
                            os.makedirs(
                                os.path.dirname(cache_file_full_path)
                            )
                        except OSError:
                            # directory exists
                            pass

                        # now move in to its place
                        shutil.move(
                            temp_output_fullpath + '.abc',
                            cache_file_full_path
                        )

                        # set rotate and scale pivots
                        rp = pm.xform(child_node, q=1, ws=1, rp=1)
                        sp = pm.xform(child_node, q=1, ws=1, sp=1)
                        #child_node.setRotatePivotTranslation([0, 0, 0])

                        # delete the child and add a GPU node instead
                        pm.delete(child_node)

                        # check if file exists and create nodes
                        if os.path.exists(cache_file_full_path):
                            gpu_node = pm.createNode('gpuCache')
                            gpu_node_tra = gpu_node.getParent()

                            pm.parent(gpu_node_tra, node)
                            gpu_node_tra.rename(child_node_name)

                            if child_node_shape_name is not None:
                                gpu_node.rename(child_node_shape_name)

                            pm.xform(gpu_node_tra, ws=1, rp=rp)
                            pm.xform(gpu_node_tra, ws=1, sp=sp)

                            gpu_node.setAttr(
                                'cacheFileName',
                                cache_file_full_path,
                                type="string"
                            )
                        else:
                            print('File not found!: %s' % cache_file_full_path)

                # clean up other nodes
                pm.delete('kks___vegetation_pfxStrokes')
                pm.delete('kks___vegetation_paintableGeos')

            else:
                root_nodes = self.get_local_root_nodes()
                if len(root_nodes):
                    for root_node in root_nodes:
                        # export each child of each root as separate nodes
                        for child_node in root_node.getChildren():

                            # check if it is a transform node
                            if not isinstance(child_node, pm.nt.Transform):
                                continue

                            if not auxiliary.has_shape(child_node):
                                continue

                            child_name = child_node.name()
                            child_shape = child_node.getShape()
                            child_shape_name = None
                            if child_shape:
                                child_shape_name = child_shape.name()

                            child_full_path = \
                                child_node.fullPath()[1:].replace('|', '_')

                            temp_output_fullpath = \
                                tempfile.mktemp().replace('\\', '/')
                            temp_output_path, temp_output_filename = \
                                os.path.split(temp_output_fullpath)

                            output_filename =\
                                '%s_%s' % (
                                    self.version.nice_name,
                                    child_full_path
                                )

                            # run the mel command
                            # check if file exists
                            pm.mel.eval(
                                gpu_command % {
                                    'start_frame': start_frame,
                                    'end_frame': end_frame,
                                    'node': child_node.fullPath(),
                                    'path': temp_output_path,
                                    'filename': temp_output_filename
                                }
                            )

                            cache_file_full_path = \
                                os.path\
                                .join(
                                    output_path,
                                    '%s.abc' % (
                                        output_filename
                                    )
                                )\
                                .replace('\\', '/')

                            # create the intermediate directories
                            try:
                                os.makedirs(
                                    os.path.dirname(cache_file_full_path)
                                )
                            except OSError:
                                # directory exists
                                pass

                            # now move in to its place
                            shutil.move(
                                temp_output_fullpath + '.abc',
                                cache_file_full_path
                            )

                            # set rotate and scale pivots
                            rp = pm.xform(child_node, q=1, ws=1, rp=1)
                            sp = pm.xform(child_node, q=1, ws=1, sp=1)
                            # rpt = child_node.getRotatePivotTranslation()

                            # delete the child and add a GPU node instead
                            pm.delete(child_node)

                            # check if file exists
                            if os.path.exists(cache_file_full_path):
                                gpu_node = pm.createNode('gpuCache')
                                gpu_node_tra = gpu_node.getParent()

                                pm.parent(gpu_node_tra, root_node)
                                gpu_node_tra.rename(child_name)

                                if child_shape_name is not None:
                                    gpu_node.rename(child_shape_name)

                                pm.xform(gpu_node_tra, ws=1, rp=rp)
                                pm.xform(gpu_node_tra, ws=1, sp=sp)
                                # child_node.setRotatePivotTranslation(rpt)

                                gpu_node.setAttr(
                                    'cacheFileName',
                                    cache_file_full_path,
                                    type="string"
                                )

        # load all references again
        # convert all references to GPU
        logger.debug('converting all references to GPU')
        for ref in pm.listReferences():
            # check if this is a Model reference
            ref.to_repr('GPU')
            ref.load()

        # if this is an Exterior/Interior -> Layout -> Hires task flatten it
        task = self.version.task

        is_exterior_or_interior_task = self.is_exterior_or_interior_task(task)
        if is_exterior_or_interior_task:
            logger.debug('importing all references')
            # and import all of the references
            # (looped because importing can expose nested references)
            all_refs = pm.listReferences()
            while len(all_refs) != 0:
                for ref in all_refs:
                    if not ref.isLoaded():
                        ref.load()
                    ref.importContents()
                all_refs = pm.listReferences()

            # assign lambert1 to all GPU nodes
            pm.sets('initialShadingGroup', e=1, fe=auxiliary.get_root_nodes())

            # clean up
            self.clean_up()

        # 6. save the scene as {{original_take}}___GPU
        # use maya
        take_name = '%s%s%s' % (
            self.base_take_name, Representation.repr_separator, 'GPU'
        )
        v = self.get_latest_repr_version(take_name)
        self.maya_env.save_as(v)

        # export the root nodes under the same file
        if is_exterior_or_interior_task:
            logger.debug('exporting root nodes')
            pm.select(auxiliary.get_root_nodes())
            pm.exportSelected(
                v.absolute_full_path,
                type='mayaAscii',
                force=True
            )

        logger.debug('renewing scene')
        # clear scene
        pm.newFile(force=True)
Example #33
0
 def sgSelExport(self, sgList, oPath):
     """Export the given shading groups to a mayaBinary file.

     The selection uses ``noExpand`` so the sets themselves (not their
     members) are what gets exported.

     :param sgList: shading-group nodes (or names) to export
     :param oPath: output file path
     """
     pm.select(cl=1)
     pm.select(sgList, noExpand=1)
     # ToDo : check this command
     pm.exportSelected(oPath, op="v=0", f=1, es=1, type="mayaBinary")
Example #34
0
    def generate_ass(self):
        """generates the ASS representation of the current scene

        For Model Tasks the ASS is generated over the LookDev Task because it
        is not possible to assign a material to an object inside an ASS file.
        """
        # before doing anything, check if this is a look dev task
        # and export the objects from the referenced files with their current
        # shadings, then replace all of the references to ASS repr and than
        # add Stand-in nodes and parent them under the referenced models

        # load necessary plugins
        pm.loadPlugin('mtoa')

        # disable "show plugin shapes"
        active_panel = auxiliary.Playblaster.get_active_panel()
        show_plugin_shapes = pm.modelEditor(active_panel, q=1, pluginShapes=1)
        pm.modelEditor(active_panel, e=1, pluginShapes=False)

        # validate the version first
        self.version = self._validate_version(self.version)

        self.open_version(self.version)

        task = self.version.task

        # export_command = 'arnoldExportAss -f "%(path)s" -s -mask 24 ' \
        #                  '-lightLinks 0 -compressed -boundingBox ' \
        #                  '-shadowLinks 0 -cam perspShape;'

        export_command = 'arnoldExportAss -f "%(path)s" -s -mask 60' \
                         '-lightLinks 1 -compressed -boundingBox ' \
                         '-shadowLinks 1 -cam perspShape;'

        # calculate output path
        output_path = \
            os.path.join(self.version.absolute_path, 'Outputs/ass/')\
            .replace('\\', '/')

        # check if all references have an ASS repr first
        refs_with_no_ass_repr = []
        for ref in pm.listReferences():
            if ref.version and not ref.has_repr('ASS'):
                refs_with_no_ass_repr.append(ref)

        if len(refs_with_no_ass_repr):
            raise RuntimeError(
                'Please generate the ASS Representation of the references '
                'first!!!\n%s' %
                '\n'.join(map(lambda x: str(x.path), refs_with_no_ass_repr)))

        if self.is_look_dev_task(task):
            # in look dev files, we export the ASS files directly from the Base
            # version and parent the resulting Stand-In node to the parent of
            # the child node

            # load only Model references
            for ref in pm.listReferences():
                v = ref.version
                load_ref = False
                if v:
                    ref_task = v.task
                    if self.is_model_task(ref_task):
                        load_ref = True

                if load_ref:
                    ref.load()
                    ref.importContents()

            # Make all texture paths relative
            # replace all "$REPO#" from all texture paths first
            #
            # This is needed to properly render textures with any OS
            types_and_attrs = {
                'aiImage': 'filename',
                'file': 'fileTextureName',
                'imagePlane': 'imageName'
            }

            for node_type in types_and_attrs.keys():
                attr_name = types_and_attrs[node_type]
                for node in pm.ls(type=node_type):
                    orig_path = node.getAttr(attr_name).replace("\\", "/")
                    path = re.sub(r'(\$REPO[0-9/]+)', '', orig_path)
                    tx_path = self.make_tx(path)
                    inputs = node.attr(attr_name).inputs(p=1)
                    if len(inputs):
                        # set the input attribute
                        for input_node_attr in inputs:
                            input_node_attr.set(tx_path)
                    else:
                        node.setAttr(attr_name, tx_path)

            # randomize all render node names
            # This is needed to prevent clashing of materials in a bigger scene
            for node in pm.ls(type=RENDER_RELATED_NODE_TYPES):
                if node.referenceFile() is None and \
                   node.name() not in READ_ONLY_NODE_NAMES:
                    node.rename('%s_%s' % (node.name(), uuid.uuid4().hex))

            nodes_to_ass_files = {}

            # export all root ass files as they are
            for root_node in auxiliary.get_root_nodes():
                for child_node in root_node.getChildren():
                    # check if it is a transform node
                    if not isinstance(child_node, pm.nt.Transform):
                        continue

                    if not auxiliary.has_shape(child_node):
                        continue

                    # randomize child node name
                    # TODO: This is not working as intended, node names are like |NS:node1|NS:node2
                    #       resulting a child_node_name as "node2"
                    child_node_name = child_node\
                        .fullPath()\
                        .replace('|', '_')\
                        .split(':')[-1]

                    child_node_full_path = child_node.fullPath()

                    pm.select(child_node)
                    child_node.rename('%s_%s' %
                                      (child_node.name(), uuid.uuid4().hex))

                    output_filename =\
                        '%s_%s.ass' % (
                            self.version.nice_name,
                            child_node_name
                        )

                    output_full_path = \
                        os.path.join(output_path, output_filename)

                    # run the mel command
                    pm.mel.eval(export_command %
                                {'path': output_full_path.replace('\\', '/')})
                    nodes_to_ass_files[child_node_full_path] = \
                        '%s.gz' % output_full_path
                    # print('%s -> %s' % (
                    #     child_node_full_path,
                    #     output_full_path)
                    # )

            # reload the scene
            pm.newFile(force=True)
            self.open_version(self.version)

            # convert all references to ASS
            # we are doing it a little bit early here, but we need to
            for ref in pm.listReferences():
                ref.to_repr('ASS')

            all_stand_ins = pm.ls(type='aiStandIn')
            for ass_node in all_stand_ins:
                ass_tra = ass_node.getParent()
                full_path = ass_tra.fullPath()
                if full_path in nodes_to_ass_files:
                    ass_file_path = \
                        Repository.to_os_independent_path(
                            nodes_to_ass_files[full_path]
                        )
                    ass_node.setAttr('dso', ass_file_path)

        elif self.is_vegetation_task(task):
            # in vegetation files, we export the ASS files directly from the
            # Base version, also we use the geometry under "pfxPolygons"
            # and parent the resulting Stand-In nodes to the
            # pfxPolygons
            # load all references
            for ref in pm.listReferences():
                ref.load()

            # Make all texture paths relative
            # replace all "$REPO#" from all texture paths first
            #
            # This is needed to properly render textures with any OS
            types_and_attrs = {
                'aiImage': 'filename',
                'file': 'fileTextureName',
                'imagePlane': 'imageName'
            }

            for node_type in types_and_attrs.keys():
                attr_name = types_and_attrs[node_type]
                for node in pm.ls(type=node_type):
                    orig_path = node.getAttr(attr_name).replace("\\", "/")
                    path = re.sub(r'(\$REPO[0-9/]+)', '', orig_path)
                    tx_path = self.make_tx(path)
                    inputs = node.attr(attr_name).inputs(p=1)
                    if len(inputs):
                        # set the input attribute
                        for input_node_attr in inputs:
                            input_node_attr.set(tx_path)
                    else:
                        node.setAttr(attr_name, tx_path)

            # import shaders that are referenced to this scene
            # there is only one reference in the vegetation task and this is
            # the shader scene
            for ref in pm.listReferences():
                ref.importContents()

            # randomize all render node names
            # This is needed to prevent clashing of materials in a bigger scene
            for node in pm.ls(type=RENDER_RELATED_NODE_TYPES):
                if node.referenceFile() is None and \
                   node.name() not in READ_ONLY_NODE_NAMES:
                    node.rename('%s_%s' % (node.name(), uuid.uuid4().hex))

            # find the _pfxPolygons node
            pfx_polygons_node = pm.PyNode('kks___vegetation_pfxPolygons')

            for node in pfx_polygons_node.getChildren():
                for child_node in node.getChildren():
                    #print('processing %s' % child_node.name())
                    child_node_name = child_node.name().split('___')[-1]

                    pm.select(child_node)
                    output_filename =\
                        '%s_%s.ass' % (
                            self.version.nice_name,
                            child_node_name.replace(':', '_').replace('|', '_')
                        )

                    output_full_path = \
                        os.path.join(output_path, output_filename)

                    # run the mel command
                    pm.mel.eval(export_command %
                                {'path': output_full_path.replace('\\', '/')})

                    # generate an aiStandIn node and set the path
                    ass_node = auxiliary.create_arnold_stand_in(
                        path='%s.gz' % output_full_path)
                    ass_tra = ass_node.getParent()

                    # parent the ass node under the current node
                    # under pfx_polygons_node
                    pm.parent(ass_tra, node)

                    # set pivots
                    rp = pm.xform(child_node, q=1, ws=1, rp=1)
                    sp = pm.xform(child_node, q=1, ws=1, sp=1)
                    # rpt = child_node.getRotatePivotTranslation()

                    pm.xform(ass_node, ws=1, rp=rp)
                    pm.xform(ass_node, ws=1, sp=sp)
                    # ass_node.setRotatePivotTranslation(rpt)

                    # delete the child_node
                    pm.delete(child_node)

                    # give it the same name with the original
                    ass_tra.rename('%s' % child_node_name)

            # clean up other nodes
            pm.delete('kks___vegetation_pfxStrokes')
            pm.delete('kks___vegetation_paintableGeos')

        elif self.is_model_task(task):
            # convert all children of the root node
            # to an empty aiStandIn node
            # and save it as it is
            root_nodes = self.get_local_root_nodes()

            for root_node in root_nodes:
                for child_node in root_node.getChildren():
                    child_node_name = child_node.name()

                    rp = pm.xform(child_node, q=1, ws=1, rp=1)
                    sp = pm.xform(child_node, q=1, ws=1, sp=1)

                    pm.delete(child_node)

                    ass_node = auxiliary.create_arnold_stand_in(path='')
                    ass_tra = ass_node.getParent()
                    pm.parent(ass_tra, root_node)
                    ass_tra.rename(child_node_name)

                    # set pivots
                    pm.xform(ass_tra, ws=1, rp=rp)
                    pm.xform(ass_tra, ws=1, sp=sp)

                    # because there will be possible material assignments
                    # in look dev disable overrideShaders
                    ass_node.setAttr('overrideShaders', False)

                    # we definitely do not use light linking in our studio,
                    # which seems to create more problems then it solves.
                    ass_node.setAttr('overrideLightLinking', False)

        # convert all references to ASS
        for ref in pm.listReferences():
            ref.to_repr('ASS')
            ref.load()

        # fix an arnold bug
        for node_name in ['initialShadingGroup', 'initialParticleSE']:
            node = pm.PyNode(node_name)
            node.setAttr("ai_surface_shader", (0, 0, 0), type="float3")
            node.setAttr("ai_volume_shader", (0, 0, 0), type="float3")

        # if this is an Exterior/Interior -> Layout -> Hires task flatten it
        is_exterior_or_interior_task = self.is_exterior_or_interior_task(task)
        if is_exterior_or_interior_task:
            # and import all of the references
            all_refs = pm.listReferences()
            while len(all_refs) != 0:
                for ref in all_refs:
                    if not ref.isLoaded():
                        ref.load()
                    ref.importContents()
                all_refs = pm.listReferences()

            # assign lambert1 to all GPU nodes
            pm.sets('initialShadingGroup', e=1, fe=auxiliary.get_root_nodes())

            # now remove them from the group
            pm.sets('initialShadingGroup', e=1, rm=pm.ls())

            # and to make sure that no override is enabled
            [
                node.setAttr('overrideLightLinking', False)
                for node in pm.ls(type='aiStandIn')
            ]

            # make sure motion blur is disabled
            [
                node.setAttr('motionBlur', False)
                for node in pm.ls(type='aiStandIn')
            ]

            # clean up
            self.clean_up()

        # check if all aiStandIn nodes are included in
        # ArnoldStandInDefaultLightSet set
        try:
            arnold_stand_in_default_light_set = \
                pm.PyNode('ArnoldStandInDefaultLightSet')
        except pm.MayaNodeError:
            # just create it
            arnold_stand_in_default_light_set = \
                pm.createNode(
                    'objectSet',
                    name='ArnoldStandInDefaultLightSet'
                )

        pm.select(None)
        pm.sets(arnold_stand_in_default_light_set, fe=pm.ls(type='aiStandIn'))

        # save the scene as {{original_take}}___ASS
        # use maya
        take_name = '%s%s%s' % (self.base_take_name,
                                Representation.repr_separator, 'ASS')
        v = self.get_latest_repr_version(take_name)
        self.maya_env.save_as(v)

        # export the root nodes under the same file
        if is_exterior_or_interior_task:
            pm.select(auxiliary.get_root_nodes())
            pm.exportSelected(v.absolute_full_path,
                              type='mayaAscii',
                              force=True)

        # new scene
        pm.newFile(force=True)

        # reset show plugin shapes option
        active_panel = auxiliary.Playblaster.get_active_panel()
        pm.modelEditor(active_panel, e=1, pluginShapes=show_plugin_shapes)
Example #35
0
import pymel.core as pm

pm.openFile(args[0], loadReferenceDepth="none", force=True)

details = []
for transform in pm.listTransforms():
    try:
        lod = transform.resolution_export.get()
        if len(lod):
            path = args[1].format(lod=lod)
            pm.showHidden(transform)
            pm.select(transform)
            # unparent, otherwise the top node is also exported
            pm.parent(transform, world=True)
            pm.exportSelected(path, preserveReferences=True, force=True)
            details.append({"element": {"path": path},
                            "context": {"LOD": lod}})
    except AttributeError:
        pass

# pass outputs back
print "begin-json details"
print json.dumps(details, indent=4)
print "===="

# flush stdout, as os._exit does not do so
sys.stdout.flush()

os._exit(0)
Example #36
0
def bakeScene():
    '''Bake the selected camera (or camera group) and the scene nulls to
    world-space keyframes, then export the baked objects to a file.

    Returns:
        True on a successful export, False on user error or a failed
        export, None if the user dismissed the rebake dialog.
    '''
    sel = pm.ls(sl=True)
    # truthiness covers both an empty list and None
    if not sel:
        pm.warning("Please select a camera or camera group!")
        return False

    new_nulls = pm.confirmDialog(
        title='Rebake nulls?',
        message='Do you want to rebake your scene nulls?',
        button=['Yes', 'No'],
        defaultButton='Yes',
        cancelButton='No',
        dismissString='Cancel')

    # Break operation on user cancel (dialog dismissed)
    if new_nulls == 'Cancel':
        return None

    # Cleanup lists
    constraints = []
    exports = []

    # Generate export objects
    export_camera, src_cam = __createExportCamera()
    if new_nulls == 'Yes':
        nulls = __createNulls()
    elif new_nulls == 'No':
        nulls = __createNullList()

    # List of objects to be baked
    exports = nulls
    exports.append(export_camera)

    # Frame range from the current playback range
    # (renamed from `range`, which shadowed the builtin)
    frame_range = (pm.playbackOptions(q=True, min=True),
                   pm.playbackOptions(q=True, max=True))

    # Bake it
    # NOTE(review): cycleCheck is disabled here and never re-enabled --
    # confirm whether it should be restored after the bake
    pm.cycleCheck(e=False)
    pm.bakeResults(exports, t=frame_range)

    # Clear the constraints from the exported objects
    __clearConstraints(exports)

    export_list = ""
    for obj in exports:
        export_list += (str(obj) + "\n")

    # Confirm the export before proceeding
    do_export = pm.confirmDialog(
        title='Confirm Export',
        message='The following objects will be exported:\n' + str(export_list),
        button=['Looks Good', 'Oops'],
        defaultButton='Looks Good',
        cancelButton='Oops',
        dismissString='Oops')

    if do_export == 'Looks Good':
        try:
            out_file = __createOutputFile(src_cam)
            pm.select(exports)
            pm.exportSelected(out_file)
            pm.warning('File exported to: ' + str(out_file))
        except Exception:
            # was a bare except; narrowed so SystemExit/KeyboardInterrupt
            # are not swallowed.  The baked exports are intentionally left
            # in the scene so the user can inspect what failed.
            return False

    elif do_export == 'Oops':
        pm.delete(exports)
        return False

    pm.delete(exports)
    pm.select(sel)
    return True
    def exportSelected(self, scene, shot, filename, dumbify, history):
        '''Export the current selection to both a Maya ASCII file and an
        Alembic cache under ``self.cachePath/scene/shot``.

        Parameters:
            scene: scene folder name used to build the output path
            shot: shot folder name used to build the output path
            filename: base name (no extension) for the .ma / .abc files
            dumbify: when True, duplicate all visible meshes and VRay
                lights, link the duplicate meshes to the originals with
                blendshapes, and export those "dumb" copies instead of
                the live selection
            history: forwarded to pm.exportSelected as constructionHistory
        '''
        # geo list is the one that will be exported
        geoList = []
        # dumbify set contains all geo under current selection
        dumbifySet = set()
        lightSet = set()

        selection = pm.selected()

        # add file node from displacements to selection. otherwise it will be ignored in export
        # (appending while iterating is safe here: the appended file nodes
        # are not of type VRayDisplacement, so they are not re-processed)
        for obj in selection:
            if obj.type() =='VRayDisplacement' and obj.displacement.connections():
                selection.append(obj.displacement.connections()[0])

        if dumbify:
            # dumbify
            # this duplicates all geometry and links them with a blendshape.
            # the duplicates are moved to a seperate group and then cached.
            # this has the effect of removing any logic and just leaving geometry
            for sel in selection:
                for mesh in sel.getChildren(ad=True, type='mesh'):
                    # skip intermediate shapes and anything hidden anywhere
                    # up its hierarchy
                    if not mesh.intermediateObject.get() and self.visibleInHierarchy(mesh):
                        dumbifySet.add(mesh.getParent())
                for light in sel.getChildren(ad=True, type=['VRayLightSphereShape', 'VRayLightDomeShape', 'VRayLightRectShape','VRayLightIESShape', 'VRayLightMesh']):
                    lightSet.add(light.getParent())

            exportGrp = pm.group(em=True, name=filename+'_grp')
            geoGrp = pm.group(em=True, name='geoGrp')
            geoGrp.setParent(exportGrp)

            for obj in dumbifySet:
                dupli = pm.duplicate(obj)[0]
                # strip everything from the duplicate except the live mesh
                # shape (non-mesh children and intermediate shapes go)
                for child in dupli.getChildren():
                    if not child.type() == 'mesh':
                        pm.delete(child)
                    elif child.hasAttr('intermediateObject') and child.intermediateObject.get():
                        pm.delete(child)
                dupli.setParent(geoGrp)
                # renaming the duplicate so it get's the namespace back
                dupli.rename(obj.nodeName())
                try:
                    pm.blendShape(self.getNonIntermediateShape(obj), self.getNonIntermediateShape(dupli), origin='world', n=dupli.nodeName()+'_bs', w=[0,1])
                except Exception as e:
                    # best-effort: some shapes cannot be blendshape-linked;
                    # report and continue with the remaining objects
                    print '%s throws %s'%(obj,e)
                # keep object sets for easy vray clipping
                inheritedSets = self.getInheritedSetMembership(obj)
                for s in inheritedSets:
                    # mirror each inherited set as a namespace-free
                    # '<name>Export' set and add the duplicate to it
                    if not pm.objExists(s.nodeName(stripNamespace=True)+'Export'):
                        newSet = pm.sets(em=True, n = s.nodeName(stripNamespace=True)+'Export')
                        geoList.append(newSet)
                    pm.sets(pm.PyNode(s.nodeName(stripNamespace=True)+'Export'), forceElement=dupli)

            if lightSet:
                # lights are duplicated as-is into their own group
                lightGrp = pm.group(em=True, name='lightGrp')        
                lightGrp.setParent(exportGrp)
                for light in lightSet:
                    dupli = pm.duplicate(light)[0]                
                    dupli.setParent(lightGrp)
                    dupli.rename(light.nodeName())
                
            geoList.append(exportGrp)
        else:
            geoList = selection


        # export geo
        # ne=True keeps the sets in geoList selected without expanding them
        pm.select(geoList, r=True, ne=True)
        geoPath = pm.exportSelected(os.path.join(self.cachePath, scene, shot, filename+'.ma'), 
                typ='mayaAscii', 
                constructionHistory = history, 
                channels = False, 
                constraints = False, 
                expressions = False, 
                force =True, 
                shader = True,
                preserveReferences=False
                )
        self.removeStudentLic(geoPath)
        self.removeNamespaces(geoPath)


        # export abc
        # only DAG nodes can be alembic roots; sets etc. are skipped
        abcOptions = []
        for sel in geoList:
            if 'dagNode' in sel.type(inherited=True):
                abcOptions.append('-root %s'%sel.fullPath())

        frameRange = [pm.playbackOptions(q=True, min=True), pm.playbackOptions(q=True, max=True)]
        abcOptions.append('-frameRange %s %s'%(frameRange[0], frameRange[1]))

        abcPath = os.path.join(self.cachePath, scene, shot, filename+'.abc')
        abcOptions.append('-file "' + abcPath.replace('\\', '/') + '"')
        abcOptions.append('-uvWrite')
        abcOptions.append('-wholeFrameGeo')
        abcOptions.append('-worldSpace')
        abcOptions.append('-writeVisibility')
        abcOptions.append('-writeCreases')
        abcOptions.append('-writeUVSets')
        abcOptions.append('-dataFormat ogawa')

        pm.AbcExport(verbose = True, jobArg = ' '.join(abcOptions))

        # cleanup
        # remove the duplicated "dumb" copies and helper sets again
        if dumbify:
            for obj in geoList:
                pm.delete(obj)