def aiExportFrame( self, frame, objFilename ):
    """Export a single frame as a gzipped Arnold archive (.ass.gz).

    Args:
        frame: Frame number to move the timeline to before exporting.
        objFilename: Output path without extension; ".ass.gz" is appended.
    """
    # (Removed long-dead commented-out mental ray `Mayatomr` export call.)
    cmds.currentTime( frame )
    # s=True exports selection only; c=True compresses; bb=True writes the
    # bounding box; mask=56 restricts exported node categories -- confirm
    # against the AI_NODE_* flags in the Arnold docs.
    cmds.arnoldExportAss(f=objFilename + ".ass.gz", s=True, c=True, bb=True, mask=56)
    self.log( "assExport " + objFilename + ".ass.gz")
def export_ass(self, ass_file):
    """Export this object's selection to an Arnold .ass file.

    Args:
        ass_file (str): Destination .ass path; parent dirs are created.

    Returns:
        bool: True on success, False if the export raised.
    """
    make_file_dir(ass_file)
    self.select_myself()
    try:
        # mask=56 restricts exported node categories; links disabled.
        mc.arnoldExportAss(f=ass_file, s=True, mask=56, lightLinks=0, shadowLinks=0, boundingBox=True, cam='perspShape')
        return True
    # BUGFIX: `except Exception, e:` is Python-2-only syntax; `as` works
    # on Python 2.6+ and Python 3.
    except Exception as e:
        print(e)
        return False
def exportAsAss(self, ass_file): if not mc.pluginInfo('mtoa', q = True, loaded = True): try: mc.loadPlugin('mtoa') except: print 'No Mtoa.' return False self.selectMyself() mc.arnoldExportAss(f = ass_file, s = True, mask = 56, lightLinks = 0, shadowLinks = 0,\ boundingBox = True, cam = 'perspShape') return True
def process(self, instance):
    """Extract the instance's set members as a single ASCII .ass file and
    register it as an 'ass' representation."""
    # Resolve the output location for the extracted file.
    out_dir = self.staging_dir(instance)
    ass_name = "{}.ass".format(instance.name)
    ass_path = os.path.join(out_dir, ass_name)

    self.log.info("Writing: '%s'" % ass_path)

    # Export only the set members; the user's selection is restored after.
    with avalon.maya.maintained_selection():
        members = instance.data["setMembers"]
        self.log.info("Writing: {}".format(members))
        cmds.select(members, noExpand=True)
        cmds.arnoldExportAss(filename=ass_path,
                             selected=True,
                             asciiAss=True,
                             shadowLinks=True,
                             lightLinks=True,
                             boundingBox=True)

    # Register the file on the instance.
    instance.data.setdefault("representations", [])
    instance.data["representations"].append({
        'name': 'ass',
        'ext': 'ass',
        'files': ass_name,
        "stagingDir": out_dir
    })

    self.log.info("Extracted instance '%s' to: %s" % (instance.name, out_dir))
def exportGeo(): export_file="" #Import references refs = cmds.ls(type='reference', rf=True) #rf stops it from trying to import the sharedReferenceNode, which can't be imported. for i in refs: rFile = cmds.referenceQuery(i, f=True) cmds.file(rFile, importReference=True) referenceFiles=cmds.ls( references=True) print referenceFiles #Remove namespaces nameSpacedGeo=cmds.namespaceInfo(listOnlyNamespaces=True) nameSpacedGeo=[n for n in nameSpacedGeo if n not in [u'MergeWithFlagTempNamespaceName', u'UI', u'shared']] for ns in nameSpacedGeo: cmds.namespace(removeNamespace=ns,mergeNamespaceWithRoot=True) #gets the frame range taskId = os.getenv('FTRACK_TASKID') task = ftrack.Task(taskId) shot = task.getParent() seq =shot.getParent() show = seq.getParent() print "Current shot: " +show.get("name")+"_"+seq.get("name")+"_"+shot.get("name") shotName = show.get("name")+"_"+seq.get("name")+"_"+shot.get("name") print "Frame range : %s-%s"%(shot.get("fstart"),shot.get("fend")) print "Exporting file {}".format(export_file) fStart = shot.get("fstart") fEnd = shot.get("fend") #Makes the export path proj=cmds.workspace(q=True,act=True) export_file=(proj+"/standin/"+fileName+".ass") print export_file #Select originally selected geo. Uses MEL because python complains if the nodes have the same names (ie 'tentacle') mel.eval("select -r selectedGeo ;") #Exports Arnold sequence of selected geo #mask=253 should export all the things #mask=173 mesh no shaders, but tse on. cmds.arnoldExportAss(f=export_file,fsh=True,s=True,startFrame=fStart,endFrame=fEnd,frameStep=1,mask=173,lightLinks=0,shadowLinks=0,selected=True,boundingBox=True) #Cleans up selection set cmds.delete("selectedGeo") print "Exported seq: {} from {}-{}".format(export_file, fStart, fEnd)
def extract_Ass(self):
    """Extract the member nodes as an Arnold .ass sequence into a new package.

    Writes one .ass per frame over [startFrame, endFrame] with step
    byFrameStep, then records the entry file name and sequence flags
    on the instance via `add_data`.
    """
    # Ensure mtoa loaded
    cmds.loadPlugin("mtoa", quiet=True)

    package_path = self.create_package()

    cache_file = self.file_name("ass")
    cache_path = os.path.join(package_path, cache_file)

    # Export under a controlled scene state; each manager restores its
    # aspect afterwards (undo, viewport refresh, DG evaluation, selection,
    # reference-edit lock, file-path env tweak). Order matters here.
    # NOTE: contextlib.nested is Python-2 only.
    with contextlib.nested(
        capsule.no_undo(),
        capsule.no_refresh(),
        capsule.evaluation("off"),
        capsule.maintained_selection(),
        capsule.ref_edit_unlock(),
        remove_file_env_path(self.data),
    ):
        cmds.select(self.member, replace=True)
        # mask=24 restricts exported node categories -- confirm against the
        # AI_NODE_* flags in the Arnold docs.
        asses = cmds.arnoldExportAss(filename=cache_path,
                                     selected=True,
                                     startFrame=self.data["startFrame"],
                                     endFrame=self.data["endFrame"],
                                     frameStep=self.data["byFrameStep"],
                                     shadowLinks=1,
                                     lightLinks=1,
                                     expandProcedurals=True,
                                     mask=24)

    # An identical start/end frame means a single file, not a sequence.
    use_sequence = self.data["startFrame"] != self.data["endFrame"]
    entry_file = os.path.basename(asses[0])

    self.add_data({
        "entryFileName": entry_file,
        "useSequence": use_sequence
    })

    if use_sequence:
        self.add_data({
            "startFrame": self.data["startFrame"],
            "endFrame": self.data["endFrame"]
        })
def abcoutput(arg):
    """UI callback: export the selected rigs to caches.

    Depending on the 'geo only' / 'fur only' checkboxes this writes:
      * an Alembic (.abc) of the visible *_geo groups,
      * an Arnold .ass sequence + aiStandIn for visible Yeti nodes,
      * a baked locator/standin "fur" Maya binary (.mb).

    Paths, frame range and sample rate come from the tool's UI fields.
    `arg` is the unused UI callback argument.
    """
    # Scene name (no extension) and UI parameters.
    filename = os.path.basename(os.path.splitext(cmds.file(expandName=1,q=1))[0])
    path =cmds.textField('pathnum',tx=1,q=1).replace("\\","/")
    rangea =cmds.textField('rangenuma',tx=1,q=1)       # start frame (string)
    rangeb =cmds.textField('rangenumb',tx=1,q=1)       # end frame (string)
    rangesam =cmds.textField('rangesamnum',tx=1,q=1)   # sample step (string)
    geocb=cmds.checkBox("geonlycb" ,q=True,v=True)     # export geometry?
    furcb = cmds.checkBox("furonlycb" ,q=True,v=True)  # export fur?
    # Make sure the Alembic exporter is available (best effort).
    if cmds.pluginInfo("AbcExport",q=1,loaded=1,name=1)==0:
        try:
            cmds.loadPlugin("AbcExport")
        except:
            pass
    if os.path.exists(path):
        sels = cmds.ls(sl=1)
        # Collected export targets.
        yeticols =[]      # visible Yeti nodes
        yetiAss =[]       # standin transform created after the .ass export
        locatercols = []  # "arnold_loc" locators to bake
        geoout = []       # visible *_geo groups for the .abc export
        if sels!=[]:
            for sel in sels:
                # Visible arnold locators and yeti groups directly under the selection.
                locsels = [ y for y in cmds.listRelatives(sel,c=1) if y.find("arnold_loc")!=-1 and cmds.getAttr(y+".visibility")==1]
                yetiselgrps = [ y for y in cmds.listRelatives(sel,c=1) if y.find("yeti_G")!=-1]
                if geocb==1:
                    geodels = []
                    # Miarmy crowd rigs keep their geometry under Agent/Geometry.
                    Miajudge = sel.find('Miarmy_Contents')
                    if Miajudge ==-1:
                        geogrps = [ y for y in cmds.listRelatives(sel,c=1) if y.find("_geo")!=-1]
                    else:
                        Agent =[a for a in cmds.listRelatives(sel,c=1) if a.find('Agent')!=-1]
                        if Agent!=[]:
                            geogrps = [b for b in cmds.listRelatives(Agent[0],c=1) if b.find('Geometry')!=-1]
                        else:
                            geogrps = []
                    if geogrps!=[]:
                        for geogrp in geogrps:
                            # Force the geo group visible so it exports.
                            if cmds.getAttr(geogrp+".visibility")==0:
                                try:
                                    cmds.setAttr(geogrp+".visibility",1)
                                except:
                                    cmds.warning(geogrp+".visibility cant be set!!")
                            #mainCtrl: switch on any hair/yeti toggles on the rig's Main control.
                            if cmds.listConnections(geogrp,d=0,type="transform")!=None:
                                mainctrl =[m for m in cmds.listConnections(geogrp,d=0,type="transform") if m.find("Main")!=-1]
                                if mainctrl!=[]:
                                    mainFurAttrs =[f for f in cmds.listAttr(mainctrl[0],k=1) if f in['hair','yeti','hairYeti']]
                                    if mainFurAttrs!=[]:
                                        for mainFurAttr in mainFurAttrs:
                                            if cmds.getAttr(mainctrl[0]+'.'+mainFurAttr)!=1:
                                                cmds.setAttr(mainctrl[0]+'.'+mainFurAttr,1)
                            #geoall: collect statically-hidden descendant transforms
                            # (no anim curve on visibility) for the *_del group.
                            geoalls =[a for a in cmds.listRelatives(geogrp,ad=1) if cmds.nodeType(a)=="transform"]
                            for geoall in geoalls:
                                if cmds.listConnections(geoall+".visibility",type="animCurve")==None:
                                    if cmds.getAttr(geoall+".visibility")==False:
                                        geodels.append(geoall)
                            geoout.append(geogrp)
                        # Park the hidden transforms in a <geo>_del group outside the rig.
                        if geodels !=[]:
                            if cmds.objExists(str(geogrps[0]+"_del"))==True:
                                # NOTE(review): geogrp[0] is the first *character* of the
                                # last geo group -- probably meant geogrps[0]; confirm.
                                cmds.rename(str(geogrps[0]+"_del"),str(geogrp[0]+"_tmp"))
                                cmds.parent(geodels,w=1)
                                cmds.group(geodels,name=str(geogrps[0]+"_del"))
                            else:
                                cmds.parent(geodels,w=1)
                                cmds.group(geodels,name=str(geogrps[0]+"_del") )
                    #bake blender: bake blendColors driven by the rig's key_Ani control.
                    # NOTE(review): the slice keeps the first len(shortname) characters
                    # of `sel` -- presumably rebuilding the namespace prefix; confirm.
                    try:
                        blctrl = sel[:len(sel.split(":")[-1])]+":key_Ani"
                    except:
                        blctrl = []
                    if blctrl!=[]:
                        if cmds.objExists(blctrl):
                            blnode = [ bln for bln in cmds.hyperShade(listDownstreamNodes =blctrl) if cmds.nodeType(bln)=="blendColors"]
                            cmds.bakeResults(blnode,simulation=1,t=rangea+":"+rangeb,sampleBy = int(rangesam))
                if furcb==1:
                    locatercols += locsels
                    # Keep only Yeti nodes that are visible (node and parent group).
                    if yetiselgrps!=[]:
                        yetishowgrps = [yt for yt in cmds.listRelatives(yetiselgrps,c=1) if yt.find("yeti_show_G")!=-1]
                        yetinodes =cmds.listRelatives(yetishowgrps,c=1)
                        if yetinodes!=None:
                            for yetinode in yetinodes:
                                if cmds.getAttr(yetinode+".visibility")==True:
                                    if cmds.getAttr(cmds.listRelatives(yetinode,p=1)[0]+".visibility")==True:
                                        yeticols.append(yetinode)
        # Honour the checkboxes.
        if geocb==False:
            geoout=[]
        if furcb==False:
            yeticols=[]
            locatercols=[]
        #output abc
        cmds.select(geoout,r=1)
        abcname ="-frameRange {0} {1} -uvWrite -worldSpace -writeVisibility -dataFormat hdf".format(rangea,rangeb)
        for ou in range(len(geoout)):
            abcname = abcname+" -root "+geoout[ou]
        # Only export when at least one -root flag was appended.
        if abcname!="-frameRange {0} {1} -uvWrite -worldSpace -writeVisibility -dataFormat hdf".format(rangea,rangeb):
            # Never overwrite: suffix _1, _2, ... until the path is free.
            abcoutputfurpath = path+"/"+filename+".abc"
            nn=1
            while os.path.exists(abcoutputfurpath)==True:
                abcoutputfurpath = path+"/"+filename+"_"+str(nn)+".abc"
                nn =nn+1
            cmds.AbcExport(j = abcname+" -file {0}".format(abcoutputfurpath))
        #output yeticache
        if yeticols!=[]:
            yeticachename = yeticols[0].replace(":","__")
            cmds.select(yeticols,r=1)
            # Flush Yeti caches so the export samples fresh data.
            cmds.pgYetiCommand(flushGeometryCache=1)
            cmds.pgYetiCommand(flushTextureCache=1)
            cmds.pgYetiCommand(flushDisplayCache=1)
            if os.path.exists(path+"/yetiAss")==False:
                os.mkdir(path+"/yetiAss")
            cmds.select(yeticols,r=1)
            # mask=24 restricts exported node categories; compressed=1 writes
            # one .ass.gz per frame.
            cmds.arnoldExportAss(f=path+"/yetiAss/"+yeticachename+".ass",s=1,expandProcedurals=1,startFrame=int(rangea),endFrame=int(rangeb),frameStep=int(rangesam),lightLinks=0,compressed=1,boundingBox=1,shadowLinks=0,mask=24,cam='perspShape')
            # Bring the sequence back as an aiStandIn driven by the current frame.
            mtoa.core.createStandIn(path+"/yetiAss/"+yeticachename+r".####.ass.gz")
            assShape = cmds.ls(sl=1,type="aiStandIn")
            cmds.expression(s =assShape[0]+ ".frameNumber=frame")
            yetiAss = cmds.listRelatives(assShape[0],p=1)
            # (Chinese) "Exported N fur nodes!!"
            print "已输出"+str(len(yeticols))+"个毛发节点!!",
        #bak hairAss locater
        if locatercols!=[]:
            # Unlock every keyable attribute so bakeResults can key them.
            for locatercol in locatercols:
                locaterAttrs = cmds.listAttr(locatercol,v=1,k=1)
                for locaterAttr in locaterAttrs:
                    lockjudge = cmds.getAttr(locatercol+"."+locaterAttr,l=1)
                    if lockjudge==1:
                        cmds.setAttr(locatercol+"."+locaterAttr,l=0)
            pm.bakeResults(locatercols,simulation=1,t=rangea+":"+rangeb,sampleBy = int(rangesam))
            # Detach the locators from the rig: remove constraints and the
            # visibility connection, then move them to world space.
            for locatercol in locatercols:
                parentCons = cmds.listRelatives(locatercol,c=1,type=("parentConstraint","scaleConstraint"))
                cmds.delete(parentCons)
                lacattrv = cmds.listConnections(locatercol+".visibility",d=0,plugs=1)
                if lacattrv!=[]:
                    cmds.disconnectAttr(lacattrv[0],locatercol+".visibility")
            cmds.parent(locatercols,w=1)
        # Package standins + locators into a fur-cache .mb (unique file name).
        if yetiAss!=[] and locatercols!=[]:
            rmparentCons(filename,"Ass_G",locatercols)
            furcachegrp = cmds.group(yetiAss,filename+"_Ass_G",name=filename+"_furCache_G")
            cmds.select(furcachegrp,r=1,hi=1)
            cmds.rename(yetiAss[0],filename+"_yetiAss_Aist")
            outputfurpath = path+"/"+filename+"_fur.mb"
            nn=1
            while os.path.exists(outputfurpath)==True:
                outputfurpath = path+"/"+filename+"_fur_"+str(nn)+".mb"
                nn =nn+1
            cmds.file(outputfurpath,force = 1,options ='v=0;' ,typ = 'mayaBinary',pr=1,es=1 )
        elif locatercols!=[]:
            rmparentCons(filename,"Ass_G",locatercols)
            furcachegrp = cmds.group(filename+"_Ass_G",name=filename+"_furCache_G")
            cmds.select(furcachegrp,r=1,hi=1)
            outputfurpath = path+"/"+filename+"_fur.mb"
            nn=1
            while os.path.exists(outputfurpath)==True:
                outputfurpath = path+"/"+filename+"_fur_"+str(nn)+".mb"
                nn =nn+1
            cmds.file(outputfurpath,force = 1,options ='v=0;' ,typ = 'mayaBinary',pr=1,es=1 )
    else:
        # (Chinese) "Target path does not exist!!!"
        cmds.warning('目标路径不存在!!!')
def process(self, instance):
    """Extract the instance's set members to .ass, as a single file or a
    frame sequence, and register the result as an 'ass' representation."""
    as_sequence = instance.data.get("exportSequence", False)

    out_dir = self.staging_dir(instance)
    ass_name = "{}.ass".format(instance.name)
    ass_path = os.path.join(out_dir, ass_name)
    exported = list()

    # Write out .ass file
    self.log.info("Writing: '%s'" % ass_path)
    with avalon.maya.maintained_selection():
        members = instance.data["setMembers"]
        self.log.info("Writing: {}".format(members))
        cmds.select(members, noExpand=True)

        if as_sequence:
            self.log.info("Extracting ass sequence")

            # Frame range, padded by handles on both sides when present.
            seq_start = instance.data.get("frameStart", 1)
            seq_end = instance.data.get("frameEnd", 1)
            handles = instance.data.get("handles", 0)
            step = instance.data.get("step", 0)
            if handles:
                seq_start -= handles
                seq_end += handles

            files = cmds.arnoldExportAss(filename=ass_path,
                                         selected=True,
                                         asciiAss=self.asciiAss,
                                         shadowLinks=True,
                                         lightLinks=True,
                                         boundingBox=True,
                                         startFrame=seq_start,
                                         endFrame=seq_end,
                                         frameStep=step)
            exported = [os.path.split(f)[1] for f in files]
            self.log.info("Exported: {}".format(exported))
        else:
            self.log.info("Extracting ass")
            # NOTE(review): the single-frame branch hard-codes asciiAss=False
            # while the sequence branch uses self.asciiAss -- confirm intent.
            cmds.arnoldExportAss(filename=ass_path,
                                 selected=True,
                                 asciiAss=False,
                                 shadowLinks=True,
                                 lightLinks=True,
                                 boundingBox=True)
            self.log.info("Extracted {}".format(ass_name))
            exported = ass_name

    # Frame-range keys were consumed above; drop them from the instance.
    for key in ("frameStart", "frameEnd", "step", "handles",
                "handleEnd", "handleStart"):
        instance.data.pop(key, None)

    instance.data.setdefault("representations", [])

    representation = {
        'name': 'ass',
        'ext': 'ass',
        'files': exported,
        "stagingDir": out_dir
    }
    if as_sequence:
        representation['frameStart'] = seq_start

    instance.data["representations"].append(representation)

    self.log.info("Extracted instance '%s' to: %s" % (instance.name, out_dir))
def export_ass(self,
               nodes,
               outpath,
               file_node_attrs,
               has_yeti,
               start,
               end,
               step,
               expand_procedurals=True):
    """Export `nodes` to Arnold .ass files and post-process the output.

    After the export, every written .ass file gets its project paths
    replaced with [AVALON_PROJECTS]/[AVALON_PROJECT] tokens and any
    color_manager_syncolor block stripped out.

    Args:
        nodes (list): Node names to select and export.
        outpath (str): Output .ass path (frame-padded by Arnold).
        file_node_attrs (dict): File-node attribute -> published path,
            applied (and unlocked) only for the duration of the export.
        has_yeti (bool): Whether the export involves Yeti nodes.
        start, end, step: Frame range and step for the export.
        expand_procedurals (bool): Expand procedurals during export.
    """
    from maya import cmds, mel
    from reveries.maya import arnold, capsule

    # Ensure option created
    arnold.utils.create_options()

    render_settings = {
        # Disable Auto TX update and enable to use existing TX
        "defaultArnoldRenderOptions.autotx": False,
        "defaultArnoldRenderOptions.use_existing_tiled_textures": True,
        # Ensure frame padding == 4
        "defaultRenderGlobals.extensionPadding": 4,
    }

    # Yeti
    if has_yeti:
        # In Deadline, this is a script job instead of rendering job, so
        # the `pgYetiPreRender` Pre-Render MEL will not be triggered.
        # We need to call it by ourselve, or Yeti will complain about
        # cache temp dir not exist.
        mel.eval("pgYetiPreRender;")

    # Export under a controlled scene state; each manager restores its
    # aspect afterwards. NOTE: contextlib.nested is Python-2 only.
    with contextlib.nested(
        capsule.no_undo(),
        capsule.no_refresh(),
        capsule.evaluation("off"),
        capsule.maintained_selection(),
        capsule.ref_edit_unlock(),
        # (NOTE) Ensure attribute unlock
        capsule.attribute_states(file_node_attrs.keys(), lock=False),
        # Change to published path
        capsule.attribute_values(file_node_attrs),
        # Fixed render settings
        capsule.attribute_values(render_settings),
    ):
        cmds.select(nodes, replace=True)
        asses = cmds.arnoldExportAss(filename=outpath,
                                     selected=True,
                                     startFrame=start,
                                     endFrame=end,
                                     frameStep=step,
                                     expandProcedurals=expand_procedurals,
                                     boundingBox=True,
                                     # Mask:
                                     #   Shapes,
                                     #   Shaders,
                                     #   Override Nodes,
                                     #   Operators,
                                     #
                                     # mask=4152,  # No Color Manager
                                     mask=6200)  # With Color Manager

    # Change to environment var embedded path
    root = avalon.api.registered_root().replace("\\", "/")
    project = avalon.api.Session["AVALON_PROJECT"]

    for ass in asses:
        lines = list()
        has_change = False

        with open(ass, "r") as assf:
            for line in assf.readlines():
                # Only 'filename' parameter lines in the .ass carry project
                # paths that need the env-var substitution.
                if line.startswith(" filename "):
                    line = line.replace(root, "[AVALON_PROJECTS]", 1)
                    line = line.replace(project, "[AVALON_PROJECT]", 1)
                    has_change = True
                lines.append(line)

        # Remove color manager
        # (NOTE): If Color Manager included,
        #         may raise error if rendering
        #         in Houdini or other DCC.
        try:
            s = lines.index("color_manager_syncolor\n")
        except ValueError:
            # No color manager found
            pass
        else:
            # Drop the whole block up to and including its closing brace.
            e = lines.index("}\n", s) + 1
            lines = lines[:s] + lines[e:]
            has_change = True

        # Re-write only when something changed.
        if has_change:
            with open(ass, "w") as assf:
                assf.write("".join(lines))
def export_ass(data, start, end, step):
    """Export the instance members to Arnold .ass files and rewrite any
    published file paths inside them to [AVALON_*] env-var tokens.

    Args:
        data (dict): Requires "hasYeti", "fileNodeAttrs", "member",
            "cachePath".
        start, end, step: Frame range and step for the export.
    """
    arnold_tx_settings = {
        "defaultArnoldRenderOptions.autotx": False,
        "defaultArnoldRenderOptions.use_existing_tiled_textures": True,
    }

    # Yeti
    if data["hasYeti"]:
        # In Deadline, this is a script job instead of rendering job, so
        # the `pgYetiPreRender` Pre-Render MEL will not be triggered.
        # We need to call it by ourselve, or Yeti will complain about
        # cache temp dir not exist.
        mel.eval("pgYetiPreRender;")

    with contextlib.nested(
        capsule.no_undo(),
        capsule.no_refresh(),
        capsule.evaluation("off"),
        capsule.maintained_selection(),
        capsule.ref_edit_unlock(),
        # (NOTE) Ensure attribute unlock
        capsule.attribute_states(data["fileNodeAttrs"].keys(), lock=False),
        # Change to published path
        capsule.attribute_values(data["fileNodeAttrs"]),
        # Disable Auto TX update and enable to use existing TX
        capsule.attribute_values(arnold_tx_settings),
    ):
        cmds.select(data["member"], replace=True)
        asses = cmds.arnoldExportAss(filename=data["cachePath"],
                                     selected=True,
                                     startFrame=start,
                                     endFrame=end,
                                     frameStep=step,
                                     expandProcedurals=True,
                                     boundingBox=True,
                                     # Mask:
                                     #   Shapes,
                                     #   Shaders,
                                     #   Override Nodes,
                                     #   Operators,
                                     #
                                     # (NOTE): If Color Manager included,
                                     # may raise error if rendering
                                     # in Houdini or other DCC.
                                     # mask=6200,  # With Color Manager
                                     #
                                     # BUGFIX: the closing parenthesis was
                                     # previously inside a comment
                                     # ("# mask=4152)"), leaving this call
                                     # unterminated -- a syntax error. The
                                     # no-color-manager mask is kept active
                                     # per the NOTE above.
                                     mask=4152)  # No Color Manager

    # Change to environment var embedded path
    root = avalon.api.registered_root().replace("\\", "/")
    project = avalon.api.Session["AVALON_PROJECT"]

    for ass in asses:
        lines = list()
        has_change = False

        with open(ass, "r") as assf:
            for line in assf.readlines():
                # Only 'filename' parameter lines carry paths to substitute.
                if line.startswith(" filename "):
                    line = line.replace(root, "[AVALON_PROJECTS]", 1)
                    line = line.replace(project, "[AVALON_PROJECT]", 1)
                    has_change = True
                lines.append(line)

        # Re-write only when something was substituted.
        if has_change:
            with open(ass, "w") as assf:
                assf.write("".join(lines))
def export_ass(self, destination, outliner_set, start_frame, end_frame, step=1.0):
    """Main Arnold ASS export function.

    Args:
        destination (str): Path to the output file.
        outliner_set (tuple): A list of transforms contained in a geometry set.
        start_frame (int): Start frame.
        end_frame (int): End frame.
        step (float): Frame step (accepted for API compatibility; the loop
            advances one frame at a time).

    """
    common.check_type(destination, str)
    common.check_type(outliner_set, (tuple, list))
    common.check_type(start_frame, (int, float))
    common.check_type(end_frame, (int, float))
    common.check_type(step, (float, int))

    import arnold

    # Let's get the first renderable camera. This is a bit of a leap of faith but
    # ideally there's only one renderable camera in the scene.
    # NOTE(review): if no camera is renderable this silently falls back to
    # the last camera returned by `ls` -- confirm that is acceptable.
    cams = cmds.ls(cameras=True)
    cam = None
    for cam in cams:
        if cmds.getAttr(f'{cam}.renderable'):
            break

    cmds.select(outliner_set, replace=True)

    ext = destination.split('.')[-1]
    _destination = str(destination)
    is_sequence = start_frame != end_frame

    if is_sequence:
        # Create a mock version, if it does not exist. (Loop-invariant, so
        # done once up front instead of every frame.)
        open(destination, 'a').close()

    start_time = time.time()
    # BUGFIX: check_type above allows floats, but range() requires ints.
    for fr in range(int(start_frame), int(end_frame) + 1):
        QtWidgets.QApplication.instance().processEvents()

        if self._interrupt_requested:
            self._interrupt_requested = False
            return

        cmds.currentTime(fr, edit=True)

        if self.progress_widget.wasCanceled():
            return
        self.progress_widget.setValue(fr)

        if is_sequence:
            # Per-frame file name: <name>_<padded frame>.<ext>
            _destination = destination.replace(f'.{ext}', '')
            _destination += '_'
            _destination += str(fr).zfill(mayabase.DefaultPadding)
            _destination += '.'
            _destination += ext

        cmds.arnoldExportAss(
            f=_destination,
            cam=cam,
            s=True,  # selected
            mask=arnold.AI_NODE_CAMERA |
                 arnold.AI_NODE_SHAPE |
                 arnold.AI_NODE_SHADER |
                 arnold.AI_NODE_OVERRIDE |
                 arnold.AI_NODE_LIGHT)

        mayabase.report_export_progress(start_frame, fr, end_frame, start_time)
def w13A_uiExportAss(self, *args):
    """Export each enabled hair root group from the UI as an Arnold .ass
    sequence over the playback range, logging results to 'ass_hairInfo.txt'.
    """
    end = cmds.intField('w13A_count', q=True, v=True)
    exportDir = cmds.textFieldGrp('w13A_uiAssDirectory', q=True, tx=True)
    exportDir = exportDir.replace('\\', '/')
    if not os.path.exists(exportDir):
        try:
            os.makedirs(exportDir)
        except OSError:
            # Fall back to <workspace>/data/<scene>/ass_hair and reflect
            # the new path in the UI.
            sceneName = cmds.file(q=True, sn=True, shn=True).rsplit('.', 1)[0]
            assDir = os.path.join(cmds.workspace(q=True, rootDirectory=True),
                                  'data', sceneName, 'ass_hair')
            assDir = assDir.replace('\\', '/')
            cmds.textFieldGrp('w13A_uiAssDirectory', e=True, tx=assDir)
            exportDir = assDir

    startF = cmds.playbackOptions(q=True, min=True)
    endF = cmds.playbackOptions(q=True, max=True)

    infoFileName = os.path.join(exportDir, 'ass_hairInfo.txt')
    # BUGFIX: the info file used to be opened with 'w' (to truncate) and the
    # handle was never closed -- a leak. Truncate and close explicitly.
    # (Also removed a dead loop that built an unused `assInfoStr`.)
    open(infoFileName, 'w').close()

    import datetime
    # Today's date as an integer, e.g. 20240131.
    today = int(datetime.date.today().strftime('%Y%m%d'))

    sceneName = cmds.file(q=True, sn=True, shn=True).rsplit('.', 1)[0]
    infoFile = open(infoFileName, 'a')
    infoFile.write('\n\n%s%s\n%s' % (sceneName, '*' * 50, today))
    infoFile.close()

    for index in range(1, end):
        objs = cmds.textFieldGrp('w13A_uiRootGrp%03d' % (index), q=True, tx=True)
        expable = cmds.checkBox('w13A_uiExport%03d' % (index), q=True, v=True)
        if objs != '' and expable:
            assFile = cmds.textFieldGrp('w13A_uiAssName%03d' % (index), q=True, tx=True)
            assFilePath = '%s/%s/%s.ass' % (exportDir, assFile, assFile)
            # SECURITY(review): eval() on UI text. The field is expected to
            # hold a Python list literal of node names; ast.literal_eval
            # would be safer if that format holds.
            objs = eval(objs)
            # Select the shape parents of all shave/pfx hair under the roots.
            hairs = cmds.listRelatives(objs, ad=True, type=['shaveHair', 'pfxHair'], f=True)
            hairs = cmds.listRelatives(hairs, parent=True, f=True)
            cmds.select(hairs, r=True)
            try:
                cmds.arnoldExportAss(f=assFilePath, s=True,
                                     startFrame=startF, endFrame=endF,
                                     mask=57, lightLinks=False,
                                     frameStep=1.0, compressed=True,
                                     boundingBox=True, shadowLinks=False,
                                     cam='perspShape')
                infoFile = open(infoFileName, 'a')
                infoFile.write('\n\n' + assFile + '\n' + str(objs))
                infoFile.close()
            # FIX: narrowed from a bare `except:`; still best-effort --
            # groups whose export fails are skipped.
            except Exception:
                pass
# --- Script fragment: group the selection, export it as an Arnold .ass,
# --- and re-import it as an aiStandIn ready to swap with the original geo.
# NOTE(review): `selected` and `groupname` are defined earlier in the script
# (outside this fragment) -- confirm with the full file.
cmds.select( selected )
mel.eval('doGroup 0 1 1;')
cmds.rename( groupname )
#Sets the name of the ass file and path and exports the .ass
#Change this path!
#export_dir = str(cmds.fileDialog2(fm=3, dialogStyle=2, cap='Select output location', okCaption='Save here'))
export_dir = "/mnt/projects/shr/assets/Environment/sinkhole/SHD/work/maya/cache"
# NOTE(review): export_dir has no trailing '/' -- export_file becomes
# ".../cache<groupname>.ass"; probably missing a path separator.
export_file = export_dir+groupname+'.ass'
print export_file
# mask=253 exports (nearly) all node categories -- confirm against AI_NODE_*.
output = cmds.arnoldExportAss( f=export_file, fsh=True, mask=253, lightLinks=0, shadowLinks=0, selected=True)
#imports .ass container and fills it with export_file path
cmds.file( export_file, i=True )
#cmds.setAttr( "ArnoldStandInShape.deferStandinLoad", 0) #No longer applicable in Arnold 5
imported = cmds.ls( 'ArnoldStandIn*', sl=False )
#selects and renames the standin
swapOutName = ( groupname+"_standIn" )
cmds.rename( imported[0], swapOutName)
cmds.select( groupname, swapOutName )
#modify > replace geo with standin