def process(self, instance):
    """Extract Yeti fur caches plus the fursettings metadata for *instance*.

    Writes one ``<node>.%04d.fur`` sequence per pgYetiMaya node into the
    staging directory, optionally dumps ``yeti.fursettings`` as JSON, and
    registers the results on ``instance.data["files"]``.

    Args:
        instance: publish instance containing pgYetiMaya nodes.

    Raises:
        RuntimeError: if the instance contains no pgYetiMaya nodes.
    """
    yeti_nodes = cmds.ls(instance, type="pgYetiMaya")
    if not yeti_nodes:
        raise RuntimeError("No pgYetiMaya nodes found in the instance")

    # Define extract output file path
    dirname = self.staging_dir(instance)

    # Yeti related staging dirs
    data_file = os.path.join(dirname, "yeti.fursettings")

    # Collect information for writing cache
    start_frame = instance.data.get("startFrame")
    end_frame = instance.data.get("endFrame")
    # BUGFIX: default to 0 so a missing "preroll" key does not raise a
    # TypeError on the `> 0` comparison below (get() returned None).
    preroll = instance.data.get("preroll", 0)
    if preroll > 0:
        start_frame -= preroll

    kwargs = {}
    samples = instance.data.get("samples", 0)
    if samples == 0:
        # Yeti expects sample times as a space-separated string.
        kwargs.update({"sampleTimes": "0.0 1.0"})
    else:
        kwargs.update({"samples": samples})

    self.log.info("Writing out cache")
    # Start writing the files for snap shot
    # <NAME> will be replaced by the Yeti node name
    path = os.path.join(dirname, "<NAME>.%04d.fur")
    cmds.pgYetiCommand(yeti_nodes,
                       writeCache=path,
                       range=(start_frame, end_frame),
                       updateViewport=False,
                       generatePreview=False,
                       **kwargs)

    cache_files = [x for x in os.listdir(dirname) if x.endswith(".fur")]

    self.log.info("Writing metadata file")
    settings = instance.data.get("fursettings", None)
    if settings is not None:
        with open(data_file, "w") as fp:
            json.dump(settings, fp, ensure_ascii=False)

    # Ensure files can be stored
    if "files" not in instance.data:
        instance.data["files"] = list()

    # NOTE(review): cache_files is appended as a nested list (sequence
    # representation); confirm downstream integrators expect this shape.
    instance.data["files"].extend([cache_files, "yeti.fursettings"])

    self.log.info("Extracted {} to {}".format(instance, dirname))
def export_yeti_nodes(self, len_nspacelist, percent):
    """Export every Yeti node of this asset to a temp dir, then copy the
    resulting files to their publish destinations.

    Args:
        len_nspacelist (int): total namespace count, used to size the
            progress increments printed as ``percent:<value>``.
        percent (float): progress value at which this export starts.
    """
    directory = utils.temp_dir()
    for node in self.yeti_nodes_list:
        node_name = node.split(':')[-1]
        fur_ext = prefs.custom_pub_ext_dic[self.asset.stage][
            self.asset.software]
        if fur_ext == 'abc':
            out_path = os.path.join(directory, '{}.abc'.format(node_name))
            cmds.pgYetiCommand(node,
                               writeAlembic=out_path,
                               range=(self.range[0], self.range[-1]),
                               samples=3,
                               sampleTimes="-0.2 0.0 0.2")
        else:
            # <node>.%04d.fur writes one fur file per frame.
            out_path = os.path.join(directory,
                                    '{}.%04d.fur'.format(node_name))
            cmds.pgYetiCommand(node,
                               writeCache=out_path,
                               range=(self.range[0], self.range[-1]),
                               samples=3,
                               sampleTimes="-0.2 0.0 0.2")
    # First half of this namespace's progress share is the export itself.
    current_percent = float(percent) + (100.0 / int(len_nspacelist)) / 2
    print('percent:{}'.format(current_percent))
    try:
        exported_files_list = os.listdir(directory)
        export_files = self.asset.export_multiple(
            '{}_{}_{}'.format(self.fur_asset.name,
                              self.fur_asset.variant,
                              self.count), exported_files_list)
        percent_step = (
            (100.0 / int(len_nspacelist)) / 2) / len(exported_files_list)
        percent = current_percent
        # BUGFIX: use enumerate instead of list.index() — index() is an
        # O(n) scan per file and returns the wrong destination slot when
        # two exported files share the same name.
        for index, exported_file in enumerate(exported_files_list):
            full_file = os.path.join(directory, exported_file)
            print('current_task:Copying file {}'.format(full_file))
            print('percent:{}'.format(percent))
            time.sleep(0.01)
            shutil.copyfile(full_file, export_files[index])
            percent += percent_step
    except Exception:
        # Best effort: report the failure but do not abort the publish.
        print(str(traceback.format_exc()))
def curvesToGrom(sel_set, sel_obj):
    """Convert a curve set into a pgYetiGroom node driven by sel_obj.

    sel_obj may be a mesh shape or its transform; when it is a transform
    the first shape beneath it is used as the groom's input geometry.
    """
    # Resolve a transform down to its mesh shape if needed.
    if cmds.nodeType(sel_obj) != "mesh":
        sel_obj = cmds.listRelatives(sel_obj, shapes=True)[0]

    # Build the groom node and wire in the driving mesh plus scene time.
    groom_shape = cmds.createNode('pgYetiGroom')
    cmds.connectAttr(sel_obj + ".worldMesh[0]",
                     groom_shape + ".inputGeometry", f=True)
    cmds.connectAttr("time1.outTime", groom_shape + ".currentTime", f=True)

    # Rename the transform/shape pair: park the shape under a temporary
    # name so the transform can claim the "pgYetiGroom" base name first.
    parent_transform = cmds.listRelatives(groom_shape, p=True)[0]
    cmds.rename(groom_shape, "tempPgYetiGroomName")
    transform_name = cmds.rename(parent_transform, "pgYetiGroom")
    groom_shape = cmds.rename("tempPgYetiGroomName",
                              transform_name + "Shape")

    # Let Yeti populate the groom from the curve set.
    cmds.pgYetiCommand(groom_shape, convertFromCurves=sel_set,
                       inputGeometry=sel_obj, stepSize=0.1)
def get_yeti_resources(self, node):
    """Get all texture file paths

    If a texture is a sequence it gathers all sibling files to ensure
    the texture sequence is complete.

    Args:
        node (str): node name of the pgYetiMaya node

    Returns:
        list: one dict per texture with keys "files", "source", "node".

    Raises:
        ValueError: when textures exist but imageSearchPath is empty.
    """
    resources = []

    image_search_path = cmds.getAttr("{}.imageSearchPath".format(node))
    texture_filenames = cmds.pgYetiCommand(node, listTextures=True)
    if texture_filenames and not image_search_path:
        # BUGFIX: corrected the error message — "atttribute" typo and a
        # quote split mid-phrase in the original string.
        raise ValueError("pgYetiMaya node '%s' is missing the path to the "
                         "files in the 'imageSearchPath' "
                         "attribute" % node)

    for texture in texture_filenames:
        node_resources = {"files": [], "source": texture, "node": node}
        texture_filepath = os.path.join(image_search_path, texture)
        # More than one dot in the name implies a tile/frame token.
        if len(texture.split(".")) > 2:
            # For UDIM based textures (tiles)
            if "<UDIM>" in texture:
                sequences = self.get_sequence(texture_filepath,
                                              pattern="<UDIM>")
                node_resources["files"].extend(sequences)
            # Frame-based textures (animated masks f.e)
            elif "%04d" in texture:
                sequences = self.get_sequence(texture_filepath,
                                              pattern="%04d")
                node_resources["files"].extend(sequences)
            # Assuming it is a fixed name
            else:
                node_resources["files"].append(texture_filepath)
        else:
            node_resources["files"].append(texture_filepath)

        resources.append(node_resources)

    return resources
def abcoutput(arg):
    # Export pipeline step driven by UI fields: selected geo groups go to
    # Alembic (.abc), visible Yeti fur nodes to Arnold .ass standins, and
    # baked "arnold_loc" locators to a Maya binary (.mb).
    # NOTE(review): `arg` is the unused UI-button callback argument.
    filename = os.path.basename(os.path.splitext(cmds.file(expandName=1,q=1))[0])
    path =cmds.textField('pathnum',tx=1,q=1).replace("\\","/")
    rangea =cmds.textField('rangenuma',tx=1,q=1)
    rangeb =cmds.textField('rangenumb',tx=1,q=1)
    rangesam =cmds.textField('rangesamnum',tx=1,q=1)
    geocb=cmds.checkBox("geonlycb" ,q=True,v=True)
    furcb = cmds.checkBox("furonlycb" ,q=True,v=True)
    # Make sure the Alembic exporter plugin is loaded (best effort).
    if cmds.pluginInfo("AbcExport",q=1,loaded=1,name=1)==0:
        try:
            cmds.loadPlugin("AbcExport")
        except:
            pass
    if os.path.exists(path):
        sels = cmds.ls(sl=1)
        yeticols =[]        # visible Yeti fur nodes to export
        yetiAss =[]         # resulting aiStandIn transform(s)
        locatercols = []    # visible "arnold_loc" locators to bake
        geoout = []         # geometry groups to export to Alembic
        if sels!=[]:
            for sel in sels:
                locsels = [ y for y in cmds.listRelatives(sel,c=1) if y.find("arnold_loc")!=-1 and cmds.getAttr(y+".visibility")==1]
                yetiselgrps = [ y for y in cmds.listRelatives(sel,c=1) if y.find("yeti_G")!=-1]
                if geocb==1:
                    geodels = []
                    # Miarmy crowd rigs keep geometry under an "Agent" group.
                    Miajudge = sel.find('Miarmy_Contents')
                    if Miajudge ==-1:
                        geogrps = [ y for y in cmds.listRelatives(sel,c=1) if y.find("_geo")!=-1]
                    else:
                        Agent =[a for a in cmds.listRelatives(sel,c=1) if a.find('Agent')!=-1]
                        if Agent!=[]:
                            geogrps = [b for b in cmds.listRelatives(Agent[0],c=1) if b.find('Geometry')!=-1]
                        else:
                            geogrps = []
                    if geogrps!=[]:
                        for geogrp in geogrps:
                            # Force the geo group visible before export.
                            if cmds.getAttr(geogrp+".visibility")==0:
                                try:
                                    cmds.setAttr(geogrp+".visibility",1)
                                except:
                                    cmds.warning(geogrp+".visibility cant be set!!")
                            # mainCtrl: switch on any fur attributes on the rig's Main control
                            if cmds.listConnections(geogrp,d=0,type="transform")!=None:
                                mainctrl =[m for m in cmds.listConnections(geogrp,d=0,type="transform") if m.find("Main")!=-1]
                                if mainctrl!=[]:
                                    mainFurAttrs =[f for f in cmds.listAttr(mainctrl[0],k=1) if f in['hair','yeti','hairYeti']]
                                    if mainFurAttrs!=[]:
                                        for mainFurAttr in mainFurAttrs:
                                            if cmds.getAttr(mainctrl[0]+'.'+mainFurAttr)!=1:
                                                cmds.setAttr(mainctrl[0]+'.'+mainFurAttr,1)
                            # geoall: collect statically-hidden descendant transforms
                            # (no animCurve on visibility) for removal from the export
                            geoalls =[a for a in cmds.listRelatives(geogrp,ad=1) if cmds.nodeType(a)=="transform"]
                            for geoall in geoalls:
                                if cmds.listConnections(geoall+".visibility",type="animCurve")==None:
                                    if cmds.getAttr(geoall+".visibility")==False:
                                        geodels.append(geoall)
                            geoout.append(geogrp)
                        # Park the hidden transforms in a "<grp>_del" holding group.
                        if geodels !=[]:
                            if cmds.objExists(str(geogrps[0]+"_del"))==True:
                                # NOTE(review): geogrp[0] indexes the *string* (first
                                # character) — looks like a typo for geogrps[0];
                                # confirm intent before relying on the "_tmp" name.
                                cmds.rename(str(geogrps[0]+"_del"),str(geogrp[0]+"_tmp"))
                                cmds.parent(geodels,w=1)
                                cmds.group(geodels,name=str(geogrps[0]+"_del"))
                            else:
                                cmds.parent(geodels,w=1)
                                cmds.group(geodels,name=str(geogrps[0]+"_del") )
                        # bake blender: bake blendColors nodes driven by the key_Ani control
                        try:
                            blctrl = sel[:len(sel.split(":")[-1])]+":key_Ani"
                        except:
                            blctrl = []
                        if blctrl!=[]:
                            if cmds.objExists(blctrl):
                                blnode = [ bln for bln in cmds.hyperShade(listDownstreamNodes =blctrl) if cmds.nodeType(bln)=="blendColors"]
                                cmds.bakeResults(blnode,simulation=1,t=rangea+":"+rangeb,sampleBy = int(rangesam))
                if furcb==1:
                    locatercols += locsels
                    if yetiselgrps!=[]:
                        yetishowgrps = [yt for yt in cmds.listRelatives(yetiselgrps,c=1) if yt.find("yeti_show_G")!=-1]
                        yetinodes =cmds.listRelatives(yetishowgrps,c=1)
                        if yetinodes!=None:
                            for yetinode in yetinodes:
                                # Only export nodes whose node AND parent are visible.
                                if cmds.getAttr(yetinode+".visibility")==True:
                                    if cmds.getAttr(cmds.listRelatives(yetinode,p=1)[0]+".visibility")==True:
                                        yeticols.append(yetinode)
        # Checkbox state overrides whatever was collected above.
        if geocb==False:
            geoout=[]
        if furcb==False:
            yeticols=[]
            locatercols=[]
        # output abc
        cmds.select(geoout,r=1)
        abcname ="-frameRange {0} {1} -uvWrite -worldSpace -writeVisibility -dataFormat hdf".format(rangea,rangeb)
        for ou in range(len(geoout)):
            abcname = abcname+" -root "+geoout[ou]
        # Only export when at least one -root was appended to the job string.
        if abcname!="-frameRange {0} {1} -uvWrite -worldSpace -writeVisibility -dataFormat hdf".format(rangea,rangeb):
            abcoutputfurpath = path+"/"+filename+".abc"
            nn=1
            # Never overwrite: bump a numeric suffix until the path is free.
            while os.path.exists(abcoutputfurpath)==True:
                abcoutputfurpath = path+"/"+filename+"_"+str(nn)+".abc"
                nn =nn+1
            cmds.AbcExport(j = abcname+" -file {0}".format(abcoutputfurpath))
        # output yeticache as an Arnold .ass sequence plus a standin
        if yeticols!=[]:
            yeticachename = yeticols[0].replace(":","__")
            cmds.select(yeticols,r=1)
            # Flush Yeti caches so the export starts from a clean state.
            cmds.pgYetiCommand(flushGeometryCache=1)
            cmds.pgYetiCommand(flushTextureCache=1)
            cmds.pgYetiCommand(flushDisplayCache=1)
            if os.path.exists(path+"/yetiAss")==False:
                os.mkdir(path+"/yetiAss")
            cmds.select(yeticols,r=1)
            cmds.arnoldExportAss(f=path+"/yetiAss/"+yeticachename+".ass",s=1,expandProcedurals=1,startFrame=int(rangea),endFrame=int(rangeb),frameStep=int(rangesam),lightLinks=0,compressed=1,boundingBox=1,shadowLinks=0,mask=24,cam='perspShape')
            # Bring the exported .ass sequence back in as a frame-driven standin.
            mtoa.core.createStandIn(path+"/yetiAss/"+yeticachename+r".####.ass.gz")
            assShape = cmds.ls(sl=1,type="aiStandIn")
            cmds.expression(s =assShape[0]+ ".frameNumber=frame")
            yetiAss = cmds.listRelatives(assShape[0],p=1)
            print "已输出"+str(len(yeticols))+"个毛发节点!!",
        # bake hairAss locators: unlock attrs, bake, drop constraints and unparent
        if locatercols!=[]:
            for locatercol in locatercols:
                locaterAttrs = cmds.listAttr(locatercol,v=1,k=1)
                for locaterAttr in locaterAttrs:
                    lockjudge = cmds.getAttr(locatercol+"."+locaterAttr,l=1)
                    if lockjudge==1:
                        cmds.setAttr(locatercol+"."+locaterAttr,l=0)
            pm.bakeResults(locatercols,simulation=1,t=rangea+":"+rangeb,sampleBy = int(rangesam))
            for locatercol in locatercols:
                parentCons = cmds.listRelatives(locatercol,c=1,type=("parentConstraint","scaleConstraint"))
                cmds.delete(parentCons)
                lacattrv = cmds.listConnections(locatercol+".visibility",d=0,plugs=1)
                if lacattrv!=[]:
                    cmds.disconnectAttr(lacattrv[0],locatercol+".visibility")
            cmds.parent(locatercols,w=1)
        # Group the standin and/or locators and export them as one .mb file.
        if yetiAss!=[] and locatercols!=[]:
            rmparentCons(filename,"Ass_G",locatercols)
            furcachegrp = cmds.group(yetiAss,filename+"_Ass_G",name=filename+"_furCache_G")
            cmds.select(furcachegrp,r=1,hi=1)
            cmds.rename(yetiAss[0],filename+"_yetiAss_Aist")
            outputfurpath = path+"/"+filename+"_fur.mb"
            nn=1
            while os.path.exists(outputfurpath)==True:
                outputfurpath = path+"/"+filename+"_fur_"+str(nn)+".mb"
                nn =nn+1
            cmds.file(outputfurpath,force = 1,options ='v=0;' ,typ = 'mayaBinary',pr=1,es=1 )
        elif locatercols!=[]:
            rmparentCons(filename,"Ass_G",locatercols)
            furcachegrp = cmds.group(filename+"_Ass_G",name=filename+"_furCache_G")
            cmds.select(furcachegrp,r=1,hi=1)
            outputfurpath = path+"/"+filename+"_fur.mb"
            nn=1
            while os.path.exists(outputfurpath)==True:
                outputfurpath = path+"/"+filename+"_fur_"+str(nn)+".mb"
                nn =nn+1
            cmds.file(outputfurpath,force = 1,options ='v=0;' ,typ = 'mayaBinary',pr=1,es=1 )
    else:
        cmds.warning('目标路径不存在!!!')
def replaceCallback(self, *args):
    """Replace the curves of the current Yeti groom with instances or
    duplicates of the target objects, with optional random scale/rotation.

    Reads the UI state (duplicate/scale/rotate checkboxes and ranges),
    converts ``self.newPgYetiGroom`` to curves, places one randomly chosen
    target object at the root CV of every generated curve, then groups the
    new objects per source object.
    """
    self.classIf = {}
    if not self.obj:
        mc.confirmDialog(message=u'请添加目标物体放入列表中!')
        return
    duplicateValue = mc.checkBox(self.duplicate, q=True, v=True)
    # Snapshot existing curves so the groom's new curves can be diffed out.
    oldNurbs = mc.ls(type='nurbsCurve')
    Scale = []
    RotateX = ''
    RotateY = ''
    RotateZ = ''
    if mc.checkBox(self.uiScaleCheck, q=True, v=True):
        Scale = [
            mc.floatFieldGrp(self.ScaleFieldX, q=True, v1=True),
            mc.floatFieldGrp(self.ScaleFieldX, q=True, v2=True)
        ]
    if mc.checkBox(self.RotateCheck, q=True, v=True):
        RotateX = [
            mc.floatFieldGrp(self.RotateFieldX, q=True, v1=True),
            mc.floatFieldGrp(self.RotateFieldX, q=True, v2=True)
        ]
        RotateY = [
            mc.floatFieldGrp(self.RotateFieldY, q=True, v1=True),
            mc.floatFieldGrp(self.RotateFieldY, q=True, v2=True)
        ]
        RotateZ = [
            mc.floatFieldGrp(self.RotateFieldZ, q=True, v1=True),
            mc.floatFieldGrp(self.RotateFieldZ, q=True, v2=True)
        ]
    if self.newPgYetiGroom and self.obj:
        mc.pgYetiCommand(self.newPgYetiGroom, convertToCurves=True)
        newNurbs = mc.ls(type='nurbsCurve')
        nurbsObjs = list(set(newNurbs) - set(oldNurbs))
        if nurbsObjs:
            for objs in nurbsObjs:
                transformNode = mc.listRelatives(objs, p=True)[0]
                transf = mc.xform('%s.cv[0]' % transformNode, q=True,
                                  ws=True, t=True)
                # BUGFIX: dict.keys() is not indexable on Python 3;
                # materialize as a list before random indexing.
                dkeys = list(self.obj.keys())
                dag = self.obj[dkeys[rand.randint(0, len(dkeys) - 1)]]
                shapeParent = mc.listRelatives(dag, parent=True)
                if duplicateValue:
                    newObjectDAG = mc.instance(shapeParent[0])
                else:
                    newObjectDAG = mc.duplicate(shapeParent[0], un=True,
                                                ic=True)
                # Move the new object to the curve's root CV position.
                mc.move(transf[0], transf[1], transf[2], newObjectDAG[0],
                        relative=True)
                if mc.checkBox(self.uiAlign, q=True, v=True):
                    self.align(transf, newObjectDAG[0])
                if Scale:
                    ScaleXYZ = (round(rand.uniform(Scale[0], Scale[1]), 3))
                    mc.scale(ScaleXYZ, ScaleXYZ, ScaleXYZ, newObjectDAG[0],
                             relative=True)
                if RotateX:
                    randx = (round(rand.uniform(RotateX[0], RotateX[1]), 3))
                    randy = (round(rand.uniform(RotateY[0], RotateY[1]), 3))
                    randz = (round(rand.uniform(RotateZ[0], RotateZ[1]), 3))
                    mc.rotate(randx, randy, randz, newObjectDAG[0], os=True,
                              r=True, rotateXYZ=True)
                # Track which source object produced which replacement.
                if not dag in self.classIf.keys():
                    self.classIf.update({dag: [newObjectDAG[0]]})
                else:
                    self.classIf[dag].append(newObjectDAG[0])
                # The temporary curve is no longer needed.
                mc.delete(transformNode)
        mc.delete(self.transform)
        self.transform = ''
        self.newPgYetiGroom = ''
    # Group the replacements per source object under one master group.
    groupsName = mc.group(empty=True, name='group#')
    for key in self.classIf.keys():
        keyName = key.split('|')[-1]
        groupName = mc.group(empty=True, name=keyName)
        mc.parent(self.classIf[key], groupName, relative=True)
        mc.parent(groupName, groupsName, relative=True)
def __publish_yeticache(self, item, output, work_template, primary_publish_path,
                        sg_task, comment, thumbnail_path, progress_cb):
    """
    Export a Yeti fur cache (.fur sequence) for the given fur node and
    register the result with Shotgun.

    :param item:                    The item to publish
    :param output:                  The output definition to publish with
    :param work_template:           The work template for the current scene
    :param primary_publish_path:    The path to the primary published file
    :param sg_task:                 The Shotgun task we are publishing for
    :param comment:                 The publish comment/description
    :param thumbnail_path:          The path to the publish thumbnail
    :param progress_cb:             A callback that can be used to report progress
    """
    # determine the publish info to use
    #
    progress_cb(10, "Determining publish details")

    # the file and folder name is derived from the fur node
    furNodeName = item['name']

    # get the current scene path and extract fields from it
    # using the work template:
    scene_path = os.path.abspath(cmds.file(query=True, sn=True))
    fields = work_template.get_fields(scene_path)
    publish_version = fields["version"]
    tank_type = output["tank_type"]

    # create the publish path by applying the fields
    # with the publish template:
    publish_template = output["publish_template"]

    # publish path looks something like this at the time of writing
    # C:\mnt\workspace\projects\unPE\spt\tests\furPipeDev\fx\pub\fur\008
    # this is what goes in shotgun, and i'll use it when loading in the
    # results at the other end
    sg_publish_path = publish_template.apply_fields(fields)

    # for performance i think it's best to put each sequence of fur cache
    # files in a subdirectory (we can more quickly get the list of caches
    # from a dir listing that way)
    # the final publish path will look like this
    #
    # C:\mnt\workspace\projects\unPE\spt\tests\furPipeDev\fx\pub\fur\008\namespace_furNodeShape\namespace_furnodeShape.####.fur
    basename = furNodeName.replace(":","_")
    filename = basename + ".%04d.fur"
    actual_publish_path = os.path.join(sg_publish_path, basename, filename)

    # shotgun publish name will be the rest of the path, past the version
    # eg namespace_furNodeShape/namespace_furnodeShape.####.fur
    #sg_publish_name = "%s/%s" % (basename, filename)

    # determine the publish name (this is kinda the element name master/fur):
    publish_name = fields.get("name")
    if not publish_name:
        publish_name = os.path.basename(sg_publish_path)

    # Find additional info from the scene:
    progress_cb(10, "Analysing scene")

    # for the given fur node work out the range to cache. this is the
    # minimum of playback start and the earliest simulation start time for
    # any of the connected grooms
    start_frame = int(cmds.playbackOptions(q=True, min=True))
    end_frame = int(cmds.playbackOptions(q=True, max=True))

    # get the groom nodes. to find an appropriate start frame
    # can't use the yeti command because it doesn't return the namespace of
    # the object
    # groomNodes = cmds.pgYetiCommand(furNodeName, listGrooms=True)
    groomNodes = [n for n in cmds.listConnections(furNodeName, sh=True)
                  if cmds.nodeType(n)=="pgYetiGroom"]
    for groomNode in groomNodes:
        if cmds.getAttr(groomNode+".doSimulation"):
            start_frame = min([start_frame,
                               cmds.getAttr(groomNode+".simStartFrame")])

    # ensure the publish folder exists:
    publish_folder = os.path.dirname(actual_publish_path)
    self.parent.ensure_folder_exists(publish_folder)

    # run the command:
    progress_cb(20, "Exporting Yeti Cache")
    self.parent.log_info("Executing command: pgYetiCommand(%s,%s,%s)"\
                         % ( actual_publish_path, start_frame, end_frame ) )
    cmds.pgYetiCommand(furNodeName,
                       writeCache=actual_publish_path,
                       range=(start_frame, end_frame),
                       samples=3,
                       updateViewport=False)

    # register the publish:
    progress_cb(75, "Registering the publish")
    # note: the directory (sg_publish_path), not the file sequence,
    # is what gets registered in Shotgun
    args = {
        "tk": self.parent.tank,
        "context": self.parent.context,
        "comment": comment,
        "path": sg_publish_path,
        "name": publish_name, # "fur"
        "version_number": publish_version,
        "thumbnail_path": thumbnail_path,
        "task": sg_task,
        "dependency_paths": [primary_publish_path],
        "published_file_type":tank_type,
    }
    tank.util.register_publish(**args)
def get_yeti_resources(self, node):
    """Get all resource file paths

    If a texture is a sequence it gathers all sibling files to ensure
    the texture sequence is complete.

    References can be used in the Yeti graph, this means that it is
    possible to load previously caches files. The information will need
    to be stored and, if the file not publish, copied to the resource
    folder.

    Args:
        node (str): node name of the pgYetiMaya node

    Returns:
        list
    """
    resources = []

    image_search_paths = cmds.getAttr("{}.imageSearchPath".format(node))

    texture_filenames = []
    if image_search_paths:
        # TODO: Somehow this uses OS environment path separator, `:` vs `;`
        # Later on check whether this is pipeline OS cross-compatible.
        image_search_paths = [p for p in
                              image_search_paths.split(os.path.pathsep) if p]

        # find all ${TOKEN} tokens and replace them with $TOKEN env. variable
        image_search_paths = self._replace_tokens(image_search_paths)

        # List all related textures
        texture_filenames = cmds.pgYetiCommand(node, listTextures=True)
        self.log.info("Found %i texture(s)" % len(texture_filenames))

    # Get all reference nodes
    reference_nodes = cmds.pgYetiGraph(node,
                                       listNodes=True,
                                       type="reference")
    self.log.info("Found %i reference node(s)" % len(reference_nodes))

    # NOTE(review): texture_filenames is only populated inside the
    # `if image_search_paths:` branch above, so this check can never
    # fire as written — confirm whether listTextures should run
    # unconditionally.
    if texture_filenames and not image_search_paths:
        raise ValueError("pgYetiMaya node '%s' is missing the path to the "
                         "files in the 'imageSearchPath "
                         "atttribute'" % node)

    # Collect all texture files
    # find all ${TOKEN} tokens and replace them with $TOKEN env. variable
    texture_filenames = self._replace_tokens(texture_filenames)
    for texture in texture_filenames:
        files = []
        if os.path.isabs(texture):
            self.log.debug("Texture is absolute path, ignoring "
                           "image search paths for: %s" % texture)
            files = self.search_textures(texture)
        else:
            for root in image_search_paths:
                filepath = os.path.join(root, texture)
                files = self.search_textures(filepath)
                if files:
                    # Break out on first match in search paths..
                    break

        if not files:
            self.log.warning("No texture found for: %s "
                             "(searched: %s)" % (texture,
                                                 image_search_paths))

        item = {"files": files,
                "source": texture,
                "node": node}
        resources.append(item)

    # For now validate that every texture has at least a single file
    # resolved. Since a 'resource' does not have the requirement of having
    # a `files` explicitly mapped it's not explicitly validated.
    # TODO: Validate this as a validator
    invalid_resources = []
    for resource in resources:
        if not resource['files']:
            invalid_resources.append(resource)
    if invalid_resources:
        raise RuntimeError("Invalid resources")

    # Collect all referenced files
    for reference_node in reference_nodes:
        ref_file = cmds.pgYetiGraph(node,
                                    node=reference_node,
                                    param="reference_file",
                                    getParamValue=True)

        # Create resource dict
        item = {"source": ref_file,
                "node": node,
                "graphnode": reference_node,
                "param": "reference_file",
                "files": []}

        ref_file_name = os.path.basename(ref_file)
        # %04d in the name marks a frame sequence of cache files.
        if "%04d" in ref_file_name:
            item["files"] = self.get_sequence(ref_file)
        else:
            if os.path.exists(ref_file) and os.path.isfile(ref_file):
                item["files"] = [ref_file]

        if not item["files"]:
            self.log.warning("Reference node '%s' has no valid file "
                             "path set: %s" % (reference_node, ref_file))
            # TODO: This should allow to pass and fail in Validator instead
            raise RuntimeError("Reference node must be a full file path!")

        resources.append(item)

    return resources
def process(self, instance):
    """Extract Yeti fur caches for *instance* and register them as the
    'fur' and 'fursettings' representations.

    Args:
        instance: publish instance containing pgYetiMaya nodes.

    Raises:
        RuntimeError: if the instance contains no pgYetiMaya nodes, or if
            the cache command produced no .fur files.
    """
    yeti_nodes = cmds.ls(instance, type="pgYetiMaya")
    if not yeti_nodes:
        raise RuntimeError("No pgYetiMaya nodes found in the instance")

    # Define extract output file path
    dirname = self.staging_dir(instance)

    # Yeti related staging dirs
    data_file = os.path.join(dirname, "yeti.fursettings")

    # Collect information for writing cache
    start_frame = instance.data.get("frameStart")
    end_frame = instance.data.get("frameEnd")
    # BUGFIX: default to 0 so a missing "preroll" key does not raise a
    # TypeError on the `> 0` comparison below (get() returned None).
    preroll = instance.data.get("preroll", 0)
    if preroll > 0:
        start_frame -= preroll

    kwargs = {}
    samples = instance.data.get("samples", 0)
    if samples == 0:
        # Yeti expects sample times as a space-separated string.
        kwargs.update({"sampleTimes": "0.0 1.0"})
    else:
        kwargs.update({"samples": samples})

    self.log.info(
        "Writing out cache {} - {}".format(start_frame, end_frame))
    # Start writing the files for snap shot
    # <NAME> will be replaced by the Yeti node name
    path = os.path.join(dirname, "<NAME>.%04d.fur")
    cmds.pgYetiCommand(yeti_nodes,
                       writeCache=path,
                       range=(start_frame, end_frame),
                       updateViewport=False,
                       generatePreview=False,
                       **kwargs)

    cache_files = [x for x in os.listdir(dirname) if x.endswith(".fur")]
    # BUGFIX: fail with a clear message instead of an IndexError below
    # when the cache command produced nothing.
    if not cache_files:
        raise RuntimeError(
            "pgYetiCommand did not write any .fur files to "
            "{}".format(dirname))

    self.log.info("Writing metadata file")
    settings = instance.data.get("fursettings", None)
    if settings is not None:
        with open(data_file, "w") as fp:
            json.dump(settings, fp, ensure_ascii=False)

    # build representations
    if "representations" not in instance.data:
        instance.data["representations"] = []

    self.log.info("cache files: {}".format(cache_files[0]))

    # A single file is stored as a plain string, a sequence as a list.
    instance.data["representations"].append(
        {
            'name': 'fur',
            'ext': 'fur',
            'files': cache_files[0] if len(cache_files) == 1 else cache_files,
            'stagingDir': dirname,
            'anatomy_template': 'publish',
            'frameStart': int(start_frame),
            'frameEnd': int(end_frame)
        }
    )
    instance.data["representations"].append(
        {
            'name': 'fursettings',
            'ext': 'fursettings',
            'files': os.path.basename(data_file),
            'stagingDir': dirname,
            'anatomy_template': 'publish'
        }
    )

    self.log.info("Extracted {} to {}".format(instance, dirname))
def export_fur(file):
    """Write a single-frame Yeti cache for every node in the export set.

    Each node gets its own cache path, built by swapping the extension of
    *file* for ".<node>.fur".
    """
    export_nodes = cmds.sets(defaults._yeti_export_set_, q=True)
    extension = '.' + file.split('.')[-1]
    for yeti_node in export_nodes:
        cache_path = file.replace(extension, '.{}.fur'.format(yeti_node))
        cmds.pgYetiCommand(yeti_node, writeCache=cache_path,
                           range=(0, 0), samples=1)
def _cache_yetis(yetis, apply_on_complete=False, samples=3, verbose=0):
    """Cache a list of yeti nodes.

    Args:
        yetis (HFnDependencyNode list): nodes to cache
        apply_on_complete (bool): apply cache on completion
        samples (int): samples per frame
        verbose (int): print process data

    Returns:
        the output cache objects produced by _prepare_yetis_and_outputs
    """
    # Imported lazily to avoid a circular import with the UI module.
    from . import yeti_ui
    print 'CACHE YETIS', yetis
    _work = tk2.cur_work()
    _yetis, _outs, _namespaces = _prepare_yetis_and_outputs(
        yetis=yetis, work=_work)

    # Get cache path - if multiple namespaces, pgYetiCommand writes one
    # <NAME>-substituted sequence per node, so cache to a tmp dir first.
    _tmp_fmt = abs_path('{}/yetiTmp/<NAME>.%04d.cache'.format(
        tempfile.gettempdir()))
    if len(_yetis) > 1:
        _cache_path = _tmp_fmt
        # Start from an empty tmp dir so stale frames can't leak through.
        _tmp_dir = Dir(os.path.dirname(_tmp_fmt))
        _tmp_dir.delete(force=True)
        _tmp_dir.test_path()
    else:
        assert len(_outs) == 1
        _cache_path = _outs[0].path
    print "CACHE PATH", _cache_path

    # Generate caches
    dprint('GENERATING CACHES', _cache_path)
    print ' - SAMPLES', samples
    # Reset cache-related plugs so the nodes generate from their inputs.
    for _yeti in _yetis:
        _yeti.plug('cacheFileName').set_val('')
        _yeti.plug('fileMode').set_val(0)
        _yeti.plug('overrideCacheWithInputs').set_val(False)
    # pgYetiCommand caches the current selection over the host frame range.
    cmds.select(_yetis)
    cmds.pgYetiCommand(
        writeCache=_cache_path, range=host.t_range(), samples=samples)
    dprint('GENERATED CACHES', _cache_path)

    # Move tmp caches to their per-node output locations
    if len(_yetis) > 1:
        dprint('MOVING CACHES FROM TMP')
        for _yeti, _out in safe_zip(_yetis, _outs):
            print ' - MOVING', _out.path
            # Yeti substituted the node name (":" becomes "_") for <NAME>.
            _name = str(_yeti).replace(":", "_")
            _tmp_seq = Seq(_tmp_fmt.replace('<NAME>', _name))
            for _frame, _tmp_path in safe_zip(
                    _tmp_seq.get_frames(), _tmp_seq.get_paths()):
                lprint(' -', _frame, _tmp_path, verbose=verbose)
                shutil.move(_tmp_path, _out[_frame])

    # Apply cache to yeti nodes
    if apply_on_complete:
        dprint('APPLYING CACHES TO YETIS')
        for _yeti, _cache in safe_zip(_yetis, _outs):
            apply_cache(cache=_cache, yeti=_yeti)

    qt.notify(
        'Cached {:d} yeti node{}.\n\nSee script editor for details.'.format(
            len(_yetis), get_plural(_yetis)),
        title='Cache complete', icon=yeti_ui.ICON, parent=yeti_ui.DIALOG)

    return _outs
def __publish_yeticache(self, item, output, work_template, primary_publish_path,
                        sg_task, comment, thumbnail_path, progress_cb):
    """
    Export a Yeti fur cache (.fur sequence) for the given fur node and
    register it with Shotgun.

    :param item:                    The item to publish
    :param output:                  The output definition to publish with
    :param work_template:           The work template for the current scene
    :param primary_publish_path:    The path to the primary published file
    :param sg_task:                 The Shotgun task we are publishing for
    :param comment:                 The publish comment/description
    :param thumbnail_path:          The path to the publish thumbnail
    :param progress_cb:             A callback that can be used to report progress
    """
    # determine the publish info to use
    #
    progress_cb(10, "Determining publish details")

    # the file and folder name is derived from the fur node
    furNodeName = item['name']

    # get the current scene path and extract fields from it
    # using the work template:
    scene_path = os.path.abspath(cmds.file(query=True, sn=True))
    fields = work_template.get_fields(scene_path)
    publish_version = fields["version"]
    tank_type = output["tank_type"]

    # create the publish path by applying the fields
    # with the publish template:
    publish_template = output["publish_template"]

    # publish path looks something like this at the time of writing
    # C:\mnt\workspace\projects\unPE\spt\tests\furPipeDev\fx\pub\fur\008
    # this is what goes in shotgun, and i'll use it when loading in the
    # results at the other end
    sg_publish_path = publish_template.apply_fields(fields)

    # for performance i think it's best to put each sequence of fur cache
    # files in a subdirectory (we can more quickly get the list of caches
    # from a dir listing that way)
    # the final publish path will look like this
    #
    # C:\mnt\workspace\projects\unPE\spt\tests\furPipeDev\fx\pub\fur\008\namespace_furNodeShape\namespace_furnodeShape.####.fur
    basename = furNodeName.replace(":", "_")
    filename = basename + ".%04d.fur"
    actual_publish_path = os.path.join(sg_publish_path, basename, filename)

    # shotgun publish name will be the rest of the path, past the version
    # eg namespace_furNodeShape/namespace_furnodeShape.####.fur
    #sg_publish_name = "%s/%s" % (basename, filename)

    # determine the publish name (this is kinda the element name master/fur):
    publish_name = fields.get("name")
    if not publish_name:
        publish_name = os.path.basename(sg_publish_path)

    # Find additional info from the scene:
    progress_cb(10, "Analysing scene")

    # for the given fur node work out the range to cache. this is the
    # minimum of playback start and the earliest simulation start time for
    # any of the connected grooms
    start_frame = int(cmds.playbackOptions(q=True, min=True))
    end_frame = int(cmds.playbackOptions(q=True, max=True))

    # get the groom nodes. to find an appropriate start frame
    # can't use the yeti command because it doesn't return the namespace of
    # the object
    # groomNodes = cmds.pgYetiCommand(furNodeName, listGrooms=True)
    groomNodes = [
        n for n in cmds.listConnections(furNodeName, sh=True)
        if cmds.nodeType(n) == "pgYetiGroom"
    ]
    for groomNode in groomNodes:
        if cmds.getAttr(groomNode + ".doSimulation"):
            start_frame = min(
                [start_frame, cmds.getAttr(groomNode + ".simStartFrame")])

    # ensure the publish folder exists:
    publish_folder = os.path.dirname(actual_publish_path)
    self.parent.ensure_folder_exists(publish_folder)

    # run the command:
    progress_cb(20, "Exporting Yeti Cache")
    self.parent.log_info("Executing command: pgYetiCommand(%s,%s,%s)"\
                         % ( actual_publish_path, start_frame, end_frame ) )
    cmds.pgYetiCommand(furNodeName,
                       writeCache=actual_publish_path,
                       range=(start_frame, end_frame),
                       samples=3,
                       updateViewport=False)

    # register the publish:
    progress_cb(75, "Registering the publish")
    # note: the directory (sg_publish_path), not the file sequence,
    # is what gets registered in Shotgun
    args = {
        "tk": self.parent.tank,
        "context": self.parent.context,
        "comment": comment,
        "path": sg_publish_path,
        "name": publish_name, # "fur"
        "version_number": publish_version,
        "thumbnail_path": thumbnail_path,
        "task": sg_task,
        "dependency_paths": [primary_publish_path],
        "published_file_type": tank_type,
    }
    tank.util.register_publish(**args)