def extract_alembic(self, nodes, outpath):
    import maya.cmds as cmds
    from reveries.maya import capsule, io, lib

    with contextlib.nested(
        capsule.no_undo(),
        capsule.no_display_layers(nodes),
        capsule.no_smooth_preview(),
        capsule.maintained_selection(),
        capsule.without_extension(),
    ):
        cmds.select(nodes, noExpand=True)

        frame = cmds.currentTime(query=True)
        io.export_alembic(
            outpath,
            frame,
            frame,
            selection=True,
            renderableOnly=True,
            writeCreases=True,
            worldSpace=True,
            uvWrite=True,
            writeUVSets=True,
            attr=[
                lib.AVALON_ID_ATTR_LONG,
            ],
            attrPrefix=[
                "ai",  # Write out Arnold attributes
            ],
        )

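# (NOTE) All of the extractors here enter their capsules through
#        `contextlib.nested`, which exists only in Python 2 (it was
#        removed in Python 3.2, and Maya ships Python 3 from 2022 on).
#        Below is a minimal sketch of the same stacked-context pattern
#        under Python 3, assuming the `capsule` helpers behave the
#        same there; the function name `select_for_extraction` is
#        hypothetical, not part of the pipeline.
import contextlib

from maya import cmds
from reveries.maya import capsule


def select_for_extraction(nodes):
    # ExitStack replaces contextlib.nested() in Python 3: contexts
    # entered on the stack are exited in reverse order, exactly like
    # the nested() calls above.
    with contextlib.ExitStack() as stack:
        stack.enter_context(capsule.no_undo())
        stack.enter_context(capsule.no_display_layers(nodes))
        stack.enter_context(capsule.no_smooth_preview())
        stack.enter_context(capsule.maintained_selection())
        stack.enter_context(capsule.without_extension())

        cmds.select(nodes, noExpand=True)
        # ... perform the export here, inside all five contexts ...
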
def extract_mayabinary(self, nodes, outpath):
    import maya.cmds as cmds
    from reveries.maya import capsule

    geo_id_and_hash = None

    with contextlib.nested(
        capsule.no_undo(),
        capsule.no_display_layers(nodes),
        capsule.no_smooth_preview(),
        capsule.maintained_selection(),
        capsule.without_extension(),
    ):
        mesh_nodes = cmds.ls(nodes,
                             type="mesh",
                             noIntermediate=True,
                             long=True)
        clay_shader = "initialShadingGroup"

        # Perform extraction
        cmds.select(nodes, noExpand=True)

        with capsule.undo_chunk_when_no_undo():

            # Remove mesh history to strip out all intermediate nodes
            transforms = cmds.ls(nodes, type="transform")
            cmds.delete(transforms, constructionHistory=True)
            # Remove all stray shapes, ensuring no intermediate nodes
            all_meshes = set(cmds.ls(nodes, type="mesh", long=True))
            cmds.delete(list(all_meshes - set(mesh_nodes)))

            geo_id_and_hash = self.hash(set(mesh_nodes))

            with capsule.assign_shader(mesh_nodes,
                                       shadingEngine=clay_shader):
                cmds.file(
                    outpath,
                    force=True,
                    typ="mayaBinary",
                    exportSelectedStrict=True,
                    preserveReferences=False,
                    # Shader assignment is the responsibility of
                    # riggers (for animators) and lookdev (for
                    # rendering).
                    shader=False,
                    # Construction history is inherited from the
                    # collection; disabling it here enables a
                    # selective export of only the nodes relevant
                    # to this particular plug-in.
                    constructionHistory=False,
                    channels=False,
                    constraints=False,
                    expressions=False,
                )

    return geo_id_and_hash

def extract(self):
    with contextlib.nested(
        capsule.no_undo(),
        capsule.no_display_layers(self.member),
        capsule.no_smooth_preview(),
        capsule.maintained_selection(),
        capsule.without_extension(),
    ):
        super(ExtractModel, self).extract()

def extract(self):
    if self.data.get("staticCache"):
        self.start_frame = cmds.currentTime(query=True)
        self.end_frame = cmds.currentTime(query=True)
    else:
        context_data = self.context.data
        self.start_frame = context_data.get("startFrame")
        self.end_frame = context_data.get("endFrame")

    with contextlib.nested(
        capsule.no_undo(),
        capsule.no_refresh(),
        capsule.evaluation("off"),
        capsule.maintained_selection(),
    ):
        cmds.select(self.data["outCache"], replace=True)
        super(ExtractPointCache, self).extract()

def export_fbx(self, outpath, cachepath, cachename, nodes, keep_namespace):
    from reveries.maya import io, capsule
    from maya import cmds

    with contextlib.nested(
        capsule.no_undo(),
        capsule.no_refresh(),
        capsule.evaluation("off"),
        capsule.maintained_selection(),
    ):
        cmds.select(nodes, replace=True)
        with capsule.StripNamespace([] if keep_namespace else nodes):
            with io.export_fbx_set_pointcache("FBXCacheSET"):
                io.export_fbx(cachepath)

            io.wrap_fbx(outpath, [(cachename, "ROOT")])

def export_gpu(self, outpath, cachepath, cachename,
               start, end, assemblies, attr_values):
    from reveries.maya import io, capsule
    from maya import cmds

    with contextlib.nested(
        capsule.no_undo(),
        capsule.no_refresh(),
        capsule.evaluation("off"),
        capsule.maintained_selection(),
    ):
        cmds.select(assemblies, replace=True, noExpand=True)

        with contextlib.nested(
            capsule.attribute_values(attr_values),
            # Mute animated visibility channels
            capsule.attribute_mute(list(attr_values.keys())),
        ):
            io.export_gpu(cachepath, start, end)
            io.wrap_gpu(outpath, [(cachename, "ROOT")])

def extract_Ass(self):
    # Ensure mtoa is loaded
    cmds.loadPlugin("mtoa", quiet=True)

    package_path = self.create_package()
    cache_file = self.file_name("ass")
    cache_path = os.path.join(package_path, cache_file)

    with contextlib.nested(
        capsule.no_undo(),
        capsule.no_refresh(),
        capsule.evaluation("off"),
        capsule.maintained_selection(),
        capsule.ref_edit_unlock(),
        remove_file_env_path(self.data),
    ):
        cmds.select(self.member, replace=True)
        asses = cmds.arnoldExportAss(filename=cache_path,
                                     selected=True,
                                     startFrame=self.data["startFrame"],
                                     endFrame=self.data["endFrame"],
                                     frameStep=self.data["byFrameStep"],
                                     shadowLinks=1,
                                     lightLinks=1,
                                     expandProcedurals=True,
                                     # Shapes (8) + Shaders (16), per
                                     # the mask scheme noted below
                                     mask=24)

    use_sequence = self.data["startFrame"] != self.data["endFrame"]
    entry_file = os.path.basename(asses[0])
    self.add_data({
        "entryFileName": entry_file,
        "useSequence": use_sequence,
    })
    if use_sequence:
        self.add_data({
            "startFrame": self.data["startFrame"],
            "endFrame": self.data["endFrame"],
        })

def extract_mayaBinary(self, packager):
    # Define extract output file path
    entry_file = packager.file_name("mb")
    package_path = packager.create_package()
    entry_path = os.path.join(package_path, entry_file)

    # Perform extraction
    self.log.info("Performing extraction..")

    with contextlib.nested(
        capsule.no_undo(),
        capsule.no_display_layers(self.member),
        maya.maintained_selection(),
    ):
        with capsule.undo_chunk_when_no_undo():

            """(DEPRECATED, keeping namespaces)
            # - Remove referenced subset's namespace before exporting
            #   (Not keeping model namespace)
            referenced_namespace = self.context.data["referencedNamespace"]
            for namespace in reversed(sorted(list(referenced_namespace))):
                if not cmds.namespace(exists=namespace):
                    continue

                try:
                    cmds.namespace(removeNamespace=namespace,
                                   mergeNamespaceWithRoot=True)
                except Exception:
                    # Reload the reference and try again. The
                    # namespace of the reference can be removed
                    # after reload.
                    # (TODO) This publish workflow might not be a
                    #        good approach...
                    ref_node = lib.reference_node_by_namespace(namespace)
                    # There must be a reference node, since that's
                    # the main reason why the namespace could not
                    # be removed.
                    cmds.file(loadReference=ref_node)
                    cmds.namespace(removeNamespace=namespace,
                                   mergeNamespaceWithRoot=True)
            """

            # - Remove loaded container members
            # If the mesh of a loaded model has been copied and
            # edited (e.g. mesh faces detached and separated), the
            # model container may end up with a lot of facet
            # members, meaning there are DAG connections that would
            # cause the model container to be exported as well,
            # which we don't want. So we simply remove them all.
            for container in self.context.data["RootContainers"]:
                cmds.delete(container)

            mesh_nodes = cmds.ls(self.member,
                                 type="mesh",
                                 noIntermediate=True,
                                 long=True)
            geo_id_and_hash = self.hash(set(mesh_nodes))
            packager.add_data({"modelProfile": geo_id_and_hash})

            cmds.select(cmds.ls(self.member), noExpand=True)
            cmds.file(entry_path,
                      force=True,
                      typ="mayaBinary",
                      exportSelected=True,
                      preserveReferences=False,
                      channels=True,
                      constraints=True,
                      expressions=True,
                      constructionHistory=True,
                      shader=True)

    packager.add_data({
        "entryFileName": entry_file,
    })

    self.log.info("Extracted {name} to {path}".format(
        name=self.data["subset"],
        path=entry_path))

def export_ass(self, nodes, outpath, file_node_attrs, has_yeti,
               start, end, step, expand_procedurals=True):
    from maya import cmds, mel
    from reveries.maya import arnold, capsule

    # Ensure the Arnold render options node is created
    arnold.utils.create_options()

    render_settings = {
        # Disable Auto TX update and use existing TX instead
        "defaultArnoldRenderOptions.autotx": False,
        "defaultArnoldRenderOptions.use_existing_tiled_textures": True,
        # Ensure frame padding == 4
        "defaultRenderGlobals.extensionPadding": 4,
    }

    # Yeti
    if has_yeti:
        # In Deadline, this is a script job instead of a rendering
        # job, so the `pgYetiPreRender` Pre-Render MEL will not be
        # triggered. We need to call it ourselves, or Yeti will
        # complain about the cache temp dir not existing.
        mel.eval("pgYetiPreRender;")

    with contextlib.nested(
        capsule.no_undo(),
        capsule.no_refresh(),
        capsule.evaluation("off"),
        capsule.maintained_selection(),
        capsule.ref_edit_unlock(),
        # (NOTE) Ensure attributes are unlocked
        capsule.attribute_states(file_node_attrs.keys(), lock=False),
        # Change to published paths
        capsule.attribute_values(file_node_attrs),
        # Fixed render settings
        capsule.attribute_values(render_settings),
    ):
        cmds.select(nodes, replace=True)
        asses = cmds.arnoldExportAss(
            filename=outpath,
            selected=True,
            startFrame=start,
            endFrame=end,
            frameStep=step,
            expandProcedurals=expand_procedurals,
            boundingBox=True,
            # Mask:
            #   Shapes,
            #   Shaders,
            #   Override Nodes,
            #   Operators,
            #
            # mask=4152,  # No Color Manager
            mask=6200)  # With Color Manager

        # Change to environment-variable embedded paths
        root = avalon.api.registered_root().replace("\\", "/")
        project = avalon.api.Session["AVALON_PROJECT"]

        for ass in asses:
            lines = list()
            has_change = False

            with open(ass, "r") as assf:
                for line in assf.readlines():
                    if line.startswith(" filename "):
                        line = line.replace(root, "[AVALON_PROJECTS]", 1)
                        line = line.replace(project, "[AVALON_PROJECT]", 1)
                        has_change = True
                    lines.append(line)

            # Remove color manager
            # (NOTE): If the Color Manager is included, an error
            #         may be raised when rendering in Houdini or
            #         another DCC.
            try:
                s = lines.index("color_manager_syncolor\n")
            except ValueError:
                # No color manager found
                pass
            else:
                e = lines.index("}\n", s) + 1
                lines = lines[:s] + lines[e:]
                has_change = True

            # Re-write
            if has_change:
                with open(ass, "w") as assf:
                    assf.write("".join(lines))

def export_ass(data, start, end, step):
    arnold_tx_settings = {
        "defaultArnoldRenderOptions.autotx": False,
        "defaultArnoldRenderOptions.use_existing_tiled_textures": True,
    }

    # Yeti
    if data["hasYeti"]:
        # In Deadline, this is a script job instead of a rendering
        # job, so the `pgYetiPreRender` Pre-Render MEL will not be
        # triggered. We need to call it ourselves, or Yeti will
        # complain about the cache temp dir not existing.
        mel.eval("pgYetiPreRender;")

    with contextlib.nested(
        capsule.no_undo(),
        capsule.no_refresh(),
        capsule.evaluation("off"),
        capsule.maintained_selection(),
        capsule.ref_edit_unlock(),
        # (NOTE) Ensure attributes are unlocked
        capsule.attribute_states(data["fileNodeAttrs"].keys(),
                                 lock=False),
        # Change to published paths
        capsule.attribute_values(data["fileNodeAttrs"]),
        # Disable Auto TX update and use existing TX instead
        capsule.attribute_values(arnold_tx_settings),
    ):
        cmds.select(data["member"], replace=True)
        asses = cmds.arnoldExportAss(
            filename=data["cachePath"],
            selected=True,
            startFrame=start,
            endFrame=end,
            frameStep=step,
            expandProcedurals=True,
            boundingBox=True,
            # Mask:
            #   Shapes,
            #   Shaders,
            #   Override Nodes,
            #   Operators,
            #
            # (NOTE): If the Color Manager is included, an error
            #         may be raised when rendering in Houdini or
            #         another DCC.
            # mask=6200,  # With Color Manager
            mask=4152)  # No Color Manager

        # Change to environment-variable embedded paths
        root = avalon.api.registered_root().replace("\\", "/")
        project = avalon.api.Session["AVALON_PROJECT"]

        for ass in asses:
            lines = list()
            has_change = False

            with open(ass, "r") as assf:
                for line in assf.readlines():
                    if line.startswith(" filename "):
                        line = line.replace(root, "[AVALON_PROJECTS]", 1)
                        line = line.replace(project, "[AVALON_PROJECT]", 1)
                        has_change = True
                    lines.append(line)

            if has_change:
                with open(ass, "w") as assf:
                    assf.write("".join(lines))

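# (NOTE) Both `export_ass` variants above post-process the exported
#        .ass files line by line. Below is a minimal standalone sketch
#        of that token substitution, assuming the same ` filename `
#        line format Arnold writes; `remap_ass_paths` is a
#        hypothetical name, not part of the pipeline.
def remap_ass_paths(ass_file, root, project):
    """Replace absolute project paths in an exported .ass file with
    environment-variable tokens.

    `root` and `project` mirror avalon.api.registered_root() and
    avalon.api.Session["AVALON_PROJECT"] above.
    """
    lines = list()
    has_change = False

    with open(ass_file, "r") as f:
        for line in f:
            if line.startswith(" filename "):
                line = line.replace(root, "[AVALON_PROJECTS]", 1)
                line = line.replace(project, "[AVALON_PROJECT]", 1)
                has_change = True
            lines.append(line)

    # Only rewrite the file on disk if something actually changed
    if has_change:
        with open(ass_file, "w") as f:
            f.write("".join(lines))

    return has_change
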
def export_alembic(self, root, outpath, start, end, euler_filter):
    from reveries.maya import io, lib, capsule
    from maya import cmds

    with contextlib.nested(
        capsule.no_undo(),
        capsule.no_refresh(),
        capsule.evaluation("off"),
        capsule.maintained_selection(),
    ):
        # Selection may change when there are duplicate-named nodes,
        # which require duplicating to resolve
        with capsule.delete_after() as delete_bin:

            # (NOTE) We need to check for duplicate-named nodes, or
            #        an error will be raised during Alembic export.
            result = lib.ls_duplicated_name(root)
            duplicated = [n for m in result.values() for n in m]
            if duplicated:
                self.log.info("Duplicate named nodes found, resolving...")
                # Duplicate them so we have uniquely named new nodes
                unique_named = list()
                for node in duplicated:
                    new_nodes = cmds.duplicate(node,
                                               inputConnections=True,
                                               renameChildren=True)
                    new_nodes = cmds.ls(new_nodes, long=True)
                    unique_named.append(new_nodes[0])
                    # New nodes will be deleted after the export
                    delete_bin.extend(new_nodes)

                # Replace duplicate-named nodes with uniquely named ones
                root = list(set(root) - set(duplicated)) + unique_named

            for node in set(root):
                # (NOTE) If a descendant is instanced, it will appear
                #        only once in the returned list.
                root += cmds.listRelatives(node,
                                           allDescendents=True,
                                           fullPath=True,
                                           noIntermediate=True) or []

            root = list(set(root))
            cmds.select(root, replace=True, noExpand=True)

            def _export_alembic():
                io.export_alembic(
                    outpath,
                    start,
                    end,
                    selection=True,
                    renderableOnly=True,
                    writeVisibility=True,
                    writeCreases=True,
                    worldSpace=True,
                    eulerFilter=euler_filter,
                    attr=[
                        lib.AVALON_ID_ATTR_LONG,
                    ],
                    attrPrefix=[
                        "ai",  # Write out Arnold attributes
                        "avnlook_",  # Write out lookDev controls
                    ],
                )

            # (NOTE) Was `while auto_retry:`, which exited after the
            #        first failure without ever retrying; `while True`
            #        lets the decremented counter gate the retry.
            auto_retry = 1
            while True:
                try:
                    _export_alembic()
                except RuntimeError as err:
                    if auto_retry:
                        # (NOTE) Auto re-try the export. For unknown
                        #        reasons, some artists may encounter a
                        #        RuntimeError when exporting, yet
                        #        re-running the publish without any
                        #        change resolves it.
                        auto_retry -= 1
                        self.log.warning(err)
                        self.log.warning("Retrying...")
                    else:
                        raise err
                else:
                    break

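# (NOTE) The single-retry loop above can be factored into a reusable
#        helper; a minimal sketch under the same assumptions, where
#        `retry_once` is a hypothetical name, not part of `reveries`.
#        The loop in `export_alembic` would then reduce to
#        `retry_once(_export_alembic, log=self.log)`.
def retry_once(export_func, log=None):
    # Try the export once; on RuntimeError, warn and retry exactly
    # one more time, letting a second failure propagate.
    try:
        return export_func()
    except RuntimeError as err:
        if log is not None:
            log.warning(err)
            log.warning("Retrying...")
        return export_func()
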
def process(self, instance):
    from maya import cmds
    from avalon import maya
    from reveries import utils
    from reveries.maya import capsule

    staging_dir = utils.stage_dir()
    filename = "%s.mb" % instance.data["subset"]
    outpath = "%s/%s" % (staging_dir, filename)

    # Perform extraction
    self.log.info("Performing extraction..")

    with contextlib.nested(
        capsule.no_undo(),
        capsule.no_display_layers(instance[:]),
        maya.maintained_selection(),
    ):
        with capsule.undo_chunk_when_no_undo():

            """(DEPRECATED, keeping namespaces)
            # - Remove referenced subset's namespace before exporting
            #   (Not keeping model namespace)
            referenced_namespace = self.context.data["referencedNamespace"]
            for namespace in reversed(sorted(list(referenced_namespace))):
                if not cmds.namespace(exists=namespace):
                    continue

                try:
                    cmds.namespace(removeNamespace=namespace,
                                   mergeNamespaceWithRoot=True)
                except Exception:
                    # Reload the reference and try again. The
                    # namespace of the reference can be removed
                    # after reload.
                    # (TODO) This publish workflow might not be a
                    #        good approach...
                    ref_node = lib.reference_node_by_namespace(namespace)
                    # There must be a reference node, since that's
                    # the main reason why the namespace could not
                    # be removed.
                    cmds.file(loadReference=ref_node)
                    cmds.namespace(removeNamespace=namespace,
                                   mergeNamespaceWithRoot=True)
            """

            # - Remove loaded container members
            # If the mesh of a loaded model has been copied and
            # edited (e.g. mesh faces detached and separated), the
            # model container may end up with a lot of facet
            # members, meaning there are DAG connections that would
            # cause the model container to be exported as well,
            # which we don't want. So we simply remove them all.
            for container in instance.context.data["RootContainers"]:
                cmds.delete(container)

            mesh_nodes = cmds.ls(instance,
                                 type="mesh",
                                 noIntermediate=True,
                                 long=True)
            geo_id_and_hash = self.hash(set(mesh_nodes))

            cmds.select(cmds.ls(instance), noExpand=True)
            cmds.file(outpath,
                      force=True,
                      typ="mayaBinary",
                      exportSelected=True,
                      preserveReferences=False,
                      channels=True,
                      constraints=True,
                      expressions=True,
                      constructionHistory=True,
                      shader=True)

    instance.data["repr.mayaBinary._stage"] = staging_dir
    instance.data["repr.mayaBinary._files"] = [filename]
    instance.data["repr.mayaBinary.entryFileName"] = filename
    instance.data["repr.mayaBinary.modelProfile"] = geo_id_and_hash

def extract_mayaBinary(self):
    # Define extract output file path
    entry_file = self.file_name("mb")
    package_path = self.create_package()
    entry_path = os.path.join(package_path, entry_file)

    mesh_nodes = cmds.ls(self.member,
                         type="mesh",
                         noIntermediate=True,
                         long=True)

    # Hash model and collect Avalon UUIDs
    geo_id_and_hash = dict()
    hasher = utils.MeshHasher()
    for mesh in mesh_nodes:
        # Get ID
        transform = cmds.listRelatives(mesh, parent=True, fullPath=True)[0]
        id = utils.get_id(transform)
        assert id is not None, ("Some mesh has no Avalon UUID. "
                                "This should not happen.")
        hasher.set_mesh(mesh)
        hasher.update_points()
        hasher.update_normals()
        hasher.update_uvmap()
        # Each mesh must pair with exactly one transform.
        geo_id_and_hash[id] = hasher.digest()
        hasher.clear()

    self.add_data({"modelProfile": geo_id_and_hash})

    # Perform extraction
    self.log.info("Performing extraction..")

    with contextlib.nested(
        capsule.no_undo(),
        capsule.no_display_layers(self.member),
        maya.maintained_selection(),
    ):
        with capsule.undo_chunk_when_no_undo():
            # (NOTE) The current workflow may keep the model loaded
            #        as a reference in the scene, but that needs
            #        extra care while extracting. (Will be undone)

            # - Remove referenced subset's namespace before exporting
            #   (Not keeping model namespace)
            referenced_namespace = self.context.data["referencedNamespace"]
            for namespace in reversed(sorted(list(referenced_namespace))):
                if not cmds.namespace(exists=namespace):
                    continue

                try:
                    cmds.namespace(removeNamespace=namespace,
                                   mergeNamespaceWithRoot=True)
                except Exception:
                    # Reload the reference and try again. The
                    # namespace of the reference can be removed
                    # after reload.
                    # (TODO) This publish workflow might not be a
                    #        good approach...
                    ref_node = lib.reference_node_by_namespace(namespace)
                    # There must be a reference node, since that's
                    # the main reason why the namespace could not
                    # be removed.
                    cmds.file(loadReference=ref_node)
                    cmds.namespace(removeNamespace=namespace,
                                   mergeNamespaceWithRoot=True)

            # - Remove loaded container members
            # If the mesh of a loaded model has been copied and
            # edited (e.g. mesh faces detached and separated), the
            # model container may end up with a lot of facet
            # members, meaning there are DAG connections that would
            # cause the model container to be exported as well,
            # which we don't want. So we simply remove them all.
            for container in self.context.data["RootContainers"]:
                cmds.delete(container)

            cmds.select(cmds.ls(self.member), noExpand=True)

            cmds.file(entry_path,
                      force=True,
                      typ="mayaBinary",
                      exportSelected=True,
                      preserveReferences=False,
                      channels=True,
                      constraints=True,
                      expressions=True,
                      constructionHistory=True,
                      shader=True)

    self.add_data({
        "entryFileName": entry_file,
    })

    self.log.info("Extracted {name} to {path}".format(
        name=self.data["subset"],
        path=entry_path)
    )

def extract_all(self, cam_transform, ma_outpath, abc_outpath, fbx_outpath,
                start, end, step, euler_filter,
                do_bake, donot_bake, duplicate_input_graph=False):
    from maya import cmds
    from reveries.maya import io, lib, capsule

    with contextlib.nested(
        capsule.no_refresh(),
        capsule.no_undo(),
        capsule.attribute_states(donot_bake, lock=False, keyable=False),
        capsule.attribute_states(do_bake, lock=False, keyable=True),
        capsule.evaluation("off"),
    ):
        with capsule.delete_after() as delete_bin:

            # Bake to world space
            frame_range = (start, end)
            baked_camera = lib.bake_to_world_space(
                cam_transform,
                frame_range,
                step=step,
                # Remove baked attributes from animation layers so
                # all keys get baked out, as if the animLayers had
                # been merged.
                remove_baked_attr_from_layer=True,
                duplicate_input_graph=duplicate_input_graph)[0]
            delete_bin.append(baked_camera)

            cmds.select(baked_camera,
                        hierarchy=True,  # With shape
                        replace=True,
                        noExpand=True)

            with avalon.maya.maintained_selection():
                io.export_alembic(abc_outpath,
                                  start,
                                  end,
                                  eulerFilter=euler_filter)

            with capsule.undo_chunk_when_no_undo():
                if euler_filter:
                    cmds.filterCurve(cmds.ls(sl=True))

                with avalon.maya.maintained_selection():
                    cmds.file(ma_outpath,
                              force=True,
                              typ="mayaAscii",
                              exportSelected=True,
                              preserveReferences=False,
                              constructionHistory=False,
                              channels=True,  # Allow animation
                              constraints=False,
                              shader=False,
                              expressions=False)

                with avalon.maya.maintained_selection():
                    io.export_fbx_set_camera()
                    io.export_fbx(fbx_outpath)