def is_compatible(container):
    """Action will be visible only if the selected container requires this fix

    Returns True only for a root-level container whose namespace is
    duplicated among the loaded subsets; returns False otherwise.
    """
    from maya import cmds
    from avalon.maya import pipeline as avalon_pipeline
    from avalon.maya.pipeline import AVALON_CONTAINERS
    from reveries.maya import lib

    if not container:
        return False

    if not ("subsetGroup" in container and container["subsetGroup"]):
        return False

    namespace = lib.get_ns(container["subsetGroup"])
    if container["namespace"] != namespace:
        # Use `UpdateNamespace` action to update namespace first.
        # (was a bare `return`, i.e. None; return False to keep the
        # boolean contract consistent with every other path)
        return False

    # Lazily fill the module-level cache so repeated menu builds do not
    # re-query the scene.
    if cache["mainContainers"] is None:
        # AVALON_CONTAINERS starts with ":"; strip it for the ls pattern
        cache["mainContainers"] = cmds.ls(AVALON_CONTAINERS[1:] + "*",
                                          recursive=True)
    if cache["loadedNamespaces"] is None:
        # Was `avalon.maya.pipeline._ls()`, which relied on a bare
        # `avalon` name never imported in this function; use the
        # explicit local import instead.
        cache["loadedNamespaces"] = [
            cmds.getAttr(con + ".namespace")
            for con in avalon_pipeline._ls()
        ]

    main_containers = cache["mainContainers"]
    namespaces = cache["loadedNamespaces"]

    parents = cmds.listSets(object=container["objectName"]) or []
    # Must be a root container
    if any(main in parents for main in main_containers):
        # Only offer the fix when this namespace is used more than once
        if namespaces.count(container["namespace"]) > 1:
            return True

    return False
def is_compatible(container):
    """Return True if the selected container's namespace needs fixing.

    The container is fixable when it carries a non-empty "subsetGroup"
    and the namespace recorded on the container differs from the
    namespace of the subset group node in the scene.
    """
    from reveries.maya import lib

    # Guard against None/empty input — previously `"subsetGroup" in
    # container` raised TypeError on None; the sibling action's
    # `is_compatible` in this module already guards this the same way.
    if not container:
        return False

    if not ("subsetGroup" in container and container["subsetGroup"]):
        return False

    namespace = lib.get_ns(container["subsetGroup"])
    return container["namespace"] != namespace
def get_invalid_containers(cls, containers):
    """Return containers whose recorded namespace mismatches the scene.

    A container counts as invalid when it has a non-empty "subsetGroup"
    entry and the namespace read from that group node differs from the
    "namespace" value stored on the container dict.
    """
    from reveries.maya import lib

    return [
        data for data in containers
        if data.get("subsetGroup")
        and data["namespace"] != lib.get_ns(data["subsetGroup"])
    ]
def process(self, containers):
    """Rewrite each container's namespace attribute from its subset group.

    For every container dict given, the namespace is re-read from the
    subset group node and written back to both the Maya container node
    and the in-memory dict; finally the scene inventory is refreshed.
    """
    from maya import cmds
    from avalon.tools import sceneinventory
    from reveries.maya import lib

    for data in containers:
        fixed_ns = lib.get_ns(data["subsetGroup"])
        # Sync the Maya node first, then the cached dict
        cmds.setAttr(data["objectName"] + ".namespace",
                     fixed_ns,
                     type="string")
        data["namespace"] = fixed_ns

    sceneinventory.app.window.refresh()
def fix_invalid(cls, context):
    """Repair the namespace attribute of every invalid container.

    Gathers root and sub containers from the publish context, filters
    them with ``get_invalid_containers``, and writes the namespace read
    from each subset group back onto the container node and dict.
    """
    from maya import cmds
    from reveries.maya import lib

    all_containers = (list(context.data["RootContainers"].values())
                      + list(context.data["SubContainers"].values()))

    for data in cls.get_invalid_containers(all_containers):
        actual_ns = lib.get_ns(data["subsetGroup"])
        cmds.setAttr(data["objectName"] + ".namespace",
                     actual_ns,
                     type="string")
        data["namespace"] = actual_ns
def process(self, instance):
    """Collect cacheable nodes for a pointcache instance.

    For the "default" variant, cacheable nodes are gathered from every
    loaded subset's ``*OutSet`` object set among the instance members,
    and the instance is re-created as one new instance per
    (namespace, OutSet name) pair.  For any other variant the
    instance's own members are cached directly.
    """
    # Subset name is "pointcache<Variant>"; strip the family prefix
    variant = instance.data["subset"][len("pointcache"):].lower()
    members = instance[:]

    if variant == "default":
        # Collect cacheable nodes from OutSet of loaded subset
        out_cache = dict()
        out_sets = list()
        created = False

        for node in cmds.ls(members, type="transform", long=True):
            try:
                # Only subset group nodes resolve to a container;
                # other transforms raise AssertionError and are skipped.
                container = pipeline.get_container_from_group(node)
            except AssertionError:
                continue
            sets = cmds.ls(cmds.sets(container, query=True),
                           type="objectSet")
            out_sets += [s for s in sets if s.endswith("OutSet")]

        for node in out_sets:
            # "ns:FooOutSet" -> "Foo"; bare "OutSet" falls back to "Default"
            name = node.rsplit(":", 1)[-1][:-len("OutSet")] or "Default"
            self.log.info(name)
            namespace = lib.get_ns(node)[1:]  # Remove root ":"
            cacheables = lib.pick_cacheable(
                cmds.sets(node, query=True) or [])
            cacheables = self.cache_by_visibility(cacheables)
            out_cache[(namespace, name)] = cacheables

        # Re-Create instances
        context = instance.context
        backup = instance
        source_data = instance.data

        for (namespace, name), cacheables in out_cache.items():
            if not cacheables:
                self.log.debug("Skip empty OutSet %s in %s"
                               % (name, namespace))
                continue

            instance = context.create_instance(namespace + "." + name)
            created = True
            instance.data.update(source_data)
            instance.data["subset"] = ".".join(
                ["pointcache", namespace, name])
            instance[:] = cacheables
            instance.data["outCache"] = cacheables
            instance.data["requireAvalonUUID"] = cacheables
            self.assign_contractor(instance)

        if not created:
            cmds.error("No pointcache instance created.")
        else:
            # Members were re-distributed; drop the original instance
            context.remove(backup)
    else:
        # Collect cacheable nodes from instance member
        cacheables = lib.pick_cacheable(members)
        cacheables = self.cache_by_visibility(cacheables)
        instance[:] = cacheables
        instance.data["outCache"] = cacheables
        instance.data["requireAvalonUUID"] = cacheables
        self.assign_contractor(instance)
def process_import(self, context, name, namespace, group, options):
    """Import animation curves and connect them onto a loaded asset.

    Resolves the target namespace from the current selection (the asset
    must match the representation's ``animatedAssetId``; applying onto a
    different asset asks for confirmation), re-plays the exported
    selection script to establish target order, imports the ``.anim``
    file, re-namespaces the imported nodes, and deletes any imported
    node that did not connect to a selected target.

    Raises:
        Exception: when the user cancels, no matching asset is selected,
            or no animation ends up connected.
    """
    from maya import cmds, mel
    from reveries import plugins

    representation = context["representation"]
    asset_id = representation["data"]["animatedAssetId"]

    selected = cmds.ls(selection=True, long=True)

    # Collect namespace from selected nodes
    namespaces = defaultdict(set)
    for node in selected:
        ns = lib.get_ns(node)
        if ns == ":":
            # Skip nodes living in the root namespace
            continue
        namespaces[ns].add(node)

    for ns, nodes in namespaces.items():
        try:
            container = pipeline.get_container_from_namespace(ns)
        except RuntimeError:
            continue

        if asset_id != cmds.getAttr(container + ".assetId"):
            confirm = plugins.message_box_warning(
                "Warning",
                "Applying animation to different asset, are you sure ?",
                optional=True,
            )
            if not confirm:
                raise Exception("Operation canceled.")

        target_ns = ns
        members = nodes
        break
    else:
        raise Exception("No matched asset found.")

    cmds.loadPlugin("animImportExport", quiet=True)

    entry_path = self.file_path(representation).replace("\\", "/")
    # The selection script sits next to the .anim file, same basename
    sele_path = entry_path.rsplit("anim", 1)[0] + "mel"
    sele_path = os.path.expandvars(sele_path)

    with capsule.maintained_selection():
        # Select nodes with order
        # (was `contextlib.nested(...)`, which is deprecated since
        # Python 2.7 and removed in Python 3; the multi-manager `with`
        # statement is the equivalent replacement)
        with capsule.namespaced(target_ns, new=False), \
                capsule.relative_namespaced():
            self._selection_patch(sele_path)
            mel.eval("source \"%s\"" % sele_path)
            targets = cmds.ls(selection=True, long=True)

        nodes = cmds.file(entry_path,
                          force=True,
                          type="animImport",
                          i=True,
                          importTimeRange="keep",
                          ignoreVersion=True,
                          returnNewNodes=True,
                          options=("targetTime=4;"
                                   "option=replace;"
                                   "connect=0"))

        # Apply namespace by ourselves, since animImport does not
        # take -namespace flag
        namespaced_nodes = list()
        for node in nodes:
            node = cmds.rename(node, namespace + ":" + node)
            namespaced_nodes.append(node)

        # Delete not connected
        targets = set(targets)
        connected = list()
        for node in namespaced_nodes:
            future = cmds.listHistory(node, future=True)
            future = set(cmds.ls(future, long=True))
            if targets.intersection(future):
                connected.append(node)
            else:
                cmds.delete(node)

    if not connected:
        raise Exception("No animation been applied.")

    self[:] = connected

    # Remove assigned from selection
    unprocessed = list(set(selected) - members)
    cmds.select(unprocessed, replace=True, noExpand=True)
def process(self, instance):
    """Collect cacheable nodes for a pointcache instance.

    Members belonging to a loaded subset's ``*OutSet`` are split into
    one new instance per (namespace, name) pair; members outside any
    OutSet stay on the original instance.  Hidden geometry (not visible
    in the cache frame range) is excluded and flagged via "_hasHidden".
    """
    import maya.cmds as cmds
    from reveries.maya import lib, pipeline

    # Frame range
    if instance.data["staticCache"]:
        # Static cache: single frame at current time
        start_frame = cmds.currentTime(query=True)
        end_frame = cmds.currentTime(query=True)
    else:
        get = (lambda f: cmds.playbackOptions(query=True, **f))
        start_frame = get({"minTime": True})
        end_frame = get({"maxTime": True})

    members = instance[:]
    out_sets = list()

    # Find OutSet from *Subset Group nodes*
    #
    for group in cmds.ls(members, type="transform", long=True):
        if cmds.listRelatives(group, shapes=True):
            # Group nodes carry no shapes; skip geometry transforms
            continue
        try:
            container = pipeline.get_container_from_group(group)
        except AssertionError:
            # Not a subset group node
            continue
        nodes = cmds.sets(container, query=True)
        sets = [
            s for s in cmds.ls(nodes, type="objectSet")
            if s.endswith("OutSet")
        ]
        if sets:
            out_sets += sets
            # Group is handled through its OutSet; take it off members
            members.remove(group)

    # Collect cacheable nodes
    created = False
    backup = instance

    if out_sets:
        # Cacheables from OutSet of loaded subset
        out_cache = dict()
        subset = backup.data["subset"][len("pointcache"):]

        for out_set in out_sets:
            # "ns:FooOutSet" -> variant "Foo"; empty means unnamed set
            variant = out_set.rsplit(":", 1)[-1][:-len("OutSet")]
            if variant:
                name = variant + "." + subset
            else:
                name = subset
            self.log.info(name)

            namespace = lib.get_ns(out_set)
            set_member = cmds.ls(cmds.sets(out_set, query=True),
                                 long=True)
            all_cacheables = lib.pick_cacheable(set_member)
            cacheables = lib.get_visible_in_frame_range(
                all_cacheables, int(start_frame), int(end_frame))
            has_hidden = len(all_cacheables) > len(cacheables)
            # Plus locator
            cacheables += self.pick_locators(set_member)

            out_cache[(namespace, name)] = (has_hidden, cacheables)

            # These members are covered by the OutSet; remove from pool
            for n in cacheables:
                if n in members:
                    members.remove(n)

        # Re-Create instances
        context = backup.context

        for k, (has_hidden, cacheables) in out_cache.items():
            namespace, name = k

            if not cacheables:
                self.log.debug("Skip empty OutSet %s in %s"
                               % (name, namespace))
                if has_hidden:
                    self.log.warning("Geometry in OutSet %s is hidden, "
                                     "possible wrong LOD ?"
                                     % namespace)
                continue

            if has_hidden:
                self.log.debug("Some geometry in OutSet %s is hidden."
                               % namespace)

            namespace = namespace[1:]  # Remove root ":"
            # For filesystem, remove other ":" if the namespace is nested
            namespace = namespace.replace(":", "._.")

            instance = context.create_instance(namespace + "." + name)
            created = True
            instance.data.update(backup.data)
            # New subset name
            #
            instance.data["subset"] = ".".join(
                ["pointcache", namespace, name])
            instance[:] = cacheables
            instance.data["outCache"] = cacheables
            instance.data["_hasHidden"] = has_hidden
            instance.data["requireAvalonUUID"] = cacheables
            instance.data["startFrame"] = start_frame
            instance.data["endFrame"] = end_frame
            self.add_families(instance)

    if not members:
        # Nothing left, all in/has OutSet
        if not created:
            cmds.error("No pointcache instance created.")
        else:
            context.remove(backup)
    else:
        # Cache nodes that were not in any OutSet
        instance = backup
        # Cacheables from instance member
        all_cacheables = lib.pick_cacheable(members)
        cacheables = lib.get_visible_in_frame_range(
            all_cacheables, int(start_frame), int(end_frame))
        has_hidden = len(all_cacheables) > len(cacheables)
        # Plus locator
        cacheables += self.pick_locators(members)

        instance[:] = cacheables
        instance.data["outCache"] = cacheables
        instance.data["_hasHidden"] = has_hidden
        instance.data["requireAvalonUUID"] = cacheables
        instance.data["startFrame"] = start_frame
        instance.data["endFrame"] = end_frame
        self.add_families(instance)
def process(self, instance):
    """Split an animation instance into one instance per namespace.

    For the "default" variant, animatable nodes come from each loaded
    subset's ``*ControlSet``; for other variants, from the instance's
    own containerized members.  Each resulting namespace yields a new
    instance named "animation.<namespace>.<name>".
    """
    # Subset name is "animation<Variant>"; strip the family prefix
    variant = instance.data["subset"][len("animation"):].lower()
    members = instance[:]

    # Re-Create instances
    context = instance.context
    context.remove(instance)
    source_data = instance.data

    ANIM_SET = "ControlSet"
    out_cache = dict()

    if variant == "default":
        # Collect animatable nodes from ControlSet of loaded subset
        out_sets = list()
        for node in cmds.ls(members, type="transform", long=True):
            try:
                # Only subset group nodes resolve to a container
                container = pipeline.get_container_from_group(node)
            except AssertionError:
                continue
            sets = cmds.ls(cmds.sets(container, query=True),
                           type="objectSet")
            out_sets += [s for s in sets if s.endswith(ANIM_SET)]

        for node in out_sets:
            # "ns:FooControlSet" -> "Foo"; fall back to "Default"
            name = node.rsplit(":", 1)[-1][:-len(ANIM_SET)] or "Default"
            self.log.info(name)
            namespace = lib.get_ns(node)
            animatables = cmds.ls(cmds.sets(node, query=True), long=True)
            out_cache[namespace] = (name, animatables)
    else:
        # Collect animatable nodes from instance member
        for node in cmds.ls(members, type="transform", long=True):
            namespace = lib.get_ns(node)
            try:
                # Must be containerized
                pipeline.get_container_from_namespace(namespace)
            except RuntimeError:
                continue
            if namespace not in out_cache:
                out_cache[namespace] = (variant, list())
            out_cache[namespace][1].append(node)

    for namespace, (name, animatables) in out_cache.items():
        # namespace still carries the root ":" prefix here
        instance = context.create_instance(namespace or name)
        container = pipeline.get_container_from_namespace(namespace)
        asset_id = cmds.getAttr(container + ".assetId")

        namespace = namespace[1:]  # Remove root ":"

        instance.data.update(source_data)
        instance.data["subset"] = ".".join(["animation",
                                            namespace,
                                            name])
        instance[:] = animatables
        instance.data["outAnim"] = animatables
        instance.data["animatedNamespace"] = namespace
        instance.data["animatedAssetId"] = asset_id
        # (NOTE) Although we put those animatable nodes to validate
        #        AvalonUUID existence, but currently AvalonUUID is
        #        not needed on load.
        instance.data["requireAvalonUUID"] = animatables
def process(self, instance):
    """Split an animation instance into one instance per namespace/name.

    For the "default" variant, animatable nodes come from each loaded
    subset's ``*ControlSet`` object sets (matched by namespace pattern);
    for other variants, from the instance's own containerized members.
    Each (namespace, name) key yields a new instance whose subset is
    "animation.<namespace>.<name>" (nested ":" mapped to "._." for the
    filesystem).
    """
    import maya.cmds as cmds
    from reveries.maya import lib, pipeline

    # Subset name is "animation<Variant>"; strip the family prefix
    variant = instance.data["subset"][len("animation"):].lower()
    members = instance[:]

    # Re-Create instances
    context = instance.context
    context.remove(instance)
    source_data = instance.data

    ANIM_SET = "ControlSet"
    out_cache = dict()

    if variant == "default":
        # Collect animatable nodes from ControlSet of loaded subset
        out_sets = list()
        for node in cmds.ls(members, type="transform"):
            try:
                # Must be containerized subset group node
                pipeline.get_container_from_group(node)
            except AssertionError:
                continue
            namespace = lib.get_ns(node)
            # Find every *ControlSet under this namespace by pattern
            out_sets += cmds.ls("%s:*%s" % (namespace, ANIM_SET),
                                sets=True)

        for node in out_sets:
            # "ns:FooControlSet" -> "Foo"; fall back to "Default"
            name = node.rsplit(":", 1)[-1][:-len(ANIM_SET)] or "Default"
            namespace = lib.get_ns(node)
            animatables = cmds.ls(cmds.sets(node, query=True),
                                  type="transform")
            key = (namespace, name)
            self.log.info("%s, %s" % key)

            if not animatables:
                self.log.warning("No animatable (e.g. controllers) been "
                                 "found in '%s', skipping.."
                                 % node)
                continue

            out_cache[key] = animatables
    else:
        # Collect animatable nodes from instance member
        for node in cmds.ls(members, type="transform"):
            namespace = lib.get_ns(node)
            try:
                # Must be containerized
                pipeline.get_container_from_namespace(namespace)
            except RuntimeError:
                continue
            key = (namespace, variant)
            if key not in out_cache:
                self.log.info("%s, %s" % key)
                out_cache[key] = list()
            out_cache[key].append(node)

    # Sorted for a stable, deterministic instance order
    for (namespace, name), animatables in sorted(out_cache.items()):
        container = pipeline.get_container_from_namespace(namespace)
        asset_id = cmds.getAttr(container + ".assetId")

        fixed_namespace = namespace[1:]  # Remove root ":"
        # For filesystem, remove other ":" if the namespace is nested
        fixed_namespace = fixed_namespace.replace(":", "._.")

        subset = ".".join(["animation", fixed_namespace, name])

        instance = context.create_instance(subset)
        instance.data.update(source_data)
        instance.data["subset"] = subset
        instance[:] = animatables
        instance.data["outAnim"] = animatables
        instance.data["animatedNamespace"] = namespace
        instance.data["animatedAssetId"] = asset_id
        # (NOTE) Although we put those animatable nodes to validate
        #        AvalonUUID existence, but currently AvalonUUID is
        #        not needed on load.
        instance.data["requireAvalonUUID"] = animatables