def walk_hierarchy(parent):
    """Depth-first walk over the transform children of `parent`.

    Every transform node encountered is yielded. When a node turns out
    to be a containerized subset group, the members of its OutSet (the
    first in sorted order, if several exist) are yielded instead of
    recursing further; otherwise the walk continues into that node's
    own children.
    """
    children = cmds.listRelatives(
        parent, children=True, path=True, type="transform") or []

    for child in children:
        yield child

        try:
            container = pipeline.get_container_from_group(child)
        except AssertionError:
            # Not a subset group node, keep walking downwards.
            for descendant in walk_hierarchy(child):
                yield descendant
            continue

        # Containerized: look for an OutSet among the container members.
        set_nodes = cmds.ls(cmds.sets(container, query=True),
                            type="objectSet")
        found = [s for s in set_nodes if s.endswith("OutSet")]

        if not found:
            for descendant in walk_hierarchy(child):
                yield descendant
            continue

        chosen = sorted(found)[0]
        if len(found) > 1:
            self.log.warning(
                "Multiple OutSet found in %s, but only one "
                "OutSet will be expanded: %s" %
                (container, chosen))

        for member in cmds.sets(chosen, query=True) or []:
            yield member
# Example #2
    def process(self, instance):
        """Expand a raw "pointcache" instance into cacheable content.

        For the "default" variant, each OutSet found in the loaded
        subset containers among the instance members becomes its own
        pointcache instance and the original instance is removed. For
        any other variant, the instance's own members are collected
        directly into the instance.
        """

        # Variant is whatever follows the "pointcache" prefix in the
        # subset name, e.g. "pointcacheDefault" -> "default".
        variant = instance.data["subset"][len("pointcache"):].lower()
        members = instance[:]

        if variant == "default":
            # Collect cacheable nodes from OutSet of loaded subset
            out_cache = dict()
            out_sets = list()
            created = False

            for node in cmds.ls(members, type="transform", long=True):
                try:
                    container = pipeline.get_container_from_group(node)
                except AssertionError:
                    # Not a containerized subset group node, skip it.
                    continue

                sets = cmds.ls(cmds.sets(container, query=True),
                               type="objectSet")
                out_sets += [s for s in sets if s.endswith("OutSet")]

            for node in out_sets:
                # Set name without the "OutSet" suffix, e.g.
                # "ns:HighOutSet" -> "High"; a bare "OutSet" -> "Default".
                name = node.rsplit(":", 1)[-1][:-len("OutSet")] or "Default"
                self.log.info(name)
                namespace = lib.get_ns(node)[1:]  # Remove root ":"
                cacheables = lib.pick_cacheable(
                    cmds.sets(node, query=True) or [])
                cacheables = self.cache_by_visibility(cacheables)

                # NOTE(review): a nested namespace keeps its inner ":"
                # here, which ends up in the subset name below -- confirm
                # that is intended (a later revision maps ":" to "._.").
                out_cache[(namespace, name)] = cacheables

            # Re-Create instances
            context = instance.context
            backup = instance
            source_data = instance.data

            for (namespace, name), cacheables in out_cache.items():

                if not cacheables:
                    self.log.debug("Skip empty OutSet %s in %s" %
                                   (name, namespace))
                    continue

                instance = context.create_instance(namespace + "." + name)
                created = True

                # Inherit the original instance's data, then override the
                # subset name and members for this particular OutSet.
                instance.data.update(source_data)
                instance.data["subset"] = ".".join(
                    ["pointcache", namespace, name])
                instance[:] = cacheables
                instance.data["outCache"] = cacheables
                instance.data["requireAvalonUUID"] = cacheables

                self.assign_contractor(instance)

            if not created:
                cmds.error("No pointcache instance created.")
            else:
                # All content was re-distributed into new instances, so
                # drop the original one.
                context.remove(backup)

        else:
            # Collect cacheable nodes from instance member
            cacheables = lib.pick_cacheable(members)
            cacheables = self.cache_by_visibility(cacheables)

            instance[:] = cacheables
            instance.data["outCache"] = cacheables
            instance.data["requireAvalonUUID"] = cacheables

            self.assign_contractor(instance)
# Example #3
    def process(self, instance):
        """Expand a raw "pointcache" instance into cacheable instances.

        Subset group members that carry an ``OutSet`` each become their
        own pointcache instance (named after namespace and variant);
        leftover members are cached through the original instance. If
        everything was re-distributed, the original instance is removed.
        """
        import maya.cmds as cmds
        from reveries.maya import lib, pipeline

        # Frame range: a static cache is pinned to the current frame,
        # otherwise the playback range is used.
        if instance.data["staticCache"]:
            start_frame = cmds.currentTime(query=True)
            end_frame = cmds.currentTime(query=True)
        else:
            get = (lambda f: cmds.playbackOptions(query=True, **f))
            start_frame = get({"minTime": True})
            end_frame = get({"maxTime": True})

        members = instance[:]
        out_sets = list()

        # Find OutSet from *Subset Group nodes*
        #
        for group in cmds.ls(members, type="transform", long=True):
            if cmds.listRelatives(group, shapes=True):
                # A transform with shapes cannot be a group node.
                continue

            try:
                container = pipeline.get_container_from_group(group)
            except AssertionError:
                # Not a subset group node
                continue

            nodes = cmds.sets(container, query=True)
            sets = [
                s for s in cmds.ls(nodes, type="objectSet")
                if s.endswith("OutSet")
            ]
            if sets:
                out_sets += sets
                # Guard the removal (matching the cacheable removal loop
                # below): `group` is a long name, while the instance may
                # hold it under a different (short) name, in which case
                # an unguarded remove() would raise ValueError.
                if group in members:
                    members.remove(group)

        # Collect cacheable nodes

        created = False
        backup = instance
        # Bind the context up-front so both the OutSet branch and the
        # final cleanup below use the same reference.
        context = backup.context

        if out_sets:
            # Cacheables from OutSet of loaded subset
            out_cache = dict()
            subset = backup.data["subset"][len("pointcache"):]

            for out_set in out_sets:

                # "ns:HighOutSet" -> variant "High"; a bare "OutSet"
                # yields no variant prefix.
                variant = out_set.rsplit(":", 1)[-1][:-len("OutSet")]
                if variant:
                    name = variant + "." + subset
                else:
                    name = subset

                self.log.info(name)

                namespace = lib.get_ns(out_set)
                set_member = cmds.ls(cmds.sets(out_set, query=True), long=True)
                all_cacheables = lib.pick_cacheable(set_member)
                cacheables = lib.get_visible_in_frame_range(
                    all_cacheables, int(start_frame), int(end_frame))
                has_hidden = len(all_cacheables) > len(cacheables)

                # Plus locator
                cacheables += self.pick_locators(set_member)

                out_cache[(namespace, name)] = (has_hidden, cacheables)

                # Already handled through this OutSet; don't cache twice
                # via the leftover-members branch below.
                for n in cacheables:
                    if n in members:
                        members.remove(n)

            # Re-Create instances
            for k, (has_hidden, cacheables) in out_cache.items():
                namespace, name = k

                if not cacheables:
                    self.log.debug("Skip empty OutSet %s in %s" %
                                   (name, namespace))
                    if has_hidden:
                        self.log.warning("Geometry in OutSet %s is hidden, "
                                         "possible wrong LOD ?" % namespace)
                    continue

                if has_hidden:
                    self.log.debug("Some geometry in OutSet %s is hidden." %
                                   namespace)

                namespace = namespace[1:]  # Remove root ":"
                # For filesystem, remove other ":" if the namespace is nested
                namespace = namespace.replace(":", "._.")

                instance = context.create_instance(namespace + "." + name)
                created = True

                # Inherit the original instance's data, then override the
                # subset name and members for this particular OutSet.
                instance.data.update(backup.data)

                # New subset name
                #
                instance.data["subset"] = ".".join(
                    ["pointcache", namespace, name])
                instance[:] = cacheables
                instance.data["outCache"] = cacheables
                instance.data["_hasHidden"] = has_hidden
                instance.data["requireAvalonUUID"] = cacheables
                instance.data["startFrame"] = start_frame
                instance.data["endFrame"] = end_frame

                self.add_families(instance)

        if not members:
            # Nothing left, all in/has OutSet

            if not created:
                cmds.error("No pointcache instance created.")
            else:
                context.remove(backup)

        else:
            # Cache nodes that were not in any OutSet

            instance = backup

            # Cacheables from instance member
            all_cacheables = lib.pick_cacheable(members)
            cacheables = lib.get_visible_in_frame_range(
                all_cacheables, int(start_frame), int(end_frame))
            has_hidden = len(all_cacheables) > len(cacheables)
            # Plus locator
            cacheables += self.pick_locators(members)

            instance[:] = cacheables
            instance.data["outCache"] = cacheables
            instance.data["_hasHidden"] = has_hidden
            instance.data["requireAvalonUUID"] = cacheables
            instance.data["startFrame"] = start_frame
            instance.data["endFrame"] = end_frame

            self.add_families(instance)
    def process(self, instance):
        """Expand a raw "animation" instance into per-namespace instances.

        The original instance is removed up-front; one new instance is
        created per animated namespace found either via ControlSet lookup
        ("default" variant) or directly from the instance members.
        """

        # Variant is whatever follows the "animation" prefix in the
        # subset name, e.g. "animationDefault" -> "default".
        variant = instance.data["subset"][len("animation"):].lower()
        members = instance[:]

        # Re-Create instances
        context = instance.context
        # NOTE(review): the source instance is removed even when the
        # loop below ends up creating nothing -- confirm intended.
        context.remove(instance)
        source_data = instance.data

        ANIM_SET = "ControlSet"
        out_cache = dict()

        if variant == "default":
            # Collect animatable nodes from ControlSet of loaded subset
            out_sets = list()

            for node in cmds.ls(members, type="transform", long=True):
                try:
                    container = pipeline.get_container_from_group(node)
                except AssertionError:
                    # Not a containerized subset group node, skip it.
                    continue

                sets = cmds.ls(cmds.sets(container, query=True),
                               type="objectSet")
                out_sets += [s for s in sets if s.endswith(ANIM_SET)]

            for node in out_sets:
                # Set name without the "ControlSet" suffix; a bare
                # "ControlSet" maps to "Default".
                name = node.rsplit(":", 1)[-1][:-len(ANIM_SET)] or "Default"
                self.log.info(name)
                namespace = lib.get_ns(node)
                animatables = cmds.ls(cmds.sets(node, query=True), long=True)

                out_cache[namespace] = (name, animatables)

        else:
            # Collect animatable nodes from instance member
            for node in cmds.ls(members, type="transform", long=True):
                namespace = lib.get_ns(node)
                try:
                    # Must be containerized
                    pipeline.get_container_from_namespace(namespace)
                except RuntimeError:
                    continue

                # Group member nodes by namespace; the lowercased variant
                # is used as the name component for all of them.
                if namespace not in out_cache:
                    out_cache[namespace] = (variant, list())
                out_cache[namespace][1].append(node)

        for namespace, (name, animatables) in out_cache.items():
            instance = context.create_instance(namespace or name)
            container = pipeline.get_container_from_namespace(namespace)
            asset_id = cmds.getAttr(container + ".assetId")

            namespace = namespace[1:]  # Remove root ":"

            # Inherit the original instance's data, then override the
            # subset name and members for this namespace.
            instance.data.update(source_data)
            instance.data["subset"] = ".".join(["animation", namespace, name])
            instance[:] = animatables
            instance.data["outAnim"] = animatables
            instance.data["animatedNamespace"] = namespace
            instance.data["animatedAssetId"] = asset_id
            # (NOTE) Although we put those animatable nodes to validate
            #        AvalonUUID existence, but currently AvalonUUID is
            #        not needed on load.
            instance.data["requireAvalonUUID"] = animatables
# Example #5
    def process(self, instance):
        """Expand a raw "animation" instance into per-namespace instances.

        The original instance is removed up-front; a new instance is
        created for every (namespace, name) pair that yields animatable
        transform nodes.
        """
        import maya.cmds as cmds
        from reveries.maya import lib, pipeline

        # Variant is whatever follows the "animation" prefix in the
        # subset name, e.g. "animationDefault" -> "default".
        variant = instance.data["subset"][len("animation"):].lower()
        members = instance[:]

        # Re-Create instances
        context = instance.context
        # NOTE(review): the source instance is removed even when the
        # loop below ends up creating nothing -- confirm intended.
        context.remove(instance)
        source_data = instance.data

        ANIM_SET = "ControlSet"
        out_cache = dict()

        if variant == "default":
            # Collect animatable nodes from ControlSet of loaded subset
            out_sets = list()

            for node in cmds.ls(members, type="transform"):
                try:
                    # Must be containerized subset group node
                    pipeline.get_container_from_group(node)
                except AssertionError:
                    continue

                namespace = lib.get_ns(node)
                # Collect every "*ControlSet" objectSet in this namespace.
                out_sets += cmds.ls("%s:*%s" % (namespace, ANIM_SET),
                                    sets=True)

            for node in out_sets:
                # Set name without the "ControlSet" suffix; a bare
                # "ControlSet" maps to "Default".
                name = node.rsplit(":", 1)[-1][:-len(ANIM_SET)] or "Default"
                namespace = lib.get_ns(node)
                animatables = cmds.ls(cmds.sets(node, query=True),
                                      type="transform")

                key = (namespace, name)
                self.log.info("%s, %s" % key)
                if not animatables:
                    self.log.warning("No animatable (e.g. controllers) been "
                                     "found in '%s', skipping.." % node)
                    continue

                out_cache[key] = animatables

        else:
            # Collect animatable nodes from instance member
            for node in cmds.ls(members, type="transform"):
                namespace = lib.get_ns(node)
                try:
                    # Must be containerized
                    pipeline.get_container_from_namespace(namespace)
                except RuntimeError:
                    continue

                key = (namespace, variant)

                if key not in out_cache:
                    # Log each namespace only once, on first encounter.
                    self.log.info("%s, %s" % key)
                    out_cache[key] = list()

                out_cache[key].append(node)

        # Sorted for a deterministic instance creation order.
        for (namespace, name), animatables in sorted(out_cache.items()):
            container = pipeline.get_container_from_namespace(namespace)
            asset_id = cmds.getAttr(container + ".assetId")

            fixed_namespace = namespace[1:]  # Remove root ":"
            # For filesystem, remove other ":" if the namespace is nested
            fixed_namespace = fixed_namespace.replace(":", "._.")

            subset = ".".join(["animation", fixed_namespace, name])

            # Inherit the original instance's data, then override the
            # subset name and members for this namespace.
            instance = context.create_instance(subset)
            instance.data.update(source_data)
            instance.data["subset"] = subset
            instance[:] = animatables
            instance.data["outAnim"] = animatables
            instance.data["animatedNamespace"] = namespace
            instance.data["animatedAssetId"] = asset_id
            # (NOTE) Although we put those animatable nodes to validate
            #        AvalonUUID existence, but currently AvalonUUID is
            #        not needed on load.
            instance.data["requireAvalonUUID"] = animatables