Example #1
class PointCacheReferenceLoader(ReferenceLoader, avalon.api.Loader):

    label = "Reference PointCache"
    order = -10
    icon = "flash"
    color = "orange"

    hosts = ["maya"]

    families = [
        "reveries.pointcache",
        "reveries.setdress",
    ]

    representations = [
        "Alembic",
        "FBXCache",
        "GPUCache",
    ]

    options = [
        qargparse.Integer("count", default=1, min=1, help="Batch load count."),
        qargparse.Double3("offset", help="Offset loaded subsets."),
    ]

    def process_reference(self, context, name, namespace, group, options):
        import maya.cmds as cmds
        import reveries.maya.lib
        from reveries.maya import utils

        representation = context["representation"]

        entry_path = self.file_path(representation)

        nodes = cmds.file(entry_path,
                          namespace=namespace,
                          ignoreVersion=True,
                          sharedReferenceFile=False,
                          groupReference=True,
                          groupName=group,
                          reference=True,
                          lockReference=False,
                          returnNewNodes=True)

        # (NOTE) Nodes loaded from Alembic do not have verifiers
        utils.update_id_verifiers(nodes)

        reveries.maya.lib.lock_transform(group)
        self[:] = nodes

    def switch(self, container, representation):
        self.update(container, representation)
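
Note: reveries.maya.lib.lock_transform is a pipeline helper that is not shown
in these examples. A minimal sketch of the same idea, assuming it only needs
to lock the translate/rotate/scale channels of the group node:

from maya import cmds


def lock_transform_channels(node):
    """Lock translate, rotate and scale so artists cannot move the group."""
    for channel in ("t", "r", "s"):
        for axis in ("x", "y", "z"):
            cmds.setAttr("{}.{}{}".format(node, channel, axis), lock=True)
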
Example #2
class ModelLoader(ReferenceLoader, avalon.api.Loader):
    """Load the model"""

    label = "Reference Model"
    order = -10
    icon = "code-fork"
    color = "orange"

    hosts = ["maya"]

    families = ["reveries.model"]

    representations = [
        "mayaBinary",
    ]

    options = [
        qargparse.Integer("count", default=1, min=1, help="Batch load count."),
        qargparse.Double3("offset", help="Offset loaded subsets."),
    ]

    def process_reference(self, context, name, namespace, group, options):

        import maya.cmds as cmds
        from avalon import maya

        representation = context["representation"]

        entry_path = self.file_path(representation)

        with maya.maintained_selection():
            nodes = cmds.file(entry_path,
                              namespace=namespace,
                              ignoreVersion=True,
                              reference=True,
                              returnNewNodes=True,
                              groupReference=True,
                              groupName=group)
        self[:] = nodes

    def switch(self, container, representation):
        self.update(container, representation)
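
Note: avalon.maya.maintained_selection() keeps the user's selection intact
while the reference is created. A rough, hypothetical equivalent of that
context manager, assuming it only needs to restore the active selection:

import contextlib

from maya import cmds


@contextlib.contextmanager
def maintained_selection():
    """Restore whatever was selected before the wrapped block ran."""
    previous = cmds.ls(selection=True, long=True)
    try:
        yield
    finally:
        if previous:
            # Some nodes may have been deleted; only select what still exists
            cmds.select(cmds.ls(previous), replace=True, noExpand=True)
        else:
            cmds.select(clear=True)
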
Example #3
class RigLoader(ReferenceLoader, avalon.api.Loader):
    """Specific loader for rigs

    This automatically creates an instance for animators upon load.

    """
    label = "Reference rig"
    order = -10
    icon = "code-fork"
    color = "orange"

    hosts = ["maya"]

    families = ["reveries.rig"]

    representations = [
        "mayaBinary",
    ]

    options = [
        qargparse.Integer("count", default=1, min=1, help="Batch load count."),
        qargparse.Double3("offset", help="Offset loaded subsets."),
    ]

    def process_reference(self, context, name, namespace, group, options):

        import maya.cmds as cmds

        representation = context["representation"]

        entry_path = self.file_path(representation)

        nodes = cmds.file(entry_path,
                          namespace=namespace,
                          ignoreVersion=True,
                          reference=True,
                          returnNewNodes=True,
                          groupReference=True,
                          groupName=group)

        self[:] = nodes

    def switch(self, container, representation):
        self.update(container, representation)

    def update(self, container, representation):
        from maya import cmds
        from reveries import utils
        from reveries.maya import pipeline
        import avalon.io

        node = container["objectName"]

        # Get reference node from container
        reference_node = self.get_reference_node(container)

        with self.patch(reference_node):

            parents = avalon.io.parenthood(representation)
            self.package_path = utils.get_representation_path_(
                representation, parents)
            entry_path = self.file_path(representation)
            self.log.info("Reloading reference from: {!r}".format(entry_path))

            cmds.file(entry_path,
                      loadReference=reference_node,
                      type="mayaBinary",
                      defaultExtensions=False)

            # Add new nodes of the reference to the container
            nodes = cmds.referenceQuery(reference_node,
                                        nodes=True,
                                        dagPath=True)

            cmds.sets(nodes, forceElement=node)

            # Remove any placeHolderList attribute entries from the set that
            # are remaining from nodes being removed from the referenced file.
            # (NOTE) This ensures the reference updates correctly when node
            #   names change (e.g. shadingEngine) between versions.
            holders = (lambda N: [
                x for x in cmds.sets(N, query=True) or []
                if ".placeHolderList" in x
            ])
            cmds.sets(holders(node), remove=node)

            # Update container
            version, subset, asset, _ = parents
            pipeline.update_container(container, asset, subset, version,
                                      representation)

    @contextlib.contextmanager
    def patch(self, reference_node):
        """A patch after commit 06c2ef9 and should be removed ASAP

        Before commit 06c2ef9, published rig has no namespace on imported
        models or shaders.

        And sometimes animator may make their own change on referenced rig
        that may create extra meshes under referenced transform node, which
        Maya will not able to re-apply those edits when replacing reference
        if the original rig was published before commit 06c2ef9 and the new
        rig was published after that (because new node will have namespace).

        Hence we need this patch for the transition, and will remove this once
        we could pin config version on each project.

        """
        from maya import cmds

        referenced = cmds.referenceQuery(reference_node,
                                         nodes=True,
                                         dagPath=True)
        transforms = cmds.ls(referenced, type="transform", long=True)
        meshes = cmds.listRelatives(
            transforms, shapes=True, fullPath=True, type="mesh") or []

        # Collect meshes (by UUID) that were created in the scene

        mesh_uuids = dict()
        for mesh in meshes:
            if cmds.referenceQuery(mesh, isNodeReferenced=True):
                continue

            parent = cmds.ls(mesh.rsplit("|", 1)[0], uuid=True)[0]
            shading = cmds.ls(cmds.listConnections(mesh,
                                                   source=False,
                                                   destination=True,
                                                   connections=True,
                                                   type="shadingEngine"),
                              uuid=True)
            mesh = cmds.ls(mesh, uuid=True)[0]
            mesh_uuids[mesh] = (parent, shading)

        yield

        # Adopt new parent

        foster = cmds.listConnections(reference_node + ".fosterParent")
        if foster and mesh_uuids:
            foster = foster[0]

            shapes = cmds.listRelatives(foster, shapes=True, fullPath=True)
            shapes = cmds.ls(shapes, uuid=True)
            for uuid in shapes:
                if uuid not in mesh_uuids:
                    continue

                parent, shading = mesh_uuids[uuid]
                parent = cmds.ls(parent)
                fostered_shape = cmds.ls(uuid, long=True)

                if parent and fostered_shape:
                    # Move fostered mesh node to newly referenced transform
                    # node.
                    fostered_shape = fostered_shape[0]
                    shape = cmds.parent(fostered_shape,
                                        parent[0],
                                        addObject=True,
                                        shape=True)
                    cmds.parent(fostered_shape, removeObject=True, shape=True)

                    # Try to fix shading
                    if shading:
                        shading = iter(shading)
                        for src, dst in zip(shading, shading):
                            dst = cmds.ls(dst)
                            if not dst:
                                continue
                            src = shape[0] + "." + src.split(".", 1)[-1]
                            dst = dst[0] + ".dagSetMembers"

                            try:
                                cmds.connectAttr(src, dst, nextAvailable=True)
                            except Exception as e:
                                cmds.warning(str(e))
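
Note: the patch() context manager above survives the reference reload by
remembering nodes through their UUIDs instead of their names, since names can
change across a reload while UUIDs of surviving scene nodes do not. The
pattern in isolation, as a small sketch (the node path is hypothetical):

from maya import cmds

# Remember a node by UUID before a disruptive operation such as a reference
# reload, then resolve the UUID back to whatever the node is called now.
uuid = cmds.ls("|rig_GRP|extra_meshShape", uuid=True)[0]

# ... reference reload happens here ...

resolved = cmds.ls(uuid, long=True)
if resolved:
    print("Node survived, now named: %s" % resolved[0])
else:
    print("Node no longer exists")
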
Example #4
class ArnoldAssLoader(ImportLoader, avalon.api.Loader):

    label = "Load Arnold .ASS"
    order = -10
    icon = "coffee"
    color = "orange"

    hosts = ["maya"]

    families = [
        "reveries.standin",
    ]

    representations = [
        "Ass",
    ]

    options = [
        qargparse.Integer("count", default=1, min=1, help="Batch load count."),
        qargparse.Double3("offset", help="Offset loaded subsets."),
    ]

    def process_import(self, context, name, namespace, group, options):
        from maya import cmds
        from reveries.maya import capsule, arnold

        representation = context["representation"]
        entry_path, use_sequence = self.retrive(representation)

        with capsule.namespaced(namespace):
            standin = arnold.create_standin(entry_path)
            transform = cmds.listRelatives(standin, parent=True)[0]
            group = cmds.group(transform, name=group, world=True)

        if use_sequence:
            cmds.setAttr(standin + ".useFrameExtension", True)
            cmds.connectAttr("time1.outTime", standin + ".frameNumber")

        self[:] = [standin, transform, group]

    def retrive(self, representation):
        if "useSequence" not in representation["data"]:
            entry_path, use_sequence = self._compat(representation)
        else:
            entry_path = self.file_path(representation)
            use_sequence = representation["data"]["useSequence"]

        return entry_path, use_sequence

    def _compat(self, representation):
        """For backwards compatibility"""
        entry_path = self.file_path(representation)
        entry_dir = os.path.dirname(entry_path)
        asses = [f for f in os.listdir(os.path.expandvars(entry_dir))
                 if f.endswith(".ass")]

        entry_path = os.path.join(entry_dir, asses[0])
        use_sequence = len(asses) > 1

        return entry_path, use_sequence

    def update(self, container, representation):
        import maya.cmds as cmds
        from avalon import io
        from reveries.maya import pipeline
        from reveries.utils import get_representation_path_

        members = cmds.sets(container["objectName"], query=True)
        standins = cmds.ls(members, type="aiStandIn", long=True)

        if not standins:
            raise Exception("No Arnold Stand-In node, this is a bug.")

        parents = io.parenthood(representation)
        self.package_path = get_representation_path_(representation, parents)

        entry_path, use_sequence = self.retrive(representation)

        if not entry_path.endswith(".ass"):
            raise Exception("Not a Arnold Stand-In file, this is a bug: "
                            "%s" % entry_path)

        for standin in standins:
            # This allows all copies to be updated together
            cmds.setAttr(standin + ".dso", entry_path, type="string")
            cmds.setAttr(standin + ".useFrameExtension", use_sequence)

        # Update container
        version, subset, asset, _ = parents
        pipeline.update_container(container,
                                  asset,
                                  subset,
                                  version,
                                  representation)

    def switch(self, container, representation):
        self.update(container, representation)
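
Note: the loader drives .ass sequences by connecting scene time to the
stand-in node. The same hookup as a standalone sketch, assuming an aiStandIn
node already exists (the mtoa plug-in must be loaded) and that `path` points
at a hypothetical .ass sequence:

from maya import cmds


def point_standin_to_sequence(standin, path):
    """Point an existing aiStandIn at an .ass sequence driven by scene time."""
    cmds.setAttr(standin + ".dso", path, type="string")
    cmds.setAttr(standin + ".useFrameExtension", True)
    # Drive the frame number from scene time, unless it is already connected
    if not cmds.listConnections(standin + ".frameNumber",
                                source=True, destination=False):
        cmds.connectAttr("time1.outTime", standin + ".frameNumber")
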
Example #5
class RigLoader(ReferenceLoader, avalon.api.Loader):
    """Specific loader for rigs

    This automatically creates an instance for animators upon load.

    """
    label = "Reference rig"
    order = -10
    icon = "code-fork"
    color = "orange"

    hosts = ["maya"]

    families = ["reveries.rig"]

    representations = [
        "mayaBinary",
    ]

    options = [
        qargparse.Integer("count", default=1, min=1, help="Batch load count."),
        qargparse.Double3("offset", help="Offset loaded subsets."),
    ]

    def process_reference(self, context, name, namespace, group, options):

        import maya.cmds as cmds

        representation = context["representation"]

        entry_path = self.file_path(representation)

        nodes = cmds.file(entry_path,
                          namespace=namespace,
                          ignoreVersion=True,
                          reference=True,
                          returnNewNodes=True,
                          groupReference=True,
                          groupName=group)

        self[:] = nodes

    def switch(self, container, representation):
        self.update(container, representation)

    def update(self, container, representation):
        from maya import cmds
        from reveries import utils
        from reveries.maya import pipeline
        import avalon.io

        node = container["objectName"]

        # Get reference node from container
        reference_node = self.get_reference_node(container)

        with patch(reference_node):

            parents = avalon.io.parenthood(representation)
            self.package_path = utils.get_representation_path_(
                representation, parents)
            entry_path = self.file_path(representation)
            self.log.info("Reloading reference from: {!r}".format(entry_path))

            cmds.file(entry_path,
                      loadReference=reference_node,
                      type="mayaBinary",
                      defaultExtensions=False)

            # Add new nodes of the reference to the container
            nodes = cmds.referenceQuery(reference_node,
                                        nodes=True,
                                        dagPath=True)

            cmds.sets(nodes, forceElement=node)

            # Remove any placeHolderList attribute entries from the set that
            # are remaining from nodes being removed from the referenced file.
            # (NOTE) This ensures the reference updates correctly when node
            #   names change (e.g. shadingEngine) between versions.
            holders = (lambda N: [
                x for x in cmds.sets(N, query=True) or []
                if ".placeHolderList" in x
            ])
            cmds.sets(holders(node), remove=node)

            # Update container
            version, subset, asset, _ = parents
            pipeline.update_container(container, asset, subset, version,
                                      representation)
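
Note: both rig loader versions end their update with the same placeHolderList
cleanup. Extracted into a hypothetical standalone helper for clarity:

from maya import cmds


def remove_placeholder_members(container_node):
    """Drop stale .placeHolderList entries left after a reference swap."""
    members = cmds.sets(container_node, query=True) or []
    stale = [m for m in members if ".placeHolderList" in m]
    if stale:
        cmds.sets(stale, remove=container_node)
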
Example #6
class ReferenceLoader(api.Loader):
    """A basic ReferenceLoader for Maya

    This implements the basic behavior for loaders to inherit from: it
    containerizes the reference and implements the `remove` and `update`
    logic.

    """

    options = [
        qargparse.Integer("count",
                          label="Count",
                          default=1,
                          min=1,
                          help="How many times to load?"),
        qargparse.Double3("offset",
                          label="Position Offset",
                          help="Offset loaded models for easier selection.")
    ]

    def load(self, context, name=None, namespace=None, options=None):

        import os
        from avalon.maya import lib
        from avalon.maya.pipeline import containerise

        assert os.path.exists(self.fname), "%s does not exist." % self.fname

        asset = context['asset']
        loaded_containers = []

        options = options or {}
        count = options.get("count") or 1
        for c in range(0, count):
            namespace = namespace or lib.unique_namespace(
                asset["name"] + "_",
                prefix="_" if asset["name"][0].isdigit() else "",
                suffix="_",
            )

            # Offset loaded subset
            if "offset" in options:
                offset = [i * c for i in options["offset"]]
                options["translate"] = offset

            self.log.info(options)

            self.process_reference(context=context,
                                   name=name,
                                   namespace=namespace,
                                   options=options)

            # Only containerize if any nodes were loaded by the Loader
            nodes = self[:]
            if not nodes:
                return

            loaded_containers.append(
                containerise(name=name,
                             namespace=namespace,
                             nodes=nodes,
                             context=context,
                             loader=self.__class__.__name__))

            # Reset the namespace so the next iteration generates a new one
            namespace = None
        return loaded_containers

    def process_reference(self, context, name, namespace, options):
        """To be implemented by subclass"""
        raise NotImplementedError("Must be implemented by subclass")

    def _get_reference_node(self, members):
        """Get the reference node from the container members
        Args:
            members: list of node names

        Returns:
            str: Reference node name.

        """

        from maya import cmds

        # Collect the references without .placeHolderList[] attributes as
        # unique entries (objects only) and skipping the sharedReferenceNode.
        references = set()
        for ref in cmds.ls(members, exactType="reference", objectsOnly=True):

            # Ignore any `:sharedReferenceNode`
            if ref.rsplit(":", 1)[-1].startswith("sharedReferenceNode"):
                continue

            # Ignore _UNKNOWN_REF_NODE_ (PLN-160)
            if ref.rsplit(":", 1)[-1].startswith("_UNKNOWN_REF_NODE_"):
                continue

            references.add(ref)

        assert references, "No reference node found in container"

        # Get highest reference node (least parents)
        highest = min(references,
                      key=lambda x: len(get_reference_node_parents(x)))

        # Warn the user when we're taking the highest reference node
        if len(references) > 1:
            self.log.warning(
                "More than one reference node found in "
                "container, using highest reference node: "
                "%s (in: %s)", highest, list(references))

        return highest

    def update(self, container, representation):

        import os
        from maya import cmds

        node = container["objectName"]

        path = api.get_representation_path(representation)

        # Get reference node from container members
        members = cmds.sets(node, query=True, nodesOnly=True)
        reference_node = self._get_reference_node(members)

        file_type = {
            "ma": "mayaAscii",
            "mb": "mayaBinary",
            "abc": "Alembic"
        }.get(representation["name"])

        assert file_type, "Unsupported representation: %s" % representation

        assert os.path.exists(path), "%s does not exist." % path

        # Need to save the Alembic settings and reapply them, because
        # referencing resets them to the incoming data.
        alembic_attrs = ["speed", "offset", "cycleType"]
        alembic_data = {}
        if representation["name"] == "abc":
            alembic_nodes = cmds.ls("{}:*".format(members[0].split(":")[0]),
                                    type="AlembicNode")
            if alembic_nodes:
                for attr in alembic_attrs:
                    node_attr = "{}.{}".format(alembic_nodes[0], attr)
                    alembic_data[attr] = cmds.getAttr(node_attr)
            else:
                cmds.warning("No alembic nodes found in {}".format(
                    cmds.ls("{}:*".format(members[0].split(":")[0]))))

        try:
            content = cmds.file(path,
                                loadReference=reference_node,
                                type=file_type,
                                returnNewNodes=True)
        except RuntimeError as exc:
            # When changing a reference to a file that has load errors, the
            # command will raise an error even if the file is still loaded
            # correctly (e.g. when raising errors on Arnold attributes).
            # When the file is loaded and has content, we consider it fine.
            if not cmds.referenceQuery(reference_node, isLoaded=True):
                raise

            content = cmds.referenceQuery(reference_node,
                                          nodes=True,
                                          dagPath=True)
            if not content:
                raise

            self.log.warning("Ignoring file read error:\n%s", exc)

        # Reapply alembic settings.
        if representation["name"] == "abc":
            alembic_nodes = cmds.ls("{}:*".format(members[0].split(":")[0]),
                                    type="AlembicNode")
            if alembic_nodes:
                for attr, value in alembic_data.items():
                    cmds.setAttr("{}.{}".format(alembic_nodes[0], attr), value)

        # Fix PLN-40 for older containers created with Avalon that had the
        # `.verticesOnlySet` set to True.
        if cmds.getAttr("{}.verticesOnlySet".format(node)):
            self.log.info("Setting %s.verticesOnlySet to False", node)
            cmds.setAttr("{}.verticesOnlySet".format(node), False)

        # Add new nodes of the reference to the container
        cmds.sets(content, forceElement=node)

        # Remove any placeHolderList attribute entries from the set that
        # are remaining from nodes being removed from the referenced file.
        members = cmds.sets(node, query=True)
        invalid = [x for x in members if ".placeHolderList" in x]
        if invalid:
            cmds.sets(invalid, remove=node)

        # Update metadata
        cmds.setAttr("{}.representation".format(node),
                     str(representation["_id"]),
                     type="string")

    def remove(self, container):
        """Remove an existing `container` from Maya scene

        Deprecated; this functionality is replaced by `api.remove()`

        Arguments:
            container (openpype:container-1.0): Which container
                to remove from scene.

        """

        from maya import cmds

        node = container["objectName"]

        # Assume asset has been referenced
        members = cmds.sets(node, query=True)
        reference_node = self._get_reference_node(members)

        assert reference_node, ("Imported container not supported; "
                                "container must be referenced.")

        self.log.info("Removing '%s' from Maya.." % container["name"])

        namespace = cmds.referenceQuery(reference_node, namespace=True)
        fname = cmds.referenceQuery(reference_node, filename=True)
        cmds.file(fname, removeReference=True)

        try:
            cmds.delete(node)
        except ValueError:
            # Already implicitly deleted by Maya upon removing reference
            pass

        try:
            # In case the container is not automatically cleaned up by Maya
            # (issue #118)
            cmds.namespace(removeNamespace=namespace,
                           deleteNamespaceContent=True)
        except RuntimeError:
            pass
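
Note: this base class expects subclasses to implement process_reference() and
to expose the loaded nodes through self[:], as the earlier examples do. A
bare-bones sketch of such a subclass; the label, family and representation
names below are placeholders, not part of any real configuration:

class MySceneLoader(ReferenceLoader, avalon.api.Loader):
    """Hypothetical minimal subclass of the ReferenceLoader above."""

    label = "Reference My Scene"
    hosts = ["maya"]
    families = ["studio.myScene"]
    representations = ["mayaAscii"]

    def process_reference(self, context, name, namespace, options):
        from maya import cmds

        nodes = cmds.file(self.fname,
                          namespace=namespace,
                          reference=True,
                          returnNewNodes=True)

        # The base class containerises whatever is exposed here
        self[:] = nodes
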
Example #7
class PointCacheReferenceLoader(ReferenceLoader, avalon.api.Loader):

    label = "Reference PointCache"
    order = -10
    icon = "flash"
    color = "orange"

    hosts = ["maya"]

    families = [
        "reveries.pointcache",
        "reveries.setdress",
    ]

    representations = [
        "Alembic",
        "FBXCache",
        "GPUCache",
    ]

    options = [
        qargparse.Integer("count", default=1, min=1, help="Batch load count."),
        qargparse.Double3("offset", help="Offset loaded subsets."),
    ]

    def process_reference(self, context, name, namespace, group, options):
        import maya.cmds as cmds
        from reveries.maya import utils

        representation = context["representation"]

        entry_path = self.file_path(representation)

        nodes = cmds.file(entry_path,
                          namespace=namespace,
                          ignoreVersion=True,
                          sharedReferenceFile=False,
                          groupReference=True,
                          groupName=group,
                          reference=True,
                          lockReference=False,
                          returnNewNodes=True)

        if representation["name"] == "Alembic":
            self.unit_conversion_patch(nodes)

        # (NOTE) Nodes loaded from Alembic do not have verifiers
        utils.update_id_verifiers(nodes)

        self[:] = nodes

    def update(self, container, representation):
        from reveries.maya.plugins import ReferenceLoader
        import maya.cmds as cmds

        uuid = cmds.ls(container["objectName"], uuid=True)

        ReferenceLoader.update(self, container, representation)

        if representation["name"] == "Alembic":
            nodes = cmds.sets(cmds.ls(uuid), query=True, nodesOnly=True)
            self.unit_conversion_patch(nodes)

    def switch(self, container, representation):
        self.update(container, representation)

    def unit_conversion_patch(self, nodes):
        """
        When referencing the same Alembic file multiple times, the rotations
        misbehave on every copy except the first one after the scene is saved
        and re-opened.

        The observable cause is the unitConversion nodes connected between
        the alembic node's outputs and the transform nodes' rotation
        attributes: their `conversionFactor` is not properly restored after
        re-opening. The value should be something like
        `0.017453292519943295`, but it remains `1.0`.

        This has been a known bug for about 4 years now:
        https://gitter.im/getavalon/Lobby?at=5d36b894d61887416420bcda

        The current workaround I can think of is to trigger a reference edit
        on all newly created unitConversion nodes, letting the reference edit
        *pin* the factor value for us.

        """
        import maya.cmds as cmds

        for conversion in cmds.ls(nodes, type="unitConversion"):
            attr = conversion + ".conversionFactor"
            factor = cmds.getAttr(attr)
            cmds.setAttr(attr, 1)  # To trigger reference edit
            cmds.setAttr(attr, factor)
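
Note: after the workaround above runs, the pinned factors should show up as
setAttr reference edits. A quick way to verify that, sketched with
cmds.referenceQuery (the editCommand filter narrows the edits to setAttr):

from maya import cmds


def list_conversion_factor_edits(reference_node):
    """Return the setAttr reference edits that pin conversionFactor values."""
    edits = cmds.referenceQuery(reference_node,
                                editStrings=True,
                                editCommand="setAttr") or []
    return [edit for edit in edits if ".conversionFactor" in edit]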