Example #1
class CalculateOldVersions(DeleteOldVersions):

    label = "Calculate Old Versions"

    options = [
        qargparse.Integer("versions_to_keep",
                          default=2,
                          min=0,
                          help="Versions to keep:"),
        qargparse.Boolean("remove_publish_folder",
                          help="Remove publish folder:")
    ]

    def main(self, data, remove_publish_folder):
        size = 0

        if remove_publish_folder:
            size = self.delete_whole_dir_paths(data["dir_paths"].values(),
                                               delete=False)
        else:
            size = self.delete_only_repre_files(data["dir_paths"],
                                                data["file_paths_by_dir"],
                                                delete=False)

        msg = "Total size of files: " + self.sizeof_fmt(size)
        self.log.info(msg)
        self.message(msg)
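
A minimal, hypothetical sketch of how the declared options reach this loader: the loader tool gathers the widget values into a plain dict keyed by the qargparse argument names and passes it as `options` (compare `DeleteOldVersions.load` in Example #4, which this class inherits). The `representation_id` below is a placeholder, not part of the example above.

# Hypothetical invocation; `representation_id` stands in for a real
# representation document id from the database.
from avalon import api

api.load(CalculateOldVersions, representation_id, options={
    "versions_to_keep": 3,           # value of the qargparse.Integer
    "remove_publish_folder": False,  # value of the qargparse.Boolean
})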
Example #2
class PointCacheReferenceLoader(ReferenceLoader, avalon.api.Loader):

    label = "Reference PointCache"
    order = -10
    icon = "flash"
    color = "orange"

    hosts = ["maya"]

    families = [
        "reveries.pointcache",
        "reveries.setdress",
    ]

    representations = [
        "Alembic",
        "FBXCache",
        "GPUCache",
    ]

    options = [
        qargparse.Integer("count", default=1, min=1, help="Batch load count."),
        qargparse.Double3("offset", help="Offset loaded subsets."),
    ]

    def process_reference(self, context, name, namespace, group, options):
        import maya.cmds as cmds
        import reveries.maya.lib
        from reveries.maya import utils

        representation = context["representation"]

        entry_path = self.file_path(representation)

        nodes = cmds.file(entry_path,
                          namespace=namespace,
                          ignoreVersion=True,
                          sharedReferenceFile=False,
                          groupReference=True,
                          groupName=group,
                          reference=True,
                          lockReference=False,
                          returnNewNodes=True)

        # (NOTE) Nodes loaded from Alembic do not have verifiers
        utils.update_id_verifiers(nodes)

        reveries.maya.lib.lock_transform(group)
        self[:] = nodes

    def switch(self, container, representation):
        self.update(container, representation)
Example #3
class ModelLoader(ReferenceLoader, avalon.api.Loader):
    """Load the model"""

    label = "Reference Model"
    order = -10
    icon = "code-fork"
    color = "orange"

    hosts = ["maya"]

    families = ["reveries.model"]

    representations = [
        "mayaBinary",
    ]

    options = [
        qargparse.Integer("count", default=1, min=1, help="Batch load count."),
        qargparse.Double3("offset", help="Offset loaded subsets."),
    ]

    def process_reference(self, context, name, namespace, group, options):

        import maya.cmds as cmds
        from avalon import maya

        representation = context["representation"]

        entry_path = self.file_path(representation)

        with maya.maintained_selection():
            nodes = cmds.file(entry_path,
                              namespace=namespace,
                              ignoreVersion=True,
                              reference=True,
                              returnNewNodes=True,
                              groupReference=True,
                              groupName=group)
        self[:] = nodes

    def switch(self, container, representation):
        self.update(container, representation)
Example #4
class DeleteOldVersions(api.Loader):

    representations = ["*"]
    families = ["*"]

    label = "Delete Old Versions"
    icon = "trash"
    color = "#d8d8d8"

    options = [
        qargparse.Integer("versions_to_keep",
                          default=2,
                          min=0,
                          help="Versions to keep:"),
        qargparse.Boolean("remove_publish_folder",
                          help="Remove publish folder:")
    ]

    def sizeof_fmt(self, num, suffix='B'):
        for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
            if abs(num) < 1024.0:
                return "%3.1f%s%s" % (num, unit, suffix)
            num /= 1024.0
        return "%.1f%s%s" % (num, 'Yi', suffix)

    def delete_whole_dir_paths(self, dir_paths, delete=True):
        size = 0

        for dir_path in dir_paths:
            # Delete all files and folders in the dir path (topdown=False
            # walks children before parents, so directories are already
            # empty by the time they are removed)
            for root, dirs, files in os.walk(dir_path, topdown=False):
                for name in files:
                    file_path = os.path.join(root, name)
                    size += os.path.getsize(file_path)
                    if delete:
                        os.remove(file_path)
                        self.log.debug("Removed file: {}".format(file_path))

                for name in dirs:
                    if delete:
                        os.rmdir(os.path.join(root, name))

            if not delete:
                continue

            # Delete the folder itself and its parent folders if they are empty
            while True:
                if not os.path.exists(dir_path):
                    dir_path = os.path.dirname(dir_path)
                    continue

                if len(os.listdir(dir_path)) != 0:
                    break

                os.rmdir(dir_path)

        return size

    def path_from_representation(self, representation, anatomy):
        try:
            template = representation["data"]["template"]

        except KeyError:
            return (None, None)

        sequence_path = None
        try:
            context = representation["context"]
            context["root"] = anatomy.roots
            path = avalon.pipeline.format_template_with_optional_keys(
                context, template)
            if "frame" in context:
                context["frame"] = self.sequence_splitter
                sequence_path = os.path.normpath(
                    avalon.pipeline.format_template_with_optional_keys(
                        context, template))

        except KeyError:
            # Template references unavailable data
            return (None, None)

        return (os.path.normpath(path), sequence_path)
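
    # Illustrative: the stored context fills a template such as
    # "{root}/{project}/{asset}/.../{representation}/{file}" into a concrete
    # publish path; when the context carries a "frame" key, a second path is
    # built with the frame token replaced by `self.sequence_splitter` so
    # whole file sequences can be matched later.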

    def delete_only_repre_files(self, dir_paths, file_paths, delete=True):
        size = 0

        for dir_id, dir_path in dir_paths.items():
            dir_files = os.listdir(dir_path)
            collections, remainders = clique.assemble(dir_files)
            for file_path, seq_path in file_paths[dir_id]:
                file_path_base = os.path.split(file_path)[1]
                # Just remove the file if the `frame` key was not in the
                # context or if the filled path is in remainders (a single
                # file sequence)
                if not seq_path or file_path_base in remainders:
                    if not os.path.exists(file_path):
                        self.log.debug(
                            "File was not found: {}".format(file_path))
                        continue

                    size += os.path.getsize(file_path)

                    if delete:
                        os.remove(file_path)
                        self.log.debug("Removed file: {}".format(file_path))

                    remainders.remove(file_path_base)
                    continue

                seq_path_base = os.path.split(seq_path)[1]
                head, tail = seq_path_base.split(self.sequence_splitter)

                final_col = None
                for collection in collections:
                    if head != collection.head or tail != collection.tail:
                        continue
                    final_col = collection
                    break

                if final_col is not None:
                    # Fill full path to head
                    final_col.head = os.path.join(dir_path, final_col.head)
                    for _file_path in final_col:
                        if os.path.exists(_file_path):

                            size += os.path.getsize(_file_path)

                            if delete:
                                os.remove(_file_path)
                                self.log.debug(
                                    "Removed file: {}".format(_file_path))

                    if delete:
                        _seq_path = final_col.format("{head}{padding}{tail}")
                        self.log.debug(
                            "Removed files: {}".format(_seq_path))
                    collections.remove(final_col)

                elif os.path.exists(file_path):
                    size += os.path.getsize(file_path)

                    if delete:
                        os.remove(file_path)
                        self.log.debug("Removed file: {}".format(file_path))
                else:
                    self.log.debug("File was not found: {}".format(file_path))

        # Delete as many parent folders as possible
        if not delete:
            return size

        for dir_path in dir_paths.values():
            while True:
                if not os.path.exists(dir_path):
                    dir_path = os.path.dirname(dir_path)
                    continue

                if len(os.listdir(dir_path)) != 0:
                    break

                self.log.debug("Removed folder: {}".format(dir_path))
                os.rmdir(dir_path)

        return size

    def message(self, text):
        msgBox = QtWidgets.QMessageBox()
        msgBox.setText(text)
        msgBox.setStyleSheet(style.load_stylesheet())
        msgBox.setWindowFlags(msgBox.windowFlags()
                              | QtCore.Qt.FramelessWindowHint)
        msgBox.exec_()

    def get_data(self, context, versions_count):
        subset = context["subset"]
        asset = context["asset"]
        anatomy = Anatomy(context["project"]["name"])

        self.dbcon = AvalonMongoDB()
        self.dbcon.Session["AVALON_PROJECT"] = context["project"]["name"]
        self.dbcon.install()

        versions = list(
            self.dbcon.find({
                "type": "version",
                "parent": {
                    "$in": [subset["_id"]]
                }
            }))

        versions_by_parent = collections.defaultdict(list)
        for ent in versions:
            versions_by_parent[ent["parent"]].append(ent)

        def sort_func(ent):
            return int(ent["name"])

        all_last_versions = []
        for _parent_id, _versions in versions_by_parent.items():
            for idx, version in enumerate(
                    sorted(_versions, key=sort_func, reverse=True)):
                if idx >= versions_count:
                    break
                all_last_versions.append(version)

        self.log.debug("Collected versions ({})".format(len(versions)))

        # Filter out the latest versions (those are kept, not deleted)
        for version in all_last_versions:
            versions.remove(version)

        # Update versions_by_parent without filtered versions
        versions_by_parent = collections.defaultdict(list)
        for ent in versions:
            versions_by_parent[ent["parent"]].append(ent)

        # Filter already deleted versions
        versions_to_pop = []
        for version in versions:
            version_tags = version["data"].get("tags")
            if version_tags and "deleted" in version_tags:
                versions_to_pop.append(version)

        for version in versions_to_pop:
            msg = "Asset: \"{}\" | Subset: \"{}\" | Version: \"{}\"".format(
                asset["name"], subset["name"], version["name"])
            self.log.debug(
                ("Skipping version. Already tagged as `deleted`. < {} >"
                 ).format(msg))
            versions.remove(version)

        version_ids = [ent["_id"] for ent in versions]

        self.log.debug("Filtered versions to delete ({})".format(
            len(version_ids)))

        if not version_ids:
            msg = "Skipping processing. Nothing to delete."
            self.log.info(msg)
            self.message(msg)
            return

        repres = list(
            self.dbcon.find({
                "type": "representation",
                "parent": {
                    "$in": version_ids
                }
            }))

        self.log.debug("Collected representations to remove ({})".format(
            len(repres)))

        dir_paths = {}
        file_paths_by_dir = collections.defaultdict(list)
        for repre in repres:
            file_path, seq_path = self.path_from_representation(repre, anatomy)
            if file_path is None:
                self.log.debug(
                    ("Could not format path for represenation \"{}\"").format(
                        str(repre)))
                continue

            dir_path = os.path.dirname(file_path)
            dir_id = None
            for _dir_id, _dir_path in dir_paths.items():
                if _dir_path == dir_path:
                    dir_id = _dir_id
                    break

            if dir_id is None:
                dir_id = uuid.uuid4()
                dir_paths[dir_id] = dir_path

            file_paths_by_dir[dir_id].append([file_path, seq_path])

        dir_ids_to_pop = []
        for dir_id, dir_path in dir_paths.items():
            if os.path.exists(dir_path):
                continue

            dir_ids_to_pop.append(dir_id)

        # Pop dirs from both dictionaries
        for dir_id in dir_ids_to_pop:
            dir_paths.pop(dir_id)
            paths = file_paths_by_dir.pop(dir_id)
            # TODO report of missing directories?
            paths_msg = ", ".join(
                ["'{}'".format(path[0].replace("\\", "/")) for path in paths])
            self.log.debug(
                ("Folder does not exist. Deleting it's files skipped: {}"
                 ).format(paths_msg))

        data = {
            "dir_paths": dir_paths,
            "file_paths_by_dir": file_paths_by_dir,
            "versions": versions,
            "asset": asset,
            "subset": subset,
            "archive_subset": versions_count == 0
        }

        return data

    def main(self, data, remove_publish_folder):
        # Size of files.
        size = 0

        if remove_publish_folder:
            size = self.delete_whole_dir_paths(data["dir_paths"].values())
        else:
            size = self.delete_only_repre_files(data["dir_paths"],
                                                data["file_paths_by_dir"])

        mongo_changes_bulk = []
        for version in data["versions"]:
            orig_version_tags = version["data"].get("tags") or []
            version_tags = [tag for tag in orig_version_tags]
            if "deleted" not in version_tags:
                version_tags.append("deleted")

            if version_tags == orig_version_tags:
                continue

            update_query = {"_id": version["_id"]}
            update_data = {"$set": {"data.tags": version_tags}}
            mongo_changes_bulk.append(UpdateOne(update_query, update_data))

        if data["archive_subset"]:
            mongo_changes_bulk.append(
                UpdateOne({
                    "_id": data["subset"]["_id"],
                    "type": "subset"
                }, {"$set": {
                    "type": "archived_subset"
                }}))

        if mongo_changes_bulk:
            self.dbcon.bulk_write(mongo_changes_bulk)

        self.dbcon.uninstall()

        # Set attribute `is_published` to `False` on ftrack AssetVersions
        session = ftrack_api.Session()
        query = ("AssetVersion where asset.parent.id is \"{}\""
                 " and asset.name is \"{}\""
                 " and version is \"{}\"")
        for v in data["versions"]:
            try:
                ftrack_version = session.query(
                    query.format(data["asset"]["data"]["ftrackId"],
                                 data["subset"]["name"], v["name"])).one()
            except ftrack_api.exception.NoResultFoundError:
                continue

            ftrack_version["is_published"] = False

        try:
            session.commit()

        except Exception:
            msg = ("Could not set `is_published` attribute to `False`"
                   " for selected AssetVersions.")
            self.log.error(msg)
            self.message(msg)

        msg = "Total size of files: " + self.sizeof_fmt(size)
        self.log.info(msg)
        self.message(msg)

    def load(self, context, name=None, namespace=None, options=None):
        try:
            versions_to_keep = 2
            remove_publish_folder = False
            if options:
                versions_to_keep = options.get("versions_to_keep",
                                               versions_to_keep)
                remove_publish_folder = options.get("remove_publish_folder",
                                                    remove_publish_folder)

            data = self.get_data(context, versions_to_keep)
            if not data:
                return

            self.main(data, remove_publish_folder)

        except Exception:
            self.log.error("Failed to delete versions.", exc_info=True)
Example #5
class RigLoader(ReferenceLoader, avalon.api.Loader):
    """Specific loader for rigs

    This automatically creates an instance for animators upon load.

    """
    label = "Reference rig"
    order = -10
    icon = "code-fork"
    color = "orange"

    hosts = ["maya"]

    families = ["reveries.rig"]

    representations = [
        "mayaBinary",
    ]

    options = [
        qargparse.Integer("count", default=1, min=1, help="Batch load count."),
        qargparse.Double3("offset", help="Offset loaded subsets."),
    ]

    def process_reference(self, context, name, namespace, group, options):

        import maya.cmds as cmds

        representation = context["representation"]

        entry_path = self.file_path(representation)

        nodes = cmds.file(entry_path,
                          namespace=namespace,
                          ignoreVersion=True,
                          reference=True,
                          returnNewNodes=True,
                          groupReference=True,
                          groupName=group)

        self[:] = nodes

    def switch(self, container, representation):
        self.update(container, representation)

    def update(self, container, representation):
        from maya import cmds
        from reveries import utils
        from reveries.maya import pipeline
        import avalon.io

        node = container["objectName"]

        # Get reference node from container
        reference_node = self.get_reference_node(container)

        with self.patch(reference_node):

            parents = avalon.io.parenthood(representation)
            self.package_path = utils.get_representation_path_(
                representation, parents)
            entry_path = self.file_path(representation)
            self.log.info("Reloading reference from: {!r}".format(entry_path))

            cmds.file(entry_path,
                      loadReference=reference_node,
                      type="mayaBinary",
                      defaultExtensions=False)

            # Add new nodes of the reference to the container
            nodes = cmds.referenceQuery(reference_node,
                                        nodes=True,
                                        dagPath=True)

            cmds.sets(nodes, forceElement=node)

            # Remove any placeHolderList attribute entries from the set that
            # are remaining from nodes being removed from the referenced file.
            # (NOTE) This ensures the reference updates correctly when node
            #   names changed (e.g. shadingEngine) between versions.
            holders = (lambda N: [
                x for x in cmds.sets(N, query=True) or []
                if ".placeHolderList" in x
            ])
            cmds.sets(holders(node), remove=node)

            # Update container
            version, subset, asset, _ = parents
            pipeline.update_container(container, asset, subset, version,
                                      representation)

    @contextlib.contextmanager
    def patch(self, reference_node):
        """A patch after commit 06c2ef9 and should be removed ASAP

        Before commit 06c2ef9, published rig has no namespace on imported
        models or shaders.

        And sometimes animator may make their own change on referenced rig
        that may create extra meshes under referenced transform node, which
        Maya will not able to re-apply those edits when replacing reference
        if the original rig was published before commit 06c2ef9 and the new
        rig was published after that (because new node will have namespace).

        Hence we need this patch for the transition, and will remove this once
        we could pin config version on each project.

        """
        from maya import cmds

        referenced = cmds.referenceQuery(reference_node,
                                         nodes=True,
                                         dagPath=True)
        transforms = cmds.ls(referenced, type="transform", long=True)
        meshes = cmds.listRelatives(
            transforms, shapes=True, fullPath=True, type="mesh") or []

        # Collect meshes (by UUID) that were created in the scene

        mesh_uuids = dict()
        for mesh in meshes:
            if cmds.referenceQuery(mesh, isNodeReferenced=True):
                continue

            parent = cmds.ls(mesh.rsplit("|", 1)[0], uuid=True)[0]
            shading = cmds.ls(cmds.listConnections(mesh,
                                                   source=False,
                                                   destination=True,
                                                   connections=True,
                                                   type="shadingEngine"),
                              uuid=True)
            mesh = cmds.ls(mesh, uuid=True)[0]
            mesh_uuids[mesh] = (parent, shading)

        yield

        # Adopt new parent

        foster = cmds.listConnections(reference_node + ".fosterParent")
        if foster and mesh_uuids:
            foster = foster[0]

            shapes = cmds.listRelatives(foster, shapes=True, fullPath=True)
            shapes = cmds.ls(shapes, uuid=True)
            for uuid in shapes:
                if uuid not in mesh_uuids:
                    continue

                parent, shading = mesh_uuids[uuid]
                parent = cmds.ls(parent)
                fostered_shape = cmds.ls(uuid, long=True)

                if parent and fostered_shape:
                    # Move fostered mesh node to newly referenced transform
                    # node.
                    fostered_shape = fostered_shape[0]
                    shape = cmds.parent(fostered_shape,
                                        parent[0],
                                        addObject=True,
                                        shape=True)
                    cmds.parent(fostered_shape, removeObject=True, shape=True)

                    # Try to fix shading
                    if shading:
                        shading = iter(shading)
                        for src, dst in zip(shading, shading):
                            dst = cmds.ls(dst)
                            if not dst:
                                continue
                            src = shape[0] + "." + src.split(".", 1)[-1]
                            dst = dst[0] + ".dagSetMembers"

                            try:
                                cmds.connectAttr(src, dst, nextAvailable=True)
                            except Exception as e:
                                cmds.warning(str(e))
Example #6
class ArnoldAssLoader(ImportLoader, avalon.api.Loader):

    label = "Load Arnold .ASS"
    order = -10
    icon = "coffee"
    color = "orange"

    hosts = ["maya"]

    families = [
        "reveries.standin",
    ]

    representations = [
        "Ass",
    ]

    options = [
        qargparse.Integer("count", default=1, min=1, help="Batch load count."),
        qargparse.Double3("offset", help="Offset loaded subsets."),
    ]

    def process_import(self, context, name, namespace, group, options):
        from maya import cmds
        from reveries.maya import capsule, arnold

        representation = context["representation"]
        entry_path, use_sequence = self.retrive(representation)

        with capsule.namespaced(namespace):
            standin = arnold.create_standin(entry_path)
            transform = cmds.listRelatives(standin, parent=True)[0]
            group = cmds.group(transform, name=group, world=True)

        if use_sequence:
            cmds.setAttr(standin + ".useFrameExtension", True)
            cmds.connectAttr("time1.outTime", standin + ".frameNumber")

        self[:] = [standin, transform, group]

    def retrive(self, representation):
        if "useSequence" not in representation["data"]:
            entry_path, use_sequence = self._compat(representation)
        else:
            entry_path = self.file_path(representation)
            use_sequence = representation["data"]["useSequence"]

        return entry_path, use_sequence

    def _compat(self, representation):
        """For backwards compatibility"""
        entry_path = self.file_path(representation)
        entry_dir = os.path.dirname(entry_path)
        asses = [f for f in os.listdir(os.path.expandvars(entry_dir))
                 if f.endswith(".ass")]

        entry_path = os.path.join(entry_dir, asses[0])
        use_sequence = len(asses) > 1

        return entry_path, use_sequence

    def update(self, container, representation):
        import maya.cmds as cmds
        from avalon import io
        from reveries.maya import pipeline
        from reveries.utils import get_representation_path_

        members = cmds.sets(container["objectName"], query=True)
        standins = cmds.ls(members, type="aiStandIn", long=True)

        if not standins:
            raise Exception("No Arnold Stand-In node, this is a bug.")

        parents = io.parenthood(representation)
        self.package_path = get_representation_path_(representation, parents)

        entry_path, use_sequence = self.retrive(representation)

        if not entry_path.endswith(".ass"):
            raise Exception("Not a Arnold Stand-In file, this is a bug: "
                            "%s" % entry_path)

        for standin in standins:
            # This allows all copies to get updated together
            cmds.setAttr(standin + ".dso", entry_path, type="string")
            cmds.setAttr(standin + ".useFrameExtension", use_sequence)

        # Update container
        version, subset, asset, _ = parents
        pipeline.update_container(container,
                                  asset,
                                  subset,
                                  version,
                                  representation)

    def switch(self, container, representation):
        self.update(container, representation)
Example #7
class LoadImage(api.Loader):
    """Load still image into Nuke"""

    families = ["render", "source", "plate", "review", "image"]
    representations = ["exr", "dpx", "jpg", "jpeg", "png", "psd"]

    label = "Load Image"
    order = -10
    icon = "image"
    color = "white"

    node_name_template = "{class_name}_{ext}"

    options = [
        qargparse.Integer("frame_number",
                          label="Frame Number",
                          default=int(nuke.root()["first_frame"].getValue()),
                          min=1,
                          max=999999,
                          help="What frame is reading from?")
    ]

    def load(self, context, name, namespace, options):
        from avalon.nuke import (containerise, viewer_update_and_undo_stop)
        self.log.info("__ options: `{}`".format(options))
        frame_number = options.get("frame_number", 1)

        version = context['version']
        version_data = version.get("data", {})
        repr_id = context["representation"]["_id"]

        self.log.info("version_data: {}\n".format(version_data))
        self.log.debug("Representation id `{}` ".format(repr_id))

        last = first = int(frame_number)

        # Fallback to asset name when namespace is None
        if namespace is None:
            namespace = context['asset']['name']

        file = self.fname

        if not file:
            repr_id = context["representation"]["_id"]
            self.log.warning(
                "Representation id `{}` is failing to load".format(repr_id))
            return

        file = file.replace("\\", "/")

        repr_cont = context["representation"]["context"]
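        # Swap the frame token stored in the representation context
        # (e.g. "0001") for the requested frame number, zero-padded to
        # the same width.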
        frame = repr_cont.get("frame")
        if frame:
            padding = len(frame)
            file = file.replace(frame,
                                format(frame_number, "0{}".format(padding)))

        name_data = {
            "asset": repr_cont["asset"],
            "subset": repr_cont["subset"],
            "representation": context["representation"]["name"],
            "ext": repr_cont["representation"],
            "id": context["representation"]["_id"],
            "class_name": self.__class__.__name__
        }

        read_name = self.node_name_template.format(**name_data)
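        # With the default template "{class_name}_{ext}", read_name is
        # e.g. "LoadImage_exr".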

        # Create the Read node with the file path set
        with viewer_update_and_undo_stop():
            r = nuke.createNode("Read", "name {}".format(read_name))
            r["file"].setValue(file)

            # Set colorspace defined in version data
            colorspace = context["version"]["data"].get("colorspace")
            if colorspace:
                r["colorspace"].setValue(str(colorspace))

            preset_clrsp = get_imageio_input_colorspace(file)

            if preset_clrsp is not None:
                r["colorspace"].setValue(preset_clrsp)

            r["origfirst"].setValue(first)
            r["first"].setValue(first)
            r["origlast"].setValue(last)
            r["last"].setValue(last)

            # Add additional metadata from the version to imprint to the
            # Avalon knob
            add_keys = ["source", "colorspace", "author", "fps", "version"]

            data_imprint = {"frameStart": first, "frameEnd": last}
            for k in add_keys:
                if k == 'version':
                    data_imprint.update({k: context["version"]['name']})
                else:
                    data_imprint.update(
                        {k: context["version"]['data'].get(k, str(None))})

            data_imprint.update({"objectName": read_name})

            r["tile_color"].setValue(int("0x4ecd25ff", 16))

            return containerise(r,
                                name=name,
                                namespace=namespace,
                                context=context,
                                loader=self.__class__.__name__,
                                data=data_imprint)

    def switch(self, container, representation):
        self.update(container, representation)

    def update(self, container, representation):
        """Update the Loader's path

        Nuke automatically tries to reset some variables when changing
        the loader's path to a new file. These automatic changes are to its
        inputs:

        """

        from avalon.nuke import (update_container)

        node = nuke.toNode(container["objectName"])
        frame_number = node["first"].value()

        assert node.Class() == "Read", "Must be Read"

        repr_cont = representation["context"]

        file = api.get_representation_path(representation)

        if not file:
            repr_id = representation["_id"]
            self.log.warning(
                "Representation id `{}` is failing to load".format(repr_id))
            return

        file = file.replace("\\", "/")

        frame = repr_cont.get("frame")
        if frame:
            padding = len(frame)
            file = file.replace(frame,
                                format(frame_number, "0{}".format(padding)))

        # Get the version document of this representation
        version = io.find_one({
            "type": "version",
            "_id": representation["parent"]
        })

        # Get all version names of this subset
        versions = io.find({
            "type": "version",
            "parent": version["parent"]
        }).distinct('name')

        max_version = max(versions)

        version_data = version.get("data", {})

        last = first = int(frame_number)

        # Set the file path and pin the frame range to the chosen frame
        node["file"].setValue(file)
        node["origfirst"].setValue(first)
        node["first"].setValue(first)
        node["origlast"].setValue(last)
        node["last"].setValue(last)

        updated_dict = {}
        updated_dict.update({
            "representation": str(representation["_id"]),
            "frameStart": str(first),
            "frameEnd": str(last),
            "version": str(version.get("name")),
            "colorspace": version_data.get("colorspace"),
            "source": version_data.get("source"),
            "fps": str(version_data.get("fps")),
            "author": version_data.get("author"),
            "outputDir": version_data.get("outputDir"),
        })

        # Change the node color: orange-red when outdated, green when latest
        if version.get("name") not in [max_version]:
            node["tile_color"].setValue(int("0xd84f20ff", 16))
        else:
            node["tile_color"].setValue(int("0x4ecd25ff", 16))

        # Update the imprinted representation
        update_container(node, updated_dict)
        self.log.info("udated to version: {}".format(version.get("name")))

    def remove(self, container):

        from avalon.nuke import viewer_update_and_undo_stop

        node = nuke.toNode(container['objectName'])
        assert node.Class() == "Read", "Must be Read"

        with viewer_update_and_undo_stop():
            nuke.delete(node)
Example #8
class RigLoader(ReferenceLoader, avalon.api.Loader):
    """Specific loader for rigs

    This automatically creates an instance for animators upon load.

    """
    label = "Reference rig"
    order = -10
    icon = "code-fork"
    color = "orange"

    hosts = ["maya"]

    families = ["reveries.rig"]

    representations = [
        "mayaBinary",
    ]

    options = [
        qargparse.Integer("count", default=1, min=1, help="Batch load count."),
        qargparse.Double3("offset", help="Offset loaded subsets."),
    ]

    def process_reference(self, context, name, namespace, group, options):

        import maya.cmds as cmds

        representation = context["representation"]

        entry_path = self.file_path(representation)

        nodes = cmds.file(entry_path,
                          namespace=namespace,
                          ignoreVersion=True,
                          reference=True,
                          returnNewNodes=True,
                          groupReference=True,
                          groupName=group)

        self[:] = nodes

    def switch(self, container, representation):
        self.update(container, representation)

    def update(self, container, representation):
        from maya import cmds
        from reveries import utils
        from reveries.maya import pipeline
        import avalon.io

        node = container["objectName"]

        # Get reference node from container
        reference_node = self.get_reference_node(container)

        with self.patch(reference_node):

            parents = avalon.io.parenthood(representation)
            self.package_path = utils.get_representation_path_(
                representation, parents)
            entry_path = self.file_path(representation)
            self.log.info("Reloading reference from: {!r}".format(entry_path))

            cmds.file(entry_path,
                      loadReference=reference_node,
                      type="mayaBinary",
                      defaultExtensions=False)

            # Add new nodes of the reference to the container
            nodes = cmds.referenceQuery(reference_node,
                                        nodes=True,
                                        dagPath=True)

            cmds.sets(nodes, forceElement=node)

            # Remove any placeHolderList attribute entries from the set that
            # are remaining from nodes being removed from the referenced file.
            # (NOTE) This ensures the reference updates correctly when node
            #   names changed (e.g. shadingEngine) between versions.
            holders = (lambda N: [
                x for x in cmds.sets(N, query=True) or []
                if ".placeHolderList" in x
            ])
            cmds.sets(holders(node), remove=node)

            # Update container
            version, subset, asset, _ = parents
            pipeline.update_container(container, asset, subset, version,
                                      representation)
Example #9
class ReferenceLoader(api.Loader):
    """A basic ReferenceLoader for Maya

    This will implement the basic behavior for a loader to inherit from that
    will containerize the reference and will implement the `remove` and
    `update` logic.

    """

    options = [
        qargparse.Integer("count",
                          label="Count",
                          default=1,
                          min=1,
                          help="How many times to load?"),
        qargparse.Double3("offset",
                          label="Position Offset",
                          help="Offset loaded models for easier selection.")
    ]

    def load(self, context, name=None, namespace=None, options=None):

        import os
        from avalon.maya import lib
        from avalon.maya.pipeline import containerise

        assert os.path.exists(self.fname), "%s does not exist." % self.fname

        asset = context['asset']
        loaded_containers = []

        options = options or {}
        count = options.get("count") or 1
        for c in range(0, count):
            namespace = namespace or lib.unique_namespace(
                asset["name"] + "_",
                prefix="_" if asset["name"][0].isdigit() else "",
                suffix="_",
            )

            # Offset loaded subset
            if "offset" in options:
                offset = [i * c for i in options["offset"]]
                options["translate"] = offset

            self.log.info(options)

            self.process_reference(context=context,
                                   name=name,
                                   namespace=namespace,
                                   options=options)

            # Only containerize if any nodes were loaded by the Loader
            nodes = self[:]
            if not nodes:
                return

            loaded_containers.append(
                containerise(name=name,
                             namespace=namespace,
                             nodes=nodes,
                             context=context,
                             loader=self.__class__.__name__))

            namespace = None
        return loaded_containers

    def process_reference(self, context, name, namespace, options):
        """To be implemented by subclass"""
        raise NotImplementedError("Must be implemented by subclass")

    def _get_reference_node(self, members):
        """Get the reference node from the container members
        Args:
            members: list of node names

        Returns:
            str: Reference node name.

        """

        from maya import cmds

        # Collect the references without .placeHolderList[] attributes as
        # unique entries (objects only) and skipping the sharedReferenceNode.
        references = set()
        for ref in cmds.ls(members, exactType="reference", objectsOnly=True):

            # Ignore any `:sharedReferenceNode`
            if ref.rsplit(":", 1)[-1].startswith("sharedReferenceNode"):
                continue

            # Ignore _UNKNOWN_REF_NODE_ (PLN-160)
            if ref.rsplit(":", 1)[-1].startswith("_UNKNOWN_REF_NODE_"):
                continue

            references.add(ref)

        assert references, "No reference node found in container"

        # Get highest reference node (least parents)
        highest = min(references,
                      key=lambda x: len(get_reference_node_parents(x)))

        # Warn the user when we're taking the highest reference node
        if len(references) > 1:
            self.log.warning(
                "More than one reference node found in "
                "container, using highest reference node: "
                "%s (in: %s)", highest, list(references))

        return highest

    def update(self, container, representation):

        import os
        from maya import cmds

        node = container["objectName"]

        path = api.get_representation_path(representation)

        # Get reference node from container members
        members = cmds.sets(node, query=True, nodesOnly=True)
        reference_node = self._get_reference_node(members)

        file_type = {
            "ma": "mayaAscii",
            "mb": "mayaBinary",
            "abc": "Alembic"
        }.get(representation["name"])

        assert file_type, "Unsupported representation: %s" % representation

        assert os.path.exists(path), "%s does not exist." % path

        # Need to save alembic settings and reapply, because referencing
        # resets them to the incoming data.
        alembic_attrs = ["speed", "offset", "cycleType"]
        alembic_data = {}
        if representation["name"] == "abc":
            alembic_nodes = cmds.ls("{}:*".format(members[0].split(":")[0]),
                                    type="AlembicNode")
            if alembic_nodes:
                for attr in alembic_attrs:
                    node_attr = "{}.{}".format(alembic_nodes[0], attr)
                    alembic_data[attr] = cmds.getAttr(node_attr)
            else:
                cmds.warning("No alembic nodes found in {}".format(
                    cmds.ls("{}:*".format(members[0].split(":")[0]))))

        try:
            content = cmds.file(path,
                                loadReference=reference_node,
                                type=file_type,
                                returnNewNodes=True)
        except RuntimeError as exc:
            # When changing a reference to a file that has load errors the
            # command will raise an error even if the file is still loaded
            # correctly (e.g. when raising errors on Arnold attributes).
            # When the file is loaded and has content, we consider it fine.
            if not cmds.referenceQuery(reference_node, isLoaded=True):
                raise

            content = cmds.referenceQuery(reference_node,
                                          nodes=True,
                                          dagPath=True)
            if not content:
                raise

            self.log.warning("Ignoring file read error:\n%s", exc)

        # Reapply alembic settings.
        if representation["name"] == "abc":
            alembic_nodes = cmds.ls("{}:*".format(members[0].split(":")[0]),
                                    type="AlembicNode")
            if alembic_nodes:
                for attr, value in alembic_data.items():
                    cmds.setAttr("{}.{}".format(alembic_nodes[0], attr), value)

        # Fix PLN-40 for older containers created with Avalon that had the
        # `.verticesOnlySet` set to True.
        if cmds.getAttr("{}.verticesOnlySet".format(node)):
            self.log.info("Setting %s.verticesOnlySet to False", node)
            cmds.setAttr("{}.verticesOnlySet".format(node), False)

        # Add new nodes of the reference to the container
        cmds.sets(content, forceElement=node)

        # Remove any placeHolderList attribute entries from the set that
        # are remaining from nodes being removed from the referenced file.
        members = cmds.sets(node, query=True)
        invalid = [x for x in members if ".placeHolderList" in x]
        if invalid:
            cmds.sets(invalid, remove=node)

        # Update metadata
        cmds.setAttr("{}.representation".format(node),
                     str(representation["_id"]),
                     type="string")

    def remove(self, container):
        """Remove an existing `container` from Maya scene

        Deprecated; this functionality is replaced by `api.remove()`

        Arguments:
            container (openpype:container-1.0): Which container
                to remove from scene.

        """

        from maya import cmds

        node = container["objectName"]

        # Assume asset has been referenced
        members = cmds.sets(node, query=True)
        reference_node = self._get_reference_node(members)

        assert reference_node, ("Imported container not supported; "
                                "container must be referenced.")

        self.log.info("Removing '%s' from Maya.." % container["name"])

        namespace = cmds.referenceQuery(reference_node, namespace=True)
        fname = cmds.referenceQuery(reference_node, filename=True)
        cmds.file(fname, removeReference=True)

        try:
            cmds.delete(node)
        except ValueError:
            # Already implicitly deleted by Maya upon removing reference
            pass

        try:
            # If container is not automatically cleaned up by Maya (issue #118)
            cmds.namespace(removeNamespace=namespace,
                           deleteNamespaceContent=True)
        except RuntimeError:
            pass
Example #10
class PointCacheReferenceLoader(ReferenceLoader, avalon.api.Loader):

    label = "Reference PointCache"
    order = -10
    icon = "flash"
    color = "orange"

    hosts = ["maya"]

    families = [
        "reveries.pointcache",
        "reveries.setdress",
    ]

    representations = [
        "Alembic",
        "FBXCache",
        "GPUCache",
    ]

    options = [
        qargparse.Integer("count", default=1, min=1, help="Batch load count."),
        qargparse.Double3("offset", help="Offset loaded subsets."),
    ]

    def process_reference(self, context, name, namespace, group, options):
        import maya.cmds as cmds
        from reveries.maya import utils

        representation = context["representation"]

        entry_path = self.file_path(representation)

        nodes = cmds.file(entry_path,
                          namespace=namespace,
                          ignoreVersion=True,
                          sharedReferenceFile=False,
                          groupReference=True,
                          groupName=group,
                          reference=True,
                          lockReference=False,
                          returnNewNodes=True)

        if representation["name"] == "Alembic":
            self.unit_conversion_patch(nodes)

        # (NOTE) Nodes loaded from Alembic do not have verifiers
        utils.update_id_verifiers(nodes)

        self[:] = nodes

    def update(self, container, representation):
        from reveries.maya.plugins import ReferenceLoader
        import maya.cmds as cmds

        uuid = cmds.ls(container["objectName"], uuid=True)

        ReferenceLoader.update(self, container, representation)

        if representation["name"] == "Alembic":
            nodes = cmds.sets(cmds.ls(uuid), query=True, nodesOnly=True)
            self.unit_conversion_patch(nodes)

    def switch(self, container, representation):
        self.update(container, representation)

    def unit_conversion_patch(self, nodes):
        """
        When referencing same Alembic file multiple times, the rotation
        misbehave except the first one, after scene saved and re-open.

        The observable cause was the unitConversion nodes which being
        connected in between alembic node's output and transform node's
        rotation, their `conversionFactor` did not properly configured
        after re-open. The value should be like `0.017453292519943295`
        but remain `1.0`.

        It's a known bug for about 4 years from now:
        https://gitter.im/getavalon/Lobby?at=5d36b894d61887416420bcda

        Current workaround that I can think of is to trigger reference
        edit on all newly created unitConversion nodes, let reference
        edit *pin* the factor value for us.

        """
        import maya.cmds as cmds

        for conversion in cmds.ls(nodes, type="unitConversion"):
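            # A degree-to-radian conversion, for example, should have a
            # factor of pi / 180, i.e. 0.017453292519943295.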
            attr = conversion + ".conversionFactor"
            factor = cmds.getAttr(attr)
            cmds.setAttr(attr, 1)  # To trigger reference edit
            cmds.setAttr(attr, factor)