Example 1
    def process(self, instance):
        """Extract a Yeti rig as Maya ASCII together with its rig settings.

        Writes ``yeti_rig.ma`` and ``yeti.rigsettings`` into the staging
        directory, registers both as representations, and queues the rig's
        texture resources as transfers towards the published resources
        folder.

        Args:
            instance: Publish instance containing ``pgYetiMaya`` nodes.

        Raises:
            RuntimeError: When the instance contains no ``pgYetiMaya`` nodes.
        """
        yeti_nodes = cmds.ls(instance, type="pgYetiMaya")
        if not yeti_nodes:
            raise RuntimeError("No pgYetiMaya nodes found in the instance")

        # Define extract output file paths
        dirname = self.staging_dir(instance)
        settings_path = os.path.join(dirname, "yeti.rigsettings")
        maya_path = os.path.join(dirname, "yeti_rig.ma")

        self.log.info("Writing metadata file")

        image_search_path = instance.data["resourcesDir"]

        # BUGFIX: `destination_folder` was used below for the yetigraph
        # remapping but never assigned, raising NameError at runtime.
        # Derive it as the parent of the resources folder, matching the
        # sibling extractor where the resources dir is
        # ``os.path.join(destination_folder, "resources")``.
        destination_folder = os.path.dirname(image_search_path)

        settings = instance.data.get("rigsettings", None)
        if settings:
            settings["imageSearchPath"] = image_search_path
            with open(settings_path, "w") as fp:
                json.dump(settings, fp, ensure_ascii=False)

        # Add textures to transfers so the integrator copies them over
        if 'transfers' not in instance.data:
            instance.data['transfers'] = []

        for resource in instance.data.get('resources', []):
            # NOTE: loop variable renamed from `file` to avoid shadowing
            # the builtin.
            for fpath in resource['files']:
                src = fpath
                dst = os.path.join(image_search_path, os.path.basename(fpath))
                instance.data['transfers'].append([src, dst])

                self.log.info("adding transfer {} -> {}".format(src, dst))

        # Ensure the imageSearchPath is being remapped to the publish folder
        attr_value = {
            "%s.imageSearchPath" % n: str(image_search_path)
            for n in yeti_nodes
        }

        # Get input_SET members
        input_set = next(i for i in instance if i == "input_SET")

        # Get all set members, including all of their descendants
        set_members = cmds.sets(input_set, query=True)
        set_members += cmds.listRelatives(
            set_members, allDescendents=True, fullPath=True) or []
        members = cmds.ls(set_members, long=True)

        nodes = instance.data["setMembers"]
        resources = instance.data.get("resources", {})
        # Temporarily disconnect inputs and remap attribute values so the
        # exported scene points at the publish location, not the work files.
        with disconnect_plugs(settings, members):
            with yetigraph_attribute_values(destination_folder, resources):
                with maya.attribute_values(attr_value):
                    cmds.select(nodes, noExpand=True)
                    cmds.file(maya_path,
                              force=True,
                              exportSelected=True,
                              typ="mayaAscii",
                              preserveReferences=False,
                              constructionHistory=True,
                              shader=False)

        # Ensure files can be stored
        # build representations
        if "representations" not in instance.data:
            instance.data["representations"] = []

        self.log.info("rig file: {}".format("yeti_rig.ma"))
        instance.data["representations"].append({
            'name': "ma",
            'ext': 'ma',
            'files': "yeti_rig.ma",
            'stagingDir': dirname,
            'anatomy_template': 'publish'
        })
        self.log.info("settings file: {}".format("yeti.rigsettings"))
        instance.data["representations"].append({
            'name': 'rigsettings',
            'ext': 'rigsettings',
            'files': 'yeti.rigsettings',
            'stagingDir': dirname,
            'anatomy_template': 'publish'
        })

        self.log.info("Extracted {} to {}".format(instance, dirname))

        cmds.select(clear=True)
Example 2
    def process(self, instance):
        """Extract look sets as Maya ASCII plus a JSON relationships file.

        Texture resources are processed (optionally converted via maketx),
        scheduled as transfers/hardlinks for the integrator, and the file
        node paths are remapped to the publish destination for the duration
        of the export.

        Args:
            instance: Publish instance carrying ``lookData`` and
                ``resources`` data.
        """
        # Define extract output file path
        dir_path = self.staging_dir(instance)
        maya_fname = "{0}.ma".format(instance.name)
        json_fname = "{0}.json".format(instance.name)

        # Make texture dump folder
        maya_path = os.path.join(dir_path, maya_fname)
        json_path = os.path.join(dir_path, json_fname)

        self.log.info("Performing extraction..")

        # Remove all members of the sets so they are not included in the
        # exported file by accident
        self.log.info("Extract sets (Maya ASCII) ...")
        lookdata = instance.data["lookData"]
        relationships = lookdata["relationships"]
        sets = relationships.keys()

        # Extract the textures to transfer, possibly convert with maketx and
        # remap the node paths to the destination path. Note that a source
        # might be included more than once amongst the resources as they could
        # be the input file to multiple nodes.
        resources = instance.data["resources"]
        do_maketx = instance.data.get("maketx", False)

        # Collect all unique files used in the resources
        files = set()
        for resource in resources:
            files.update(os.path.normpath(f) for f in resource["files"])

        # Process the resource files
        transfers = list()
        hardlinks = list()
        hashes = dict()
        for filepath in files:
            # NOTE: renamed from `hash` to avoid shadowing the builtin
            source, mode, texture_hash = self._process_texture(
                filepath, do_maketx, staging=dir_path)
            destination = self.resource_destination(instance, source,
                                                    do_maketx)
            if mode == COPY:
                transfers.append((source, destination))
            elif mode == HARDLINK:
                hardlinks.append((source, destination))

            # Store the hashes from hash to destination to include in the
            # database
            hashes[texture_hash] = destination

        # Remap the resources to the destination path (change node attributes)
        destinations = dict()
        remap = OrderedDict()  # needs to be ordered, see color space values
        for resource in resources:
            source = os.path.normpath(resource["source"])
            if source not in destinations:
                # Cache destination as source resource might be included
                # multiple times
                destinations[source] = self.resource_destination(
                    instance, source, do_maketx)

            # Remap file node filename to destination
            attr = resource['attribute']
            remap[attr] = destinations[source]

            # Preserve color space values (force value after filepath change)
            # This will also trigger in the same order at end of context to
            # ensure after context it's still the original value.
            color_space_attr = resource['node'] + ".colorSpace"
            remap[color_space_attr] = cmds.getAttr(color_space_attr)

        self.log.info("Finished remapping destinations ...")

        # Extract in correct render layer
        layer = instance.data.get("renderlayer", "defaultRenderLayer")
        with lib.renderlayer(layer):
            # TODO: Ensure membership edits don't become renderlayer overrides
            with lib.empty_sets(sets, force=True):
                # To avoid Maya trying to automatically remap the file
                # textures relative to the `workspace -directory` we force
                # it to a fake temporary workspace. This fixes textures
                # getting incorrectly remapped. (LKD-17, PLN-101)
                with no_workspace_dir():
                    with lib.attribute_values(remap):
                        with avalon.maya.maintained_selection():
                            cmds.select(sets, noExpand=True)
                            cmds.file(maya_path,
                                      force=True,
                                      typ="mayaAscii",
                                      exportSelected=True,
                                      preserveReferences=False,
                                      channels=True,
                                      constraints=True,
                                      expressions=True,
                                      constructionHistory=True)

        # Write the JSON data
        self.log.info("Extract json..")
        data = {
            "attributes": lookdata["attributes"],
            "relationships": relationships
        }

        with open(json_path, "w") as f:
            json.dump(data, f)

        if "files" not in instance.data:
            instance.data["files"] = list()
        if "hardlinks" not in instance.data:
            instance.data["hardlinks"] = list()
        if "transfers" not in instance.data:
            instance.data["transfers"] = list()

        instance.data["files"].append(maya_fname)
        instance.data["files"].append(json_fname)

        # Set up the resources transfers/links for the integrator
        instance.data["transfers"].extend(transfers)
        instance.data["hardlinks"].extend(hardlinks)

        # Source hash for the textures
        instance.data["sourceHashes"] = hashes

        self.log.info("Extracted instance '%s' to: %s" %
                      (instance.name, maya_path))
Example 3
    def process(self, instance):
        """Export a Yeti rig to Maya ASCII and write its rig settings file.

        Produces ``yeti_rig.ma`` and ``yeti.rigsettings`` in the staging
        directory, remapping each node's ``imageSearchPath`` to the assumed
        publish resources folder for the duration of the export.

        Args:
            instance: Publish instance containing ``pgYetiMaya`` nodes.

        Raises:
            RuntimeError: When the instance contains no ``pgYetiMaya`` nodes.
        """
        rig_nodes = cmds.ls(instance, type="pgYetiMaya")
        if not rig_nodes:
            raise RuntimeError("No pgYetiMaya nodes found in the instance")

        # Output locations inside the staging directory
        staging = self.staging_dir(instance)
        settings_file = os.path.join(staging, "yeti.rigsettings")
        scene_file = os.path.join(staging, "yeti_rig.ma")

        self.log.info("Writing metadata file")

        # Resolve the assumed publish destination and derive the resources
        # folder that will serve as the Yeti imageSearchPath.
        assumed_data = instance.data["assumedTemplateData"]
        publish_path = instance.data["template"].format(**assumed_data)
        destination_folder = os.path.dirname(publish_path)
        image_search_path = os.path.normpath(
            os.path.join(destination_folder, "resources"))

        settings = instance.data.get("rigsettings", None)
        if settings:
            settings["imageSearchPath"] = image_search_path
            with open(settings_file, "w") as fp:
                json.dump(settings, fp, ensure_ascii=False)

        # Remap every node's imageSearchPath to the publish folder
        attr_value = {}
        for node in rig_nodes:
            attr_value["%s.imageSearchPath" % node] = str(image_search_path)

        # Locate the input_SET member of the instance
        input_set = next(i for i in instance if i == "input_SET")

        # Gather the set contents plus all of their descendants
        set_members = cmds.sets(input_set, query=True)
        descendents = cmds.listRelatives(set_members,
                                         allDescendents=True,
                                         fullPath=True) or []
        members = cmds.ls(set_members + descendents, long=True)

        nodes = instance.data["setMembers"]
        resources = instance.data.get("resources", {})
        # Disconnect plugs and remap attributes only while exporting
        with disconnect_plugs(settings, members), \
                yetigraph_attribute_values(destination_folder, resources), \
                maya.attribute_values(attr_value):
            cmds.select(nodes, noExpand=True)
            cmds.file(scene_file,
                      force=True,
                      exportSelected=True,
                      typ="mayaAscii",
                      preserveReferences=False,
                      constructionHistory=True,
                      shader=False)

        # Register the produced files so they can be stored
        instance.data.setdefault("files", list()).extend(
            ["yeti_rig.ma", "yeti.rigsettings"])

        self.log.info("Extracted {} to {}".format(instance, staging))

        cmds.select(clear=True)