Пример #1
0
    def repair(cls, instance):
        """Reset the image file prefix and frame padding on the instance.

        Applies the renderer's expected prefix tokens (and default padding)
        inside the instance's render layer. Renderman is special-cased since
        it stores these settings on its own ``rmanGlobals`` node.
        """

        renderer = instance.data['renderer']
        layer_node = instance.data['setMembers']

        with lib.renderlayer(layer_node):
            fallback = lib.RENDER_ATTRS['default']
            attrs = lib.RENDER_ATTRS.get(renderer, fallback)

            if renderer == "renderman":
                # renderman handles stuff differently
                cmds.setAttr("rmanGlobals.imageFileFormat",
                             cls.ImagePrefixTokens[renderer],
                             type="string")
                cmds.setAttr("rmanGlobals.imageOutputDir",
                             cls.RendermanDirPrefix,
                             type="string")
                return

            settings_node = attrs["node"]

            # Repair the file name prefix
            cmds.setAttr("{}.{}".format(settings_node, attrs["prefix"]),
                         cls.ImagePrefixTokens[renderer],
                         type="string")

            # Repair the frame padding
            cmds.setAttr("{}.{}".format(settings_node, attrs["padding"]),
                         cls.DEFAULT_PADDING)
    def repair(cls, instance):
        """Enable the 'ignore in batch mode' flag for Distributed Rendering.

        Switches to the instance's render layer before setting the attribute
        so the change lands in the correct layer context.
        """

        layer = instance.data.get("setMembers")
        with lib.renderlayer(layer):
            cls.log.info("Enabling Distributed Rendering "
                         "ignore in batch mode..")
            cmds.setAttr(cls.ignored_attr, True)
Пример #3
0
    def get_invalid(cls, instance):
        """Get all invalid nodes

        A node is invalid when:
          - one of its related object sets (other than `*_SET` sets, which
            are allowed to be absent) was not collected into the look
            relationships, or
          - it is missing from the members of a collected shader set it
            belongs to.

        Returns:
            list: The invalid nodes (each node appears at most once per
                failed check).

        """

        cls.log.info("Validating look content for "
                     "'{}'".format(instance.name))

        relationships = instance.data["lookData"]["relationships"]
        invalid = []

        renderlayer = instance.data.get("renderlayer", "defaultRenderLayer")
        with lib.renderlayer(renderlayer):
            for node in instance:
                # get the connected objectSets of the node
                sets = lib.get_related_sets(node)
                if not sets:
                    continue

                # check if any objectSets are not present in the relationships
                missing_sets = [s for s in sets if s not in relationships]
                # `*_SET` sets are allowed to be missing; any other missing
                # set means data is not coming along — that is wrong.
                # (Log and append once, instead of once per missing set.)
                if any('_SET' not in missing for missing in missing_sets):
                    cls.log.error("Missing sets '{}' for node "
                                  "'{}'".format(missing_sets, node))
                    invalid.append(node)
                    # Already invalid; skip the membership check below
                    continue

                # Ensure the node is in the sets that are collected
                for shaderset, data in relationships.items():
                    if shaderset not in sets:
                        # no need to check for a set if the node
                        # isn't in it anyway
                        continue

                    member_nodes = [member['name'] for member in
                                    data['members']]
                    if node not in member_nodes:
                        # The node is not found in the collected set
                        # relationships
                        cls.log.error("Missing '{}' in collected set node "
                                      "'{}'".format(node, shaderset))
                        invalid.append(node)

        return invalid
Пример #4
0
    def process(self, instance):
        """Collect the Look in the instance with the correct layer settings"""

        layer = instance.data["renderlayer"]
        with lib.renderlayer(layer):
            self.collect(instance)
Пример #5
0
    def process(self, instance):
        """Extract the look as a Maya ASCII file plus a JSON sidecar.

        Steps:
          1. Gather per-file color space metadata from the resources.
          2. Process the textures (optionally converting via maketx) and
             build the transfer/hardlink lists for the integrator.
          3. Remap file node paths and color spaces to their destination.
          4. Export the (emptied) object sets to Maya ASCII in the correct
             render layer, then dump attributes/relationships to JSON.
        """

        # Define extract output file path
        dir_path = self.staging_dir(instance)
        maya_fname = "{0}.ma".format(instance.name)
        json_fname = "{0}.json".format(instance.name)

        # Make texture dump folder
        maya_path = os.path.join(dir_path, maya_fname)
        json_path = os.path.join(dir_path, json_fname)

        self.log.info("Performing extraction..")

        # Remove all members of the sets so they are not included in the
        # exported file by accident
        self.log.info("Extract sets (Maya ASCII) ...")
        lookdata = instance.data["lookData"]
        relationships = lookdata["relationships"]
        sets = relationships.keys()

        # Extract the textures to transfer, possibly convert with maketx and
        # remap the node paths to the destination path. Note that a source
        # might be included more than once amongst the resources as they could
        # be the input file to multiple nodes.
        resources = instance.data["resources"]
        do_maketx = instance.data.get("maketx", False)

        # Collect metadata for each unique file used by the resources,
        # keyed by normalized path so duplicates collapse to one entry.
        files_metadata = dict()
        for resource in resources:
            # Preserve color space values (force value after filepath change)
            # This will also trigger in the same order at end of context to
            # ensure after context it's still the original value.
            color_space = resource.get("color_space")

            for f in resource["files"]:
                files_metadata[os.path.normpath(f)] = {
                    "color_space": color_space}

        # Process the resource files
        transfers = list()
        hardlinks = list()
        hashes = dict()
        force_copy = instance.data.get("forceCopy", False)

        self.log.info(list(files_metadata.keys()))
        for filepath, metadata in files_metadata.items():

            cspace = metadata["color_space"]
            linearise = False
            if do_maketx and cspace == "sRGB":
                linearise = True
                # set its file node to 'raw' as tx will be linearized
                metadata["color_space"] = "raw"

            source, mode, texture_hash = self._process_texture(
                filepath,
                do_maketx,
                staging=dir_path,
                linearise=linearise,
                force=force_copy
            )
            destination = self.resource_destination(instance,
                                                    source,
                                                    do_maketx)

            # Force copy is specified.
            if force_copy:
                mode = COPY

            if mode == COPY:
                transfers.append((source, destination))
                self.log.info('copying')
            elif mode == HARDLINK:
                hardlinks.append((source, destination))
                self.log.info('hardlinking')

            # Store the hashes from hash to destination to include in the
            # database
            hashes[texture_hash] = destination

        # Remap the resources to the destination path (change node attributes)
        destinations = dict()
        remap = OrderedDict()  # needs to be ordered, see color space values
        for resource in resources:
            source = os.path.normpath(resource["source"])
            if source not in destinations:
                # Cache destination as source resource might be included
                # multiple times
                destinations[source] = self.resource_destination(
                    instance, source, do_maketx
                )

            # Preserve color space values (force value after filepath change)
            # This will also trigger in the same order at end of context to
            # ensure after context it's still the original value.
            color_space_attr = resource["node"] + ".colorSpace"
            color_space = cmds.getAttr(color_space_attr)
            if files_metadata[source]["color_space"] == "raw":
                # set color space to raw if we linearized it
                color_space = "Raw"
            # Remap file node filename to destination
            attr = resource["attribute"]
            remap[attr] = destinations[source]
            remap[color_space_attr] = color_space

        self.log.info("Finished remapping destinations ...")

        # Extract in correct render layer
        layer = instance.data.get("renderlayer", "defaultRenderLayer")
        with lib.renderlayer(layer):
            # TODO: Ensure membership edits don't become renderlayer overrides
            with lib.empty_sets(sets, force=True):
                # To avoid Maya trying to automatically remap the file
                # textures relative to the `workspace -directory` we force
                # it to a fake temporary workspace. This fixes textures
                # getting incorrectly remapped. (LKD-17, PLN-101)
                with no_workspace_dir():
                    with lib.attribute_values(remap):
                        with avalon.maya.maintained_selection():
                            cmds.select(sets, noExpand=True)
                            cmds.file(
                                maya_path,
                                force=True,
                                typ="mayaAscii",
                                exportSelected=True,
                                preserveReferences=False,
                                channels=True,
                                constraints=True,
                                expressions=True,
                                constructionHistory=True,
                            )

        # Write the JSON data
        self.log.info("Extract json..")
        data = {
            "attributes": lookdata["attributes"],
            "relationships": relationships
        }

        with open(json_path, "w") as f:
            json.dump(data, f)

        if "files" not in instance.data:
            instance.data["files"] = list()
        if "hardlinks" not in instance.data:
            instance.data["hardlinks"] = list()
        if "transfers" not in instance.data:
            instance.data["transfers"] = list()

        instance.data["files"].append(maya_fname)
        instance.data["files"].append(json_fname)

        instance.data["representations"] = []
        instance.data["representations"].append(
            {
                "name": "ma",
                "ext": "ma",
                "files": os.path.basename(maya_fname),
                "stagingDir": os.path.dirname(maya_fname),
            }
        )
        instance.data["representations"].append(
            {
                "name": "json",
                "ext": "json",
                "files": os.path.basename(json_fname),
                "stagingDir": os.path.dirname(json_fname),
            }
        )

        # Set up the resources transfers/links for the integrator
        instance.data["transfers"].extend(transfers)
        instance.data["hardlinks"].extend(hardlinks)

        # Source hash for the textures
        instance.data["sourceHashes"] = hashes

        self.log.info("Extracted instance '%s' to: %s" % (instance.name,
                                                          maya_path))