def process(self, instance):
        node = instance[0]

        ext = "nknc" if nuke.env["nc"] else "nk"

        staging_dir = utils.stage_dir()
        filename = "%s.%s" % (instance.data["subset"], ext)
        outpath = "%s/%s" % (staging_dir, filename)

        instance.data["repr.nkscript._stage"] = staging_dir
        instance.data["repr.nkscript._files"] = [filename]
        instance.data["repr.nkscript.scriptName"] = filename
        instance.data["repr.nkscript.outputNode"] = node.fullName()

        with lib.maintained_selection():
            lib.reset_selection()
            for n in walk_tree(node):
                n["selected"].setValue(True)

            if node.Class() == "Write":
                # Swap image file path to published path before copy
                output = node["file"].value()
                node["file"].setValue(instance.data["publishedSeqPatternPath"])
                nuke.nodeCopy(outpath)
                node["file"].setValue(output)

            else:
                nuke.nodeCopy(outpath)
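The `walk_tree` helper used above is defined elsewhere in the module. A minimal sketch of the behaviour the extractor needs from it, yielding the output node and everything upstream of it (an assumption, not the actual reveries implementation):

    def walk_tree(node, _visited=None):
        # Yield `node` and every upstream node feeding into it, once each.
        _visited = _visited if _visited is not None else set()
        if node is None or node.fullName() in _visited:
            return
        _visited.add(node.fullName())
        yield node
        for i in range(node.inputs()):
            for upstream in walk_tree(node.input(i), _visited):
                yield upstream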
    def _via_command(self, payload):
        def to_txt(document, out):
            # Write dict to key-value txt file
            with open(out, "w") as fp:
                for key, val in document.items():
                    fp.write("{key}={val}\n".format(key=key, val=val))

        job_info = payload["JobInfo"]
        plugin_info = payload["PluginInfo"]

        info_dir = utils.stage_dir(prefix="deadline_")
        job_info_file = os.path.join(info_dir, "job_info.job")
        plugin_info_file = os.path.join(info_dir, "plugin_info.job")

        to_txt(job_info, job_info_file)
        to_txt(plugin_info, plugin_info_file)

        # Pass arguments as a list so paths containing spaces survive
        # on any platform
        cmd = [self._cmd, job_info_file, plugin_info_file]
        output = subprocess.check_output(cmd)

        output = output.decode("utf-8")
        if "Result=Success" not in output.split():
            raise Exception("Submission failed...")
        else:
            parts = re.split("[=\n\r]", output)
            jobid = parts[parts.index("JobID") + 1]
            self.log.info("Success. JobID: %s" % jobid)
            return jobid
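A hypothetical payload for `_via_command`; the key names follow Deadline's plain-text job submission format, but the values here are purely illustrative:

    payload = {
        "JobInfo": {
            "Plugin": "MayaBatch",       # Deadline plugin to run the job
            "Name": "charA.pointcache",  # job name shown in the Monitor
            "Frames": "1-100",
        },
        "PluginInfo": {
            "SceneFile": "/path/to/scene.ma",
            "Version": "2018",
        },
    }
    # `to_txt` flattens each dict into "key=value" lines, e.g.
    #     Plugin=MayaBatch
    #     Name=charA.pointcache
    # and `self._cmd` (presumably the deadlinecommand executable) is
    # invoked with the two generated files to submit the job.
    jobid = self._via_command(payload)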
    def process(self, instance):
        from maya import cmds
        from reveries import utils

        staging_dir = utils.stage_dir(dir=instance.data["_sharedStage"])
        filename = "%s.abc" % instance.data["subset"]
        outpath = "%s/%s" % (staging_dir, filename)

        instance.data["repr.Alembic._stage"] = staging_dir
        instance.data["repr.Alembic._hardlinks"] = [filename]
        instance.data["repr.Alembic.entryFileName"] = filename

        if instance.data.get("staticCache"):
            start = end = cmds.currentTime(query=True)
        else:
            context_data = instance.context.data
            start = context_data["startFrame"]
            end = context_data["endFrame"]

        instance.data["startFrame"] = start
        instance.data["endFrame"] = end

        euler_filter = instance.data.get("eulerFilter", False)
        root = instance.data["outCache"]

        instance.data["repr.Alembic._delayRun"] = {
            "func": self.export_alembic,
            "args": [root, outpath, start, end, euler_filter],
        }
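The `_delayRun` entry defers the actual Alembic export; instead of running here, it is presumably consumed later by the integration step, along the lines of the sketch below (an assumption about the consumer side, which is not shown in these examples):

    delayed = instance.data["repr.Alembic._delayRun"]
    delayed["func"](*delayed.get("args", []), **delayed.get("kwargs", {}))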
    def process(self, instance):
        from maya import cmds
        from reveries import utils

        staging_dir = utils.stage_dir(dir=instance.data["_sharedStage"])
        cachename = "%s.fbx" % instance.data["subset"]
        filename = "%s.ma" % instance.data["subset"]
        cachepath = "%s/%s" % (staging_dir, cachename)
        outpath = "%s/%s" % (staging_dir, filename)

        instance.data["repr.FBXCache._stage"] = staging_dir
        instance.data["repr.FBXCache._hardlinks"] = [filename, cachename]
        instance.data["repr.FBXCache.entryFileName"] = filename

        if instance.data.get("staticCache"):
            start = end = cmds.currentTime(query=True)
        else:
            context_data = instance.context.data
            start = context_data["startFrame"]
            end = context_data["endFrame"]

        instance.data["startFrame"] = start
        instance.data["endFrame"] = end

        # (TODO) Make namespace preservation optional in the GUI
        keep_namespace = instance.data.get("keepNamespace", False)
        nodes = instance.data["outCache"]

        instance.data["repr.FBXCache._delayRun"] = {
            "func": self.export_fbx,
            "args": [outpath, cachepath, cachename, nodes, keep_namespace],
        }
Example #5
    def process(self, instance):
        from maya import cmds
        from avalon import maya
        from reveries import utils

        staging_dir = utils.stage_dir()
        filename = "%s.ma" % instance.data["subset"]
        outpath = "%s/%s" % (staging_dir, filename)

        instance.data["repr.mayaAscii._stage"] = staging_dir
        instance.data["repr.mayaAscii._files"] = [filename]
        instance.data["repr.mayaAscii.entryFileName"] = filename

        # Perform extraction
        self.log.info("Performing extraction..")
        with maya.maintained_selection():
            # Set the `noExpand` flag to True so the shared containers
            # stay selected; being objectSets, they would be dropped if
            # the selection were expanded.
            cmds.select(instance, noExpand=True)
            cmds.file(outpath,
                      force=True,
                      typ="mayaAscii",
                      exportSelected=True,
                      preserveReferences=True,
                      channels=True,
                      constraints=True,
                      expressions=True,
                      shader=True,
                      constructionHistory=True)
Example #6
    def process(self, instance):
        from reveries import utils

        staging_dir = utils.stage_dir(dir=instance.data["_sharedStage"])

        start = int(instance.data["startFrame"])
        end = int(instance.data["endFrame"])
        step = int(instance.data["step"])
        has_yeti = instance.data.get("hasYeti", False)
        nodes = instance[:]

        pattern = "%s.%%04d.ass" % instance.data["subset"]
        cachename = "%s.ass" % instance.data["subset"]

        firstfile = pattern % start
        outpath = "%s/%s" % (staging_dir, cachename)

        instance.data["outputPath"] = "%s/%s" % (staging_dir, pattern)

        use_sequence = start != end
        if use_sequence:
            instance.data["repr.Ass._hardlinks"] = [
                pattern % i for i in range(start, end + 1, step)
            ]
        else:
            instance.data["repr.Ass._hardlinks"] = [firstfile]

        instance.data["repr.Ass._stage"] = staging_dir
        instance.data["repr.Ass.entryFileName"] = firstfile
        instance.data["repr.Ass.useSequence"] = use_sequence

        self.log.info("Extracting standin..")

        child_instances = instance.data.get("childInstances", [])
        try:
            texture = next(chd for chd in child_instances
                           if chd.data["family"] == "reveries.texture")
        except StopIteration:
            file_node_attrs = dict()
        else:
            file_node_attrs = texture.data.get("fileNodeAttrs", dict())

        expand_procedurals = instance.data.get("expandProcedurals", True)

        instance.data["repr.Ass._delayRun"] = {
            "func": self.export_ass,
            "args": [
                nodes,
                outpath,
                file_node_attrs,
                has_yeti,
            ],
            "kwargs": {
                "start": start,
                "end": end,
                "step": step,
                "expand_procedurals": expand_procedurals,
            }
        }
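The standin extractor above, and the LightSet, XGen and LookDev extractors below, all repeat the same next()/StopIteration lookup for a child texture instance. A hypothetical helper capturing that idiom (not part of reveries):

    def find_child_by_family(instance, family):
        # Return the first child instance of `family`, or None if absent.
        children = instance.data.get("childInstances", [])
        return next((chd for chd in children
                     if chd.data["family"] == family), None)

    # The try/except block would then reduce to:
    #   texture = find_child_by_family(instance, "reveries.texture")
    #   file_node_attrs = (texture.data.get("fileNodeAttrs", {})
    #                      if texture else {})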
    def process(self, instance):
        from reveries import utils

        staging_dir = utils.stage_dir()
        filename = "%s.abc" % instance.data["subset"]
        outpath = "%s/%s" % (staging_dir, filename)

        nodes = instance[:]

        instance.data["repr.Alembic._stage"] = staging_dir
        instance.data["repr.Alembic._files"] = [filename]
        instance.data["repr.Alembic.entryFileName"] = filename

        self.extract_alembic(nodes, outpath)
    def process(self, instance):
        from reveries import utils
        from AtomsMaya.hostbridge.commands import MayaCommandsHostBridge

        staging_dir = utils.stage_dir(dir=instance.data["_sharedStage"])

        start = int(instance.data["startFrame"])
        end = int(instance.data["endFrame"])

        # Get agentTypes
        agent_types = set()
        for node in instance.data["AtomsAgentGroups"]:
            agent_group = MayaCommandsHostBridge.get_agent_group(node)
            agent_types.update(agent_group.agentTypeMapper().keys())

        filename = "%s.atoms" % instance.data["subset"]  # Cache header
        agent_type = "agentTypes/%s.agentType"  # agentType file
        agent_script = "agentTypes/%s.py"  # Python event wrapper script
        frames = "%s.%%04d.%%s.atoms" % instance.data["subset"]  # Frame files
        variation = "%s.json" % instance.data["subset"]  # Crowd Variation
        # (NOTE) Atoms Crowd cache padding is always 4

        files = [frames % (f, x)
                 for f in range(start, end + 1)
                 for x in ("frame", "header", "meta", "pose")]
        files += [agent_type % agtype for agtype in agent_types]
        files += [agent_script % agtype for agtype in agent_types]
        files += [filename, variation]

        instance.data["repr.atoms._stage"] = staging_dir
        instance.data["repr.atoms._hardlinks"] = files
        instance.data["repr.atoms.entryFileName"] = filename
        instance.data["repr.atoms.variationFile"] = variation

        cache_dir = staging_dir
        cache_name = instance.data["subset"]

        agent_groups = instance.data["AtomsAgentGroups"]
        MayaCommandsHostBridge.export_atoms_cache(cache_dir,
                                                  cache_name,
                                                  start,
                                                  end,
                                                  agent_groups)

        variation_path = "%s/%s" % (staging_dir, variation)
        with open(variation_path, "w") as fp:
            fp.write(instance.data["variationStr"])
Example #9
    def process(self, instance):
        from maya import cmds
        from avalon import maya
        from reveries import utils
        from reveries.maya import capsule

        staging_dir = utils.stage_dir()
        filename = "%s.ma" % instance.data["subset"]
        outpath = "%s/%s" % (staging_dir, filename)

        instance.data["repr.LightSet._stage"] = staging_dir
        instance.data["repr.LightSet._files"] = [filename]
        instance.data["repr.LightSet.entryFileName"] = filename

        # Extract lights
        #
        self.log.info("Extracting lights..")

        # From texture extractor
        child_instances = instance.data.get("childInstances", [])
        try:
            texture = next(chd for chd in child_instances
                           if chd.data["family"] == "reveries.texture")
        except StopIteration:
            file_node_attrs = dict()
        else:
            file_node_attrs = texture.data.get("fileNodeAttrs", dict())

        with contextlib.nested(
                maya.maintained_selection(),
                capsule.attribute_values(file_node_attrs),
                capsule.no_refresh(),
        ):
            cmds.select(instance, replace=True, noExpand=True)

            cmds.file(
                outpath,
                options="v=0;",
                type="mayaAscii",
                force=True,
                exportSelected=True,
                preserveReferences=False,
                constructionHistory=False,
                channels=True,  # allow animation
                constraints=False,
                shader=False,
                expressions=True)
Example #10
    def process(self, instance):
        from reveries import utils

        staging_dir = utils.stage_dir()
        filename = "%s.mb" % instance.data["subset"]
        outpath = "%s/%s" % (staging_dir, filename)

        nodes = instance[:]

        instance.data["repr.mayaBinary._stage"] = staging_dir
        instance.data["repr.mayaBinary._files"] = [filename]
        instance.data["repr.mayaBinary.entryFileName"] = filename

        geo_id_and_hash = self.extract_mayabinary(nodes, outpath)
        assert geo_id_and_hash is not None, "Geometry hash not calculated."

        instance.data["repr.mayaBinary.modelProfile"] = geo_id_and_hash
Example #11
    def process(self, instance):
        from maya import cmds
        from reveries import utils
        from reveries.maya import io, lib

        staging_dir = utils.stage_dir()
        filename = "%s.abc" % instance.data["subset"]
        members = "%s.json" % instance.data["subset"]

        outpath = "%s/%s" % (staging_dir, filename)
        memberpath = "%s/%s" % (staging_dir, members)

        instance.data["repr.setPackage._stage"] = staging_dir
        instance.data["repr.setPackage._files"] = [filename, members]
        instance.data["repr.setPackage.entryFileName"] = filename

        self.parse_matrix(instance)

        self.log.info("Dumping setdress members data ..")
        with open(memberpath, "w") as fp:
            json.dump(instance.data["subsetData"], fp, ensure_ascii=False)
            self.log.debug("Dumped: {}".format(memberpath))

        self.log.info("Extracting hierarchy ..")
        cmds.select(instance.data["subsetSlots"])
        io.export_alembic(file=outpath,
                          startFrame=1.0,
                          endFrame=1.0,
                          selection=True,
                          uvWrite=True,
                          writeUVSets=True,
                          writeVisibility=True,
                          writeCreases=True,
                          attr=[lib.AVALON_ID_ATTR_LONG])

        self.log.debug("Exported: {}".format(outpath))

        cmds.select(clear=True)
Example #12
    def process(self, instance):
        from maya import cmds
        from reveries import utils
        from reveries.maya import xgen, capsule, utils as maya_utils

        staging_dir = utils.stage_dir()

        # Export preset
        # (NOTE) Saving with ext `.ma` instead of `.xgip` because I'd
        #        like to load it via reference later. Referencing a file
        #        whose ext is not `.ma`, `.mb` or another normal type
        #        will crash Maya on file saving.
        filename = "%s.ma" % instance.data["subset"]
        linkfile = "%s.json" % instance.data["subset"]
        outpath = "%s/%s" % (staging_dir, filename)
        linkpath = "%s/%s" % (staging_dir, linkfile)

        instance.data["repr.XGenInteractive._stage"] = staging_dir
        instance.data["repr.XGenInteractive._files"] = [filename, linkfile]
        instance.data["repr.XGenInteractive.entryFileName"] = filename
        instance.data["repr.XGenInteractive.linkFname"] = linkfile

        child_instances = instance.data.get("childInstances", [])
        try:
            texture = next(chd for chd in child_instances
                           if chd.data["family"] == "reveries.texture")
        except StopIteration:
            file_node_attrs = dict()
        else:
            file_node_attrs = texture.data.get("fileNodeAttrs", dict())

        bound_map = dict()
        clay_shader = "initialShadingGroup"
        descriptions = instance.data["igsDescriptions"]
        with capsule.assign_shader(descriptions, shadingEngine=clay_shader):

            for description in descriptions:

                desc_id = maya_utils.get_id(description)

                # Get bounded meshes
                bound_map[desc_id] = list()
                for mesh in xgen.interactive.list_bound_meshes(description):
                    transform = cmds.listRelatives(mesh,
                                                   parent=True,
                                                   fullPath=True)
                    id = maya_utils.get_id(transform[0])
                    bound_map[desc_id].append(id)

            # (NOTE) Separating grooms from their bound meshes does not
            #        seem to preserve sculpt layer data entirely
            #        correctly. For example, hair strands sculpted from
            #        long to very short may end up noisy after being
            #        imported back.
            #
            #        So now we export the grooms with bound meshes...
            #
            # io.export_xgen_IGS_presets(descriptions, outpath)

            with contextlib.nested(
                    capsule.no_display_layers(instance[:]),
                    # Change to published path
                    capsule.attribute_values(file_node_attrs),
                    capsule.maintained_selection(),
            ):
                cmds.select(descriptions)

                cmds.file(outpath,
                          force=True,
                          typ="mayaAscii",
                          exportSelected=True,
                          preserveReferences=False,
                          channels=True,
                          constraints=True,
                          expressions=True,
                          constructionHistory=True)

        # Parse preset bounding map

        with open(linkpath, "w") as fp:
            json.dump(bound_map, fp, ensure_ascii=False)
Example #13
    def process(self, instance):
        import avalon.api
        import avalon.io
        from reveries import lib, utils
        from reveries.maya import plugins, lib as maya_lib

        staging_dir = utils.stage_dir(dir=instance.data["_sharedStage"])
        published_dir = self.published_dir(instance)

        file_inventory = list()
        NEW_OR_CHANGED = list()

        PREVIOUS = dict()
        CURRENT = dict()

        USE_TX = instance.data.get("useTxMaps", False)
        files_to_tx = dict()
        files_to_copy = dict()

        # Get previous files
        path = [
            avalon.api.Session["AVALON_PROJECT"],
            avalon.api.Session["AVALON_ASSET"],
            instance.data["subset"],
            -1,  # latest version
            "TexturePack"
        ]
        representation_id = avalon.io.locate(path)
        if representation_id is not None:
            representation = avalon.io.find_one({"_id": representation_id})

            file_inventory = representation["data"].get("fileInventory", [])
            PREVIOUS = maya_lib.resolve_file_profile(representation,
                                                     file_inventory)

        # Get current files
        for data in instance.data["fileData"]:
            file_node = data["node"]
            if file_node in instance.data["fileNodesToIgnore"]:
                continue

            dir_name = data["dir"]
            fnames = data["fnames"]
            fpattern = data["fpattern"]

            CURRENT[fpattern] = {
                "node": data["node"],
                "colorSpace": data["colorSpace"],
                "fnames": fnames,
                "pathMap": {fn: dir_name + "/" + fn
                            for fn in fnames},
            }

        # Extract textures
        #
        self.log.info("Extracting textures..")

        # Store the calculated published file paths so the look or
        # lightSet extractors can update their file nodes' paths.
        if "fileNodeAttrs" not in instance.data:
            instance.data["fileNodeAttrs"] = OrderedDict()

        # To transfer
        #
        new_version = instance.data["versionNext"]

        for fpattern, data in CURRENT.items():
            if not data["fnames"]:
                raise RuntimeError("Empty file list, this is a bug.")

            file_nodes = [
                dat["node"] for dat in instance.data["fileData"]
                if dat["fpattern"] == fpattern
            ]

            versioned_data = PREVIOUS.get(fpattern, list())
            versioned_data.sort(key=lambda elem: elem[0]["version"],
                                reverse=True)  # elem: (data, tmp_data)

            current_color_space = data["colorSpace"]

            for ver_data, tmp_data in versioned_data:

                previous_files = tmp_data["pathMap"]

                all_files = list()
                for file, abs_path in data["pathMap"].items():
                    if file not in previous_files:
                        # Possible different file pattern
                        break  # Try previous version

                    abs_previous = previous_files.get(file, "")

                    if not os.path.isfile(abs_previous):
                        # Previous file does not exist (should not happen)
                        break  # Try previous version

                    # Checking on file size and modification time
                    same_file = lib.file_cmp(abs_path, abs_previous)
                    if not same_file:
                        # Possible new files
                        break  # Try previous version

                    all_files.append(file)

                else:
                    # Version matched, consider as same file
                    head_file = sorted(all_files)[0]
                    resolved_path = abs_previous[:-len(file)] + head_file
                    resolved_path = plugins.env_embedded_path(resolved_path)
                    self.update_file_node_attrs(instance, file_nodes,
                                                resolved_path,
                                                current_color_space)
                    # Update color space
                    # * Although the files may be the same, the color
                    #   space may have been changed by the artist.
                    # * We only keep track of the color space here, we
                    #   do not apply it from the database.
                    ver_data["colorSpace"] = current_color_space

                    # Proceed to next pattern
                    break

            else:
                # Did not match any previous version, consider as new file
                self.log.info("New texture collected from '%s': %s"
                              "" % (data["node"], fpattern))

                NEW_OR_CHANGED.append({
                    "fpattern": fpattern,
                    "version": new_version,
                    "colorSpace": current_color_space,
                    "fnames": data["fnames"],
                })

                all_files = list()
                for file, abs_path in data["pathMap"].items():

                    files_to_copy[file] = abs_path

                    if USE_TX:
                        # Upload .tx file as well
                        tx_abs_path = to_tx(abs_path)
                        tx_stage_file = to_tx(file)

                        if current_color_space == "Raw":
                            input_colorspace = "linear"
                        else:
                            input_colorspace = current_color_space

                        files_to_copy[tx_stage_file] = tx_abs_path
                        files_to_tx[tx_abs_path] = (abs_path, input_colorspace)

                    all_files.append(file)

                head_file = sorted(all_files)[0]
                resolved_path = published_dir + "/" + head_file
                self.update_file_node_attrs(instance, file_nodes,
                                            resolved_path, current_color_space)

        file_inventory += NEW_OR_CHANGED

        instance.data["maketx"] = files_to_tx

        instance.data["repr.TexturePack._stage"] = staging_dir
        instance.data["repr.TexturePack._hardlinks"] = list(files_to_copy)
        instance.data["repr.TexturePack.fileInventory"] = file_inventory

        # (NOTE) We need to delay the texture extraction because the
        #   texture instance could be a child instance of a
        #   `reveries.standin`, and a standin could be extracted in
        #   Deadline, so the extraction of the child has to happen
        #   there, too.
        #
        #   But a standin could also be extracted in sequential Deadline
        #   tasks, which is not suitable for extracting its textures.
        #
        #   So we extract the textures locally here, and register a mock
        #   function which does nothing when the Deadline extraction
        #   script runs in each task.
        #
        instance.data["repr.TexturePack._delayRun"] = {
            "func": self.mock_stage,
        }
        self.stage_textures(staging_dir, files_to_copy)
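The version matching above leans on Python's for/else construct twice; an else clause on a for loop runs only when the loop finished without hitting break. Stripped down to its control flow (the function names here are placeholders, not reveries API):

    for ver_data, tmp_data in versioned_data:    # newest version first
        for file, abs_path in data["pathMap"].items():
            if not same_as_previous(file, abs_path, tmp_data):
                break    # mismatch, try the next (older) version
        else:
            reuse_version(ver_data)    # every file matched, reuse it
            break
    else:
        register_as_new(data)    # nothing matched, treat as new/changed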
Example #14
    def process(self, instance):
        from maya import cmds
        from avalon import maya
        from reveries import utils
        from reveries.maya import lib, capsule, utils as maya_utils

        staging_dir = utils.stage_dir()

        filename = "%s.ma" % instance.data["subset"]
        outpath = "%s/%s" % (staging_dir, filename)

        linkfile = "%s.json" % instance.data["subset"]
        linkpath = "%s/%s" % (staging_dir, linkfile)

        instance.data["repr.LookDev._stage"] = staging_dir
        instance.data["repr.LookDev._files"] = [filename, linkfile]
        instance.data["repr.LookDev.entryFileName"] = filename
        instance.data["repr.LookDev.linkFname"] = linkfile

        # Serialise shader relationships
        #
        self.log.info("Serialising shaders..")

        shader_by_id = lib.serialise_shaders(instance.data["dagMembers"])
        assert shader_by_id, "The map of shader relationship is empty."

        # Extract shaders
        #
        self.log.info("Extracting shaders..")

        child_instances = instance.data.get("childInstances", [])
        try:
            texture = next(chd for chd in child_instances
                           if chd.data["family"] == "reveries.texture")
        except StopIteration:
            file_node_attrs = dict()
        else:
            file_node_attrs = texture.data.get("fileNodeAttrs", dict())

        with contextlib.nested(
                maya.maintained_selection(),
                capsule.ref_edit_unlock(),
                # (NOTE) Ensure attribute unlock
                capsule.attribute_states(file_node_attrs.keys(), lock=False),
                # Change to published path
                capsule.attribute_values(file_node_attrs),
                capsule.no_refresh(),
        ):
            # Select the full shading network.
            # If only the shadingGroups were selected and any node in the
            # network connects to a DAG node (e.g. via a drivenKey), the
            # command would export not only the selected shadingGroups'
            # shading network but also the related DAG nodes (i.e. the
            # full hierarchy).
            cmds.select(instance, replace=True, noExpand=True)

            cmds.file(
                outpath,
                options="v=0;",
                type="mayaAscii",
                force=True,
                exportSelected=True,
                preserveReferences=False,
                constructionHistory=False,
                channels=True,  # allow animation
                constraints=False,
                shader=True,
                expressions=True)

        # Animatable attrs
        # Custom attributes on the assembly node that need to be animated.
        self.log.info("Serialising 'avnlook_' prefixed attributes..")
        avnlook_anim = dict()
        for node in cmds.ls(instance.data["dagMembers"], type="transform"):
            id = maya_utils.get_id(node)
            user_attrs = cmds.listAttr(node, userDefined=True) or []
            for attr in user_attrs:
                if not attr.startswith("avnlook_"):
                    continue
                connected = cmds.listConnections(node + "." + attr,
                                                 source=False,
                                                 destination=True,
                                                 plugs=True)
                if connected:
                    avnlook_anim[id + "." + attr] = connected

        surfaces = cmds.ls(instance.data["dagMembers"],
                           noIntermediate=True,
                           type="surfaceShape")

        # UV Chooser
        uv_chooser = dict()
        for chooser in cmds.ls(instance, type="uvChooser"):
            chooser_id = maya_utils.get_id(chooser)

            for src in cmds.listConnections(chooser + ".uvSets",
                                            source=True,
                                            destination=False,
                                            plugs=True) or []:
                geo, attr = src.split(".", 1)
                geo = cmds.listRelatives(geo, parent=True, path=True)[0]
                geo_attr = maya_utils.get_id(geo) + "." + attr

                if chooser_id not in uv_chooser:
                    uv_chooser[chooser_id] = list()
                if geo_attr not in uv_chooser[chooser_id]:
                    uv_chooser[chooser_id].append(geo_attr)

        # CreaseSet
        crease_sets = dict()
        creases = list()

        for node in surfaces:
            creases += cmds.ls(cmds.listSets(object=node), type="creaseSet")

        creases = list(set(creases))

        for cres in creases:
            # Grouping crease set members with crease level value.
            level = cmds.getAttr(cres + ".creaseLevel")
            if level not in crease_sets:
                crease_sets[level] = list()

            for member in cmds.ls(cmds.sets(cres, query=True), long=True):
                node, edges = member.split(".")
                if node not in instance.data["dagMembers"]:
                    continue
                # We have validated Avalon UUID, so there must be a valid ID.
                id = maya_utils.get_id(node)
                crease_sets[level].append(id + "." + edges)

        # Arnold attributes
        arnold_attrs = dict()

        try:
            # (TODO) This should be improved. see issue #65
            from reveries.maya import arnold
        except RuntimeError as e:
            self.log.debug(e)
        else:
            ai_sets = dict()
            for objset in cmds.ls(type="objectSet"):
                if not lib.hasAttr(objset, "aiOverride"):
                    continue
                if not cmds.getAttr(objset + ".aiOverride"):
                    continue
                # Ignore pyblish family instance
                if (lib.hasAttr(objset, "id")
                        and read(objset + ".id") == "pyblish.avalon.instance"):
                    continue

                ai_sets[objset] = cmds.ls(cmds.sets(objset, query=True),
                                          long=True)

            # (TODO) Validate only transform nodes in ai set
            transforms = cmds.ls(cmds.listRelatives(surfaces, parent=True),
                                 long=True)
            for node in transforms:
                # There must be a valid ID
                id = maya_utils.get_id(node)

                attrs = dict()

                # Collect all `ai*` attributes from shape
                shape = cmds.listRelatives(
                    node, shapes=True, noIntermediate=True,
                    fullPath=True) or [None]
                shape = shape[0]
                if shape is None:
                    continue

                for attr in cmds.listAttr(shape, fromPlugin=True) or []:
                    value = read(shape + "." + attr)
                    if value is not None:
                        attrs[attr] = value

                # Collect all override attributes from objectSet
                for ai_set, member in ai_sets.items():
                    if node not in member:
                        continue

                    for attr in cmds.listAttr(ai_set, userDefined=True) or []:
                        # Collect all user attributes from the objectSet
                        # (NOTE) Some attributes, like `castsShadows`, do
                        #        not start with "ai" but also affect
                        #        rendering in Arnold.
                        value = read(node + "." + attr)
                        if value is not None:
                            attrs[attr] = value

                arnold_attrs[id] = attrs

        # VRay Attributes
        vray_attrs = dict()

        try:
            from reveries.maya import vray
        except RuntimeError as e:
            self.log.debug(e)
        else:
            for node in surfaces:
                # - shape
                values = vray.attributes_gather(node)
                if values:
                    vray_attrs[node] = values

                # - transform
                parent = cmds.listRelatives(node, parent=True)
                if parent:
                    values = vray.attributes_gather(parent[0])
                    if values:
                        vray_attrs[parent[0]] = values

        relationships = {
            "shaderById": shader_by_id,
            "avnlookAttrs": avnlook_anim,
            "uvChooser": uv_chooser,
            "creaseSets": crease_sets,
            "arnoldAttrs": arnold_attrs,
            "vrayAttrs": vray_attrs,
        }

        self.log.info("Extracting serialisation..")

        with open(linkpath, "w") as f:
            json.dump(relationships, f)
Example #15
    def process(self, instance):
        from maya import cmds
        from reveries import utils
        from reveries.maya import io, utils as maya_utils
        from reveries.maya.xgen import legacy as xgen

        staging_dir = utils.stage_dir()

        files = list()
        xgen_files = list()
        descriptions_data = dict()

        for desc in instance.data["xgenDescriptions"]:
            palette = xgen.get_palette_by_description(desc)

            # Save UUID and bounding
            descriptions_data[desc] = {
                "id": maya_utils.get_id(desc),
                "bound": xgen.list_bound_geometry(desc),
            }

            # Stage maps
            map_stage = staging_dir + "/maps/%s" % palette

            for head, src in xgen.maps_to_transfer(desc):
                relative = os.path.relpath(src, head)
                if os.path.isfile(src):
                    relative = os.path.dirname(relative)
                    ship = shutil.copy2
                elif os.path.isdir(src):
                    ship = shutil.copytree
                else:
                    continue

                dst_dir = map_stage + "/" + relative
                if not os.path.isdir(dst_dir):
                    os.makedirs(dst_dir)

                try:
                    ship(src, dst_dir)
                except OSError as e:
                    self.log.critical("Map transfer failed: %s" % e)
                    raise

            for root, _, fnames in os.walk(map_stage):
                relative = os.path.relpath(root, staging_dir)
                relative = "" if relative == "." else (relative + "/")
                relative = relative.replace("\\", "/")
                for file in fnames:
                    map_file = relative + file
                    files.append(map_file)

            # Export guides
            guides = xgen.list_guides(desc)
            if guides:
                guide_file = "guides/%s/%s.abc" % (palette, desc)
                guide_path = "%s/%s" % (staging_dir, guide_file)
                io.export_xgen_LGC_guides(guides, guide_path)

                files.append(guide_file)

            # Export grooming
            groom = xgen.get_groom(desc)
            if groom and cmds.objExists(groom):
                groom_dir = "groom/%s/%s" % (palette, desc)
                groom_path = "%s/%s" % (staging_dir, groom_dir)
                xgen.export_grooming(desc, groom, groom_path)

                # Walk groom_path and add into files
                for root, _, fnames in os.walk(groom_path):
                    relative = os.path.relpath(root, staging_dir)
                    relative = "" if relative == "." else (relative + "/")
                    for file in fnames:
                        groom_file = relative + file
                        files.append(groom_file)

        # Extract palette
        for palette in instance.data["xgenPalettes"]:
            xgen_file = palette + ".xgen"
            xgen_path = "%s/%s" % (staging_dir, xgen_file)
            io.export_xgen_LGC_palette(palette, xgen_path)

            xgen_files.append(xgen_file)
            files.append(xgen_file)

            # Culled
            xgd_file = "deltas/%s/%s_culled.xgd" % (palette, palette)
            xgd_path = "%s/%s" % (staging_dir, xgd_file)
            if xgen.save_culled_as_delta(palette, xgd_path):
                self.log.info("Culled primitives saved.")

                files.append(xgd_file)

        instance.data["repr.XGenLegacy._stage"] = staging_dir
        instance.data["repr.XGenLegacy._files"] = files
        instance.data["repr.XGenLegacy.entryFileName"] = None  # no entry file
        instance.data["repr.XGenLegacy.descriptionsData"] = descriptions_data
        instance.data["repr.XGenLegacy.palettes"] = xgen_files
        instance.data["repr.XGenLegacy.step"] = instance.data["step"]
Example #16
    def process(self, instance):
        from maya import cmds
        from reveries import utils
        from reveries.maya import lib, utils as maya_utils

        staging_dir = utils.stage_dir(dir=instance.data["_sharedStage"])

        context_data = instance.context.data
        start = context_data["startFrame"]
        end = context_data["endFrame"]
        step = instance.data.get("bakeStep", 1.0)

        instance.data["startFrame"] = start
        instance.data["endFrame"] = end
        instance.data["step"] = step

        ma_filename = "%s.ma" % instance.data["subset"]
        ma_outpath = "%s/%s" % (staging_dir, ma_filename)

        abc_filename = "%s.abc" % instance.data["subset"]
        abc_outpath = "%s/%s" % (staging_dir, abc_filename)

        fbx_filename = "%s.fbx" % instance.data["subset"]
        fbx_outpath = "%s/%s" % (staging_dir, fbx_filename)

        DO_NOT_BAKE_THESE = [
            "motionBlurOverride",
            "aiUseGlobalShutter",
            "aiShutterStart",
            "aiShutterEnd",
            "aiShutterType",
            "aiEnableDOF",
            "aiFov",
            "aiHorizontalFov",
            "aiVerticalFov",
        ]

        DO_BAKE_THESE = [
            "focalLength",
        ]
        DO_BAKE_THESE += lib.TRANSFORM_ATTRS

        camera = cmds.ls(instance, type="camera", long=True)[0]

        cam_uuid = maya_utils.get_id(camera)
        cam_transform = cmds.listRelatives(camera, parent=True,
                                           fullPath=True)[0]

        donot_bake = [cam_transform + "." + attr for attr in DO_NOT_BAKE_THESE]
        do_bake = [cam_transform + "." + attr for attr in DO_BAKE_THESE]

        euler_filter = instance.data.get("eulerFilter", False)

        duplicate_input_graph = bool(cmds.ls(instance, type="stereoRigCamera"))

        instance.data["repr.mayaAscii._stage"] = staging_dir
        instance.data["repr.mayaAscii._files"] = [ma_filename]
        instance.data["repr.mayaAscii.entryFileName"] = ma_filename
        instance.data["repr.mayaAscii.cameraUUID"] = cam_uuid

        instance.data["repr.Alembic._stage"] = staging_dir
        instance.data["repr.Alembic._files"] = [abc_filename]
        instance.data["repr.Alembic.entryFileName"] = abc_filename
        instance.data["repr.Alembic.cameraUUID"] = cam_uuid

        instance.data["repr.FBX._stage"] = staging_dir
        instance.data["repr.FBX._files"] = [fbx_filename]
        instance.data["repr.FBX.entryFileName"] = fbx_filename
        instance.data["repr.FBX.cameraUUID"] = cam_uuid

        # Run one delayed extraction for all representations
        instance.data["repr._all_repr_._stage"] = staging_dir
        instance.data["repr._all_repr_._delayRun"] = {
            "func":
            self.extract_all,
            "args": [
                cam_transform, ma_outpath, abc_outpath, fbx_outpath, start,
                end, step, euler_filter, do_bake, donot_bake
            ],
            "kwargs": {
                "duplicate_input_graph": duplicate_input_graph,
            }
        }
Example #17
    def process(self, instance):
        from maya import cmds
        from reveries import utils
        from reveries.maya import lib, capsule

        cmds.loadPlugin("animImportExport", quiet=True)

        staging_dir = utils.stage_dir()
        script = "%s.mel" % instance.data["subset"]
        filename = "%s.anim" % instance.data["subset"]
        scriptpath = "%s/%s" % (staging_dir, script)
        outpath = "%s/%s" % (staging_dir, filename)

        animated_asset = instance.data["animatedAssetId"]

        instance.data["repr.anim._stage"] = staging_dir
        instance.data["repr.anim._files"] = [filename, script]
        instance.data["repr.anim.entryFileName"] = filename
        instance.data["repr.anim.animatedAssetId"] = animated_asset

        # Save animated nodes in order
        with capsule.maintained_selection():
            cmds.select(instance.data["outAnim"], replace=True)

            with contextlib.nested(
                    capsule.namespaced(instance.data["animatedNamespace"],
                                       new=False),
                    capsule.relative_namespaced()):
                # Save with basename
                with open(scriptpath, "w") as fp:
                    # Tolerate nodes that may not exist in other assets
                    fp.write("select -r `ls\n" + "\n".join(cmds.ls(sl=True)) +
                             "`;")

        context_data = instance.context.data
        start = context_data["startFrame"]
        end = context_data["endFrame"]

        instance.data["startFrame"] = start
        instance.data["endFrame"] = end

        with contextlib.nested(
                capsule.no_refresh(),
                capsule.maintained_selection(),
                capsule.undo_chunk(),
        ):
            lib.bake(
                instance.data["outAnim"],
                frame_range=(start, end),
                shape=False,
                # Remove baked attributes from the layer so all keys are
                # baked out, as if the animLayers had been merged.
                remove_baked_attr_from_layer=True)

            cmds.select(instance.data["outAnim"], replace=True, noExpand=True)
            cmds.file(outpath,
                      force=True,
                      typ="animExport",
                      exportSelectedAnim=True,
                      options=("options=keys;"
                               "hierarchy=none;"
                               "precision=17;"
                               "intValue=17;"
                               "nodeNames=1;"
                               "verboseUnits=0;"
                               "whichRange=1;"
                               "helpPictures=0;"
                               "useChannelBox=0;"
                               "controlPoints=0;"
                               "shapes=0;"
                               "copyKeyCmd="
                               "-animation objects "
                               "-option keys "
                               "-hierarchy none "
                               "-controlPoints 0 "
                               "-shape 0"))
    def process(self, instance):
        from maya import cmds
        from reveries import utils, lib

        staging_dir = utils.stage_dir(dir=instance.data["_sharedStage"])
        cachename = "%s.abc" % instance.data["subset"]
        filename = "%s.ma" % instance.data["subset"]
        cachepath = "%s/%s" % (staging_dir, cachename)
        outpath = "%s/%s" % (staging_dir, filename)

        instance.data["repr.GPUCache._stage"] = staging_dir
        instance.data["repr.GPUCache._hardlinks"] = [filename, cachename]
        instance.data["repr.GPUCache.entryFileName"] = filename

        if instance.data.get("staticCache"):
            start = end = cmds.currentTime(query=True)
        else:
            context_data = instance.context.data
            start = context_data["startFrame"]
            end = context_data["endFrame"]

        instance.data["startFrame"] = start
        instance.data["endFrame"] = end

        # Collect root nodes
        assemblies = set()
        for node in instance.data["outCache"]:
            assemblies.add("|" + node[1:].split("|", 1)[0])
        assemblies = list(assemblies)

        # Collect all parent nodes
        out_hierarchy = set()
        for node in instance.data["outCache"]:
            out_hierarchy.add(node)
            out_hierarchy.update(lib.iter_uri(node, "|"))

        # Hide unwanted nodes (nodes outside the cached hierarchy)
        attr_values = dict()
        for node in cmds.listRelatives(
                assemblies, allDescendents=True, type="transform",
                fullPath=True) or []:
            if node not in out_hierarchy:
                attr = node + ".visibility"

                locked = cmds.getAttr(attr, lock=True)
                has_connections = cmds.listConnections(attr,
                                                       source=True,
                                                       destination=False)
                if locked or has_connections:
                    continue

                attr_values[attr] = False

        instance.data["repr.GPUCache._delayRun"] = {
            "func":
            self.export_gpu,
            "args": [
                outpath, cachepath, cachename, start, end, assemblies,
                attr_values
            ],
        }
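`lib.iter_uri(node, "|")` above is assumed to yield every ancestor path of a DAG node, so the cached hierarchy keeps its parents visible. A minimal sketch of that behaviour (an assumption, not the actual reveries implementation):

    def iter_uri(path, sep):
        # "|grp|sub|node" yields "|grp|sub", then "|grp"
        while True:
            path = path.rsplit(sep, 1)[0]
            if not path:
                break
            yield path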
Example #19
    def process(self, instance):
        from maya import cmds
        from avalon import maya
        from reveries import utils
        from reveries.maya import capsule

        staging_dir = utils.stage_dir()
        filename = "%s.mb" % instance.data["subset"]
        outpath = "%s/%s" % (staging_dir, filename)

        # Perform extraction
        self.log.info("Performing extraction..")
        with contextlib.nested(
                capsule.no_undo(),
                capsule.no_display_layers(instance[:]),
                maya.maintained_selection(),
        ):
            with capsule.undo_chunk_when_no_undo():
                """(DEPRECATED, keeping namespaces)
                # - Remove referenced subset's namespace before exporting
                #   (Not keeping model namespace)
                referenced_namespace = self.context.data["referencedNamespace"]
                for namespace in reversed(sorted(list(referenced_namespace))):
                    if not cmds.namespace(exists=namespace):
                        continue

                    try:
                        cmds.namespace(removeNamespace=namespace,
                                       mergeNamespaceWithRoot=True)
                    except Exception:
                        # Reload reference and try again.
                        # The namespace of the reference will be
                        # removable after reload.
                        # (TODO) This publish workflow might not be a good
                        #        approach...
                        ref_node = lib.reference_node_by_namespace(namespace)
                        # There must be a reference node, since that's the
                        # main reason why namespace can not be removed.
                        cmds.file(loadReference=ref_node)
                        cmds.namespace(removeNamespace=namespace,
                                       mergeNamespaceWithRoot=True)
                """

                # - Remove loaded container members
                #   If the mesh of the loaded model has been copied and
                #   edited (mesh faces detached and separated), the model
                #   container might end up with a lot of facet members,
                #   which means there are DAG connections that would cause
                #   the model container to be exported as well, and we do
                #   not want that to happen. So we just remove them all
                #   for good.
                for container in instance.context.data["RootContainers"]:
                    cmds.delete(container)

                mesh_nodes = cmds.ls(instance,
                                     type="mesh",
                                     noIntermediate=True,
                                     long=True)
                geo_id_and_hash = self.hash(set(mesh_nodes))

                cmds.select(cmds.ls(instance), noExpand=True)

                cmds.file(outpath,
                          force=True,
                          typ="mayaBinary",
                          exportSelected=True,
                          preserveReferences=False,
                          channels=True,
                          constraints=True,
                          expressions=True,
                          constructionHistory=True,
                          shader=True)

        instance.data["repr.mayaBinary._stage"] = staging_dir
        instance.data["repr.mayaBinary._files"] = [filename]
        instance.data["repr.mayaBinary.entryFileName"] = filename
        instance.data["repr.mayaBinary.modelProfile"] = geo_id_and_hash