    def extract(self):
        context_data = self.context.data
        self.start = context_data.get("startFrame")
        self.end = context_data.get("endFrame")
        self.step = self.data.get("bakeStep", 1.0)
        camera = cmds.ls(self.member, type="camera", long=True)[0]

        self.camera_uuid = utils.get_id(camera)

        donot_bake = [camera + "." + attr for attr in DO_NOT_BAKE_THESE]

        with contextlib.nested(
                capsule.no_refresh(),
                capsule.attr_unkeyable(donot_bake),
                capsule.evaluation("off"),
                capsule.undo_chunk(),
        ):
            # bake to worldspace
            baked_camera = lib.bake_camera(camera, self.start, self.end,
                                           self.step)

            cmds.select(
                baked_camera,
                hierarchy=True,  # With shape
                replace=True,
                noExpand=True)

            super(ExtractCamera, self).extract()
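
Note: `contextlib.nested` only exists in Python 2; under Python 3 the same stacking of the `capsule` context managers can be reproduced with `contextlib.ExitStack`. A minimal stand-in (a sketch, not part of the reveries codebase):

import contextlib

@contextlib.contextmanager
def nested(*managers):
    """Python 3 stand-in for contextlib.nested used throughout these examples."""
    with contextlib.ExitStack() as stack:
        yield tuple(stack.enter_context(m) for m in managers)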
Example #2
    def extract(self):

        DO_NOT_BAKE_THESE = [
            "motionBlurOverride",
            "aiUseGlobalShutter",
            "aiShutterStart",
            "aiShutterEnd",
            "aiShutterType",
            "aiEnableDOF",
            "aiFov",
            "aiHorizontalFov",
            "aiVerticalFov",
        ]

        DO_BAKE_THESE = [
            "focalLength",
        ]
        DO_BAKE_THESE += lib.TRANSFORM_ATTRS

        context_data = self.context.data
        self.start = context_data.get("startFrame")
        self.end = context_data.get("endFrame")
        self.step = self.data.get("bakeStep", 1.0)
        camera = cmds.ls(self.member, type="camera", long=True)[0]

        self.camera_uuid = utils.get_id(camera)

        cam_transform = cmds.listRelatives(camera,
                                           parent=True,
                                           fullPath=True)[0]

        donot_bake = [cam_transform + "." + attr for attr in DO_NOT_BAKE_THESE]
        do_bake = [cam_transform + "." + attr for attr in DO_BAKE_THESE]

        with contextlib.nested(
            capsule.no_refresh(),
            capsule.attribute_states(donot_bake, lock=False, keyable=False),
            capsule.attribute_states(do_bake, lock=False, keyable=True),
            capsule.evaluation("off"),
        ):
            with capsule.delete_after() as delete_bin:

                # bake to worldspace
                frame_range = (self.start, self.end)
                baked_camera = lib.bake_to_world_space(cam_transform,
                                                       frame_range,
                                                       step=self.step)[0]
                delete_bin.append(baked_camera)

                cmds.select(baked_camera,
                            hierarchy=True,  # With shape
                            replace=True,
                            noExpand=True)

                super(ExtractCamera, self).extract()
    def extract_LightSet(self, packager):

        from maya import cmds
        from avalon import maya
        from reveries.maya import capsule

        entry_file = packager.file_name("ma")
        package_path = packager.create_package()

        # Extract lights
        #
        entry_path = os.path.join(package_path, entry_file)

        self.log.info("Extracting lights..")

        # From texture extractor
        try:
            texture = next(chd for chd in self.data.get("childInstances", [])
                           if chd.data["family"] == "reveries.texture")
        except StopIteration:
            file_node_attrs = dict()
        else:
            file_node_attrs = texture.data.get("fileNodeAttrs", dict())

        with contextlib.nested(
            maya.maintained_selection(),
            capsule.attribute_values(file_node_attrs),
            capsule.no_refresh(),
        ):
            cmds.select(self.member,
                        replace=True,
                        noExpand=True)

            cmds.file(entry_path,
                      options="v=0;",
                      type="mayaAscii",
                      force=True,
                      exportSelected=True,
                      preserveReferences=False,
                      constructionHistory=False,
                      channels=True,  # allow animation
                      constraints=False,
                      shader=False,
                      expressions=True)

        packager.add_data({
            "entryFileName": entry_file,
        })
Example #4
    def process(self, instance):
        from maya import cmds
        from avalon import maya
        from reveries import utils
        from reveries.maya import capsule

        staging_dir = utils.stage_dir()
        filename = "%s.ma" % instance.data["subset"]
        outpath = "%s/%s" % (staging_dir, filename)

        instance.data["repr.LightSet._stage"] = staging_dir
        instance.data["repr.LightSet._files"] = [filename]
        instance.data["repr.LightSet.entryFileName"] = filename

        # Extract lights
        #
        self.log.info("Extracting lights..")

        # From texture extractor
        child_instances = instance.data.get("childInstances", [])
        try:
            texture = next(chd for chd in child_instances
                           if chd.data["family"] == "reveries.texture")
        except StopIteration:
            file_node_attrs = dict()
        else:
            file_node_attrs = texture.data.get("fileNodeAttrs", dict())

        with contextlib.nested(
                maya.maintained_selection(),
                capsule.attribute_values(file_node_attrs),
                capsule.no_refresh(),
        ):
            cmds.select(instance, replace=True, noExpand=True)

            cmds.file(
                outpath,
                options="v=0;",
                type="mayaAscii",
                force=True,
                exportSelected=True,
                preserveReferences=False,
                constructionHistory=False,
                channels=True,  # allow animation
                constraints=False,
                shader=False,
                expressions=True)
Example #5
    def extract(self):

        if self.data.get("staticCache"):
            self.start_frame = cmds.currentTime(query=True)
            self.end_frame = cmds.currentTime(query=True)
        else:
            context_data = self.context.data
            self.start_frame = context_data.get("startFrame")
            self.end_frame = context_data.get("endFrame")

        with contextlib.nested(
            capsule.no_undo(),
            capsule.no_refresh(),
            capsule.evaluation("off"),
            capsule.maintained_selection(),
        ):
            cmds.select(self.data["outCache"], replace=True)
            super(ExtractPointCache, self).extract()
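
The `capsule` helpers used in these snippets are small context managers around scene state. As an illustration only (not the actual reveries implementation), `maintained_selection` can be thought of as something like:

import contextlib
from maya import cmds

@contextlib.contextmanager
def maintained_selection():
    """Restore whatever was selected once the block exits (illustrative)."""
    previous = cmds.ls(selection=True, long=True)
    try:
        yield
    finally:
        if previous:
            cmds.select(previous, replace=True, noExpand=True)
        else:
            cmds.select(clear=True)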
    def export_fbx(self, outpath, cachepath, cachename, nodes, keep_namespace):
        from reveries.maya import io, capsule
        from maya import cmds

        with contextlib.nested(
                capsule.no_undo(),
                capsule.no_refresh(),
                capsule.evaluation("off"),
                capsule.maintained_selection(),
        ):

            cmds.select(nodes, replace=True)

            with capsule.StripNamespace([] if keep_namespace else nodes):
                with io.export_fbx_set_pointcache("FBXCacheSET"):
                    io.export_fbx(cachepath)

                io.wrap_fbx(outpath, [(cachename, "ROOT")])
    def export_gpu(self, outpath, cachepath, cachename, start, end, assemblies,
                   attr_values):
        from reveries.maya import io, capsule
        from maya import cmds

        with contextlib.nested(
                capsule.no_undo(),
                capsule.no_refresh(),
                capsule.evaluation("off"),
                capsule.maintained_selection(),
        ):

            cmds.select(assemblies, replace=True, noExpand=True)

            with contextlib.nested(
                    capsule.attribute_values(attr_values),
                    # Mute animated visibility channels
                    capsule.attribute_mute(list(attr_values.keys())),
            ):
                io.export_gpu(cachepath, start, end)
                io.wrap_gpu(outpath, [(cachename, "ROOT")])
Example #8
    def extract_Ass(self):
        # Ensure the mtoa plug-in is loaded
        cmds.loadPlugin("mtoa", quiet=True)

        package_path = self.create_package()
        cache_file = self.file_name("ass")
        cache_path = os.path.join(package_path, cache_file)

        with contextlib.nested(
                capsule.no_undo(),
                capsule.no_refresh(),
                capsule.evaluation("off"),
                capsule.maintained_selection(),
                capsule.ref_edit_unlock(),
                remove_file_env_path(self.data),
        ):
            cmds.select(self.member, replace=True)
            asses = cmds.arnoldExportAss(filename=cache_path,
                                         selected=True,
                                         startFrame=self.data["startFrame"],
                                         endFrame=self.data["endFrame"],
                                         frameStep=self.data["byFrameStep"],
                                         shadowLinks=1,
                                         lightLinks=1,
                                         expandProcedurals=True,
                                         mask=24)

        use_sequence = self.data["startFrame"] != self.data["endFrame"]
        entry_file = os.path.basename(asses[0])

        self.add_data({
            "entryFileName": entry_file,
            "useSequence": use_sequence
        })
        if use_sequence:
            self.add_data({
                "startFrame": self.data["startFrame"],
                "endFrame": self.data["endFrame"]
            })
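
The `mask` argument of `cmds.arnoldExportAss` is a bitmask of Arnold node-type flags. Going by the comments in the later examples, the values used here decompose as below; the flag constants are assumptions, so verify them against the `AI_NODE_*` constants of your Arnold install:

# Assumed Arnold node-type bits (verify against arnold.AI_NODE_*):
AI_NODE_SHAPE = 0x0008          # 8
AI_NODE_SHADER = 0x0010         # 16
AI_NODE_OVERRIDE = 0x0020       # 32
AI_NODE_COLOR_MANAGER = 0x0800  # 2048
AI_NODE_OPERATOR = 0x1000       # 4096

MASK_SHAPES_SHADERS = AI_NODE_SHAPE | AI_NODE_SHADER                     # 24
MASK_NO_COLOR_MANAGER = (MASK_SHAPES_SHADERS
                         | AI_NODE_OVERRIDE | AI_NODE_OPERATOR)           # 4152
MASK_WITH_COLOR_MANAGER = MASK_NO_COLOR_MANAGER | AI_NODE_COLOR_MANAGER   # 6200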
Example #9
    def extract_anim(self, packager):
        cmds.loadPlugin("animImportExport", quiet=True)

        package_path = packager.create_package()

        entry_file = packager.file_name("anim")
        entry_path = os.path.join(package_path, entry_file)

        sele_file = packager.file_name("mel")
        sele_path = os.path.join(package_path, sele_file)

        # Save animated nodes, preserving selection order
        with capsule.maintained_selection():
            cmds.select(self.data["outAnim"], replace=True)

            with contextlib.nested(
                    capsule.namespaced(self.data["animatedNamespace"],
                                       new=False),
                    capsule.relative_namespaced()):
                # Save with basename
                with open(sele_path, "w") as fp:
                    fp.write("select -r\n" + "\n".join(cmds.ls(sl=True)) + ";")

        context_data = self.context.data
        start = context_data.get("startFrame")
        end = context_data.get("endFrame")

        with contextlib.nested(
                capsule.no_refresh(),
                capsule.maintained_selection(),
                capsule.undo_chunk(),
        ):
            lib.bake(self.data["outAnim"],
                     frame_range=(start, end),
                     shape=False)

            cmds.select(self.data["outAnim"], replace=True, noExpand=True)
            cmds.file(entry_path,
                      force=True,
                      typ="animExport",
                      exportSelectedAnim=True,
                      options=("options=keys;"
                               "hierarchy=none;"
                               "precision=17;"
                               "intValue=17;"
                               "nodeNames=1;"
                               "verboseUnits=0;"
                               "whichRange=1;"
                               "helpPictures=0;"
                               "useChannelBox=0;"
                               "controlPoints=0;"
                               "shapes=0;"
                               "copyKeyCmd="
                               "-animation objects "
                               "-option keys "
                               "-hierarchy none "
                               "-controlPoints 0 "
                               "-shape 0"))

        packager.add_data({
            "entryFileName": entry_file,
            "animatedAssetId": self.data["animatedAssetId"]
        })
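
For context, the selection MEL written above is meant to be sourced at load time so the `.anim` file is applied to the nodes in the same order they were exported. A rough sketch of that load step (the `animImport` translator comes with the same `animImportExport` plug-in; import options are left at their defaults here and should be verified locally):

from maya import cmds, mel

def apply_baked_anim(sele_path, entry_path):
    cmds.loadPlugin("animImportExport", quiet=True)
    # Re-select the animated nodes in the order they were written out
    mel.eval('source "%s";' % sele_path.replace("\\", "/"))
    # Import the exported animation curves onto the current selection
    cmds.file(entry_path, i=True, type="animImport", ignoreVersion=True)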
    def extract_LookDev(self):

        from avalon import maya
        from reveries.maya import lib, capsule

        entry_file = self.file_name("ma")
        package_path = self.create_package()

        # Extract shaders
        #
        entry_path = os.path.join(package_path, entry_file)

        self.log.info("Extracting shaders..")

        with contextlib.nested(
                maya.maintained_selection(),
                capsule.undo_chunk(),
                capsule.no_refresh(),
        ):
            # From texture extractor
            file_node_path = self.context.data.get("fileNodePath")
            if file_node_path is not None:
                # Change texture path to published location
                for file_node in cmds.ls(self.member, type="file"):
                    attr_name = file_node + ".fileTextureName"
                    color_attr = file_node + ".colorSpace"
                    final_path = file_node_path[file_node]

                    # Unlock colorspace
                    color_space = cmds.getAttr(color_attr)
                    cmds.setAttr(color_attr, lock=False)

                    # Set texture file path to publish location
                    cmds.setAttr(attr_name, final_path, type="string")

                    # Lock colorspace
                    cmds.setAttr(color_attr,
                                 color_space,
                                 lock=True,
                                 type="string")

            # Select the full shading network.
            # If only the shadingGroups were selected and any node in the
            # network is connected to a DAG node (e.g. a drivenKey), the
            # command would export not only the selected shadingGroups'
            # shading network but also the related DAG nodes (i.e. the
            # full hierarchy).
            cmds.select(self.member, replace=True, noExpand=True)

            cmds.file(entry_path,
                      options="v=0;",
                      type="mayaAscii",
                      force=True,
                      exportSelected=True,
                      preserveReferences=False,
                      constructionHistory=False)

        # Serialise shaders relationships
        #
        link_file = self.file_name("json")
        link_path = os.path.join(package_path, link_file)

        self.log.info("Serialising shaders..")

        shader_by_id = lib.serialise_shaders(self.data["dagMembers"])

        # Animatable attrs
        # Custom attributes on the assembly node which need to be animatable.
        self.log.info("Serialising animatable attributes..")
        animatable = dict()
        root = cmds.ls(self.data["dagMembers"], assemblies=True)
        if root:
            root = root[0]
            for attr in cmds.listAttr(root, userDefined=True) or list():
                animatable[attr] = cmds.listConnections(root + "." + attr,
                                                        destination=True,
                                                        source=False,
                                                        plugs=True)

        surfaces = cmds.ls(self.data["dagMembers"],
                           noIntermediate=True,
                           type="surfaceShape")

        # CreaseSet
        crease_sets = dict()
        creases = list()

        for node in surfaces:
            creases += cmds.ls(cmds.listSets(object=node), type="creaseSet")

        creases = list(set(creases))

        for cres in creases:
            # Group crease set members by crease level value.
            level = cmds.getAttr(cres + ".creaseLevel")
            if level not in crease_sets:
                crease_sets[level] = list()

            for member in cmds.ls(cmds.sets(cres, query=True), long=True):
                node, edges = member.split(".")
                if node not in self.data["dagMembers"]:
                    continue
                # We have validated Avalon UUID, so there must be a valid ID.
                id = utils.get_id(node)
                crease_sets[level].append(id + "." + edges)

        # Arnold attributes
        arnold_attrs = dict()

        try:
            # (TODO) This should be improved. see issue #65
            from reveries.maya import arnold
        except RuntimeError as e:
            self.log.debug(e)
        else:
            ai_sets = dict()
            for objset in cmds.ls(type="objectSet"):
                if not lib.hasAttr(objset, "aiOverride"):
                    continue
                if not cmds.getAttr(objset + ".aiOverride"):
                    continue
                # Ignore pyblish family instance
                if (lib.hasAttr(objset, "id")
                        and read(objset + ".id") == "pyblish.avalon.instance"):
                    continue

                ai_sets[objset] = cmds.ls(cmds.sets(objset, query=True),
                                          long=True)

            # (TODO) Validate only transform nodes in ai set
            transforms = cmds.ls(cmds.listRelatives(surfaces, parent=True),
                                 long=True)
            for node in transforms:
                # There must be a valid ID
                id = utils.get_id(node)

                attrs = dict()

                # Collect all `ai*` attributes from shape
                shape = cmds.listRelatives(
                    node, shapes=True, noIntermediate=True,
                    fullPath=True) or [None]
                shape = shape[0]
                if shape is None:
                    continue

                for attr in cmds.listAttr(shape, fromPlugin=True) or []:
                    value = read(shape + "." + attr)
                    if value is not None:
                        attrs[attr] = value

                # Collect all override attributes from objectSet
                for ai_set, member in ai_sets.items():
                    if node not in member:
                        continue

                    for attr in cmds.listAttr(ai_set, userDefined=True) or []:
                        # Collect all user attributes from the objectSet
                        # (NOTE) Some attributes like `castsShadows` do not
                        #        start with "ai" but still affect rendering
                        #        in Arnold.
                        value = read(node + "." + attr)
                        if value is not None:
                            attrs[attr] = value

                arnold_attrs[id] = attrs

        # VRay Attributes
        vray_attrs = dict()

        try:
            from reveries.maya import vray
        except RuntimeError as e:
            self.log.debug(e)
        else:
            for node in surfaces:
                # - shape
                values = vray.attributes_gather(node)
                if values:
                    vray_attrs[node] = values

                # - transform
                parent = cmds.listRelatives(node, parent=True)
                if parent:
                    values = vray.attributes_gather(parent[0])
                    if values:
                        vray_attrs[parent[0]] = values

        relationships = {
            "shaderById": shader_by_id,
            "animatable": animatable,
            "creaseSets": crease_sets,
            "arnoldAttrs": arnold_attrs,
            "vrayAttrs": vray_attrs,
        }

        self.log.info("Extracting serialisation..")
        with open(link_path, "w") as f:
            json.dump(relationships, f)

        self.add_data({
            "linkFname": link_file,
            "entryFileName": entry_file,
        })

        self.log.info("Extracted {name} to {path}".format(
            name=self.data["subset"], path=package_path))
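
For reference, the link file serialised above is a plain JSON document whose top-level keys come straight from the `relationships` dict; an illustrative sketch of its shape (placeholder IDs and values; the inner layout of `shaderById` depends on `lib.serialise_shaders`):

relationships = {
    "shaderById": {},                                           # from lib.serialise_shaders
    "animatable": {"myCustomAttr": ["|rig|ctrl.translateX"]},   # attr -> destination plugs
    "creaseSets": {2.0: ["<avalon-id>.e[12:35]"]},              # crease level -> member edges
    "arnoldAttrs": {"<avalon-id>": {"aiOpaque": False}},        # transform id -> attr values
    "vrayAttrs": {"|asset|geoShape": {}},                       # node -> vray.attributes_gather()
}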
Example #11
    def export_ass(self,
                   nodes,
                   outpath,
                   file_node_attrs,
                   has_yeti,
                   start,
                   end,
                   step,
                   expand_procedurals=True):
        from maya import cmds, mel
        from reveries.maya import arnold, capsule

        # Ensure the Arnold render options node is created
        arnold.utils.create_options()

        render_settings = {
            # Disable Auto TX update and enable use of existing TX
            "defaultArnoldRenderOptions.autotx": False,
            "defaultArnoldRenderOptions.use_existing_tiled_textures": True,
            # Ensure frame padding == 4
            "defaultRenderGlobals.extensionPadding": 4,
        }

        # Yeti
        if has_yeti:
            # In Deadline, this is a script job instead of a rendering job,
            # so the `pgYetiPreRender` Pre-Render MEL will not be triggered.
            # We need to call it ourselves, or Yeti will complain about the
            # cache temp dir not existing.
            mel.eval("pgYetiPreRender;")

        with contextlib.nested(
                capsule.no_undo(),
                capsule.no_refresh(),
                capsule.evaluation("off"),
                capsule.maintained_selection(),
                capsule.ref_edit_unlock(),
                # (NOTE) Ensure attribute unlock
                capsule.attribute_states(file_node_attrs.keys(), lock=False),
                # Change to published path
                capsule.attribute_values(file_node_attrs),
                # Fixed render settings
                capsule.attribute_values(render_settings),
        ):
            cmds.select(nodes, replace=True)
            asses = cmds.arnoldExportAss(
                filename=outpath,
                selected=True,
                startFrame=start,
                endFrame=end,
                frameStep=step,
                expandProcedurals=expand_procedurals,
                boundingBox=True,
                # Mask:
                #      Shapes,
                #      Shaders,
                #      Override Nodes,
                #      Operators,
                #
                # mask=4152,  # No Color Manager
                mask=6200)  # With Color Manager

            # Change to environment var embedded path
            root = avalon.api.registered_root().replace("\\", "/")
            project = avalon.api.Session["AVALON_PROJECT"]

            for ass in asses:
                lines = list()
                has_change = False
                with open(ass, "r") as assf:
                    for line in assf.readlines():
                        if line.startswith(" filename "):
                            line = line.replace(root, "[AVALON_PROJECTS]", 1)
                            line = line.replace(project, "[AVALON_PROJECT]", 1)
                            has_change = True
                        lines.append(line)

                # Remove color manager
                # (NOTE): If the Color Manager is included, rendering in
                #         Houdini or another DCC may raise an error.
                try:
                    s = lines.index("color_manager_syncolor\n")
                except ValueError:
                    # No color manager found
                    pass
                else:
                    e = lines.index("}\n", s) + 1
                    lines = lines[:s] + lines[e:]
                    has_change = True

                # Re-write
                if has_change:
                    with open(ass, "w") as assf:
                        assf.write("".join(lines))
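
The `[AVALON_PROJECTS]` / `[AVALON_PROJECT]` tokens written into the `.ass` files above are meant to be expanded back before rendering on another machine. A minimal sketch of that expansion, assuming the standard Avalon environment variables are set (the expansion step itself is not part of this extractor):

import os

def expand_ass_tokens(line):
    # Reverse of the substitution done at export time
    projects_root = os.environ["AVALON_PROJECTS"].replace("\\", "/")
    line = line.replace("[AVALON_PROJECTS]", projects_root)
    line = line.replace("[AVALON_PROJECT]", os.environ["AVALON_PROJECT"])
    return line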
Example #12
    def export_ass(data, start, end, step):

        arnold_tx_settings = {
            "defaultArnoldRenderOptions.autotx": False,
            "defaultArnoldRenderOptions.use_existing_tiled_textures": True,
        }

        # Yeti
        if data["hasYeti"]:
            # In Deadline, this is a script job instead of a rendering job,
            # so the `pgYetiPreRender` Pre-Render MEL will not be triggered.
            # We need to call it ourselves, or Yeti will complain about the
            # cache temp dir not existing.
            mel.eval("pgYetiPreRender;")

        with contextlib.nested(
                capsule.no_undo(),
                capsule.no_refresh(),
                capsule.evaluation("off"),
                capsule.maintained_selection(),
                capsule.ref_edit_unlock(),
                # (NOTE) Ensure attribute unlock
                capsule.attribute_states(data["fileNodeAttrs"].keys(),
                                         lock=False),
                # Change to published path
                capsule.attribute_values(data["fileNodeAttrs"]),
                # Disable Auto TX update and enable use of existing TX
                capsule.attribute_values(arnold_tx_settings),
        ):
            cmds.select(data["member"], replace=True)
            asses = cmds.arnoldExportAss(
                filename=data["cachePath"],
                selected=True,
                startFrame=start,
                endFrame=end,
                frameStep=step,
                expandProcedurals=True,
                boundingBox=True,
                # Mask:
                #      Shapes,
                #      Shaders,
                #      Override Nodes,
                #      Operators,
                #
                # (NOTE): If the Color Manager is included, rendering in
                #         Houdini or another DCC may raise an error.
                # mask=6200,  # With Color Manager
                #
                mask=4152)  # No Color Manager

            # Change to environment var embedded path
            root = avalon.api.registered_root().replace("\\", "/")
            project = avalon.api.Session["AVALON_PROJECT"]

            for ass in asses:
                lines = list()
                has_change = False
                with open(ass, "r") as assf:
                    for line in assf.readlines():
                        if line.startswith(" filename "):
                            line = line.replace(root, "[AVALON_PROJECTS]", 1)
                            line = line.replace(project, "[AVALON_PROJECT]", 1)
                            has_change = True
                        lines.append(line)

                if has_change:
                    with open(ass, "w") as assf:
                        assf.write("".join(lines))
    def export_alembic(self, root, outpath, start, end, euler_filter):
        from reveries.maya import io, lib, capsule
        from maya import cmds

        with contextlib.nested(
                capsule.no_undo(),
                capsule.no_refresh(),
                capsule.evaluation("off"),
                capsule.maintained_selection(),
        ):
            # Selection may change if there are duplicate named nodes that
            # need to be duplicated to resolve.

            with capsule.delete_after() as delete_bin:

                # (NOTE) We need to check for duplicate named nodes, or an
                #        error will be raised during Alembic export.
                result = lib.ls_duplicated_name(root)
                duplicated = [n for m in result.values() for n in m]
                if duplicated:
                    self.log.info("Duplicate named nodes found, resolving...")
                    # Duplicate it so we get a uniquely named new node
                    unique_named = list()
                    for node in duplicated:
                        new_nodes = cmds.duplicate(node,
                                                   inputConnections=True,
                                                   renameChildren=True)
                        new_nodes = cmds.ls(new_nodes, long=True)
                        unique_named.append(new_nodes[0])
                        # New nodes will be deleted after the export
                        delete_bin.extend(new_nodes)

                    # Replace the duplicate named nodes with the uniquely named ones
                    root = list(set(root) - set(duplicated)) + unique_named

                for node in set(root):
                    # (NOTE) If a descendant is instanced, it will appear
                    #        only once in the returned list.
                    root += cmds.listRelatives(node,
                                               allDescendents=True,
                                               fullPath=True,
                                               noIntermediate=True) or []
                root = list(set(root))
                cmds.select(root, replace=True, noExpand=True)

                def _export_alembic():
                    io.export_alembic(
                        outpath,
                        start,
                        end,
                        selection=True,
                        renderableOnly=True,
                        writeVisibility=True,
                        writeCreases=True,
                        worldSpace=True,
                        eulerFilter=euler_filter,
                        attr=[
                            lib.AVALON_ID_ATTR_LONG,
                        ],
                        attrPrefix=[
                            "ai",  # Write out Arnold attributes
                            "avnlook_",  # Write out lookDev controls
                        ],
                    )

                auto_retry = 1
                while auto_retry:
                    try:
                        _export_alembic()
                    except RuntimeError as err:
                        if auto_retry:
                            # (NOTE) Auto re-try export
                            # For unknown reasons, some artists may hit a
                            # runtime error when exporting, but re-running
                            # the publish without any change resolves it.
                            auto_retry -= 1
                            self.log.warning(err)
                            self.log.warning("Retrying...")
                        else:
                            raise err
                    else:
                        break
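
The single automatic retry above can also be written as a small reusable helper; a sketch of the same pattern (not part of reveries):

def run_with_retry(func, retries=1, log=None):
    """Call func(), retrying up to `retries` times on RuntimeError."""
    while True:
        try:
            return func()
        except RuntimeError as err:
            if retries <= 0:
                raise
            retries -= 1
            if log is not None:
                log.warning(err)
                log.warning("Retrying...")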
Example #14
    def process(self, instance):
        from maya import cmds
        from avalon import maya
        from reveries import utils
        from reveries.maya import lib, capsule, utils as maya_utils

        staging_dir = utils.stage_dir()

        filename = "%s.ma" % instance.data["subset"]
        outpath = "%s/%s" % (staging_dir, filename)

        linkfile = "%s.json" % instance.data["subset"]
        linkpath = "%s/%s" % (staging_dir, linkfile)

        instance.data["repr.LookDev._stage"] = staging_dir
        instance.data["repr.LookDev._files"] = [filename, linkfile]
        instance.data["repr.LookDev.entryFileName"] = filename
        instance.data["repr.LookDev.linkFname"] = linkfile

        # Serialise shaders relationships
        #
        self.log.info("Serialising shaders..")

        shader_by_id = lib.serialise_shaders(instance.data["dagMembers"])
        assert shader_by_id, "The map of shader relationship is empty."

        # Extract shaders
        #
        self.log.info("Extracting shaders..")

        child_instances = instance.data.get("childInstances", [])
        try:
            texture = next(chd for chd in child_instances
                           if chd.data["family"] == "reveries.texture")
        except StopIteration:
            file_node_attrs = dict()
        else:
            file_node_attrs = texture.data.get("fileNodeAttrs", dict())

        with contextlib.nested(
                maya.maintained_selection(),
                capsule.ref_edit_unlock(),
                # (NOTE) Ensure attribute unlock
                capsule.attribute_states(file_node_attrs.keys(), lock=False),
                # Change to published path
                capsule.attribute_values(file_node_attrs),
                capsule.no_refresh(),
        ):
            # Select the full shading network.
            # If only the shadingGroups were selected and any node in the
            # network is connected to a DAG node (e.g. a drivenKey), the
            # command would export not only the selected shadingGroups'
            # shading network but also the related DAG nodes (i.e. the
            # full hierarchy).
            cmds.select(instance, replace=True, noExpand=True)

            cmds.file(
                outpath,
                options="v=0;",
                type="mayaAscii",
                force=True,
                exportSelected=True,
                preserveReferences=False,
                constructionHistory=False,
                channels=True,  # allow animation
                constraints=False,
                shader=True,
                expressions=True)

        # Animatable attrs
        # Custom attributes on the assembly node which need to be animatable.
        self.log.info("Serialising 'avnlook_' prefixed attributes..")
        avnlook_anim = dict()
        for node in cmds.ls(instance.data["dagMembers"], type="transform"):
            id = maya_utils.get_id(node)
            user_attrs = cmds.listAttr(node, userDefined=True) or []
            for attr in user_attrs:
                if not attr.startswith("avnlook_"):
                    continue
                connected = cmds.listConnections(node + "." + attr,
                                                 source=False,
                                                 destination=True,
                                                 plugs=True)
                if connected:
                    avnlook_anim[id + "." + attr] = connected

        surfaces = cmds.ls(instance.data["dagMembers"],
                           noIntermediate=True,
                           type="surfaceShape")

        # UV Chooser
        uv_chooser = dict()
        for chooser in cmds.ls(instance, type="uvChooser"):
            chooser_id = maya_utils.get_id(chooser)

            for src in cmds.listConnections(chooser + ".uvSets",
                                            source=True,
                                            destination=False,
                                            plugs=True) or []:
                geo, attr = src.split(".", 1)
                geo = cmds.listRelatives(geo, parent=True, path=True)[0]
                geo_attr = maya_utils.get_id(geo) + "." + attr

                if chooser_id not in uv_chooser:
                    uv_chooser[chooser_id] = list()
                if geo_attr not in uv_chooser[chooser_id]:
                    uv_chooser[chooser_id].append(geo_attr)

        # CreaseSet
        crease_sets = dict()
        creases = list()

        for node in surfaces:
            creases += cmds.ls(cmds.listSets(object=node), type="creaseSet")

        creases = list(set(creases))

        for cres in creases:
            # Group crease set members by crease level value.
            level = cmds.getAttr(cres + ".creaseLevel")
            if level not in crease_sets:
                crease_sets[level] = list()

            for member in cmds.ls(cmds.sets(cres, query=True), long=True):
                node, edges = member.split(".")
                if node not in instance.data["dagMembers"]:
                    continue
                # We have validated Avalon UUID, so there must be a valid ID.
                id = maya_utils.get_id(node)
                crease_sets[level].append(id + "." + edges)

        # Arnold attributes
        arnold_attrs = dict()

        try:
            # (TODO) This should be improved. see issue #65
            from reveries.maya import arnold
        except RuntimeError as e:
            self.log.debug(e)
        else:
            ai_sets = dict()
            for objset in cmds.ls(type="objectSet"):
                if not lib.hasAttr(objset, "aiOverride"):
                    continue
                if not cmds.getAttr(objset + ".aiOverride"):
                    continue
                # Ignore pyblish family instance
                if (lib.hasAttr(objset, "id")
                        and read(objset + ".id") == "pyblish.avalon.instance"):
                    continue

                ai_sets[objset] = cmds.ls(cmds.sets(objset, query=True),
                                          long=True)

            # (TODO) Validate only transform nodes in ai set
            transforms = cmds.ls(cmds.listRelatives(surfaces, parent=True),
                                 long=True)
            for node in transforms:
                # There must be a valid ID
                id = maya_utils.get_id(node)

                attrs = dict()

                # Collect all `ai*` attributes from shape
                shape = cmds.listRelatives(
                    node, shapes=True, noIntermediate=True,
                    fullPath=True) or [None]
                shape = shape[0]
                if shape is None:
                    continue

                for attr in cmds.listAttr(shape, fromPlugin=True) or []:
                    value = read(shape + "." + attr)
                    if value is not None:
                        attrs[attr] = value

                # Collect all override attributes from objectSet
                for ai_set, member in ai_sets.items():
                    if node not in member:
                        continue

                    for attr in cmds.listAttr(ai_set, userDefined=True) or []:
                        # Collect all user attributes from the objectSet
                        # (NOTE) Some attributes like `castsShadows` do not
                        #        start with "ai" but still affect rendering
                        #        in Arnold.
                        value = read(node + "." + attr)
                        if value is not None:
                            attrs[attr] = value

                arnold_attrs[id] = attrs

        # VRay Attributes
        vray_attrs = dict()

        try:
            from reveries.maya import vray
        except RuntimeError as e:
            self.log.debug(e)
        else:
            for node in surfaces:
                # - shape
                values = vray.attributes_gather(node)
                if values:
                    vray_attrs[node] = values

                # - transform
                parent = cmds.listRelatives(node, parent=True)
                if parent:
                    values = vray.attributes_gather(parent[0])
                    if values:
                        vray_attrs[parent[0]] = values

        relationships = {
            "shaderById": shader_by_id,
            "avnlookAttrs": avnlook_anim,
            "uvChooser": uv_chooser,
            "creaseSets": crease_sets,
            "arnoldAttrs": arnold_attrs,
            "vrayAttrs": vray_attrs,
        }

        self.log.info("Extracting serialisation..")

        with open(linkpath, "w") as f:
            json.dump(relationships, f)
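
Both LookDev examples call a `read()` helper when gathering attribute values; as an illustration only (not the reveries implementation), it can be thought of as a tolerant wrapper around `cmds.getAttr`:

from maya import cmds

def read(plug):
    """Return the plug's value, or None if it cannot be read (illustrative)."""
    try:
        value = cmds.getAttr(plug)
    except (RuntimeError, ValueError):
        return None
    # getAttr wraps some compound values in an extra list, e.g. [(x, y, z)]
    if isinstance(value, list) and len(value) == 1 and isinstance(value[0], tuple):
        value = list(value[0])
    return value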
Example #15
    def extract_all(self,
                    cam_transform,
                    ma_outpath,
                    abc_outpath,
                    fbx_outpath,
                    start,
                    end,
                    step,
                    euler_filter,
                    do_bake,
                    donot_bake,
                    duplicate_input_graph=False):
        from maya import cmds
        from reveries.maya import io, lib, capsule

        with contextlib.nested(
                capsule.no_refresh(),
                capsule.no_undo(),
                capsule.attribute_states(donot_bake, lock=False,
                                         keyable=False),
                capsule.attribute_states(do_bake, lock=False, keyable=True),
                capsule.evaluation("off"),
        ):
            with capsule.delete_after() as delete_bin:

                # bake to worldspace
                frame_range = (start, end)
                baked_camera = lib.bake_to_world_space(
                    cam_transform,
                    frame_range,
                    step=step,
                    # Remove baked attributes from anim layers so that all
                    # keys are baked out, as if the animLayers were merged.
                    remove_baked_attr_from_layer=True,
                    duplicate_input_graph=duplicate_input_graph)[0]
                delete_bin.append(baked_camera)

                cmds.select(
                    baked_camera,
                    hierarchy=True,  # With shape
                    replace=True,
                    noExpand=True)

                with avalon.maya.maintained_selection():
                    io.export_alembic(abc_outpath,
                                      start,
                                      end,
                                      eulerFilter=euler_filter)

                with capsule.undo_chunk_when_no_undo():
                    if euler_filter:
                        cmds.filterCurve(cmds.ls(sl=True))

                    with avalon.maya.maintained_selection():
                        cmds.file(
                            ma_outpath,
                            force=True,
                            typ="mayaAscii",
                            exportSelected=True,
                            preserveReferences=False,
                            constructionHistory=False,
                            channels=True,  # allow animation
                            constraints=False,
                            shader=False,
                            expressions=False)

                    with avalon.maya.maintained_selection():
                        io.export_fbx_set_camera()
                        io.export_fbx(fbx_outpath)
Example #16
    def process(self, instance):
        from maya import cmds
        from reveries import utils
        from reveries.maya import lib, capsule

        cmds.loadPlugin("animImportExport", quiet=True)

        staging_dir = utils.stage_dir()
        script = "%s.mel" % instance.data["subset"]
        filename = "%s.anim" % instance.data["subset"]
        scriptpath = "%s/%s" % (staging_dir, script)
        outpath = "%s/%s" % (staging_dir, filename)

        animated_asset = instance.data["animatedAssetId"]

        instance.data["repr.anim._stage"] = staging_dir
        instance.data["repr.anim._files"] = [filename, script]
        instance.data["repr.anim.entryFileName"] = filename
        instance.data["repr.anim.animatedAssetId"] = animated_asset

        # Save animated nodes, preserving selection order
        with capsule.maintained_selection():
            cmds.select(instance.data["outAnim"], replace=True)

            with contextlib.nested(
                    capsule.namespaced(instance.data["animatedNamespace"],
                                       new=False),
                    capsule.relative_namespaced()):
                # Save with basename
                with open(scriptpath, "w") as fp:
                    # Allow for nodes that may not exist between assets
                    fp.write("select -r `ls\n" + "\n".join(cmds.ls(sl=True)) +
                             "`;")

        context_data = instance.context.data
        start = context_data["startFrame"]
        end = context_data["endFrame"]

        instance.data["startFrame"] = start
        instance.data["endFrame"] = end

        with contextlib.nested(
                capsule.no_refresh(),
                capsule.maintained_selection(),
                capsule.undo_chunk(),
        ):
            lib.bake(
                instance.data["outAnim"],
                frame_range=(start, end),
                shape=False,
                # Remove baked attributes from anim layers so that all keys
                # are baked out, as if the animLayers were merged.
                remove_baked_attr_from_layer=True)

            cmds.select(instance.data["outAnim"], replace=True, noExpand=True)
            cmds.file(outpath,
                      force=True,
                      typ="animExport",
                      exportSelectedAnim=True,
                      options=("options=keys;"
                               "hierarchy=none;"
                               "precision=17;"
                               "intValue=17;"
                               "nodeNames=1;"
                               "verboseUnits=0;"
                               "whichRange=1;"
                               "helpPictures=0;"
                               "useChannelBox=0;"
                               "controlPoints=0;"
                               "shapes=0;"
                               "copyKeyCmd="
                               "-animation objects "
                               "-option keys "
                               "-hierarchy none "
                               "-controlPoints 0 "
                               "-shape 0"))