Example #1
    def hash(self, mesh_nodes):
        # Hash model and collect Avalon UUID
        geo_id_and_hash = dict()
        hasher = utils.MeshHasher()
        for mesh in mesh_nodes:
            # Get ID
            transform = cmds.listRelatives(mesh, parent=True, fullPath=True)[0]
            id = utils.get_id(transform)
            assert id is not None, ("Some mesh has no Avalon UUID. "
                                    "This should not happend.")
            hasher.set_mesh(mesh)
            hasher.update_points()
            hasher.update_normals()
            hasher.update_uvmap()

            result = hasher.digest()
            result["hierarchy"] = transform

            # May have duplicated Id
            if id not in geo_id_and_hash:
                geo_id_and_hash[id] = list()
            geo_id_and_hash[id].append(result)

            hasher.clear()

        return geo_id_and_hash

    def get_invalid_duplicated(cls, instance, uuids=None):
        from reveries.maya import utils

        if uuids is None:
            uuids = cls._get_avalon_uuid(instance)

        if instance.data["family"] in cls.loose_uuid:
            invalid = uuids.get(utils.Identifier.Duplicated, [])
        else:
            invalid = list()
            nodes = set()
            for member in uuids.values():
                nodes.update(member)

            ids = set()
            for node in nodes:
                id = utils.get_id(node)
                if id is None:
                    continue
                if id not in ids:
                    ids.add(id)
                else:
                    invalid.append(node)

        return invalid

    def extract(self):
        context_data = self.context.data
        self.start = context_data.get("startFrame")
        self.end = context_data.get("endFrame")
        self.step = self.data.get("bakeStep", 1.0)
        camera = cmds.ls(self.member, type="camera", long=True)[0]

        self.camera_uuid = utils.get_id(camera)

        donot_bake = [camera + "." + attr for attr in DO_NOT_BAKE_THESE]

        with contextlib.nested(
                capsule.no_refresh(),
                capsule.attr_unkeyable(donot_bake),
                capsule.evaluation("off"),
                capsule.undo_chunk(),
        ):
            # bake to worldspace
            baked_camera = lib.bake_camera(camera, self.start, self.end,
                                           self.step)

            cmds.select(
                baked_camera,
                hierarchy=True,  # With shape
                replace=True,
                noExpand=True)

            super(ExtractCamera, self).extract()
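
The hash() method above tolerates duplicated Avalon IDs by keeping a list of digests per ID. The same grouping can be written with collections.defaultdict; a minimal sketch (the IDs and digest dicts below are hypothetical stand-ins, not real MeshHasher output):

    from collections import defaultdict

    geo_id_and_hash = defaultdict(list)
    for id, result in [("idA", {"points": "..."}),
                       ("idA", {"points": "..."}),  # duplicated ID
                       ("idB", {"points": "..."})]:
        geo_id_and_hash[id].append(result)

    assert len(geo_id_and_hash["idA"]) == 2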
Example #4
    def get_invalid(cls, instance):
        from maya import cmds
        from reveries.maya import utils

        invalid = list()

        for chooser in cmds.ls(instance, type="uvChooser"):
            if not utils.get_id(chooser):
                invalid.append(chooser)

            for node in cmds.listConnections(chooser + ".uvSets",
                                             source=True,
                                             destination=False,
                                             shapes=False) or []:
                if not utils.get_id(node):
                    invalid.append(chooser)
                    break

        return invalid
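
Example #4 leans on the `... or []` idiom because cmds.listConnections returns None, not an empty list, when nothing is connected; iterating over None would raise TypeError. The idiom in isolation, with a stand-in for the Maya query:

    def connections(found=None):
        # Stand-in for a Maya query that returns None instead of an empty list
        return found

    for node in connections(None) or []:
        pass  # never entered, and no TypeError is raised

    assert (connections(["uvSet1"]) or []) == ["uvSet1"]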
Example #5
    def extract(self):

        DO_NOT_BAKE_THESE = [
            "motionBlurOverride",
            "aiUseGlobalShutter",
            "aiShutterStart",
            "aiShutterEnd",
            "aiShutterType",
            "aiEnableDOF",
            "aiFov",
            "aiHorizontalFov",
            "aiVerticalFov",
        ]

        DO_BAKE_THESE = [
            "focalLength",
        ]
        DO_BAKE_THESE += lib.TRANSFORM_ATTRS

        context_data = self.context.data
        self.start = context_data.get("startFrame")
        self.end = context_data.get("endFrame")
        self.step = self.data.get("bakeStep", 1.0)
        camera = cmds.ls(self.member, type="camera", long=True)[0]

        self.camera_uuid = utils.get_id(camera)

        cam_transform = cmds.listRelatives(camera,
                                           parent=True,
                                           fullPath=True)[0]

        donot_bake = [cam_transform + "." + attr for attr in DO_NOT_BAKE_THESE]
        do_bake = [cam_transform + "." + attr for attr in DO_BAKE_THESE]

        with contextlib.nested(
            capsule.no_refresh(),
            capsule.attribute_states(donot_bake, lock=False, keyable=False),
            capsule.attribute_states(do_bake, lock=False, keyable=True),
            capsule.evaluation("off"),
        ):
            with capsule.delete_after() as delete_bin:

                # bake to worldspace
                frame_range = (self.start, self.end)
                baked_camera = lib.bake_to_world_space(cam_transform,
                                                       frame_range,
                                                       step=self.step)[0]
                delete_bin.append(baked_camera)

                cmds.select(baked_camera,
                            hierarchy=True,  # With shape
                            replace=True,
                            noExpand=True)

                super(ExtractCamera, self).extract()
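
contextlib.nested, used throughout these extractors, only exists in Python 2 (the Python shipped with Maya 2020 and earlier). Under Python 3 the same stacking is usually written with contextlib.ExitStack; a sketch with stand-in context managers in place of the capsule helpers:

    import contextlib

    @contextlib.contextmanager
    def stage(name):
        # Stand-in for capsule.no_refresh(), capsule.evaluation("off"), etc.
        print("enter", name)
        yield
        print("exit", name)

    with contextlib.ExitStack() as stack:
        for cm in (stage("no_refresh"), stage("evaluation_off")):
            stack.enter_context(cm)
        # ... bake and export here ...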
Example #6
    def transform_by_id(self, nodes):
        """
        """
        import maya.cmds as cmds
        from reveries.maya.utils import get_id

        transform_id_map = dict()
        for transform in cmds.ls(nodes, type="transform", long=True):
            transform_id_map[get_id(transform)] = transform

        return transform_id_map
Example #7
    def transform_by_id(self, nodes):
        """
        """
        import maya.cmds as cmds
        from reveries.maya.utils import get_id

        transform_id_map = dict()
        for transform in cmds.ls(nodes, type="transform"):
            id = get_id(transform)
            if id not in transform_id_map:
                # (NOTE) New data model for duplicated AvalonID..
                transform_id_map[id] = list()
            transform_id_map[id].append(transform)

        return transform_id_map

    def _collect_components_matrix(self, data, container):

        id_path = container_to_id_path(container)

        data["subMatrix"][id_path] = dict()
        data["hidden"][id_path] = list()

        members = cmds.sets(container["objectName"], query=True)
        transforms = cmds.ls(members, type="transform", referencedNodes=True)

        for transform in transforms:
            matrix = cmds.xform(transform,
                                query=True,
                                matrix=True,
                                objectSpace=True)

            if matrix_equals(matrix, DEFAULT_MATRIX):
                matrix = "<default>"

            address = utils.get_id(transform)
            data["subMatrix"][id_path][address] = matrix

            # Collect visibility with matrix
            visibility = cmds.getAttr(transform + ".visibility")
            if not visibility:
                # Only record hidden nodes
                data["hidden"][id_path].append(address)

        # Collect subset group node's matrix
        subset_group = container["subsetGroup"]

        matrix = cmds.xform(subset_group,
                            query=True,
                            matrix=True,
                            objectSpace=True)

        if matrix_equals(matrix, DEFAULT_MATRIX):
            return

        name = subset_group.rsplit(":", 1)[-1]
        data["subMatrix"][id_path]["GROUP"] = {name: matrix}
Example #9
    def process(self, instance):
        from maya import cmds
        from reveries import utils
        from reveries.maya import lib, utils as maya_utils

        staging_dir = utils.stage_dir(dir=instance.data["_sharedStage"])

        context_data = instance.context.data
        start = context_data["startFrame"]
        end = context_data["endFrame"]
        step = instance.data.get("bakeStep", 1.0)

        instance.data["startFrame"] = start
        instance.data["endFrame"] = end
        instance.data["step"] = step

        ma_filename = "%s.ma" % instance.data["subset"]
        ma_outpath = "%s/%s" % (staging_dir, ma_filename)

        abc_filename = "%s.abc" % instance.data["subset"]
        abc_outpath = "%s/%s" % (staging_dir, abc_filename)

        fbx_filename = "%s.fbx" % instance.data["subset"]
        fbx_outpath = "%s/%s" % (staging_dir, fbx_filename)

        DO_NOT_BAKE_THESE = [
            "motionBlurOverride",
            "aiUseGlobalShutter",
            "aiShutterStart",
            "aiShutterEnd",
            "aiShutterType",
            "aiEnableDOF",
            "aiFov",
            "aiHorizontalFov",
            "aiVerticalFov",
        ]

        DO_BAKE_THESE = [
            "focalLength",
        ]
        DO_BAKE_THESE += lib.TRANSFORM_ATTRS

        camera = cmds.ls(instance, type="camera", long=True)[0]

        cam_uuid = maya_utils.get_id(camera)
        cam_transform = cmds.listRelatives(camera, parent=True,
                                           fullPath=True)[0]

        donot_bake = [cam_transform + "." + attr for attr in DO_NOT_BAKE_THESE]
        do_bake = [cam_transform + "." + attr for attr in DO_BAKE_THESE]

        euler_filter = instance.data.get("eulerFilter", False)

        duplicate_input_graph = bool(cmds.ls(instance, type="stereoRigCamera"))

        instance.data["repr.mayaAscii._stage"] = staging_dir
        instance.data["repr.mayaAscii._files"] = [ma_filename]
        instance.data["repr.mayaAscii.entryFileName"] = ma_filename
        instance.data["repr.mayaAscii.cameraUUID"] = cam_uuid

        instance.data["repr.Alembic._stage"] = staging_dir
        instance.data["repr.Alembic._files"] = [abc_filename]
        instance.data["repr.Alembic.entryFileName"] = abc_filename
        instance.data["repr.Alembic.cameraUUID"] = cam_uuid

        instance.data["repr.FBX._stage"] = staging_dir
        instance.data["repr.FBX._files"] = [fbx_filename]
        instance.data["repr.FBX.entryFileName"] = fbx_filename
        instance.data["repr.FBX.cameraUUID"] = cam_uuid

        # Delayed run, once for all representations
        instance.data["repr._all_repr_._stage"] = staging_dir
        instance.data["repr._all_repr_._delayRun"] = {
            "func":
            self.extract_all,
            "args": [
                cam_transform, ma_outpath, abc_outpath, fbx_outpath, start,
                end, step, euler_filter, do_bake, donot_bake
            ],
            "kwargs": {
                "duplicate_input_graph": duplicate_input_graph,
            }
        }
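
The "_delayRun" record stores a callable together with its arguments so that a downstream plug-in can run the heavy bake/export once for all three representations. The consuming side is not shown here, but presumably it does something like this sketch:

    def run_delayed(instance):
        # Invoke the deferred extraction recorded by process() above
        delayed = instance.data["repr._all_repr_._delayRun"]
        return delayed["func"](*delayed["args"], **delayed.get("kwargs", {}))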
Example #10
    def extract_XGenLegacy(self, packager):

        packager.skip_stage()
        package_dir = packager.create_package()

        xgen_files = list()
        descriptions_data = dict()

        for desc in self.data["xgenDescriptions"]:
            palette = xgen.get_palette_by_description(desc)

            # Save UUID and bounding
            descriptions_data[desc] = {
                "id": utils.get_id(desc),
                "bound": xgen.list_bound_geometry(desc),
            }

            # Transfer maps
            maps = xgen.maps_to_transfer(desc)
            data_paths = xgen.current_data_paths(palette, expand=True)

            for src in maps:
                for root in data_paths:
                    if src.startswith(root):
                        # At least one root will be matched, since every
                        # map path has been validated to exist under the
                        # ${DESC} dir.
                        tail = src[len(root):]
                        if tail.startswith("/") or tail.startswith("\\"):
                            tail = tail[1:]
                        break
                else:
                    self.log.critical("Searched data path:")
                    for root in data_paths:
                        self.log.critical(root)
                    raise Exception("Could not find root path for %s , "
                                    "this is a bug." % src)

                dst = os.path.join(package_dir, "maps", palette, tail)
                packager.add_file(src, dst)

            # Export guides
            guides = xgen.list_guides(desc)
            if guides:
                guide_file = os.path.join(package_dir,
                                          "guides",
                                          palette,
                                          desc + ".abc")
                io.export_xgen_LGC_guides(guides, guide_file)

            # Export grooming
            groom = xgen.get_groom(desc)
            if groom and cmds.objExists(groom):
                groom_dir = os.path.join(package_dir,
                                         "groom",
                                         palette,
                                         desc)
                xgen.export_grooming(desc, groom, groom_dir)

        # Extract palette
        for palette in self.data["xgenPalettes"]:
            xgen_file = palette + ".xgen"
            xgen_path = os.path.join(package_dir, xgen_file)
            io.export_xgen_LGC_palette(palette, xgen_path)
            xgen_files.append(xgen_file)

            # Culled
            xgd_file = palette + "_culled.xgd"
            xgd_path = os.path.join(package_dir, "deltas", palette, xgd_file)
            if xgen.save_culled_as_delta(palette, xgd_path):
                self.log.info("Culled primitives saved.")

        packager.add_data({
            "entryFileName": None,  # Yes, no entry file for XGen Legacy.
            "descriptionsData": descriptions_data,
            "palettes": xgen_files,
            "step": self.data["step"],
        })
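
The map-transfer loop above relies on Python's for/else: the else block runs only when the loop finishes without hitting break, i.e. when no root path matched. A self-contained demonstration of that control flow:

    def find_root(src, roots):
        for root in roots:
            if src.startswith(root):
                break
        else:
            # Reached only when no root matched (the loop never broke)
            raise LookupError("Could not find root path for %s" % src)
        return root

    assert find_root("/data/maps/a.ptx", ["/tmp", "/data"]) == "/data"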
Example #11
    def extract_mayaBinary(self):
        # Define extract output file path
        entry_file = self.file_name("mb")
        package_path = self.create_package()
        entry_path = os.path.join(package_path, entry_file)

        mesh_nodes = cmds.ls(self.member,
                             type="mesh",
                             noIntermediate=True,
                             long=True)

        # Hash model and collect Avalon UUID
        geo_id_and_hash = dict()
        hasher = utils.MeshHasher()
        for mesh in mesh_nodes:
            # Get ID
            transform = cmds.listRelatives(mesh, parent=True, fullPath=True)[0]
            id = utils.get_id(transform)
            assert id is not None, ("Some mesh has no Avalon UUID. "
                                    "This should not happend.")
            hasher.set_mesh(mesh)
            hasher.update_points()
            hasher.update_normals()
            hasher.update_uvmap()
            # It must be one mesh pairing to one transform.
            geo_id_and_hash[id] = hasher.digest()
            hasher.clear()

        self.add_data({"modelProfile": geo_id_and_hash})

        # Perform extraction
        self.log.info("Performing extraction..")
        with contextlib.nested(
            capsule.no_undo(),
            capsule.no_display_layers(self.member),
            maya.maintained_selection(),
        ):
            with capsule.undo_chunk_when_no_undo():
                # (NOTE) Current workflow may keep model stay loaded as
                #   referenced in scene, but need to take extra care while
                #   extracting. (Will be undone)

                # - Remove referenced subset's namespace before exporting
                #   (Not keeping model namespace)
                referenced_namespace = self.context.data["referencedNamespace"]
                for namespace in reversed(sorted(list(referenced_namespace))):
                    if not cmds.namespace(exists=namespace):
                        continue

                    try:
                        cmds.namespace(removeNamespace=namespace,
                                       mergeNamespaceWithRoot=True)
                    except Exception:
                        # Reload the reference and try again.
                        # The namespace of the reference can be removed
                        # after the reload.
                        # (TODO) This publish workflow might not be a good
                        #        approach...
                        ref_node = lib.reference_node_by_namespace(namespace)
                        # There must be a reference node, since that is the
                        # main reason why the namespace cannot be removed.
                        cmds.file(loadReference=ref_node)
                        cmds.namespace(removeNamespace=namespace,
                                       mergeNamespaceWithRoot=True)

                # - Remove loaded container member
                #   If the mesh of the loaded model has been copied and edited
                #   (mesh face detach and separation), the model container
                #   might end up with a lot of facet members, which means there
                #   are DAG connections that would cause the model container to
                #   be exported as well, and we don't want that to happen.
                #   So we just remove them all for good.
                for container in self.context.data["RootContainers"]:
                    cmds.delete(container)

                cmds.select(cmds.ls(self.member), noExpand=True)

                cmds.file(entry_path,
                          force=True,
                          typ="mayaBinary",
                          exportSelected=True,
                          preserveReferences=False,
                          channels=True,
                          constraints=True,
                          expressions=True,
                          constructionHistory=True,
                          shader=True)

        self.add_data({
            "entryFileName": entry_file,
        })

        self.log.info("Extracted {name} to {path}".format(
            name=self.data["subset"],
            path=entry_path)
        )
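
Removing namespaces in reverse-sorted order guarantees that child namespaces are handled before their parents, since a parent namespace cannot be removed while a child namespace still lives inside it. The ordering effect in isolation:

    namespaces = [":char", ":char:body", ":prop"]
    for ns in reversed(sorted(namespaces)):
        print(ns)
    # :prop
    # :char:body  <- child handled before...
    # :char       <- ...its parent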
Example #12
    def process(self, instance):

        if "xgen" in instance.data["subset"].lower():
            self.log.info("No need to check on XGen look.")
            return

        elif "rig" in instance.data["subset"].lower():
            # rig's look
            self.log.info("Checking on rig and model.")
            FAMILIES = [
                self.model_family,
                self.rig_family,
            ]
            repr_name = "mayaBinary"
        else:
            # model's look
            self.log.info("Checking on model.")
            FAMILIES = [
                self.model_family,
            ]
            repr_name = "mayaBinary"

        collected_profiles = dict()

        asset = instance.context.data["assetDoc"]
        assert asset["name"] == instance.data["asset"], "Not the same asset."
        self.log.info("Asset: %s" % asset["name"])

        for subset in io.find({"type": "subset", "parent": asset["_id"]}):
            latest = io.find_one({"type": "version",
                                  "parent": subset["_id"]},
                                 sort=[("name", -1)])
            if latest is None:
                continue

            if not any(family in latest["data"]["families"]
                       for family in FAMILIES):
                continue

            # Get representation
            representation = io.find_one({
                "type": "representation",
                "parent": latest["_id"],
                "name": repr_name
            })
            profile = representation["data"]["modelProfile"]
            collected_profiles[subset["name"]] = profile

        if not collected_profiles:
            # Model was never published before, this is not right.
            self.log.error("No model has been found.")
            raise Exception("No model for this look has been published "
                            "before, please publish model first.")

        hierarchy = cmds.ls(instance.data["requireAvalonUUID"], long=True)
        hierarchy += cmds.listRelatives(
            hierarchy, allDescendents=True, fullPath=True) or []
        meshes = cmds.ls(hierarchy, type="mesh", long=True)
        uuid_required_geos = cmds.listRelatives(meshes,
                                                parent=True,
                                                fullPath=True)

        if not uuid_required_geos:
            raise Exception("No UUID required nodes.")

        # Hash current model and collect Avalon UUID
        geo_id_and_hash = dict()
        hasher = utils.MeshHasher()
        warned = False
        for transform in uuid_required_geos:
            # It must be one mesh pairing to one transform.
            mesh = cmds.listRelatives(transform,
                                      shapes=True,
                                      noIntermediate=True,
                                      fullPath=True)[0]
            id = utils.get_id(transform)
            if id is None:
                if not warned:
                    self.log.warning("Some mesh has no Avalon UUID.")
                    warned = True
                continue

            hasher.set_mesh(mesh)
            hasher.update_points()
            hasher.update_normals()
            hasher.update_uvmap()
            # The id must be unique; no other node should have the same id.
            geo_id_and_hash[id] = hasher.digest()
            hasher.clear()

        # Find matched model/rig subsets
        matched = list()
        for name, profile in collected_profiles.items():
            current_ids = set(geo_id_and_hash.keys())
            previous_ids = set(profile.keys())

            if current_ids.issuperset(previous_ids):
                self.log.info("Match found: %s" % name)
                matched.append(name)

            elif (self.rig_family in FAMILIES
                  and current_ids.issubset(previous_ids)):
                # In current pipeline, the look for rig is only for preview,
                # no need to be strict on this.
                self.log.info("Partial match found: %s" % name)
                matched.append(name)

            else:
                self.log.debug("Not matched: %s" % name)

        # Is current model/rig that this look applied to being published ?
        being_published = False
        staged_instances = [
            i for i in instance.context
            if (i.data["family"] in FAMILIES and i.data.get("publish", True))
        ]
        for inst in staged_instances:
            nodes = cmds.ls(inst, long=True)
            if set(nodes).issuperset(set(uuid_required_geos)):
                self.log.info("Model/Rig is being published.")
                being_published = True
                break
            else:
                self.log.debug("Instance not match: %s" % inst.name)

        # If it's not being published, check on match state
        if not being_published:
            if not matched:
                raise Exception("Current models UUID is not consistent "
                                "with previous published version.\n"
                                "Please update your loaded model/rig, or "
                                "publish it if you are the author.")
            else:
                # Checking on mesh changes, and pop warning if changed.
                changed_on = list()
                for match in matched:
                    for id, hash in geo_id_and_hash.items():
                        if id not in collected_profiles[match]:
                            continue

                        if not collected_profiles[match][id] == hash:
                            changed_on.append(match)
                            break

                if changed_on:
                    self.log.warning("Some model has been modified, the look "
                                     "may not apply correctly on these "
                                     "subsets:")
                    for changed in changed_on:
                        self.log.warning(changed)
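
The match test above is purely set-based: a full match means the current IDs cover every previously published ID, while the looser rig-preview match accepts the reverse. The two directions in isolation (IDs are hypothetical):

    current_ids = {"idA", "idB", "idC"}
    previous_ids = {"idA", "idB"}

    assert current_ids.issuperset(previous_ids)  # full match
    assert previous_ids.issubset(current_ids)    # partial match (rig preview)

    # A mismatch holds in neither direction
    assert not {"idA", "idX"}.issuperset(previous_ids)
    assert not {"idA", "idX"}.issubset(previous_ids)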
Example #13
    def extract_LookDev(self):

        from avalon import maya
        from reveries.maya import lib, capsule

        entry_file = self.file_name("ma")
        package_path = self.create_package()

        # Extract shaders
        #
        entry_path = os.path.join(package_path, entry_file)

        self.log.info("Extracting shaders..")

        with contextlib.nested(
                maya.maintained_selection(),
                capsule.undo_chunk(),
                capsule.no_refresh(),
        ):
            # From texture extractor
            file_node_path = self.context.data.get("fileNodePath")
            if file_node_path is not None:
                # Change texture path to published location
                for file_node in cmds.ls(self.member, type="file"):
                    attr_name = file_node + ".fileTextureName"
                    color_attr = file_node + ".colorSpace"
                    final_path = file_node_path[file_node]

                    # Unlock colorspace
                    color_space = cmds.getAttr(color_attr)
                    cmds.setAttr(color_attr, lock=False)

                    # Set texture file path to publish location
                    cmds.setAttr(attr_name, final_path, type="string")

                    # Lock colorspace
                    cmds.setAttr(color_attr,
                                 color_space,
                                 lock=True,
                                 type="string")

            # Select full shading network
            # If only select shadingGroups, and if there are any node
            # connected to Dag node (i.e. drivenKey), then the command
            # will not only export selected shadingGroups' shading network,
            # but also export other related DAG nodes (i.e. full hierarchy)
            cmds.select(self.member, replace=True, noExpand=True)

            cmds.file(entry_path,
                      options="v=0;",
                      type="mayaAscii",
                      force=True,
                      exportSelected=True,
                      preserveReferences=False,
                      constructionHistory=False)

        # Serialise shaders relationships
        #
        link_file = self.file_name("json")
        link_path = os.path.join(package_path, link_file)

        self.log.info("Serialising shaders..")

        shader_by_id = lib.serialise_shaders(self.data["dagMembers"])

        # Animatable attrs
        # Custom attributes in assembly node which require to be animated.
        self.log.info("Serialising animatable attributes..")
        animatable = dict()
        root = cmds.ls(self.data["dagMembers"], assemblies=True)
        if root:
            root = root[0]
            for attr in cmds.listAttr(root, userDefined=True) or list():
                animatable[attr] = cmds.listConnections(root + "." + attr,
                                                        destination=True,
                                                        source=False,
                                                        plugs=True)

        surfaces = cmds.ls(self.data["dagMembers"],
                           noIntermediate=True,
                           type="surfaceShape")

        # CreaseSet
        crease_sets = dict()
        creases = list()

        for node in surfaces:
            creases += cmds.ls(cmds.listSets(object=node), type="creaseSet")

        creases = list(set(creases))

        for cres in creases:
            # Grouping crease set members with crease level value.
            level = cmds.getAttr(cres + ".creaseLevel")
            if level not in crease_sets:
                crease_sets[level] = list()

            for member in cmds.ls(cmds.sets(cres, query=True), long=True):
                node, edges = member.split(".")
                if node not in self.data["dagMembers"]:
                    continue
                # We have validated Avalon UUID, so there must be a valid ID.
                id = utils.get_id(node)
                crease_sets[level].append(id + "." + edges)

        # Arnold attributes
        arnold_attrs = dict()

        try:
            # (TODO) This should be improved. see issue #65
            from reveries.maya import arnold
        except RuntimeError as e:
            self.log.debug(e)
        else:
            ai_sets = dict()
            for objset in cmds.ls(type="objectSet"):
                if not lib.hasAttr(objset, "aiOverride"):
                    continue
                if not cmds.getAttr(objset + ".aiOverride"):
                    continue
                # Ignore pyblish family instance
                if (lib.hasAttr(objset, "id")
                        and read(objset + ".id") == "pyblish.avalon.instance"):
                    continue

                ai_sets[objset] = cmds.ls(cmds.sets(objset, query=True),
                                          long=True)

            # (TODO) Validate only transform nodes in ai set
            transforms = cmds.ls(cmds.listRelatives(surfaces, parent=True),
                                 long=True)
            for node in transforms:
                # There must be a valid ID
                id = utils.get_id(node)

                attrs = dict()

                # Collect all `ai*` attributes from shape
                shape = cmds.listRelatives(
                    node, shapes=True, noIntermediate=True,
                    fullPath=True) or [None]
                shape = shape[0]
                if shape is None:
                    continue

                for attr in cmds.listAttr(shape, fromPlugin=True) or []:
                    value = read(shape + "." + attr)
                    if value is not None:
                        attrs[attr] = value

                # Collect all override attributes from objectSet
                for ai_set, member in ai_sets.items():
                    if node not in member:
                        continue

                    for attr in cmds.listAttr(ai_set, userDefined=True) or []:
                        # Collect all user attributes from the objectSet
                        # (NOTE) Some attributes like `castsShadows` do not
                        #        start with "ai" but still affect rendering
                        #        in Arnold.
                        value = read(node + "." + attr)
                        if value is not None:
                            attrs[attr] = value

                arnold_attrs[id] = attrs

        # VRay Attributes
        vray_attrs = dict()

        try:
            from reveries.maya import vray
        except RuntimeError as e:
            self.log.debug(e)
        else:
            for node in surfaces:
                # - shape
                values = vray.attributes_gather(node)
                if values:
                    vray_attrs[node] = values

                # - transform
                parent = cmds.listRelatives(node, parent=True)
                if parent:
                    values = vray.attributes_gather(parent[0])
                    if values:
                        vray_attrs[parent[0]] = values

        relationships = {
            "shaderById": shader_by_id,
            "animatable": animatable,
            "creaseSets": crease_sets,
            "arnoldAttrs": arnold_attrs,
            "vrayAttrs": vray_attrs,
        }

        self.log.info("Extracting serialisation..")
        with open(link_path, "w") as f:
            json.dump(relationships, f)

        self.add_data({
            "linkFname": link_file,
            "entryFileName": entry_file,
        })

        self.log.info("Extracted {name} to {path}".format(
            name=self.data["subset"], path=package_path))
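
Both renderer sections use try/except/else so that the whole Arnold or V-Ray block is skipped when importing the plug-in module raises (these examples suggest the reveries modules raise RuntimeError when the render plug-in is not loaded). The control-flow shape with a stand-in import:

    def load_plugin_module(available):
        # Stand-in for `from reveries.maya import arnold`
        if not available:
            raise RuntimeError("Plug-in not loaded")

    try:
        load_plugin_module(available=False)
    except RuntimeError as e:
        print("skipped:", e)  # matches self.log.debug(e) above
    else:
        print("collect renderer attributes here")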
Example #14
    def process(self, instance):
        from maya import cmds
        from reveries import utils
        from reveries.maya import xgen, capsule, utils as maya_utils

        staging_dir = utils.stage_dir()

        # Export preset
        # (NOTE) Saving with ext `.ma` instead of `.xgip` is because
        #        I'd like to use a reference to load it later.
        #        Referencing a file that is not `.ma`, `.mb` or another
        #        normal ext will crash Maya on file save.
        filename = "%s.ma" % instance.data["subset"]
        linkfile = "%s.json" % instance.data["subset"]
        outpath = "%s/%s" % (staging_dir, filename)
        linkpath = "%s/%s" % (staging_dir, linkfile)

        instance.data["repr.XGenInteractive._stage"] = staging_dir
        instance.data["repr.XGenInteractive._files"] = [filename, linkfile]
        instance.data["repr.XGenInteractive.entryFileName"] = filename
        instance.data["repr.XGenInteractive.linkFname"] = linkfile

        child_instances = instance.data.get("childInstances", [])
        try:
            texture = next(chd for chd in child_instances
                           if chd.data["family"] == "reveries.texture")
        except StopIteration:
            file_node_attrs = dict()
        else:
            file_node_attrs = texture.data.get("fileNodeAttrs", dict())

        bound_map = dict()
        clay_shader = "initialShadingGroup"
        descriptions = instance.data["igsDescriptions"]
        with capsule.assign_shader(descriptions, shadingEngine=clay_shader):

            for description in descriptions:

                desc_id = maya_utils.get_id(description)

                # Get bound meshes
                bound_map[desc_id] = list()
                for mesh in xgen.interactive.list_bound_meshes(description):
                    transform = cmds.listRelatives(mesh,
                                                   parent=True,
                                                   fullPath=True)
                    id = maya_utils.get_id(transform[0])
                    bound_map[desc_id].append(id)

            # (NOTE) Separating grooms and bound meshes does not seem to
            #        preserve sculpt layer data entirely correctly.
            #        For example, hair strands sculpted from long to very
            #        short may end up noisy after being imported back.
            #
            #        So now we export the grooms with bound meshes...
            #
            # io.export_xgen_IGS_presets(descriptions, outpath)

            with contextlib.nested(
                    capsule.no_display_layers(instance[:]),
                    # Change to published path
                    capsule.attribute_values(file_node_attrs),
                    capsule.maintained_selection(),
            ):
                cmds.select(descriptions)

                cmds.file(outpath,
                          force=True,
                          typ="mayaAscii",
                          exportSelected=True,
                          preserveReferences=False,
                          channels=True,
                          constraints=True,
                          expressions=True,
                          constructionHistory=True)

        # Write preset bounding map

        with open(linkpath, "w") as fp:
            json.dump(bound_map, fp, ensure_ascii=False)
Example #15
    def extract_mayaBinary(self):
        entry_file = self.file_name("mb")
        package_path = self.create_package()
        entry_path = os.path.join(package_path, entry_file)

        mesh_nodes = cmds.ls(self.member,
                             type="mesh",
                             noIntermediate=True,
                             long=True)
        clay_shader = "initialShadingGroup"

        # Hash model and collect Avalon UUID
        geo_id_and_hash = dict()
        hasher = utils.MeshHasher()
        for mesh in mesh_nodes:
            # Get ID
            transform = cmds.listRelatives(mesh, parent=True, fullPath=True)[0]
            id = utils.get_id(transform)
            hasher.set_mesh(mesh)
            hasher.update_points()
            hasher.update_normals()
            hasher.update_uvmap()
            # It must be one mesh pairing to one transform.
            geo_id_and_hash[id] = hasher.digest()
            hasher.clear()

        self.add_data({"modelProfile": geo_id_and_hash})

        # Perform extraction
        self.log.info("Extracting %s" % str(self.member))
        cmds.select(self.member, noExpand=True)

        with contextlib.nested(
            capsule.assign_shader(mesh_nodes, shadingEngine=clay_shader),
            capsule.undo_chunk_when_no_undo(),
        ):
            # Remove mesh history, to get rid of all intermediate nodes
            transforms = cmds.ls(self.member, type="transform")
            cmds.delete(transforms, constructionHistory=True)
            # Remove all stray shapes, ensure no intermediate nodes
            all_meshes = set(cmds.ls(self.member, type="mesh", long=True))
            cmds.delete(list(all_meshes - set(mesh_nodes)))

            cmds.file(
                entry_path,
                force=True,
                typ="mayaBinary",
                exportSelected=True,
                preserveReferences=False,
                # Shader assignment is the responsibility of
                # riggers (for animators) and lookdev (for
                # rendering).
                shader=False,
                # Construction history inherited from collection
                # This enables a selective export of nodes
                # relevant to this particular plug-in.
                constructionHistory=False
            )

        self.add_data({
            "entryFileName": entry_file,
        })

        self.log.info("Extracted {name} to {path}".format(
            name=self.data["subset"],
            path=entry_path)
        )
Example #16
    def _collect_components_matrix(self, data, container):
        from maya import cmds
        from reveries.lib import DEFAULT_MATRIX, matrix_equals
        from reveries.maya import utils as maya_utils
        from reveries.maya import hierarchy

        id_path = hierarchy.container_to_id_path(container)

        data["subMatrix"][id_path] = dict()
        data["inheritsTransform"][id_path] = dict()
        data["hidden"][id_path] = dict()

        nodes = cmds.sets(container["objectName"], query=True, nodesOnly=True)

        # Alembic, if any..
        # (NOTE) Shouldn't be extracted here with matrix, needs decoupling
        if container["loader"] == "PointCacheReferenceLoader":
            abc = cmds.ls(nodes, type="AlembicNode")
            if abc:
                abc = abc[0]  # Should have one and only one alembic node
                data["alembic"][id_path] = [
                    cmds.getAttr(abc + ".speed"),
                    cmds.getAttr(abc + ".offset"),
                    cmds.getAttr(abc + ".cycleType"),
                ]

        # Transform Matrix
        #
        transforms = cmds.ls(nodes, type="transform", referencedNodes=True)
        transforms = set(transforms) - set(cmds.ls(transforms, type=["joint"]))

        for transform in transforms:
            matrix = cmds.xform(transform,
                                query=True,
                                matrix=True,
                                objectSpace=True)

            if matrix_equals(matrix, DEFAULT_MATRIX):
                matrix = "<default>"

            address = maya_utils.get_id(transform)
            short = transform.split("|")[-1].split(":")[-1]
            # (NOTE) New data model for duplicated AvalonID..
            #   Use transform node's short name as a buffer for AvalonID
            #   duplication..
            if address not in data["subMatrix"][id_path]:
                data["subMatrix"][id_path][address] = dict()
            data["subMatrix"][id_path][address][short] = matrix

            # Collect `inheritsTransform`...
            inherits = cmds.getAttr(transform + ".inheritsTransform")
            if address not in data["inheritsTransform"][id_path]:
                # (NOTE) New data model for duplicated AvalonID..
                data["inheritsTransform"][id_path][address] = dict()
            data["inheritsTransform"][id_path][address][short] = inherits

            # Collect visibility with matrix
            visibility = cmds.getAttr(transform + ".visibility")
            if not visibility:
                # Only record hidden nodes
                if address not in data["hidden"][id_path]:
                    # (NOTE) New data model for duplicated AvalonID..
                    data["hidden"][id_path][address] = list()
                data["hidden"][id_path][address].append(short)

        # Collect subset group node's matrix
        subset_group = container["subsetGroup"]

        matrix = cmds.xform(subset_group,
                            query=True,
                            matrix=True,
                            objectSpace=True)
        inherits = cmds.getAttr(subset_group + ".inheritsTransform")

        name = subset_group.rsplit(":", 1)[-1]
        data["subMatrix"][id_path]["GROUP"] = {name: matrix}
        data["inheritsTransform"][id_path]["GROUP"] = {name: inherits}
Example #17
    def process(self, instance):
        from maya import cmds
        from reveries import utils
        from reveries.maya import io, utils as maya_utils
        from reveries.maya.xgen import legacy as xgen

        staging_dir = utils.stage_dir()

        files = list()
        xgen_files = list()
        descriptions_data = dict()

        for desc in instance.data["xgenDescriptions"]:
            palette = xgen.get_palette_by_description(desc)

            # Save UUID and bounding
            descriptions_data[desc] = {
                "id": maya_utils.get_id(desc),
                "bound": xgen.list_bound_geometry(desc),
            }

            # Stage maps
            map_stage = staging_dir + "/maps/%s" % palette

            for head, src in xgen.maps_to_transfer(desc):
                relative = os.path.relpath(src, head)
                if os.path.isfile(src):
                    relative = os.path.dirname(relative)
                    ship = shutil.copy2
                elif os.path.isdir(src):
                    ship = shutil.copytree
                else:
                    continue

                dst_dir = map_stage + "/" + relative
                if not os.path.isdir(dst_dir):
                    os.makedirs(dst_dir)

                try:
                    ship(src, dst_dir)
                except OSError as e:
                    msg = "An unexpected error occurred: %s" % e
                    self.log.critical(msg)
                    raise

            for root, _, fnames in os.walk(map_stage):
                relative = os.path.relpath(root, staging_dir)
                relative = "" if relative == "." else (relative + "/")
                relative = relative.replace("\\", "/")
                for file in fnames:
                    map_file = relative + file
                    files.append(map_file)

            # Export guides
            guides = xgen.list_guides(desc)
            if guides:
                guide_file = "guides/%s/%s.abc" % (palette, desc)
                guide_path = "%s/%s" % (staging_dir, guide_file)
                io.export_xgen_LGC_guides(guides, guide_path)

                files.append(guide_file)

            # Export grooming
            groom = xgen.get_groom(desc)
            if groom and cmds.objExists(groom):
                groom_dir = "groom/%s/%s" % (palette, desc)
                groom_path = "%s/%s" % (staging_dir, groom_dir)
                xgen.export_grooming(desc, groom, groom_path)

                # Walk groom_path and add into files
                for root, _, fnames in os.walk(groom_path):
                    relative = os.path.relpath(root, staging_dir)
                    relative = "" if relative == "." else (relative + "/")
                    for file in fnames:
                        groom_file = relative + file
                        files.append(groom_file)

        # Extract palette
        for palette in instance.data["xgenPalettes"]:
            xgen_file = palette + ".xgen"
            xgen_path = "%s/%s" % (staging_dir, xgen_file)
            io.export_xgen_LGC_palette(palette, xgen_path)

            xgen_files.append(xgen_file)
            files.append(xgen_file)

            # Culled
            xgd_file = "deltas/%s/%s_culled.xgd" % (palette, palette)
            xgd_path = "%s/%s" % (staging_dir, xgd_file)
            if xgen.save_culled_as_delta(palette, xgd_path):
                self.log.info("Culled primitives saved.")

                files.append(xgd_file)

        instance.data["repr.XGenLegacy._stage"] = staging_dir
        instance.data["repr.XGenLegacy._files"] = files
        instance.data["repr.XGenLegacy.entryFileName"] = None  # no entry file
        instance.data["repr.XGenLegacy.descriptionsData"] = descriptions_data
        instance.data["repr.XGenLegacy.palettes"] = xgen_files
        instance.data["repr.XGenLegacy.step"] = instance.data["step"]
    def extract_XGenInteractive(self, packager):
        from maya import cmds

        package_dir = packager.create_package()

        bound_map = dict()
        clay_shader = "initialShadingGroup"
        descriptions = self.data["igsDescriptions"]
        with capsule.assign_shader(descriptions, shadingEngine=clay_shader):

            for description in descriptions:

                desc_id = utils.get_id(description)

                # Get bound meshes
                bound_map[desc_id] = list()
                for mesh in xgen.interactive.list_bound_meshes(description):
                    transform = cmds.listRelatives(mesh, parent=True)
                    id = utils.get_id(transform[0])
                    bound_map[desc_id].append(id)

            # Export preset
            # (NOTE) Saving with ext `.ma` instead of `.xgip` is because
            #        I'd like to use a reference to load it later.
            #        Referencing a file that is not `.ma`, `.mb` or another
            #        normal ext will crash Maya on file save.
            entry_file = packager.file_name("ma")
            entry_path = os.path.join(package_dir, entry_file)

            # (NOTE) Separating grooms and bound meshes does not seem to
            #        preserve sculpt layer data entirely correctly.
            #        For example, hair strands sculpted from long to very
            #        short may end up noisy after being imported back.
            #
            #        So now we export the grooms with bound meshes...
            #
            # io.export_xgen_IGS_presets(descriptions, entry_path)

            with contextlib.nested(
                    capsule.no_display_layers(self.member),
                    capsule.maintained_selection(),
            ):
                cmds.select(descriptions)

                cmds.file(entry_path,
                          force=True,
                          typ="mayaAscii",
                          exportSelected=True,
                          preserveReferences=False,
                          channels=True,
                          constraints=True,
                          expressions=True,
                          constructionHistory=True)

        # Write preset bounding map
        link_file = packager.file_name("json")
        link_path = os.path.join(package_dir, link_file)

        with open(link_path, "w") as fp:
            json.dump(bound_map, fp, ensure_ascii=False)

        packager.add_data({
            "linkFname": link_file,
            "entryFileName": entry_file,
        })
Example #19
    def process(self, instance):
        from maya import cmds
        from avalon import maya
        from reveries import utils
        from reveries.maya import lib, capsule, utils as maya_utils

        staging_dir = utils.stage_dir()

        filename = "%s.ma" % instance.data["subset"]
        outpath = "%s/%s" % (staging_dir, filename)

        linkfile = "%s.json" % instance.data["subset"]
        linkpath = "%s/%s" % (staging_dir, linkfile)

        instance.data["repr.LookDev._stage"] = staging_dir
        instance.data["repr.LookDev._files"] = [filename, linkfile]
        instance.data["repr.LookDev.entryFileName"] = filename
        instance.data["repr.LookDev.linkFname"] = linkfile

        # Serialise shaders relationships
        #
        self.log.info("Serialising shaders..")

        shader_by_id = lib.serialise_shaders(instance.data["dagMembers"])
        assert shader_by_id, "The map of shader relationship is empty."

        # Extract shaders
        #
        self.log.info("Extracting shaders..")

        child_instances = instance.data.get("childInstances", [])
        try:
            texture = next(chd for chd in child_instances
                           if chd.data["family"] == "reveries.texture")
        except StopIteration:
            file_node_attrs = dict()
        else:
            file_node_attrs = texture.data.get("fileNodeAttrs", dict())

        with contextlib.nested(
                maya.maintained_selection(),
                capsule.ref_edit_unlock(),
                # (NOTE) Ensure attribute unlock
                capsule.attribute_states(file_node_attrs.keys(), lock=False),
                # Change to published path
                capsule.attribute_values(file_node_attrs),
                capsule.no_refresh(),
        ):
            # Select full shading network
            # If only select shadingGroups, and if there are any node
            # connected to Dag node (i.e. drivenKey), then the command
            # will not only export selected shadingGroups' shading network,
            # but also export other related DAG nodes (i.e. full hierarchy)
            cmds.select(instance, replace=True, noExpand=True)

            cmds.file(
                outpath,
                options="v=0;",
                type="mayaAscii",
                force=True,
                exportSelected=True,
                preserveReferences=False,
                constructionHistory=False,
                channels=True,  # allow animation
                constraints=False,
                shader=True,
                expressions=True)

        # Animatable attrs
        # Custom attributes in assembly node which require to be animated.
        self.log.info("Serialising 'avnlook_' prefixed attributes..")
        avnlook_anim = dict()
        for node in cmds.ls(instance.data["dagMembers"], type="transform"):
            id = maya_utils.get_id(node)
            user_attrs = cmds.listAttr(node, userDefined=True) or []
            for attr in user_attrs:
                if not attr.startswith("avnlook_"):
                    continue
                connected = cmds.listConnections(node + "." + attr,
                                                 source=False,
                                                 destination=True,
                                                 plugs=True)
                if connected:
                    avnlook_anim[id + "." + attr] = connected

        surfaces = cmds.ls(instance.data["dagMembers"],
                           noIntermediate=True,
                           type="surfaceShape")

        # UV Chooser
        uv_chooser = dict()
        for chooser in cmds.ls(instance, type="uvChooser"):
            chooser_id = maya_utils.get_id(chooser)

            for src in cmds.listConnections(chooser + ".uvSets",
                                            source=True,
                                            destination=False,
                                            plugs=True) or []:
                geo, attr = src.split(".", 1)
                geo = cmds.listRelatives(geo, parent=True, path=True)[0]
                geo_attr = maya_utils.get_id(geo) + "." + attr

                if chooser_id not in uv_chooser:
                    uv_chooser[chooser_id] = list()
                if geo_attr not in uv_chooser[chooser_id]:
                    uv_chooser[chooser_id].append(geo_attr)

        # CreaseSet
        crease_sets = dict()
        creases = list()

        for node in surfaces:
            creases += cmds.ls(cmds.listSets(object=node), type="creaseSet")

        creases = list(set(creases))

        for cres in creases:
            # Grouping crease set members with crease level value.
            level = cmds.getAttr(cres + ".creaseLevel")
            if level not in crease_sets:
                crease_sets[level] = list()

            for member in cmds.ls(cmds.sets(cres, query=True), long=True):
                node, edges = member.split(".")
                if node not in instance.data["dagMembers"]:
                    continue
                # We have validated Avalon UUID, so there must be a valid ID.
                id = maya_utils.get_id(node)
                crease_sets[level].append(id + "." + edges)

        # Arnold attributes
        arnold_attrs = dict()

        try:
            # (TODO) This should be improved. see issue #65
            from reveries.maya import arnold
        except RuntimeError as e:
            self.log.debug(e)
        else:
            ai_sets = dict()
            for objset in cmds.ls(type="objectSet"):
                if not lib.hasAttr(objset, "aiOverride"):
                    continue
                if not cmds.getAttr(objset + ".aiOverride"):
                    continue
                # Ignore pyblish family instance
                if (lib.hasAttr(objset, "id")
                        and read(objset + ".id") == "pyblish.avalon.instance"):
                    continue

                ai_sets[objset] = cmds.ls(cmds.sets(objset, query=True),
                                          long=True)

            # (TODO) Validate only transform nodes in ai set
            transforms = cmds.ls(cmds.listRelatives(surfaces, parent=True),
                                 long=True)
            for node in transforms:
                # There must be a valid ID
                id = maya_utils.get_id(node)

                attrs = dict()

                # Collect all `ai*` attributes from shape
                shape = cmds.listRelatives(
                    node, shapes=True, noIntermediate=True,
                    fullPath=True) or [None]
                shape = shape[0]
                if shape is None:
                    continue

                for attr in cmds.listAttr(shape, fromPlugin=True) or []:
                    value = read(shape + "." + attr)
                    if value is not None:
                        attrs[attr] = value

                # Collect all override attributes from objectSet
                for ai_set, member in ai_sets.items():
                    if node not in member:
                        continue

                    for attr in cmds.listAttr(ai_set, userDefined=True) or []:
                        # Collect all user attributes from the objectSet
                        # (NOTE) Some attributes like `castsShadows` do not
                        #        start with "ai" but still affect rendering
                        #        in Arnold.
                        value = read(node + "." + attr)
                        if value is not None:
                            attrs[attr] = value

                arnold_attrs[id] = attrs

        # VRay Attributes
        vray_attrs = dict()

        try:
            from reveries.maya import vray
        except RuntimeError as e:
            self.log.debug(e)
        else:
            for node in surfaces:
                # - shape
                values = vray.attributes_gather(node)
                if values:
                    vray_attrs[node] = values

                # - transform
                parent = cmds.listRelatives(node, parent=True)
                if parent:
                    values = vray.attributes_gather(parent[0])
                    if values:
                        vray_attrs[parent[0]] = values

        relationships = {
            "shaderById": shader_by_id,
            "avnlookAttrs": avnlook_anim,
            "uvChooser": uv_chooser,
            "creaseSets": crease_sets,
            "arnoldAttrs": arnold_attrs,
            "vrayAttrs": vray_attrs,
        }

        self.log.info("Extracting serialisation..")

        with open(linkpath, "w") as f:
            json.dump(relationships, f)
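
All of these examples treat utils.get_id(node) as returning the Avalon UUID stored on the node, or None when the node carries no ID. A minimal sketch of such a helper; the attribute name "AvalonID" is an assumption for illustration, not confirmed by these examples:

    from maya import cmds

    def get_id(node, attr="AvalonID"):
        # Hypothetical attribute name; return None when the node has no ID,
        # matching how every example above treats a missing UUID
        if not cmds.attributeQuery(attr, node=node, exists=True):
            return None
        return cmds.getAttr("%s.%s" % (node, attr)) or None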