def process(self, context):

        if lib.in_remote():
            return

        maya.unlock()

        with capsule.maintained_selection():
            cmds.file(rename=context.data["originMaking"])
            # Change selection to refresh the window title so it
            # displays the new file name
            cmds.select("defaultLightSet")

        if all(result["success"] for result in context.data["results"]):

            self.log.info("Publish succeed, save scene back to workfile.")
            cmds.file(save=True, force=True)

        else:
            # Mark as failed if an error was raised during extraction or
            # integration
            publishing = context.data["currentMaking"]
            scene_dir, file_name = os.path.split(publishing)
            file_name = "__failed." + file_name

            os.rename(publishing, os.path.join(scene_dir, file_name))
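Nearly every example below leans on `capsule.maintained_selection()` to restore the user's selection after mutating it. A minimal sketch of how such a context manager can be written with plain `maya.cmds` (the actual reveries.maya.capsule implementation may differ):

    import contextlib
    from maya import cmds

    @contextlib.contextmanager
    def maintained_selection():
        # Remember the current selection and restore it on exit
        previous = cmds.ls(selection=True, long=True)
        try:
            yield
        finally:
            if previous:
                cmds.select(previous, replace=True, noExpand=True)
            else:
                cmds.select(clear=True)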
Example #2
    def process(self, containers):

        _repr_cache = dict()
        standins = list()
        for container in containers:
            if not container.get("loader") == "ArnoldAssLoader":
                continue

            if not cmds.objExists(container["subsetGroup"]):
                continue

            asset_id = avalon.io.ObjectId(container["assetId"])
            if asset_id not in _repr_cache:
                asset = avalon.io.find_one({"_id": asset_id})
                representation = avalon.io.locate([
                    avalon.api.Session["AVALON_PROJECT"],
                    asset["name"],
                    "modelDefault",
                    -1,
                    "mayaBinary",
                ])
                _repr_cache[asset_id] = representation
            else:
                representation = _repr_cache[asset_id]

            if representation is None:
                continue

            subset_group = cmds.ls(container["subsetGroup"])[0]
            parent = cmds.listRelatives(subset_group, parent=True, path=True)
            matrix = cmds.xform(subset_group,
                                query=True,
                                matrix=True,
                                objectSpace=True)

            data = (
                parent[0] if parent else "",
                matrix,
                container,
                representation,
            )
            standins.append(data)

        switched_models = set()
        ModelLoader = next(Loader
                           for Loader in avalon.api.discover(avalon.api.Loader)
                           if Loader.__name__ == "ModelLoader")

        with maintained_selection():
            for parent, matrix, container, representation in standins:
                avalon.api.remove(container)
                container = avalon.api.load(ModelLoader, representation)
                subset_group = cmds.ls(container["subsetGroup"])[0]
                if parent:
                    cmds.parent(subset_group, parent)
                cmds.xform(subset_group, matrix=matrix, objectSpace=True)
                switched_models.add(container["objectName"])

        return switched_models
    def extract_alembic(self, nodes, outpath):
        import contextlib
        import maya.cmds as cmds
        from reveries.maya import capsule, io, lib

        with contextlib.nested(
                capsule.no_undo(),
                capsule.no_display_layers(nodes),
                capsule.no_smooth_preview(),
                capsule.maintained_selection(),
                capsule.without_extension(),
        ):

            cmds.select(nodes, noExpand=True)

            frame = cmds.currentTime(query=True)
            io.export_alembic(
                outpath,
                frame,
                frame,
                selection=True,
                renderableOnly=True,
                writeCreases=True,
                worldSpace=True,
                uvWrite=True,
                writeUVSets=True,
                attr=[
                    lib.AVALON_ID_ATTR_LONG,
                ],
                attrPrefix=[
                    "ai",  # Write out Arnold attributes
                ],
            )
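`contextlib.nested` exists only in Python 2 and was removed in Python 3; under Python 3 the same stacking of capsule helpers can be written with `contextlib.ExitStack`. A minimal sketch of the equivalent setup:

    from contextlib import ExitStack

    with ExitStack() as stack:
        stack.enter_context(capsule.no_undo())
        stack.enter_context(capsule.no_display_layers(nodes))
        stack.enter_context(capsule.no_smooth_preview())
        stack.enter_context(capsule.maintained_selection())
        stack.enter_context(capsule.without_extension())
        # ...then select and export as above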
    def process(self, containers):

        shaders = set()
        for container in containers:
            if not container.get("loader") == "LookLoader":
                continue

            # Select assigned
            members = cmds.sets(container["objectName"], query=True)
            shaders.update(cmds.ls(members, type="shadingEngine"))

        with maintained_selection():
            cmds.select(list(shaders), replace=True)

            # Parse selected
            containers = avalon.maya.ls()
            container_names = set(c["objectName"] for c in containers)

            selected_items = set()
            for node in cmds.ls(sl=True):
                objsets = cmds.listSets(object=node) or []
                for objset in objsets:
                    if objset in container_names:
                        selected_items.add(objset)
                        break

        return selected_items
    def extract_mayabinary(self, nodes, outpath):
        import contextlib
        import maya.cmds as cmds
        from reveries.maya import capsule

        geo_id_and_hash = None

        with contextlib.nested(
            capsule.no_undo(),
            capsule.no_display_layers(nodes),
            capsule.no_smooth_preview(),
            capsule.maintained_selection(),
            capsule.without_extension(),
        ):

            mesh_nodes = cmds.ls(nodes,
                                 type="mesh",
                                 noIntermediate=True,
                                 long=True)
            clay_shader = "initialShadingGroup"

            # Perform extraction
            cmds.select(nodes, noExpand=True)

            with capsule.undo_chunk_when_no_undo():

                # Remove mesh history to get rid of all intermediate nodes
                transforms = cmds.ls(nodes, type="transform")
                cmds.delete(transforms, constructionHistory=True)
                # Remove all stray shapes, ensure no intermediate nodes
                all_meshes = set(cmds.ls(nodes, type="mesh", long=True))
                cmds.delete(list(all_meshes - set(mesh_nodes)))

                geo_id_and_hash = self.hash(set(mesh_nodes))

                with capsule.assign_shader(mesh_nodes,
                                           shadingEngine=clay_shader):
                    cmds.file(
                        outpath,
                        force=True,
                        typ="mayaBinary",
                        exportSelectedStrict=True,
                        preserveReferences=False,
                        # Shader assignment is the responsibility of
                        # riggers (for animators) and lookdev (for
                        # rendering).
                        shader=False,
                        # Construction history inherited from collection
                        # This enables a selective export of nodes
                        # relevant to this particular plug-in.
                        constructionHistory=False,
                        channels=False,
                        constraints=False,
                        expressions=False,
                    )

        return geo_id_and_hash
    def extract(self):

        with contextlib.nested(
            capsule.no_undo(),
            capsule.no_display_layers(self.member),
            capsule.no_smooth_preview(),
            capsule.maintained_selection(),
            capsule.without_extension(),
        ):
            super(ExtractModel, self).extract()
Example #7
    def extract_Alembic(self):
        entry_file = self.file_name("ma")
        cache_file = self.file_name("abc")
        package_path = self.create_package()
        entry_path = os.path.join(package_path, entry_file)
        cache_path = os.path.join(package_path, cache_file)

        root = cmds.ls(sl=True, long=True)

        with capsule.maintained_selection():
            # Selection may change if there are duplicate-named nodes
            # that require instancing to resolve

            with capsule.delete_after() as delete_bin:

                # (NOTE) We need to check for duplicate-named nodes, or
                #        an error will be raised during Alembic export.
                result = lib.ls_duplicated_name(root)
                duplicated = [n for m in result.values() for n in m]
                if duplicated:
                    # Create instance to avoid duplicated names
                    instanced = cmds.instance(duplicated)
                    # Instance nodes will be deleted after the export
                    delete_bin.extend(instanced)
                    # Update root nodes
                    root = list(set(root) - set(duplicated)) + instanced

                io.export_alembic(
                    cache_path,
                    self.start_frame,
                    self.end_frame,
                    selection=False,
                    renderableOnly=True,
                    writeCreases=True,
                    worldSpace=True,
                    root=root,
                    attr=[
                        lib.AVALON_ID_ATTR_LONG,
                    ],
                    attrPrefix=[
                        "ai",  # Write out Arnold attributes
                    ],
                )

        io.wrap_abc(entry_path, [(cache_file, "ROOT")])

        self.add_data({"entryFileName": entry_file})
        self.add_range_data()
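`lib.ls_duplicated_name()` above is expected to map a short node name to the long names sharing it. A hypothetical sketch of such a helper (the real implementation may differ):

    from collections import defaultdict
    from maya import cmds

    def ls_duplicated_name(nodes):
        # Group long node names by their short (leaf) name
        names = defaultdict(list)
        for node in cmds.ls(nodes, long=True):
            names[node.rsplit("|", 1)[-1]].append(node)
        # Keep only short names that occur more than once
        return dict((name, found) for name, found in names.items()
                    if len(found) > 1)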
Example #8
    def extract(self):

        if self.data.get("staticCache"):
            self.start_frame = cmds.currentTime(query=True)
            self.end_frame = cmds.currentTime(query=True)
        else:
            context_data = self.context.data
            self.start_frame = context_data.get("startFrame")
            self.end_frame = context_data.get("endFrame")

        with contextlib.nested(
            capsule.no_undo(),
            capsule.no_refresh(),
            capsule.evaluation("off"),
            capsule.maintained_selection(),
        ):
            cmds.select(self.data["outCache"], replace=True)
            super(ExtractPointCache, self).extract()
    def export_fbx(self, outpath, cachepath, cachename, nodes, keep_namespace):
        import contextlib
        from reveries.maya import io, capsule
        from maya import cmds

        with contextlib.nested(
                capsule.no_undo(),
                capsule.no_refresh(),
                capsule.evaluation("off"),
                capsule.maintained_selection(),
        ):

            cmds.select(nodes, replace=True)

            with capsule.StripNamespace([] if keep_namespace else nodes):
                with io.export_fbx_set_pointcache("FBXCacheSET"):
                    io.export_fbx(cachepath)

                io.wrap_fbx(outpath, [(cachename, "ROOT")])
Example #10
    def extract_FBXCache(self):
        entry_file = self.file_name("ma")
        cache_file = self.file_name("fbx")
        package_path = self.create_package()
        entry_path = os.path.join(package_path, entry_file)
        cache_path = os.path.join(package_path, cache_file)

        # bake visible key
        with capsule.maintained_selection():
            lib.bake_hierarchy_visibility(
                cmds.ls(sl=True), self.start_frame, self.end_frame)
        with io.export_fbx_set_pointcache("FBXCacheSET"):
            io.export_fbx(cache_path)

        io.wrap_fbx(entry_path, [(cache_file, "ROOT")])

        self.add_data({"entryFileName": entry_file})
        self.add_range_data()
    def export_gpu(self, outpath, cachepath, cachename, start, end, assemblies,
                   attr_values):
        import contextlib
        from reveries.maya import io, capsule
        from maya import cmds

        with contextlib.nested(
                capsule.no_undo(),
                capsule.no_refresh(),
                capsule.evaluation("off"),
                capsule.maintained_selection(),
        ):

            cmds.select(assemblies, replace=True, noExpand=True)

            with contextlib.nested(
                    capsule.attribute_values(attr_values),
                    # Mute animated visibility channels
                    capsule.attribute_mute(list(attr_values.keys())),
            ):
                io.export_gpu(cachepath, start, end)
                io.wrap_gpu(outpath, [(cachename, "ROOT")])
Example #12
    def extract_Ass(self):
        # Ensure mtoa loaded
        cmds.loadPlugin("mtoa", quiet=True)

        package_path = self.create_package()
        cache_file = self.file_name("ass")
        cache_path = os.path.join(package_path, cache_file)

        with contextlib.nested(
                capsule.no_undo(),
                capsule.no_refresh(),
                capsule.evaluation("off"),
                capsule.maintained_selection(),
                capsule.ref_edit_unlock(),
                remove_file_env_path(self.data),
        ):
            cmds.select(self.member, replace=True)
            asses = cmds.arnoldExportAss(filename=cache_path,
                                         selected=True,
                                         startFrame=self.data["startFrame"],
                                         endFrame=self.data["endFrame"],
                                         frameStep=self.data["byFrameStep"],
                                         shadowLinks=1,
                                         lightLinks=1,
                                         expandProcedurals=True,
                                         mask=24)

        use_sequence = self.data["startFrame"] != self.data["endFrame"]
        entry_file = os.path.basename(asses[0])

        self.add_data({
            "entryFileName": entry_file,
            "useSequence": use_sequence
        })
        if use_sequence:
            self.add_data({
                "startFrame": self.data["startFrame"],
                "endFrame": self.data["endFrame"]
            })
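The `mask` flag of `cmds.arnoldExportAss` is a bitwise OR of Arnold node-type flags, so `mask=24` above exports shapes and shaders only. A sketch of how the mask values used in these examples appear to be composed (values matching Arnold's `AI_NODE_*` constants):

    AI_NODE_SHAPE = 8
    AI_NODE_SHADER = 16
    AI_NODE_OVERRIDE = 32
    AI_NODE_COLOR_MANAGER = 2048
    AI_NODE_OPERATOR = 4096

    mask = AI_NODE_SHAPE | AI_NODE_SHADER                      # 24
    mask_no_cm = mask | AI_NODE_OVERRIDE | AI_NODE_OPERATOR    # 4152
    mask_with_cm = mask_no_cm | AI_NODE_COLOR_MANAGER          # 6200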
Example #13
    def extract_anim(self, packager):
        cmds.loadPlugin("animImportExport", quiet=True)

        package_path = packager.create_package()

        entry_file = packager.file_name("anim")
        entry_path = os.path.join(package_path, entry_file)

        sele_file = packager.file_name("mel")
        sele_path = os.path.join(package_path, sele_file)

        # Save animated nodes with order
        with capsule.maintained_selection():
            cmds.select(self.data["outAnim"], replace=True)

            with contextlib.nested(
                    capsule.namespaced(self.data["animatedNamespace"],
                                       new=False),
                    capsule.relative_namespaced()):
                # Save with basename
                with open(sele_path, "w") as fp:
                    fp.write("select -r\n" + "\n".join(cmds.ls(sl=True)) + ";")

        context_data = self.context.data
        start = context_data.get("startFrame")
        end = context_data.get("endFrame")

        with contextlib.nested(
                capsule.no_refresh(),
                capsule.maintained_selection(),
                capsule.undo_chunk(),
        ):
            lib.bake(self.data["outAnim"],
                     frame_range=(start, end),
                     shape=False)

            cmds.select(self.data["outAnim"], replace=True, noExpand=True)
            cmds.file(entry_path,
                      force=True,
                      typ="animExport",
                      exportSelectedAnim=True,
                      options=("options=keys;"
                               "hierarchy=none;"
                               "precision=17;"
                               "intValue=17;"
                               "nodeNames=1;"
                               "verboseUnits=0;"
                               "whichRange=1;"
                               "helpPictures=0;"
                               "useChannelBox=0;"
                               "controlPoints=0;"
                               "shapes=0;"
                               "copyKeyCmd="
                               "-animation objects "
                               "-option keys "
                               "-hierarchy none "
                               "-controlPoints 0 "
                               "-shape 0"))

        packager.add_data({
            "entryFileName": entry_file,
            "animatedAssetId": self.data["animatedAssetId"]
        })
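The .mel selection file written above holds a single ordered select command; a sketch of inspecting it (node names illustrative):

    # Illustrative content of sele_path:
    #
    #   select -r
    #   nurbsCurve_ctrl_a
    #   nurbsCurve_ctrl_b;
    #
    with open(sele_path, "r") as fp:
        print(fp.read())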
Example #14
    def process(self, instance):
        from maya import cmds
        from reveries import utils
        from reveries.maya import lib, capsule

        cmds.loadPlugin("animImportExport", quiet=True)

        staging_dir = utils.stage_dir()
        script = "%s.mel" % instance.data["subset"]
        filename = "%s.anim" % instance.data["subset"]
        scriptpath = "%s/%s" % (staging_dir, script)
        outpath = "%s/%s" % (staging_dir, filename)

        animated_asset = instance.data["animatedAssetId"]

        instance.data["repr.anim._stage"] = staging_dir
        instance.data["repr.anim._files"] = [filename, script]
        instance.data["repr.anim.entryFileName"] = filename
        instance.data["repr.anim.animatedAssetId"] = animated_asset

        # Save animated nodes with order
        with capsule.maintained_selection():
            cmds.select(instance.data["outAnim"], replace=True)

            with contextlib.nested(
                    capsule.namespaced(instance.data["animatedNamespace"],
                                       new=False),
                    capsule.relative_namespaced()):
                # Save with basename
                with open(scriptpath, "w") as fp:
                    # Allow for nodes that do not exist between assets
                    fp.write("select -r `ls\n" + "\n".join(cmds.ls(sl=True)) +
                             "`;")

        context_data = instance.context.data
        start = context_data["startFrame"]
        end = context_data["endFrame"]

        instance.data["startFrame"] = start
        instance.data["endFrame"] = end

        with contextlib.nested(
                capsule.no_refresh(),
                capsule.maintained_selection(),
                capsule.undo_chunk(),
        ):
            lib.bake(
                instance.data["outAnim"],
                frame_range=(start, end),
                shape=False,
                # Remove baked from layer so to bake out all keys like
                # animLayers being merged.
                remove_baked_attr_from_layer=True)

            cmds.select(instance.data["outAnim"], replace=True, noExpand=True)
            cmds.file(outpath,
                      force=True,
                      typ="animExport",
                      exportSelectedAnim=True,
                      options=("options=keys;"
                               "hierarchy=none;"
                               "precision=17;"
                               "intValue=17;"
                               "nodeNames=1;"
                               "verboseUnits=0;"
                               "whichRange=1;"
                               "helpPictures=0;"
                               "useChannelBox=0;"
                               "controlPoints=0;"
                               "shapes=0;"
                               "copyKeyCmd="
                               "-animation objects "
                               "-option keys "
                               "-hierarchy none "
                               "-controlPoints 0 "
                               "-shape 0"))
    def process_import(self, context, name, namespace, group, options):
        from maya import cmds, mel
        from reveries import plugins

        representation = context["representation"]
        asset_id = representation["data"]["animatedAssetId"]
        selected = cmds.ls(selection=True, long=True)

        # Collect namespace from selected nodes
        namespaces = defaultdict(set)
        for node in selected:
            ns = lib.get_ns(node)
            if ns == ":":
                continue
            namespaces[ns].add(node)

        for ns, nodes in namespaces.items():
            try:
                container = pipeline.get_container_from_namespace(ns)
            except RuntimeError:
                continue

            if asset_id != cmds.getAttr(container + ".assetId"):
                confirm = plugins.message_box_warning(
                    "Warning",
                    "Applying animation to different asset, are you sure ?",
                    optional=True,
                )
                if not confirm:
                    raise Exception("Operation canceled.")

            target_ns = ns
            members = nodes
            break

        else:
            raise Exception("No matched asset found.")

        cmds.loadPlugin("animImportExport", quiet=True)

        entry_path = self.file_path(representation).replace("\\", "/")
        sele_path = entry_path.rsplit("anim", 1)[0] + "mel"
        sele_path = os.path.expandvars(sele_path)

        with capsule.maintained_selection():
            # Select nodes with order
            with contextlib.nested(capsule.namespaced(target_ns, new=False),
                                   capsule.relative_namespaced()):
                self._selection_patch(sele_path)
                mel.eval("source \"%s\"" % sele_path)

            targets = cmds.ls(selection=True, long=True)
            nodes = cmds.file(entry_path,
                              force=True,
                              type="animImport",
                              i=True,
                              importTimeRange="keep",
                              ignoreVersion=True,
                              returnNewNodes=True,
                              options=("targetTime=4;"
                                       "option=replace;"
                                       "connect=0"))
            # Apply the namespace ourselves, since animImport does not
            # take a -namespace flag
            namespaced_nodes = list()
            for node in nodes:
                node = cmds.rename(node, namespace + ":" + node)
                namespaced_nodes.append(node)

            # Delete nodes that did not connect to any target
            targets = set(targets)
            connected = list()
            for node in namespaced_nodes:
                future = cmds.listHistory(node, future=True)
                future = set(cmds.ls(future, long=True))
                if targets.intersection(future):
                    connected.append(node)
                else:
                    cmds.delete(node)

            if not connected:
                raise Exception("No animation been applied.")

            self[:] = connected

        # Drop the already-processed members and re-select the rest
        unprocessed = list(set(selected) - members)
        cmds.select(unprocessed, replace=True, noExpand=True)
Example #16
    def process(self, instance):
        from maya import cmds
        from reveries import utils
        from reveries.maya import xgen, capsule, utils as maya_utils

        staging_dir = utils.stage_dir()

        # Export preset
        # (NOTE) Saving with ext `.ma` instead of `.xgip` is because
        #        I'd like to load it later via reference. Referencing
        #        a file that is not `.ma`, `.mb` or another regular
        #        ext will crash Maya on file save.
        filename = "%s.ma" % instance.data["subset"]
        linkfile = "%s.json" % instance.data["subset"]
        outpath = "%s/%s" % (staging_dir, filename)
        linkpath = "%s/%s" % (staging_dir, linkfile)

        instance.data["repr.XGenInteractive._stage"] = staging_dir
        instance.data["repr.XGenInteractive._files"] = [filename, linkfile]
        instance.data["repr.XGenInteractive.entryFileName"] = filename
        instance.data["repr.XGenInteractive.linkFname"] = linkfile

        child_instances = instance.data.get("childInstances", [])
        try:
            texture = next(chd for chd in child_instances
                           if chd.data["family"] == "reveries.texture")
        except StopIteration:
            file_node_attrs = dict()
        else:
            file_node_attrs = texture.data.get("fileNodeAttrs", dict())

        bound_map = dict()
        clay_shader = "initialShadingGroup"
        descriptions = instance.data["igsDescriptions"]
        with capsule.assign_shader(descriptions, shadingEngine=clay_shader):

            for description in descriptions:

                desc_id = maya_utils.get_id(description)

                # Get bound meshes
                bound_map[desc_id] = list()
                for mesh in xgen.interactive.list_bound_meshes(description):
                    transform = cmds.listRelatives(mesh,
                                                   parent=True,
                                                   fullPath=True)
                    mesh_id = maya_utils.get_id(transform[0])
                    bound_map[desc_id].append(mesh_id)

            # (NOTE) Separating grooms from their bound meshes seems
            #        unable to preserve sculpt layer data entirely
            #        correctly. For example, long hair strands sculpted
            #        very short may come back noisy after import.
            #
            #        So now we export the grooms with bound meshes...
            #
            # io.export_xgen_IGS_presets(descriptions, outpath)

            with contextlib.nested(
                    capsule.no_display_layers(instance[:]),
                    # Change to published path
                    capsule.attribute_values(file_node_attrs),
                    capsule.maintained_selection(),
            ):
                cmds.select(descriptions)

                cmds.file(outpath,
                          force=True,
                          typ="mayaAscii",
                          exportSelected=True,
                          preserveReferences=False,
                          channels=True,
                          constraints=True,
                          expressions=True,
                          constructionHistory=True)

        # Parse preset bounding map

        with open(linkpath, "w") as fp:
            json.dump(bound_map, fp, ensure_ascii=False)
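At load time the link file can be read back to re-bind each description to its meshes by Avalon ID. A minimal sketch, where `ls_nodes_by_id` is a hypothetical helper resolving an Avalon ID to scene nodes:

    import json

    with open(linkpath, "r") as fp:
        bound_map = json.load(fp)  # {description_id: [mesh_id, ...]}

    for desc_id, mesh_ids in bound_map.items():
        # Hypothetical helper: resolve each Avalon ID to scene nodes
        bound_meshes = [node
                        for mesh_id in mesh_ids
                        for node in ls_nodes_by_id(mesh_id)]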
Example #17
    def export_ass(self,
                   nodes,
                   outpath,
                   file_node_attrs,
                   has_yeti,
                   start,
                   end,
                   step,
                   expand_procedurals=True):
        import contextlib
        from maya import cmds, mel
        from reveries.maya import arnold, capsule

        # Ensure option created
        arnold.utils.create_options()

        render_settings = {
            # Disable Auto TX update and enable to use existing TX
            "defaultArnoldRenderOptions.autotx": False,
            "defaultArnoldRenderOptions.use_existing_tiled_textures": True,
            # Ensure frame padding == 4
            "defaultRenderGlobals.extensionPadding": 4,
        }

        # Yeti
        if has_yeti:
            # In Deadline, this is a script job instead of a rendering
            # job, so the `pgYetiPreRender` Pre-Render MEL will not be
            # triggered. We need to call it ourselves, or Yeti will
            # complain about the cache temp dir not existing.
            mel.eval("pgYetiPreRender;")

        with contextlib.nested(
                capsule.no_undo(),
                capsule.no_refresh(),
                capsule.evaluation("off"),
                capsule.maintained_selection(),
                capsule.ref_edit_unlock(),
                # (NOTE) Ensure attributes are unlocked
                capsule.attribute_states(file_node_attrs.keys(), lock=False),
                # Change to published path
                capsule.attribute_values(file_node_attrs),
                # Fixed render settings
                capsule.attribute_values(render_settings),
        ):
            cmds.select(nodes, replace=True)
            asses = cmds.arnoldExportAss(
                filename=outpath,
                selected=True,
                startFrame=start,
                endFrame=end,
                frameStep=step,
                expandProcedurals=expand_procedurals,
                boundingBox=True,
                # Mask:
                #      Shapes,
                #      Shaders,
                #      Override Nodes,
                #      Operators,
                #
                # mask=4152,  # No Color Manager
                mask=6200)  # With Color Manager

            # Change to environment var embedded path
            root = avalon.api.registered_root().replace("\\", "/")
            project = avalon.api.Session["AVALON_PROJECT"]

            for ass in asses:
                lines = list()
                has_change = False
                with open(ass, "r") as assf:
                    for line in assf.readlines():
                        if line.startswith(" filename "):
                            line = line.replace(root, "[AVALON_PROJECTS]", 1)
                            line = line.replace(project, "[AVALON_PROJECT]", 1)
                            has_change = True
                        lines.append(line)

                # Remove color manager
                # (NOTE) If the Color Manager is included, rendering
                #        in Houdini or another DCC may raise an error.
                try:
                    s = lines.index("color_manager_syncolor\n")
                except ValueError:
                    # No color manager found
                    pass
                else:
                    e = lines.index("}\n", s) + 1
                    lines = lines[:s] + lines[e:]
                    has_change = True

                # Re-write
                if has_change:
                    with open(ass, "w") as assf:
                        assf.write("".join(lines))
Example #18
    def export_ass(data, start, end, step):

        arnold_tx_settings = {
            "defaultArnoldRenderOptions.autotx": False,
            "defaultArnoldRenderOptions.use_existing_tiled_textures": True,
        }

        # Yeti
        if data["hasYeti"]:
            # In Deadline, this is a script job instead of a rendering
            # job, so the `pgYetiPreRender` Pre-Render MEL will not be
            # triggered. We need to call it ourselves, or Yeti will
            # complain about the cache temp dir not existing.
            mel.eval("pgYetiPreRender;")

        with contextlib.nested(
                capsule.no_undo(),
                capsule.no_refresh(),
                capsule.evaluation("off"),
                capsule.maintained_selection(),
                capsule.ref_edit_unlock(),
                # (NOTE) Ensure attributes are unlocked
                capsule.attribute_states(data["fileNodeAttrs"].keys(),
                                         lock=False),
                # Change to published path
                capsule.attribute_values(data["fileNodeAttrs"]),
                # Disable Auto TX update and enable to use existing TX
                capsule.attribute_values(arnold_tx_settings),
        ):
            cmds.select(data["member"], replace=True)
            asses = cmds.arnoldExportAss(
                filename=data["cachePath"],
                selected=True,
                startFrame=start,
                endFrame=end,
                frameStep=step,
                expandProcedurals=True,
                boundingBox=True,
                # Mask:
                #      Shapes,
                #      Shaders,
                #      Override Nodes,
                #      Operators,
                #
                # (NOTE) If the Color Manager is included, rendering
                #        in Houdini or another DCC may raise an error.
                # mask=6200,  # With Color Manager
                #
                mask=4152)  # No Color Manager

            # Change to environment var embedded path
            root = avalon.api.registered_root().replace("\\", "/")
            project = avalon.api.Session["AVALON_PROJECT"]

            for ass in asses:
                lines = list()
                has_change = False
                with open(ass, "r") as assf:
                    for line in assf.readlines():
                        if line.startswith(" filename "):
                            line = line.replace(root, "[AVALON_PROJECTS]", 1)
                            line = line.replace(project, "[AVALON_PROJECT]", 1)
                            has_change = True
                        lines.append(line)

                if has_change:
                    with open(ass, "w") as assf:
                        assf.write("".join(lines))
    def extract_Alembic(self, packager):
        from reveries.maya import io, lib, capsule
        from maya import cmds

        packager.skip_stage()

        entry_file = packager.file_name("abc")
        package_path = packager.create_package()
        entry_path = os.path.join(package_path, entry_file)

        euler_filter = self.data.get("eulerFilter", False)

        root = self.data["outCache"]

        with capsule.maintained_selection():
            # Selection may change if there are duplicate-named nodes
            # that require instancing to resolve

            with capsule.delete_after() as delete_bin:

                # (NOTE) We need to check for duplicate-named nodes, or
                #        an error will be raised during Alembic export.
                result = lib.ls_duplicated_name(root)
                duplicated = [n for m in result.values() for n in m]
                if duplicated:
                    # Duplicate it so we get a new node with a unique name
                    unique_named = list()
                    for node in duplicated:
                        new_nodes = cmds.duplicate(node,
                                                   inputConnections=True,
                                                   renameChildren=True)
                        new_nodes = cmds.ls(new_nodes, long=True)
                        unique_named.append(new_nodes[0])
                        # New nodes will be deleted after the export
                        delete_bin.extend(new_nodes)

                    # Replace duplicate-named nodes with the unique-named ones
                    root = list(set(root) - set(duplicated)) + unique_named

                for node in set(root):
                    # (NOTE) If a descendant is instanced, it will appear
                    #        only once in the returned list.
                    root += cmds.listRelatives(node,
                                               allDescendents=True,
                                               fullPath=True,
                                               noIntermediate=True) or []
                root = list(set(root))
                cmds.select(root, replace=True, noExpand=True)

                io.export_alembic(
                    entry_path,
                    self.start_frame,
                    self.end_frame,
                    selection=True,
                    renderableOnly=True,
                    writeVisibility=True,
                    writeCreases=True,
                    worldSpace=True,
                    eulerFilter=euler_filter,
                    attr=[
                        lib.AVALON_ID_ATTR_LONG,
                    ],
                    attrPrefix=[
                        "ai",  # Write out Arnold attributes
                    ],
                )

        # (NOTE) Deprecated
        # io.wrap_abc(entry_path, [(cache_file, "ROOT")])

        packager.add_data({"entryFileName": entry_file})
        self.add_range_data()
    def export_alembic(self, root, outpath, start, end, euler_filter):
        import contextlib
        from reveries.maya import io, lib, capsule
        from maya import cmds

        with contextlib.nested(
                capsule.no_undo(),
                capsule.no_refresh(),
                capsule.evaluation("off"),
                capsule.maintained_selection(),
        ):
            # Selection may change if there are duplicate-named nodes
            # that require instancing to resolve

            with capsule.delete_after() as delete_bin:

                # (NOTE) We need to check for duplicate-named nodes, or
                #        an error will be raised during Alembic export.
                result = lib.ls_duplicated_name(root)
                duplicated = [n for m in result.values() for n in m]
                if duplicated:
                    self.log.info("Duplicate named nodes found, resolving...")
                    # Duplicate it so we get a new node with a unique name
                    unique_named = list()
                    for node in duplicated:
                        new_nodes = cmds.duplicate(node,
                                                   inputConnections=True,
                                                   renameChildren=True)
                        new_nodes = cmds.ls(new_nodes, long=True)
                        unique_named.append(new_nodes[0])
                        # New nodes will be deleted after the export
                        delete_bin.extend(new_nodes)

                    # Replace duplicate named nodes with unique named
                    root = list(set(root) - set(duplicated)) + unique_named

                for node in set(root):
                    # (NOTE) If a descendant is instanced, it will appear
                    #        only once in the returned list.
                    root += cmds.listRelatives(node,
                                               allDescendents=True,
                                               fullPath=True,
                                               noIntermediate=True) or []
                root = list(set(root))
                cmds.select(root, replace=True, noExpand=True)

                def _export_alembic():
                    io.export_alembic(
                        outpath,
                        start,
                        end,
                        selection=True,
                        renderableOnly=True,
                        writeVisibility=True,
                        writeCreases=True,
                        worldSpace=True,
                        eulerFilter=euler_filter,
                        attr=[
                            lib.AVALON_ID_ATTR_LONG,
                        ],
                        attrPrefix=[
                            "ai",  # Write out Arnold attributes
                            "avnlook_",  # Write out lookDev controls
                        ],
                    )

                auto_retry = 1
                while auto_retry:
                    try:
                        _export_alembic()
                    except RuntimeError as err:
                        if auto_retry:
                            # (NOTE) Auto-retry the export.
                            # For unknown reasons, some artists may hit a
                            # runtime error when exporting which re-running
                            # the publish without any change will resolve.
                            auto_retry -= 1
                            self.log.warning(err)
                            self.log.warning("Retrying...")
                        else:
                            raise err
                    else:
                        break
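The one-shot retry above generalizes into a small helper; a sketch:

    def retry(func, retries=1, logger=None):
        # Call func, retrying up to `retries` times on RuntimeError
        for attempt in range(retries + 1):
            try:
                return func()
            except RuntimeError as err:
                if attempt == retries:
                    raise
                if logger is not None:
                    logger.warning(err)
                    logger.warning("Retrying...")

    # e.g. retry(_export_alembic, retries=1, logger=self.log)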
    def extract_XGenInteractive(self, packager):
        from maya import cmds

        package_dir = packager.create_package()

        bound_map = dict()
        clay_shader = "initialShadingGroup"
        descriptions = self.data["igsDescriptions"]
        with capsule.assign_shader(descriptions, shadingEngine=clay_shader):

            for description in descriptions:

                desc_id = utils.get_id(description)

                # Get bound meshes
                bound_map[desc_id] = list()
                for mesh in xgen.interactive.list_bound_meshes(description):
                    transform = cmds.listRelatives(mesh, parent=True)
                    mesh_id = utils.get_id(transform[0])
                    bound_map[desc_id].append(mesh_id)

            # Export preset
            # (NOTE) Saving with ext `.ma` instead of `.xgip` is because
            #        I'd like to load it later via reference. Referencing
            #        a file that is not `.ma`, `.mb` or another regular
            #        ext will crash Maya on file save.
            entry_file = packager.file_name("ma")
            entry_path = os.path.join(package_dir, entry_file)

            # (NOTE) Separating grooms from their bound meshes seems
            #        unable to preserve sculpt layer data entirely
            #        correctly. For example, long hair strands sculpted
            #        very short may come back noisy after import.
            #
            #        So now we export the grooms with bound meshes...
            #
            # io.export_xgen_IGS_presets(descriptions, entry_path)

            with contextlib.nested(
                    capsule.no_display_layers(self.member),
                    capsule.maintained_selection(),
            ):
                cmds.select(descriptions)

                cmds.file(entry_path,
                          force=True,
                          typ="mayaAscii",
                          exportSelected=True,
                          preserveReferences=False,
                          channels=True,
                          constraints=True,
                          expressions=True,
                          constructionHistory=True)

        # Parse preset bounding map
        link_file = packager.file_name("json")
        link_path = os.path.join(package_dir, link_file)

        with open(link_path, "w") as fp:
            json.dump(bound_map, fp, ensure_ascii=False)

        packager.add_data({
            "linkFname": link_file,
            "entryFileName": entry_file,
        })