    def extract(self):
        context_data = self.context.data
        self.start = context_data.get("startFrame")
        self.end = context_data.get("endFrame")
        self.step = self.data.get("bakeStep", 1.0)
        camera = cmds.ls(self.member, type="camera", long=True)[0]

        self.camera_uuid = utils.get_id(camera)

        donot_bake = [camera + "." + attr for attr in DO_NOT_BAKE_THESE]

        with contextlib.nested(
                capsule.no_refresh(),
                capsule.attr_unkeyable(donot_bake),
                capsule.evaluation("off"),
                capsule.undo_chunk(),
        ):
            # bake to worldspace
            baked_camera = lib.bake_camera(camera, self.start, self.end,
                                           self.step)

            cmds.select(
                baked_camera,
                hierarchy=True,  # With shape
                replace=True,
                noExpand=True)

            super(ExtractCamera, self).extract()
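All of these examples stack several capsule context managers with contextlib.nested, which only exists in Python 2. Under Python 3 the same stacking could be written on top of contextlib.ExitStack; the helper below is a minimal sketch and its name is an assumption, not part of the reveries code.

import contextlib

@contextlib.contextmanager
def nested(*managers):
    # Enter each manager left to right; ExitStack unwinds them in reverse
    # order on exit, mirroring contextlib.nested's behavior.
    with contextlib.ExitStack() as stack:
        for manager in managers:
            stack.enter_context(manager)
        yield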
Example #2
    def extract(self):

        DO_NOT_BAKE_THESE = [
            "motionBlurOverride",
            "aiUseGlobalShutter",
            "aiShutterStart",
            "aiShutterEnd",
            "aiShutterType",
            "aiEnableDOF",
            "aiFov",
            "aiHorizontalFov",
            "aiVerticalFov",
        ]

        DO_BAKE_THESE = [
            "focalLength",
        ]
        DO_BAKE_THESE += lib.TRANSFORM_ATTRS

        context_data = self.context.data
        self.start = context_data.get("startFrame")
        self.end = context_data.get("endFrame")
        self.step = self.data.get("bakeStep", 1.0)
        camera = cmds.ls(self.member, type="camera", long=True)[0]

        self.camera_uuid = utils.get_id(camera)

        cam_transform = cmds.listRelatives(camera,
                                           parent=True,
                                           fullPath=True)[0]

        donot_bake = [cam_transform + "." + attr for attr in DO_NOT_BAKE_THESE]
        do_bake = [cam_transform + "." + attr for attr in DO_BAKE_THESE]

        with contextlib.nested(
            capsule.no_refresh(),
            capsule.attribute_states(donot_bake, lock=False, keyable=False),
            capsule.attribute_states(do_bake, lock=False, keyable=True),
            capsule.evaluation("off"),
        ):
            with capsule.delete_after() as delete_bin:

                # bake to worldspace
                frame_range = (self.start, self.end)
                baked_camera = lib.bake_to_world_space(cam_transform,
                                                       frame_range,
                                                       step=self.step)[0]
                delete_bin.append(baked_camera)

                cmds.select(baked_camera,
                            hierarchy=True,  # With shape
                            replace=True,
                            noExpand=True)

                super(ExtractCamera, self).extract()
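capsule.attribute_states is used here to force the do-bake attributes keyable and the do-not-bake attributes non-keyable while keeping everything unlocked. A minimal sketch of what such a context manager could look like, assuming it simply saves and restores the lock/keyable flags with maya.cmds (an illustration, not the actual capsule implementation):

import contextlib
from maya import cmds

@contextlib.contextmanager
def attribute_states(attrs, lock=False, keyable=False):
    # Remember the current lock/keyable state of every attribute.
    original = {
        attr: (cmds.getAttr(attr, lock=True), cmds.getAttr(attr, keyable=True))
        for attr in attrs
    }
    try:
        for attr in attrs:
            cmds.setAttr(attr, lock=lock, keyable=keyable)
        yield
    finally:
        # Restore the original states even if the export fails.
        for attr, (was_locked, was_keyable) in original.items():
            cmds.setAttr(attr, lock=was_locked, keyable=was_keyable)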
Example #3
    def extract(self):

        if self.data.get("staticCache"):
            self.start_frame = cmds.currentTime(query=True)
            self.end_frame = cmds.currentTime(query=True)
        else:
            context_data = self.context.data
            self.start_frame = context_data.get("startFrame")
            self.end_frame = context_data.get("endFrame")

        with contextlib.nested(
            capsule.no_undo(),
            capsule.no_refresh(),
            capsule.evaluation("off"),
            capsule.maintained_selection(),
        ):
            cmds.select(self.data["outCache"], replace=True)
            super(ExtractPointCache, self).extract()
    def export_fbx(self, outpath, cachepath, cachename, nodes, keep_namespace):
        from reveries.maya import io, capsule
        from maya import cmds

        with contextlib.nested(
                capsule.no_undo(),
                capsule.no_refresh(),
                capsule.evaluation("off"),
                capsule.maintained_selection(),
        ):

            cmds.select(nodes, replace=True)

            with capsule.StripNamespace([] if keep_namespace else nodes):
                with io.export_fbx_set_pointcache("FBXCacheSET"):
                    io.export_fbx(cachepath)

                io.wrap_fbx(outpath, [(cachename, "ROOT")])
    def export_gpu(self, outpath, cachepath, cachename, start, end, assemblies,
                   attr_values):
        from reveries.maya import io, capsule
        from maya import cmds

        with contextlib.nested(
                capsule.no_undo(),
                capsule.no_refresh(),
                capsule.evaluation("off"),
                capsule.maintained_selection(),
        ):

            cmds.select(assemblies, replace=True, noExpand=True)

            with contextlib.nested(
                    capsule.attribute_values(attr_values),
                    # Mute animated visibility channels
                    capsule.attribute_mute(list(attr_values.keys())),
            ):
                io.export_gpu(cachepath, start, end)
                io.wrap_gpu(outpath, [(cachename, "ROOT")])
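For reference, a call to export_gpu might look like the following; every path, name, and attribute value is illustrative only, and extractor stands for a hypothetical instance of the publishing plug-in.

extractor.export_gpu(
    outpath="/publish/modelCache.ma",        # wrapper written by io.wrap_gpu
    cachepath="/publish/modelCache.abc",     # GPU cache written by io.export_gpu
    cachename="modelCache",
    start=1001,
    end=1100,
    assemblies=["|ROOT"],
    attr_values={"|ROOT.visibility": True},  # applied, then muted during export
)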
Example #6
    def extract_Ass(self):
        # Ensure mtoa loaded
        cmds.loadPlugin("mtoa", quiet=True)

        package_path = self.create_package()
        cache_file = self.file_name("ass")
        cache_path = os.path.join(package_path, cache_file)

        with contextlib.nested(
                capsule.no_undo(),
                capsule.no_refresh(),
                capsule.evaluation("off"),
                capsule.maintained_selection(),
                capsule.ref_edit_unlock(),
                remove_file_env_path(self.data),
        ):
            cmds.select(self.member, replace=True)
            asses = cmds.arnoldExportAss(filename=cache_path,
                                         selected=True,
                                         startFrame=self.data["startFrame"],
                                         endFrame=self.data["endFrame"],
                                         frameStep=self.data["byFrameStep"],
                                         shadowLinks=1,
                                         lightLinks=1,
                                         expandProcedurals=True,
                                         mask=24)

        use_sequence = self.data["startFrame"] != self.data["endFrame"]
        entry_file = os.path.basename(asses[0])

        self.add_data({
            "entryFileName": entry_file,
            "useSequence": use_sequence
        })
        if use_sequence:
            self.add_data({
                "startFrame": self.data["startFrame"],
                "endFrame": self.data["endFrame"]
            })
Example #7
    def export_ass(self,
                   nodes,
                   outpath,
                   file_node_attrs,
                   has_yeti,
                   start,
                   end,
                   step,
                   expand_procedurals=True):
        from maya import cmds, mel
        from reveries.maya import arnold, capsule

        # Ensure the Arnold render options node exists
        arnold.utils.create_options()

        render_settings = {
            # Disable auto TX update and use existing TX textures
            "defaultArnoldRenderOptions.autotx": False,
            "defaultArnoldRenderOptions.use_existing_tiled_textures": True,
            # Ensure frame padding == 4
            "defaultRenderGlobals.extensionPadding": 4,
        }

        # Yeti
        if has_yeti:
            # In Deadline, this is a script job instead of a rendering job,
            # so the `pgYetiPreRender` Pre-Render MEL will not be triggered.
            # We need to call it ourselves, or Yeti will complain that the
            # cache temp dir does not exist.
            mel.eval("pgYetiPreRender;")

        with contextlib.nested(
                capsule.no_undo(),
                capsule.no_refresh(),
                capsule.evaluation("off"),
                capsule.maintained_selection(),
                capsule.ref_edit_unlock(),
                # (NOTE) Ensure attributes are unlocked
                capsule.attribute_states(file_node_attrs.keys(), lock=False),
                # Change to published path
                capsule.attribute_values(file_node_attrs),
                # Fixed render settings
                capsule.attribute_values(render_settings),
        ):
            cmds.select(nodes, replace=True)
            asses = cmds.arnoldExportAss(
                filename=outpath,
                selected=True,
                startFrame=start,
                endFrame=end,
                frameStep=step,
                expandProcedurals=expand_procedurals,
                boundingBox=True,
                # Mask:
                #      Shapes,
                #      Shaders,
                #      Override Nodes,
                #      Operators,
                #
                # mask=4152,  # No Color Manager
                mask=6200)  # With Color Manager

            # Change to environment var embedded path
            root = avalon.api.registered_root().replace("\\", "/")
            project = avalon.api.Session["AVALON_PROJECT"]

            for ass in asses:
                lines = list()
                has_change = False
                with open(ass, "r") as assf:
                    for line in assf.readlines():
                        if line.startswith(" filename "):
                            line = line.replace(root, "[AVALON_PROJECTS]", 1)
                            line = line.replace(project, "[AVALON_PROJECT]", 1)
                            has_change = True
                        lines.append(line)

                # Remove the color manager
                # (NOTE) If the Color Manager is included, rendering in
                #        Houdini or another DCC may raise an error.
                try:
                    s = lines.index("color_manager_syncolor\n")
                except ValueError:
                    # No color manager found
                    pass
                else:
                    e = lines.index("}\n", s) + 1
                    lines = lines[:s] + lines[e:]
                    has_change = True

                # Re-write
                if has_change:
                    with open(ass, "w") as assf:
                        assf.write("".join(lines))
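The mask values passed to cmds.arnoldExportAss appear to be bitwise sums of Arnold node-type flags. The flag values below are assumed from the Arnold SDK rather than taken from this code, but the arithmetic matches the comments above: 4152 exports shapes, shaders, overrides and operators, and adding the color-manager flag gives 6200.

# Assumed Arnold node-type bit flags (from the Arnold SDK, not this code)
AI_NODE_SHAPE = 0x0008          # 8
AI_NODE_SHADER = 0x0010         # 16
AI_NODE_OVERRIDE = 0x0020       # 32
AI_NODE_COLOR_MANAGER = 0x0800  # 2048
AI_NODE_OPERATOR = 0x1000       # 4096

MASK_NO_COLOR_MANAGER = (AI_NODE_SHAPE | AI_NODE_SHADER |
                         AI_NODE_OVERRIDE | AI_NODE_OPERATOR)            # 4152
MASK_WITH_COLOR_MANAGER = MASK_NO_COLOR_MANAGER | AI_NODE_COLOR_MANAGER  # 6200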
Example #8
    def export_ass(data, start, end, step):

        arnold_tx_settings = {
            "defaultArnoldRenderOptions.autotx": False,
            "defaultArnoldRenderOptions.use_existing_tiled_textures": True,
        }

        # Yeti
        if data["hasYeti"]:
            # In Deadline, this is a script job instead of a rendering job,
            # so the `pgYetiPreRender` Pre-Render MEL will not be triggered.
            # We need to call it ourselves, or Yeti will complain that the
            # cache temp dir does not exist.
            mel.eval("pgYetiPreRender;")

        with contextlib.nested(
                capsule.no_undo(),
                capsule.no_refresh(),
                capsule.evaluation("off"),
                capsule.maintained_selection(),
                capsule.ref_edit_unlock(),
                # (NOTE) Ensure attributes are unlocked
                capsule.attribute_states(data["fileNodeAttrs"].keys(),
                                         lock=False),
                # Change to published path
                capsule.attribute_values(data["fileNodeAttrs"]),
                # Disable auto TX update and use existing TX textures
                capsule.attribute_values(arnold_tx_settings),
        ):
            cmds.select(data["member"], replace=True)
            asses = cmds.arnoldExportAss(
                filename=data["cachePath"],
                selected=True,
                startFrame=start,
                endFrame=end,
                frameStep=step,
                expandProcedurals=True,
                boundingBox=True,
                # Mask:
                #      Shapes,
                #      Shaders,
                #      Override Nodes,
                #      Operators,
                #
                # (NOTE) If the Color Manager is included, rendering in
                #        Houdini or another DCC may raise an error.
                # mask=6200,  # With Color Manager
                #
                mask=4152)  # No Color Manager

            # Change to environment var embedded path
            root = avalon.api.registered_root().replace("\\", "/")
            project = avalon.api.Session["AVALON_PROJECT"]

            for ass in asses:
                lines = list()
                has_change = False
                with open(ass, "r") as assf:
                    for line in assf.readlines():
                        if line.startswith(" filename "):
                            line = line.replace(root, "[AVALON_PROJECTS]", 1)
                            line = line.replace(project, "[AVALON_PROJECT]", 1)
                            has_change = True
                        lines.append(line)

                if has_change:
                    with open(ass, "w") as assf:
                        assf.write("".join(lines))
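Examples #7 and #8 repeat the same line-by-line rewrite that swaps the absolute project root for Avalon environment tokens in the exported .ass files. A small helper like the sketch below (the function name is hypothetical) could factor out that loop:

def remap_ass_paths(ass_file, root, project):
    # Replace the absolute root/project prefixes on " filename " lines with
    # the [AVALON_PROJECTS]/[AVALON_PROJECT] tokens, rewriting only on change.
    lines = []
    changed = False
    with open(ass_file, "r") as f:
        for line in f:
            if line.startswith(" filename "):
                line = line.replace(root, "[AVALON_PROJECTS]", 1)
                line = line.replace(project, "[AVALON_PROJECT]", 1)
                changed = True
            lines.append(line)
    if changed:
        with open(ass_file, "w") as f:
            f.write("".join(lines))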
    def export_alembic(self, root, outpath, start, end, euler_filter):
        from reveries.maya import io, lib, capsule
        from maya import cmds

        with contextlib.nested(
                capsule.no_undo(),
                capsule.no_refresh(),
                capsule.evaluation("off"),
                capsule.maintained_selection(),
        ):
            # Selection may change if there are duplicate-named nodes that
            # need to be resolved by duplicating them.

            with capsule.delete_after() as delete_bin:

                # (NOTE) We need to check for duplicate-named nodes, or an
                #        error will be raised during Alembic export.
                result = lib.ls_duplicated_name(root)
                duplicated = [n for m in result.values() for n in m]
                if duplicated:
                    self.log.info("Duplicate named nodes found, resolving...")
                    # Duplicate it so we could have a unique named new node
                    unique_named = list()
                    for node in duplicated:
                        new_nodes = cmds.duplicate(node,
                                                   inputConnections=True,
                                                   renameChildren=True)
                        new_nodes = cmds.ls(new_nodes, long=True)
                        unique_named.append(new_nodes[0])
                        # New nodes will be deleted after the export
                        delete_bin.extend(new_nodes)

                    # Replace duplicate named nodes with unique named
                    root = list(set(root) - set(duplicated)) + unique_named

                for node in set(root):
                    # (NOTE) If a descendant is instanced, it will appear only
                    #        once in the list returned.
                    root += cmds.listRelatives(node,
                                               allDescendents=True,
                                               fullPath=True,
                                               noIntermediate=True) or []
                root = list(set(root))
                cmds.select(root, replace=True, noExpand=True)

                def _export_alembic():
                    io.export_alembic(
                        outpath,
                        start,
                        end,
                        selection=True,
                        renderableOnly=True,
                        writeVisibility=True,
                        writeCreases=True,
                        worldSpace=True,
                        eulerFilter=euler_filter,
                        attr=[
                            lib.AVALON_ID_ATTR_LONG,
                        ],
                        attrPrefix=[
                            "ai",  # Write out Arnold attributes
                            "avnlook_",  # Write out lookDev controls
                        ],
                    )

                auto_retry = 1
                while True:
                    try:
                        _export_alembic()
                    except RuntimeError as err:
                        if auto_retry:
                            # (NOTE) Auto-retry the export.
                            # For unknown reasons, some artists may hit a
                            # runtime error when exporting, yet re-running the
                            # publish without any change resolves it.
                            auto_retry -= 1
                            self.log.warning(err)
                            self.log.warning("Retrying...")
                        else:
                            raise err
                    else:
                        break
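The single-retry loop above could also be expressed as a small helper; the name run_with_retry is an assumption, not part of the pipeline.

def run_with_retry(func, retries=1, log=None):
    # Run func(), retrying up to `retries` times when it raises RuntimeError.
    while True:
        try:
            return func()
        except RuntimeError as err:
            if retries <= 0:
                raise
            retries -= 1
            if log is not None:
                log.warning(err)
                log.warning("Retrying...")

With such a helper, the export call would reduce to run_with_retry(_export_alembic, retries=1, log=self.log).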
    def extract_all(self,
                    cam_transform,
                    ma_outpath,
                    abc_outpath,
                    fbx_outpath,
                    start,
                    end,
                    step,
                    euler_filter,
                    do_bake,
                    donot_bake,
                    duplicate_input_graph=False):
        from maya import cmds
        from reveries.maya import io, lib, capsule

        with contextlib.nested(
                capsule.no_refresh(),
                capsule.no_undo(),
                capsule.attribute_states(donot_bake, lock=False,
                                         keyable=False),
                capsule.attribute_states(do_bake, lock=False, keyable=True),
                capsule.evaluation("off"),
        ):
            with capsule.delete_after() as delete_bin:

                # bake to worldspace
                frame_range = (start, end)
                baked_camera = lib.bake_to_world_space(
                    cam_transform,
                    frame_range,
                    step=step,
                    # Remove the baked attributes from anim layers so that all
                    # keys are baked out, as if the anim layers were merged.
                    remove_baked_attr_from_layer=True,
                    duplicate_input_graph=duplicate_input_graph)[0]
                delete_bin.append(baked_camera)

                cmds.select(
                    baked_camera,
                    hierarchy=True,  # With shape
                    replace=True,
                    noExpand=True)

                with avalon.maya.maintained_selection():
                    io.export_alembic(abc_outpath,
                                      start,
                                      end,
                                      eulerFilter=euler_filter)

                with capsule.undo_chunk_when_no_undo():
                    if euler_filter:
                        cmds.filterCurve(cmds.ls(sl=True))

                    with avalon.maya.maintained_selection():
                        cmds.file(
                            ma_outpath,
                            force=True,
                            typ="mayaAscii",
                            exportSelected=True,
                            preserveReferences=False,
                            constructionHistory=False,
                            channels=True,  # allow animation
                            constraints=False,
                            shader=False,
                            expressions=False)

                    with avalon.maya.maintained_selection():
                        io.export_fbx_set_camera()
                        io.export_fbx(fbx_outpath)
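To tie the pieces together, a hypothetical call to extract_all could look like the following; the paths and attribute lists are illustrative only, with do_bake and donot_bake built the same way as in Example #2.

cam = "|shotCam"
extractor.extract_all(
    cam_transform=cam,
    ma_outpath="/publish/camera.ma",
    abc_outpath="/publish/camera.abc",
    fbx_outpath="/publish/camera.fbx",
    start=1001,
    end=1100,
    step=1.0,
    euler_filter=True,
    do_bake=[cam + ".focalLength"],   # plus the transform attributes
    donot_bake=[cam + ".aiFov"],      # plus the other Arnold overrides
)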