def _via_web_service(self, payload):
        response = requests.post(self._url,
                                 json=payload,
                                 auth=tuple(self._auth))

        if not response.ok:
            raise Exception(response.text)

        # Parse the JSON body instead of eval()-ing arbitrary text
        jobid = response.json()["_id"]
        self.log.info("Success. JobID: %s" % jobid)
        return jobid
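
A minimal standalone sketch of the same submission step, parsing the JSON body with response.json(); the url and auth parameters are placeholders:

import requests

def submit_job(url, payload, auth=None, timeout=10):
    # POST the JobInfo/PluginInfo payload to a Deadline Web Service
    # endpoint, e.g. "http://localhost:8082/api/jobs" (placeholder)
    response = requests.post(url, json=payload, auth=auth, timeout=timeout)
    if not response.ok:
        raise RuntimeError(response.text)
    # Deadline returns the created job document as JSON
    return response.json()["_id"]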
Example 2
    def _requests_post(self, *args, **kwargs):
        """ Wrapper for requests, disabling SSL certificate validation if
            DONT_VERIFY_SSL environment variable is found. This is useful when
            Deadline or Muster server are running with self-signed certificates
            and their certificate is not added to trusted certificates on
            client machines.

            WARNING: disabling SSL certificate validation is defeating one line
            of defense SSL is providing and it is not recommended.
        """
        if 'verify' not in kwargs:
            # Verify by default; disable only when the environment
            # variable is actually set
            kwargs['verify'] = not os.getenv("PYPE_DONT_VERIFY_SSL")
        return requests.post(*args, **kwargs)
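
With validation disabled, urllib3 logs an InsecureRequestWarning on every request. A sketch of the same wrapper that also silences that warning (importing urllib3 directly is an assumption; the original may use the copy vendored with requests):

import os
import requests
import urllib3

def requests_post(*args, **kwargs):
    # Same convention as above: verify by default, disable only when
    # PYPE_DONT_VERIFY_SSL is set in the environment
    if "verify" not in kwargs:
        kwargs["verify"] = not os.getenv("PYPE_DONT_VERIFY_SSL")
    if not kwargs["verify"]:
        # Unverified HTTPS requests otherwise warn on every call
        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
    return requests.post(*args, **kwargs)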
Example 3
    def payload_submit(self,
                       instance,
                       script_path,
                       render_path,
                       exe_node_name,
                       response_data=None):
        render_dir = os.path.normpath(os.path.dirname(render_path))
        script_name = os.path.basename(script_path)
        jobname = "%s - %s" % (script_name, instance.name)

        output_filename_0 = self.preview_fname(render_path)

        if not response_data:
            response_data = {}

        try:
            # Ensure render folder exists
            os.makedirs(render_dir)
        except OSError:
            pass

        # define chunk and priority
        chunk_size = instance.data.get("deadlineChunkSize")
        if not chunk_size:
            chunk_size = self.deadline_chunk_size

        priority = instance.data.get("deadlinePriority")
        if not priority:
            priority = self.deadline_priority

        payload = {
            "JobInfo": {
                # Top-level group name
                "BatchName":
                script_name,

                # Asset dependency to wait for at least the scene file to sync.
                "AssetDependency0":
                script_path,

                # Job name, as seen in Monitor
                "Name":
                jobname,

                # Arbitrary username, for visualisation in Monitor
                "UserName":
                self._deadline_user,
                "Priority":
                priority,
                "ChunkSize":
                chunk_size,
                "Pool":
                self.deadline_pool,
                "SecondaryPool":
                self.deadline_pool_secondary,
                "Plugin":
                "Nuke",
                "Frames":
                "{start}-{end}".format(start=self._frame_start,
                                       end=self._frame_end),
                "Comment":
                self._comment,

                # Optional, enable double-click to preview rendered
                # frames from Deadline Monitor
                "OutputFilename0":
                output_filename_0.replace("\\", "/")
            },
            "PluginInfo": {
                # Input
                "SceneFile": script_path,

                # Output directory and filename
                "OutputFilePath": render_dir.replace("\\", "/"),
                # "OutputFilePrefix": render_variables["filename_prefix"],

                # Mandatory for Deadline
                "Version": self._ver.group(),

                # Resolve relative references
                "ProjectPath": script_path,
                "AWSAssetFile0": render_path,
                # Only the specific write node is rendered.
                "WriteNode": exe_node_name
            },

            # Mandatory for Deadline, may be empty
            "AuxFiles": []
        }

        if response_data.get("_id"):
            payload["JobInfo"].update({
                "JobType": "Normal",
                "BatchName": response_data["Props"]["Batch"],
                "JobDependency0": response_data["_id"],
                "ChunkSize": 99999999
            })

        # Include critical environment variables with submission
        keys = [
            "PYTHONPATH", "PATH", "AVALON_SCHEMA", "FTRACK_API_KEY",
            "FTRACK_API_USER", "FTRACK_SERVER", "PYBLISHPLUGINPATH",
            "NUKE_PATH", "TOOL_ENV", "PYPE_DEV"
        ]
        environment = dict(
            {key: os.environ[key]
             for key in keys if key in os.environ}, **api.Session)
        # self.log.debug("enviro: {}".format(pprint(environment)))
        for path in os.environ:
            if path.lower().startswith('pype_'):
                environment[path] = os.environ[path]

        # environment["PATH"] = os.environ["PATH"]
        # self.log.debug("enviro: {}".format(environment['PYPE_SCRIPTS']))
        clean_environment = {}
        for key, value in environment.items():
            clean_path = ""
            self.log.debug("key: {}".format(key))
            if "://" in value:
                clean_path = value
            else:
                valid_paths = []
                for path in value.split(os.pathsep):
                    if not path:
                        continue
                    try:
                        # Validate the path is UTF-8 (Python 2 str)
                        path.decode('UTF-8', 'strict')
                        valid_paths.append(os.path.normpath(path))
                    except UnicodeDecodeError:
                        self.log.warning(
                            "path contains non-UTF-8 characters: %s" % path)

                if valid_paths:
                    clean_path = os.pathsep.join(valid_paths)

            if key == "PYTHONPATH":
                clean_path = clean_path.replace('python2', 'python3')

            self.log.debug("clean path: {}".format(clean_path))
            clean_environment[key] = clean_path

        environment = clean_environment

        payload["JobInfo"].update({
            "EnvironmentKeyValue%d" % index:
            "{key}={value}".format(key=key, value=environment[key])
            for index, key in enumerate(environment)
        })

        plugin = payload["JobInfo"]["Plugin"]
        self.log.info("using render plugin : {}".format(plugin))

        self.log.info("Submitting..")
        self.log.info(json.dumps(payload, indent=4, sort_keys=True))

        # adding expectied files to instance.data
        self.expected_files(instance, render_path)
        self.log.debug("__ expectedFiles: `{}`".format(
            instance.data["expectedFiles"]))
        response = requests.post(self.deadline_url, json=payload, timeout=10)

        if not response.ok:
            raise Exception(response.text)

        return response
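
The EnvironmentKeyValueN comprehension above recurs throughout these submitters, and the next example calls a build_jobinfo_environment helper for the same purpose; a hypothetical sketch of such a helper:

def build_jobinfo_environment(environment):
    # Turn {"PATH": "/usr/bin"} into Deadline's numbered JobInfo keys,
    # e.g. {"EnvironmentKeyValue0": "PATH=/usr/bin"}
    return {
        "EnvironmentKeyValue%d" % index: "{}={}".format(key, value)
        for index, (key, value) in enumerate(sorted(environment.items()))
    }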
Example 4
    def process(self, instance):

        DEADLINE_REST_URL = api.Session.get("DEADLINE_REST_URL",
                                            "http://localhost:8082")
        assert DEADLINE_REST_URL, "Requires DEADLINE_REST_URL"

        context = instance.context

        deadline_url = "{}/api/jobs".format(DEADLINE_REST_URL)
        deadline_user = context.data.get("deadlineUser", getpass.getuser())

        filepath = context.data["currentFile"]
        filename = os.path.basename(filepath)
        task_name = "{} - {}".format(filename, instance.name)

        batch_name = "{} - (vrscene)".format(filename)

        # Get the output template for vrscenes
        vrscene_output = instance.data["vrsceneOutput"]

        # This is also the input file for the render job
        first_file = self.format_output_filename(instance, filename,
                                                 vrscene_output)

        start_frame = int(instance.data["startFrame"])
        end_frame = int(instance.data["endFrame"])

        # Primary job
        self.log.info("Submitting export job ..")

        payload = {
            "JobInfo": {
                # Top-level group name
                "BatchName":
                batch_name,

                # Job name, as seen in Monitor
                "Name":
                "Export {} [{}-{}]".format(task_name, start_frame, end_frame),

                # Arbitrary username, for visualisation in Monitor
                "UserName":
                deadline_user,
                "Plugin":
                "MayaBatch",
                "Frames":
                "{}-{}".format(start_frame, end_frame),
                "FramesPerTask":
                instance.data.get("framesPerTask", 1),
                "Comment":
                context.data.get("comment", ""),
                "OutputFilename0":
                os.path.dirname(first_file),
            },
            "PluginInfo": {

                # Renderer
                "Renderer": "vray",

                # Mandatory for Deadline
                "Version": cmds.about(version=True),

                # Input
                "SceneFile": filepath,
                "SkipExistingFrames": True,
                "UsingRenderLayers": True,
                "UseLegacyRenderLayers": True
            },

            # Mandatory for Deadline, may be empty
            "AuxFiles": []
        }

        environment = dict(AVALON_TOOLS="global;python36;maya2018")
        environment.update(api.Session.copy())

        jobinfo_environment = self.build_jobinfo_environment(environment)

        payload["JobInfo"].update(jobinfo_environment)

        self.log.info("Job Data:\n{}".format(json.dumps(payload)))

        response = requests.post(url=deadline_url, json=payload)
        if not response.ok:
            raise RuntimeError(response.text)

        # Secondary job
        # Store job to create dependency chain
        dependency = response.json()

        if instance.data["suspendRenderJob"]:
            self.log.info("Skipping render job and publish job")
            return

        self.log.info("Submitting render job ..")

        start_frame = int(instance.data["startFrame"])
        end_frame = int(instance.data["endFrame"])
        ext = instance.data.get("ext", "exr")

        # Create output directory for renders
        render_output = self.format_output_filename(instance,
                                                    filename,
                                                    instance.data["outputDir"],
                                                    dir=True)

        self.log.info("Render output: %s" % render_output)

        # Update output dir
        instance.data["outputDir"] = render_output

        # Format output file name
        sequence_filename = ".".join([instance.name, ext])
        output_filename = os.path.join(render_output, sequence_filename)

        # Ensure folder exists:
        if not os.path.exists(render_output):
            os.makedirs(render_output)

        payload_b = {
            "JobInfo": {
                "JobDependency0":
                dependency["_id"],
                "BatchName":
                batch_name,
                "Name":
                "Render {} [{}-{}]".format(task_name, start_frame, end_frame),
                "UserName":
                deadline_user,
                "Frames":
                "{}-{}".format(start_frame, end_frame),
                "Plugin":
                "Vray",
                "OverrideTaskExtraInfoNames":
                False,
                "OutputFilename0":
                render_ouput,
            },
            "PluginInfo": {
                "InputFilename": first_file,
                "OutputFilename": output_filename,
                "SeparateFilesPerFrame": True,
                "VRayEngine": "V-Ray",
                "Width": instance.data["resolution"][0],
                "Height": instance.data["resolution"][1],
            },
            "AuxFiles": [],
        }

        # Add vray renderslave to environment
        tools = environment["AVALON_TOOLS"] + ";vrayrenderslave"
        environment_b = deepcopy(environment)
        environment_b["AVALON_TOOLS"] = tools

        jobinfo_environment_b = self.build_jobinfo_environment(environment_b)
        payload_b["JobInfo"].update(jobinfo_environment_b)

        self.log.info(json.dumps(payload_b))

        # Post job to deadline
        response_b = requests.post(url=deadline_url, json=payload_b)
        if not response_b.ok:
            raise RuntimeError(response_b.text)

        # Add job for publish job
        if not instance.data.get("suspendPublishJob", False):
            instance.data["deadlineSubmissionJob"] = response_b.json()
Example 5
    def _submit_deadline_post_job(self, instance, job):
        """Submit publish job to Deadline.

        Deadline specific code separated from :meth:`process` for sake of
        more universal code. Muster post job is sent directly by Muster
        submitter, so this type of code isn't necessary for it.

        """
        data = instance.data.copy()
        subset = data["subset"]
        job_name = "{batch} - {subset} [publish image sequence]".format(
            batch=job["Props"]["Name"], subset=subset)

        output_dir = instance.data["outputDir"]
        # Convert output dir to `{root}/rest/of/path/...` with Anatomy
        success, rootless_path = (
            self.anatomy.find_root_template_from_path(output_dir))
        if not success:
            # `rootless_path` is not set to `output_dir` if none of roots match
            self.log.warning(
                ("Could not find root path for remapping \"{}\"."
                 " This may cause issues on farm.").format(output_dir))
            rootless_path = output_dir

        # Generate the payload for Deadline submission
        payload = {
            "JobInfo": {
                "Plugin": "Python",
                "BatchName": job["Props"]["Batch"],
                "Name": job_name,
                "JobDependency0": job["_id"],
                "UserName": job["Props"]["User"],
                "Comment": instance.context.data.get("comment", ""),
                "Priority": job["Props"]["Pri"],
                "Pool": self.deadline_pool,
                "OutputDirectory0": output_dir
            },
            "PluginInfo": {
                "Version": "3.6",
                "ScriptFile": _get_script(),
                "Arguments": "",
                "SingleFrameOnly": "True",
            },
            # Mandatory for Deadline, may be empty
            "AuxFiles": [],
        }

        # Transfer the environment from the original job to this dependent
        # job so they use the same environment
        metadata_filename = "{}_metadata.json".format(subset)
        metadata_path = os.path.join(rootless_path, metadata_filename)

        environment = job["Props"].get("Env", {})
        environment["PYPE_METADATA_FILE"] = metadata_path
        environment["AVALON_PROJECT"] = io.Session["AVALON_PROJECT"]
        environment["PYPE_LOG_NO_COLORS"] = "1"
        try:
            environment["PYPE_PYTHON_EXE"] = os.environ["PYPE_PYTHON_EXE"]
        except KeyError:
            # PYPE_PYTHON_EXE not set
            pass
        i = 0
        for key in environment:
            if key.upper() in self.enviro_filter:
                payload["JobInfo"].update({
                    "EnvironmentKeyValue%d" % i:
                    "{key}={value}".format(key=key, value=environment[key])
                })
                i += 1

        # remove secondary pool
        payload["JobInfo"].pop("SecondaryPool", None)

        self.log.info("Submitting Deadline job ...")
        # self.log.info(json.dumps(payload, indent=4, sort_keys=True))

        url = "{}/api/jobs".format(self.DEADLINE_REST_URL)
        response = requests.post(url, json=payload, timeout=10)
        if not response.ok:
            raise Exception(response.text)
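
Only whitelisted keys produce an EnvironmentKeyValueN entry, which is why the loop above keeps its own counter instead of using the enumerate index. A hypothetical comprehension-based variant:

def filtered_jobinfo_environment(environment, enviro_filter):
    # Forward only whitelisted variables to the farm job; the numbered
    # suffix must stay contiguous, hence enumerating the filtered keys
    whitelisted = (k for k in environment if k.upper() in enviro_filter)
    return {
        "EnvironmentKeyValue%d" % i: "{}={}".format(key, environment[key])
        for i, key in enumerate(whitelisted)
    }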
Example 6
    def _submit_deadline_post_job(self, instance, job):
        """
        Deadline specific code separated from :meth:`process` for sake of
        more universal code. Muster post job is sent directly by Muster
        submitter, so this type of code isn't necessary for it.
        """
        data = instance.data.copy()
        subset = data["subset"]
        job_name = "{batch} - {subset} [publish image sequence]".format(
            batch=job["Props"]["Name"], subset=subset
        )

        metadata_filename = "{}_metadata.json".format(subset)
        output_dir = instance.data["outputDir"]
        metadata_path = os.path.join(output_dir, metadata_filename)

        # Remap the local mount root to the network root, then swap the
        # network root for the pipeline's "{root}" token
        metadata_path = os.path.normpath(metadata_path)
        mount_root = os.path.normpath(os.environ["PYPE_STUDIO_PROJECTS_MOUNT"])
        network_root = os.environ["PYPE_STUDIO_PROJECTS_PATH"]
        metadata_path = metadata_path.replace(mount_root, network_root)
        metadata_path = metadata_path.replace(network_root, "{root}")

        # Generate the payload for Deadline submission
        payload = {
            "JobInfo": {
                "Plugin": "Python",
                "BatchName": job["Props"]["Batch"],
                "Name": job_name,
                "JobDependency0": job["_id"],
                "UserName": job["Props"]["User"],
                "Comment": instance.context.data.get("comment", ""),
                "Priority": job["Props"]["Pri"],
                "Pool": self.deadline_pool,
                "OutputDirectory0": output_dir
            },
            "PluginInfo": {
                "Version": "3.6",
                "ScriptFile": _get_script(),
                "Arguments": "",
                "SingleFrameOnly": "True",
            },
            # Mandatory for Deadline, may be empty
            "AuxFiles": [],
        }

        # Transfer the environment from the original job to this dependent
        # job so they use the same environment
        environment = job["Props"].get("Env", {})
        environment["PYPE_METADATA_FILE"] = metadata_path
        environment["AVALON_PROJECT"] = io.Session["AVALON_PROJECT"]
        i = 0
        for key in environment:
            if key.upper() in self.enviro_filter:
                payload["JobInfo"].update({
                    "EnvironmentKeyValue%d" % i:
                    "{key}={value}".format(key=key, value=environment[key])
                })
                i += 1

        # remove secondary pool
        payload["JobInfo"].pop("SecondaryPool", None)

        self.log.info("Submitting Deadline job ...")
        # self.log.info(json.dumps(payload, indent=4, sort_keys=True))

        url = "{}/api/jobs".format(self.DEADLINE_REST_URL)
        response = requests.post(url, json=payload)
        if not response.ok:
            raise Exception(response.text)
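
The two replace() calls above first remap the local mount prefix to the network root, then swap the network root for the pipeline's {root} token. A sketch of that remapping as a standalone function, with the two roots passed in rather than read from the PYPE_STUDIO_PROJECTS_* variables:

import os

def remap_to_root_token(path, mount_root, network_root):
    # Local mount prefix -> network root -> "{root}" token, so that
    # paths already under the network root also end up tokenised
    path = os.path.normpath(path)
    path = path.replace(os.path.normpath(mount_root), network_root)
    return path.replace(network_root, "{root}")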
Example 7
    def _submit_deadline_post_job(self, instance, job, instances):
        """Submit publish job to Deadline.

        Deadline specific code separated from :meth:`process` for sake of
        more universal code. Muster post job is sent directly by Muster
        submitter, so this type of code isn't necessary for it.

        """
        data = instance.data.copy()
        subset = data["subset"]
        job_name = "Publish - {subset}".format(subset=subset)

        output_dir = instance.data["outputDir"]

        # Generate the payload for Deadline submission
        payload = {
            "JobInfo": {
                "Plugin": "Python",
                "BatchName": job["Props"]["Batch"],
                "Name": job_name,
                "UserName": job["Props"]["User"],
                "Comment": instance.context.data.get("comment", ""),

                "Department": self.deadline_department,
                "ChunkSize": self.deadline_chunk_size,
                "Priority": job["Props"]["Pri"],

                "Group": self.deadline_group,
                "Pool": self.deadline_pool,
                "SecondaryPool": self.deadline_pool_secondary,

                "OutputDirectory0": output_dir
            },
            "PluginInfo": {
                "Version": self.plugin_python_version,
                "ScriptFile": _get_script(self.publishing_script),
                "Arguments": "",
                "SingleFrameOnly": "True",
            },
            # Mandatory for Deadline, may be empty
            "AuxFiles": [],
        }

        # add assembly jobs as dependencies
        if instance.data.get("tileRendering"):
            self.log.info("Adding tile assembly jobs as dependencies...")
            job_index = 0
            for assembly_id in instance.data.get("assemblySubmissionJobs"):
                payload["JobInfo"]["JobDependency{}".format(job_index)] = assembly_id  # noqa: E501
                job_index += 1
        else:
            payload["JobInfo"]["JobDependency0"] = job["_id"]

        # Transfer the environment from the original job to this dependent
        # job so they use the same environment
        metadata_path, rootless_metadata_path = self._create_metadata_path(
            instance)
        environment = job["Props"].get("Env", {})
        environment["PYPE_METADATA_FILE"] = rootless_metadata_path
        environment["AVALON_PROJECT"] = io.Session["AVALON_PROJECT"]
        environment["PYPE_LOG_NO_COLORS"] = "1"
        try:
            environment["PYPE_PYTHON_EXE"] = os.environ["PYPE_PYTHON_EXE"]
        except KeyError:
            # PYPE_PYTHON_EXE not set
            pass
        i = 0
        for key in environment:
            if key.upper() in self.enviro_filter:
                payload["JobInfo"].update({
                    "EnvironmentKeyValue%d" % i:
                    "{key}={value}".format(key=key, value=environment[key])
                })
                i += 1

        # remove secondary pool
        payload["JobInfo"].pop("SecondaryPool", None)

        self.log.info("Submitting Deadline job ...")
        # self.log.info(json.dumps(payload, indent=4, sort_keys=True))

        url = "{}/api/jobs".format(self.DEADLINE_REST_URL)
        response = requests.post(url, json=payload, timeout=10)
        if not response.ok:
            raise Exception(response.text)
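
For tile rendering the publish job above waits on every assembly job, numbering the JobDependencyN keys the same way the environment keys are numbered. A minimal sketch of that loop:

def add_job_dependencies(jobinfo, dependency_ids):
    # Deadline accepts JobDependency0..N, one upstream job id each
    for index, job_id in enumerate(dependency_ids):
        jobinfo["JobDependency{}".format(index)] = job_id
    return jobinfo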
Example 8
    def process(self, instance):

        context = instance.context

        key = "__hasRun{}".format(self.__class__.__name__)
        if context.data.get(key, False):
            return
        else:
            context.data[key] = True

        from avalon.fusion.lib import get_frame_path

        AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE",
                                          "http://localhost:8082")
        assert AVALON_DEADLINE, "Requires AVALON_DEADLINE"

        # Collect all saver instances in context that are to be rendered
        saver_instances = []
        for instance in context[:]:
            if self.families[0] not in instance.data.get("families"):
                # Allow only saver family instances
                continue

            if not instance.data.get("publish", True):
                # Skip inactive instances
                continue
            self.log.debug(instance.data["name"])
            saver_instances.append(instance)

        if not saver_instances:
            raise RuntimeError("No instances found for Deadline submittion")

        fusion_version = int(context.data["fusionVersion"])
        filepath = context.data["currentFile"]
        filename = os.path.basename(filepath)
        comment = context.data.get("comment", "")
        deadline_user = context.data.get("deadlineUser", getpass.getuser())

        # Documentation for keys available at:
        # https://docs.thinkboxsoftware.com
        #    /products/deadline/8.0/1_User%20Manual/manual
        #    /manual-submission.html#job-info-file-options
        payload = {
            "JobInfo": {
                # Top-level group name
                "BatchName":
                filename,

                # Job name, as seen in Monitor
                "Name":
                filename,

                # User, as seen in Monitor
                "UserName":
                deadline_user,

                # Use a default submission pool for Fusion
                "Pool":
                "fusion",
                "Plugin":
                "Fusion",
                "Frames":
                "{start}-{end}".format(start=int(context.data["startFrame"]),
                                       end=int(context.data["endFrame"])),
                "Comment":
                comment,
            },
            "PluginInfo": {
                # Input
                "FlowFile": filepath,

                # Mandatory for Deadline
                "Version": str(fusion_version),

                # Render in high quality
                "HighQuality": True,

                # Whether saver output should be checked after rendering
                # is complete
                "CheckOutput": True,

                # Proxy: higher numbers smaller images for faster test renders
                # 1 = no proxy quality
                "Proxy": 1,
            },

            # Mandatory for Deadline, may be empty
            "AuxFiles": []
        }

        # Enable going to rendered frames from Deadline Monitor
        for index, instance in enumerate(saver_instances):
            head, padding, tail = get_frame_path(instance.data["path"])
            path = "{}{}{}".format(head, "#" * padding, tail)
            folder, filename = os.path.split(path)
            payload["JobInfo"]["OutputDirectory%d" % index] = folder
            payload["JobInfo"]["OutputFilename%d" % index] = filename

        # Include critical variables with submission
        keys = [
            # TODO: This won't work if the slaves don't have access to
            # these paths, such as if slaves are running Linux and the
            # submitter is on Windows.
            "PYTHONPATH",
            "OFX_PLUGIN_PATH",
            "FUSION9_MasterPrefs"
        ]
        environment = dict(
            {key: os.environ[key]
             for key in keys if key in os.environ}, **api.Session)

        payload["JobInfo"].update({
            "EnvironmentKeyValue%d" % index:
            "{key}={value}".format(key=key, value=environment[key])
            for index, key in enumerate(environment)
        })

        self.log.info("Submitting..")
        self.log.info(json.dumps(payload, indent=4, sort_keys=True))

        # E.g. http://192.168.0.1:8082/api/jobs
        url = "{}/api/jobs".format(AVALON_DEADLINE)
        response = requests.post(url, json=payload)
        if not response.ok:
            raise Exception(response.text)

        # Store the response for dependent job submission plug-ins
        for instance in saver_instances:
            instance.data["deadlineSubmissionJob"] = response.json()
Example 9
    def process(self, instance):

        DEADLINE_REST_URL = os.environ.get("DEADLINE_REST_URL",
                                          "http://localhost:8082")
        assert DEADLINE_REST_URL, "Requires DEADLINE_REST_URL"

        context = instance.context
        workspace = context.data["workspaceDir"]

        filepath = None

        all_instances = []
        for result in context.data["results"]:
            if (result["instance"] is not None and
                    result["instance"] not in all_instances):
                all_instances.append(result["instance"])

        for inst in all_instances:
            self.log.debug(inst)
            if inst.data['family'] == 'scene':
                filepath = inst.data['destination_list'][0]

        if not filepath:
            filepath = context.data["currentFile"]

        self.log.debug(filepath)

        filename = os.path.basename(filepath)
        comment = context.data.get("comment", "")
        scene = os.path.splitext(filename)[0]
        dirname = os.path.join(workspace, "renders")
        renderlayer = instance.data['setMembers']       # rs_beauty
        renderlayer_name = instance.data['subset']      # beauty
        renderlayer_globals = instance.data["renderGlobals"]
        legacy_layers = renderlayer_globals["UseLegacyRenderLayers"]
        deadline_user = context.data.get("deadlineUser", getpass.getuser())
        jobname = "%s - %s" % (filename, instance.name)

        # Get the variables depending on the renderer
        render_variables = get_renderer_variables(renderlayer)
        output_filename_0 = preview_fname(folder=dirname,
                                          scene=scene,
                                          layer=renderlayer_name,
                                          padding=render_variables["padding"],
                                          ext=render_variables["ext"])

        try:
            # Ensure render folder exists
            os.makedirs(dirname)
        except OSError:
            pass

        # Documentation for keys available at:
        # https://docs.thinkboxsoftware.com
        #    /products/deadline/8.0/1_User%20Manual/manual
        #    /manual-submission.html#job-info-file-options
        payload = {
            "JobInfo": {
                # Top-level group name
                "BatchName": filename,

                # Job name, as seen in Monitor
                "Name": jobname,

                # Arbitrary username, for visualisation in Monitor
                "UserName": deadline_user,

                "Plugin": instance.data.get("mayaRenderPlugin", "MayaBatch"),
                "Frames": "{start}-{end}x{step}".format(
                    start=int(instance.data["startFrame"]),
                    end=int(instance.data["endFrame"]),
                    step=int(instance.data["byFrameStep"]),
                ),

                "Comment": comment,

                # Optional, enable double-click to preview rendered
                # frames from Deadline Monitor
                "OutputFilename0": output_filename_0.replace("\\", "/"),
            },
            "PluginInfo": {
                # Input
                "SceneFile": filepath,

                # Output directory and filename
                "OutputFilePath": dirname.replace("\\", "/"),
                "OutputFilePrefix": render_variables["filename_prefix"],

                # Mandatory for Deadline
                "Version": cmds.about(version=True),

                # Only render layers are considered renderable in this pipeline
                "UsingRenderLayers": True,

                # Use legacy Render Layer system
                "UseLegacyRenderLayers": legacy_layers,

                # Render only this layer
                "RenderLayer": renderlayer,

                # Determine which renderer to use from the file itself
                "Renderer": instance.data["renderer"],

                # Resolve relative references
                "ProjectPath": workspace,
            },

            # Mandatory for Deadline, may be empty
            "AuxFiles": []
        }

        # Include critical environment variables with submission
        keys = [
            # This will trigger `userSetup.py` on the slave
            # such that proper initialisation happens the same
            # way as it does on a local machine.
            # TODO(marcus): This won't work if the slaves don't
            # have access to these paths, such as if slaves are
            # running Linux and the submitter is on Windows.
            "PYTHONPATH",
            "PATH",

            "MTOA_EXTENSIONS_PATH",
            "MTOA_EXTENSIONS",
            "DYLD_LIBRARY_PATH",
            "MAYA_RENDER_DESC_PATH",
            "MAYA_MODULE_PATH",
            "ARNOLD_PLUGIN_PATH",
            "AVALON_SCHEMA",
            "FTRACK_API_KEY",
            "FTRACK_API_USER",
            "FTRACK_SERVER",
            "PYBLISHPLUGINPATH",

            # todo: This is a temporary fix for yeti variables
            "PEREGRINEL_LICENSE",
            "SOLIDANGLE_LICENSE",
            "ARNOLD_LICENSE"
            "MAYA_MODULE_PATH",
            "TOOL_ENV"
        ]
        environment = dict({key: os.environ[key] for key in keys
                            if key in os.environ}, **api.Session)
        #self.log.debug("enviro: {}".format(pprint(environment)))
        for path in os.environ:
            if path.lower().startswith('pype_'):
                environment[path] = os.environ[path]

        environment["PATH"] = os.environ["PATH"]
        self.log.debug("enviro: {}".format(environment['PYPE_SCRIPTS']))
        clean_environment = {}
        for key in environment:
            clean_path = ""
            self.log.debug("key: {}".format(key))
            to_process = environment[key]
            if key == "PYPE_STUDIO_CORE_MOUNT":
                clean_path = environment[key]
            elif "://" in environment[key]:
                clean_path = environment[key]
            elif os.pathsep not in to_process:
                try:
                    path = environment[key]
                    path.decode('UTF-8', 'strict')
                    clean_path = os.path.normpath(path)
                except UnicodeDecodeError:
                    self.log.warning(
                        "path contains non-UTF-8 characters: %s" % path)
            else:
                valid_paths = []
                for path in environment[key].split(os.pathsep):
                    try:
                        path.decode('UTF-8', 'strict')
                        valid_paths.append(os.path.normpath(path))
                    except UnicodeDecodeError:
                        self.log.warning(
                            "path contains non-UTF-8 characters: %s" % path)
                # Join the validated entries with a single separator
                clean_path = os.pathsep.join(valid_paths)

            if key == "PYTHONPATH":
                clean_path = clean_path.replace('python2', 'python3')
            clean_path = clean_path.replace(
                os.path.normpath(environment['PYPE_STUDIO_CORE_MOUNT']),
                os.path.normpath(environment['PYPE_STUDIO_CORE']))
            clean_environment[key] = clean_path

        environment = clean_environment

        payload["JobInfo"].update({
            "EnvironmentKeyValue%d" % index: "{key}={value}".format(
                key=key,
                value=environment[key]
            ) for index, key in enumerate(environment)
        })

        # Include optional render globals
        render_globals = instance.data.get("renderGlobals", {})
        payload["JobInfo"].update(render_globals)

        plugin = payload["JobInfo"]["Plugin"]
        self.log.info("using render plugin : {}".format(plugin))

        self.preflight_check(instance)

        self.log.info("Submitting..")
        self.log.info(json.dumps(payload, indent=4, sort_keys=True))

        # E.g. http://192.168.0.1:8082/api/jobs
        url = "{}/api/jobs".format(DEADLINE_REST_URL)
        response = requests.post(url, json=payload)
        if not response.ok:
            raise Exception(response.text)

        # Store output dir for unified publisher (filesequence)
        instance.data["outputDir"] = os.path.dirname(output_filename_0)
        instance.data["deadlineSubmissionJob"] = response.json()
Example 10
    def _submit_deadline_post_job(self, instance, job, instances):
        """Submit publish job to Deadline.

        Deadline specific code separated from :meth:`process` for sake of
        more universal code. Muster post job is sent directly by Muster
        submitter, so this type of code isn't necessary for it.

        """
        data = instance.data.copy()
        subset = data["subset"]
        job_name = "Publish - {subset}".format(subset=subset)

        # instance.data.get("subset") != instances[0]["subset"]
        # 'Main' vs 'renderMain'
        override_version = None
        instance_version = instance.data.get("version")  # take this if exists
        if instance_version != 1:
            override_version = instance_version
        output_dir = self._get_publish_folder(
            instance.context.data['anatomy'],
            deepcopy(instance.data["anatomyData"]), instance.data.get("asset"),
            instances[0]["subset"], 'render', override_version)

        # Transfer the environment from the original job to this dependent
        # job so they use the same environment
        metadata_path, rootless_metadata_path = \
            self._create_metadata_path(instance)

        environment = job["Props"].get("Env", {})
        environment["AVALON_PROJECT"] = io.Session["AVALON_PROJECT"]
        environment["AVALON_ASSET"] = io.Session["AVALON_ASSET"]
        environment["AVALON_TASK"] = io.Session["AVALON_TASK"]
        environment["AVALON_APP_NAME"] = os.environ.get("AVALON_APP_NAME")
        environment["PYPE_LOG_NO_COLORS"] = "1"
        environment["PYPE_USERNAME"] = instance.context.data["user"]
        environment["PYPE_PUBLISH_JOB"] = "1"
        environment["PYPE_RENDER_JOB"] = "0"

        args = ['publish', rootless_metadata_path]

        # Generate the payload for Deadline submission
        payload = {
            "JobInfo": {
                "Plugin": self.deadline_plugin,
                "BatchName": job["Props"]["Batch"],
                "Name": job_name,
                "UserName": job["Props"]["User"],
                "Comment": instance.context.data.get("comment", ""),
                "Department": self.deadline_department,
                "ChunkSize": self.deadline_chunk_size,
                "Priority": job["Props"]["Pri"],
                "Group": self.deadline_group,
                "Pool": self.deadline_pool,
                "SecondaryPool": self.deadline_pool_secondary,
                "OutputDirectory0": output_dir
            },
            "PluginInfo": {
                "Version": self.plugin_pype_version,
                "Arguments": " ".join(args),
                "SingleFrameOnly": "True",
            },
            # Mandatory for Deadline, may be empty
            "AuxFiles": [],
        }

        # add assembly jobs as dependencies
        if instance.data.get("tileRendering"):
            self.log.info("Adding tile assembly jobs as dependencies...")
            job_index = 0
            for assembly_id in instance.data.get("assemblySubmissionJobs"):
                payload["JobInfo"]["JobDependency{}".format(
                    job_index)] = assembly_id  # noqa: E501
                job_index += 1
        else:
            payload["JobInfo"]["JobDependency0"] = job["_id"]

        i = 0
        for key in environment:
            if key.upper() in self.enviro_filter:
                payload["JobInfo"].update({
                    "EnvironmentKeyValue%d" % i:
                    "{key}={value}".format(key=key, value=environment[key])
                })
                i += 1

        # remove secondary pool
        payload["JobInfo"].pop("SecondaryPool", None)

        self.log.info("Submitting Deadline job ...")

        url = "{}/api/jobs".format(self.deadline_url)
        response = requests.post(url, json=payload, timeout=10)
        if not response.ok:
            raise Exception(response.text)
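
_create_metadata_path itself is not shown in these examples; a hypothetical reconstruction based on how its two return values are used, with find_root_template standing in for the anatomy.find_root_template_from_path lookup seen in Example 5:

import os

def create_metadata_path(output_dir, subset, find_root_template):
    # Hypothetical: returns the local metadata path plus a rootless
    # "{root}/..." variant for the farm
    metadata_filename = "{}_metadata.json".format(subset)
    success, rootless_dir = find_root_template(output_dir)
    if not success:
        rootless_dir = output_dir  # fall back to the local path
    return (os.path.join(output_dir, metadata_filename),
            os.path.join(rootless_dir, metadata_filename))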
Example 11
    def fulfill(self, context, instances):

        assert "AVALON_DEADLINE" in avalon.api.Session, (
            "Environment variable missing: 'AVALON_DEADLINE'")

        AVALON_DEADLINE = avalon.api.Session["AVALON_DEADLINE"]

        workspace = context.data["workspaceDir"]
        fpath = context.data["currentMaking"]
        fname = os.path.basename(fpath)
        name, ext = os.path.splitext(fname)
        comment = context.data.get("comment", "")

        project = context.data["projectDoc"]

        project_id = str(project["_id"])[-4:].upper()
        project_code = project["data"].get("codename", project_id)

        asset = context.data["assetDoc"]["name"]

        batch_name = "({projcode}): [{asset}] {filename}"
        batch_name = batch_name.format(projcode=project_code,
                                       asset=asset,
                                       filename=fname)

        script_file = os.path.join(os.path.dirname(__file__), "scripts",
                                   "avalon_contractor_publish.py")

        # E.g. http://192.168.0.1:8082/api/jobs
        url = "{}/api/jobs".format(AVALON_DEADLINE)
        #
        # Documentation about RESTful api
        # https://docs.thinkboxsoftware.com/products/deadline/
        # 10.0/1_User%20Manual/manual/rest-jobs.html#rest-jobs-ref-label
        #
        # Documentation for keys available at:
        # https://docs.thinkboxsoftware.com
        #    /products/deadline/8.0/1_User%20Manual/manual
        #    /manual-submission.html#job-info-file-options

        auth = os.environ["AVALON_DEADLINE_AUTH"].split(":")

        # Grouping instances

        instance_group = dict()
        for instance in instances:
            dl_pool = instance.data["deadlinePool"]
            dl_group = instance.data["deadlineGroup"]
            dl_priority = instance.data["deadlinePriority"]

            group_key = (dl_pool, dl_group, dl_priority)

            if group_key not in instance_group:
                instance_group[group_key] = list()

            instance_group[group_key].append(instance)

        for settings, group in instance_group.items():
            dl_pool, dl_group, dl_priority = settings

            if len(group) == 1:
                instance = group[0]
                job_name = "{subset} v{version:0>3}".format(
                    subset=instance.data["subset"],
                    version=instance.data["versionNext"],
                )
            else:
                job_name = "queued %d subsets" % len(group)

            environment = dict()
            for instance in group:
                self.log.info("Adding instance: %s" % instance.data["subset"])
                environment.update(self.assemble_environment(instance))

            payload = {
                "JobInfo": {
                    "Plugin": "MayaBatch",
                    "BatchName": batch_name,  # Top-level group name
                    "Name": job_name,
                    "UserName": getpass.getuser(),
                    "MachineName": platform.node(),
                    "Comment": comment,
                    "Pool": dl_pool,
                    "Group": dl_group,
                    "Priority": dl_priority,
                    "ExtraInfo0": project["name"],
                },
                "PluginInfo": {
                    # Input
                    "SceneFile": fpath,
                    # Resolve relative references
                    "ProjectPath": workspace,
                    # Mandatory for Deadline
                    "Version": context.data["mayaVersion"],
                    "ScriptJob": True,
                    "ScriptFilename": script_file,
                },
                # Mandatory for Deadline, may be empty
                "AuxFiles": [],
                "IdOnly": True
            }

            payload["JobInfo"].update({
                "EnvironmentKeyValue%d" % index:
                "{key}={value}".format(key=key, value=environment[key])
                for index, key in enumerate(environment)
            })

            self.log.info("Submitting..")
            self.log.info(json.dumps(payload, indent=4, sort_keys=True))

            response = requests.post(url, json=payload, auth=tuple(auth))

            if response.ok:
                # Parse the JSON body instead of eval()-ing arbitrary text
                jobid = response.json()["_id"]
                self.log.info("Success. JobID: %s" % jobid)
            else:
                msg = response.text
                self.log.error(msg)
                raise Exception(msg)

        self.log.info("Completed.")
Example 12
    def process(self, instance):

        AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE",
                                          "http://localhost:8082")
        assert AVALON_DEADLINE, "Requires AVALON_DEADLINE"

        # Get a submission job
        job = instance.data.get("deadlineSubmissionJob")
        if not job:
            raise RuntimeError("Can't continue without valid deadline "
                               "submission prior to this plug-in.")

        data = instance.data.copy()
        subset = data["subset"]
        state = data.get("publishJobState", "Suspended")
        job_name = "{batch} - {subset} [publish image sequence]".format(
            batch=job["Props"]["Name"], subset=subset)

        # Add in start/end frame
        context = instance.context
        start = instance.data.get("startFrame", context.data["startFrame"])
        end = instance.data.get("endFrame", context.data["endFrame"])
        resources = []

        # Add in regex for sequence filename
        # This assumes the output files start with subset name and ends with
        # a file extension.
        if "ext" in instance.data:
            ext = re.escape(instance.data["ext"])
        else:
            ext = "\.\D+"

        regex = "^{subset}.*\d+{ext}$".format(subset=re.escape(subset),
                                              ext=ext)

        # Write metadata for publish job
        render_job = data.pop("deadlineSubmissionJob")
        metadata = {
            "regex": regex,
            "startFrame": start,
            "endFrame": end,
            "families": ["colorbleed.imagesequence"],

            # Optional metadata (for debugging)
            "metadata": {
                "instance": data,
                "job": job,
                "session": api.Session.copy()
            }
        }

        # Ensure output dir exists
        output_dir = instance.data["outputDir"]
        if not os.path.isdir(output_dir):
            os.makedirs(output_dir)

        if data.get("extendFrames", False):

            family = "colorbleed.imagesequence"
            override = data["overrideExistingFrame"]

            # override = data.get("overrideExistingFrame", False)
            out_file = render_job.get("OutFile")
            if not out_file:
                raise RuntimeError("OutFile not found in render job!")

            extension = os.path.splitext(out_file[0])[1]
            _ext = extension[1:]

            # Frame comparison
            prev_start = None
            prev_end = None
            resource_range = range(int(start), int(end) + 1)

            # Gather all the subset files (one subset per render pass!)
            subset_names = [data["subset"]]
            subset_names.extend(data.get("renderPasses", []))

            for subset_name in subset_names:
                version = get_latest_version(asset_name=data["asset"],
                                             subset_name=subset_name,
                                             family=family)

                # Set prev start / end frames for comparison
                if not prev_start and not prev_end:
                    prev_start = version["data"]["startFrame"]
                    prev_end = version["data"]["endFrame"]

                subset_resources = get_resources(version, _ext)
                resource_files = get_resource_files(subset_resources,
                                                    resource_range, override)

                resources.extend(resource_files)

            updated_start = min(start, prev_start)
            updated_end = max(end, prev_end)

            # Update metadata and instance start / end frame
            self.log.info("Updating start / end frame : "
                          "{} - {}".format(updated_start, updated_end))

            # TODO : Improve logic to get new frame range for the
            # publish job (publish_filesequence.py)
            # The current approach does not follow Pyblish logic, which is
            # based on Collect / Validate / Extract.

            # ---- Collect Plugins  ---
            # Collect Extend Frames - Only run if extendFrames is toggled
            # # # Store in instance:
            # # # Previous rendered files per subset based on frames
            # # # --> Add to instance.data[resources]
            # # # Update publish frame range

            # ---- Validate Plugins ---
            # Validate Extend Frames
            # # # Check if instance has the requirements to extend frames
            # There may be more items to add to this list;
            # please do so when fixing this.

            # Start frame
            metadata["startFrame"] = updated_start
            metadata["metadata"]["instance"]["startFrame"] = updated_start

            # End frame
            metadata["endFrame"] = updated_end
            metadata["metadata"]["instance"]["endFrame"] = updated_end

        metadata_filename = "{}_metadata.json".format(subset)
        metadata_path = os.path.join(output_dir, metadata_filename)
        with open(metadata_path, "w") as f:
            json.dump(metadata, f, indent=4, sort_keys=True)

        # Generate the payload for Deadline submission
        payload = {
            "JobInfo": {
                "Plugin": "Python",
                "BatchName": job["Props"]["Batch"],
                "Name": job_name,
                "JobType": "Normal",
                "JobDependency0": job["_id"],
                "UserName": job["Props"]["User"],
                "Comment": instance.context.data.get("comment", ""),
                "InitialStatus": state
            },
            "PluginInfo": {
                "Version": "3.6",
                "ScriptFile": _get_script(),
                "Arguments": '--path "{}"'.format(metadata_path),
                "SingleFrameOnly": "True"
            },

            # Mandatory for Deadline, may be empty
            "AuxFiles": []
        }

        # Transfer the environment from the original job to this dependent
        # job so they use the same environment
        environment = job["Props"].get("Env", {})
        payload["JobInfo"].update({
            "EnvironmentKeyValue%d" % index:
            "{key}={value}".format(key=key, value=environment[key])
            for index, key in enumerate(environment)
        })

        self.log.info("Submitting..")
        self.log.info(json.dumps(payload, indent=4, sort_keys=True))

        url = "{}/api/jobs".format(AVALON_DEADLINE)
        response = requests.post(url, json=payload)
        if not response.ok:
            raise Exception(response.text)

        # Copy files from previous render if extendFrame is True
        if data.get("extendFrames", False):

            self.log.info("Preparing to copy ..")
            import shutil

            dest_path = data["outputDir"]
            for source in resources:
                src_file = os.path.basename(source)
                dest = os.path.join(dest_path, src_file)
                shutil.copy(source, dest)

            self.log.info("Finished copying %i files" % len(resources))
Example 13
    def process(self, instance):

        AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE",
                                          "http://localhost:8082")
        assert AVALON_DEADLINE, "Requires AVALON_DEADLINE"

        context = instance.context
        workspace = context.data["workspaceDir"]
        filepath = context.data["currentFile"]
        filename = os.path.basename(filepath)
        comment = context.data.get("comment", "")
        scene = os.path.splitext(filename)[0]
        dirname = os.path.join(workspace, "renders")
        renderlayer = instance.data['setMembers']  # rs_beauty
        renderlayer_name = instance.name  # beauty
        renderlayer_globals = instance.data["renderGlobals"]
        legacy_layers = renderlayer_globals["UseLegacyRenderLayers"]
        deadline_user = context.data.get("deadlineUser", getpass.getuser())
        jobname = "%s - %s" % (filename, instance.name)

        # Get the variables depending on the renderer
        render_variables = get_renderer_variables(renderlayer)
        output_filename_0 = preview_fname(folder=dirname,
                                          scene=scene,
                                          layer=renderlayer_name,
                                          padding=render_variables["padding"],
                                          ext=render_variables["ext"])

        try:
            # Ensure render folder exists
            os.makedirs(dirname)
        except OSError:
            pass

        # Documentation for keys available at:
        # https://docs.thinkboxsoftware.com
        #    /products/deadline/8.0/1_User%20Manual/manual
        #    /manual-submission.html#job-info-file-options
        payload = {
            "JobInfo": {
                # Top-level group name
                "BatchName":
                filename,

                # Job name, as seen in Monitor
                "Name":
                jobname,

                # Arbitrary username, for visualisation in Monitor
                "UserName":
                deadline_user,
                "Plugin":
                instance.data.get("mayaRenderPlugin", "MayaBatch"),
                "Frames":
                "{start}-{end}x{step}".format(
                    start=int(instance.data["startFrame"]),
                    end=int(instance.data["endFrame"]),
                    step=int(instance.data["byFrameStep"]),
                ),
                "Comment":
                comment,

                # Optional, enable double-click to preview rendered
                # frames from Deadline Monitor
                "OutputFilename0":
                output_filename_0.replace("\\", "/"),
            },
            "PluginInfo": {
                # Input
                "SceneFile": filepath,

                # Output directory and filename
                "OutputFilePath": dirname.replace("\\", "/"),
                "OutputFilePrefix": render_variables["filename_prefix"],

                # Mandatory for Deadline
                "Version": cmds.about(version=True),

                # Only render layers are considered renderable in this pipeline
                "UsingRenderLayers": True,

                # Use legacy Render Layer system
                "UseLegacyRenderLayers": legacy_layers,

                # Render only this layer
                "RenderLayer": renderlayer,

                # Determine which renderer to use from the file itself
                "Renderer": instance.data["renderer"],

                # Resolve relative references
                "ProjectPath": workspace,
            },

            # Mandatory for Deadline, may be empty
            "AuxFiles": []
        }

        # Include critical environment variables with submission
        keys = [
            # This will trigger `userSetup.py` on the slave
            # such that proper initialisation happens the same
            # way as it does on a local machine.
            # TODO(marcus): This won't work if the slaves don't
            # have access to these paths, such as if slaves are
            # running Linux and the submitter is on Windows.
            "PYTHONPATH",

            # todo: This is a temporary fix for yeti variables
            "PEREGRINEL_LICENSE",
            "REDSHIFT_MAYAEXTENSIONSPATH",
            "VRAY_FOR_MAYA2018_PLUGINS_X64",
            "VRAY_PLUGINS_X64",
            "VRAY_USE_THREAD_AFFINITY",
            "MAYA_MODULE_PATH"
        ]
        environment = dict(
            {key: os.environ[key]
             for key in keys if key in os.environ}, **api.Session)

        PATHS = os.environ["PATH"].split(";")
        environment["PATH"] = ";".join(
            [p for p in PATHS if p.startswith("P:")])

        payload["JobInfo"].update({
            "EnvironmentKeyValue%d" % index:
            "{key}={value}".format(key=key, value=environment[key])
            for index, key in enumerate(environment)
        })

        # Include optional render globals
        render_globals = instance.data.get("renderGlobals", {})
        payload["JobInfo"].update(render_globals)

        self.log.info("using render plugin : {}".format(
            payload["JobInfo"]["Plugin"]))

        self.preflight_check(instance)

        self.log.info("Submitting..")
        self.log.info(json.dumps(payload, indent=4, sort_keys=True))

        # E.g. http://192.168.0.1:8082/api/jobs
        url = "{}/api/jobs".format(AVALON_DEADLINE)
        response = requests.post(url, json=payload)
        if not response.ok:
            raise Exception(response.text)

        # Store output dir for unified publisher (filesequence)
        instance.data["outputDir"] = os.path.dirname(output_filename_0)
        instance.data["deadlineSubmissionJob"] = response.json()
Example 14
    def _submit_deadline_post_job(self, instance, job):
        """
        Deadline specific code separated from :meth:`process` for sake of
        more universal code. Muster post job is sent directly by Muster
        submitter, so this type of code isn't necessary for it.
        """
        data = instance.data.copy()
        subset = data["subset"]
        state = data.get("publishJobState", "Suspended")
        job_name = "{batch} - {subset} [publish image sequence]".format(
            batch=job["Props"]["Name"], subset=subset)

        metadata_filename = "{}_metadata.json".format(subset)
        output_dir = instance.data["outputDir"]
        metadata_path = os.path.join(output_dir, metadata_filename)
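        # e.g. "<outputDir>/renderCompositingMain_metadata.json" (subset name
        # illustrative); this path is handed to the publish script via the
        # --paths argument below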

        # Generate the payload for Deadline submission
        payload = {
            "JobInfo": {
                "Plugin": "Python",
                "BatchName": job["Props"]["Batch"],
                "Name": job_name,
                "JobType": "Normal",
                "JobDependency0": job["_id"],
                "UserName": job["Props"]["User"],
                "Comment": instance.context.data.get("comment", ""),
                "InitialStatus": state,
                "Priority": job["Props"]["Pri"]
            },
            "PluginInfo": {
                "Version": "3.6",
                "ScriptFile": _get_script(),
                "Arguments": '--paths "{}"'.format(metadata_path),
                "SingleFrameOnly": "True"
            },

            # Mandatory for Deadline, may be empty
            "AuxFiles": []
        }

        # Transfer the environment from the original job to this dependent
        # job so they use the same environment

        environment = job["Props"].get("Env", {})
        i = 0
        for key in environment:
            self.log.info("KEY: {}".format(key))
            self.log.info("FILTER: {}".format(self.enviro_filter))

            if key.upper() in self.enviro_filter:
                payload["JobInfo"].update({
                    "EnvironmentKeyValue%d" % i:
                    "{key}={value}".format(key=key, value=environment[key])
                })
                i += 1

        # Avoid copied pools and remove secondary pool
        payload["JobInfo"]["Pool"] = "none"
        payload["JobInfo"].pop("SecondaryPool", None)

        self.log.info("Submitting..")
        self.log.info(json.dumps(payload, indent=4, sort_keys=True))

        url = "{}/api/jobs".format(self.DEADLINE_REST_URL)
        response = requests.post(url, json=payload)
        if not response.ok:
            raise Exception(response.text)
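A minimal sketch of how this helper is chained in practice, assuming a
`process` method like the surrounding examples that has already stored the
render submission response on the instance:

    job = instance.data["deadlineSubmissionJob"]
    self._submit_deadline_post_job(instance, job)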
Example 15
    def process(self, instance):
        import os
        import json
        import shutil
        import getpass

        from maya import cmds

        from avalon import api
        from avalon.vendor import requests

        assert "AVALON_DEADLINE" in api.Session, (
            "Environment variable missing: 'AVALON_DEADLINE")

        AVALON_DEADLINE = api.Session["AVALON_DEADLINE"]

        context = instance.context
        workspace = context.data["workspaceDir"]
        fpath = context.data["currentFile"]
        fname = os.path.basename(fpath)
        name, ext = os.path.splitext(fname)
        comment = context.data.get("comment", "")
        dirname = os.path.join(workspace, "renders", name)

        try:
            os.makedirs(dirname)
        except OSError:
            pass

        # E.g. http://192.168.0.1:8082/api/jobs
        url = "{}/api/jobs".format(AVALON_DEADLINE)

        # Documentation for keys available at:
        # https://docs.thinkboxsoftware.com
        #    /products/deadline/8.0/1_User%20Manual/manual
        #    /manual-submission.html#job-info-file-options
        payload = {
            "JobInfo": {
                # Top-level group name
                "BatchName":
                fname,

                # Job name, as seen in Monitor
                "Name":
                "%s - %s" % (fname, instance.name),

                # Arbitrary username, for visualisation in Monitor
                "UserName":
                getpass.getuser(),
                "Plugin":
                "MayaBatch",
                "Frames":
                "{start}-{end}x{step}".format(
                    start=int(instance.data["startFrame"]),
                    end=int(instance.data["endFrame"]),
                    step=int(instance.data["byFrameStep"]),
                ),
                "Comment":
                comment,

                # Optional, enable double-click to preview rendered
                # frames from Deadline Monitor
                "OutputFilename0":
                self.preview_fname(instance),
            },
            "PluginInfo": {
                # Input
                "SceneFile": fpath,

                # Output directory and filename
                "OutputFilePath": dirname,
                "OutputFilePrefix": "<RenderLayer>/<RenderLayer>",

                # Mandatory for Deadline
                "Version": cmds.about(version=True),

                # Only render layers are considered renderable in this pipeline
                "UsingRenderLayers": True,

                # Render only this layer
                "RenderLayer": instance.name,

                # Determine which renderer to use from the file itself
                "Renderer": "file",

                # Resolve relative references
                "ProjectPath": workspace,
            },

            # Mandatory for Deadline, may be empty
            "AuxFiles": []
        }

        # Include critical variables with submission
        environment = dict(
            {
                # This will trigger `userSetup.py` on the slave
                # such that proper initialisation happens the same
                # way as it does on a local machine.
                # TODO(marcus): This won't work if the slaves don't
                # have access to these paths, such as if slaves are
                # running Linux and the submitter is on Windows.
                "PYTHONPATH": os.getenv("PYTHONPATH", ""),
            },
            **api.Session)

        payload["JobInfo"].update({
            "EnvironmentKeyValue%d" % index:
            "{key}={value}".format(key=key, value=environment[key])
            for index, key in enumerate(environment)
        })

        # Include optional render globals
        payload["JobInfo"].update(instance.data.get("renderGlobals", {}))

        self.preflight_check(instance)

        self.log.info("Submitting..")
        self.log.info(json.dumps(payload, indent=4, sort_keys=True))

        response = requests.post(url, json=payload)

        if response.ok:
            # Write metadata for publish
            fname = os.path.join(dirname, instance.name + ".json")
            data = {
                "submission": payload,
                "session": api.Session,
                "instance": instance.data,
                "jobs": [response.json()],
            }

            with open(fname, "w") as f:
                json.dump(data, f, indent=4, sort_keys=True)

        else:
            try:
                shutil.rmtree(dirname)
            except OSError:
                # This is nice-to-have, but not critical to the operation
                pass

            raise Exception(response.text)
Example 16
    def payload_submit(self,
                       instance,
                       script_path,
                       render_path
                       ):
        resolution_width = instance.data["resolutionWidth"]
        resolution_height = instance.data["resolutionHeight"]
        render_dir = os.path.normpath(os.path.dirname(render_path))
        render_path = os.path.normpath(render_path)
        script_name = os.path.basename(script_path)
        jobname = "%s - %s" % (script_name, instance.name)

        output_filename_0 = self.preview_fname(render_path)

        try:
            # Ensure render folder exists
            os.makedirs(render_dir)
        except OSError:
            pass

        # define chunk size, falling back to the plugin default
        chunk_size = instance.context.data.get("chunk")
        if not chunk_size:
            chunk_size = self.deadline_chunk_size

        # find the frame padding token (e.g. ".%04d.") and its width
        search_results = re.search(r"(.%0)(\d)(d)[._]", render_path)
        if not search_results:
            raise ValueError(
                f"Render path has no frame padding token: {render_path}")
        split_pattern = "".join(search_results.groups())
        padding_number = int(search_results.group(2))

        args = [
            f"<QUOTE>{script_path}<QUOTE>",
            "-a",
            "-16",
            "-s <STARTFRAME>",
            "-e <ENDFRAME>",
            f"-d <QUOTE>{render_dir}<QUOTE>",
            f"-x {resolution_width}",
            f"-y {resolution_height}",
            f"-r <QUOTE>{render_path.replace(split_patern, '')}<QUOTE>",
            f"-= AbsoluteFrameNumber=on -= PadDigits={padding_number}",
            "-= ClearAttachment=on",
        ]
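        # Illustrative only: after Deadline expands <QUOTE>, <STARTFRAME> and
        # <ENDFRAME>, the joined arguments resemble (hypothetical values):
        #   "X:/job/shot.scn" -a -16 -s 1001 -e 1100 -d "X:/job/render"
        #   -x 1920 -y 1080 -r "X:/job/render/shot" -= AbsoluteFrameNumber=on
        #   -= PadDigits=4 -= ClearAttachment=on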

        payload = {
            "JobInfo": {
                # Job name, as seen in Monitor
                "Name": jobname,

                # plugin definition
                "Plugin": "CelAction",

                # Top-level group name
                "BatchName": script_name,

                # Arbitrary username, for visualisation in Monitor
                "UserName": self._deadline_user,

                "Department": self.deadline_department,
                "Priority": self.deadline_priority,

                "Group": self.deadline_group,
                "Pool": self.deadline_pool,
                "SecondaryPool": self.deadline_pool_secondary,
                "ChunkSize": chunk_size,

                "Frames": f"{self._frame_start}-{self._frame_end}",
                "Comment": self._comment,

                # Optional, enable double-click to preview rendered
                # frames from Deadline Monitor
                "OutputFilename0": output_filename_0.replace("\\", "/"),

                # Asset dependency to wait for at least the scene file to sync
                # "AssetDependency0": script_path,
                "ScheduledType": "Once",
                "JobDelay": "00:00:08:00"
            },
            "PluginInfo": {
                # Input
                "SceneFile": script_path,

                # Output directory
                "OutputFilePath": render_dir.replace("\\", "/"),

                # Plugin attributes
                "StartupDirectory": "",
                "Arguments": " ".join(args),

                # Resolve relative references
                "ProjectPath": script_path,
                "AWSAssetFile0": render_path,
            },

            # Mandatory for Deadline, may be empty
            "AuxFiles": []
        }

        plugin = payload["JobInfo"]["Plugin"]
        self.log.info("using render plugin : {}".format(plugin))

        i = 0
        for key, values in dict(os.environ).items():
            if key.upper() in self.enviro_filter:
                payload["JobInfo"].update(
                    {
                        "EnvironmentKeyValue%d"
                        % i: "{key}={value}".format(
                            key=key, value=values
                        )
                    }
                )
                i += 1

        self.log.info("Submitting..")
        self.log.info(json.dumps(payload, indent=4, sort_keys=True))

        # adding expected files to instance.data
        self.expected_files(instance, render_path)
        self.log.debug("__ expectedFiles: `{}`".format(
            instance.data["expectedFiles"]))
        response = requests.post(self.deadline_url, json=payload)

        if not response.ok:
            raise Exception(response.text)

        return response
Example 17
    def process(self, instance):

        node = None
        for x in instance:
            if x.Class() == "Write":
                node = x

        if node is None:
            return

        # Default to a local Deadline Web Service when the variable is unset
        DEADLINE_REST_URL = os.environ.get("DEADLINE_REST_URL",
                                           "http://localhost:8082")

        context = instance.context
        workspace = os.path.dirname(context.data["currentFile"])

        # get paths
        output_dir = instance.data['outputDir']
        filepath = context.data["currentFile"]

        self.log.debug(filepath)

        filename = os.path.basename(filepath)
        comment = context.data.get("comment", "")
        dirname = os.path.join(workspace, "renders")
        deadline_user = context.data.get("deadlineUser", getpass.getuser())
        jobname = "%s - %s" % (filename, instance.name)
        ver = re.search(r"\d+\.\d+", context.data.get("hostVersion"))

        try:
            # Ensure render folder exists
            os.makedirs(dirname)
        except OSError:
            pass

        # Documentation for keys available at:
        # https://docs.thinkboxsoftware.com
        #    /products/deadline/8.0/1_User%20Manual/manual
        #    /manual-submission.html#job-info-file-options
        payload = {
            "JobInfo": {
                # Top-level group name
                "BatchName":
                filename,

                # Job name, as seen in Monitor
                "Name":
                jobname,

                # Arbitrary username, for visualisation in Monitor
                "UserName":
                deadline_user,
                "Plugin":
                "Nuke",
                "Frames":
                "{start}-{end}".format(start=int(instance.data["frameStart"]),
                                       end=int(instance.data["frameEnd"])),
                "ChunkSize":
                instance.data["deadlineChunkSize"],
                "Priority":
                instance.data["deadlinePriority"],
                "Comment":
                comment,

                # Optional, enable double-click to preview rendered
                # frames from Deadline Monitor
                # "OutputFilename0": output_filename_0.replace("\\", "/"),
            },
            "PluginInfo": {
                # Input
                "SceneFile": filepath,

                # Output directory and filename
                "OutputFilePath": dirname.replace("\\", "/"),
                # "OutputFilePrefix": render_variables["filename_prefix"],

                # Mandatory for Deadline
                "Version": ver.group(),

                # Resolve relative references
                "ProjectPath": workspace,

                # Only the Write node found above is rendered.
                "WriteNode": node.name()
            },

            # Mandatory for Deadline, may be empty
            "AuxFiles": []
        }

        # Include critical environment variables with submission
        keys = [
            # This will trigger `userSetup.py` on the slave
            # such that proper initialisation happens the same
            # way as it does on a local machine.
            # TODO(marcus): This won't work if the slaves don't
            # have access to these paths, such as if slaves are
            # running Linux and the submitter is on Windows.
            "PYTHONPATH",
            "PATH",
            "AVALON_SCHEMA",
            "FTRACK_API_KEY",
            "FTRACK_API_USER",
            "FTRACK_SERVER",
            "PYBLISHPLUGINPATH",
            "NUKE_PATH",
            "TOOL_ENV"
        ]
        environment = dict(
            {key: os.environ[key]
             for key in keys if key in os.environ}, **api.Session)
        # self.log.debug("enviro: {}".format(pprint(environment)))
        for key in os.environ:
            if key.lower().startswith('pype_'):
                environment[key] = os.environ[key]

        environment["PATH"] = os.environ["PATH"]
        # self.log.debug("enviro: {}".format(environment['PYPE_SCRIPTS']))
        clean_environment = {}
        for key in environment:
            clean_path = ""
            self.log.debug("key: {}".format(key))
            to_process = environment[key]
            if key == "PYPE_STUDIO_CORE_MOUNT":
                clean_path = environment[key]
            elif "://" in environment[key]:
                clean_path = environment[key]
            elif os.pathsep not in to_process:
                try:
                    path = environment[key]
                    # Python 3 str is already unicode; round-trip through
                    # UTF-8 to reject values that cannot be encoded
                    path.encode('utf-8', 'strict')
                    clean_path = os.path.normpath(path)
                except UnicodeEncodeError:
                    self.log.warning('path contains non UTF-8 characters')
            else:
                for path in environment[key].split(os.pathsep):
                    try:
                        path.encode('utf-8', 'strict')
                        clean_path += os.path.normpath(path) + os.pathsep
                    except UnicodeEncodeError:
                        self.log.warning('path contains non UTF-8 characters')

            if key == "PYTHONPATH":
                # swap python2 for python3 path segments
                # (site-specific convention)
                clean_path = clean_path.replace('python2', 'python3')
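            # Remap the submitter-side studio mount to the farm-side path
            # so slaves resolve the same locations (site-specific setup)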
            clean_path = clean_path.replace(
                os.path.normpath(
                    environment['PYPE_STUDIO_CORE_MOUNT']),  # noqa
                os.path.normpath(environment['PYPE_STUDIO_CORE_PATH']))  # noqa
            clean_environment[key] = clean_path

        environment = clean_environment

        payload["JobInfo"].update({
            "EnvironmentKeyValue%d" % index:
            "{key}={value}".format(key=key, value=environment[key])
            for index, key in enumerate(environment)
        })

        plugin = payload["JobInfo"]["Plugin"]
        self.log.info("using render plugin : {}".format(plugin))

        self.preflight_check(instance)

        self.log.info("Submitting..")
        self.log.info(json.dumps(payload, indent=4, sort_keys=True))

        # E.g. http://192.168.0.1:8082/api/jobs
        url = "{}/api/jobs".format(DEADLINE_REST_URL)
        response = requests.post(url, json=payload)
        if not response.ok:
            raise Exception(response.text)

        # Store submission data for the unified publisher (filesequence)
        instance.data["deadlineSubmissionJob"] = response.json()
        instance.data["publishJobState"] = "Active"