Example #1
0
    def execute(self):
        """Wrap the host executable with OpenPype's non-python launch script.

        Rebuilds ``launch_context.launch_args`` so that OpenPype's ``run``
        command executes ``non_python_host_launch.py`` with the original
        executable (and the last workfile, when it exists on disk) appended
        as a single argument group.
        """
        launch_args = self.launch_context.launch_args

        # First argument is the host executable.
        executable_path = launch_args.pop(0)

        # Drain any remaining arguments - none are expected here.
        leftover_args = launch_args[:]
        del launch_args[:]

        script_path = os.path.join(
            OPENPYPE_DIR,
            "scripts",
            "non_python_host_launch.py"
        )

        wrapped_args = get_pype_execute_args(
            "run", script_path, executable_path
        )

        # Only pass the last workfile when it actually exists on disk.
        workfile_path = self.data["last_workfile_path"]
        if os.path.exists(workfile_path):
            wrapped_args.append(workfile_path)

        # Appended as one list so these arguments are never separated.
        launch_args.append(wrapped_args)

        if leftover_args:
            launch_args.extend(leftover_args)
Example #2
0
    def execute(self):
        """Wrap the TVPaint executable with OpenPype's launch script.

        Rebuilds ``launch_context.launch_args`` so that OpenPype's ``run``
        command starts the launch script with the original executable (and
        the workfile, when one is available) appended as a single argument
        group. Unexpected extra arguments are logged and kept at the end.
        """
        launch_args = self.launch_context.launch_args

        # First argument is the TVPaint executable.
        executable_path = launch_args.pop(0)

        # Drain any remaining arguments - none are expected here.
        leftover_args = launch_args[:]
        del launch_args[:]

        wrapped_args = get_pype_execute_args(
            "run", self.launch_script_path(), executable_path
        )

        # Pass the workfile to the launch script when one is available.
        workfile_path = self.workfile_path()
        if workfile_path:
            wrapped_args.append(workfile_path)

        # Appended as one list so these arguments are never separated.
        launch_args.append(wrapped_args)

        if leftover_args:
            self.log.warning((
                "There are unexpected launch arguments in TVPaint launch. {}"
            ).format(str(leftover_args)))
            launch_args.extend(leftover_args)
Example #3
0
def cli_publish(data, publish_paths, gui=True):
    """Run publishing in a separate OpenPype subprocess.

    Context *data* is dumped to a temporary json file whose path is passed
    to the subprocess through the ``SAPUBLISH_INPATH`` environment
    variable; after the subprocess finishes the (possibly updated) json is
    read back and logged.

    Args:
        data (dict): Publishing context data. ``data["family"]`` set to
            "editorial" suspends pyblish logs in the subprocess.
        publish_paths (list): Publish plugin paths, joined into the
            ``PUBLISH_PATHS`` environment variable.
        gui (bool): Unused here; kept for backward compatibility.

    Returns:
        bool: Always False (kept for backward compatibility with callers).
    """
    PUBLISH_SCRIPT_PATH = os.path.join(
        os.path.dirname(os.path.dirname(__file__)), "publish.py")
    io.install()

    # Create hash name folder in temp
    chars = "".join([random.choice(string.ascii_letters) for i in range(15)])
    staging_dir = tempfile.mkdtemp(chars)

    # Create json inside the staging dir and fill it with data.
    # BUGFIX: path was previously built by string concatenation without a
    # separator, so the file ended up outside the staging directory.
    json_data_path = os.path.join(
        staging_dir, os.path.basename(staging_dir) + '.json')
    with open(json_data_path, 'w') as outfile:
        json.dump(data, outfile)

    envcopy = os.environ.copy()
    envcopy["PYBLISH_HOSTS"] = "standalonepublisher"
    envcopy["SAPUBLISH_INPATH"] = json_data_path
    envcopy["PYBLISHGUI"] = "pyblish_pype"
    envcopy["PUBLISH_PATHS"] = os.pathsep.join(publish_paths)
    if data.get("family", "").lower() == "editorial":
        envcopy["PYBLISH_SUSPEND_LOGS"] = "1"

    project_name = os.environ["AVALON_PROJECT"]
    # BUGFIX: the return value was previously assigned to an unused
    # `env_copy` variable while the unmodified `envcopy` was passed to the
    # subprocess, so project environments never applied.
    envcopy = apply_project_environments_value(project_name, envcopy)

    args = get_pype_execute_args("run", PUBLISH_SCRIPT_PATH)
    # BUGFIX: the return code was previously discarded (immediately
    # overwritten) - surface a non-zero exit so failures are visible.
    return_code = execute(args, env=envcopy)
    if return_code != 0:
        log.warning(
            "Publish process exited with code {}".format(return_code))

    result = {}
    if os.path.exists(json_data_path):
        with open(json_data_path, "r") as f:
            result = json.load(f)

    log.info(f"Publish result: {result}")

    io.uninstall()

    return False
Example #4
0
    def run(self):
        """Thread body: launch the subprocess and serve its TCP connection.

        Binds a localhost TCP socket (incrementing ``self.port`` until a
        free port is found), launches the OpenPype subprocess with the
        chosen port as its final argument, then accepts a connection and
        handles incoming data until stopped or ``MAX_TIMEOUT`` of
        inactivity passes. Sets ``self.finished`` when done.
        """
        self._is_running = True
        # Timestamp of last socket activity; compared against MAX_TIMEOUT.
        time_socket = time.time()
        # Create a TCP/IP socket
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sock = sock

        # Bind the socket to the port - skip already used ports
        while True:
            try:
                server_address = ("localhost", self.port)
                sock.bind(server_address)
                break
            except OSError:
                # Port already in use - try the next one.
                self.port += 1

        self.log.debug(
            "Running Socked thread on {}:{}".format(*server_address))

        env = os.environ.copy()
        env["OPENPYPE_PROCESS_MONGO_ID"] = str(Logger.mongo_process_id)
        # OpenPype executable (with path to start script if not build)
        args = get_pype_execute_args(
            # Add `run` command
            "run",
            self.filepath,
            *self.additional_args,
            str(self.port))
        self.subproc = subprocess.Popen(args, env=env, stdin=subprocess.PIPE)

        # Listen for incoming connections
        sock.listen(1)
        # Short accept timeout so the stop flag is re-checked every second.
        sock.settimeout(1.0)
        while True:
            if not self._is_running:
                break
            try:
                connection, client_address = sock.accept()
                time_socket = time.time()
                connection.settimeout(1.0)
                self.connection = connection

            except socket.timeout:
                # No client yet - give up after MAX_TIMEOUT without one.
                if (time.time() - time_socket) > self.MAX_TIMEOUT:
                    self.log.error("Connection timeout passed. Terminating.")
                    self._is_running = False
                    self.subproc.terminate()
                    break
                continue

            try:
                # Timestamp of last received payload on this connection.
                time_con = time.time()
                # Receive the data in small chunks and retransmit it
                while True:
                    try:
                        if not self._is_running:
                            break
                        data = None
                        try:
                            data = self.get_data_from_con(connection)
                            time_con = time.time()

                        except socket.timeout:
                            # Idle connection - terminate after MAX_TIMEOUT.
                            if (time.time() - time_con) > self.MAX_TIMEOUT:
                                self.log.error(
                                    "Connection timeout passed. Terminating.")
                                self._is_running = False
                                self.subproc.terminate()
                                break
                            continue

                        except ConnectionResetError:
                            # Client went away - stop serving.
                            self._is_running = False
                            break

                        self._handle_data(connection, data)

                    except Exception as exc:
                        # Keep the loop alive; log with traceback so the
                        # failure can be diagnosed later.
                        self.log.error("Event server process failed",
                                       exc_info=True)

            finally:
                # Clean up the connection
                connection.close()
                if self.subproc.poll() is None:
                    self.subproc.terminate()

                self.finished = True
Example #5
0
    def main_process(self, instance):
        """Render burnins into the instance's review representations.

        Finds a burnin profile matching current host/task/family, then for
        each valid representation runs the external burnin script (one
        subprocess per burnin definition), appending the burned-in outputs
        as new representations and removing the source representation and
        its input files afterwards.

        Args:
            instance: Pyblish instance with ``data["representations"]``
                and ``context.data["anatomy"]``.
        """
        # TODO get these data from context
        host_name = os.environ["AVALON_APP"]
        task_name = os.environ["AVALON_TASK"]
        family = self.main_family_from_instance(instance)

        # Find profile most matching current host, task and instance family
        profile = self.find_matching_profile(host_name, task_name, family)
        if not profile:
            self.log.info(
                ("Skipped instance. None of profiles in presets are for"
                 " Host: \"{}\" | Family: \"{}\" | Task \"{}\"").format(
                     host_name, family, task_name))
            return

        # Pre-filter burnin definitions by instance families
        burnin_defs = self.filter_burnins_by_families(profile, instance)
        if not burnin_defs:
            self.log.info((
                "Skipped instance. Burnin definitions are not set for profile"
                " Host: \"{}\" | Family: \"{}\" | Task \"{}\" | Profile \"{}\""
            ).format(host_name, family, task_name, profile))
            return

        # Prepare burnin options
        # Start from defaults; only non-None preset values override them.
        profile_options = copy.deepcopy(self.default_options)
        for key, value in (self.options or {}).items():
            if value is not None:
                profile_options[key] = value

        # Prepare global burnin values from presets
        # Only keys that name a known burnin position are kept.
        profile_burnins = {}
        for key, value in (self.fields or {}).items():
            key_low = key.lower()
            if key_low in self.positions:
                if value is not None:
                    profile_burnins[key_low] = value

        # Prepare basic data for processing
        _burnin_data, _temp_data = self.prepare_basic_data(instance)

        anatomy = instance.context.data["anatomy"]
        scriptpath = self.burnin_script_path()
        # Executable args that will execute the script
        # [pype executable, *pype script, "run"]
        executable_args = get_pype_execute_args("run", scriptpath)

        # Environments for script process
        env = os.environ.copy()
        # pop PYTHONPATH
        env.pop("PYTHONPATH", None)

        # Iterate over a tuple copy - representations are added/removed
        # inside the loop.
        for idx, repre in enumerate(tuple(instance.data["representations"])):
            self.log.debug("repre ({}): `{}`".format(idx + 1, repre["name"]))
            if not self.repres_is_valid(repre):
                continue

            # Filter output definition by representation tags (optional)
            repre_burnin_defs = self.filter_burnins_by_tags(
                burnin_defs, repre["tags"])
            if not repre_burnin_defs:
                self.log.info(
                    ("Skipped representation. All burnin definitions from"
                     " selected profile does not match to representation's"
                     " tags. \"{}\"").format(str(repre["tags"])))
                continue

            # Create copy of `_burnin_data` and `_temp_data` for repre.
            burnin_data = copy.deepcopy(_burnin_data)
            temp_data = copy.deepcopy(_temp_data)

            # Prepare representation based data.
            self.prepare_repre_data(instance, repre, burnin_data, temp_data)

            # Add anatomy keys to burnin_data.
            filled_anatomy = anatomy.format_all(burnin_data)
            burnin_data["anatomy"] = filled_anatomy.get_solved()

            # Add source camera name to burnin data
            camera_name = repre.get("camera_name")
            if camera_name:
                burnin_data["camera_name"] = camera_name

            first_output = True

            files_to_delete = []
            for filename_suffix, burnin_def in repre_burnin_defs.items():
                new_repre = copy.deepcopy(repre)

                # Keep "ftrackreview" tag only on first output
                if first_output:
                    first_output = False
                elif "ftrackreview" in new_repre["tags"]:
                    new_repre["tags"].remove("ftrackreview")

                # Per-definition copies so overrides don't leak between
                # burnin definitions.
                burnin_options = copy.deepcopy(profile_options)
                burnin_values = copy.deepcopy(profile_burnins)

                # Options overrides
                for key, value in (burnin_def.get("options") or {}).items():
                    # Set or override value if is valid
                    if value is not None:
                        burnin_options[key] = value

                # Burnin values overrides
                for key, value in burnin_def.items():
                    key_low = key.lower()
                    if key_low in self.positions:
                        if value is not None:
                            # Set or override value if is valid
                            burnin_values[key_low] = value

                        elif key_low in burnin_values:
                            # Pop key if value is set to None (null in json)
                            burnin_values.pop(key_low)

                # Remove "delete" tag from new representation
                if "delete" in new_repre["tags"]:
                    new_repre["tags"].remove("delete")

                if len(repre_burnin_defs.keys()) > 1:
                    # Update name and outputName to be
                    # able have multiple outputs in case of more burnin presets
                    # Join previous "outputName" with filename suffix
                    new_name = "_".join(
                        [new_repre["outputName"], filename_suffix])
                    new_repre["name"] = new_name
                    new_repre["outputName"] = new_name

                # Prepare paths and files for process.
                self.input_output_paths(new_repre, temp_data, filename_suffix)

                decompressed_dir = ''
                full_input_path = temp_data["full_input_path"]
                do_decompress = should_decompress(full_input_path)
                if do_decompress:
                    decompressed_dir = get_decompress_dir()

                    decompress(decompressed_dir, full_input_path,
                               temp_data["frame_start"],
                               temp_data["frame_end"], self.log)

                    # input path changed, 'decompressed' added
                    input_file = os.path.basename(full_input_path)
                    temp_data["full_input_path"] = os.path.join(
                        decompressed_dir, input_file)

                # Data for burnin script
                script_data = {
                    "input": temp_data["full_input_path"],
                    "output": temp_data["full_output_path"],
                    "burnin_data": burnin_data,
                    "options": burnin_options,
                    "values": burnin_values
                }

                self.log.debug("script_data: {}".format(
                    json.dumps(script_data, indent=4)))

                # Dump data to string
                dumped_script_data = json.dumps(script_data)

                # Store dumped json to temporary file
                # delete=False so the subprocess can open it on Windows;
                # removed explicitly below.
                temporary_json_file = tempfile.NamedTemporaryFile(
                    mode="w", suffix=".json", delete=False)
                temporary_json_file.write(dumped_script_data)
                temporary_json_file.close()
                temporary_json_filepath = temporary_json_file.name.replace(
                    "\\", "/")

                # Prepare subprocess arguments
                args = list(executable_args)
                args.append(temporary_json_filepath)
                self.log.debug("Executing: {}".format(" ".join(args)))

                # Run burnin script
                # NOTE(review): shell=True combined with a list of args is
                # suspicious - on POSIX subprocess would only run args[0].
                # Behavior depends on openpype.api.run_subprocess; confirm.
                openpype.api.run_subprocess(args,
                                            shell=True,
                                            logger=self.log,
                                            env=env)

                # Remove the temporary json
                os.remove(temporary_json_filepath)

                # Collect source files for cleanup after all outputs exist.
                for filepath in temp_data["full_input_paths"]:
                    filepath = filepath.replace("\\", "/")
                    if filepath not in files_to_delete:
                        files_to_delete.append(filepath)

                # Add new representation to instance
                instance.data["representations"].append(new_repre)

            # Remove source representation
            # NOTE we maybe can keep source representation if necessary
            instance.data["representations"].remove(repre)

            # Delete input files
            for filepath in files_to_delete:
                if os.path.exists(filepath):
                    os.remove(filepath)
                    self.log.debug("Removed: \"{}\"".format(filepath))

            if do_decompress and os.path.exists(decompressed_dir):
                shutil.rmtree(decompressed_dir)
Example #6
0
def legacy_server(ftrack_url):
    """Supervise the legacy ftrack event server subprocess forever.

    Loops once per second: while *ftrack_url* is reachable, keeps the
    ``sub_legacy_server.py`` subprocess running and restarts it when it
    dies. Repeated quick crashes (more than ``max_fail_count`` within
    ``min_fail_seconds`` of each other) trigger a cool-down of
    ``wait_time_after_max_fail`` seconds before restart attempts resume.

    Args:
        ftrack_url (str): Ftrack server URL checked for accessibility.
    """
    # Current file
    scripts_dir = os.path.join(FTRACK_MODULE_DIR, "scripts")

    # A crash within this many seconds of the previous one counts as
    # a consecutive failure.
    min_fail_seconds = 5
    # Consecutive failures allowed before backing off.
    max_fail_count = 3
    # Cool-down (seconds) after the failure limit is reached.
    wait_time_after_max_fail = 10

    subproc = None
    subproc_path = "{}/sub_legacy_server.py".format(scripts_dir)
    subproc_last_failed = datetime.datetime.now()
    subproc_failed_count = 0

    ftrack_accessible = False
    printed_ftrack_error = False

    while True:
        if not ftrack_accessible:
            ftrack_accessible = check_ftrack_url(ftrack_url)

        # Run threads only if Ftrack is accessible
        if not ftrack_accessible and not printed_ftrack_error:
            print("Can't access Ftrack {} <{}>".format(
                ftrack_url, str(datetime.datetime.now())))
            if subproc is not None:
                if subproc.poll() is None:
                    subproc.terminate()

                subproc = None

            # Print the error only once until Ftrack recovers.
            printed_ftrack_error = True

            time.sleep(1)
            continue

        printed_ftrack_error = False

        if subproc is None:
            if subproc_failed_count < max_fail_count:
                args = get_pype_execute_args("run", subproc_path)
                subproc = subprocess.Popen(args, stdout=subprocess.PIPE)
            elif subproc_failed_count == max_fail_count:
                print(("Storer failed {}times I'll try to run again {}s later"
                       ).format(str(max_fail_count),
                                str(wait_time_after_max_fail)))
                # Bump past the limit so this message prints only once.
                subproc_failed_count += 1
            elif ((datetime.datetime.now() - subproc_last_failed).seconds >
                  wait_time_after_max_fail):
                # Cool-down elapsed - allow restart attempts again.
                subproc_failed_count = 0

        # If thread failed test Ftrack and Mongo connection
        elif subproc.poll() is not None:
            subproc = None
            ftrack_accessible = False

            _subproc_last_failed = datetime.datetime.now()
            delta_time = (_subproc_last_failed - subproc_last_failed).seconds
            if delta_time < min_fail_seconds:
                subproc_failed_count += 1
            else:
                subproc_failed_count = 0
            subproc_last_failed = _subproc_last_failed

        time.sleep(1)
 def run_standalone_publisher(self):
     """Launch the Standalone Publisher tool in a detached subprocess."""
     args = get_pype_execute_args("standalonepublisher")
     # DETACHED_PROCESS keeps the tool alive independently of this
     # process (Windows-only flag).
     subprocess.Popen(args, creationflags=subprocess.DETACHED_PROCESS)