Example #1
    def get_installed_plugin_flow(self, *args, **kwargs):
        plugin_type = self.get_argument('plugin_type')

        plugin_handler = PluginsHandler()
        plugin_modules = plugin_handler.get_plugin_modules_by_type(plugin_type)

        # Only return the data that we need
        return_plugin_flow = []
        for plugin_module in plugin_modules:
            return_plugin_flow.append({
                "plugin_id": plugin_module.get("plugin_id"),
                "name":      plugin_module.get("name"),
            })
        self.write(json.dumps({
            "success":     True,
            "plugin_flow": return_plugin_flow,
        }))
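
All four examples on this page follow the same pattern: create a PluginsHandler, ask it for the modules registered under a given plugin type with get_plugin_modules_by_type(), and read individual fields off each returned dict with .get(). Below is a minimal sketch of that pattern outside the Tornado handler; it assumes a PluginsHandler instance is passed in, and the helper name list_plugin_flow is illustrative only.

def list_plugin_flow(plugin_handler, plugin_type):
    # plugin_handler is assumed to be a PluginsHandler instance, as in the example above
    plugin_modules = plugin_handler.get_plugin_modules_by_type(plugin_type)
    # Return only the fields needed for display
    return [
        {"plugin_id": m.get("plugin_id"), "name": m.get("name")}
        for m in plugin_modules
    ]
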
Example #2
    def post_process_file(self):
        # Init plugins handler
        plugin_handler = PluginsHandler()

        # Read current task data
        # task_data = self.current_task.get_task_data()
        cache_path = self.current_task.get_cache_path()
        source_data = self.current_task.get_source_data()
        destination_data = self.current_task.get_destination_data()
        # Move file back to original folder and remove source
        file_move_processes_success = True
        # Create a list for filling with destination paths
        destination_files = []
        if self.current_task.task.success:
            # Run a postprocess file movement on the cache file for each plugin that configures it

            # Ensure the final cache path file is in the correct format
            self.current_task.task.success = self.validate_streams(
                self.current_task.task.cache_path)

            # Fetch all 'postprocessor.file_move' plugin modules
            plugin_modules = plugin_handler.get_plugin_modules_by_type(
                'postprocessor.file_move')

            # Check if the source file needs to be removed by default (only if it does not match the destination file)
            remove_source_file = False
            if source_data['abspath'] != destination_data['abspath']:
                remove_source_file = True

            # Set initial data (some fields will be overwritten further down)
            initial_data = {
                "source_data": None,
                'remove_source_file': remove_source_file,
                'copy_file': None,
                "file_in": None,
                "file_out": None,
            }

            # Default the runner return data to the initial data, in case no plugin runner completes successfully
            data = initial_data

            for plugin_module in plugin_modules:
                # Always set source_data to the original file's source_data
                initial_data["source_data"] = source_data
                # Always set copy_file to True
                initial_data["copy_file"] = True
                # Always set file in to cache path
                initial_data["file_in"] = cache_path
                # Always set file out to destination data absolute path
                initial_data["file_out"] = destination_data.get('abspath')

                # Run plugin and fetch return data
                plugin_runner = plugin_module.get("runner")
                try:
                    data = plugin_runner(initial_data)
                except Exception as e:
                    self._log(
                        "Exception while carrying out plugin runner on postprocessor file movement '{}'"
                        .format(plugin_module.get('plugin_id')),
                        message2=str(e),
                        level="exception")
                    # Do not continue with this plugin module's loop
                    continue

                if data.get('copy_file'):
                    # Copy the file
                    self._log("Copying file {} --> {}".format(data.get('file_in'), data.get('file_out')))
                    try:
                        before_checksum = hashlib.md5(open(data.get('file_in'), 'rb').read()).hexdigest()
                        file_in = os.path.abspath(data.get('file_in'))
                        file_out = os.path.abspath(data.get('file_out'))
                        if not os.path.exists(file_in):
                            self._log("Error - file_in path does not exist! '{}'".format(file_in), level="error")
                            time.sleep(1)
                        shutil.copyfile(file_in, file_out)
                        after_checksum = hashlib.md5(open(data.get('file_out'), 'rb').read()).hexdigest()
                        # Compare the checksums on the copied file to ensure it is still correct
                        if before_checksum != after_checksum:
                            # Something went wrong during that file copy
                            self._log(
                                "Copy function failed during postprocessor file movement '{}' on file '{}'"
                                .format(plugin_module.get('plugin_id'),
                                        cache_path),
                                level='warning')
                            file_move_processes_success = False
                        else:
                            destination_files.append(data.get('file_out'))
                    except Exception as e:
                        self._log(
                            "Exception while copying file {} to {}:".format(
                                data.get('file_in'), data.get('file_out')),
                            message2=str(e),
                            level="exception")
                        file_move_processes_success = False

            # Check if the remove source flag is still True after all plugins have run. If so, we will remove the source file
            if data.get('remove_source_file'):
                # Only carry out a source removal if the whole postprocess was successful
                if file_move_processes_success:
                    self._log("Removing source: {}".format(
                        source_data['abspath']))
                    os.remove(source_data['abspath'])

                    # If we need to keep the filename history, do that here
                    if self.settings.get_keep_filename_history():
                        dirname = os.path.dirname(source_data['abspath'])
                        self.keep_filename_history(
                            dirname, destination_data["basename"],
                            source_data["basename"])
                else:
                    self._log(
                        "Keeping source file '{}'. Not all postprocessor file movement functions completed."
                        .format(source_data['abspath']),
                        level="warning")

            if not file_move_processes_success:
                self._log(
                    "Error while running postprocessor file movement on file '{}'. Not all postprocessor file movement functions completed."
                    .format(cache_path),
                    level="error")

        else:
            self._log("Encoded file failed post processing test '{}'".format(
                cache_path),
                      level='warning')

        # Fetch all 'postprocessor.task_result' plugin modules
        plugin_modules = plugin_handler.get_plugin_modules_by_type(
            'postprocessor.task_result')

        for plugin_module in plugin_modules:
            data = {
                "source_data": source_data,
                'task_processing_success': self.current_task.task.success,
                'file_move_processes_success': file_move_processes_success,
                'destination_files': destination_files,
            }

            # Run plugin and fetch return data
            plugin_runner = plugin_module.get("runner")
            try:
                plugin_runner(data)
            except Exception as e:
                self._log(
                    "Exception while carrying out plugin runner on postprocessor task result '{}'"
                    .format(plugin_module.get('plugin_id')),
                    message2=str(e),
                    level="exception")
                continue

        # Cleanup cache files
        task_cache_directory = os.path.dirname(cache_path)
        if os.path.exists(task_cache_directory) and "unmanic_file_conversion" in task_cache_directory:
            for f in os.listdir(task_cache_directory):
                cache_file_path = os.path.join(task_cache_directory, f)
                self._log("Removing task cache directory file '{}'".format(
                    cache_file_path))
                # Remove the cache file
                os.remove(cache_file_path)
            # Remove the directory
            self._log("Removing task cache directory '{}'".format(
                task_cache_directory))
            os.rmdir(task_cache_directory)
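
The copy step above verifies each transfer by comparing MD5 checksums of the source and destination files. Below is a standalone sketch of that verify-after-copy pattern; the helper names and the chunked hashing are illustrative, not part of Unmanic's API.

import hashlib
import shutil


def _md5(path, chunk_size=1024 * 1024):
    # Hash the file in chunks rather than reading the whole media file into memory
    digest = hashlib.md5()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            digest.update(chunk)
    return digest.hexdigest()


def copy_with_checksum(file_in, file_out):
    # Copy file_in to file_out and confirm the destination checksum matches the source
    before = _md5(file_in)
    shutil.copyfile(file_in, file_out)
    return _md5(file_out) == before
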
Example #3
    def process_item(self):
        # Reset the ffmpeg class when a new item is received
        self.setup_ffmpeg()

        abspath = self.current_task.get_source_abspath()
        self._log("{} processing job - {}".format(self.name, abspath))

        # # Process item in loop for the default config
        # file_in = abspath
        # file_out = self.current_task.task.cache_path
        # data = self.convert_file(file_in, file_out)

        # Then process the item for each plugin that configures it

        # Init plugins
        plugin_handler = PluginsHandler()
        plugin_modules = plugin_handler.get_plugin_modules_by_type('worker.process_item')

        # Create dictionary of runners info for the webUI
        self.worker_runners_info = {}
        for plugin_module in plugin_modules:
            self.worker_runners_info[plugin_module.get('plugin_id')] = {
                'plugin_id':   plugin_module.get('plugin_id'),
                'status':      'pending',
                "name":        plugin_module.get('name'),
                "author":      plugin_module.get('author'),
                "version":     plugin_module.get('version'),
                "icon":        plugin_module.get('icon'),
                "description": plugin_module.get('description'),
            }

        # Process item in loop.
        # First process the item for each plugin that configures it, then run the default Unmanic configuration
        task_cache_path = self.current_task.get_cache_path()
        file_in = abspath
        overall_success = True
        current_file_out = ""
        runner_count = 0
        for plugin_module in plugin_modules:
            runner_count += 1
            self.worker_runners_info[plugin_module.get('plugin_id')]['status'] = 'in_progress'
            self.worker_runners_info[plugin_module.get('plugin_id')]['success'] = False
            # Fetch file out details
            # This creates a temp file labeled "WORKING" that will be moved to the cache_path on completion
            tmp_file_out = os.path.splitext(task_cache_path)
            file_out = current_file_out = "{}-{}-{}{}".format(tmp_file_out[0], "WORKING", runner_count, tmp_file_out[1])

            # Fetch initial file probe
            file_probe = self.ffmpeg.file_probe(file_in)
            # Generate the default ffmpeg args from the file probe
            ffmpeg_args = self.ffmpeg.generate_ffmpeg_args(file_probe, file_in, file_out)
            initial_data = {
                "exec_ffmpeg":        True,
                "file_probe":         file_probe,
                "ffmpeg_args":        ffmpeg_args,
                "file_in":            file_in,
                "file_out":           file_out,
                "original_file_path": abspath,
            }

            # Run plugin and fetch return data
            plugin_runner = plugin_module.get("runner")
            try:
                data = plugin_runner(initial_data)
            except Exception as e:
                self._log("Exception while carrying out plugin runner on worker process '{}'".format(
                    plugin_module.get('plugin_id')), message2=str(e), level="exception")
                # Skip this plugin module's loop
                self.worker_runners_info[plugin_module.get('plugin_id')]['status'] = 'complete'
                continue
            self._log("Worker process '{}' (in)".format(plugin_module.get('plugin_id')), data.get("file_in"), level='debug')
            self._log("Worker process '{}' (out)".format(plugin_module.get('plugin_id')), data.get("file_out"), level='debug')

            # Only run the conversion process if "exec_ffmpeg" is True
            if data.get("exec_ffmpeg"):
                self.current_task.save_ffmpeg_log("\n\nRUNNER: \n" + plugin_module.get('name'))

                # Run conversion process
                success = self.convert_file(data, plugin_module.get('plugin_id'))

                if success:
                    # If file conversion was successful
                    self._log("Successfully ran worker process '{}' on file '{}'".format(plugin_module.get('plugin_id'),
                                                                                         data.get("file_in")))
                    self.worker_runners_info[plugin_module.get('plugin_id')]['success'] = True
                    # Use this runner's file out as the file in for the next loop
                    file_in = data.get("file_out")
                else:
                    # If file conversion failed
                    self._log(
                        "Error while running worker process '{}' on file '{}'".format(plugin_module.get('plugin_id'), abspath),
                        level="error")
                    self.worker_runners_info[plugin_module.get('plugin_id')]['success'] = False
                    overall_success = False
            else:
                # This plugin requested that the FFmpeg command not be executed for this file
                self._log("Worker process '{}' set to not run the FFMPEG command.".format(plugin_module.get('plugin_id')),
                          level='debug')
                self.worker_runners_info[plugin_module.get('plugin_id')]['success'] = True

            self.worker_runners_info[plugin_module.get('plugin_id')]['status'] = 'complete'

        if overall_success:
            # If file conversion was successful, we will get here
            self._log("Successfully converted file '{}'".format(abspath))
            try:
                # Move file to original cache path
                self._log("Moving final cache file from '{}' to '{}'".format(current_file_out, task_cache_path))
                current_file_out = os.path.abspath(current_file_out)
                if not os.path.exists(current_file_out):
                    self._log("Error - current_file_out path does not exist! '{}'".format(current_file_out), level="error")
                    time.sleep(1)
                shutil.move(current_file_out, task_cache_path)
            except Exception as e:
                self._log("Exception in final move operation of file {} to {}:".format(current_file_out, task_cache_path),
                          message2=str(e), level="exception")
                return False
            return True
        self._log("Failed to convert file '{}'".format(abspath), level='warning')
        return False
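
The worker loop above chains runners together: each runner's file_out becomes the next runner's file_in, with intermediate results written to "-WORKING-<n>" files derived from the cache path. Below is a minimal sketch of that chaining, with a plain list of callables standing in for the plugin runners; chain_runners and its behaviour are illustrative only.

import os


def chain_runners(source_path, cache_path, runners):
    # 'runners' are callables that take a dict with 'file_in' and 'file_out' keys,
    # standing in for the plugin runner functions used above.
    file_in = source_path
    current_file_out = None
    base, ext = os.path.splitext(cache_path)
    for count, runner in enumerate(runners, start=1):
        file_out = "{}-{}-{}{}".format(base, "WORKING", count, ext)
        data = {"file_in": file_in, "file_out": file_out}
        runner(data)
        if os.path.exists(data["file_out"]):
            # This runner produced an output file; feed it into the next runner
            file_in = data["file_out"]
            current_file_out = data["file_out"]
    # The last file produced (if any) is what would then be moved onto cache_path
    return current_file_out
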
Example #4
File: filetest.py  Project: Unmanic/unmanic
    def should_file_be_added_to_task_list(self):
        """
        Test if this file needs to be added to the task list

        :return:
        """
        return_value = True
        file_issues = []

        # Init plugins
        plugin_handler = PluginsHandler()
        plugin_modules = plugin_handler.get_plugin_modules_by_type(
            'library_management.file_test')

        if self.file_in_unmanic_ignore_lockfile():
            file_issues.append({
                'id':      'unmanicignore',
                'message': "File found in unmanic ignore file - '{}'".format(self.path),
            })
            return_value = False

        if not self.file_ends_in_allowed_search_extensions():
            file_issues.append({
                'id':      'extension',
                'message': "File suffix is not in allowed search extensions - '{}'".format(self.path),
            })
            return_value = False

        # Check if file has failed in history.
        if self.file_failed_in_history():
            file_issues.append({
                'id':      'blacklisted',
                'message': "File found already failed in history - '{}'".format(self.path),
            })
            return_value = False

        # Check if this file is already the correct format:
        if self.file_already_in_target_format():
            file_issues.append({
                'id':      'format',
                'message': "File is already in target format - '{}'".format(self.path),
            })
            return_value = False

        # Run tests against plugins
        for plugin_module in plugin_modules:
            data = {
                'path': self.path,
                'issues': file_issues.copy(),
                'add_file_to_pending_tasks': return_value,
            }

            # Run plugin and fetch return data
            plugin_runner = plugin_module.get("runner")
            try:
                plugin_runner(data)

                # Set the return_value based on the plugin results
                return_value = data.get('add_file_to_pending_tasks')

                # Append any file issues found during previous tests
                file_issues = data.get('issues')
            except Exception as e:
                self._log(
                    "Exception while carrying out plugin runner on library management file test '{}'"
                    .format(plugin_module.get('plugin_id')),
                    message2=str(e),
                    level="exception")
                continue

        return return_value, file_issues
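
Each 'library_management.file_test' runner receives the data dict shown above and reports back by mutating it in place: appending to 'issues' and toggling 'add_file_to_pending_tasks'. A hypothetical runner is sketched below; the function name and the rule it applies (skipping files that look like samples) are illustrative only.

def on_library_management_file_test(data):
    # 'data' carries 'path', 'issues' and 'add_file_to_pending_tasks', as built in the example above
    path = data.get('path', '')
    if 'sample' in path.lower():
        # Record why the file was rejected and prevent it being queued
        data['issues'].append({
            'id':      'sample',
            'message': "File looks like a sample clip - '{}'".format(path),
        })
        data['add_file_to_pending_tasks'] = False
    return data
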