def exec_data_panels_plugin_runner(data, plugin_id):
    """
    Exec a frontend.panel plugin runner

    :param data:
    :param plugin_id:
    :return:
    """
    plugin_handler = PluginsHandler()
    return plugin_handler.exec_plugin_runner(data, plugin_id, 'frontend.panel')


def exec_plugin_api_plugin_runner(data, plugin_id):
    """
    Exec a frontend.plugin_api plugin runner

    :param data:
    :param plugin_id:
    :return:
    """
    plugin_handler = PluginsHandler()
    return plugin_handler.exec_plugin_runner(data, plugin_id, 'frontend.plugin_api')
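# Illustrative usage sketch only (not part of the module). The plugin ID and
# the contents of 'data' below are hypothetical; the actual fields of the data
# object are defined by the 'frontend.panel' / 'frontend.plugin_api' runner
# contracts, which are not documented in this module. The falsy-return
# convention is taken from how exec_plugin_runner() is checked elsewhere in
# this file.
#
#   data = {}
#   if not exec_data_panels_plugin_runner(data, 'example_panel_plugin'):
#       # A falsy return means the plugin runner failed to execute.
#       # On success, the runner will have mutated 'data' in place.
#       pass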
def post_process_file(self):
    # Init plugins handler
    plugin_handler = PluginsHandler()

    # Read current task data
    # task_data = self.current_task.get_task_data()
    library_id = self.current_task.get_task_library_id()
    cache_path = self.current_task.get_cache_path()
    source_data = self.current_task.get_source_data()
    destination_data = self.current_task.get_destination_data()
    # Move file back to original folder and remove source
    file_move_processes_success = True
    # Create a list for filling with destination paths
    destination_files = []

    if self.current_task.task.success:
        # Run a postprocess file movement on the cache file for each plugin that configures it

        # Fetch all 'postprocessor.file_move' plugin modules
        plugin_modules = plugin_handler.get_enabled_plugin_modules_by_type('postprocessor.file_move',
                                                                           library_id=library_id)

        # Check if the source file needs to be removed by default (only if it does not match the destination file)
        remove_source_file = False
        if source_data['abspath'] != destination_data['abspath']:
            remove_source_file = True

        # Set initial data (some fields will be overwritten further down)
        # - 'library_id'            - The library ID for this task
        # - 'source_data'           - Dictionary of data pertaining to the source file
        # - 'remove_source_file'    - True to remove the original file (default is True if the file name has changed)
        # - 'copy_file'             - True to run a plugin-initiated file copy (default is False unless the plugin says otherwise)
        # - 'file_in'               - Source path to copy from (if 'copy_file' is True)
        # - 'file_out'              - Destination path to copy to (if 'copy_file' is True)
        # - 'run_default_file_copy' - Set to False to prevent the final Unmanic post-process file movement
        data = {
            'library_id':            library_id,
            'source_data':           None,
            'remove_source_file':    remove_source_file,
            'copy_file':             None,
            'file_in':               None,
            'file_out':              None,
            'run_default_file_copy': True,
        }

        for plugin_module in plugin_modules:
            # Always set source_data to the original file's source_data
            data["source_data"] = source_data
            # Always set copy_file to False
            data["copy_file"] = False
            # Always set file_in to the cache path
            data["file_in"] = cache_path
            # Always set file_out to the destination data absolute path
            data["file_out"] = destination_data.get('abspath')

            # Run plugin to update data
            if not plugin_handler.exec_plugin_runner(data, plugin_module.get('plugin_id'), 'postprocessor.file_move'):
                # Do not continue with this plugin module's loop
                continue

            if data.get('copy_file'):
                # Copy the file
                file_in = os.path.abspath(data.get('file_in'))
                file_out = os.path.abspath(data.get('file_out'))
                if not self.__copy_file(file_in, file_out, destination_files, plugin_module.get('plugin_id')):
                    file_move_processes_success = False
            else:
                self._log("Plugin did not request a file copy ({})".format(plugin_module.get('plugin_id')),
                          level='debug')

        # Unmanic's default file movement process.
        # Only carry out the final post-processor file movements if all others were successful
        if file_move_processes_success and data.get('run_default_file_copy'):
            # Run the default post-process file movement.
            # This will always move the file back to the original location.
            # If that original location is the same file name, it will overwrite the original file.
            if destination_data.get('abspath') == source_data.get('abspath'):
                # Only run the final file copy to overwrite the source file if the remove_source_file flag was never set.
                # The remove_source_file flag will remove the source file in later lines after this copy operation,
                # so copying the file here would be a waste of time.
                if not data.get('remove_source_file'):
                    if not self.__copy_file(cache_path, destination_data.get('abspath'), destination_files, 'DEFAULT',
                                            move=True):
                        file_move_processes_success = False
            elif not self.__copy_file(cache_path, destination_data.get('abspath'), destination_files, 'DEFAULT',
                                      move=True):
                file_move_processes_success = False

        # Source file removal process.
        # Only run if all final post-processor file movements were successful
        if file_move_processes_success:
            # Check if the remove_source_file flag is still True after all plugins have run. If so, remove the source file
            if data.get('remove_source_file'):
                # Only carry out a source removal if the file exists and the final copy was also successful
                if file_move_processes_success and os.path.exists(source_data.get('abspath')):
                    self._log("Removing source: {}".format(source_data.get('abspath')))
                    os.remove(source_data.get('abspath'))
                else:
                    self._log("Keeping source file '{}'. Not all postprocessor file movement functions completed.".format(
                        source_data.get('abspath')), level="warning")

        # Log a final error if not all file movements were successful
        if not file_move_processes_success:
            self._log("Error while running postprocessor file movement on file '{}'. "
                      "Not all postprocessor file movement functions completed.".format(cache_path), level="error")
    else:
        self._log("Skipping file movement post-processor as the task was not successful '{}'".format(cache_path),
                  level='warning')

    # Fetch all 'postprocessor.task_result' plugin modules
    plugin_modules = plugin_handler.get_enabled_plugin_modules_by_type('postprocessor.task_result',
                                                                       library_id=library_id)
    for plugin_module in plugin_modules:
        data = {
            'final_cache_path':            cache_path,
            'library_id':                  library_id,
            'source_data':                 source_data,
            'task_processing_success':     self.current_task.task.success,
            'file_move_processes_success': file_move_processes_success,
            'destination_files':           destination_files,
        }
        # Run plugin to update data
        if not plugin_handler.exec_plugin_runner(data, plugin_module.get('plugin_id'), 'postprocessor.task_result'):
            # Do not continue with this plugin module's loop
            continue

    # Cleanup cache files
    self.__cleanup_cache_files(cache_path)
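

# Illustrative sketch only: a minimal 'postprocessor.file_move' plugin runner
# matching the data object documented in post_process_file() above. The
# function name and the '.backup' destination are hypothetical; only the
# 'data' keys ('remove_source_file', 'copy_file', 'file_in', 'file_out',
# 'run_default_file_copy') are taken from the code above.
def example_file_move_runner(data):
    # Keep the original file on disk rather than letting Unmanic remove it
    data['remove_source_file'] = False
    # Request a plugin-initiated copy of the cache file to a second location.
    # 'file_in' arrives pre-set to the cache path, so only 'file_out' is
    # overridden here (hypothetical destination for the example).
    data['copy_file'] = True
    data['file_out'] = data.get('file_out', '') + '.backup'
    # Still allow Unmanic's default post-process file movement to run
    data['run_default_file_copy'] = True
    return data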
class FileTest(object):
    """
    FileTest

    Object to manage tests carried out on files discovered
    during a library scan or inode event
    """

    def __init__(self, library_id: int):
        self.settings = config.Config()
        unmanic_logging = unlogger.UnmanicLogger.__call__()
        self.logger = unmanic_logging.get_logger(__class__.__name__)
        # Init plugins
        self.library_id = library_id
        self.plugin_handler = PluginsHandler()
        self.plugin_modules = self.plugin_handler.get_enabled_plugin_modules_by_type('library_management.file_test',
                                                                                     library_id=library_id)
        # List of failed paths
        self.failed_paths = []

    def _log(self, message, message2='', level="info"):
        message = common.format_message(message, message2)
        getattr(self.logger, level)(message)

    def set_file(self):
        pass

    def file_failed_in_history(self, path):
        """
        Check if the file has already failed in history

        :return:
        """
        # Fetch historical tasks
        history_logging = history.History()
        if not self.failed_paths:
            failed_tasks = history_logging.get_historic_tasks_list_with_source_probe(task_success=False)
            for task in failed_tasks:
                self.failed_paths.append(task.get('abspath'))
        if path in self.failed_paths:
            # That pathname was found in the results of failed historic tasks
            return True
        # No results were found matching that pathname
        return False

    def file_in_unmanic_ignore_lockfile(self, path):
        """
        Check if the file's parent directory contains a '.unmanicignore' lockfile

        :return:
        """
        # Get file parent directory
        dirname = os.path.dirname(path)
        # Check if lockfile (.unmanicignore) exists
        unmanic_ignore_file = os.path.join(dirname, '.unmanicignore')
        if os.path.exists(unmanic_ignore_file):
            # Get file basename
            basename = os.path.basename(path)
            # Read the file and check for any entry with this file name
            with open(unmanic_ignore_file) as f:
                for line in f:
                    if basename in line:
                        return True
        return False

    def should_file_be_added_to_task_list(self, path):
        """
        Test if this file needs to be added to the task list

        :return:
        """
        return_value = None
        file_issues = []

        # TODO: Remove this
        if self.file_in_unmanic_ignore_lockfile(path):
            file_issues.append({
                'id':      'unmanicignore',
                'message': "File found in unmanic ignore file - '{}'".format(path),
            })
            return_value = False

        # Check if the file has already failed in history
        if self.file_failed_in_history(path):
            file_issues.append({
                'id':      'blacklisted',
                'message': "File found already failed in history - '{}'".format(path),
            })
            return_value = False

        # Only run checks with plugins if the other tests were not conclusive
        priority_score_modification = 0
        if return_value is None:
            # Set the initial data with just the priority score
            data = {
                'priority_score': 0,
                'shared_info':    {},
            }
            # Run tests against plugins
            for plugin_module in self.plugin_modules:
                data['library_id'] = self.library_id
                data['path'] = path
                data['issues'] = file_issues.copy()
                data['add_file_to_pending_tasks'] = None

                # Run plugin to update data
                if not self.plugin_handler.exec_plugin_runner(data, plugin_module.get('plugin_id'),
                                                              'library_management.file_test'):
                    continue

                # Append any file issues found during previous tests
                file_issues = data.get('issues')

                # Set the return_value based on the plugin results.
                # If add_file_to_pending_tasks returned an answer (True/False), break the loop.
                # No need to continue.
                if data.get('add_file_to_pending_tasks') is not None:
                    return_value = data.get('add_file_to_pending_tasks')
                    break

            # Set the priority score modification
            priority_score_modification = data.get('priority_score', 0)

        return return_value, file_issues, priority_score_modification
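

# Illustrative sketch only: a minimal 'library_management.file_test' plugin
# runner matching the data object assembled in should_file_be_added_to_task_list()
# above. The function name and the extension check are hypothetical; only the
# 'data' keys ('path', 'issues', 'add_file_to_pending_tasks', 'priority_score')
# and the break-on-non-None convention come from the code above.
def example_file_test_runner(data):
    if not data.get('path', '').lower().endswith('.mkv'):
        # Record why the file was rejected and answer the test conclusively.
        # Returning a non-None 'add_file_to_pending_tasks' stops the plugin loop.
        data['issues'].append({
            'id':      'example_extension_test',
            'message': "File is not an MKV - '{}'".format(data.get('path')),
        })
        data['add_file_to_pending_tasks'] = False
    else:
        # Leave 'add_file_to_pending_tasks' as None so the remaining plugins
        # still run, but bump the task's priority score modification.
        data['priority_score'] = data.get('priority_score', 0) + 10
    return data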
def __exec_worker_runners_on_set_task(self):
    """
    Executes the configured plugin runners against the set task.

    :return:
    """
    # Init plugins
    library_id = self.current_task.get_task_library_id()
    plugin_handler = PluginsHandler()
    plugin_modules = plugin_handler.get_enabled_plugin_modules_by_type('worker.process_item', library_id=library_id)

    # Create dictionary of runners info for the frontend
    self.worker_runners_info = {}
    for plugin_module in plugin_modules:
        self.worker_runners_info[plugin_module.get('plugin_id')] = {
            'plugin_id':   plugin_module.get('plugin_id'),
            'status':      'pending',
            "name":        plugin_module.get('name'),
            "author":      plugin_module.get('author'),
            "version":     plugin_module.get('version'),
            "icon":        plugin_module.get('icon'),
            "description": plugin_module.get('description'),
        }

    # Set the absolute path to the original file
    original_abspath = self.current_task.get_source_abspath()

    # Process item in loop.
    # First process the item for each plugin that configures it, then run the default Unmanic configuration
    task_cache_path = self.current_task.get_cache_path()
    # Set the current input file to the original file path
    file_in = original_abspath
    # Mark the overall success of all runners. This will be set to False if any of the runners fails.
    overall_success = True
    # Set the current file out to nothing.
    # This will be configured by each runner.
    # If no runners are configured, then nothing needs to be done.
    current_file_out = original_abspath
    # The number of runners that have been run
    runner_count = 0
    # Flag that no plugin runner has executed a command yet
    no_exec_command_run = True

    # Generate the default data object for the runner functions
    data = {
        "worker_log":              self.worker_log,
        "library_id":              library_id,
        "exec_command":            [],
        "command_progress_parser": default_progress_parser,
        "file_in":                 file_in,
        "file_out":                None,
        "original_file_path":      original_abspath,
        "repeat":                  False,
    }

    for plugin_module in plugin_modules:
        # Increment the runners count (first runner will be set as #1)
        runner_count += 1

        if not overall_success:
            # If one of the Plugins fails, don't continue.
            # The Plugins could be co-dependent and the final file will not go anywhere if 'overall_success' is False
            break

        # Mark the status of the worker for the frontend
        self.worker_runners_info[plugin_module.get('plugin_id')]['status'] = 'in_progress'
        self.worker_runners_info[plugin_module.get('plugin_id')]['success'] = False

        # Loop over the runner. This way we can repeat the function with the same data if requested by the repeat flag
        runner_pass_count = 0
        while not self.redundant_flag.is_set():
            runner_pass_count += 1

            # Fetch file out details.
            # This creates a temp file labelled "WORKING" that will be moved to the cache_path on completion
            split_file_out = os.path.splitext(task_cache_path)
            split_file_in = os.path.splitext(file_in)
            file_out = "{}-{}-{}-{}{}".format(split_file_out[0], "WORKING", runner_count, runner_pass_count,
                                              split_file_in[1])

            # Reset the data object for this runner function
            data['library_id'] = library_id
            data['exec_command'] = []
            data['command_progress_parser'] = default_progress_parser
            data['file_in'] = file_in
            data['file_out'] = file_out
            data['original_file_path'] = original_abspath
            data['repeat'] = False

            time.sleep(.2)  # Add a delay to prevent the loop from maxing compute resources

            self.worker_log.append("\n\nRUNNER: \n{} [Pass #{}]\n\n".format(plugin_module.get('name'), runner_pass_count))
            self.worker_log.append("\nExecuting plugin runner... Please wait\n")

            # Run plugin to update data
            if not plugin_handler.exec_plugin_runner(data, plugin_module.get('plugin_id'), 'worker.process_item'):
                # Skip this plugin module's loop
                self.worker_runners_info[plugin_module.get('plugin_id')]['status'] = 'complete'
                self.worker_runners_info[plugin_module.get('plugin_id')]['success'] = False
                # Set overall success status to failed
                overall_success = False
                # Append log entry to say the plugin failed
                self.worker_log.append("\n\nPLUGIN FAILED!")
                self.worker_log.append("\nFailed to execute Plugin '{}'".format(plugin_module.get('name')))
                self.worker_log.append("\nCheck Unmanic logs for more information")
                break

            # Log the in and out files returned by the plugin runner for debugging
            self._log("Worker process '{}' (in)".format(plugin_module.get('plugin_id')), data.get("file_in"),
                      level='debug')
            self._log("Worker process '{}' (out)".format(plugin_module.get('plugin_id')), data.get("file_out"),
                      level='debug')

            # Only run the conversion process if "exec_command" is not empty
            if data.get("exec_command"):
                self.worker_log.append("\nPlugin runner requested a command to be executed by Unmanic")

                # Exec command as subprocess
                success = self.__exec_command_subprocess(data)
                no_exec_command_run = False

                if self.redundant_flag.is_set():
                    # This worker has been marked as redundant. It is being terminated.
                    self._log("Worker has been terminated before a command was completed", level="warning")
                    # Mark runner as failed
                    self.worker_runners_info[plugin_module.get('plugin_id')]['success'] = False
                    # Set overall success status to failed
                    overall_success = False
                    # Append log entry to say the worker was terminated
                    self.worker_log.append("\n\nWORKER TERMINATED!")
                    # Don't continue
                    break

                # Check if the command exited successfully
                if success:
                    # The file conversion was successful
                    self._log("Successfully ran worker process '{}' on file '{}'".format(plugin_module.get('plugin_id'),
                                                                                         data.get("file_in")))
                    # Ensure the 'file_out' that the plugin specified to be created was actually created
                    if os.path.exists(data.get('file_out')):
                        # The out file exists...
                        # In order to clean up as we go and avoid unnecessary RAM/disk use in the cache directory,
                        # we want to remove the 'file_in' file.
                        # We want to ensure that we do not accidentally remove any original files here.
                        # To avoid this, run two tests.
                        # First, check that the current 'file_in' is not the original file.
                        if os.path.abspath(data.get("file_in")) != os.path.abspath(original_abspath):
                            # Second, check that the 'file_in' is in the cache directory
                            if "unmanic_file_conversion" in os.path.abspath(data.get("file_in")):
                                # Remove this file
                                os.remove(os.path.abspath(data.get("file_in")))

                        # Set the new 'file_in' as the previous runner's 'file_out' for the next loop
                        file_in = data.get("file_out")
                else:
                    # The file conversion failed
                    self._log(
                        "Error while running worker process '{}' on file '{}'".format(
                            plugin_module.get('plugin_id'),
                            original_abspath
                        ),
                        level="error")
                    self.worker_runners_info[plugin_module.get('plugin_id')]['success'] = False
                    overall_success = False
                    # Ensure the new 'file_in' is set to the previous runner's 'file_in' for the next loop
                    file_in = data.get("file_in")
            else:
                # Ensure the new 'file_in' is set to the previous runner's 'file_in' for the next loop
                file_in = data.get("file_in")
                # Log that this plugin did not request to execute anything
                self.worker_log.append("\nRunner did not request to execute a command")
                self._log("Worker process '{}' did not request to execute a command.".format(
                    plugin_module.get('plugin_id')), level='debug')

            if os.path.exists(data.get('file_out')):
                # Set the current file out to the most recently completed cache file.
                # If the file out does not exist, it was likely never used by the plugin.
                current_file_out = data.get('file_out')
            else:
                # Ensure the current_file_out is set to the currently set 'file_in'
                current_file_out = data.get('file_in')

            if data.get("repeat"):
                # The returned data contained the 'repeat' flag.
                # Run another pass against this same plugin
                continue

            break

        self.worker_runners_info[plugin_module.get('plugin_id')]['success'] = True
        self.worker_runners_info[plugin_module.get('plugin_id')]['status'] = 'complete'

    # Log if no command was run by any Plugins
    if no_exec_command_run:
        # No jobs were carried out on this task
        self._log("No Plugin requested to run commands for this file '{}'".format(original_abspath), level='warning')
        self.worker_log.append("\n\nNo Plugin requested to run commands for this file '{}'".format(original_abspath))

    # Save the completed command log
    self.current_task.save_command_log(self.worker_log)

    # If all plugins that were executed completed successfully, then this was overall a successful task.
    # At this point we need to move the final out file to the original task cache path so the postprocessor can collect it.
    if overall_success:
        # If the jobs carried out on this task were all successful, we will get here
        self._log("Successfully completed Worker processing on file '{}'".format(original_abspath))

        # Attempt to move the final output file to the final cache file path for the postprocessor
        try:
            # Set the new file out as the extension may have changed
            split_file_name = os.path.splitext(current_file_out)
            file_extension = split_file_name[1].lstrip('.')
            cache_directory = os.path.dirname(os.path.abspath(task_cache_path))
            self.current_task.set_cache_path(cache_directory, file_extension)
            # Read the updated cache path
            task_cache_path = self.current_task.get_cache_path()

            # Move the file to the original cache path
            self._log("Moving final cache file from '{}' to '{}'".format(current_file_out, task_cache_path))
            current_file_out = os.path.abspath(current_file_out)

            # There is a really odd intermittent bug with the shutil module that is causing it to
            # sometimes report that the file does not exist.
            # This section adds a small pause and logs the error if that is the case.
            # I have not yet figured out a solution as this is difficult to reproduce.
            if not os.path.exists(current_file_out):
                self._log("Error - current_file_out path does not exist! '{}'".format(current_file_out), level="error")
                time.sleep(1)

            # Ensure the cache directory exists
            if not os.path.exists(cache_directory):
                os.makedirs(cache_directory)

            # Check that the current file out is not the original source file
            if os.path.abspath(current_file_out) == os.path.abspath(original_abspath):
                # The current file out is not a cache file; the file must never have been modified.
                # This can happen if all Plugins failed to run, or if a Plugin specifically reset the out
                # file to the original source in order to preserve it.
                # In this circumstance, we want to create a cache copy and let the process continue.
                self._log("Final cache file is the same path as the original source. Creating cache copy.",
                          level='debug')
                shutil.copyfile(current_file_out, task_cache_path)
            else:
                # Use the shutil module to move the file to the final task cache location
                shutil.move(current_file_out, task_cache_path)
        except Exception as e:
            self._log("Exception in final move operation of file {} to {}:".format(current_file_out, task_cache_path),
                      message2=str(e), level="exception")
            return False

        # Return True
        return True

    # If the overall result of the jobs carried out on this task was not successful, we will get here.
    # Log the failure and return False
    self._log("Failed to process task for file '{}'".format(original_abspath), level='warning')
    return False