def test_installed_plugins():
    """
    Run every installed plugin through the runner and settings checks,
    printing a coloured PASSED/FAILED report for each plugin.

    :return:
    """
    executor = PluginExecutor()
    handler = PluginsHandler()
    sort_order = {
        "column": 'position',
        "dir": 'desc',
    }
    installed = handler.get_plugin_list_filtered_and_sorted(order=sort_order, start=0, length=None)
    for entry in installed:
        print("{1}Testing plugin: '{0}'{2}".format(entry.get("name"), BColours.HEADER, BColours.ENDC))
        pid = entry.get("plugin_id")

        # Exercise every runner type the plugin declares
        print(" {0}Testing runners{1}".format(BColours.SUBHEADER, BColours.ENDC))
        runner_types = executor.get_all_plugin_types_in_plugin(pid)
        if not runner_types:
            error = "No runners found in plugin"
            print(" - {1}FAILED: {0}{2}".format(error, BColours.FAIL, BColours.ENDC))
        else:
            for runner_type in runner_types:
                runner_errors = executor.test_plugin_runner(pid, runner_type)
                if not runner_errors:
                    print(" - {}PASSED{}".format(BColours.OKGREEN, BColours.ENDC))
                    continue
                for error in runner_errors:
                    print(" - {1}FAILED: {0}{2}".format(error, BColours.FAIL, BColours.ENDC))

        # Validate the plugin's settings schema
        print(" {0}Testing settings{1}".format(BColours.SUBHEADER, BColours.ENDC))
        settings_errors, plugin_settings = executor.test_plugin_settings(pid)
        if settings_errors:
            for error in settings_errors:
                print(" - {1}FAILED: {0}{2}".format(error, BColours.FAIL, BColours.ENDC))
        else:
            # Pretty-print the settings, indented to line up under the bullet
            pretty_settings = json.dumps(plugin_settings, indent=1)
            pretty_settings = pretty_settings.replace('\n', '\n' + ' ')
            print(" - {1}Settings: {0}{2}".format(pretty_settings, BColours.RESULTS, BColours.ENDC))
            print(" - {}PASSED{}".format(BColours.OKGREEN, BColours.ENDC))
        print()
def test_plugin_runner(plugin_id, plugin_type, test_data=None):
    """
    Proxy a single plugin runner test through a fresh PluginExecutor.

    :param plugin_id: ID of the plugin to test
    :param plugin_type: runner type to exercise
    :param test_data: optional data payload handed to the runner test
    :return: whatever PluginExecutor.test_plugin_runner returns (list of errors)
    """
    return PluginExecutor().test_plugin_runner(plugin_id, plugin_type, test_data)
def should_file_be_added_to_task_list(self):
    """
    Test if this file needs to be added to the task list

    Runs the built-in checks (unmanic ignore file, allowed extensions,
    failure history, already-in-target-format) and then passes the
    running verdict through each 'library_management.file_test' plugin,
    which may append issues and flip the final decision.

    :return: tuple of (bool add_file_to_pending_tasks, list file_issues)
    """
    return_value = True
    file_issues = []

    # Init plugins
    plugin_executor = PluginExecutor()

    # Fetch the library-management file test plugins.
    # FIX: this previously fetched 'postprocessor.task_result' modules, which
    # did not match the 'library_management.file_test' runner type that is
    # tested and executed below.
    plugin_modules = plugin_executor.get_plugin_modules_by_type('library_management.file_test')

    if self.file_in_unmanic_ignore_lockfile():
        file_issues.append({
            'id':      'unmanicignore',
            'message': "File found in unmanic ignore file - '{}'".format(self.path),
        })
        return_value = False

    if not self.file_ends_in_allowed_search_extensions():
        file_issues.append({
            'id':      'extension',
            'message': "File suffix is not in allowed search extensions - '{}'".format(self.path),
        })
        return_value = False

    # Check if file has failed in history.
    if self.file_failed_in_history():
        file_issues.append({
            'id':      'blacklisted',
            'message': "File found already failed in history - '{}'".format(self.path),
        })
        return_value = False

    # Check if this file is already the correct format:
    if self.file_already_in_target_format():
        file_issues.append({
            'id':      'format',
            'message': "File is already in target format - '{}'".format(self.path),
        })
        return_value = False

    # Run tests against plugins
    for plugin_module in plugin_modules:
        data = {
            'path':                      self.path,
            'issues':                    file_issues,
            'add_file_to_pending_tasks': return_value,
        }

        # Test return data against schema and ensure there are no errors
        runner_errors = plugin_executor.test_plugin_runner(plugin_module.get('plugin_id'),
                                                           'library_management.file_test', data)
        if runner_errors:
            self._log(
                "Error while running library management file test '{}' on file '{}'".format(
                    plugin_module.get('plugin_id'), self.path),
                runner_errors, level="error")
            # Don't execute this runner. It failed
            continue

        # Run plugin and fetch return data
        plugin_runner = plugin_module.get("runner")
        try:
            plugin_runner(data)
        except Exception as e:
            self._log("Exception while carrying out plugin runner on library management file test '{}'".format(
                plugin_module.get('plugin_id')), message2=str(e), level="exception")
            continue

        # FIX: read the plugin's verdict back out of the mutated data dict.
        # Previously the runner result was discarded, so file-test plugins
        # could never veto or allow a file.
        return_value = data.get('add_file_to_pending_tasks', return_value)
        file_issues = data.get('issues', file_issues)

    return return_value, file_issues
def post_process_file(self):
    """
    Post-process the completed task.

    On task success: validates the cached output file, runs each
    'postprocessor.file_move' plugin to copy the cache file to its
    destination(s) and optionally remove the source, then runs each
    'postprocessor.task_result' plugin, and finally cleans up the task's
    cache directory. On failure: removes the cached file and returns.
    """
    # Check if the job was a success
    if not self.current_task.task.success:
        self._log("Task was marked as failed.", level='debug')
        self._log("Removing cached file", self.current_task.task.cache_path, level='debug')
        self.remove_current_task_cache_file()
        return
    # Ensure file is correct format
    self.current_task.task.success = self.validate_streams(self.current_task.task.cache_path)
    # Init plugins
    plugin_executor = PluginExecutor()
    # Read current task data
    # task_data = self.current_task.get_task_data()
    cache_path = self.current_task.get_cache_path()
    source_data = self.current_task.get_source_data()
    destination_data = self.current_task.get_destination_data()
    # Move file back to original folder and remove source
    file_move_processes_success = True
    # Create a list for filling with destination paths
    destination_files = []
    if self.current_task.task.success:
        # Run a postprocess file movement on the cache file for for each plugin that configures it
        plugin_modules = plugin_executor.get_plugin_modules_by_type('postprocessor.file_move')
        # Check if the source file needs to be remove by default (only if it does not match the destination file)
        remove_source_file = False
        if source_data['abspath'] != destination_data['abspath']:
            remove_source_file = True
        # Set initial data (some fields will be overwritten further down)
        # NOTE(review): this dict outlives the loop below — the post-loop
        # 'remove_source_file' check reads whatever the LAST plugin returned
        # (or these initial values when no plugin ran).
        data = {
            "source_data": None,
            'remove_source_file': remove_source_file,
            'copy_file': None,
            "file_in": None,
            "file_out": None,
        }
        for plugin_module in plugin_modules:
            # Always set source_data to the original file's source_data
            data["source_data"] = source_data
            # Always set copy_file to True
            data["copy_file"] = True
            # Always set file in to cache path
            data["file_in"] = cache_path
            # Always set file out to destination data absolute path
            data["file_out"] = destination_data.get('abspath')
            # Test return data against schema and ensure there are no errors
            errors = plugin_executor.test_plugin_runner(plugin_module.get('plugin_id'),
                                                        'postprocessor.file_move', data)
            if errors:
                self._log(
                    "Error while running postprocessor file movement '{}' on file '{}'"
                    .format(plugin_module.get('plugin_id'), cache_path), errors, level="error")
                # Don't execute this runner. It failed
                continue
            # Run plugin and fetch return data
            plugin_runner = plugin_module.get("runner")
            try:
                data = plugin_runner(data)
            except Exception as e:
                self._log(
                    "Exception while carrying out plugin runner on postprocessor file movement '{}'"
                    .format(plugin_module.get('plugin_id')), message2=str(e), level="exception")
                # Do not continue with this plugin module's loop
                continue
            if data.get('copy_file'):
                # Copy the file
                self._log("Copying file {} --> {}".format(data.get('file_in'), data.get('file_out')))
                shutil.copyfile(data.get('file_in'), data.get('file_out'))
                destination_files.append(data.get('file_out'))
                # Run another validation on the copied file to ensure it is still correct
                copy_valid = self.validate_streams(data.get('file_out'))
                if not copy_valid:
                    # Something went wrong during that file copy
                    self._log(
                        "Copy function failed during postprocessor file movement '{}' on file '{}'"
                        .format(plugin_module.get('plugin_id'), cache_path), level='warning')
                    file_move_processes_success = False
        # Check if the remove source flag is still True after all plugins have run. If so, we will remove the source file
        if data.get('remove_source_file'):
            # Only carry out a source removal if the whole postprocess was successful
            if file_move_processes_success:
                self._log("Removing source: {}".format(source_data['abspath']))
                os.remove(source_data['abspath'])
                # If we need to keep the filename history, do that here
                if self.settings.get_keep_filename_history():
                    dirname = os.path.dirname(source_data['abspath'])
                    self.keep_filename_history(dirname, destination_data["basename"], source_data["basename"])
            else:
                self._log(
                    "Keeping source file '{}'. Not all postprocessor file movement functions completed."
                    .format(source_data['abspath']), level="warning")
        if not file_move_processes_success:
            self._log(
                "Error while running postprocessor file movement on file '{}'. Not all postprocessor file movement functions completed."
                .format(cache_path), level="error")
    else:
        self._log("Encoded file failed post processing test '{}'".format(cache_path), level='warning')
        return
    # Run task success plugins
    plugin_modules = plugin_executor.get_plugin_modules_by_type('postprocessor.task_result')
    for plugin_module in plugin_modules:
        data = {
            "source_data": source_data,
            'task_processing_success': self.current_task.task.success,
            'file_move_processes_success': file_move_processes_success,
            'destination_files': destination_files,
        }
        # Test return data against schema and ensure there are no errors
        errors = plugin_executor.test_plugin_runner(plugin_module.get('plugin_id'),
                                                    'postprocessor.task_result', data)
        if errors:
            self._log(
                "Error while running postprocessor task result'{}' on file '{}'"
                .format(plugin_module.get('plugin_id'), cache_path), errors, level="error")
            # Don't execute this runner. It failed
            continue
        # Run plugin and fetch return data
        plugin_runner = plugin_module.get("runner")
        try:
            plugin_runner(data)
        except Exception as e:
            self._log(
                "Exception while carrying out plugin runner on postprocessor task result '{}'"
                .format(plugin_module.get('plugin_id')), message2=str(e), level="exception")
            continue
    # Cleanup cache files
    task_cache_directory = os.path.dirname(cache_path)
    # Guard on the directory name so we never sweep an unrelated directory
    if os.path.exists(task_cache_directory) and "unmanic_file_conversion" in task_cache_directory:
        for f in os.listdir(task_cache_directory):
            cache_file_path = os.path.join(task_cache_directory, f)
            # if not f.endswith(".bak"):
            #     continue
            self._log("Removing task cache directory file '{}'".format(cache_file_path))
            # Remove the cache file
            os.remove(cache_file_path)
        # Remove the directory
        self._log("Removing task cache directory '{}'".format(task_cache_directory))
        os.rmdir(task_cache_directory)
def process_item(self):
    """
    Convert the current task's source file by chaining it through every
    configured 'worker.process_item' plugin runner.

    Each runner's output ("file_out", a temporary "WORKING" file) becomes
    the next runner's input; on overall success the last WORKING file is
    moved to the task's cache path.

    :return: True if all conversion steps succeeded, else False
    """
    # Reset the ffmpeg class when a new item is received
    self.setup_ffmpeg()

    abspath = self.current_task.get_source_abspath()
    self._log("{} processing job - {}".format(self.name, abspath))

    # Then process the item for for each plugin that configures it
    from unmanic.libs.unplugins import PluginExecutor
    plugin_executor = PluginExecutor()
    plugin_modules = plugin_executor.get_plugin_modules_by_type('worker.process_item')

    # Process item in loop.
    # First process the item for for each plugin that configures it, then run the default Unmanic configuration
    task_cache_path = self.current_task.get_cache_path()
    file_in = abspath
    overall_success = True
    current_file_out = ""
    runner_count = 0
    for plugin_module in plugin_modules:
        runner_count += 1

        # Fetch file out details.
        # This creates a temp file labeled "WORKING" that will be moved to the cache_path on completion
        tmp_file_out = os.path.splitext(task_cache_path)
        file_out = current_file_out = "{}-{}-{}{}".format(tmp_file_out[0], "WORKING",
                                                          runner_count, tmp_file_out[1])

        # Fetch initial file probe
        file_probe = self.ffmpeg.file_probe(file_in)
        # Create args from
        ffmpeg_args = self.ffmpeg.generate_ffmpeg_args(file_probe, file_in, file_out)
        data = {
            "exec_ffmpeg":  True,
            "file_probe":   file_probe,
            "ffmpeg_args":  ffmpeg_args,
            "file_in":      file_in,
            "file_out":     file_out,
        }

        # Test return data against schema and ensure there are no errors
        errors = plugin_executor.test_plugin_runner(plugin_module.get('plugin_id'),
                                                    'worker.process_item', data)
        if errors:
            self._log(
                "Error while running worker process '{}' on file '{}'".format(
                    plugin_module.get('plugin_id'), abspath),
                errors, level="error")
            # Dont execute this runner. It failed
            continue

        # Run plugin and fetch return data
        plugin_runner = plugin_module.get("runner")
        try:
            data = plugin_runner(data)
        except Exception as e:
            self._log(
                "Exception while carrying out plugin runner on worker process '{}'".format(
                    plugin_module.get('plugin_id')),
                message2=str(e), level="exception")
            # Skip this plugin module's loop
            continue

        self._log("Worker process '{}' file in".format(plugin_module.get('plugin_id')),
                  data.get("file_in"), level='debug')
        self._log("Worker process '{}' file out".format(plugin_module.get('plugin_id')),
                  data.get("file_out"), level='debug')

        # Only run the conversion process if "exec_ffmpeg" is True
        if data.get("exec_ffmpeg"):
            # Run conversion process
            success = self.convert_file(data, plugin_module.get('plugin_id'))
            if success:
                # If file conversion was successful
                self._log("Successfully ran worker process '{}' on file '{}'".format(
                    plugin_module.get('plugin_id'), abspath))
                # Set the file in as the file out for the next loop
                file_in = file_out
            else:
                # File conversion failed for this runner — mark the whole task failed
                # (comment fixed: this branch was previously mislabelled as the success path)
                self._log(
                    "Error while running worker process '{}' on file '{}'".format(
                        plugin_module.get('plugin_id'), abspath),
                    level="error")
                overall_success = False
        else:
            # FIX: the plugin id was never substituted into the message — the
            # literal '{}' placeholder was logged. Now formatted properly.
            self._log("Worker process '{}' set to not run the FFMPEG command.".format(
                plugin_module.get('plugin_id')), level='debug')
            # NOTE(review): when exec_ffmpeg is False this runner's WORKING file
            # is never created, yet current_file_out still points at it — confirm
            # the final shutil.move below cannot hit a missing file in that case.

    if overall_success:
        # If file conversion was successful, we will get here
        self._log("Successfully converted file '{}'".format(abspath))
        # Move file to original cache path
        # NOTE(review): if no 'worker.process_item' plugins are configured,
        # current_file_out is still "" here and this move will raise — confirm
        # at least one runner is always installed.
        shutil.move(current_file_out, task_cache_path)
        return True
    self._log("Failed to convert file '{}'".format(abspath), level='warning')
    return False