def write_plugin_data_to_db(plugin, plugin_directory):
    """
    Persist an installed plugin's metadata to the Plugins table.

    If a row already exists for this plugin ID, its metadata is refreshed;
    otherwise a new row is inserted in a disabled state.

    :param plugin: dict of plugin metadata ("id", "name", "author", "version",
        "tags", "description", "icon")
    :param plugin_directory: path where the plugin's files were installed
    :return: True
    """
    plugin_id = plugin.get("id")
    plugin_data = {
        Plugins.plugin_id:   plugin_id,
        Plugins.name:        plugin.get("name"),
        Plugins.author:      plugin.get("author"),
        Plugins.version:     plugin.get("version"),
        Plugins.tags:        plugin.get("tags"),
        Plugins.description: plugin.get("description"),
        Plugins.icon:        plugin.get("icon"),
        Plugins.local_path:  plugin_directory,
    }
    existing_entry = Plugins.get_or_none(plugin_id=plugin_id)
    if existing_entry is None:
        # Fresh install: plugins start disabled. This will help to prevent
        # issues with broken plugins until the user explicitly enables them.
        plugin_data[Plugins.enabled] = False
        with db.atomic():
            Plugins.insert(plugin_data).execute()
    else:
        # Plugin already recorded: refresh the stored metadata.
        with db.atomic():
            Plugins.update(plugin_data).where(Plugins.plugin_id == plugin_id).execute()
    return True
def set_plugin_repos(repo_list):
    """
    Replace all configured plugin repos with the given list.

    :param repo_list: iterable of repo path strings
    :return: None
    """
    data = [{"path": repo_path} for repo_path in repo_list]
    # Run the delete and the insert in a single transaction so a failed
    # insert rolls back and does not leave the repo table emptied.
    with db.atomic():
        # Remove all existing repos
        PluginRepos.delete().execute()
        # Add new repos (skip the insert entirely for an empty list)
        if data:
            PluginRepos.insert_many(data).execute()
def save_task_history(self, task_data):
    """
    Record a task's data and state to the database.

    Creates the historic task row plus its related probe entries and ffmpeg
    log entry inside a single transaction; any failure rolls the whole
    transaction back and returns False.

    :param task_data: dict of task fields; must include a non-empty
        'task_dump' dict snapshot of all information pertaining to the task
    :return: True on success, False on any failure
    """
    try:
        with db.atomic():

            # Create the new historical task entry
            new_historic_task = self.create_historic_task_entry(task_data)

            # TODO: Create a snapshot of the current configuration of the application into HistoricTaskSettings

            # Ensure a dump of the task was passed through with the task data param
            # This dump is a snapshot of all information pertaining to the task
            task_dump = task_data.get('task_dump', {})
            if not task_dump:
                # Log the full param for debugging, then abort (rolls back the transaction)
                self._log('Passed param dict', json.dumps(task_data), level="debug")
                raise Exception('Function param missing task data dump')

            # Create an entry of the data from the source ffprobe
            self.create_historic_task_probe_entry('source', new_historic_task, task_dump)

            # Create an entry of the data from the destination ffprobe
            self.create_historic_task_probe_entry('destination', new_historic_task, task_dump)

            # Create an entry of the ffmpeg log for this task
            self.create_historic_task_ffmpeg_log_entry(new_historic_task, task_dump)

            return True
    except Exception as error:
        self._log("Failed to save historic task entry to database.", error, level="exception")
        return False
def disable_plugin_by_db_table_id(self, plugin_table_ids):
    """
    Disable the plugins matching the given Plugins table IDs.

    After issuing the update, the records are re-read to confirm each one
    is actually disabled.

    :param plugin_table_ids: list of Plugins table primary key values
    :return: True when every matching plugin is now disabled, else False
    """
    self._log("Disable plugins '{}'".format(plugin_table_ids), level='debug')
    # Disable the matching entries in the table
    with db.atomic():
        Plugins.update(enabled=False).where(Plugins.id.in_(plugin_table_ids)).execute()
    # Re-fetch the records and verify the update took effect on each one
    for plugin_record in self.get_plugin_list_filtered_and_sorted(id_list=plugin_table_ids):
        if plugin_record.get('enabled'):
            self._log("Failed to disable plugin '{}'".format(plugin_record.get('plugin_id')), level='debug')
            return False
    return True
def get_installation_uuid(self):
    """
    Returns the installation UUID as a string.
    If it does not yet exist, it will create one.

    :return: the installation UUID string
    """
    if not self.uuid:
        # Fetch installation
        from unmanic.libs.unmodels import installation
        db_installation = installation.Installation()
        try:
            # Fetch a single row (get() will raise DoesNotExist exception if no results are found)
            current_installation = db_installation.select().limit(1).get()
        except installation.Installation.DoesNotExist:
            # Only the "no rows yet" case creates a new record; any other
            # database error propagates rather than being silently swallowed.
            # Create settings (defaults will be applied)
            self._log("Unmanic session does not yet exist... Creating.", level="debug")
            with db.atomic():
                current_installation = db_installation.create()
        self.uuid = str(current_installation.uuid)
    return self.uuid
def uninstall_plugins_by_db_table_id(self, plugin_table_ids):
    """
    Uninstall the plugins matching the given Plugins table IDs.

    Removes each plugin's files from disk, then deletes the matching rows
    from the Plugins table in one transaction.

    :param plugin_table_ids: list of Plugins table primary key values
    :return: True when the DB rows were deleted, else False
    """
    self._log("Uninstall plugins '{}'".format(plugin_table_ids), level='debug')

    # Fetch records
    records_by_id = self.get_plugin_list_filtered_and_sorted(id_list=plugin_table_ids)

    # Remove each plugin from disk
    for record in records_by_id:
        plugin_directory = self.get_plugin_path(record.get('plugin_id'))
        self._log("Removing plugin files from disk '{}'".format(plugin_directory), level='debug')
        # Guard against an already-missing directory so one absent path does
        # not raise and abort the uninstall of the remaining plugins (and
        # leave their DB rows behind).
        if os.path.exists(plugin_directory):
            shutil.rmtree(plugin_directory)

    # Delete by ID in DB
    with db.atomic():
        if not Plugins.delete().where(Plugins.id.in_(plugin_table_ids)).execute():
            return False

    return True
def migrate_old_beta_data(self):
    """
    Temporary function to migrate old JSON data to database
    TODO: Remove this function post release. It will not be required.

    Reads the legacy 'history.json' log plus the per-job dumps under
    'completed_job_details', saves each entry through save_task_history(),
    and deletes the migrated files once done.

    :return: None
    """
    self._log("migrate_old_beta_data function is temporary. To be removed post release.", level="warning")

    # Get paths to old historical json files. These are needed for the cleanup
    if not os.path.exists(self.settings.get_config_path()):
        os.makedirs(self.settings.get_config_path())
    history_file = os.path.join(self.settings.get_config_path(), 'history.json')
    completed_job_details_dir = os.path.join(self.settings.get_config_path(), 'completed_job_details')

    # Check if we need to execute this migration
    if not os.path.exists(history_file):
        # Migration has already run. no need to continue
        self._log("No job history migration required. No history.json file exists.", level="debug")
        return

    # Read current history log and migrate each entry
    history_log = self.read_history_log()
    for historical_job in history_log:
        # Fetch completed job data (if it exists)
        try:
            completed_job_data = self.read_completed_job_data(historical_job['job_id'])
        except Exception as e:
            # NOTE(review): this string was split across lines by extraction
            # garbling; reconstructed as a single literal.
            self._log("Missing critical data in completed_job_data JSON dump. Ignore this record.", str(e),
                      level="debug")
            continue

        # No completed job data exists for this job
        if not completed_job_data:
            continue

        # Append ffmpeg_log to completed_job_data
        completed_job_data['ffmpeg_log'] = []

        # Set path of job details file (to be deleted post migration)
        job_details_file = os.path.join(completed_job_details_dir, '{}.json'.format(historical_job['job_id']))

        # Create new format dictionary from job data
        task_data = {
            'task_label':          historical_job['description'],
            'task_success':        historical_job['success'],
            'start_time':          completed_job_data['statistics']['start_time'],
            'finish_time':         completed_job_data['statistics']['finish_time'],
            'processed_by_worker': completed_job_data['statistics']['processed_by_worker'],
            'task_dump':           completed_job_data,
        }
        try:
            # Save to DB and remove the source file together; if either step
            # fails the transaction is rolled back and the record is skipped.
            with db.atomic():
                result = self.save_task_history(task_data)
                if not result:
                    raise Exception('Failed to migrate historical file data to database')
                # Remove json file
                os.remove(job_details_file)
        except Exception as error:
            self._log("Failed to save historic task entry to database.", error, level="error")
            continue

        # Success
        self._log("Migrated historical task to DB:", historical_job['abspath'], level="info")

    # If completed_job_details_dir is empty, delete it
    files = os.listdir(completed_job_details_dir)
    if len(files) == 0:
        os.rmdir(completed_job_details_dir)
    os.remove(history_file)