def initialize(self, **kwargs):
    self.name = 'pending_api'
    self.config = config.Config()
    self.params = kwargs.get("params")
    udq = UnmanicDataQueues()
    self.unmanic_data_queues = udq.get_unmanic_data_queues()
def generate_log_files_zip():
    settings = config.Config()
    cache_path = settings.get_cache_path()
    logs_path = settings.get_log_path()
    # Ensure the cache path exists
    if not os.path.exists(cache_path):
        os.makedirs(cache_path)
    # Ensure the logs path exists
    if not os.path.exists(logs_path):
        os.makedirs(logs_path)
    # Create zip of all log files
    out_file = os.path.join(cache_path, 'UnmanicLogs.zip')
    with zipfile.ZipFile(out_file, 'w') as zip_object:
        # Iterate over all the files in the logs directory
        for dir_name, subdirectories, filenames in os.walk(logs_path):
            for filename in filenames:
                # Create complete filepath of file in directory
                logfile_path = os.path.join(dir_name, filename)
                # Add file to zip
                zip_object.write(logfile_path, os.path.basename(logfile_path))
    return out_file
def run(self):
    # Init the configuration
    settings = config.Config()

    # Init the database
    self.db_connection = init_db(settings.get_config_path())

    # Start all threads
    self.start_threads(settings)

    # Watch for the term signal
    if os.name == "nt":
        while self.run_threads:
            try:
                time.sleep(1)
            except (KeyboardInterrupt, SystemExit) as e:
                break
    else:
        signal.signal(signal.SIGINT, self.sig_handle)
        signal.signal(signal.SIGTERM, self.sig_handle)
        while self.run_threads:
            signal.pause()

    # Received term signal. Stop everything
    self.stop_threads()
    self.db_connection.stop()
    while not self.db_connection.is_stopped():
        time.sleep(0.1)
        continue
    main_logger.info("Exit Unmanic")
def set_worker_count_based_on_remote_installation_links(self):
    settings = config.Config()
    # Get local task count as int
    task_handler = task.Task()
    local_task_count = int(task_handler.get_total_task_list_count())
    # Get target count
    target_count = int(settings.get_distributed_worker_count_target())
    # # TODO: Check if we should be aiming for one less than the target
    # if target_count > 1:
    #     target_count -= 1
    linked_configs = []
    for local_config in settings.get_remote_installations():
        if local_config.get('enable_distributed_worker_count'):
            linked_configs.append(local_config)
    # If no remote links are configured, then return here
    if not linked_configs:
        return

    # There is a link config with distributed worker counts enabled
    self._log("Syncing distributed worker count for this installation")

    # Get the total count of pending tasks across all linked_configs
    total_tasks = local_task_count
    for linked_config in linked_configs:
        total_tasks += int(linked_config.get('task_count', 0))

    # From the counts fetched from all linked_configs, balance out the target count (including this installation)
    allocated_worker_count = 0
    for linked_config in linked_configs:
        if linked_config.get('task_count', 0) == 0:
            continue
        allocated_worker_count += round((int(linked_config.get('task_count', 0)) / total_tasks) * target_count)

    # Calculate worker count for the local installation
    target_workers_for_this_installation = 0
    if local_task_count > 0:
        target_workers_for_this_installation = round((local_task_count / total_tasks) * target_count)

    # If the total allocated worker count is now above our target, set this installation back to 0
    if allocated_worker_count > target_count:
        target_workers_for_this_installation = 0

    # Every 10-12 minutes (make it random), give this installation at least 1 worker if it has pending tasks.
    # This should cause the pending task queue to sit idle if there is only one task in the queue, and it
    # provides rotation of workers when the pending task queues are close to the same size.
    # EG. If time now (seconds) > time last checked (seconds) + 10 mins (600 seconds) + random seconds within 2 mins
    time_now = time.time()
    time_to_next_force_local_worker = int(self.force_local_worker_timer + 600 + random.randrange(120))
    if time_now > time_to_next_force_local_worker:
        if (local_task_count > 1) and (target_workers_for_this_installation < 1):
            target_workers_for_this_installation = 1
        self.force_local_worker_timer = time_now

    self._log("Configuring worker count as {} for this installation".format(target_workers_for_this_installation))
    settings.set_config_item('number_of_workers', target_workers_for_this_installation, save_settings=True)
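# A worked example of the balancing arithmetic above, using hypothetical numbers
# (the values below are illustrative only and are not read from any real installation).
# Local installation: 10 pending tasks; two linked installations: 30 and 10 pending
# tasks; distributed worker count target: 4.
local_task_count = 10
linked_task_counts = [30, 10]
target_count = 4

total_tasks = local_task_count + sum(linked_task_counts)  # 50
allocated_worker_count = sum(
    round((count / total_tasks) * target_count) for count in linked_task_counts
)  # round(2.4) + round(0.8) = 2 + 1 = 3
target_workers_for_this_installation = round((local_task_count / total_tasks) * target_count)  # round(0.8) = 1
# allocated_worker_count (3) does not exceed target_count (4), so this installation keeps 1 worker.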
def __init__(self):
    self.name = 'Task'
    self.task = None
    self.task_dict = None
    self.settings = config.Config()
    unmanic_logging = unlogger.UnmanicLogger.__call__()
    self.logger = unmanic_logging.get_logger(__class__.__name__)
    self.statistics = {}
    self.errors = []
def __init__(self, data_queues, task_queue):
    super(PostProcessor, self).__init__(name='PostProcessor')
    self.logger = data_queues["logging"].get_logger(self.name)
    self.data_queues = data_queues
    self.settings = config.Config()
    self.task_queue = task_queue
    self.abort_flag = threading.Event()
    self.current_task = None
    self.ffmpeg = None
    self.abort_flag.clear()
def __init__(self, *args, **kwargs):
    self.name = 'UnmanicWebsocketHandler'
    self.config = config.Config()
    self.server_id = str(uuid.uuid4())
    udq = UnmanicDataQueues()
    urt = UnmanicRunningTreads()
    self.data_queues = udq.get_unmanic_data_queues()
    self.foreman = urt.get_unmanic_running_thread('foreman')
    self.session = session.Session()
    super(UnmanicWebsocketHandler, self).__init__(*args, **kwargs)
def __init__(self, name, files_to_test, files_to_process, status_updates, library_id):
    super(FileTesterThread, self).__init__(name=name)
    self.settings = config.Config()
    self.logger = None
    self.files_to_test = files_to_test
    self.files_to_process = files_to_process
    self.library_id = library_id
    self.status_updates = status_updates
    self.abort_flag = threading.Event()
    self.abort_flag.clear()
def __init__(self, data_queues, task_queue):
    super(TaskHandler, self).__init__(name='TaskHandler')
    self.settings = config.Config()
    self.data_queues = data_queues
    self.logger = data_queues["logging"].get_logger(self.name)
    self.task_queue = task_queue
    self.inotifytasks = data_queues["inotifytasks"]
    self.scheduledtasks = data_queues["scheduledtasks"]
    self.abort_flag = threading.Event()
    self.abort_flag.clear()
    # Remove all items from the task list to start with
    self.clear_tasks_on_startup()
def __init__(self, library_id: int):
    self.settings = config.Config()
    unmanic_logging = unlogger.UnmanicLogger.__call__()
    self.logger = unmanic_logging.get_logger(__class__.__name__)
    # Init plugins
    self.library_id = library_id
    self.plugin_handler = PluginsHandler()
    self.plugin_modules = self.plugin_handler.get_enabled_plugin_modules_by_type(
        'library_management.file_test', library_id=library_id)
    # List of failed paths
    self.failed_paths = []
def __init__(self, data_queues):
    super(EventMonitorManager, self).__init__(name='EventMonitorManager')
    self.name = "EventMonitorManager"
    self.data_queues = data_queues
    self.settings = config.Config()
    self.logger = None
    # Create an event queue
    self.files_to_test = queue.Queue()
    self.abort_flag = threading.Event()
    self.abort_flag.clear()
    self.event_observer_thread = None
    self.event_observer_threads = []
def get_profile_directory(self):
    """
    Return the absolute path to the Plugin's profile directory.
    This is where Plugin settings are saved and where all mutable data
    for the Plugin should be stored.

    :return:
    """
    settings = config.Config()
    userdata_path = settings.get_userdata_path()
    plugin_directory = self.get_plugin_directory()
    plugin_id = os.path.basename(plugin_directory)
    profile_directory = os.path.join(userdata_path, plugin_id)
    if not os.path.exists(profile_directory):
        os.makedirs(profile_directory)
    return profile_directory
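# A minimal sketch of how the profile directory path is derived, using placeholder
# paths ('/home/user/.unmanic/...' values are hypothetical, not read from a real install).
import os

userdata_path = '/home/user/.unmanic/userdata'                    # settings.get_userdata_path()
plugin_directory = '/home/user/.unmanic/plugins/example_plugin'   # self.get_plugin_directory()
plugin_id = os.path.basename(plugin_directory)                    # 'example_plugin'
profile_directory = os.path.join(userdata_path, plugin_id)
print(profile_directory)  # /home/user/.unmanic/userdata/example_plugin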
def __init__(self, data_queues):
    super(LibraryScannerManager, self).__init__(name='LibraryScannerManager')
    self.interval = 0
    self.firstrun = True
    self.data_queues = data_queues
    self.settings = config.Config()
    self.logger = None
    self.scheduledtasks = data_queues["scheduledtasks"]
    self.library_scanner_triggers = data_queues["library_scanner_triggers"]
    self.abort_flag = threading.Event()
    self.abort_flag.clear()
    self.scheduler = schedule.Scheduler()
    self.file_test_managers = {}
    self.files_to_test = queue.Queue()
    self.files_to_process = queue.Queue()
def main():
    parser = argparse.ArgumentParser(description='Unmanic')
    parser.add_argument('--version', action='version',
                        version='%(prog)s {version}'.format(version=metadata.read_version_string('long')))
    parser.add_argument('--manage_plugins', action='store_true',
                        help='manage installed plugins')
    parser.add_argument('--dev', action='store_true',
                        help='enable developer mode')
    parser.add_argument('--port', nargs='?',
                        help='specify the port to run the webserver on')
    # parser.add_argument('--unmanic_path', nargs='?',
    #                     help='specify the unmanic configuration path instead of ~/.unmanic')
    args = parser.parse_args()

    # Configure application from args
    settings = config.Config(port=args.port, unmanic_path=None)

    if args.manage_plugins:
        # Init the DB connection
        db_connection = init_db(settings.get_config_path())

        # Run the plugin manager CLI
        from unmanic.libs.unplugins.pluginscli import PluginsCLI
        plugin_cli = PluginsCLI()
        plugin_cli.run()

        # Stop the DB connection
        db_connection.stop()
        while not db_connection.is_stopped():
            time.sleep(0.1)
            continue
    else:
        # Run the main Unmanic service
        service = Service()
        service.developer = args.dev
        service.run()
def __init__(self, unmanic_data_queues, foreman, developer):
    super(UIServer, self).__init__(name='UIServer')
    self.config = config.Config()
    self.developer = developer
    self.data_queues = unmanic_data_queues
    self.logger = unmanic_data_queues["logging"].get_logger(self.name)
    self.inotifytasks = unmanic_data_queues["inotifytasks"]
    # TODO: Move all logic out of template calling to foreman.
    #       Create methods here to handle the calls and rename to foreman
    self.foreman = foreman
    self.set_logging()

    # Add a singleton for handling the data queues for sending data to unmanic's other processes
    udq = UnmanicDataQueues()
    udq.set_unmanic_data_queues(unmanic_data_queues)
    urt = UnmanicRunningTreads()
    urt.set_unmanic_running_threads(
        {
            'foreman': foreman
        }
    )
def manage_completed_tasks(self):
    settings = config.Config()
    # Only run if configured to auto manage completed tasks
    if not settings.get_auto_manage_completed_tasks():
        return

    self._log("Running completed task cleanup for this installation")

    max_age_in_days = settings.get_max_age_of_completed_tasks()
    date_x_days_ago = datetime.now() - timedelta(days=int(max_age_in_days))
    before_time = date_x_days_ago.timestamp()

    task_success = True
    inc_status = 'successfully'
    if not settings.get_always_keep_failed_tasks():
        inc_status = 'successfully or failed'
        task_success = None

    # Fetch completed tasks
    from unmanic.libs import history
    history_logging = history.History()
    count = history_logging.get_historic_task_list_filtered_and_sorted(task_success=task_success,
                                                                       before_time=before_time).count()
    results = history_logging.get_historic_task_list_filtered_and_sorted(task_success=task_success,
                                                                         before_time=before_time)
    if count == 0:
        self._log("Found no {} completed tasks older than {} days".format(inc_status, max_age_in_days))
        return
    self._log(
        "Found {} {} completed tasks older than {} days that should be removed".format(count, inc_status, max_age_in_days))
    if not history_logging.delete_historic_tasks_recursively(results):
        self._log("Failed to delete {} {} completed tasks".format(count, inc_status), level='error')
        return
    self._log("Deleted {} {} completed tasks".format(count, inc_status))
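# A standalone sketch of the age cutoff used above; 30 days is an illustrative
# placeholder for whatever settings.get_max_age_of_completed_tasks() returns.
from datetime import datetime, timedelta

max_age_in_days = 30
before_time = (datetime.now() - timedelta(days=max_age_in_days)).timestamp()
# Any completed task whose finish timestamp is lower than 'before_time' is older
# than 30 days and is therefore a candidate for removal.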
def setup_class(self):
    """
    Setup the class state for pytest

    :return:
    """
    self.project_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    self.data_queues = data_queues.data_queues
    self.scheduledtasks = self.data_queues["scheduledtasks"]
    self.inotifytasks = self.data_queues["inotifytasks"]
    self.progress_reports = self.data_queues["progress_reports"]
    self.task_queue = mock_jobqueue_class.MockJobQueue()
    self.task_handler = None
    # Create temp config path
    config_path = tempfile.mkdtemp(prefix='unmanic_tests_')
    # Create connection to a test DB
    from unmanic.libs import unmodels
    app_dir = os.path.dirname(os.path.abspath(__file__))
    database_settings = {
        "TYPE":           "SQLITE",
        "FILE":           ':memory:',
        "MIGRATIONS_DIR": os.path.join(app_dir, 'migrations'),
    }
    from unmanic.libs.unmodels.lib import Database
    self.db_connection = Database.select_database(database_settings)
    # Create required tables
    self.db_connection.create_tables([Tasks])
    # Import config
    from unmanic import config
    self.settings = config.Config(config_path=config_path)
    self.settings.set_config_item('debugging', True, save_settings=False)
def register_unmanic(self, force=False):
    """
    Register Unmanic with the site.

    This sends information about the system that Unmanic is running on.
    It also sends a unique ID.

    Based on the returned information, this will set the session level.

    Returns the success status.

    :param force:
    :return:
    """
    # First check if the current session is still valid
    if not force and self.__check_session_valid():
        return True

    # Set now as the last time this was run (before it is actually run)
    self.last_check = time.time()

    # Update the session
    settings = config.Config()

    try:
        # Fetch the installation data prior to running a session update
        self.__fetch_installation_data()

        # Build post data
        from unmanic.libs.system import System
        system = System()
        system_info = system.info()
        platform_info = system_info.get("platform", None)
        if platform_info:
            platform_info = " * ".join(platform_info)
        post_data = {
            "uuid":           self.get_installation_uuid(),
            "version":        settings.read_version(),
            "python_version": system_info.get("python", ''),
            "system":         {
                "platform": platform_info,
                "devices":  system_info.get("devices", {}),
            }
        }

        # Register Unmanic
        registration_response = self.api_post(1, 'unmanic-register', post_data)

        # Save data
        if registration_response and registration_response.get("success"):
            registration_data = registration_response.get("data")

            # Set level from response data (default back to 0)
            self.level = int(registration_data.get("level", 0))

            # Get user data from response data
            user_data = registration_data.get('user')
            if user_data:
                # Set name from user data
                name = user_data.get("name")
                self.name = name if name else 'Valued Supporter'

                # Set avatar from user data
                picture_uri = user_data.get("picture_uri")
                self.picture_uri = picture_uri if picture_uri else '/assets/global/img/avatar/avatar_placeholder.png'

                # Set email from user data
                email = user_data.get("email")
                self.email = email if email else ''

            self.__update_created_timestamp()

            # Persist session in DB
            self.__store_installation_data()
            return True

        # Allow an extension for the session for 7 days without an internet connection
        if self.__created_older_than_x_days(days=7):
            # Reset the session - Unmanic should phone home once every 7 days
            self._reset_session_installation_data()
        return False
    except Exception as e:
        self._log("Exception while registering Unmanic.", str(e), level="debug")
        if self.__check_session_valid():
            # If the session is still valid, just return True. Perhaps the internet is down and it timed out?
            return True
        return False
def __init__(self):
    self.name = 'History'
    self.settings = config.Config()
def __init__(self, app, **kwargs):
    self.app = app
    self.config = config.Config()
def initialize(self, **kwargs):
    self.name = 'history_api'
    self.config = config.Config()
    self.params = kwargs.get("params")
def get_all_worker_groups():
    """
    Return a list of all worker groups

    :return:
    """
    # Fetch all worker groups from DB
    configured_worker_groups = WorkerGroups.select()
    if not configured_worker_groups:
        default_worker_group = {
            'id':                     1,
            'locked':                 False,
            'name':                   generate_random_worker_group_name(),
            'number_of_workers':      0,
            'tags':                   [],
            'worker_event_schedules': [],
        }
        # Migrate default worker data from the legacy settings
        settings = config.Config()
        if settings.number_of_workers is not None:
            default_worker_group['number_of_workers'] = settings.number_of_workers
        if settings.worker_event_schedules is not None:
            default_worker_group['worker_event_schedules'] = settings.worker_event_schedules
        # Disable the legacy settings
        settings.set_config_item('number_of_workers', None, save_settings=True)
        settings.set_config_item('worker_event_schedules', None, save_settings=True)
        WorkerGroup.create(default_worker_group)
        return [default_worker_group]

    # Loop over results
    worker_groups = []
    for group in configured_worker_groups:
        group_config = {
            'id':                     group.id,
            'locked':                 group.locked,
            'name':                   group.name,
            'number_of_workers':      group.number_of_workers,
            'worker_event_schedules': [],
            'tags':                   [],
        }
        # Append tags
        for tag in group.tags.order_by(Tags.name):
            group_config['tags'].append(tag.name)
        # Append worker_event_schedules
        for event_schedule in group.worker_schedules:
            group_config['worker_event_schedules'].append({
                'repetition':            event_schedule.repetition,
                'schedule_task':         event_schedule.schedule_task,
                'schedule_time':         event_schedule.schedule_time,
                'schedule_worker_count': event_schedule.schedule_worker_count,
            })
        worker_groups.append(group_config)

    # Return the list of worker groups
    return worker_groups
def __init__(self, *args, **kwargs):
    self.settings = config.Config()
    unmanic_logging = unlogger.UnmanicLogger.__call__()
    self.logger = unmanic_logging.get_logger(__class__.__name__)
def initialize(self, **kwargs): self.session = session.Session() self.params = kwargs.get("params") self.config = config.Config() self.frontend_messages = FrontendPushMessages()
def initialize(self, **kwargs): self.session = session.Session() self.params = kwargs.get("params") udq = UnmanicDataQueues() self.unmanic_data_queues = udq.get_unmanic_data_queues() self.config = config.Config()