def create_task_from_path(self, *args, **kwargs):
    """
    Generate a Task object from a pathname

    Reads the source path from the JSON request body ("path" key) and
    writes a JSON {"success": bool} response to the client.

    :param pathname:
    :return:
    """
    request_dict = json.loads(self.request.body)

    # Fetch the abspath name
    abspath = os.path.abspath(request_dict.get("path"))

    # Ensure path exists
    if not os.path.exists(abspath):
        self.write(json.dumps({"success": False}))
        return

    # Create a new task
    new_task = task.Task(tornado.log.app_log)

    # Run a probe on the file for current data
    source_data = common.fetch_file_data_by_path(abspath)

    if not new_task.create_task_by_absolute_path(abspath, self.config, source_data):
        # If file exists in task queue already this will return false.
        # Do not carry on.
        self.write(json.dumps({"success": False}))
        return

    self.write(json.dumps({"success": True}))
def fetch_tasks_status(pending_task_ids):
    """
    Fetch the status of a number of pending remote tasks given their table IDs

    :param pending_task_ids:
    :return:
    """
    # Fetch the matching task records
    task_handler = task.Task()
    remote_pending_tasks = task_handler.get_task_list_filtered_and_sorted(id_list=pending_task_ids)

    # Reduce each record to the fields required by callers
    return [
        {
            'id': pending_task['id'],
            'abspath': pending_task['abspath'],
            'priority': pending_task['priority'],
            'type': pending_task['type'],
            'status': pending_task['status'],
        }
        for pending_task in remote_pending_tasks
    ]
def set_worker_count_based_on_remote_installation_links(self):
    """
    Balance this installation's worker count against linked remote installations.

    Reads the pending task counts of all remote installations that have
    'enable_distributed_worker_count' set, then allocates the configured
    target worker count proportionally to each installation's share of the
    total pending tasks. The resulting local worker count is persisted via
    'number_of_workers'. Does nothing if no remote links are configured.
    """
    settings = config.Config()

    # Get local task count as int
    task_handler = task.Task()
    local_task_count = int(task_handler.get_total_task_list_count())

    # Get target count
    target_count = int(settings.get_distributed_worker_count_target())
    # # TODO: Check if we should be aiming for one less than the target
    # if target_count > 1:
    #     target_count -= 1

    # Collect only the remote installations participating in distributed worker counts
    linked_configs = []
    for local_config in settings.get_remote_installations():
        if local_config.get('enable_distributed_worker_count'):
            linked_configs.append(local_config)

    # If no remote links are configured, then return here
    if not linked_configs:
        return

    # There is a link config with distributed worker counts enabled
    self._log("Syncing distributed worker count for this installation")

    # Get total tasks count of pending tasks across all linked_configs
    total_tasks = local_task_count
    for linked_config in linked_configs:
        total_tasks += int(linked_config.get('task_count', 0))

    # From the counts fetched from all linked_configs, balance out the target count (including this installation)
    # NOTE: installations with zero pending tasks are excluded from the allocation
    allocated_worker_count = 0
    for linked_config in linked_configs:
        if linked_config.get('task_count', 0) == 0:
            continue
        allocated_worker_count += round((int(linked_config.get('task_count', 0)) / total_tasks) * target_count)

    # Calculate worker count for local
    target_workers_for_this_installation = 0
    if local_task_count > 0:
        target_workers_for_this_installation = round((local_task_count / total_tasks) * target_count)

    # If the total allocated worker count is now above our target, set this installation back to 0
    if allocated_worker_count > target_count:
        target_workers_for_this_installation = 0

    # Every 10-12 minutes (make it random), give this installation at least 1 worker if it has pending tasks.
    # This should cause the pending task queue to sit idle if there is only one task in the queue and it will provide
    # rotation of workers when the pending task queue is close to the same.
    # EG. If time now (seconds) > time last checked (seconds) + 10mins (600 seconds) + random seconds within 2mins
    time_now = time.time()
    time_to_next_force_local_worker = int(self.force_local_worker_timer + 600 + random.randrange(120))
    if time_now > time_to_next_force_local_worker:
        if (local_task_count > 1) and (target_workers_for_this_installation < 1):
            target_workers_for_this_installation = 1
        self.force_local_worker_timer = time_now

    self._log("Configuring worker count as {} for this installation".format(target_workers_for_this_installation))
    settings.set_config_item('number_of_workers', target_workers_for_this_installation, save_settings=True)
def remove_pending_tasks(pending_task_ids):
    """
    Removes a list of pending tasks

    :param pending_task_ids:
    :return:
    """
    # Delete all matching task records (and any children) by their IDs
    return task.Task().delete_tasks_recursively(id_list=pending_task_ids)
def delete_pending_tasks(self, pending_task_ids):
    """
    Deletes a list of pending tasks

    :param pending_task_ids:
    :return:
    """
    # Delete all matching task records (and any children) by their IDs
    return task.Task().delete_tasks_recursively(id_list=pending_task_ids)
def reorder_pending_tasks(pending_task_ids, direction="top"):
    """
    Moves a list of pending tasks to either the top or the bottom of the list
    depending on the provided direction.

    :param pending_task_ids:
    :param direction:
    :return:
    """
    handler = task.Task()
    return handler.reorder_tasks(pending_task_ids, direction)
def delete_pending_tasks(self, pending_task_ids):
    """
    Deletes a list of pending tasks

    :param pending_task_ids:
    :return:
    """
    logger = self.unmanic_data_queues.get('logging').get_logger("UIServer")
    # Delete all matching task records (and any children) by their IDs
    return task.Task(logger).delete_tasks_recursively(id_list=pending_task_ids)
def reorder_pending_tasks(self, pending_task_ids, direction="top"):
    """
    Moves a list of pending tasks to either the top or the bottom of the list
    depending on the provided direction.

    :param pending_task_ids:
    :param direction:
    :return:
    """
    logger = self.unmanic_data_queues.get('logging').get_logger("UIServer")
    return task.Task(logger).reorder_tasks(pending_task_ids, direction)
def setup_test_task(self, pathname):
    """Create a test Task for the given file and prime its destination and cache data."""
    from unmanic.libs import common

    abspath = os.path.abspath(pathname)

    # Build the task with the UI logger and populate it from the file on disk
    self.test_task = task.Task(self.data_queues["logging"].get_logger("Task"))
    source_data = common.fetch_file_data_by_path(pathname)
    self.test_task.create_task_by_absolute_path(abspath, self.settings, source_data)

    # Configure the output file data and the cache path used while processing
    destination_data = task.prepare_file_destination_data(abspath, self.settings.get_out_container())
    self.test_task.set_destination_data(destination_data)
    self.test_task.set_cache_path()
def create_task(abspath, library_id=1, library_name=None, task_type='local', priority_score=0):
    """
    Create a pending task given the path to a file and a library ID or name

    :param abspath:
    :param library_id:
    :param library_name:
    :param task_type:
    :param priority_score:
    :return:
    """
    # A library name, when given, overrides the provided library ID
    if library_name is not None:
        for lib in Library.get_all_libraries():
            if library_name == lib.get('name'):
                library_id = lib.get('id')

    # Ensure the library provided exists (prevents errors as the task library_id column is not a foreign key)
    library = Library(library_id)

    # Create the task as a local task as the path provided is local
    new_task = task.Task()
    created = new_task.create_task_by_absolute_path(
        abspath,
        task_type=task_type,
        library_id=library.get_id(),
        priority_score=priority_score)
    if not created:
        # File was not created. Do not carry on.
        return False

    # Return task info (same as the data returned in a file upload)
    task_info = new_task.get_task_data()
    return {key: task_info.get(key) for key in ("id", "abspath", "priority", "type", "status", "library_id")}
def add_remote_tasks(pathname):
    """
    Adds an upload file path to the pending task list as a 'remote' task
    Returns the task ID

    :param pathname:
    :return:
    """
    # Register the uploaded file as a 'remote' type task
    remote_task = task.Task()
    if not remote_task.create_task_by_absolute_path(os.path.abspath(pathname), task_type='remote'):
        # File was not created. Do not carry on.
        return False
    return remote_task.get_task_data()
def fetch_next_task_filtered(self, status, sort_by='id', sort_order='asc'):
    """
    Returns the next task in the task list for a given status

    # TODO: Make method function

    :param sort_order:
    :param sort_by:
    :param status:
    :return:
    """
    # Fetch the task record first (to ensure it exists)
    record = build_tasks_query(status, sort_by, sort_order)
    if not record:
        return False

    # Hydrate a Task object from the record's absolute path and return it
    next_task = task.Task(self.data_queues["logging"].get_logger("Task"))
    next_task.read_and_set_task_by_absolute_path(record.abspath)
    return next_task
def create_task_from_path(self, pathname, library_id, priority_score=0):
    """
    Generate a Task object from a pathname

    :param pathname:
    :param library_id:
    :param priority_score:
    :return:
    """
    new_task = task.Task()
    created = new_task.create_task_by_absolute_path(
        os.path.abspath(pathname),
        library_id=library_id,
        priority_score=priority_score)
    # If the file already exists in the task queue this returns false; do not carry on.
    return new_task if created else False
def create_task_from_path(self, pathname):
    """
    Generate a Task object from a pathname

    :param pathname:
    :return:
    """
    new_task = task.Task(self.data_queues["logging"].get_logger("Task"))
    # Probe the file for its current source data
    source_data = common.fetch_file_data_by_path(pathname)
    if new_task.create_task_by_absolute_path(os.path.abspath(pathname), self.settings, source_data):
        return new_task
    # The file already exists in the task queue; do not carry on.
    return False
def add_item(self, pathname):
    """Queue a new task for *pathname* unless it is already queued, in progress, or processed."""
    abspath = os.path.abspath(pathname)

    # Skip paths already waiting in the job queue
    if any(item.source['abspath'] == abspath for item in self.list_all_incoming_items()):
        return False
    # Skip paths currently in progress of being converted
    if any(path == abspath for path in self.list_all_in_progress_paths()):
        return False
    # Skip paths already processed and waiting to be moved
    if any(item.source['abspath'] == abspath for item in self.list_all_processed_items()):
        return False

    # Create a new task, set its source/destination/cache data and queue it
    new_task = task.Task(self.settings, self.data_queues)
    new_task.set_source_data(pathname)
    new_task.set_destination_data()
    new_task.set_cache_path()
    self.incoming.append(new_task)
    return True
def setup_test_task(self, pathname):
    """Create a test Task for the given file and prime its destination and cache data."""
    from unmanic.libs import common

    abspath = os.path.abspath(pathname)

    # Build the task with the UI logger and populate it from the file on disk
    self.test_task = task.Task(self.data_queues["logging"].get_logger("Task"))
    source_data = common.fetch_file_data_by_path(pathname)
    self.test_task.create_task_by_absolute_path(abspath, self.settings, source_data)

    # Resolve the output container extension
    if self.settings.get_keep_original_container():
        # Reuse the source file's extension (without the leading dot)
        container_extension = os.path.splitext(os.path.basename(pathname))[1].lstrip('.')
    else:
        from unmanic.libs.unffmpeg import containers
        container = containers.grab_module(self.settings.get_out_container())
        container_extension = container.container_extension()

    destination_data = task.prepare_file_destination_data(abspath, container_extension)
    self.test_task.set_destination_data(destination_data)
    self.test_task.set_cache_path()
def add_historic_tasks_to_pending_tasks_list(historic_task_ids, library_id=None):
    """
    Adds a list of historical tasks to the pending tasks list.

    Returns a dict mapping the ID of each record that could NOT be re-queued
    to a human-readable error message; an empty dict means all succeeded.

    :param historic_task_ids:
    :param library_id:
    :return:
    """
    errors = {}

    # Resolve the current path of each requested historic task
    history_logging = history.History()
    records_by_id = history_logging.get_current_path_of_historic_tasks_by_id(id_list=historic_task_ids)

    for record in records_by_id:
        record_id = record.get("id")

        # Fetch the abspath name
        abspath = os.path.abspath(record.get("abspath"))

        # Ensure path exists
        if not os.path.exists(abspath):
            errors[record_id] = "Path does not exist - '{}'".format(abspath)
            continue

        # Create a new task
        new_task = task.Task()
        if not new_task.create_task_by_absolute_path(abspath, library_id=library_id):
            # If file exists in task queue already this will return false.
            # Do not carry on.
            errors[record_id] = "File already in task queue - '{}'".format(abspath)
            continue

    return errors
def add_historic_tasks_to_pending_tasks_list(self, historic_task_ids):
    """
    Adds a list of historical tasks to the pending tasks list.

    :param historic_task_ids:
    :return:
    """
    success = True

    # Resolve the current path of each requested historic task
    history_logging = history.History(self.config)
    records_by_id = history_logging.get_current_path_of_historic_tasks_by_id(id_list=historic_task_ids)

    for record in records_by_id:
        abspath = os.path.abspath(record.get("abspath"))

        # Skip records whose source file no longer exists
        if not os.path.exists(abspath):
            success = False
            continue

        # Probe the file for current data and queue a new pending task for it
        new_task = task.Task(tornado.log.app_log)
        source_data = common.fetch_file_data_by_path(abspath)
        if not new_task.create_task_by_absolute_path(abspath, self.config, source_data):
            # If file exists in task queue already this will return false.
            # Do not carry on.
            success = False
            continue

    return success
def requeue_tasks_at_bottom(self, task_id):
    """Move the given pending task to the bottom of the task list."""
    return task.Task().reorder_tasks([task_id], 'bottom')
def setup_test_task(self, pathname):
    """Create a test Task for the given file and prime its destination and cache data."""
    self.test_task = task.Task(self.settings, self.data_queues)
    # Populate source, destination and cache details on the new task
    self.test_task.set_source_data(pathname)
    self.test_task.set_destination_data()
    self.test_task.set_cache_path()
def prepare_filtered_pending_tasks_for_table(request_dict):
    """
    Returns a object of records filtered and sorted according to the provided request.

    :param request_dict:
    :return:
    """
    # Extract the DataTables query parameters
    draw = request_dict.get('draw')
    start = request_dict.get('start')
    length = request_dict.get('length')
    search_value = request_dict.get('search').get("value")

    # Force sort order always by ID desc
    order = {
        "column": 'priority',
        "dir": 'desc',
    }

    task_handler = task.Task()

    # Total record count before any filtering
    records_total_count = task_handler.get_total_task_list_count()
    # Record count after filters, without pagination
    records_filtered_count = task_handler.get_task_list_filtered_and_sorted(
        order=order, start=0, length=0, search_value=search_value, status='pending').count()
    # The filtered/sorted page of results
    pending_task_results = task_handler.get_task_list_filtered_and_sorted(
        order=order, start=start, length=length, search_value=search_value, status='pending')

    # Reduce each record to the fields the table template requires
    rows = [
        {
            'id': pending_task['id'],
            'selected': False,
            'abspath': pending_task['abspath'],
            'status': pending_task['status'],
        }
        for pending_task in pending_task_results
    ]

    return {
        "draw": draw,
        "recordsTotal": records_total_count,
        "recordsFiltered": records_filtered_count,
        "successCount": 0,
        "failedCount": 0,
        "data": rows,
    }
def prepare_filtered_pending_tasks(params, include_library=False):
    """
    Returns a object of records filtered and sorted according to the provided request.

    :param params:
    :param include_library:
    :return:
    """
    # Extract the query parameters, falling back to sane defaults
    start = params.get('start', 0)
    length = params.get('length', 0)
    search_value = params.get('search_value', '')
    order = params.get('order', {
        "column": 'priority',
        "dir": 'desc',
    })

    task_handler = task.Task()

    # Total record count before any filtering
    records_total_count = task_handler.get_total_task_list_count()
    # Record count after filters, without pagination
    records_filtered_count = task_handler.get_task_list_filtered_and_sorted(
        order=order, start=0, length=0, search_value=search_value, status='pending').count()
    # The filtered/sorted page of results
    pending_task_results = task_handler.get_task_list_filtered_and_sorted(
        order=order, start=start, length=length, search_value=search_value, status='pending')

    # Reduce each record to the fields required by callers
    results = []
    for pending_task in pending_task_results:
        item = {
            'id': pending_task['id'],
            'abspath': pending_task['abspath'],
            'priority': pending_task['priority'],
            'type': pending_task['type'],
            'status': pending_task['status'],
        }
        if include_library:
            # Attach library details for callers that display them
            library = Library(pending_task['library_id'])
            item['library_id'] = library.get_id()
            item['library_name'] = library.get_name()
        results.append(item)

    return {
        "recordsTotal": records_total_count,
        "recordsFiltered": records_filtered_count,
        "results": results,
    }