def __init__(self, func, objtype, kwargs, path):
    """Register a task callable and its dependency metadata.

    Args:
        func: the callable this task wraps.
        objtype: task type tag (kept for interface compatibility; unused here).
        kwargs: keyword options; recognised keys are "needs", "provides"
            and "max_concurrency".
        path: path of the defining file; stored relative to the cwd.
    """
    self._func = func
    # Store the path relative to the current working directory for readability.
    self._path = path.replace(os.getcwd() + "/", "")
    log.debug("Adding func %s" % str(func))
    self._needs = kwargs.get("needs", None)
    self._provides = kwargs.get("provides", None)
    self._max_concurrency = kwargs.get("max_concurrency", None)
    if self._needs:
        # Normalize a single need into a list, then register each one —
        # avoids duplicating the registration call in two branches.
        if not isinstance(self._needs, list):
            self._needs = [self._needs]
        for need in self._needs:
            TaskDep.add_obj_needing(self, need)
    if self._provides:
        if not isinstance(self._provides, list):
            self._provides = [self._provides]
        for prov in self._provides:
            TaskDep.add_obj_providing(self, prov)
    # Process handle; set when the task is started.
    self.proc = None
def add_obj_providing(obj, provide):
    """Register *obj* as the provider of dependency *provide*.

    Raises:
        ValueError: if another object already provides *provide*.
    """
    log.debug("[%s] Provides %s" % (str(obj), provide))
    if provide in TaskDep.object_provide:
        # Bug fix: the message said "providing %s" but formatted the new
        # object instead of the conflicting dependency name.
        raise ValueError(
            "There already is an object providing %s" % (str(provide)))
    TaskDep.object_provide[provide] = obj
async def sched_runnable_task():
    """Schedule runnable tasks until all dependencies and tasks are done.

    Polls the dependency graph for objects whose needs are satisfied,
    queues them, and starts them when the pool has capacity.
    """
    to_run = []
    # Keep running while there are dependencies or tasks to run
    while not TaskDep.finished() \
            or len(to_run) > 0 \
            or len(TaskRunner.tasks) > 0:
        for obj in TaskDep.to_test():
            # If object has no needs left unsatisfied, enqueue it
            if obj.has_needs_satisfied():
                log.debug("Enqueue %s" % str(obj))
                TaskDep.rem_test_object(obj)
                to_run.append(obj)
        # Bug fix: iterate a snapshot — removing from the list while
        # iterating it skipped the element right after each started task.
        for runnable in list(to_run):
            # Check if max_concurrency condition is achieved
            if TaskRunner.can_run_in_pool(runnable.max_concurrency):
                to_run.remove(runnable)
                # Run it
                await TaskRunner.run_task(runnable)
        await asyncio.sleep(0.1)
    TaskRunner.finished = True
def rem_running_task(task):
    """Drop *task* from the running list and record it as finished."""
    TaskRunner.tasks.remove(task)
    # Show the remaining running tasks for debugging.
    log.debug(TaskRunner.tasks)
    # The report keeps the history of completed tasks.
    Report.add_finished(task)
def set_pool_size(new_size, force=False):
    """Shrink the pool to *new_size*; with force=True, set it unconditionally.

    A falsy *new_size* (None or 0) is ignored.
    """
    if not new_size:
        return
    if force:
        log.debug("New pool size is %d (forced)" % new_size)
        TaskRunner.pool_size = new_size
    elif TaskRunner.pool_size > new_size:
        # Without force, the pool only ever shrinks.
        log.debug("New pool size is %d" % new_size)
        TaskRunner.pool_size = new_size
async def run_task(task):
    """Launch *task*'s process and track it in the running pool."""
    log.debug("Starting %s" % str(task))
    # Honour the task's own concurrency cap by shrinking the pool first.
    TaskRunner.set_pool_size(task.max_concurrency)
    # Spawn the underlying process, then register it as running.
    task.start_process()
    TaskRunner.add_running_task(task)
def can_run_in_pool(req_concur):
    """Return True when a task with concurrency cap *req_concur* may start.

    Args:
        req_concur: the task's max_concurrency, or None for no cap.
    """
    # The task's own cap is exceeded when that many tasks already run.
    if req_concur and req_concur <= len(TaskRunner.tasks):
        return False
    # Check if there is room in the pool. Equivalent to the original
    # "pool_size - len(tasks) - 1 < 0", just readable.
    if len(TaskRunner.tasks) >= TaskRunner.pool_size:
        return False
    log.debug(
        "OK run req %s avail %s" % (
            req_concur if isinstance(req_concur, int) else 0,
            TaskRunner.pool_size
            if TaskRunner.pool_size != sys.maxsize else "MAX"))
    return True
async def monitor_tasks():
    """Watch running tasks, reaping finished ones and resizing the pool."""
    while not TaskRunner.finished:
        reaped = False
        # Snapshot the list: entries are removed while scanning.
        for task in list(TaskRunner.tasks):
            if task.is_proc_alive():
                continue
            # The process ended: its provided dependencies are now solved.
            for prov in task.provides:
                TaskDep.mark_dependency_solved(prov)
            log.debug("%s exited" % str(task))
            TaskRunner.rem_running_task(task)
            reaped = True
        if reaped:
            # Recompute the tightest concurrency cap among survivors;
            # with no capped task left, the pool reopens to sys.maxsize.
            caps = [t.max_concurrency
                    for t in list(TaskRunner.tasks) if t.max_concurrency]
            new_cap = min(caps) if caps else sys.maxsize
            if new_cap != TaskRunner.pool_size:
                TaskRunner.set_pool_size(new_cap, force=True)
        # Exit if there are no other tasks and the queue is empty.
        if TaskRunner.finished:
            return
        await asyncio.sleep(0.1)
def add_running_task(task):
    """Append *task* to the list of currently running tasks."""
    TaskRunner.tasks.append(task)
    # Show the current running set for debugging.
    log.debug(TaskRunner.tasks)
def add_obj_needing(obj, need):
    """Record that *obj* depends on *need* being provided."""
    log.debug("[%s] Needs %s" % (str(obj), need))
    # Several objects may share one need; keep them all in a list.
    TaskDep.object_needs.setdefault(need, []).append(obj)