def soft_stop(self, ctxt, plan_uid):
    """Skip every task in the plan that has not started yet.

    Takes the plan lock so no concurrent scheduler mutates the graph
    while statuses are rewritten, then persists the updated graph.
    """
    with Lock(plan_uid, str(get_current_ident()), retries=20, wait=1):
        plan = graph.get_graph(plan_uid)
        for node in plan:
            not_started = (
                states.PENDING.name,
                states.PENDING_RETRY.name,
            )
            if plan.node[node]['status'] in not_started:
                plan.node[node]['status'] = states.SKIPPED.name
        graph.update_graph(plan)
def soft_stop(self, ctxt, plan_uid):
    """Skip all pending (or pending-after-error) tasks of a plan.

    Runs under the plan lock so status rewrites cannot race a
    concurrent scheduling pass; saves the graph before releasing.
    """
    with Lock(plan_uid, str(get_current_ident()),
              retries=20, waiter=Waiter(1)):
        plan = graph.get_graph(plan_uid)
        skippable = (states.PENDING.name, states.ERROR_RETRY.name)
        for node in plan:
            if plan.node[node]['status'] in skippable:
                plan.node[node]['status'] = states.SKIPPED.name
        graph.update_graph(plan)
def next(self, ctxt, plan_uid):
    """Schedule whatever tasks of the plan are currently runnable.

    Holds the plan lock for the whole pass, persists the mutated
    graph, and returns the names of the tasks that were scheduled.
    """
    with Lock(plan_uid, str(get_current_ident()), retries=20, wait=1):
        log.debug('Received *next* event for %s', plan_uid)
        plan = graph.get_graph(plan_uid)
        scheduled = self._next(plan)
        for name in scheduled:
            self._do_scheduling(plan, name)
        graph.update_graph(plan)
        log.debug('Scheduled tasks %r', scheduled)
        # process tasks with tasks client
        return scheduled
def soft_stop(self, ctxt, plan_uid):
    """Mark every not-yet-started task of the plan as skipped.

    Iterates the task objects of the plan graph under the plan lock;
    each changed task is saved lazily (flushed later by the caller's
    persistence layer).
    """
    with Lock(
        plan_uid, str(get_current_ident()),
        retries=20, waiter=Waiter(1)
    ):
        skippable = (states.PENDING.name, states.ERROR_RETRY.name)
        for task in graph.get_graph(plan_uid):
            if task.status not in skippable:
                continue
            task.status = states.SKIPPED.name
            task.save_lazy()
def update_next(self, ctxt, status, errmsg):
    """Record a task's result and schedule the tasks it unblocked.

    The task id in *ctxt* encodes ``plan_uid:task_name``; the update
    and the follow-up scheduling pass both happen under the plan
    lock. Returns the names of newly scheduled tasks.
    """
    log.debug(
        'Received update for TASK %s - %s %s',
        ctxt['task_id'], status, errmsg)
    plan_uid, task_name = ctxt['task_id'].rsplit(':', 1)
    with Lock(plan_uid, str(get_current_ident()), retries=20, wait=1):
        plan = graph.get_graph(plan_uid)
        self._do_update(plan, task_name, status, errmsg=errmsg)
        scheduled = self._next(plan)
        # distinct loop variable: do not clobber task_name above
        for name in scheduled:
            self._do_scheduling(plan, name)
        graph.update_graph(plan)
        log.debug('Scheduled tasks %r', scheduled)
        return scheduled
def next(self, ctxt, plan_uid):
    """Schedule the currently runnable tasks of a plan.

    Raises ValueError when the stored plan has no nodes (treated as
    "plan was never populated"). Returns the scheduled task names.
    """
    with Lock(plan_uid, str(get_current_ident()),
              retries=20, waiter=Waiter(1)):
        log.debug('Received *next* event for %s', plan_uid)
        plan = graph.get_graph(plan_uid)
        if not len(plan):
            raise ValueError('Plan {} is empty'.format(plan_uid))
        scheduled = self._next(plan)
        for name in scheduled:
            self._do_scheduling(plan, name)
        graph.update_graph(plan)
        log.debug('Scheduled tasks %r', scheduled)
        # process tasks with tasks client
        return scheduled
def update_next(self, ctxt, status, errmsg):
    """Apply a task status update, then run a scheduling pass.

    ``ctxt['task_id']`` encodes ``plan_uid:task_name``. Both the
    update and the scheduling happen atomically under the plan lock;
    the modified graph is saved before the lock is released.
    """
    log.debug('Received update for TASK %s - %s %s',
              ctxt['task_id'], status, errmsg)
    plan_uid, task_name = ctxt['task_id'].rsplit(':', 1)
    with Lock(plan_uid, str(get_current_ident()),
              retries=20, waiter=Waiter(1)):
        plan = graph.get_graph(plan_uid)
        self._do_update(plan, task_name, status, errmsg=errmsg)
        scheduled = self._next(plan)
        # separate loop variable so task_name above stays intact
        for name in scheduled:
            self._do_scheduling(plan, name)
        graph.update_graph(plan)
        log.debug('Scheduled tasks %r', scheduled)
        return scheduled
def next(self, ctxt, plan_uid):
    """Run one scheduling pass over the plan's runnable tasks.

    Raises ValueError for an empty (never populated) plan; persists
    the graph and returns the names of the tasks scheduled.
    """
    with Lock(
        plan_uid, str(get_current_ident()),
        retries=20, waiter=Waiter(1)
    ):
        log.debug('Received *next* event for %s', plan_uid)
        plan = graph.get_graph(plan_uid)
        if not len(plan):
            raise ValueError('Plan {} is empty'.format(plan_uid))
        scheduled = self._next(plan)
        for name in scheduled:
            self._do_scheduling(plan, name)
        graph.update_graph(plan)
        log.debug('Scheduled tasks %r', scheduled)
        # process tasks with tasks client
        return scheduled
def next(self, ctxt, plan_uid):
    """Schedule every task of the plan that is ready to run.

    Works on task objects (not node names); lazily-saved model
    changes are flushed in one batch via ModelMeta.save_all_lazy().
    Raises ValueError when the stored plan has no tasks.
    """
    with Lock(
        plan_uid, str(get_current_ident()),
        retries=20, waiter=Waiter(1)
    ):
        log.debug('Received *next* event for %s', plan_uid)
        plan = graph.get_graph(plan_uid)
        # FIXME get_graph should raise DBNotFound if graph is not
        # created
        if not len(plan):
            raise ValueError('Plan {} is empty'.format(plan_uid))
        ready = self._next(plan)
        for task in ready:
            self._do_scheduling(task)
        log.debug('Scheduled tasks %r', ready)
        ModelMeta.save_all_lazy()
        return ready
def update_next(self, ctxt, status, errmsg):
    """Apply a task's status update and schedule tasks it unblocked.

    ``ctxt['task_id']`` encodes ``plan_uid~task_name``. The update
    and the follow-up scheduling pass run atomically under the plan
    lock; lazily-saved model changes are flushed in one batch.

    :param ctxt: request context dict; must contain ``task_id``
    :param status: new status value for the task
    :param errmsg: error message accompanying the update (may be empty)
    :returns: the task objects scheduled by this pass
    :raises ValueError: if ``task_name`` matches no node of the plan
    """
    log.debug(
        'Received update for TASK %s - %s %s',
        ctxt['task_id'], status, errmsg)
    plan_uid, task_name = ctxt['task_id'].rsplit('~', 1)
    with Lock(
        plan_uid, str(get_current_ident()),
        retries=20, waiter=Waiter(1)
    ):
        plan = graph.get_graph(plan_uid)
        # Bug fix: the original bare next(...) raised an opaque
        # StopIteration when the task id matched no node; fail with a
        # clear, catchable error instead.
        task = next(
            (t for t in plan.nodes() if t.name == task_name), None)
        if task is None:
            raise ValueError(
                'Task {} not found in plan {}'.format(
                    task_name, plan_uid))
        self._do_update(task, status, errmsg=errmsg)
        tasks_to_schedule = self._next(plan)
        for scheduled_task in tasks_to_schedule:
            self._do_scheduling(scheduled_task)
        log.debug('Scheduled tasks %r', tasks_to_schedule)
        ModelMeta.save_all_lazy()
        return tasks_to_schedule