def _start_task_instance(self, key: TaskInstanceKey):
    """Force-start a task instance, bypassing every dependency check.

    Looks the task instance up in the DB, builds a local run command with
    all ``ignore_*`` flags enabled, marks it QUEUED and submits it to the
    executor asynchronously.

    :param key: key identifying the task instance to start
    """
    task_instance = self.get_task_instance(key)
    if task_instance is None:
        # Nothing to start; the instance has vanished from the DB.
        self.log.error("TaskInstance not found in DB, %s.", str(key))
        return

    run_command = TaskInstance.generate_command(
        task_instance.dag_id,
        task_instance.task_id,
        task_instance.execution_date,
        local=True,
        mark_success=False,
        ignore_all_deps=True,
        ignore_depends_on_past=True,
        ignore_task_deps=True,
        ignore_ti_state=True,
        pool=task_instance.pool,
        file_path=task_instance.dag_model.fileloc,
        pickle_id=task_instance.dag_model.pickle_id,
        server_uri=self._server_uri,
    )
    task_instance.set_state(State.QUEUED)
    self.execute_async(
        key=key,
        command=run_command,
        queue=task_instance.queue,
        executor_config=task_instance.executor_config,
    )
def restart(self, ti, full_filepath, pickle_id=None):
    """Stop a running task instance and re-queue it with the executor.

    Unlike a force start, the regenerated command keeps every dependency
    check enabled (all ``ignore_*`` flags are False). After queuing, the
    task instance state is moved to SCHEDULED.

    :param ti: the task instance to restart
    :param full_filepath: full file path of the DAG containing the task
    :param pickle_id: the dag pickle_id
    :return: None
    """
    self.stop_task(ti)

    from airflow.models.taskinstance import TaskInstance as TI

    sti = airflow.utils.dag_processing.SimpleTaskInstance(ti)
    restart_command = TI.generate_command(
        sti.dag_id,
        sti.task_id,
        sti.execution_date,
        local=True,
        mark_success=False,
        ignore_all_deps=False,
        ignore_depends_on_past=False,
        ignore_task_deps=False,
        ignore_ti_state=False,
        pool=sti.pool,
        file_path=full_filepath,
        pickle_id=pickle_id,
    )

    self.log.info(
        "Sending %s to executor with priority %s and queue %s",
        sti.key,
        sti.priority_weight,
        sti.queue,
    )
    self.queue_command(
        sti,
        restart_command,
        priority=sti.priority_weight,
        queue=sti.queue,
    )
    self.change_state(ti.key, State.SCHEDULED)