def _process_finished_ti(session, ti):
    """
    Process a TaskInstance that has already finished.

    If the event-scheduling dependencies are met and the associated TaskState
    carries a RESTART action, the task instance is re-queued (set to SCHEDULED)
    and the pending action is cleared.

    :param session: SQLAlchemy ORM session.
    :param ti: the finished TaskInstance to process.
    """
    from airflow.ti_deps.deps.runnable_exec_date_dep import RunnableExecDateDep
    from airflow.ti_deps.deps.valid_state_dep import ValidStateDep
    from airflow.ti_deps.deps.events_dep import EventTIDep

    EVENT_SCHEDULED_DEPS = {
        RunnableExecDateDep(),
        ValidStateDep(FINISHED_STATES),
        EventTIDep(),
    }
    dep_context = DepContext(deps=EVENT_SCHEDULED_DEPS)
    if ti.are_dependencies_met(dep_context=dep_context, session=session):
        ts = TaskState.query_task_state(ti, session=session)
        if ts.action is None or TaskAction(ts.action) == TaskAction.NONE:
            return
        if TaskAction(ts.action) == TaskAction.RESTART:
            log.debug('Queuing finished task: %s', ti)
            ti.state = State.SCHEDULED
            log.info("Creating / updating %s in ORM", ti)
            session.merge(ti)
        ts.action = None
        session.merge(ts)
        session.commit()
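

# Usage sketch (illustrative only): a hypothetical helper showing how
# _process_finished_ti might be driven from a scheduler loop. It queries task
# instances that are already in a finished state and lets the TaskState/action
# logic above decide whether a RESTART re-queues them. The query below is an
# assumption for illustration, not the scheduler's actual loop.
def _example_process_finished_tis(session):
    from airflow import models

    TI = models.TaskInstance
    for ti in session.query(TI).filter(TI.state.in_(FINISHED_STATES)).all():
        _process_finished_ti(session, ti)
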
def parse_configs(config_str: str) -> List[MetConfig]:
    """
    Parse a JSON string into a list of MetConfig objects.

    :param config_str: a JSON array in which each element describes one event
        dependency (event_key, event_value, event_type, action, condition,
        value_condition, life).
    :return: the parsed list of MetConfig objects.
    """
    configs: List[MetConfig] = []
    config_json = json.loads(config_str)
    for config in config_json:
        met_config = MetConfig(event_key=config['event_key'],
                               event_value=config['event_value'],
                               event_type=config['event_type'],
                               action=TaskAction(config['action']),
                               condition=MetCondition(config['condition']),
                               value_condition=MetValueCondition(config['value_condition']),
                               life=EventLife(config['life']))
        configs.append(met_config)
    return configs
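

# Usage sketch (illustrative only): a hypothetical caller of parse_configs.
# The concrete enum values below ("RESTART", "NECESSARY", "EQUAL", "ONCE") are
# assumptions and must match the actual TaskAction / MetCondition /
# MetValueCondition / EventLife definitions in this module.
def _example_parse_configs():
    example_config_str = json.dumps([
        {
            "event_key": "sample_key",
            "event_value": "sample_value",
            "event_type": "UNDEFINED",
            "action": "RESTART",
            "condition": "NECESSARY",
            "value_condition": "EQUAL",
            "life": "ONCE",
        },
    ])
    return parse_configs(example_config_str)
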
    def _set_task_instance_state(self,
                                 dag_run,
                                 dag_id,
                                 task_id,
                                 execution_date,
                                 state,
                                 try_number,
                                 session=None):
        """
        Set the task state to db and maybe set the dagrun object finished to db.
        :param dag_run: DagRun object
        :param dag_id: Dag identify
        :param task_id: task identify
        :param execution_date: the dag run execution date
        :param state: the task state should be set.
        :param try_number: the task try_number.
        :param session:
        :return:
        """
        TI = models.TaskInstance
        qry = session.query(TI).filter(TI.dag_id == dag_id,
                                       TI.task_id == task_id,
                                       TI.execution_date == execution_date)
        ti = qry.first()
        if not ti:
            self.log.warning(
                "TaskInstance dag_id %s task_id %s execution_date %s "
                "went missing from the database",
                dag_id, task_id, execution_date)
            return
        ts = TaskState.query_task_state(ti, session)
        self.log.debug(
            "set task state dag_id %s task_id %s execution_date %s try_number %s "
            "current try_number %s state %s ack_id %s action %s.",
            dag_id, task_id, execution_date, try_number, ti.try_number,
            state, ts.ack_id, ts.action)
        is_restart = False
        if state in (State.FAILED, State.SUCCESS, State.SHUTDOWN):
            if ti.try_number == try_number and ti.state == State.QUEUED:
                msg = ("Executor reports task instance {} finished ({}) "
                       "although the task says it is {}. Was the task "
                       "killed externally?".format(ti, state, ti.state))
                Stats.incr('scheduler.tasks.killed_externally')
                self.log.error(msg)
                try:
                    dag = self.task_route.find_dagrun(dag_id, execution_date)
                    ti.task = dag.get_task(task_id)
                    ti.handle_failure(msg)
                except Exception:
                    self.log.error(
                        "Cannot load the dag bag to handle failure for %s"
                        ". Setting task to FAILED without callbacks or "
                        "retries. Do you have enough resources?", ti)
                    # Failure callbacks could not run; force the state to FAILED.
                    ti.state = State.FAILED
                    session.merge(ti)
            else:
                if ts.action is None:
                    self.log.debug(
                        "task dag_id %s task_id %s execution_date %s action is None.",
                        dag_id, task_id, execution_date)
                elif TaskAction(ts.action) == TaskAction.RESTART:
                    # if ts.stop_flag is not None and ts.stop_flag == try_number:
                    ti.state = State.SCHEDULED
                    ts.action = None
                    ts.stop_flag = None
                    ts.ack_id = 0
                    session.merge(ti)
                    session.merge(ts)
                    self.log.debug(
                        "task dag_id %s task_id %s execution_date %s try_number %s restart action.",
                        dag_id, task_id, execution_date, try_number)
                    is_restart = True
                elif TaskAction(ts.action) == TaskAction.STOP:
                    # if ts.stop_flag is not None and ts.stop_flag == try_number:
                    ts.action = None
                    ts.stop_flag = None
                    ts.ack_id = 0
                    session.merge(ts)
                    self.log.debug(
                        "task dag_id %s task_id %s execution_date %s try_number %s stop action.",
                        dag_id, task_id, execution_date, try_number)
                else:
                    self.log.debug(
                        "task dag_id %s task_id %s execution_date %s action %s.",
                        dag_id, task_id, execution_date, ts.action)
            session.commit()

        if not is_restart and ti.state == State.RUNNING:
            self.log.debug(
                "set task dag_id %s task_id %s execution_date %s state %s",
                dag_id, task_id, execution_date, state)
            ti.state = state
            session.merge(ti)
        session.commit()
        # update dagrun state
        sync_dag_run = session.query(DagRun).filter(
            DagRun.id == dag_run.id).first()
        if sync_dag_run.state not in FINISHED_STATES:
            if self.dagrun_route.find_dagrun_by_id(sync_dag_run.id) is None:
                self.log.error(
                    "DagRun lost dag_id %s task_id %s execution_date %s",
                    dag_id, task_id, execution_date)
            else:
                run_process_func(target=dag_run_update_state,
                                 args=(
                                     dag_run,
                                     self.dagrun_route.find_simple_dag(
                                         dag_run.id),
                                 ))
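
    # Usage sketch (illustrative only): a hypothetical executor-event handler
    # showing how _set_task_instance_state might be invoked. The event layout
    # ((dag_id, task_id, execution_date, try_number), state) and the DagRun
    # lookup below are assumptions for illustration, not this scheduler's
    # actual event-processing code.
    def _example_handle_executor_event(self, event, session=None):
        (dag_id, task_id, execution_date, try_number), state = event
        dag_run = session.query(DagRun).filter(
            DagRun.dag_id == dag_id,
            DagRun.execution_date == execution_date).first()
        if dag_run is not None:
            self._set_task_instance_state(dag_run, dag_id, task_id,
                                          execution_date, state, try_number,
                                          session=session)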