Example #1
    def _start_by_fork(self):
        pid = os.fork()
        if pid:
            self.log.info("Started process %d to run task", pid)
            return psutil.Process(pid)
        else:
            from airflow.bin.cli import get_parser
            import signal
            import airflow.settings as settings

            signal.signal(signal.SIGINT, signal.SIG_DFL)
            signal.signal(signal.SIGTERM, signal.SIG_DFL)
            # Start a new process group
            os.setpgid(0, 0)

            # Force a new SQLAlchemy session. We can't share open DB handles
            # between processes. The CLI code will re-create this as part of its
            # normal startup.
            settings.engine.pool.dispose()
            settings.engine.dispose()

            parser = get_parser()
            # [1:] - remove "airflow" from the start of the command
            args = parser.parse_args(self._command[1:])

            proc_title = "airflow task runner: {0.dag_id} {0.task_id} {0.execution_date}"
            if hasattr(args, "job_id"):
                proc_title += " {0.job_id}"
            setproctitle(proc_title.format(args))

            try:
                args.func(args)
                os._exit(0)
            except Exception:
                os._exit(1)
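The fork-and-dispatch shape used here (fork, reset signal handlers, start a new process group, then route through the parsed args and exit with os._exit) is easier to see in isolation. Below is a minimal, POSIX-only sketch with an invented "echo" subcommand; nothing in it is Airflow code.

    import argparse
    import os
    import signal

    def _handle_echo(args):
        # Illustrative handler; stands in for a real subcommand function.
        print("child says:", args.message)

    def start_by_fork(argv):
        """Fork a child that parses argv and runs the selected handler (POSIX only)."""
        pid = os.fork()
        if pid:
            return pid  # parent: return the child PID so the caller can wait on it

        # Child: restore default signal handling and detach into a new
        # process group, mirroring the runner above.
        signal.signal(signal.SIGINT, signal.SIG_DFL)
        signal.signal(signal.SIGTERM, signal.SIG_DFL)
        os.setpgid(0, 0)

        parser = argparse.ArgumentParser(prog="demo")
        sub = parser.add_subparsers(dest="command")
        echo = sub.add_parser("echo")
        echo.add_argument("message")
        echo.set_defaults(func=_handle_echo)

        args = parser.parse_args(argv)
        try:
            args.func(args)
            code = 0
        except Exception:
            code = 1
        # os._exit skips the cleanup machinery inherited from the parent,
        # which is why the runner above uses it instead of sys.exit.
        os._exit(code)

    if __name__ == "__main__":
        child = start_by_fork(["echo", "hello"])
        _, status = os.waitpid(child, 0)
        print("child exit status:", os.WEXITSTATUS(status))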
Example #2
    def setUp(self):
        configuration.test_mode()
        app = create_app()
        app.config['TESTING'] = True
        self.parser = cli.get_parser()
        self.dagbag = models.DagBag(
            dag_folder=DEV_NULL, include_examples=True)
Example #3
File: core.py Project: DingaGa/airflow
    def setUp(self):
        configuration.test_mode()
        app = application.create_app()
        app.config['TESTING'] = True
        self.parser = cli.get_parser()
        self.dagbag = models.DagBag(
            dag_folder=DEV_NULL, include_examples=True)
Example #4
    def pickle_from_command(self, command):

        from airflow.bin.cli import get_parser
        parser = get_parser()
        strip_airflow = command[len('airflow '):]
        args = parser.parse_args(strip_airflow.split())
        if hasattr(args, 'pickle'):
            return args.pickle
Example #5
    def pickle_from_command(self, command):

        from airflow.bin.cli import get_parser
        parser = get_parser()
        strip_airflow = command[len('airflow '):]
        args = parser.parse_args(strip_airflow.split())
        if hasattr(args, 'pickle'):
            return args.pickle
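The hasattr(args, 'pickle') check works because argparse only sets namespace attributes for options defined by the subparser that was actually selected. A self-contained sketch of that behaviour (the subcommand layout is modeled loosely on the Airflow CLI, not taken from it):

    import argparse

    parser = argparse.ArgumentParser(prog="airflow")
    sub = parser.add_subparsers(dest="subcommand")
    run = sub.add_parser("run")
    run.add_argument("--pickle", type=int)   # only "run" defines --pickle
    sub.add_parser("list_dags")              # no --pickle here

    args = parser.parse_args("run --pickle 123".split())
    print(hasattr(args, "pickle"), args.pickle)   # True 123

    args = parser.parse_args(["list_dags"])
    print(hasattr(args, "pickle"))                # False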
Example #6
    def run(args, configuration=None):
        cmd_prints = None
        with SuppressPrints():
            parser = cli.get_parser()
            args = parser.parse_args(args)

            with FetchPrints() as buffer:
                args.func(args)
                cmd_prints = buffer.getvalue()

        return cmd_prints
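SuppressPrints and FetchPrints are project-local helpers that this snippet does not show. Assuming they behave as their names suggest, minimal stand-ins can be built on contextlib.redirect_stdout:

    import contextlib
    import io

    class FetchPrints:
        """Redirect stdout into a StringIO and hand the buffer to the with-block."""
        def __enter__(self):
            self._buffer = io.StringIO()
            self._ctx = contextlib.redirect_stdout(self._buffer)
            self._ctx.__enter__()
            return self._buffer  # the caller reads buffer.getvalue()

        def __exit__(self, *exc):
            return self._ctx.__exit__(*exc)

    class SuppressPrints(FetchPrints):
        """Same redirection, but the captured output is simply discarded."""
        def __enter__(self):
            super().__enter__()
            return self

Nesting works as in the example above: the inner FetchPrints redirect temporarily overrides the outer SuppressPrints one and restores it on exit.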
Example #7
File: core.py Project: john5223/airflow
    def test_cli(self):
        from airflow.bin import cli
        parser = cli.get_parser()
        args = parser.parse_args(['list_dags'])
        cli.list_dags(args)

        for dag_id in self.dagbag.dags.keys():
            args = parser.parse_args(['list_tasks', dag_id])
            cli.list_tasks(args)

        args = parser.parse_args(['list_tasks', 'example_bash_operator', '--tree'])
        cli.list_tasks(args)

        cli.initdb(parser.parse_args(['initdb']))
Example #8
    def test_cli(self):
        from airflow.bin import cli
        parser = cli.get_parser()
        args = parser.parse_args(['list_dags'])
        cli.list_dags(args)

        for dag_id in self.dagbag.dags.keys():
            args = parser.parse_args(['list_tasks', dag_id])
            cli.list_tasks(args)

        args = parser.parse_args(
            ['list_tasks', 'example_bash_operator', '--tree'])
        cli.list_tasks(args)

        cli.initdb(parser.parse_args(['initdb']))
Example #9
File: core.py Project: johnw424/airflow
    def test_cli(self):
        from airflow.bin import cli

        parser = cli.get_parser()
        args = parser.parse_args(["list_dags"])
        cli.list_dags(args)

        for dag_id in self.dagbag.dags.keys():
            args = parser.parse_args(["list_tasks", dag_id])
            cli.list_tasks(args)

        args = parser.parse_args(["list_tasks", "example_bash_operator", "--tree"])
        cli.list_tasks(args)

        cli.initdb(parser.parse_args(["initdb"]))
Example #10
    def _start_by_fork(self):
        pid = os.fork()
        if pid:
            self.log.info("Started process %d to run task", pid)
            return psutil.Process(pid)
        else:
            from airflow.bin.cli import get_parser
            from airflow.sentry import Sentry
            import signal
            import airflow.settings as settings

            signal.signal(signal.SIGINT, signal.SIG_DFL)
            signal.signal(signal.SIGTERM, signal.SIG_DFL)
            # Start a new process group
            os.setpgid(0, 0)

            # Force a new SQLAlchemy session. We can't share open DB handles
            # between processes. The CLI code will re-create this as part of its
            # normal startup.
            settings.engine.pool.dispose()
            settings.engine.dispose()

            parser = get_parser()
            # [1:] - remove "airflow" from the start of the command
            args = parser.parse_args(self._command[1:])

            self.log.info('Running: %s', self._command)
            self.log.info('Job %s: Subtask %s', self._task_instance.job_id,
                          self._task_instance.task_id)

            proc_title = "airflow task runner: {0.dag_id} {0.task_id} {0.execution_date}"
            if hasattr(args, "job_id"):
                proc_title += " {0.job_id}"
            setproctitle(proc_title.format(args))

            try:
                args.func(args, dag=self.dag)
                return_code = 0
            except Exception:
                return_code = 1
            finally:
                # Explicitly flush any pending exception to Sentry if enabled
                Sentry.flush()
                os._exit(return_code)  # pylint: disable=protected-access
Example #11
    def setUpClass(cls):
        cls.parser = cli.get_parser()
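Every example above follows the same dispatch shape: get_parser() returns an argparse parser whose subparsers attach their handler via set_defaults(func=...), so parse_args() followed by args.func(args) runs a command line programmatically. A minimal sketch of that pattern, with an illustrative handler rather than Airflow's real one:

    import argparse

    def list_dags(args):
        # Illustrative handler; the real one lives in airflow.bin.cli.
        print("listing dags under", args.subdir)

    def get_parser():
        """Build a parser in the style of airflow.bin.cli.get_parser."""
        parser = argparse.ArgumentParser(prog="airflow")
        sub = parser.add_subparsers(dest="subcommand")
        ld = sub.add_parser("list_dags")
        ld.add_argument("--subdir", default="~/airflow/dags")
        ld.set_defaults(func=list_dags)  # the handler travels with the parsed args
        return parser

    parser = get_parser()
    # [1:] drops the leading "airflow", as in the fork-based runners above
    args = parser.parse_args("airflow list_dags".split()[1:])
    args.func(args)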