# Example #1 (score: 0)
    def test_launch_process(self):
        """Start the DAG processor manager and verify it writes its log file."""
        def processor_factory(file_path, zombies):
            return DagFileProcessor(file_path, False, [], zombies)

        dag_path = os.path.join(TEST_DAG_FOLDER, 'test_scheduler_dags.py')
        use_async = 'sqlite' not in conf.get('core', 'sql_alchemy_conn')

        # Remove any stale log file so the assertion below is meaningful.
        manager_log = conf.get('core', 'DAG_PROCESSOR_MANAGER_LOG_LOCATION')
        try:
            os.remove(manager_log)
        except OSError:
            pass

        # max_runs=0 keeps the manager from doing any real scheduling work.
        agent = DagFileProcessorAgent(dag_path, [], 0,
                                      processor_factory, use_async)
        child = agent._launch_process(
            agent._dag_directory,
            agent._file_paths,
            agent._max_runs,
            agent._processor_factory,
            agent._child_signal_conn,
            agent._stat_queue,
            agent._result_queue,
            agent._async_mode,
        )
        if not use_async:
            # In sync mode the manager only advances when we heartbeat it.
            agent.heartbeat()

        child.join()

        self.assertTrue(os.path.isfile(manager_log))
    def test_reload_module(self):
        """
        Configure the context to have core.logging_config_class set to a fake logging
        class path, thus when reloading logging module the airflow.processor_manager
        logger should not be configured.
        """
        with settings_context(SETTINGS_FILE_VALID):
            # The child launched via DagFileProcessorAgent will attempt to
            # reload the logging module inside the new process.
            def processor_factory(file_path, zombies):
                return DagFileProcessor(file_path, False, [], zombies)

            dag_file = os.path.join(TEST_DAG_FOLDER, 'test_scheduler_dags.py')
            async_mode = 'sqlite' not in conf.get('core', 'sql_alchemy_conn')

            # Start from a clean slate: drop any pre-existing log file.
            log_location = conf.get('core', 'DAG_PROCESSOR_MANAGER_LOG_LOCATION')
            try:
                os.remove(log_location)
            except OSError:
                pass

            # max_runs=0 avoids redundant scheduling work in the manager.
            agent = DagFileProcessorAgent(
                dag_file, [], 0, processor_factory, async_mode)
            manager = agent._launch_process(
                agent._dag_directory, agent._file_paths, agent._max_runs,
                agent._processor_factory, agent._child_signal_conn,
                agent._stat_queue, agent._result_queue, agent._async_mode)
            if not async_mode:
                agent.heartbeat()

            manager.join()

            # The fake logging config never creates this file, so after the
            # reload it must still be absent.
            self.assertFalse(os.path.isfile(log_location))
# Example #3 (score: 0)
    def test_reload_module(self):
        """
        Configure the context to have core.logging_config_class set to a fake logging
        class path, thus when reloading logging module the airflow.processor_manager
        logger should not be configured.
        """
        with settings_context(SETTINGS_FILE_VALID):
            # Launching through DagFileProcessorAgent forces the child
            # process to reload the logging module under the fake config.
            def processor_factory(file_path, zombies):
                return DagFileProcessor(
                    file_path, False, [], zombies)

            sqlite_backend = 'sqlite' in conf.get('core', 'sql_alchemy_conn')
            async_mode = not sqlite_backend
            test_dag_path = os.path.join(
                TEST_DAG_FOLDER, 'test_scheduler_dags.py')

            log_file_loc = conf.get(
                'core', 'DAG_PROCESSOR_MANAGER_LOG_LOCATION')
            # Best-effort cleanup; a missing file is fine.
            try:
                os.remove(log_file_loc)
            except OSError:
                pass

            # Zero max_runs: bring the manager up without real work.
            processor_agent = DagFileProcessorAgent(test_dag_path, [], 0,
                                                    processor_factory,
                                                    async_mode)
            manager_process = processor_agent._launch_process(
                processor_agent._dag_directory,
                processor_agent._file_paths,
                processor_agent._max_runs,
                processor_agent._processor_factory,
                processor_agent._child_signal_conn,
                processor_agent._stat_queue,
                processor_agent._result_queue,
                processor_agent._async_mode)
            if not async_mode:
                # Sync mode requires an explicit heartbeat to progress.
                processor_agent.heartbeat()

            manager_process.join()

            # Since we are reloading logging config not creating this file,
            # we should expect it to be nonexistent.
            self.assertFalse(os.path.isfile(log_file_loc))
    def test_launch_process(self):
        """Launch the manager process and assert its log file gets created."""
        def processor_factory(file_path, zombies):
            return DagFileProcessor(
                file_path, False, [], zombies)

        sqlite_backend = 'sqlite' in conf.get('core', 'sql_alchemy_conn')
        async_mode = not sqlite_backend
        test_dag_path = os.path.join(
            TEST_DAG_FOLDER, 'test_scheduler_dags.py')

        # Best-effort removal so the final assertion proves creation.
        log_file_loc = conf.get(
            'core', 'DAG_PROCESSOR_MANAGER_LOG_LOCATION')
        try:
            os.remove(log_file_loc)
        except OSError:
            pass

        # Zero max_runs: start the manager without doing real work.
        processor_agent = DagFileProcessorAgent(test_dag_path, [], 0,
                                                processor_factory,
                                                async_mode)
        manager_process = processor_agent._launch_process(
            processor_agent._dag_directory,
            processor_agent._file_paths,
            processor_agent._max_runs,
            processor_agent._processor_factory,
            processor_agent._child_signal_conn,
            processor_agent._stat_queue,
            processor_agent._result_queue,
            processor_agent._async_mode)
        if not async_mode:
            # Sync mode requires an explicit heartbeat to progress.
            processor_agent.heartbeat()

        manager_process.join()

        self.assertTrue(os.path.isfile(log_file_loc))