Example #1
    def test_localtaskjob_maintain_heart_rate(self):
        dagbag = models.DagBag(
            dag_folder=TEST_DAG_FOLDER,
            include_examples=False,
        )
        dag = dagbag.dags.get('test_localtaskjob_double_trigger')
        task = dag.get_task('test_localtaskjob_double_trigger_task')

        session = settings.Session()

        dag.clear()
        dag.create_dagrun(run_id="test",
                          state=State.SUCCESS,
                          execution_date=DEFAULT_DATE,
                          start_date=DEFAULT_DATE,
                          session=session)

        ti_run = TI(task=task, execution_date=DEFAULT_DATE)
        ti_run.refresh_from_db()
        job1 = LocalTaskJob(task_instance=ti_run,
                            executor=SequentialExecutor())

        # Ensure we heartbeat only once and exit on the second
        # iteration of the loop in _execute().
        return_codes = [None, 0]

        def multi_return_code():
            return return_codes.pop(0)

        time_start = time.time()
        from airflow.task.task_runner.standard_task_runner import StandardTaskRunner
        with patch.object(StandardTaskRunner, 'start',
                          return_value=None) as mock_start:
            with patch.object(StandardTaskRunner,
                              'return_code') as mock_ret_code:
                mock_ret_code.side_effect = multi_return_code
                job1.run()
                self.assertEqual(mock_start.call_count, 1)
                self.assertEqual(mock_ret_code.call_count, 2)
        time_end = time.time()

        self.assertEqual(self.mock_base_job_sleep.call_count, 1)
        self.assertEqual(job1.state, State.SUCCESS)

        # Since the sleep call is patched, nothing else should be
        # sleeping to keep up with the heartbeat in unpatched code
        # paths, and we have already verified that the patched sleep
        # was called exactly once.
        self.assertLess(time_end - time_start, job1.heartrate)
        session.close()
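
The core trick in Example #1 is driving the job's polling loop by returning a different value on each call to the patched return_code. A minimal, self-contained sketch of that pattern, with illustrative names (Runner here is a hypothetical stand-in, not Airflow's task runner):

    from unittest.mock import patch

    class Runner:
        def return_code(self):
            return 0

    return_codes = [None, 0]
    with patch.object(Runner, 'return_code',
                      side_effect=lambda: return_codes.pop(0)) as mock_ret:
        runner = Runner()
        assert runner.return_code() is None  # first poll: still running
        assert runner.return_code() == 0     # second poll: finished
        assert mock_ret.call_count == 2
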
Example #2
    def test_loading_local_settings_without_logging_config(self):
        from airflow.logging_config import configure_logging, log
        with patch.object(log, 'debug') as mock_debug:
            configure_logging()
            mock_debug.assert_called_with(
                'Unable to load custom logging, using default config instead'
            )
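
Reduced to the standard library alone, the log-assertion technique used above looks like this; the logger and configure_logging below are illustrative stand-ins for Airflow's, not the real implementations:

    import logging
    from unittest.mock import patch

    log = logging.getLogger('example')

    def configure_logging():
        log.debug('Unable to load custom logging, using default config instead')

    with patch.object(log, 'debug') as mock_debug:
        configure_logging()
        mock_debug.assert_called_with(
            'Unable to load custom logging, using default config instead')
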
Example #3
    def test_when_the_config_key_does_not_exists(self):
        from airflow import logging_config
        with conf_vars({('core', 'logging_config_class'): None}):
            with patch.object(logging_config.log, 'debug') as mock_debug:
                logging_config.configure_logging()
                mock_debug.assert_any_call(
                    'Could not find key logging_config_class in config')
Example #5
    def test_loading_valid_local_settings(self):
        with settings_context(SETTINGS_FILE_VALID):
            from airflow.logging_config import configure_logging, log
            with patch.object(log, 'info') as mock_info:
                configure_logging()
                mock_info.assert_called_with(
                    'Successfully imported user-defined logging config from %s',
                    '{}.LOGGING_CONFIG'.format(SETTINGS_DEFAULT_NAME))
Example #6
    def test_loading_invalid_local_settings(self):
        from airflow.logging_config import configure_logging, log
        with settings_context(SETTINGS_FILE_INVALID):
            with patch.object(log, 'warning') as mock_warning:
                # Loading the config should fail and emit a warning.
                with self.assertRaises(ValueError):
                    configure_logging()

                mock_warning.assert_called_with(
                    'Unable to load the config, contains a configuration error.'
                )
Example #9
    def test_loading_valid_complex_local_settings(self):
        # Test what happens when the config is somewhere in a subfolder
        module_structure = 'etc.airflow.config'
        dir_structure = module_structure.replace('.', '/')
        with settings_context(SETTINGS_FILE_VALID, dir_structure):
            from airflow.logging_config import configure_logging, log
            with patch.object(log, 'info') as mock_info:
                configure_logging()
                mock_info.assert_called_with(
                    'Successfully imported user-defined logging config from %s',
                    'etc.airflow.config.{}.LOGGING_CONFIG'.format(
                        SETTINGS_DEFAULT_NAME))
Example #11
    def test_volume_mount(self):
        with patch.object(PodLauncher, 'log') as mock_logger:
            volume_mount = VolumeMount('test-volume',
                                       mount_path='/tmp/test_volume',
                                       sub_path=None,
                                       read_only=False)

            volume_config = {
                'persistentVolumeClaim': {
                    'claimName': 'test-volume'
                }
            }
            volume = Volume(name='test-volume', configs=volume_config)
            args = [
                "echo \"retrieved from mount\" > /tmp/test_volume/test.txt "
                "&& cat /tmp/test_volume/test.txt"
            ]
            k = KubernetesPodOperator(
                namespace='default',
                image="ubuntu:16.04",
                cmds=["bash", "-cx"],
                arguments=args,
                labels={"foo": "bar"},
                volume_mounts=[volume_mount],
                volumes=[volume],
                name="test",
                task_id="task",
                in_cluster=False,
                do_xcom_push=False,
            )
            context = create_context(k)
            k.execute(context=context)
            mock_logger.info.assert_any_call(b"retrieved from mount\n")
            actual_pod = self.api_client.sanitize_for_serialization(k.pod)
            self.expected_pod['spec']['containers'][0]['args'] = args
            self.expected_pod['spec']['containers'][0]['volumeMounts'] = [{
                'name': 'test-volume',
                'mountPath': '/tmp/test_volume',
                'readOnly': False
            }]
            self.expected_pod['spec']['volumes'] = [{
                'name': 'test-volume',
                'persistentVolumeClaim': {
                    'claimName': 'test-volume'
                }
            }]
            self.assertEqual(self.expected_pod, actual_pod)
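
Example #11 patches the class-level log attribute on PodLauncher and then asserts on a method of the resulting mock. Stripped of the Kubernetes machinery, the pattern reduces to the sketch below (Launcher is a hypothetical stand-in):

    import logging
    from unittest.mock import patch

    class Launcher:
        log = logging.getLogger('launcher')

        def run(self):
            self.log.info(b"retrieved from mount\n")

    with patch.object(Launcher, 'log') as mock_logger:
        Launcher().run()
        # The mock's child attribute recorded the call made via self.log.info.
        mock_logger.info.assert_any_call(b"retrieved from mount\n")
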
Example #12
    def test_execute_with_empty_destination_bucket(self, mock_hook):
        mock_hook.return_value.list.return_value = SOURCE_FILES_LIST
        operator = GoogleCloudStorageToGoogleCloudStorageOperator(
            task_id=TASK_ID, source_bucket=TEST_BUCKET,
            source_object=SOURCE_OBJECT_NO_WILDCARD,
            destination_bucket=None,
            destination_object=DESTINATION_OBJECT_PREFIX)

        with patch.object(operator.log, 'warning') as mock_warn:
            operator.execute(None)
            mock_warn.assert_called_with(
                'destination_bucket is None. Defaulting it to source_bucket (%s)',
                TEST_BUCKET
            )
            self.assertEqual(operator.destination_bucket, operator.source_bucket)
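
Note that the assertion checks the %s format string and its argument separately: logging calls use lazy formatting, so that is exactly what the mock records. A minimal illustration, with an illustrative bucket name:

    from unittest.mock import MagicMock

    log = MagicMock()
    log.warning('destination_bucket is None. Defaulting it to source_bucket (%s)',
                'my-bucket')
    # The format string and the argument are matched as two separate values.
    log.warning.assert_called_with(
        'destination_bucket is None. Defaulting it to source_bucket (%s)',
        'my-bucket')
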
Example #14
    def test_when_the_config_key_does_not_exists(self):
        from airflow import logging_config
        conf_get = conf.get

        def side_effect(*args):
            if args[1] == 'logging_config_class':
                raise AirflowConfigException
            else:
                return conf_get(*args)

        logging_config.conf.get = mock.Mock(side_effect=side_effect)

        with patch.object(logging_config.log, 'debug') as mock_debug:
            logging_config.configure_logging()
            mock_debug.assert_any_call(
                'Could not find key logging_config_class in config')
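
The side_effect function above raises only for the key under test and delegates every other lookup to the real conf.get. The same delegate-or-raise pattern in isolation, with illustrative names (MissingKeyError stands in for AirflowConfigException):

    from unittest.mock import Mock

    class MissingKeyError(Exception):
        pass

    real_get = {'other_key': 'value'}.get

    def side_effect(key):
        if key == 'logging_config_class':
            raise MissingKeyError(key)
        return real_get(key)

    mock_get = Mock(side_effect=side_effect)
    assert mock_get('other_key') == 'value'  # delegates to the real lookup
    try:
        mock_get('logging_config_class')     # raises for the key under test
        assert False, 'expected MissingKeyError'
    except MissingKeyError:
        pass
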
Example #16
    def test_localtaskjob_double_trigger(self):
        dagbag = models.DagBag(
            dag_folder=TEST_DAG_FOLDER,
            include_examples=False,
        )
        dag = dagbag.dags.get('test_localtaskjob_double_trigger')
        task = dag.get_task('test_localtaskjob_double_trigger_task')

        session = settings.Session()

        dag.clear()
        dr = dag.create_dagrun(run_id="test",
                               state=State.SUCCESS,
                               execution_date=DEFAULT_DATE,
                               start_date=DEFAULT_DATE,
                               session=session)
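        # Simulate a concurrently running attempt: mark the task instance
        # RUNNING on this host with a placeholder pid, so the job below
        # must refuse to start it a second time.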
        ti = dr.get_task_instance(task_id=task.task_id, session=session)
        ti.state = State.RUNNING
        ti.hostname = get_hostname()
        ti.pid = 1
        session.merge(ti)
        session.commit()

        ti_run = TI(task=task, execution_date=DEFAULT_DATE)
        ti_run.refresh_from_db()
        job1 = LocalTaskJob(task_instance=ti_run,
                            executor=SequentialExecutor())
        from airflow.task.task_runner.standard_task_runner import StandardTaskRunner
        with patch.object(StandardTaskRunner, 'start',
                          return_value=None) as mock_method:
            job1.run()
            mock_method.assert_not_called()

        ti = dr.get_task_instance(task_id=task.task_id, session=session)
        self.assertEqual(ti.pid, 1)
        self.assertEqual(ti.state, State.RUNNING)

        session.close()
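
The decisive assertion in Example #16 is assert_not_called: patching start both prevents a real process launch and records whether the job even attempted one. The bare pattern, with hypothetical Job and Runner classes:

    from unittest.mock import patch

    class Runner:
        def start(self):
            raise RuntimeError('must not start')

    class Job:
        def run(self, already_running):
            # Only launch the runner when no other attempt owns the task.
            if not already_running:
                Runner().start()

    with patch.object(Runner, 'start', return_value=None) as mock_start:
        Job().run(already_running=True)  # another process owns the task
        mock_start.assert_not_called()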