Code example #1
    def setUp(self):
        from airflow.providers.ssh.hooks.ssh import SSHHook

        hook = SSHHook(ssh_conn_id='ssh_default')
        hook.no_host_key_check = True
        args = {
            'owner': 'airflow',
            'start_date': DEFAULT_DATE,
        }
        dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once', default_args=args)
        dag.schedule_interval = '@once'
        self.hook = hook
        self.dag = dag
        self.test_dir = "/tmp"
        self.test_local_dir = "/tmp/tmp2"
        self.test_remote_dir = "/tmp/tmp1"
        self.test_local_filename = 'test_local_file'
        self.test_remote_filename = 'test_remote_file'
        self.test_local_filepath = '{0}/{1}'.format(self.test_dir,
                                                    self.test_local_filename)
        # Local Filepath with Intermediate Directory
        self.test_local_filepath_int_dir = '{0}/{1}'.format(
            self.test_local_dir, self.test_local_filename)
        self.test_remote_filepath = '{0}/{1}'.format(self.test_dir,
                                                     self.test_remote_filename)
        # Remote Filepath with Intermediate Directory
        self.test_remote_filepath_int_dir = '{0}/{1}'.format(
            self.test_remote_dir, self.test_remote_filename)
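This setUp only prepares the SSH hook, the DAG, and the local/remote paths; the transfer itself is exercised by the individual test methods. A minimal sketch of how such a fixture is typically consumed is shown below; the SFTPOperator/SFTPOperation import and the operation and create_intermediate_dirs parameters are assumptions based on the SFTP provider, not part of the snippet above.

    def test_file_transfer_put(self):
        from airflow.providers.sftp.operators.sftp import SFTPOperation, SFTPOperator

        # Push the prepared local file to the remote path that contains an
        # intermediate directory, letting the operator create it remotely.
        put_task = SFTPOperator(
            task_id='test_sftp_put',
            ssh_hook=self.hook,
            local_filepath=self.test_local_filepath,
            remote_filepath=self.test_remote_filepath_int_dir,
            operation=SFTPOperation.PUT,
            create_intermediate_dirs=True,
            dag=self.dag,
        )
        # Executed directly here for brevity; a full test would normally run
        # the task through a TaskInstance.
        put_task.execute(None)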
Code example #2
    def setUp(self):
        from airflow.contrib.hooks.ssh_hook import SSHHook
        from airflow.hooks.S3_hook import S3Hook

        hook = SSHHook(ssh_conn_id='ssh_default')
        s3_hook = S3Hook('aws_default')
        hook.no_host_key_check = True
        args = {
            'owner': 'airflow',
            'start_date': DEFAULT_DATE,
            'provide_context': True
        }
        dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once', default_args=args)
        dag.schedule_interval = '@once'

        self.hook = hook
        self.s3_hook = s3_hook

        self.ssh_client = self.hook.get_conn()
        self.sftp_client = self.ssh_client.open_sftp()

        self.dag = dag
        self.s3_bucket = BUCKET
        self.sftp_path = SFTP_PATH
        self.s3_key = S3_KEY
Code example #3
File: test_dag.py  Project: xuusheng/airflow
    def test_fractional_seconds(self):
        """
        Tests if fractional seconds are stored in the database
        """
        dag_id = "test_fractional_seconds"
        dag = DAG(dag_id=dag_id)
        dag.schedule_interval = '@once'
        dag.add_task(
            BaseOperator(task_id="faketastic",
                         owner='Also fake',
                         start_date=datetime_tz(2015, 1, 2, 0, 0)))

        start_date = timezone.utcnow()

        run = dag.create_dagrun(run_id='test_' + start_date.isoformat(),
                                execution_date=start_date,
                                start_date=start_date,
                                state=State.RUNNING,
                                external_trigger=False)

        run.refresh_from_db()

        self.assertEqual(start_date, run.execution_date,
                         "dag run execution_date loses precision")
        self.assertEqual(start_date, run.start_date,
                         "dag run start_date loses precision ")
        self._clean_up(dag_id)
Code example #4
    def setUp(self):
        configuration.load_test_config()
        from airflow.contrib.hooks.ssh_hook import SSHHook
        from airflow.hooks.S3_hook import S3Hook

        hook = SSHHook(ssh_conn_id='ssh_default')
        s3_hook = S3Hook('aws_default')
        hook.no_host_key_check = True
        args = {
            'owner': 'airflow',
            'start_date': DEFAULT_DATE,
            'provide_context': True
        }
        dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once', default_args=args)
        dag.schedule_interval = '@once'

        self.hook = hook
        self.s3_hook = s3_hook

        self.ssh_client = self.hook.get_conn()
        self.sftp_client = self.ssh_client.open_sftp()

        self.dag = dag
        self.s3_bucket = BUCKET
        self.sftp_path = SFTP_PATH
        self.s3_key = S3_KEY
Code example #5
    def setUp(self):
        configuration.load_test_config()
        from airflow.contrib.hooks.ssh_hook import SSHHook
        hook = SSHHook(ssh_conn_id='ssh_default')
        hook.no_host_key_check = True
        args = {
            'owner': 'airflow',
            'start_date': DEFAULT_DATE,
            'provide_context': True
        }
        dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once', default_args=args)
        dag.schedule_interval = '@once'
        self.hook = hook
        self.dag = dag
        self.test_dir = "/tmp"
        self.test_local_dir = "/tmp/tmp2"
        self.test_remote_dir = "/tmp/tmp1"
        self.test_local_filename = 'test_local_file'
        self.test_remote_filename = 'test_remote_file'
        self.test_local_filepath = '{0}/{1}'.format(self.test_dir,
                                                    self.test_local_filename)
        # Local Filepath with Intermediate Directory
        self.test_local_filepath_int_dir = '{0}/{1}'.format(
            self.test_local_dir, self.test_local_filename)
        self.test_remote_filepath = '{0}/{1}'.format(self.test_dir,
                                                     self.test_remote_filename)
        # Remote Filepath with Intermediate Directory
        self.test_remote_filepath_int_dir = '{0}/{1}'.format(
            self.test_remote_dir, self.test_remote_filename)
Code example #6
    def setUp(self):
        from airflow.contrib.hooks.ssh_hook import SSHHook
        hook = SSHHook(ssh_conn_id='ssh_default')
        hook.no_host_key_check = True
        args = {
            'owner': 'airflow',
            'start_date': DEFAULT_DATE,
        }
        dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once', default_args=args)
        dag.schedule_interval = '@once'
        self.hook = hook
        self.dag = dag
Code example #7
    def setUp(self):
        configuration.load_test_config()
        from airflow.contrib.hooks.ssh_hook import SSHHook
        hook = SSHHook(ssh_conn_id='ssh_default')
        hook.no_host_key_check = True
        args = {
            'owner': 'airflow',
            'start_date': DEFAULT_DATE,
            'provide_context': True
        }
        dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once', default_args=args)
        dag.schedule_interval = '@once'
        self.hook = hook
        self.dag = dag
Code example #8
    def test_schedule_dag_once(self):
        """
        Tests scheduling a dag scheduled for @once - should be scheduled the first time
        it is called, and not scheduled the second.
        """
        dag_id = "test_schedule_dag_once"
        dag = DAG(dag_id=dag_id)
        dag.schedule_interval = '@once'
        dag.add_task(BaseOperator(
            task_id="faketastic",
            owner='Also fake',
            start_date=datetime_tz(2015, 1, 2, 0, 0)))
        dag_run = DagFileProcessor(dag_ids=[], log=mock.MagicMock()).create_dag_run(dag)
        dag_run2 = DagFileProcessor(dag_ids=[], log=mock.MagicMock()).create_dag_run(dag)

        self.assertIsNotNone(dag_run)
        self.assertIsNone(dag_run2)
        dag.clear()
        self._clean_up(dag_id)
Code example #9
    def setUp(self):
        configuration.load_test_config()
        from airflow.contrib.hooks.ssh_hook import SSHHook
        hook = SSHHook(ssh_conn_id='ssh_default')
        hook.no_host_key_check = True
        args = {
            'owner': 'airflow',
            'start_date': DEFAULT_DATE,
            'provide_context': True
        }
        dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once', default_args=args)
        dag.schedule_interval = '@once'
        self.hook = hook
        self.dag = dag
        self.test_dir = "/tmp"
        self.test_local_filename = 'test_local_file'
        self.test_remote_filename = 'test_remote_file'
        self.test_local_filepath = '{0}/{1}'.format(self.test_dir,
                                                    self.test_local_filename)
        self.test_remote_filepath = '{0}/{1}'.format(self.test_dir,
                                                     self.test_remote_filename)
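
All of the examples above assign dag.schedule_interval after the DAG object has been created, which is convenient inside a shared setUp. Outside of test fixtures the interval is more commonly passed straight to the DAG constructor; the following is a minimal sketch with an illustrative dag_id and start_date that are not taken from the snippets above.

from datetime import datetime

from airflow import DAG

# '@once' produces exactly one DagRun; setting it at construction time
# has the same effect as assigning dag.schedule_interval afterwards.
dag = DAG(
    dag_id='example_schedule_once',
    schedule_interval='@once',
    start_date=datetime(2015, 1, 2),
    default_args={'owner': 'airflow'},
)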