Example #1
def test(self,
         execution_date: Optional[Union[Pendulum, datetime]] = None,
         airflow_db: Optional[AirflowDb] = None):
    import airflow
    from contextlib import nullcontext
    from airflow_plus.airflow_testing import mock_airflow_db
    # Default to "now" and normalise plain datetimes to pendulum instances.
    if not execution_date:
        execution_date = pendulum.now()
    elif isinstance(execution_date, datetime):
        execution_date = pendulum.instance(execution_date)
    # Spin up a throwaway metadata DB unless the caller already supplied one.
    with nullcontext() if airflow_db else mock_airflow_db():
        with airflow.DAG(dag_id='test_dag',
                         schedule_interval='@once',
                         start_date=datetime.min) as dag:
            airflow_op = self.airflow_operator
            dag >> airflow_op
            airflow.models.TaskInstance(
                airflow_op, execution_date=execution_date).run(
                    test_mode=True,
                    ignore_task_deps=True,
                    ignore_ti_state=True,
                )
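A typical call, mirroring Examples #2 and #6 below; MyOperator here stands in for any airflow_plus operator class (it appears in Example #4 but its definition is not shown):

from datetime import datetime

op = MyOperator(task_id='my_task', sql='SELECT 1')  # hypothetical operator, as in Example #4
op.test()                                           # no args: mocked metadata DB, execution date = now
op.test(execution_date=datetime(2020, 2, 18))       # plain datetimes are converted to pendulum instances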
Example #2
def test_database_to_filesystem_operator(tmp_sqlite, tmp_path):
    with mock_airflow_db() as af_db:
        print('Adding sqlite connection...')
        af_db.set_connection(
            conn_id='source_database',
            conn_type='sqlite',
            host=tmp_sqlite,
        )
        print(f'Adding data lake connection with base path {tmp_path}...')
        af_db.set_connection(
            conn_id='data_lake',
            conn_type='local_filesystem',
            extra={
                'base_path': str(tmp_path)
            }
        )

        op = DatabaseToFileSystem(
            task_id='testing_db_to_fs',
            db_hook='source_database',
            sql='SELECT * FROM stocks',
            fs_hook='data_lake',
            path='dumps/{{ ds }}/stocks.csv'
        )
        op.test(execution_date=datetime(2020, 2, 18), airflow_db=af_db)
        assert (tmp_path / 'dumps/2020-02-18/stocks.csv').read_text().strip() == """\
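(The expected CSV literal is truncated in the source.) The tmp_sqlite fixture is not shown either; a minimal sketch of what it plausibly provides, assuming a pytest fixture that seeds a stocks table in a throwaway SQLite file (table name matches the SQL above, columns are illustrative):

import sqlite3

import pytest


@pytest.fixture
def tmp_sqlite(tmp_path):
    # Hypothetical: build a throwaway SQLite DB with a `stocks` table to dump.
    db_path = tmp_path / 'source.db'
    conn = sqlite3.connect(str(db_path))
    conn.execute('CREATE TABLE stocks (symbol TEXT, price REAL)')
    conn.execute("INSERT INTO stocks VALUES ('ACME', 123.45)")
    conn.commit()
    conn.close()
    return str(db_path)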
Example #3
def test_create_airflow_dag():
    with mock_airflow_db() as db:
        settings.PLUGINS_FOLDER = str(Path(__file__).parent / 'example_plugins')
        settings.prepare_syspath()
        db.set_connection(
            conn_id='default_useless',
            conn_type='useless',
        )
        dag_bag = DagBag(
            dag_folder=str(Path(__file__).parent / 'example_dags'),
            include_examples=False,
        )
        assert 'test_dag' in dag_bag.dag_ids
        assert 'test_dag_using_hook' in dag_bag.dag_ids
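The example_dags folder itself is not shown; a plausible minimal test_dag.py that this DagBag assertion would pick up, written here with plain Airflow 1.x primitives (the real files presumably use airflow_plus operators instead):

# example_dags/test_dag.py (hypothetical)
from datetime import datetime

from airflow import DAG
from airflow.operators.dummy_operator import DummyOperator

with DAG(dag_id='test_dag',
         schedule_interval='@once',
         start_date=datetime.min) as dag:
    DummyOperator(task_id='noop')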
Example #4
def test_make_airflow_operator(capsys):
    op = MyOperator(task_id='test_op', sql='SELECT 1')
    with mock_airflow_db():
        with DAG(dag_id='test_dag',
                 schedule_interval='@once',
                 start_date=datetime.min) as dag:
            airflow_op = op.airflow_operator
            dag >> airflow_op
            TaskInstance(airflow_op,
                         execution_date=pendulum.datetime(
                             2020, 2, 18)).run(test_mode=True)
            captured = capsys.readouterr()
            assert '2020-02-18' in captured.out
            assert 'SELECT 1' in captured.out
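MyOperator is defined elsewhere; a plausible definition, following the dataclass pattern of Example #6 (Operator and DAGContext are the airflow_plus base classes used there, whose import path the examples never show; the printed fields are inferred from the assertions):

from dataclasses import dataclass

@dataclass
class MyOperator(Operator):
    task_id: str
    sql: str

    def execute(self, dag_context: DAGContext):
        # Assumed: DAGContext exposes the execution date; printing both values is
        # what lets the test assert on '2020-02-18' and 'SELECT 1' in stdout.
        print(dag_context.execution_date)
        print(self.sql)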
Example #5
def run_modded_webserver(hostname: str, port: int, debug: bool, mock_db: bool):
    from contextlib import nullcontext
    # replace_logos()
    # Prepend each custom hook's connection type so it shows up in the UI dropdown.
    for cls in ConnectionHelper.custom_hook_classes():
        conn_type = getattr(cls, 'conn_type', None)
        if conn_type:
            conn_type_long = getattr(cls, 'conn_type_long', None)
            # noinspection PyProtectedMember
            Connection._types.insert(0, (conn_type, conn_type_long or conn_type))

    args = Args(hostname=hostname, port=port, debug=debug)
    # Run against a mocked metadata DB when requested, otherwise the real one.
    with mock_airflow_db() if mock_db else nullcontext():
        webserver(args)
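Args is a duck-typed stand-in for Airflow's parsed CLI arguments; a minimal version (this sketch appears commented out in the original scripts) looks like:

from argparse import Namespace

class Args(Namespace):
    def __init__(self, **kwargs):
        self.args = kwargs or {}

    def __getattr__(self, item):
        # Any attribute the webserver asks for that we didn't set resolves to
        # None instead of raising AttributeError.
        return self.args.get(item)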
Example #6
def test_with_hook(capsys):
    with mock_airflow_db() as db:
        settings.PLUGINS_FOLDER = str(
            Path(__file__).parent / 'example_plugins')
        settings.prepare_syspath()
        db.set_connection(
            conn_id='default_useless',
            conn_type='useless',
        )
        # noinspection PyUnresolvedReferences
        from useless_plugin.useless_hook import UselessHook

        @dataclass
        class MyOperatorWithHook(Operator):
            task_id: str
            my_hook: UselessHook

            def execute(self, dag_context: DAGContext):
                print(self.my_hook.conn_id)

        op = MyOperatorWithHook(task_id='op1', my_hook='default_useless')
        op.test(airflow_db=db)
        captured = capsys.readouterr()
        assert 'default_useless' in captured.out
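UselessHook lives in the example_plugins folder and is not shown; a plausible sketch, given that run_modded_webserver (Example #5) reads conn_type and conn_type_long off custom hook classes, and that the operator above receives a conn_id string that resolves to a hook carrying that id:

# example_plugins/useless_plugin/useless_hook.py (hypothetical)
class UselessHook:
    conn_type = 'useless'
    conn_type_long = 'Useless (demo hook)'

    def __init__(self, conn_id: str):
        self.conn_id = conn_id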
Example #7
from pathlib import Path

from airflow import settings

from airflow_plus.airflow_testing import mock_airflow_db
from airflow_plus.cli import run_modded_webserver

if __name__ == '__main__':
    with mock_airflow_db():
        settings.PLUGINS_FOLDER = str(Path(__file__).parent / 'example_plugins')
        settings.prepare_syspath()
        run_modded_webserver(hostname='0.0.0.0', port=8080, debug=True, mock_db=False)
Example #8
import os
from pathlib import Path

from airflow import settings
from airflow.models import DagBag

from airflow_plus.airflow_testing import mock_airflow_db
from airflow_plus.cli import run_modded_webserver

if __name__ == '__main__':
    os.environ['AIRFLOW_HOME'] = str(
        Path(__file__).parent / 'test_airflow_home')

    os.environ['AIRFLOW__CORE__LOAD_EXAMPLES'] = 'false'
    with mock_airflow_db() as db:
        settings.prepare_syspath()
        db.set_connection(
            conn_id='default_useless',
            conn_type='useless',
        )

        # Parse the example DAGs up front so import errors surface before the UI starts.
        dag_bag = DagBag(
            dag_folder=str(Path(__file__).parent / 'example_dags'),
            include_examples=False,
        )
        # mock_db=False: the surrounding mock_airflow_db() already provides the DB,
        # as in Example #7 (the original snippet is cut off at this call).
        run_modded_webserver(hostname='0.0.0.0',
                             port=8080,
                             debug=True,
                             mock_db=False)