Example #1
import logging
import os
import sys

sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))

from airflow import DAG
from airflow.models.baseoperator import BaseOperator, BaseOperatorLink
from airflow.utils.dates import days_ago
from airflow.utils.decorators import apply_defaults

from airflow_utils import set_dag_id


class GoogleLink(BaseOperatorLink):

    name = 'Google'

    def get_link(self, operator, dttm):
        return "https://www.google.com"


class HelloOperator(BaseOperator):

    operator_extra_links = (GoogleLink(), )

    @apply_defaults
    def __init__(self, name: str, **kwargs) -> None:
        super().__init__(**kwargs)
        self.name = name

    def execute(self, context):
        message = "Hello {}".format(self.name)
        self.log.info(message)
        logging.info(message)
        return message


with DAG(dag_id=set_dag_id(__file__),
         start_date=days_ago(1),
         schedule_interval="@daily") as dag:

    hello_task = HelloOperator(task_id='sample-task', name='foo_bar')
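
Operator extra links render as buttons on the task instance panel in the Airflow UI. With DAG serialization (the default in Airflow 2), the webserver generally also needs the link class registered through a plugin; a minimal sketch, assuming the GoogleLink class above lives in an importable module:

from airflow.plugins_manager import AirflowPlugin


class GoogleLinkPlugin(AirflowPlugin):
    name = "google_link_plugin"
    # Registers the extra link so the webserver can resolve it.
    operator_extra_links = [GoogleLink()]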
Example #2
import os
import sys

sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))

from airflow import DAG
from airflow.sensors.external_task import ExternalTaskMarker
from airflow.operators.bash import BashOperator
from airflow.operators.dummy import DummyOperator
from airflow.utils.dates import days_ago

from airflow_utils import set_dag_id

with DAG(dag_id=set_dag_id(__file__) + '-parent',
         start_date=days_ago(1),
         schedule_interval="@daily") as parent_dag:

    start = DummyOperator(task_id='start')

    do_something = BashOperator(task_id='do_something',
                                bash_command="sleep 10s")

    # ExternalTaskMarker: clearing this task also clears the matching
    # task in the child DAG.
    end = ExternalTaskMarker(
        task_id="end",
        external_dag_id="dag-dependency-child",
        external_task_id="child_task1",
    )

    start >> do_something >> end
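
ExternalTaskMarker only handles the clearing side; the waiting side lives in the child DAG. A minimal sketch of that counterpart file, matching the marker's external_dag_id and external_task_id above (the parent dag_id and target task below are assumptions, since only the parent is shown):

from airflow import DAG
from airflow.sensors.external_task import ExternalTaskSensor
from airflow.utils.dates import days_ago

with DAG(dag_id='dag-dependency-child',
         start_date=days_ago(1),
         schedule_interval="@daily") as child_dag:

    child_task1 = ExternalTaskSensor(
        task_id='child_task1',
        # Assumed values: must point at the parent DAG's id and the task to
        # wait on; adjust to the real parent DAG above.
        external_dag_id='dag-dependency-parent',
        external_task_id='do_something',
    )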
Example #3
import json
import os
import sys

sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))

from airflow.models.dag import dag
from airflow.operators.python import task
from airflow.utils.dates import days_ago

from airflow_utils import set_dag_id

# These args will get passed on to each operator
# You can override them on a per-task basis during operator initialization
default_args = {
    'owner': 'airflow',
}


@dag(dag_id=set_dag_id(__file__),
     default_args=default_args,
     schedule_interval=None,
     start_date=days_ago(1))
def tutorial_taskflow_api_etl():
    """
    ### TaskFlow API Tutorial Documentation
    This is a simple ETL data pipeline example which demonstrates the use of
    the TaskFlow API using three simple tasks for Extract, Transform, and Load.
    Documentation that goes along with the Airflow TaskFlow API tutorial is
    located
    [here](https://airflow.apache.org/docs/stable/tutorial_taskflow_api.html)
    """
    @task
    def extract():
        """
        #### Extract task
        A simple Extract task to get data ready for the rest of the data
        pipeline; here a hardcoded JSON string simulates reading from a
        source system.
        """
        data_string = '{"1001": 301.27, "1002": 433.21, "1003": 502.22}'
        return json.loads(data_string)
Example #4
import os
import sys

sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))

from airflow import DAG
from airflow.operators.dummy import DummyOperator
from airflow.operators.subdag import SubDagOperator
from airflow.utils.dates import days_ago

from airflow_utils import set_dag_id


def subdag(parent_dag_name, child_dag_name, args):
    """Return a DAG to be used as a subdag of the main DAG."""
    dag_subdag = DAG(
        dag_id='{}.{}'.format(parent_dag_name, child_dag_name),
        default_args=args,
        start_date=days_ago(2),
        schedule_interval="@daily",
    )

    for i in range(5):
        DummyOperator(
            task_id='{}-task-{}'.format(child_dag_name, i + 1),
            default_args=args,
            dag=dag_subdag,
        )

    return dag_subdag


with DAG(
    dag_id=set_dag_id(__file__), start_date=days_ago(2), schedule_interval="@once", tags=['example']
) as dag:

    start = DummyOperator(
        task_id='start',
        dag=dag,
    )

    section_1 = SubDagOperator(
        task_id='section-1',
        subdag=subdag(set_dag_id(__file__), 'section-1', {}),
        dag=dag,
    )

    some_other_task = DummyOperator(
        task_id='some-other-task',
        dag=dag,
    )

    start >> section_1 >> some_other_task
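
SubDagOperator requires the nested DAG's id to follow the '<parent_dag_id>.<task_id>' convention, which is why the factory formats both names together; a quick sanity check, reusing the factory above (illustrative only):

sub = subdag(set_dag_id(__file__), 'section-1', {})
assert sub.dag_id == set_dag_id(__file__) + '.section-1'
# Tasks inside are named '<child>-task-1' .. '<child>-task-5':
print(sub.task_ids)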