Example #1: run a single notebook with PapermillOperator, then mark completion with a BashOperator.
from airflow import DAG
from airflow.operators.papermill_operator import PapermillOperator
from airflow.operators.bash_operator import BashOperator
from datetime import datetime


default_args = {
    'owner': 'Utsav',
    'start_date': datetime(2019, 1, 25),
}

# Manually triggered DAG (schedule_interval=None) that executes one notebook.
dag = DAG('papermill_DAG', default_args=default_args, schedule_interval=None)


# Run schedular.ipynb with papermill, injecting the templated `msgs` parameter,
# and write the executed copy to op1.ipynb.
t1 = PapermillOperator(
    task_id="Job_Schedular",
    input_nb="schedular.ipynb",
    # output_nb="op-{{ execution_date }}.ipynb",
    output_nb="op1.ipynb",
    parameters={"msgs": "Ran from Airflow at {{ execution_date }}!"},
    dag=dag,
)

# Simple downstream task that only signals the run finished.
t2 = BashOperator(
    task_id="Finished",
    bash_command="echo Finished",
    dag=dag,
)

t1.set_downstream(t2)
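
For the parameters above to reach the notebook, papermill expects a cell tagged "parameters" in schedular.ipynb. A minimal sketch of that cell, assuming the notebook only prints the injected message (the default value below is an assumption, not taken from the original notebook):

# Cell in schedular.ipynb carrying the Jupyter cell tag "parameters".
# papermill injects a new cell right after it that overrides these defaults
# with the values passed from the PapermillOperator.
msgs = "default message"  # overridden with the templated value from Airflow

# Any later cell can use the injected value.
print(msgs)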
Example #2: run two notebooks in sequence with PapermillOperator, reading their location from dag.params.
    # (the imports and the opening of the t1 task are omitted in this excerpt;
    # provide_context=True suggests t1 is a PythonOperator)
    provide_context=True,
    dag=dag,
)

# Execute Notebook01.ipynb from the configured base directory and write the
# executed copy into a per-execution-date output folder.
t2 = PapermillOperator(
    task_id='notebook01',
    depends_on_past=True,
    input_nb=dag.params['base_directory'] + "Notebook01.ipynb",
    output_nb=dag.params['base_directory'] + "output/{{ execution_date }}/" +
    "Notebook01.ipynb",
    parameters={},  # no parameters injected into the notebook
    dag=dag,
)

# Same pattern for the second notebook.
t3 = PapermillOperator(
    task_id='notebook02',
    depends_on_past=True,
    input_nb=dag.params['base_directory'] + "Notebook02.ipynb",
    output_nb=dag.params['base_directory'] + "output/{{ execution_date }}/" +
    "Notebook02.ipynb",
    parameters={},  # no parameters injected into the notebook
    dag=dag,
)

# Use the DAG file's module docstring as the DAG documentation shown in the UI.
dag.doc_md = __doc__

# Run the notebooks strictly in sequence: t1 -> t2 -> t3.
t1.set_downstream(t2)
t2.set_downstream(t3)
# t3.set_upstream([t1, t2])
# t1.set_downstream([t2, t3])
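
The snippet relies on dag.params['base_directory'], which is never set in the excerpt. A minimal sketch of how that value could be supplied when the DAG is constructed; the DAG id, schedule, and notebook path here are assumptions, not from the original:

from airflow import DAG
from datetime import datetime

default_args = {
    'owner': 'airflow',
    'start_date': datetime(2019, 1, 25),
}

# Values passed via params become readable in the DAG file as
# dag.params['base_directory'], which the operators above concatenate
# into their input_nb/output_nb paths.
dag = DAG(
    'notebook_pipeline',                                # assumed DAG id
    default_args=default_args,
    schedule_interval='@daily',                         # assumed schedule
    params={'base_directory': '/path/to/notebooks/'},   # assumed location
)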