Example #1
task0 = SSHExecuteOperator(task_id="sync_jars",
                           bash_command=sync_jars,
                           ssh_hook=sshHook,
                           dag=dag)

task1 = SSHExecuteOperator(task_id="import_nation",
                           bash_command=import_nation,
                           ssh_hook=sshHook,
                           dag=dag)

task2 = SSHExecuteOperator(task_id="import_region",
                           bash_command=import_region,
                           ssh_hook=sshHook,
                           dag=dag)

task3 = SSHExecuteOperator(task_id="spark_dimNation",
                           bash_command=spark_dimNation,
                           ssh_hook=sshHook,
                           dag=dag)

task4 = SSHExecuteOperator(task_id="export_dimNation",
                           bash_command=export_dimNation,
                           ssh_hook=sshHook,
                           dag=dag)

task0.set_downstream(task1)
task1.set_downstream(task2)
task2.set_downstream(task3)
task3.set_downstream(task4)
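
This example (and the ones below) elides the setup it depends on: SSHExecuteOperator lives in Airflow's contrib package (it was replaced by SSHOperator in later releases), and the sshHook, dag, and command-string variables must already be defined. A minimal sketch of that setup, assuming a hypothetical connection id, dag_id, schedule, and script path:

from datetime import datetime, timedelta

from airflow import DAG
from airflow.contrib.hooks.ssh_hook import SSHHook
from airflow.contrib.operators.ssh_execute_operator import SSHExecuteOperator

# Assumption: an Airflow connection named "ssh_default" points at the
# remote host; the old contrib SSHHook is configured via conn_id.
sshHook = SSHHook(conn_id="ssh_default")

dag = DAG(dag_id="star_schema_etl",  # hypothetical dag_id
          start_date=datetime(2017, 1, 1),
          schedule_interval=timedelta(days=1))

# Each bash_command argument is a plain shell command string, e.g.:
sync_jars = "bash /opt/etl/sync_jars.sh"  # hypothetical path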
Example #2
task5 = SSHExecuteOperator(task_id="export_dimCustomer",
                           bash_command=export_dimCustomer,
                           ssh_hook=sshHook,
                           dag=dag)

task6 = SSHExecuteOperator(task_id="export_dimSupplier",
                           bash_command=export_dimSupplier,
                           ssh_hook=sshHook,
                           dag=dag)

task7 = SSHExecuteOperator(task_id="export_dimPart",
                           bash_command=export_dimPart,
                           ssh_hook=sshHook,
                           dag=dag)

task8 = SSHExecuteOperator(task_id="export_dimDate",
                           bash_command=export_dimDate,
                           ssh_hook=sshHook,
                           dag=dag)

task9 = SSHExecuteOperator(task_id="export_salesFact",
                           bash_command=export_salesFact,
                           ssh_hook=sshHook,
                           dag=dag)

task0.set_downstream(task1)
task1.set_downstream(task2)
task3.set_upstream(task2)
task3.set_downstream(
    task_or_task_list=[task4, task5, task6, task7, task8, task9])
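
Passing a list to set_downstream fans task3 out to six export tasks that can run in parallel. The same wiring can be written with Airflow's bitshift composition, which reads in pipeline order:

# Equivalent dependency wiring using bitshift operators:
task0 >> task1 >> task2 >> task3
task3 >> [task4, task5, task6, task7, task8, task9]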
Example #3

# The opening of this snippet was truncated. spark_item and the header of
# hive_item are reconstructed by analogy with the ec_shopinfo tasks below;
# both task_ids and the ec_iteminfo_parse.sh script name are inferred.
spark_item = SSHExecuteOperator(
    task_id="ec_iteminfo_spark",
    bash_command='(bash {path}/ec_iteminfo_parse.sh)'.format(path=path),
    ssh_hook=sshHook,
    dag=dag)

hive_item = SSHExecuteOperator(
    task_id="ec_iteminfo_hive",
    bash_command='(bash {path}/ec_iteminfo_import.sh {last_par})'.format(path=path, last_par=iteminfo_last_update()),
    ssh_hook=sshHook,
    dag=dag)

spark_shop = SSHExecuteOperator(
    task_id="ec_shopinfo_spark",
    bash_command='(bash {path}/ec_shopinfo_parse.sh)'.format(path=path),
    ssh_hook=sshHook,
    dag=dag)

hive_shop = SSHExecuteOperator(
    task_id="ec_shopinfo_hive",
    bash_command='(bash {path}/ec_shopinfo_import.sh {last_par})'.format(path=path, last_par=shopinfo_last_update()),
    ssh_hook=sshHook,
    dag=dag)

final_ops = SSHExecuteOperator(
    task_id="ec_hdfs_ops",
    bash_command='(bash {path}/ec_item_shop_final_ops.sh)'.format(path=path),
    ssh_hook=sshHook,
    dag=dag)

email = EmailOperator(task_id='ec_item_and_shopinfo_email',
                      to=['*****@*****.**'],
                      subject='ec item and shop info workflow',
                      html_content='Jobs executed successfully.',
                      dag=dag)

spark_item.set_downstream(hive_item)
hive_item.set_downstream(spark_shop)
spark_shop.set_downstream(hive_shop)
hive_shop.set_downstream(final_ops)
final_ops.set_downstream(email)
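
Note that the {last_par} placeholders are filled in by iteminfo_last_update() and shopinfo_last_update() when the DAG file is parsed, not when the tasks run, because str.format() executes at parse time. The helpers themselves are not shown; a hypothetical sketch, assuming each returns a date string used as the import scripts' partition argument:

from datetime import datetime, timedelta

def iteminfo_last_update():
    # Hypothetical implementation: use yesterday's date as the partition
    # argument passed to ec_iteminfo_import.sh. The real helper is not
    # shown in the example and may compute this differently.
    return (datetime.now() - timedelta(days=1)).strftime("%Y-%m-%d")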