Example #1
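A command-line entry point that walks a directory of configuration files and, for each file, creates an analysis run and triggers the matching workflow DAG with the effective configuration.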
import os

from airflow.operators.dagrun_operator import TriggerDagRunOperator  # Airflow 1.x path


def launch_workflow_command(args):
    """Create one analysis run per config file and trigger the workflow DAG for each.

    Helpers such as get_workflow_by_id, create_configuration_from_file,
    create_analysis_run, set_scheduled, get_effective_configuration and
    set_up_dag_run are project-specific and assumed to be imported elsewhere.
    """
    config_location = args.config_location
    analysis_id = args.analysis_id
    workflow_id = args.workflow_id

    my_workflow = get_workflow_by_id(workflow_id)

    id_from_filename = args.id_from_filename

    if not os.path.isdir(config_location):
        raise ValueError("config_location must be a path to a directory")

    # A single operator instance is reused to trigger one DAG run per config file.
    my_dag_run = TriggerDagRunOperator(
        trigger_dag_id=my_workflow.workflow_name,
        python_callable=set_up_dag_run,
        task_id="run_my_workflow",
        owner="airflow")

    for root, dirs, files in os.walk(config_location):
        for config_file in files:
            current_config = create_configuration_from_file(
                os.path.join(root, config_file), id_from_filename)

            current_analysis_run = create_analysis_run(
                analysis_id, current_config.config_id, workflow_id)
            set_scheduled(current_analysis_run)

            effective_config = get_effective_configuration(
                current_analysis_run.analysis_run_id)
            effective_config[
                "analysis_run_id"] = current_analysis_run.analysis_run_id

            # execute() invokes set_up_dag_run with this context dict.
            my_dag_run.execute({"config": effective_config})
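Example #2

The classic Airflow 1.x controller-DAG example: conditionally_trigger reads condition_param from the task params and, when it is truthy, attaches a message payload and triggers example_trigger_target_dag.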
import pprint
from datetime import datetime

from airflow import DAG
from airflow.operators.dagrun_operator import TriggerDagRunOperator

pp = pprint.PrettyPrinter(indent=4)


def conditionally_trigger(context, dag_run_obj):
    """Decide whether or not to trigger the remote DAG."""
    c_p = context['params']['condition_param']
    print("Controller DAG : conditionally_trigger = {}".format(c_p))
    if c_p:
        dag_run_obj.payload = {'message': context['params']['message']}
        pp.pprint(dag_run_obj.payload)
        return dag_run_obj
    # Returning None (implicitly) skips the trigger when the condition is falsy.


# Define the DAG
dag = DAG(dag_id='example_trigger_controller_dag',
          default_args={
              "owner": "airflow",
              "start_date": datetime.now()
          },
          schedule_interval='@once')

# Define the single task in this controller example DAG
trigger = TriggerDagRunOperator(task_id='test_trigger_dagrun',
                                trigger_dag_id="example_trigger_target_dag",
                                python_callable=conditionally_trigger,
                                params={
                                    'condition_param': True,
                                    'message': 'Hello World'
                                },
                                dag=dag)
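Neither the target DAG nor its task is part of this listing; the following is a minimal sketch, assuming Airflow 1.x, of how example_trigger_target_dag could consume the payload. The payload set on dag_run_obj is delivered to the triggered run as dag_run.conf, which a PythonOperator can read from its context.

from datetime import datetime

from airflow import DAG
from airflow.operators.python_operator import PythonOperator


def print_conf(**context):
    # The controller's dag_run_obj.payload arrives here as dag_run.conf.
    message = context['dag_run'].conf.get('message')
    print("Remotely received value of {} for key=message".format(message))


target_dag = DAG(dag_id='example_trigger_target_dag',
                 default_args={"owner": "airflow",
                               "start_date": datetime.now()},
                 schedule_interval=None)

run_this = PythonOperator(task_id='run_this',
                          python_callable=print_conf,
                          provide_context=True,
                          dag=target_dag)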
Example #3
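An SFTP/SSH pipeline: two file sensors push file names via XCom, and pck_add_attr_trigger triggers the downstream DAG only when manufacturing and container files with matching dates are both present.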
"""

# The shell command string file_to_inprogress and the DAG object are assumed
# to be defined earlier in the original file.
task_file_to_inprogress = SSHOperator(task_id='file_to_inprogress',
                                      ssh_conn_id='envShortName_ph9_edge_node_ssh',
                                      command=file_to_inprogress,
                                      do_xcom_push=True,
                                      dag=dag)


def pck_add_attr_trigger(context, dag_run_obj):
    """Trigger the downstream DAG only when matching mfg and cont files exist."""
    task_instance = context["task_instance"]
    mfg_file = task_instance.xcom_pull(task_ids='sftp_mfg_file_sensor', key='file_name').strip()
    cont_file = task_instance.xcom_pull(task_ids='sftp_cont_file_sensor', key='file_name').strip()
    print("mfg_file : " + mfg_file)
    print("cont_file : " + cont_file)
    if len(mfg_file) > 0 and len(cont_file) > 0:
        # File names are expected to carry a date as the fourth underscore-separated field.
        mfg_date = mfg_file.split("_")[3]
        cont_date = cont_file.split("_")[3]
        if mfg_date == cont_date:
            dag_run_obj.payload = {
                "mfg_file": mfg_file,
                "cont_file": cont_file,
            }
            return dag_run_obj
    # Returning None (implicitly) skips the trigger when the files do not match.

task_pck_add_attr_trigger = TriggerDagRunOperator(task_id='pck_add_attr_trigger',
                                                  trigger_dag_id="envShortName_ph_pck_add_attr_process",
                                                  python_callable=pck_add_attr_trigger,
                                                  dag=dag)

task_sftp_mfg_file_sensor >> task_sftp_cont_file_sensor >> task_file_to_inprogress >> task_pck_add_attr_trigger
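As in Example #2, the payload set here arrives in the triggered envShortName_ph_pck_add_attr_process run as dag_run.conf (see the target-DAG sketch above).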
Example #4
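A MySQL-to-Snowflake migration DAG. Note that this snippet begins mid-call: the operator to which the leading connection parameters belong is not shown.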
    's3_connection_id': s3_connection_id,
    's3_bucket_name': s3_bucket_name,
    'snowflake_username': snowflake_username,
    'snowflake_password': snowflake_password,
    'snowflake_account': snowflake_account,
    'snowflake_database': snowflake_database,
    'snowflake_stage_schema': snowflake_stage_schema,
    'sfstage': sfstage
    },
    trigger_rule="all_success",
    dag=main_dag
)

trigger = TriggerDagRunOperator(
    task_id='trigger_dagrun',
    trigger_dag_id='Migration_All_Databases_Mysql_to_Snowflake_no_subdag_addus',
    trigger_rule='all_done',
    dag=main_dag
)

    
# End dummy task.
end = DummyOperator(
    task_id='end',
    dag=main_dag
)



database_list = functs.get_database_list(
    mysql_username, mysql_password, mysql_hostname, mysql_port,
    trim_by_patterns=database_include_patterns,
    excluded_databases=excluded_databases)
database_list.sort()
for i in database_list:
    pass  # Per-database task creation is not shown in the original snippet.
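Example #5

This final snippet appears to be a separate example (it targets a different dag object): five chained operators fetch a workflow definition from MongoDB, expand it into tasks, queue them, and finally trigger the review DAG using the same conditionally_trigger pattern as in Example #2.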
t1 = GetWorkflowOperator(
    task_id='get_workflow_by_name',
    mongo_conn_id="mongo_default",
    mongo_database="karakuri",
    mongo_collection="workflows",
    mongo_query={"name": "substitut_actual_workflow_name"},
    dag=dag)

t2 = GetWorkflowDocsOperator(task_id='execute_workflow_query',
                             provide_context=True,
                             mongo_conn_id="mongo_default",
                             dag=dag)

t3 = TransformDocToTaskOperator(task_id='transform_query_result_to_tasks',
                                provide_context=True,
                                mongo_conn_id="mongo_default",
                                dag=dag)

t4 = QueueTaskOperator(task_id='save_tasks',
                       provide_context=True,
                       mongo_conn_id="mongo_default",
                       dag=dag)

t5 = TriggerDagRunOperator(
    task_id='trigger_sfsc_review_new_airflow_process_tasks',
    trigger_dag_id="sfsc_review_new_airflow_process_tasks",
    python_callable=conditionally_trigger,
    params={'condition_param': 'save_tasks'},
    dag=dag)

t1 >> t2 >> t3 >> t4 >> t5
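All of the snippets above use the Airflow 1.x form of TriggerDagRunOperator, in which a python_callable decides whether to trigger and builds the payload. In Airflow 2.x that callable was removed: the payload is passed directly via conf, and any conditional logic moves into an upstream task (for example a ShortCircuitOperator). A minimal sketch of the Airflow 2.x equivalent of Example #2's trigger task:

from airflow.operators.trigger_dagrun import TriggerDagRunOperator

trigger = TriggerDagRunOperator(
    task_id='test_trigger_dagrun',
    trigger_dag_id='example_trigger_target_dag',
    conf={'message': 'Hello World'},  # delivered to the target run as dag_run.conf
    dag=dag)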