            },
        ]
    },
    region_name='us-east-1',
    launch_type='EC2',
    dag=dag)

run_job_notify = SlackAPIPostOperator(
    task_id="Copy_%s_notify" % (x),
    username='******',
    token='XXX',
    channel='#databricks_jobs',
    text=':ecs: Talend Containerized Job : Copy %s from S3 TO DBFS successful!' % (x),
    dag=dag)

run_job.set_upstream(s3_list_files)
run_job_notify.set_upstream(run_job)
train_model.set_upstream(run_job_notify)

train_model_notify = SlackAPIPostOperator(
    task_id="train_model_notify",
    username='******',
    token='XXX',
    channel='#databricks_jobs',
    text=':ecs: Talend Containerized Databricks Job : Train model successful',
    dag=dag)

train_model_notify.set_upstream(train_model)

test_model = ECSOperator(
    task_id="test_model",
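
# ---------------------------------------------------------------------------
# A minimal, self-contained sketch of the ECS-job + Slack-notification pairing
# used above, assuming Airflow 1.x import paths. The DAG id, task definition,
# cluster, channel, and token below are illustrative placeholders, not values
# from the original DAG.
# ---------------------------------------------------------------------------
from datetime import datetime

from airflow import DAG
from airflow.contrib.operators.ecs_operator import ECSOperator
from airflow.operators.slack_operator import SlackAPIPostOperator

sketch_dag = DAG(
    "ecs_job_with_slack_notify",            # placeholder DAG id
    start_date=datetime(2019, 1, 1),
    schedule_interval=None,
)

# Run a containerized job on an existing ECS cluster.
sketch_run_job = ECSOperator(
    task_id="run_job",
    task_definition="my_task:1",            # placeholder family:revision
    cluster="my-cluster",                   # placeholder cluster name
    overrides={"containerOverrides": []},   # no per-run overrides in this sketch
    region_name="us-east-1",
    launch_type="EC2",
    dag=sketch_dag,
)

# Post to Slack only after the ECS task succeeds.
sketch_run_job_notify = SlackAPIPostOperator(
    task_id="run_job_notify",
    token="XXX",                            # Slack API token (redacted placeholder)
    channel="#my-channel",                  # placeholder channel
    text="ECS job finished successfully!",
    dag=sketch_dag,
)

sketch_run_job_notify.set_upstream(sketch_run_job)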
    catchup=False,
    max_active_runs=1,
)

# setup and tear-down tasks created by instantiating BashOperators
setup_task = BashOperator(task_id="setup", bash_command="date", dag=dag)
finish_task = BashOperator(task_id="tear_down", bash_command="date", dag=dag)

np.random.seed(593)

for i in range(N_MODELS):
    t = ECSOperator(
        # ECS-specific args
        task_definition="generic_task:6",
        cluster="tims-cluster",
        # the work goes in here
        overrides={
            "containerOverrides": [{
                "command": ["sleep", str(np.random.poisson(10, size=None))]
            }]
        },
        aws_conn_id="tims_aws_account",
        launch_type="FARGATE",
        # general operator args
        task_id="train_model_%d" % i,
        retries=0,
        dag=dag,
    )
    t.set_upstream(setup_task)
    t.set_downstream(finish_task)
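
# ---------------------------------------------------------------------------
# A minimal sketch of the preamble the parallel-training fragment above
# assumes (imports, N_MODELS, and the DAG whose definition ends with
# catchup=False / max_active_runs=1). The DAG id, start date, schedule, and
# N_MODELS value are illustrative placeholders.
# ---------------------------------------------------------------------------
from datetime import datetime

import numpy as np

from airflow import DAG
from airflow.contrib.operators.ecs_operator import ECSOperator
from airflow.operators.bash_operator import BashOperator

N_MODELS = 10  # placeholder: how many training tasks to fan out in parallel

dag = DAG(
    "parallel_model_training",   # placeholder DAG id
    start_date=datetime(2019, 1, 1),
    schedule_interval=None,      # placeholder schedule
    catchup=False,               # do not backfill missed runs
    max_active_runs=1,           # only one concurrent DAG run
)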