task_id='rsync', bash_command=as_user(rsync_command, USER), params={'klustadir': KLUSTA_DIR, 'mansortdir': MANSORT_DIR}, dag=dag) email_me = EmailOperator( task_id='email_me', to=default_args['email'], subject='%s is complete' % dag_id, html_content='You may now manually sort on NIAO', dag=dag) slack_it = SlackAPIPostOperator( task_id='slack_it', token=SLACK_TOKEN, text='%s is complete' % dag_id, channel='#ephys', dag=dag) make_kwd_task.set_upstream(make_klusta_dir_task) phy_task.set_upstream(make_kwd_task) #merge_events_task.set_upstream(phy_task) clear_phy_task.set_upstream(phy_task) make_kwik_bak_dir_task.set_upstream(phy_task) mv_kwik_bak_task.set_upstream(make_kwik_bak_dir_task) #rsync_task.set_upstream(merge_events_task) rsync_task.set_upstream(clear_phy_task) rsync_task.set_upstream(mv_kwik_bak_task) email_me.set_upstream(rsync_task) slack_it.set_upstream(rsync_task)
ftp_dir=Variable.get('ftp_dir'), host=Variable.get('ftp_host_ip'), username=Variable.get('sftp_username') ), task_id='get_file', dag=dag ) t1 = SlackAPIPostOperator( channel=Variable.get('slack_channel'), trigger_rule=TriggerRule.ALL_SUCCESS, token=Variable.get('slack_token'), username='******', text='Timesheet file retrieval action complete.', attachments=[ { 'text': 'Timesheet file retrieval successful.', 'color': '#449944', 'mrkdwn_in': ['text'] } ], task_id='slack_success', dag=dag ) t2 = SlackAPIPostOperator( channel=Variable.get('slack_channel'), trigger_rule=TriggerRule.ONE_FAILED, token=Variable.get('slack_token'), username='******', text='Timesheet file retrieval action failed.', attachments=[
params={ 'klustadir': KLUSTA_DIR, 'mansortdir': MANSORT_DIR }, dag=dag) email_me = EmailOperator( task_id='email_me', to=default_args['email'], subject='%s is complete' % dag_id, html_content='You may now manually sort on NIAO', dag=dag) slack_it = SlackAPIPostOperator(task_id='slack_it', token=SLACK_TOKEN, text='%s is complete' % dag_id, channel='#ephys', dag=dag) make_kwd_task.set_upstream(make_klusta_dir_task) phy_task.set_upstream(make_kwd_task) #merge_events_task.set_upstream(phy_task) clear_phy_task.set_upstream(phy_task) make_kwik_bak_dir_task.set_upstream(phy_task) mv_kwik_bak_task.set_upstream(make_kwik_bak_dir_task) #rsync_task.set_upstream(merge_events_task) rsync_task.set_upstream(clear_phy_task) rsync_task.set_upstream(mv_kwik_bak_task) email_me.set_upstream(rsync_task) slack_it.set_upstream(rsync_task)
env={'PATH': ANACONDA_PATH}, params={ 'postphydir': POSTPHY_DIR, 'ecanalysispath': ECANALYSIS_PATH, 'rasterdir': RASTER_DIR }, dag=dag) ############ Report Completion email_me = EmailOperator(task_id='email_me', to=default_args['email'], subject='%s is merged' % dag_id, html_content='You may commence analysis.', dag=dag) slack_it = SlackAPIPostOperator(task_id='slack_it', token=SLACK_TOKEN, text='%s is merged' % dag_id, channel='#ephys', dag=dag) rsync_task.set_upstream(make_postphy_dir_task) merge_events_task.set_upstream(rsync_task) kwik2pandas_task.set_upstream(merge_events_task) email_me.set_upstream(kwik2pandas_task) slack_it.set_upstream(kwik2pandas_task) make_raster_dir_task.set_upstream(kwik2pandas_task) make_raster_task.set_upstream(make_raster_dir_task) globals()[dag_id] = dag
# Render raster plots from the post-phy data via the ecanalysis scripts.
make_raster_task = BashOperator(
    task_id="make_rasters",
    bash_command=make_raster_cmd,
    env={"PATH": ANACONDA_PATH},
    params={
        "postphydir": POSTPHY_DIR,
        "ecanalysispath": ECANALYSIS_PATH,
        "rasterdir": RASTER_DIR,
    },
    dag=dag,
)

# ---------------------------------------------------------------- completion
# Notify the owner by e-mail that merging finished.
email_me = EmailOperator(
    task_id="email_me",
    to=default_args["email"],
    subject="%s is merged" % dag_id,
    html_content="You may commence analysis.",
    dag=dag,
)

# Mirror the notice to Slack (#ephys).
slack_it = SlackAPIPostOperator(
    task_id="slack_it",
    token=SLACK_TOKEN,
    text="%s is merged" % dag_id,
    channel="#ephys",
    dag=dag,
)

# Dependencies, declared producer-first; kwik2pandas fans out into the
# notifications and the raster branch.
make_postphy_dir_task.set_downstream(rsync_task)
rsync_task.set_downstream(merge_events_task)
merge_events_task.set_downstream(kwik2pandas_task)
kwik2pandas_task.set_downstream(email_me)
kwik2pandas_task.set_downstream(slack_it)
kwik2pandas_task.set_downstream(make_raster_dir_task)
make_raster_dir_task.set_downstream(make_raster_task)

# Expose the DAG at module level so the scheduler picks it up.
globals()[dag_id] = dag
'retries': 2, 'retry_delay': datetime.timedelta(minutes=1), 'project_id': models.Variable.get('gcp_project') } with models.DAG('github-trending-job', schedule_interval=datetime.timedelta(days=1), default_args=default_dag_args) as dag: get_k8s_credinald = 'gcloud container clusters get-credentials us-central1-github-survey-j-73721e60-gke --zone us-central1-f --project gas-webscraper' image_url = models.Variable.get('github_image_url') run_container = 'kubectl run puppeter --image={} --rm --attach=true --command -- npm start'.format( image_url) collect_data = BashOperator( task_id='get_data_github', execution_timeout=datetime.timedelta(minutes=10), bash_command='{} && {}'.format(get_k8s_credinald, run_container), dag=dag, ) success_task = SlackAPIPostOperator( task_id='notify_skack', username='******', token= 'xoxp-16701812533-48173599442-292225017239-0a6dd5ab5e617bd1adc37e4db93c85e3', channel='#lab', text='hello done', dag=dag) collect_data >> success_task