###
# register the runs into the logbook
###
# The experiment id is the leading token of the configured experiment name.
runs = LogbookCreateRunOperator(
    task_id='runs',
    http_hook=logbook_hook,
    experiment="{{ ti.xcom_pull( task_ids='config', key='experiment' ).split('_')[0] }}",
    retries=3,
)

# Slack channel name derived from the experiment: truncated to Slack's
# 21-character limit, spaces stripped, lowercased. Shared by both slack tasks.
_slack_channel_tmpl = "{{ ti.xcom_pull( task_ids='config', key='experiment' )[:21] | replace( ' ', '' ) | lower }}"

# Make sure the per-experiment slack channel exists before inviting anyone.
slack_channel = SlackAPIEnsureChannelOperator(
    task_id='slack_channel',
    channel=_slack_channel_tmpl,
    token=Variable.get('slack_token'),
    retries=2,
)

# Invite the experiment's collaborators; default_users are always added.
slack_users = SlackAPIInviteToChannelOperator(
    task_id='slack_users',
    channel=_slack_channel_tmpl,
    token=Variable.get('slack_token'),
    users="{{ ti.xcom_pull( task_ids='config', key='collaborators' ) }}",
    default_users="W9RUM1ET1,WCPH4JZFU",
)

###
# define pipeline
###
config >> sample_directory >> touch >> rsync >> delete >> untouch
parse_parameters = FeiEpuOperator(task_id='parse_parameters', filepath="{{ ti.xcom_pull( task_ids='parameter_file' )[0] }}", ) # upload to the logbook logbook_parameters = PythonOperator(task_id='logbook_parameters', python_callable=uploadExperimentalParameters2Logbook, op_kwargs={} ) influx_parameters = FeiEpu2InfluxOperator( task_id='influx_parameters', xcom_task_id='parse_parameters', host=args['influx_host'], experiment="{{ dag_run.conf['experiment'] }}", ) ensure_slack_channel = SlackAPIEnsureChannelOperator( task_id='ensure_slack_channel', channel="{{ dag_run.conf['experiment'][:21] | replace( ' ', '' ) | lower }}", token=Variable.get('slack_token'), ) # invite_slack_users = SlackAPIInviteToChannelOperator( task_id='invite_slack_users', invite_slack_users = NotYetImplementedOperator( task_id='invite_slack_users', # channel="{{ dag_run.conf['experiment'][:21] }}", # token=Variable.get('slack_token'), # users=('yee',), ) ### # get the summed jpg ### summed_preview = FileGlobSensor( task_id='summed_preview', filepath="{{ dag_run.conf['directory'] }}/**/{{ dag_run.conf['base'] }}.jpg", recursive=True,