t1 = PythonOperator(
    task_id='print_date',
    provide_context=True,
    python_callable=get_date,
    dag=dag)

sensor = HttpSensor(
    task_id='check_for_new_dump',
    http_conn_id='http_default',
    method='HEAD',
    poke_interval=5,
    timeout=15 * 60,
    endpoint="{{ ti.xcom_pull(task_ids='print_date') }}.mentions.CSV.zip",
    dag=dag)

t2 = BashOperator(
    task_id='producer',
    bash_command="python /usr/local/kafka/airflow_producer_mentions.py "
                 "{{ ti.xcom_pull(task_ids='print_date') }}",
    retries=3,
    dag=dag)

t3 = BashOperator(
    task_id='consumer',
    bash_command='python /usr/local/kafka/airflow_consumer_mentions.py',
    retries=3,
    dag=dag)

sensor.set_upstream(t1)
t2.set_upstream(sensor)
t3.set_upstream(sensor)
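Both the sensor's endpoint and the producer's bash_command pull the value returned by get_date out of XCom, and get_date is assumed to be defined elsewhere in the DAG file. Purely as a hypothetical illustration of that handoff (the real callable may derive the value differently), it could look like this:

# Hypothetical sketch only -- the actual get_date is defined elsewhere in the
# DAG file. With provide_context=True the callable receives the task context
# as keyword arguments, and its return value is automatically pushed to XCom,
# which is what ti.xcom_pull(task_ids='print_date') retrieves above.
def get_date(**kwargs):
    # Assume the dump name prefix is derived from the execution date,
    # e.g. a YYYYMMDDHHMMSS prefix for <prefix>.mentions.CSV.zip.
    return kwargs['execution_date'].strftime('%Y%m%d%H%M%S')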
    dag=dag)

domain01_sensor = HttpSensor(
    task_id='domain01_sensor',
    endpoint='',
    http_conn_id='http_domain01',
    retries=1,
    params={},
    dag=dag)

domain02_sensor = HttpSensor(
    task_id='domain02_sensor',
    endpoint='',
    http_conn_id='http_domain02',
    retries=1,
    params={},
    dag=dag)

domain03_sensor = HttpSensor(
    task_id='domain03_sensor',
    endpoint='',
    http_conn_id='http_domain03',
    retries=1,
    params={},
    dag=dag)

domain02_sensor.set_upstream(domain01_sensor)
domain03_sensor.set_upstream(domain02_sensor)
t0.set_upstream(domain03_sensor)
t1.set_upstream(t0)
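The same linear chain of dependencies can also be declared with Airflow's bit-shift operators, which are shorthand for the set_upstream/set_downstream calls above:

# Equivalent dependency declaration: each >> sets the right-hand task
# downstream of the left-hand one.
domain01_sensor >> domain02_sensor >> domain03_sensor >> t0 >> t1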