    dag=dag)

qubole_task = QuboleOperator(
    task_id='qubole_task',
    command_type='shellcmd',
    script='ls /usr/lib/airflow',
    cluster_label='airflow-demo',
    fetch_logs=True,
    # If True, fetch the Qubole command logs and concatenate them into the
    # corresponding Airflow task logs.
    # Qubole auto-attaches 3 tags to the command - dag_id, task_id, run_id.
    qubole_conn_id='qubole_default',
    # Connection id used to submit commands inside QDS; if not set,
    # "qubole_default" is used.
    dag=dag)

bash_task = BashOperator(
    task_id='bash_task',
    bash_command='echo "run_id={{ run_id }} | dag_run={{ dag_run }}"',
    dag=dag)

http_sensor_task = HttpSensor(
    task_id='http_sensor_task',
    http_conn_id='http_default',
    endpoint='',
    request_params={},
    # Succeed as soon as the response body contains "Google".
    response_check=lambda response: "Google" in str(response.content),
    poke_interval=5,
    dag=dag)

qubole_task.set_upstream(python_task)
bash_task.set_upstream(python_task)
http_sensor_task.set_upstream(python_task)
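For context, here is a minimal sketch of the scaffolding the snippet above assumes but does not show: the imports, the dag object, and the upstream python_task. The dag_id, dates, and the print_context callable are illustrative assumptions, not part of the original example; import paths follow Airflow 1.10, which this snippet's operator names suggest.

from datetime import datetime, timedelta

from airflow import DAG
from airflow.operators.python_operator import PythonOperator
from airflow.operators.bash_operator import BashOperator
from airflow.contrib.operators.qubole_operator import QuboleOperator
from airflow.sensors.http_sensor import HttpSensor

dag = DAG(
    dag_id='qubole_fanout_example',   # assumed name
    start_date=datetime(2020, 1, 1),  # assumed schedule
    schedule_interval=timedelta(days=1))

def print_context(ds, **kwargs):
    # Illustrative upstream callable; the original one is not shown.
    print('Execution date: %s' % ds)

# The upstream task that the three set_upstream() calls above refer to.
python_task = PythonOperator(
    task_id='python_task',
    python_callable=print_context,
    provide_context=True,
    dag=dag)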
"Tt-I2ap-Id": "*****@*****.**", "Tt-I2ap-Sec": "E8OLhEWWihzdpIz5"}, http_conn_id='i2ap_processor', xcom_push=True, dag=dag) # retrieve the job id associated with the async call in t1 t5 = PythonOperator( task_id='weekly_dbm_partner_pull_jobid', python_callable=setPartnerEndPoint, provide_context=True, dag=dag ) # loop on the status pull until the status is ERROR or COMPLETE t6 = HttpSensor( task_id='weekly_dbm_partner_pull_status', http_conn_id='i2ap_processor', endpoint=Variable.get('weekly_dbm_partner_pull-statusEndpoint'), headers={"Content-Type": "application/json", "Tt-I2ap-Id": "*****@*****.**", "Tt-I2ap-Sec": "E8OLhEWWihzdpIz5"}, response_check=responseCheck, poke_interval=60, dag=dag) t2.set_upstream(t1) t3.set_upstream(t2) t4.set_upstream(t3) t5.set_upstream(t4) t6.set_upstream(t5)