def slack_dag_failure_notification(context):
    """Post a task-failure message to Slack via the ``slack`` connection.

    Intended for use as an Airflow ``on_failure_callback``.

    Args:
        context (dict): Airflow callback context; must contain
            ``task_instance`` and ``execution_date``.

    Returns:
        The result of ``SlackWebhookOperator.execute`` (message is sent
        synchronously inside the callback).
    """
    # Webhook token is stored as the password of the "slack" connection.
    slack_webhook_token = BaseHook.get_connection("slack").password
    # Red icon in production, yellow everywhere else; the trailing ":" in
    # the template below completes the emoji code (e.g. ":red_circle:").
    icon_color = (
        ":red_circle"
        if configuration.ENVIRONMENT.lower() == "production"
        else ":yellow_circle"
    )
    # Look the task instance up once instead of once per template field.
    task_instance = context.get("task_instance")
    msg = """
            {icon_color}: Task Failed.
            *Task*: {task}
            *Dag*: {dag}
            *Execution Time*: {exec_date}
            *Log Url*: {log_url}
            """.format(
        icon_color=icon_color,
        task=task_instance.task_id,
        dag=task_instance.dag_id,
        exec_date=context.get("execution_date"),
        log_url=task_instance.log_url,
    )
    failed_alert = SlackWebhookOperator(
        task_id="slack_failed_notification",
        http_conn_id="slack",
        webhook_token=slack_webhook_token,
        message=msg,
        username="******",
    )
    return failed_alert.execute(context=context)
def task_fail_slack_alert(context):
    """Callback task that can be used in DAG to alert of failure task completion.

    Looks up the webhook token on the ``SLACK_CONN_ID`` connection and,
    if the connection's extras define a ``users`` entry, appends it to the
    message so the relevant people are @-mentioned (``link_names=True``).

    Args:
        context (dict): Context variable passed in from Airflow.

    Returns:
        None: Calls the SlackWebhookOperator execute method internally.
    """
    connection = BaseHook.get_connection(SLACK_CONN_ID)
    slack_webhook_token = connection.password
    # Look the task instance up once instead of once per template field.
    task_instance = context.get("task_instance")
    slack_msg = """*Status*: :x: Task Failed\n*Task*: {task}\n*Dag*: {dag}\n*Execution Time*: {exec_date}\n*Log Url*: {log_url}""".format(
        task=task_instance.task_id,
        dag=task_instance.dag_id,
        exec_date=context.get("execution_date"),
        log_url=task_instance.log_url,
    )
    # Optional @-mention list configured in the connection's extra JSON.
    if connection.extra_dejson.get('users'):
        slack_msg = slack_msg + '\n' + connection.extra_dejson.get('users')
    failed_alert = SlackWebhookOperator(
        task_id="slack_task",
        http_conn_id="slack",
        webhook_token=slack_webhook_token,
        message=slack_msg,
        username="******",
        link_names=True
    )
    return failed_alert.execute(context=context)
def slack_success_notification(context):
    """Post a task-success message to Slack via the ``slack`` connection.

    Intended for use as an Airflow ``on_success_callback``.

    Args:
        context (dict): Airflow callback context; must contain
            ``task_instance`` and ``execution_date``.

    Returns:
        The result of ``SlackWebhookOperator.execute``.
    """
    # Webhook token is stored as the password of the "slack" connection.
    slack_webhook_token = BaseHook.get_connection("slack").password
    # Look the task instance up once instead of once per template field.
    task_instance = context.get("task_instance")
    msg = """
            :green_circle: Task Successful.
            *Task*: {task}
            *Dag*: {dag}
            *Execution Time*: {exec_date}
            *Log Url*: {log_url}
            """.format(
        task=task_instance.task_id,
        dag=task_instance.dag_id,
        exec_date=context.get("execution_date"),
        log_url=task_instance.log_url,
    )
    success_alert = SlackWebhookOperator(
        task_id="slack_success_notification",
        http_conn_id="slack",
        webhook_token=slack_webhook_token,
        message=msg,
        username="******",
    )
    return success_alert.execute(context=context)
def task_fail_slack_alert(context):
    """Alert Slack that the DAG failed, but only for the canonical run.

    Two guard clauses suppress noise: alerts are only sent when the DAG is
    running under the ``cloud-bulldozer`` git user, and only from the
    ``final_status`` task (individual task failures are just logged).

    Args:
        context (dict): Airflow callback context; must contain
            ``task_instance`` and ``execution_date``.

    Returns:
        The result of ``SlackWebhookOperator.execute``, or ``None`` when a
        guard clause suppressed the alert.
    """
    slack_webhook_token = BaseHook.get_connection(SLACK_CONN_ID).password
    # Guard: only alert for the canonical upstream owner, not forks.
    if var_loader.get_git_user() != "cloud-bulldozer":
        print("Task Failed")
        return
    # Guard: only the aggregate final_status task should page the channel.
    if context.get('task_instance').task_id != "final_status":
        print(context.get('task_instance').task_id, "Task failed")
        return
    # Look the task instance up once instead of once per template field.
    task_instance = context.get('task_instance')
    slack_msg = """
            :red_circle: DAG Failed {mem}
            *Task*: {task}
            *Dag*: {dag}
            *Execution Time*: {exec_date}
            *Log Url*: {log_url}
            """.format(
        task=task_instance.task_id,
        dag=task_instance.dag_id,
        mem=alert_members(context),
        exec_date=context.get('execution_date'),
        log_url=get_hyperlink(context),
    )
    failed_alert = SlackWebhookOperator(
        task_id='slack_test',
        http_conn_id='slack',
        webhook_token=slack_webhook_token,
        message=slack_msg,
        username='******',
        link_names=True)
    return failed_alert.execute(context=context)
def test_assert_templated_fields(self):
    """The operator must expose exactly the documented templated fields."""
    expected = [
        'webhook_token',
        'message',
        'attachments',
        'blocks',
        'channel',
        'username',
        'proxy',
    ]
    op = SlackWebhookOperator(
        task_id='slack_webhook_job', dag=self.dag, **self._config
    )
    self.assertEqual(op.template_fields, expected)
def test_execute(self):
    """Constructor kwargs must be stored verbatim on the operator."""
    # Given / When
    op = SlackWebhookOperator(
        task_id='slack_webhook_job', dag=self.dag, **self._config
    )
    # Then: every configured attribute round-trips unchanged.
    for attr in (
        'http_conn_id',
        'webhook_token',
        'message',
        'attachments',
        'blocks',
        'channel',
        'username',
        'icon_emoji',
        'icon_url',
        'link_names',
        'proxy',
    ):
        self.assertEqual(self._config[attr], getattr(op, attr))
def task_fail_slack_alert(context):
    """Record the failure in the analysis DB, then post a Slack alert.

    Reads ``analysis_id`` from the triggering DAG run's conf; when present,
    the analysis status is marked ``failed`` before the message is sent.
    """
    run = context.get('dag_run')
    analysis_id = run.conf.get('analysis_id')
    if analysis_id:
        # Persist the failure so the analysis tracker reflects reality.
        print("Fail, updating analysis status")
        gendb_utils.add_analysis_metadata(
            analysis_id, "airflow_execution_status", "failed", update=True
        )
        gendb_utils.update_analysis_status(analysis_id, "failed")
    else:
        print("NO ANALYSE NUMBER")
    print("task_fail_slack_alert")
    token = BaseHook.get_connection(SLACK_CONN_ID).password
    ti = context.get('task_instance')
    slack_msg = """
            :red_circle: Task Failed.
            *Task*: {task}
            *Dag*: {dag}
            *Execution Time*: {exec_date}
            *Log Url*: {log_url}
            *Analysis*: {analysis_id}
            """.format(
        task=ti.task_id,
        dag=ti.dag_id,
        exec_date=context.get('execution_date'),
        log_url=ti.log_url,
        analysis_id=analysis_id,
    )
    print("message:\n", slack_msg)
    alert = SlackWebhookOperator(
        task_id='slack_test',
        http_conn_id='slack',
        webhook_token=token,
        message=slack_msg,
        username='******')
    return alert.execute(context=context)
def task_success_slack_alert(context):
    """Slack message + update analysis status.

    Reads ``analysis_id`` from the triggering DAG run's conf; when present,
    the analysis record is marked ``success`` before the Slack message is
    posted.

    Args:
        context (dict): Airflow callback context; must contain ``dag_run``,
            ``task_instance`` and ``execution_date``.

    Returns:
        The result of ``SlackWebhookOperator.execute``.
    """
    dag_run = context.get('dag_run')
    analysis_id = dag_run.conf.get('analysis_id')
    if analysis_id:
        # Persist the success so the analysis tracker reflects reality.
        print("Success, updating db")
        gendb_utils.add_analysis_metadata(
            analysis_id, "airflow_execution_status", "success", update=True
        )
        gendb_utils.update_analysis_status(analysis_id, "success")
    print("slack message")
    slack_webhook_token = BaseHook.get_connection(SLACK_CONN_ID).password
    # Look the task instance up once instead of once per template field.
    task_instance = context.get('task_instance')
    slack_msg = """
            :heavy_check_mark: Dag Success.
            *Dag*: {dag}
            *Execution Time*: {exec_date}
            *Log Url*: {log_url}
            *Analysis ID*: {analysis_id}
            """.format(
        dag=task_instance.dag_id,
        exec_date=context.get('execution_date'),
        log_url=task_instance.log_url,
        analysis_id=analysis_id
    )
    success_alert = SlackWebhookOperator(
        task_id='slack_test',
        http_conn_id='slack',
        webhook_token=slack_webhook_token,
        message=slack_msg,
        username='******')
    return success_alert.execute(context=context)
STORED AS TEXTFILE
""")
# NOTE(review): the two lines above are the tail of a Hive DDL string whose
# opening (and the operator it belongs to, presumably creating_forex_rates_table)
# is outside this view — do not edit them in isolation.

# Spark
# Submit the PySpark job that transforms the saved forex rates.
forex_processing = SparkSubmitOperator(
    task_id="forex_processing",
    conn_id="spark_conn",
    application="/opt/airflow/dags/scripts/forex_processing.py",
    verbose=False)

# Notify by e-mail once processing has finished.
email_notification = EmailOperator(
    task_id="sending_email",
    to="*****@*****.**",
    subject="forex_data_pipeline",
    html_content="""<h3>forex_data_pipeline succeeded</h3>""")

# Notify Slack last; webhook token is hard-coded here rather than read
# from the connection — consider moving it into the "slack_conn" secret.
slack_notification = SlackWebhookOperator(
    task_id="sending_slack",
    http_conn_id="slack_conn",
    webhook_token="T02226ZE8GG/C021JHLK96Y",
    message="DAG forex_data_pipeline: DONE",
    username="******")

# Organizing the workflow
# availability checks -> download -> save -> Hive table -> Spark -> notifications
is_forex_rates_available >> is_forex_currencies_file_available >> downloading_rates >> saving_rates
saving_rates >> creating_forex_rates_table >> forex_processing
forex_processing >> email_notification >> slack_notification
ROW FORMAT DELIMITED
FIELDS TERMINATED BY ','
STORED AS TEXTFILE
""")
# NOTE(review): the lines above are the tail of a Hive DDL string whose
# opening (and its operator, presumably creating_forex_rates_table) is
# outside this view — do not edit them in isolation.

#Spark operator
# Submit the PySpark job that transforms the saved forex rates.
forex_processing = SparkSubmitOperator(
    task_id="forex_processing",
    application="/home/enes/airflow2/dags/scripts/forex_processing.py",
    conn_id="spark_conn",
    verbose=False)

#Email operator
send_email_notification = EmailOperator(
    task_id="send_email_notification",
    to="*****@*****.**",
    subject="forex_data_pipeline",
    html_content="<h3>forex_data_pipeline</h3>")

#Slack notification operator
# Message body comes from _get_message() (defined elsewhere in this file).
send_slack_notification = SlackWebhookOperator(
    task_id="send_slack_notification",
    http_conn_id="slack_conn",
    message=_get_message(),
    channel="#monitoring")

#Define dependencies
# availability checks -> download -> save -> Hive table -> Spark -> e-mail -> Slack
is_forex_rates_available >> is_file_available >> downloading_rates >> saving_rates
saving_rates >> creating_forex_rates_table >> forex_processing >> send_email_notification
send_email_notification >> send_slack_notification
# Clear any previous input dir on HDFS, recreate it, and upload orders.csv.
upload_orders_to_hdfs = SSHOperator(
    task_id="upload_orders_to_hdfs",
    ssh_conn_id="cloudera",
    command="hdfs dfs -rm -R -f airflow_input && hdfs dfs -mkdir -p airflow_input && hadoop fs -put "
            "./airflow_pipeline/orders.csv airflow_input/ "
)

# Run the Spark filtering job over SSH; command built by get_orders_filter_cmd()
# (defined elsewhere in this file).
run_spark_job = SSHOperator(
    task_id="run_spark_job",
    ssh_conn_id="cloudera",
    command=get_orders_filter_cmd()
)

# Success notification to #alerts.
slack_success_task = SlackWebhookOperator(
    task_id="Success_notify",
    http_conn_id="slack",
    message="Data loaded successfully",
    channel="#alerts",
    username="******",
    webhook_token=slack_password()
)

# Failure notification: trigger_rule="all_failed" makes this run only when
# every upstream task failed, so exactly one of the two alerts fires.
slack_failure_task = SlackWebhookOperator(
    task_id="failure_notify",
    http_conn_id="slack",
    message="Data loading is failed",
    channel="#alerts",
    username="******",
    webhook_token=slack_password(),
    trigger_rule="all_failed"
)

# Wiring: both notification tasks sit downstream of both branches.
aws_sensor >> import_customers_info >> [slack_success_task, slack_failure_task]
# NOTE(review): the statement below is truncated at the edge of this view —
# its closing bracket lies outside the visible source.
aws_sensor >> ssh_edge_download_task >> upload_orders_to_hdfs >> run_spark_job >> [slack_success_task,