def build_email(**context):
    """Send a test email whose body is the rendered task context, with the
    task-instance log and this DAG file attached.

    :param context: Airflow task context; every key/value pair is rendered
        into the email body, one entry per paragraph.
    """
    # Render each context entry; fall back to "N/A" if str() conversion of
    # a value raises (narrowed from the original bare except).
    parts = []
    for key, value in context.items():
        try:
            parts.append("\n\n" + key + ": " + str(value))
        except Exception:
            parts.append("\n\n" + key + ": N/A")
    message = "".join(parts)

    log_dag_name = os.environ['AIRFLOW_CTX_DAG_ID']
    log_task_name = os.environ['AIRFLOW_CTX_TASK_ID']
    log_time = os.environ['AIRFLOW_CTX_EXECUTION_DATE']
    # First try attempt of the task instance (hard-coded "1.log").
    log = os.path.join('/home/pchoix/airflow/logs',
                       log_dag_name, log_task_name, log_time, '1.log')
    print("log path: " + log)
    me = os.path.realpath(__file__)
    print("me: " + me)
    # Context managers guarantee both files are closed even if the
    # operator raises; opening also fails fast if either file is missing.
    with open(log, "r") as file1, open(me, "r") as file2:
        email_op = EmailOperator(
            task_id='send_email',
            to="*****@*****.**",
            subject="Test Email With Log Attachment using EmailOperator",
            html_content=message,
            files=[file1.name, file2.name],
        )
        email_op.execute(context)
def build_email(**context):
    """Email the CC report spreadsheet to the report recipients.

    Opens the report file first so a missing report fails fast, then hands
    its path to EmailOperator as an attachment.

    :param context: Airflow task context passed through to the operator.
    """
    # .xlsx is a binary format: open in 'rb' so any future read() cannot
    # hit a UnicodeDecodeError; only the path (file.name) is used here.
    with open('/tmp/cc_report.xlsx', mode='rb') as file:
        email_op = EmailOperator(
            task_id='send_email',
            to=['*****@*****.**', '*****@*****.**', '*****@*****.**'],
            subject="CC report",
            html_content='Hello, <br/>',
            files=[file.name],
        )
        email_op.execute(context)
def build_email(**context):
    """Send a test email with a small generated text attachment.

    Writes "Hello World" to a temporary .txt file and attaches it via
    EmailOperator; the temp file is deleted when the context manager exits.

    :param context: Airflow task context passed through to the operator.
    """
    with NamedTemporaryFile(mode='w+', suffix=".txt") as file:
        file.write("Hello World")
        # Bug fix: without flush() the write may still sit in the Python
        # buffer when EmailOperator reads file.name from disk, producing
        # an empty attachment.
        file.flush()
        email_op = EmailOperator(
            task_id='send_email',
            to="*****@*****.**",
            subject="Test Email Please Ignore",
            html_content=None,
            files=[file.name],
        )
        email_op.execute(context)
def read_data(**kwargs):
    """Cross-check recent loan requests (Cassandra) against orders (Postgres).

    For every loan request created in the last 12 hours, compares the
    requested financing period with the period stored in the matching
    bazar order (third whitespace-separated token of pay_title); on a
    mismatch, sends an alert email and a Slack notification.

    :param kwargs: Airflow task context, passed to the operators' execute().
    """
    cluster = cl(['10.103.5.51', '10.103.5.52', '10.103.5.53'])
    session = cluster.connect('darbiz')
    conn = PostgresHook(postgres_conn_id='pgConn_pg').get_conn()
    cur = conn.cursor()
    try:
        rows = session.execute("SELECT * from darbiz.forte_express_loan_requests where created_on>='2020-09-20' allow filtering")
        cur.execute("select distinct owner profile_id, uid order_id, pay_title from dar_group.bazar_orders1 where created_on>=now()-interval '24' hour")
        res = cur.fetchall()
        # Hoisted loop invariant: every row is compared against the same
        # 12-hour cutoff instead of a drifting datetime.now() per row.
        cutoff = datetime.now() - timedelta(hours=12)
        msg_tpl = 'Error in order_id: {} created at: {}, profile_id: {}, months in request: {}, months in orders: {}\n'
        for user_row in rows:
            d = json.loads(user_row.loan_request)
            order_id = user_row.order_id
            created_on = user_row.created_on
            fin = d['person_info']['financing_info']
            period = fin['period'] if 'period' in fin else None
            if created_on < cutoff:
                continue
            for a, b, c in res:
                if order_id != b:
                    continue
                months = c.split()
                if period != int(months[2]):
                    msg = msg_tpl.format(a, created_on, b, period, months[2])
                    email = EmailOperator(
                        task_id='send_email',
                        to=['*****@*****.**', '*****@*****.**'],
                        subject='Ошибка в Fortemarket',
                        html_content=msg,
                    )
                    email.execute(context=kwargs)
                    t3 = SlackWebhookOperator(
                        task_id='send_slack_notification',
                        http_conn_id='slack_connection',
                        message=msg,
                        channel='#reports',
                        dag=dag,
                    )
                    t3.execute(context=kwargs)
    finally:
        # Release resources even if a query or operator raises; the
        # original leaked cursor/connection on error and never shut the
        # Cassandra cluster down.
        cur.close()
        conn.close()
        cluster.shutdown()
def report_notify_email(report, email_template_location, **context):
    """
    For the given report, sends a notification email in the format given
    in the email_template.

    :param report: report being notified on
    :type report: Report
    :param email_template_location: location of html template to use for status
    :type email_template_location: str
    :param context: Airflow task context (must contain "dag_run" and "dag")
    """
    ri = ReportInstance(context["dag_run"])

    updated_time = ri.updated
    timezone = pendulum.timezone(conf.get("core", "default_timezone"))
    # Bug fix: datetime.replace() returns a NEW object — the original
    # discarded the result, leaving updated_time timezone-naive.
    updated_time = updated_time.replace(tzinfo=timezone)

    passed = ri.passed
    status = get_status(passed)
    details_link = get_details_link()

    with open(email_template_location) as file:
        send_email = EmailOperator(
            task_id="custom_email_notification",
            to=report.subscribers,
            subject="[{{status}}] {{title}}",
            html_content=file.read(),
        )
        params = {
            "passed": passed,
            "status": status,
            "updated": updated_time,
            "title": report.report_title,
            "details_link": details_link,
        }
        send_email.render_template_fields(
            context=params, jinja_env=context["dag"].get_template_env())
        logging.info(f'Sending "{send_email.subject}" email...')
        send_email.execute(context)
def get_spy_data(**context):
    """Fetch daily SPY prices, compute Bollinger-band signals, and email
    buy/sell alerts; persists the worked-up data to /tmp/spy_data.csv.

    :param context: Airflow task context; context["dag"] is attached to the
        EmailOperator and the full context is passed to execute().
    """
    dates = []
    closings = []
    print('hello bollinger bands')
    # Timeout added: an unbounded requests.get can hang the task forever.
    r = requests.get(
        "https://www.alphavantage.co/query?function=TIME_SERIES_DAILY_ADJUSTED&symbol=SPY&apikey="
        + API_KEY,
        timeout=30)
    data_dict = r.json()['Time Series (Daily)']

    # Parse dict to capture desired info into lists
    for k, v in data_dict.items():
        dates.append(k)
        closings.append(v['5. adjusted close'])

    df = {'Date': dates, 'AdjClose': closings}
    stock_data = work_up_data(df)

    email = EmailOperator(task_id="email_task",
                          to="*****@*****.**",
                          subject='',
                          html_content='',
                          dag=context.get("dag"))

    # Send email based on conditions
    if stock_data.iloc[-1]['buy'] == 1:
        print("BUY!!!!")
        # Typo fix: was "BUT SOME SPY STOCK!"
        email.subject = "BUY SOME SPY STOCK!"
        email.html_content = "Bollinger bands say to buy some SPY"
        email.execute(context=context)
    if stock_data.iloc[-1]['sell'] == 1:
        print("SELL!!!!")
        email.subject = "SELL SOME SPY STOCK!"
        email.html_content = "Bollinger bands say to sell SPY"
        email.execute(context=context)

    # Populate db
    stock_data.to_csv('/tmp/spy_data.csv')
def error_email(context):
    """Failure callback: email an alert when a task in the workflow fails.

    :param context: Airflow task context supplied to the failure callback
    :return: result of the operator's execute()
    """
    notifier = EmailOperator(
        task_id="error_email",
        trigger_rule=TriggerRule.ONE_FAILED,
        to=['*****@*****.**'],
        subject="af_advt_bdt_publish_domestic_sdk_adv_rank error",
        html_content="af_advt_bdt_publish_domestic_sdk_adv_rank error",
    )
    return notifier.execute(context=context)