def send_warning_info(warning_type, msg):
    """
    Send a warning message.
    :param warning_type: warning type, monitor: monitoring warning, task: queue task warning
    :param msg: message content
    :return:
    """
    send_success_num = 0
    for email in warning_to_email:
        try:
            pre_subject = '[{} error]'.format(warning_type)
            content = '{} {}'.format(pre_subject, msg)
            send_pwd = properties.get('messages', 'EMAIL_AUTHKEY')
            send_address = properties.get('messages', 'SEND_ADDRESS')
            mail_host = 'smtp.exmail.qq.com'
            mime_content = MIMEText(content, _subtype='plain', _charset='UTF-8')
            mime_content['From'] = send_address
            mime_content['To'] = email
            mime_content['Subject'] = pre_subject
            me = '<' + send_address + '>'
            server = smtplib.SMTP_SSL()
            server.connect(mail_host, 465)
            server.login(send_address, send_pwd)
            server.sendmail(me, [email], mime_content.as_string())
            server.quit()
            send_success_num += 1
            logger.info('send warning mail success, to: %s, subject: %s, content: %s',
                        email, pre_subject, content)
        except Exception as e:
            logger.error('send warning mail failed, e: %s', e)
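# For illustration only: a monitor check might report a problem as, e.g.
#   send_warning_info('monitor', 'task queue length over threshold')
# which mails every address configured in MAIL_LIST with the subject '[monitor error]'.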
async def get_access_token(self, params):
    """
    Get the merchant administrator's JWT access token.
    :param params:
        params['account']  (*)
        params['password'] (*)
    :return: {
        access_token: 'JWT token',
        expires_in: 7200,
        refresh_expires_in: 2592000
    }
    access_token:
        admin_id:
        user_type: user type
        exp: expiration time
        refresh_expires_in: refresh expiration time
        iat: issue (creation) time
    """
    if self.common_utils.is_empty('___body', params) \
            or self.common_utils.is_empty('account', params['___body']) \
            or self.common_utils.is_empty('password', params['___body']):
        return self._e('JWT_GET_PARAMS_ERROR')

    account = params['___body']['account']
    password = params['___body']['password']
    account_res = await self.cs('v1.user.admin.account.service', 'query_one', {'account': account})
    if account_res['code'] != 0:
        return self._e('ACCOUNT_OR_PASSWORD_ERROR')

    account_data = account_res['data']
    # Check the password
    if self.common_utils.md5(self.common_utils.md5(password) + account_data['salt']) != account_data['password']:
        return self._e('ACCOUNT_OR_PASSWORD_ERROR')

    # Read the expiration and refresh expiration times from the config file
    expires_in = int(properties.get('setting', 'jwt', 'expires_in'))
    refresh_expires_in = int(properties.get('setting', 'jwt', 'refresh_expires_in'))
    # Build the access token
    access_token = JWT.encode({
        'admin_id': account_res['data']['admin_id'],
        'user_type': 'admin',
        'exp': expires_in,
        'iat': int(self.date_utils.timestamps_now()),
    })
    return self._rs({
        'access_token': access_token,
        'expires_in': expires_in,
        'refresh_expires_in': refresh_expires_in
    })
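# Illustrative only: a minimal sketch of how a caller might exchange credentials for a
# token, assuming the service framework passes request data under '___body' (as the
# checks above expect) and that self._rs() wraps the payload in the same
# {'code': 0, 'data': ...} envelope used by the account lookup. The function name,
# the `service` handle, and the credential values are hypothetical.
async def login_example(service):
    res = await service.get_access_token({
        '___body': {'account': 'admin@example.com', 'password': 'plain-text-password'}
    })
    if res.get('code') == 0:
        token = res['data']['access_token']  # e.g. sent back as an Authorization header
        print('token valid for', res['data']['expires_in'], 'seconds')
    return res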
def build_table_name(self, str_table_name):
    """
    Build the table name.
    Prepend the table prefix from the config file, if one is configured.
    @params str_table_name string table name
    """
    table_pre = properties.get('setting', 'mysql', 'DB_TABLE_PRE')
    str_table_name = table_pre + str_table_name if table_pre else str_table_name
    return str_table_name
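# For illustration only (the DB_TABLE_PRE value is hypothetical): with
# DB_TABLE_PRE = 't_' in the mysql section of the config,
#   self.build_table_name('admin_account')  # -> 't_admin_account'
# and with no prefix configured the name is returned unchanged.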
class Report:
    send_time = 0
    smtp_server = properties.get('task', 'smtp', 'server')
    smtp_account = properties.get('task', 'smtp', 'account')
    smtp_pass = properties.get('task', 'smtp', 'pass')
    report_from = properties.get('task', 'report', 'from')
    report_to = properties.get('task', 'report', 'to')
    report_server = properties.get('task', 'report', 'server')

    @classmethod
    async def report(cls, content, error_track):
        """
        Send an error report email.
        :param content: mail content
        :param error_track: error details
        :return:
        """
        timestamps_now = date_utils.timestamps_now()
        if cls.send_time + 1800 <= timestamps_now:
            logger.info('send report message. error: %s', error_track)
            # Send the mail
            smtp = TornadoSMTP(cls.smtp_server, use_ssl=True)
            # await smtp.starttls()
            await smtp.login(cls.smtp_account, cls.smtp_pass)
            msg = EmailMessage()
            msg['Subject'] = '[{}]Task error.'.format(cls.report_server)
            msg['To'] = cls.report_to
            msg['From'] = cls.report_from
            msg.set_content(error_track, 'html', 'utf-8')
            await smtp.send_message(msg)
            cls.send_time = timestamps_now
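# Illustrative only: how a task worker might report a failure, relying on the
# 1800-second guard above so repeated failures do not flood the mailbox.
# The wrapper name and the use of traceback are assumptions, not part of this module.
import traceback

async def run_task_safely(task):
    try:
        await task()
    except Exception:
        await Report.report('task failed', traceback.format_exc())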
def encode(data):
    """
    Sign a JWT.
    :param data:
    :return:
    """
    header = {"typ": "JWT", "alg": "HS256"}
    header = json.dumps(header, separators=(',', ':')).encode('utf-8')
    header = base64.urlsafe_b64encode(header).replace(b'=', b'')
    p = json.dumps(data, separators=(',', ':')).encode('utf-8')
    p = base64.urlsafe_b64encode(p).replace(b'=', b'')
    secret_key = properties.get('setting', 'jwt', 'secret_key').encode('utf-8')
    content = header + b'.' + p
    signature = hmac.new(secret_key, content, digestmod=hashlib.sha256).digest()
    signature = base64.urlsafe_b64encode(signature).replace(b'=', b'')
    s = header + b'.' + p + b'.' + signature
    return s.decode('utf-8')
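# The matching decode/verify step is not shown above; this is a minimal sketch of how a
# token produced by encode() could be checked, assuming the same secret_key config entry
# and the padding-stripped urlsafe base64 used above. The function name is illustrative
# and not part of the original module.
def decode_example(token):
    header_b64, payload_b64, signature_b64 = token.encode('utf-8').split(b'.')
    secret_key = properties.get('setting', 'jwt', 'secret_key').encode('utf-8')
    expected = hmac.new(secret_key, header_b64 + b'.' + payload_b64,
                        digestmod=hashlib.sha256).digest()
    expected = base64.urlsafe_b64encode(expected).replace(b'=', b'')
    if not hmac.compare_digest(expected, signature_b64):
        return None  # signature mismatch
    # Restore the '=' padding stripped by encode() before base64-decoding the payload
    padded = payload_b64 + b'=' * (-len(payload_b64) % 4)
    return json.loads(base64.urlsafe_b64decode(padded).decode('utf-8'))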
import smtplib
import json
from email.mime.text import MIMEText

from constants.cachekey_predix import CacheKeyPredix
from source.redisbase import RedisBase
from source.properties import properties
from tools.date_utils import DateUtils
from tools.logs import Logs

redis = RedisBase()
list_cache_key = CacheKeyPredix.TASK_DATA_LIST
logger = Logs().logger

try:
    warning_to_email = properties.get('monitor', 'MAIL_LIST')
    warning_to_email = warning_to_email.split(',')
except Exception as e:
    warning_to_email = None
    print(e)

# warning_to_email = [
#     '*****@*****.**',
#     '*****@*****.**'
# ]
import sys
import threadpool

sys.path.append("../")

import conf.config as config
from constants.cachekey_predix import CacheKeyPredix as cachekey
from source.redisbase import RedisBase
from source.service_manager import ServiceManager
from tools.schedule_utils import ScheduleUtils
from tools.date_json_encoder import CJsonEncoder
from source.properties import properties
from source.model import ModelBase
from tools.date_utils import DateUtils
import traceback
from tools.logs import Logs

pool_size = 3
try:
    pool_size = int(properties.get('task', 'POOL_NUM'))
except Exception as e:
    print(e)

redis = RedisBase()
task_redis = redis.get_conn()
error_redis = redis.get_conn()
cache_key = cachekey.TASK_DATA_LIST
error_cache_key = cachekey.ERROR_TASK_DATA_LIST
pool = threadpool.ThreadPool(pool_size)
service_manager = ServiceManager()
config = config
logger = Logs().get_logger()


def update_task(last_id):
import random
from tools.logs import logs as logger
from tools.date_json_encoder import CJsonEncoder
from tools.date_utils import DateUtils
from source.async_redis import AsyncRedis
from source.async_model import AsyncModelBase
from source.properties import properties
from .report import Report
import sys
import traceback

redis = AsyncRedis()
date_utils = DateUtils()
task_queue = properties.get('task', 'cache', 'task_queue')
failed_queue = properties.get('task', 'cache', 'failed_queue')
loop_num = int(properties.get('task', 'task', 'task_num'))
server_key = properties.get('task', 'cache', 'servers')
server_coroutine_key = properties.get('task', 'cache', 'server_coroutine')


async def save_to_db(data):
    # async def save_to_db(task_unique_id, service_path, method, params_json):
    """
    Save tasks to the DB.
    :param data: list
        data[][task_unique_id]
        data[][service_path]
        data[][method]
        data[][params_json]
# -*- coding:utf-8 -*-
"""
@package:
@file: aio_redis.py
@author: yuiitsu
@time: 2020-04-03 19:48
"""
import ssl

import aioredis

from source.properties import properties
from tools.logs import logs as logger

REDIS_HOST = properties.get("setting", "redis", "REDIS_HOST")
REDIS_PORT = int(properties.get('setting', 'redis', 'REDIS_PORT'))
REDIS_PASS = properties.get('setting', 'redis', 'REDIS_PASS')
REDIS_USE_SSL = properties.get('setting', 'redis', 'REDIS_USE_SSL')
REDIS_MAX_CONNECTION = int(properties.get('setting', 'redis', 'REDIS_MAX_CONNECTION'))

SSLContext = ssl.SSLContext() if REDIS_USE_SSL and REDIS_USE_SSL == 'True' else False


class AsyncRedis:

    @staticmethod
    async def get_connection():
        redis = await aioredis.create_redis((REDIS_HOST, REDIS_PORT),
                                            password=REDIS_PASS,
                                            encoding='utf-8',
                                            ssl=SSLContext)
        return redis
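# Illustrative only: acquiring a connection and issuing a command, assuming the
# aioredis 1.x-style client used above (create_redis returns a client whose commands
# are awaitable). The function name and key are hypothetical.
async def redis_usage_example():
    redis = await AsyncRedis.get_connection()
    try:
        await redis.set('example:key', 'value')
        return await redis.get('example:key')
    finally:
        redis.close()
        await redis.wait_closed()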
class AsyncModelBase(SqlBuilder):

    async_pools = tormysql.helpers.ConnectionPool(
        max_connections=int(properties.get('setting', 'mysql', 'MAX_CONNECTIONS')),
        idle_seconds=150,
        wait_connection_timeout=120,
        host=properties.get('setting', 'mysql', 'DB_HOST'),
        port=int(properties.get('setting', 'mysql', 'DB_PORT')),
        user=properties.get('setting', 'mysql', 'DB_USER'),
        passwd=properties.get('setting', 'mysql', 'DB_PASS'),
        db=properties.get('setting', 'mysql', 'DB_BASE'),
        charset="utf8mb4",
        cursorclass=tormysql.cursor.DictCursor,
    )
    date_encoder = CJsonEncoder
    util = CommonUtil
    date_utils = dateUtils
    logger = logs
    # tx = None

    def __init__(self):
        self.tx = None

    async def do_sqls(self, params_list):
        sql = ''
        tx = None
        result = None
        try:
            tx = await self.async_pools.begin()
            for params in params_list:
                sql_type = params[self.sql_constants.SQL_TYPE]
                table_name = params[self.sql_constants.TABLE_NAME]
                dict_data = params[self.sql_constants.DICT_DATA]
                value_tuple = params[self.sql_constants.VALUE_TUPLE]
                if sql_type == self.sql_constants.INSERT:
                    # insert
                    sql = self.build_insert(table_name, dict_data)
                elif sql_type == self.sql_constants.BATCH_INSERT:
                    # batch insert
                    sql = self.build_batch_insert(table_name, dict_data)
                elif sql_type == self.sql_constants.UPDATE:
                    # update
                    sql = self.build_update(table_name, dict_data)
                elif sql_type == self.sql_constants.DELETE:
                    # delete
                    sql = self.build_delete(table_name, dict_data)
                await tx.execute(sql, value_tuple)
            if params_list:
                await tx.commit()
            result = True
        except Exception as e:
            await tx.rollback()
            self.logger.exception(e)
            self.logger.info(sql)
            result = None
        return result

    async def page_find(self, table_name, params, value_tuple, sql='', sql_count=''):
        """
        :param table_name:
        :param params:
        :param value_tuple:
        :param sql:
        :param sql_count:
        :return:
        """
        if not sql:
            sql = self.build_paginate(table_name, params)
        if not sql_count:
            sql_count = self.build_get_rows(table_name, params)
        result = None
        try:
            cursor = await self.async_pools.execute(sql, value_tuple)
            dict_list = cursor.fetchall()
            cursor = await self.async_pools.execute(sql_count, value_tuple)
            dic_rows = cursor.fetchone()
            result = {
                'list': dict_list,
                'row_count': dic_rows[self.sql_constants.ROW_COUNT] if dic_rows else 0
            }
        except Exception as e:
            self.logger.info(sql)
            self.logger.info(sql_count)
            self.logger.exception(e)
        return result

    async def get_rows(self, table_name, params, value_tuple):
        sql_count = self.build_get_rows(table_name, params)
        result = 0
        try:
            cursor = await self.async_pools.execute(sql_count, value_tuple)
            dic_rows = cursor.fetchone()
            result = dic_rows[self.sql_constants.ROW_COUNT] if dic_rows else 0
        except Exception as e:
            self.logger.info(sql_count)
            self.logger.exception(e)
        return result

    async def find(self, table_name, params=None, value_tuple=(), str_type='one'):
        sql = self.build_select(table_name, params)
        result = False
        try:
            cursor = await self.async_pools.execute(sql, value_tuple)
            if str_type == self.sql_constants.LIST:
                result = cursor.fetchall()
            else:
                result = cursor.fetchone()
        except Exception as e:
            self.logger.info(sql)
            self.logger.exception(e)
        return result

    async def insert(self, table_name, params, value_tuple, auto_commit=True):
        sql = self.build_insert(table_name, params)
        result = None
        tx = await self.async_pools.begin()
        try:
            if auto_commit:
                cursor = await tx.execute(sql, value_tuple)
                await tx.commit()
                tx = None
            else:
                cursor = await self.tx.execute(sql, value_tuple)
            result = self.sql_constants.SUCCESS.copy()
            result['last_id'] = cursor.lastrowid
            result['affected_rows'] = cursor.rowcount
        except Exception as e:
            if tx:
                await tx.rollback()
            self.logger.info(sql)
            self.logger.exception(e)
        return result

    async def batch_insert(self, table_name, params, value_tuple, auto_commit=True):
        result = None
        sql = self.build_batch_insert(table_name, params)
        if not self.tx:
            self.tx = await self.async_pools.begin()
        try:
            if auto_commit:
                cursor = await self.tx.execute(sql, value_tuple)
                await self.tx.commit()
                self.tx = None
            else:
                cursor = await self.tx.execute(sql, value_tuple)
            result = self.sql_constants.SUCCESS.copy()
            result['affected_rows'] = cursor.rowcount
        except Exception as e:
            self.logger.info(sql)
            self.logger.exception(e)
        return result

    async def update(self, table_name, params, value_tuple, auto_commit=True):
        result = None
        sql = self.build_update(table_name, params)
        if not self.tx:
            self.tx = await self.async_pools.begin()
        try:
            if auto_commit:
                cursor = await self.tx.execute(sql, value_tuple)
                await self.tx.commit()
                self.tx = None
            else:
                cursor = await self.tx.execute(sql, value_tuple)
            result = cursor.rowcount
        except Exception as e:
            self.logger.info(sql)
            self.logger.exception(e)
        return result

    async def delete(self, table_name, params, value_tuple, auto_commit=True):
        sql = self.build_delete(table_name, params)
        result = None
        if not self.tx:
            self.tx = await self.async_pools.begin()
        try:
            if auto_commit:
                cursor = await self.tx.execute(sql, value_tuple)
                await self.tx.commit()
                self.tx = None
            else:
                cursor = await self.tx.execute(sql, value_tuple)
            result = cursor.rowcount
        except Exception as e:
            self.logger.info(sql)
            self.logger.exception(e)
        return result
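# Illustrative only: a minimal sketch of driving do_sqls() so two statements share one
# transaction, assuming sql_constants exposes the key names used above (SQL_TYPE,
# TABLE_NAME, DICT_DATA, VALUE_TUPLE, INSERT, UPDATE). The exact shape of DICT_DATA and
# VALUE_TUPLE depends on SqlBuilder, which is not shown here, so the table names,
# columns, and values below are placeholders only.
async def transaction_example(model):
    params_list = [
        {
            model.sql_constants.SQL_TYPE: model.sql_constants.INSERT,
            model.sql_constants.TABLE_NAME: 'admin_log',
            model.sql_constants.DICT_DATA: {'admin_id': None, 'action': None},
            model.sql_constants.VALUE_TUPLE: (1, 'login'),
        },
        {
            model.sql_constants.SQL_TYPE: model.sql_constants.UPDATE,
            model.sql_constants.TABLE_NAME: 'admin_account',
            model.sql_constants.DICT_DATA: {'last_login_time': None, 'admin_id': None},
            model.sql_constants.VALUE_TUPLE: ('2020-04-03 19:48:00', 1),
        },
    ]
    # Returns True after commit, or None if any statement failed and was rolled back.
    return await model.do_sqls(params_list)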