def init_log(service_name):
    """Initialise the log component for one flask service.

    Reads logPath/logLevel from the service's entry under the 'flask'
    section of the global config and returns the created file handler.
    """
    import log_helper
    service_conf = GLOBAL_CONFIG.get('flask').get(service_name)
    return log_helper.addTimedRotatingFileHandler(
        service_conf.get('logPath'),
        logLevel=service_conf.get('logLevel'))
def login(login_form: LoginForm, response: Response):
    """Log in either by password (loginType 1) or remember-me token (loginType 2).

    :param login_form: credentials plus loginType / remember flags.
    :param response: used to set the remember_me_token cookie on type-1 logins.
    :return: dict with api_token, expire, refresh_token (plus username and
        remember-me fields for password logins).
    :raises ServiceException: bad credentials or an invalid remember-me token.
    """

    def _issue_tokens(username):
        # Issue an API token plus a refresh token (the refresh token stays
        # valid one expire-window longer); returns (token, result_dict) so the
        # caller can reuse the token object for the remember-me variant.
        token = Token()
        token.type = TokenType.API_TOKEN.value
        token.username = username
        api_token = tokenManger.createToken(token)
        token.endTime += tokenManger.expire
        token.type = TokenType.REFRESH.value
        refresh_token = tokenManger.createToken(token)
        return token, {
            "api_token": api_token,
            "expire": tokenManger.expire,
            "refresh_token": refresh_token,
        }

    # Password login
    if login_form.loginType == 1:
        if login_form.username != GLOBAL_CONFIG.get("username") or not passwordEq(
                GLOBAL_CONFIG.get("password"), login_form.password):
            raise ServiceException(msg="账户密码不匹配")
        token, result = _issue_tokens(login_form.username)
        result["username"] = login_form.username
        # Optional "stay logged in" token + cookie
        if login_form.remember:
            token.type = TokenType.REMEMBER_ME.value
            remember_me_time = int(GLOBAL_CONFIG.get("remember_me_min")) * 60
            token.endTime = token.createTime + remember_me_time
            result['remember_me_token'] = tokenManger.createToken(token)
            result['remember_me_expire'] = remember_me_time
            response.set_cookie(key="remember_me_token",
                                value=result['remember_me_token'],
                                max_age=remember_me_time,
                                expires=remember_me_time)
        return result

    # Remember-me login
    if login_form.loginType == 2:
        remember_me_token = login_form.remember_me_token
        # FIX: removed debug print() that leaked the remember-me token to stdout.
        if remember_me_token is None:
            raise ServiceException(msg="登录操作非法")
        token_dict = tokenManger.parseToken(remember_me_token,
                                            token_type=TokenType.REMEMBER_ME)
        if token_dict['username'] != GLOBAL_CONFIG.get("username"):
            raise ServiceException(msg="登录操作非法")
        _, result = _issue_tokens(token_dict['username'])
        return result
def get_instance(cls, name):
    """Return the process-wide RedisForCommon singleton, creating it lazily
    from the named entry of the 'cache' section of the global config.

    :param name: key of the cache config entry to initialise from.
    """
    # FIX: dropped the unused `import config` (only GLOBAL_CONFIG is needed).
    # NOTE(review): lazy init is not guarded by a lock — confirm this is only
    # called from a single thread during startup.
    if not RedisForCommon.INSTANCE:
        from config import GLOBAL_CONFIG
        init_config = GLOBAL_CONFIG.get('cache').get(name)
        RedisForCommon.INSTANCE = RedisForCommon(**init_config)
    return RedisForCommon.INSTANCE
def get_instance(flag=None):
    """Return the process-wide RedisForCluster singleton, creating it lazily
    from the 'cluster_redis' entry of the 'cache' config section.

    :param flag: unused; kept for caller compatibility.
    """
    # FIX: dropped the unused `import config` (only GLOBAL_CONFIG is needed).
    # NOTE(review): lazy init is not guarded by a lock — confirm this is only
    # called from a single thread during startup.
    if not RedisForCluster.INSTANCE:
        from config import GLOBAL_CONFIG
        init_config = GLOBAL_CONFIG.get('cache').get("cluster_redis")
        RedisForCluster.INSTANCE = RedisForCluster(**init_config)
    return RedisForCluster.INSTANCE
def start(service_name):
    """Boot one flask service: logging, route registration, pid file, run loop."""
    handler = init_log(service_name)
    conf = GLOBAL_CONFIG.get("flask").get(service_name)

    # Register the service's interface modules before serving.
    import reg_route
    reg_route.reg(conf.get("reg_module_name"))
    logging.info(" start app %s", service_name)

    # Track the running process with a pid file under $BASIC_PATH/bin.
    pid_path = os.environ["BASIC_PATH"] + '/bin/' + service_name + '_flask_app.pid'
    pid_guard = pid_util.PidUtil(service_name, pid_path)
    pid_guard.start()

    app = flask_app.get_flask_app()
    app.logger.addHandler(handler)
    # Blocks here until the server stops.
    app.run(conf.get("host"), conf.get('port'), conf.get('debug_mode'),
            threaded=True)

    pid_guard.clear()
    logging.info('stop app %s', service_name)
def get_instance(db_flag_name):
    """Return the pooled Mysql instance for *db_flag_name*, creating and
    caching it from the 'db' config section on first request."""
    global INSTANCE_POOL
    from config import GLOBAL_CONFIG
    instance = INSTANCE_POOL.get(db_flag_name)
    if not instance:
        db_conf = GLOBAL_CONFIG.get('db').get(db_flag_name)
        instance = Mysql(db_flag_name, **db_conf)
        INSTANCE_POOL[db_flag_name] = instance
    return instance
def get_instance(db_flag_name):
    """Return the pooled Mysql instance for *db_flag_name*, creating and
    caching it from the 'db' config section on first request."""
    # FIX: dropped the unused `import config` — the sibling variant of this
    # pool getter already imports only GLOBAL_CONFIG; now consistent.
    global INSTANCE_POOL
    from config import GLOBAL_CONFIG
    init_config = GLOBAL_CONFIG.get('db').get(db_flag_name)
    pool_db_instance = INSTANCE_POOL.get(db_flag_name)
    if not pool_db_instance:
        pool_db_instance = Mysql(db_flag_name, **init_config)
        INSTANCE_POOL[db_flag_name] = pool_db_instance
    return pool_db_instance
def replace_img(match):
    """Regex-replace callback: rewrite a markdown image reference so its
    source points at the note_file_url endpoint.

    :param match: re.Match whose group contains a '(...)' image source.
    :return: the matched text with the image path rewritten; http(s)
        sources are returned untouched.
    """
    # FIX: renamed the local `str` — it shadowed the builtin.
    text = match.group()
    img_src = text[text.index("(") + 1:text.index(")")]
    if img_src.startswith("http"):
        return text
    img_abs_src = img_src
    # A relative path is resolved against the directory of the current note.
    if not os.path.exists(img_src):
        img_abs_src = os.path.join(
            os.path.dirname(note_concurrent['note_path']), img_src)
    return text.replace(img_src,
                        GLOBAL_CONFIG.get("note_file_url").format(img_abs_src))
def get_logger_config():
    """Attach a size+time rotating file sink to the loguru logger and return it."""
    filename = GLOBAL_CONFIG.get("log.filename")
    if filename is None:
        filename = GLOBAL_CONFIG.project_path + "/logs/log.log"
    retention = GLOBAL_CONFIG.get("log.log_max_days")
    if retention is None:
        retention = "30 days"
    max_size = GLOBAL_CONFIG.get("log.log_max_size")
    if max_size is None:
        max_size = "500 MB"
    level = GLOBAL_CONFIG.get("log.log_level")
    if level is None:
        level = "DEBUG"

    # Rotate when either the size cap is reached or the clock hits midnight.
    rotator = Rotator(max_size, "00:00")
    logger.add(filename,
               level=level,
               rotation=rotator.should_rotate,
               enqueue=True,
               encoding='utf-8',
               retention=retention)
    return logger
def start(service_name):
    """Boot the kvmmgr flask service with flask-login support.

    Initialises logging, registers interface routes, writes a pid file,
    wires flask-login, then blocks in app.run() until shutdown.
    """
    init_js_config_file()
    log_handler = init_log(service_name)
    flask_config = GLOBAL_CONFIG.get("flask").get(service_name)
    options = {
        'threaded': True,
    }
    host = flask_config.get("host")
    port = flask_config.get('port')
    debug_mode = flask_config.get('debug_mode')
    reg_module_name = flask_config.get("reg_module_name")
    # FIX: replaced leftover Python-2 debug `print` statements with logging.
    logging.info("registering interface modules: %s", reg_module_name)
    import reg_route
    reg_route.reg(reg_module_name)
    logging.info(" start app %s", service_name)
    # Track the running process with a pid file under $BASIC_PATH/bin.
    pid_file = os.environ[
        "BASIC_PATH"] + '/bin/' + service_name + '_flask_app.pid'
    pid_ins = pid_util.PidUtil(service_name, pid_file)
    pid_ins.start()
    app = flask_app.get_flask_app()
    # Fixed secret key keeps browser sessions valid across app restarts
    # (a random key would force re-login on every restart).
    # SECURITY NOTE(review): the key is hard-coded in source — move it to
    # config/environment; changing it will invalidate existing sessions.
    app.config['SECRET_KEY'] = 'kvmmgr_666_lee#!&@!*@%'
    login_manager = LoginManager()

    # flask-login callback: map a session userid back to a user object.
    @login_manager.user_loader
    def load_user(userid):
        user = user_info.UserInfo().get_user(userid)
        return user

    login_manager.init_app(app)
    app.logger.addHandler(log_handler)
    app.run(host, port, debug_mode, **options)
    pid_ins.clear()
    logging.info('stop app %s', service_name)
def login(refreshToken: str):
    """Exchange a valid refresh token for a fresh api_token/refresh_token pair.

    :raises ServiceException: missing token, bad token, or username mismatch.
    """
    if refreshToken is None:
        raise ServiceException(msg="操作非法")
    claims = tokenManger.parseToken(refreshToken, token_type=TokenType.REFRESH)
    if claims['username'] != GLOBAL_CONFIG.get("username"):
        raise ServiceException(msg="操作非法")

    token = Token()
    token.type = TokenType.API_TOKEN.value
    token.username = claims['username']
    api_token = tokenManger.createToken(token)
    # The new refresh token stays valid one expire-window longer than the
    # api token it accompanies.
    token.endTime += tokenManger.expire
    token.type = TokenType.REFRESH.value
    new_refresh_token = tokenManger.createToken(token)
    return {
        "api_token": api_token,
        "expire": tokenManger.expire,
        "refresh_token": new_refresh_token,
    }
def send_async_msg(topic=KAFKA_TOPIC_NAME, msg=None):
    '''
    Send a message to the Kafka broker configured under ASYNC_MESSAGE_BROKER.
    :param topic: topic name (defaults to KAFKA_TOPIC_NAME)
    :param msg: payload, JSON-serialised before sending
    :return: the producer's produce() result
    '''
    logging.info('kafka send async msg, date: {}'.format(
        datetime.datetime.now()))
    config_info = GLOBAL_CONFIG.get('ASYNC_MESSAGE_BROKER')
    broker_conf = config_info.get('config')
    client = KafkaClient(
        hosts=broker_conf.get('hosts'),
        zookeeper_hosts=broker_conf.get('zookeeper_hosts'),
        broker_version=broker_conf.get('broker_version'))
    topic = client.topics[topic]
    # FIX: the producer was never stopped, leaking its connection/threads.
    # The context manager flushes and stops it on exit (dead commented-out
    # code that showed this form has been removed).
    with topic.get_sync_producer() as producer:
        return producer.produce(json_helper.dumps(msg))
def start(consumer_id):
    """Kafka consumer loop: dispatch instance-lifecycle messages to handlers.

    Runs until the consumer is interrupted; on any fatal error logs the
    traceback, logs '<consumer_id> stop' and exits the process.
    """
    try:
        config_info = GLOBAL_CONFIG.get('ASYNC_MESSAGE_BROKER')
        log_path = config_info.get('log_path')
        log_level = config_info.get('log_level')
        log_helper.add_timed_rotating_file_handler(log_path,
                                                   logLevel=log_level)
        kafka_instance = kafka_client.MKafkaClient(**config_info.get('config'))
        kafka_instance.connect()
        # Initialise the handler thread pool.
        logging.info("config info %s", config_info)
        init_threads_num = config_info.get('init_threads_num', 32)
        max_threads_num = config_info.get('max_threads_num', 32)
        thread_idle_time = config_info.get('thread_idle_time', 60 * 5)
        handler_ins = handler_register.Handler(init_threads_num,
                                               max_threads_num,
                                               thread_idle_time)
        topic_name = KAFKA_TOPIC_NAME
        consumer = kafka_instance.get_balanced_consumer(topic_name)
        logging.info("start consumer")

        def _commit_and_deal(msg):
            # Common accept path: log, commit the offset, hand to the pool.
            logging.info(msg)
            consumer.commit_offsets()
            handler_ins.deal(msg)

        def _handle_create(expected_status, data, msg):
            # One handler for the three create-style routing keys; they only
            # differed in the in-progress status code ('100' create,
            # '102' clone/clone-create). Only process when the instance is in
            # the expected in-progress state, or failed ('0') and not a
            # duplicate request; anything else is treated as a repeat.
            check_status, ins_status = ins_s.check_instance_status(
                data['uuid'])
            if not check_status:
                logging.info("can not find instance create from kafka")
                logging.info(msg)
            elif ins_status == expected_status:
                _commit_and_deal(msg)
            elif ins_status == '0':
                if ins_a_s.whether_vm_repeat_create(data['request_id']):
                    logging.info("repeat job from kafka")
                    logging.info(msg)
                else:
                    _commit_and_deal(msg)
            else:
                logging.info("repeat instance create from kafka")
                logging.info(msg)
                consumer.commit_offsets()

        for msg_object in consumer:
            try:
                msg = msg_object.value
                # FIX: debug `print msg_object.offset` routed to logging.
                logging.info("offset %s", msg_object.offset)
                if not msg:
                    time.sleep(1)
                else:
                    # FIX: message was JSON-parsed twice; parse once.
                    req_data = json_helper.read(msg)
                    data = req_data.get('data')
                    routing_key = req_data['routing_key']
                    if routing_key == 'INSTANCE.CREATE':
                        _handle_create('100', data, msg)
                    elif routing_key == 'INSTANCE.CLONECREATE':
                        _handle_create('102', data, msg)
                    elif routing_key == 'INSTANCE.CLONE':
                        _handle_create('102', data, msg)
                    elif routing_key == 'INSTANCE.PERFORMANCE':
                        _commit_and_deal(msg)
                    else:
                        if ins_a_s.whether_vm_repeat_create(
                                data['request_id']):
                            logging.info("repeat job from kafka")
                            logging.info(msg)
                        else:
                            _commit_and_deal(msg)
            except Exception:
                # FIX: narrowed from a bare except — a failing message must
                # not kill the loop, but Ctrl-C now reaches the outer handler.
                logging.error(traceback.format_exc())
        handler_ins.waitAndStopAll()
    except BaseException:
        # Intentionally broad (incl. KeyboardInterrupt): log, announce stop,
        # terminate the process — same shutdown path as before.
        logging.error(traceback.format_exc())
        logging.info(consumer_id + ' stop')
        sys.exit()
from web.routers import router
from config import GLOBAL_CONFIG
import os, re, time
from threading import Lock
from fastapi.responses import FileResponse

NOTE_BASE_DIR = GLOBAL_CONFIG.get("note_dir")
markdown_ext = "md"


@router.get("/note/list")
def note_list(noteDirPath: str = None):
    """List markdown notes under the note base dir (optionally a subdir),
    newest first by create_time."""
    target_dir = NOTE_BASE_DIR if noteDirPath is None else os.path.join(
        NOTE_BASE_DIR, noteDirPath)
    notes = list_file(target_dir, markdown_ext)
    if notes is None:
        return None

    def _created(note):
        # Sort key: parsed creation timestamp of each note entry.
        return time.strptime(note['create_time'], '%Y-%m-%d %H:%M:%S')

    return sorted(notes, key=_created, reverse=True)


@router.get("/note")
def note_detail(notePath: str):
    """Return the rendered content of one note, addressed relative to the
    note base dir."""
    return read_note_to_str(os.path.join(NOTE_BASE_DIR, notePath))
# NOTE(review): this chunk begins mid-method — the statements below are the
# tail of the token-creation method whose `def` line is outside this view;
# the indentation here is reconstructed and should be checked against the file.
        token.createTime = int(time.time())
        if token.endTime is None:
            # Default lifetime: now + the manager's configured expire window.
            token.endTime = token.createTime + self.expire
        bytes_data = json.dumps(token.__dict__).encode()
        # Sign the serialized payload so it cannot be forged or altered.
        bytes_sign = hmac.new(self.key, bytes_data,
                              digestmod=self.digestmod).digest()
        # Wire format: "<payload>.<signature>", both rendered via to_str.
        return "{}.{}".format(to_str(bytes_data, self.str_type),
                              to_str(bytes_sign, self.str_type))

    def parseToken(self, token: str, token_type: TokenType = None):
        """Verify and decode a token string.

        :param token: "<payload>.<signature>" as produced by createToken.
        :param token_type: when given, the decoded 'type' claim must match.
        :return: the decoded claims dict.
        :raises AuthException: malformed token, bad signature, not yet
            effective, expired, or wrong token type.
        """
        s = token.split(".")
        if len(s) != 2:
            raise AuthException(msg="token格式异常")
        bytes_data = to_bytes(s[0], self.str_type)
        bytes_sign = to_bytes(s[1], self.str_type)
        # Recompute the HMAC over the payload and compare with the signature.
        # NOTE(review): consider hmac.compare_digest here to avoid a timing
        # side-channel on signature comparison.
        calc_sign = hmac.new(self.key, bytes_data,
                             digestmod=self.digestmod).digest()
        if calc_sign != bytes_sign:
            raise AuthException(msg="token验证异常")
        token_dict = json.loads(str(bytes_data, encoding=self.encoding))
        now = int(time.time())
        # Reject tokens dated in the future, then expired ones.
        if token_dict['createTime'] is not None and token_dict['createTime'] > now:
            raise AuthException(msg="token未生效")
        if token_dict['endTime'] < now:
            raise AuthException(msg="token已过期")
        # Type check is only enforced when both sides declare a type.
        if token_type is not None and 'type' in token_dict and token_dict['type'] != token_type.value:
            raise AuthException(msg="token类型非法")
        return token_dict


# Module-level singleton that signs tokens with the configured signing key.
global_token_manger = TokenManger(key=GLOBAL_CONFIG.get("token_sign_key").encode(encoding='utf-8'))
def force_pull():
    """Force the local note repository to match origin/master.

    Runs `git fetch --all`, `git reset --hard origin/master`, `git pull`
    in the configured note directory, printing each command's output.
    """
    import subprocess
    note_dir = GLOBAL_CONFIG.get("note_dir")
    # FIX: os.popen built a shell string with the config path interpolated
    # (breaks on spaces/metacharacters, injection-prone, errors ignored).
    # Argument lists with cwd= avoid the shell entirely; stop on the first
    # failing command to mirror the original `&&` chaining.
    for cmd in (["git", "fetch", "--all"],
                ["git", "reset", "--hard", "origin/master"],
                ["git", "pull"]):
        result = subprocess.run(cmd, cwd=note_dir,
                                capture_output=True, text=True)
        print(result.stdout)
        if result.returncode != 0:
            break
from mongoengine import *
from config import GLOBAL_CONFIG

# NOTE(review): the two-argument get() reads like a ConfigParser-style lookup
# ('Mongo' section, 'conexao' option) — elsewhere GLOBAL_CONFIG.get takes a
# single key; confirm which API this config object actually exposes.
connection_string = GLOBAL_CONFIG.get('Mongo', 'conexao')
# Module-level side effect: opens the default mongoengine connection as soon
# as this module is imported.
connect(host=connection_string)