Example #1
def create_app(config, enable_config_file=False):
    """
    Factory function that creates the application.
    :param config: configuration class
    :param enable_config_file: whether a config file referenced by an environment variable may override the loaded configuration
    :return: application object
    """
    app = create_flask_app(config, enable_config_file)

    # Configuration needed for the project to run
    # Configure logging
    from utils.logging import create_logger
    create_logger(app)

    # Configure Redis Sentinel
    from redis.sentinel import Sentinel
    _sentinel = Sentinel(app.config.get('REDIS_SENTINELS'))
    # Obtain master/slave clients through the sentinel
    app.redis_master = _sentinel.master_for(
        app.config['REDIS_SENTINEL_SERVICE_NAME'])
    app.redis_slave = _sentinel.slave_for(
        app.config['REDIS_SENTINEL_SERVICE_NAME'])
    # Redis cluster
    from rediscluster import StrictRedisCluster
    app.redis_cluster = StrictRedisCluster(
        startup_nodes=app.config['REDIS_CLUSTER'])

    # Initialize the MySQL database connection
    from models import db
    db.init_app(app)

    # Create the Snowflake ID worker (snowflake algorithm)
    from utils.snowflake.id_worker import IdWorker
    app.id_worker = IdWorker(app.config['DATACENTER_ID'],
                             app.config['WORKER_ID'], app.config['SEQUENCE'])

    # Add request hooks
    from utils.middlewares import jwt_authentication
    app.before_request(jwt_authentication)

    # Register blueprints
    # User blueprint
    from toutiao.resources.user import user_bp
    app.register_blueprint(user_bp)

    # Scheduled job: reconcile our Redis and MySQL data at 3 a.m. every day
    from apscheduler.schedulers.background import BackgroundScheduler
    from apscheduler.executors.pool import ThreadPoolExecutor
    # Initialize the scheduler with a thread pool executor (10 worker threads by default)
    bg_scheduler = BackgroundScheduler(
        executors={'default': ThreadPoolExecutor()})
    # Add the job function
    # bg_scheduler.add_job(<job function>, <trigger>, <schedule arguments>)
    # bg_scheduler.add_job(<job function>, 'cron', hour=3)  # run at 3 a.m. every day
    from toutiao.aps_scheduler.statistic_data import fix_process
    bg_scheduler.add_job(fix_process, 'date', args=[app])  # run immediately, for testing
    # Start the scheduler
    bg_scheduler.start()
    return app
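
A minimal sketch of how this factory could be driven from a launcher module is shown below. The `DefaultConfig` class, its placeholder values, and the `toutiao.main` import path are assumptions for illustration only; they are not part of the example above, and in production the app would normally be served by a WSGI server such as gunicorn rather than `app.run()`.

# main.py (hypothetical launcher) -- builds the app from the factory above
from toutiao.main import create_app  # assumed module path


class DefaultConfig:
    # settings read by create_app; the values here are placeholders
    REDIS_SENTINELS = [('127.0.0.1', 26380)]
    REDIS_SENTINEL_SERVICE_NAME = 'mymaster'
    REDIS_CLUSTER = [{'host': '127.0.0.1', 'port': 7000}]
    DATACENTER_ID = 0
    WORKER_ID = 0
    SEQUENCE = 0


app = create_app(DefaultConfig, enable_config_file=True)

if __name__ == '__main__':
    # development server only; use a WSGI server (e.g. gunicorn) in production
    app.run(host='0.0.0.0', port=8000, debug=True)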
Example #2
def create_app(config, enable_config_file=False):
    """
    Create the application
    :param config: configuration object
    :param enable_config_file: whether a config file in the runtime environment may override the loaded configuration
    :return: application
    """
    app = create_flask_app(config, enable_config_file)

    # Create the Snowflake ID worker
    from utils.snowflake.id_worker import IdWorker
    app.id_worker = IdWorker(app.config['DATACENTER_ID'],
                             app.config['WORKER_ID'],
                             app.config['SEQUENCE'])

    # Rate limiter
    from utils.limiter import limiter as lmt
    lmt.init_app(app)

    # Configure logging
    from utils.logging import create_logger
    create_logger(app)

    # Register URL converters
    from utils.converters import register_converters
    register_converters(app)

    from redis.sentinel import Sentinel
    _sentinel = Sentinel(app.config['REDIS_SENTINELS'])
    app.redis_master = _sentinel.master_for(app.config['REDIS_SENTINEL_SERVICE_NAME'])
    app.redis_slave = _sentinel.slave_for(app.config['REDIS_SENTINEL_SERVICE_NAME'])

    from rediscluster import StrictRedisCluster
    app.redis_cluster = StrictRedisCluster(startup_nodes=app.config['REDIS_CLUSTER'])

    # rpc
    # app.rpc_reco = grpc.insecure_channel(app.config['RPC'].RECOMMEND)

    # Elasticsearch
    from elasticsearch5 import Elasticsearch
    app.es = Elasticsearch(
        app.config['ES'],
        # sniff before doing anything
        sniff_on_start=True,
        # refresh nodes after a node fails to respond
        sniff_on_connection_fail=True,
        # and also every 60 seconds
        sniffer_timeout=60
    )

    # socket.io
    # app.sio_maneger = socketio.KombuManager(app.config['RABBITMQ'], write_only=True)

    # Initialize the MySQL database connection
    from models import db
    db.init_app(app)

    # Add request hooks
    from utils.middlewares import jwt_authentication
    app.before_request(jwt_authentication)

    # Register the user module blueprint
    from .resources.user import user_bp
    app.register_blueprint(user_bp)

    # Register the news module blueprint
    from .resources.news import news_bp
    app.register_blueprint(news_bp)

    # Register the notification module blueprint
    from .resources.notice import notice_bp
    app.register_blueprint(notice_bp)

    # Search module
    from .resources.search import search_bp
    app.register_blueprint(search_bp)

    return app
Example #3
def create_app(config, enable_config_file=False):
    """
    Create the application
    :param config: configuration object
    :param enable_config_file: whether a config file in the runtime environment may override the loaded configuration
    :return: application
    """
    app = create_flask_app(config, enable_config_file)

    # Create the Snowflake ID worker
    from utils.snowflake.id_worker import IdWorker
    app.id_worker = IdWorker(app.config['DATACENTER_ID'],
                             app.config['WORKER_ID'], app.config['SEQUENCE'])

    # Rate limiter
    from utils.limiter import limiter as lmt
    lmt.init_app(app)

    # CORS
    from flask_cors import CORS
    CORS(app)

    # Configure logging
    from utils.logging import create_logger
    create_logger(app)

    # Register URL converters
    from utils.converters import register_converters
    register_converters(app)

    # redis
    # The old Redis interface is kept for now
    from utils.redis_client import create_redis_clients
    app.redis_cli = create_redis_clients(app)

    from redis.sentinel import Sentinel
    _sentinel = Sentinel(app.config['REDIS_SENTINELS'])
    app.redis_master = _sentinel.master_for(
        app.config['REDIS_SENTINEL_SERVICE_NAME'])
    app.redis_slave = _sentinel.slave_for(
        app.config['REDIS_SENTINEL_SERVICE_NAME'])

    from rediscluster import StrictRedisCluster
    app.redis_cluster = StrictRedisCluster(
        startup_nodes=app.config['REDIS_CLUSTER'])

    # Elasticsearch
    from elasticsearch5 import Elasticsearch
    app.es = Elasticsearch(
        app.config['ES'],
        # sniff before doing anything
        sniff_on_start=True,
        # refresh nodes after a node fails to respond
        sniff_on_connection_fail=True,
        # and also every 60 seconds
        sniffer_timeout=60)

    # Initialize the MySQL database connection
    from models import db
    db.init_app(app)

    # Deprecated: register error handlers
    # from utils.error_handlers import handle_redis_error, handler_mysql_error
    # app.register_error_handler(RedisError, handle_redis_error)
    # app.register_error_handler(SQLAlchemyError, handler_mysql_error)

    # Add request hooks
    from utils.middlewares import jwt_authentication
    app.before_request(jwt_authentication)

    # Register the user module blueprint
    from .resources.user import user_bp
    app.register_blueprint(user_bp)

    # Register the news module blueprint
    from .resources.news import news_bp
    app.register_blueprint(news_bp)

    # Register the statistics module blueprint
    from .resources.statistic import statistic_bp
    app.register_blueprint(statistic_bp)

    return app
Example #4
def create_app(env_type, enable_config_file=False):
    """
    Create the Flask application and initialize its components

    :param env_type: environment type
    :param enable_config_file: whether a config file in the runtime environment may override the loaded configuration
    :return: Flask application
    """
    app = create_flask_app(env_type, enable_config_file)

    # Register the custom regex URL converters
    from utils.converters import register_converters
    register_converters(app)

    # Create the Redis sentinel
    from redis.sentinel import Sentinel
    _sentinel = Sentinel(app.config['REDIS_SENTINELS'])
    # Get the Redis master/slave connection objects
    app.redis_master = _sentinel.master_for(app.config['REDIS_SENTINEL_SERVICE_NAME'])
    app.redis_slave = _sentinel.slave_for(app.config['REDIS_SENTINEL_SERVICE_NAME'])

    # Create the Redis cluster client
    from rediscluster import StrictRedisCluster
    app.redis_cluster = StrictRedisCluster(startup_nodes=app.config['REDIS_CLUSTER'])

    # Configure the MySQL database
    from models import db
    db.init_app(app)

    # Configure logging
    from utils.logging import create_logger
    create_logger(app)

    # Rate limiter
    from utils.limiter import limiter as lmt
    lmt.init_app(app)

    # Create the Snowflake ID worker
    from utils.snowflake.id_worker import IdWorker
    app.id_worker = IdWorker(app.config['DATACENTER_ID'],
                             app.config['WORKER_ID'],
                             app.config['SEQUENCE'])

    # Create the executor
    from apscheduler.executors.pool import ThreadPoolExecutor
    executor = ThreadPoolExecutor()
    # Create the background job scheduler
    from apscheduler.schedulers.background import BackgroundScheduler
    app.scheduler = BackgroundScheduler(executors={'default': executor})
    from scheduler.cache_schedule import fix_statistic
    # Add the scheduled job: sync the data at 3 a.m. every day
    # app.scheduler.add_job(fix_statistic, 'cron', hour=3)
    app.scheduler.add_job(fix_statistic, 'date', args=[app])
    # Start the scheduler
    app.scheduler.start()

    # Establish the gRPC connection
    import grpc
    app.channel = grpc.insecure_channel(app.config['RPC'].RECOMMEND)

    # Create the socket.io message queue manager (the Flask app is expected to run in
    # production mode); messages are published to the message queue
    import socketio
    app.siomgr = socketio.KombuManager(app.config['RABBIT_MQ'])

    # Create the Elasticsearch client
    from elasticsearch5 import Elasticsearch
    app.es = Elasticsearch(
        app.config['ES_HOST'],
        # sniff the ES cluster before doing anything
        sniff_on_start=True,
        # refresh the node list when a node fails to respond
        sniff_on_connection_fail=True,
        # and refresh the node list every 60 seconds
        sniffer_timeout=60
    )

    # Add request hooks
    from utils.middlewares import jwt_authentication
    app.before_request(jwt_authentication)

    # Register the user module blueprint
    from .resources.user import user_bp
    app.register_blueprint(user_bp)

    # Register the news module blueprint
    from .resources.news import news_bp
    app.register_blueprint(news_bp)

    # Register the search module blueprint
    from .resources.search import search_bp
    app.register_blueprint(search_bp)

    return app
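
Once `app.es` is attached as above, views can query Elasticsearch through `current_app`. A rough sketch under assumed names follows: the `articles` index, its `title` field, and the `search_articles` helper are hypothetical and not taken from these examples; only `current_app.es.search(index=..., body=...)` is the standard elasticsearch5 client call.

# Hypothetical search helper using the Elasticsearch client created above;
# it would be wired to a route in one of the blueprints.
from flask import current_app, request


def search_articles():
    query = request.args.get('q', '')
    dsl = {
        'query': {'match': {'title': query}},  # 'title' is an assumed field name
        'from': 0,
        'size': 10,
    }
    result = current_app.es.search(index='articles', body=dsl)  # assumed index name
    return {'results': [hit['_source'] for hit in result['hits']['hits']]}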
Example #5
def create_app(config, enable_config_file=False):
    """
    Create the application
    :param config: configuration object
    :param enable_config_file: whether a config file in the runtime environment may override the loaded configuration
    :return: application
    """
    app = create_flask_app(config, enable_config_file)

    # Create the Snowflake ID worker
    from utils.snowflake.id_worker import IdWorker
    app.id_worker = IdWorker(app.config['DATACENTER_ID'],
                             app.config['WORKER_ID'], app.config['SEQUENCE'])

    # Rate limiter
    from utils.limiter import limiter as lmt
    lmt.init_app(app)

    # Configure logging
    from utils.logging import create_logger
    create_logger(app)

    # Register URL converters
    from utils.converters import register_converters
    register_converters(app)

    from redis.sentinel import Sentinel
    _sentinel = Sentinel(app.config['REDIS_SENTINELS'])
    app.redis_master = _sentinel.master_for(
        app.config['REDIS_SENTINEL_SERVICE_NAME'])
    app.redis_slave = _sentinel.slave_for(
        app.config['REDIS_SENTINEL_SERVICE_NAME'])

    from rediscluster import StrictRedisCluster
    app.redis_cluster = StrictRedisCluster(
        startup_nodes=app.config['REDIS_CLUSTER'])

    # rpc
    import grpc
    app.rpc_reco = grpc.insecure_channel(app.config['RPC'].RECOMMEND)

    # Elasticsearch
    from elasticsearch5 import Elasticsearch
    app.es = Elasticsearch(
        app.config['ES'],
        # sniff before doing anything
        sniff_on_start=True,
        # refresh nodes after a node fails to respond
        sniff_on_connection_fail=True,
        # and also every 60 seconds
        sniffer_timeout=60)

    # socket.io
    # app.sio = socketio.KombuManager(app.config['RABBITMQ'], write_only=True)

    # Initialize the MySQL database connection
    from models import db
    db.init_app(app)

    # Add request hooks
    from utils.middlewares import jwt_authentication
    app.before_request(jwt_authentication)

    # Register the user module blueprint
    from .resources.user import user_bp
    app.register_blueprint(user_bp)

    # Register the news module blueprint
    from .resources.news import news_bp
    app.register_blueprint(news_bp)

    # Register the notification module blueprint
    from .resources.notice import notice_bp
    app.register_blueprint(notice_bp)

    # Search module
    from .resources.search import search_bp
    app.register_blueprint(search_bp)

    # Define the APScheduler scheduler object.
    # It is saved on the Flask app object so that views can conveniently use the
    # scheduler to add new scheduled jobs.
    from apscheduler.schedulers.background import BackgroundScheduler
    from apscheduler.executors.pool import ThreadPoolExecutor
    executors = {
        'default': ThreadPoolExecutor(20),
    }
    app.scheduler = BackgroundScheduler(executors=executors)

    # The scheduler manages two kinds of jobs.
    # The first kind is known up front, e.g. fixing the statistics cached in Redis;
    # those add_job calls are made here.
    # app.scheduler.add_job()

    # Add the job that periodically fixes the statistics
    from .schedulers.statistic import fix_statistics
    # Run at 3 a.m. every day;
    # args passes the listed arguments to the job function when the scheduler runs it
    # app.scheduler.add_job(fix_statistics, 'cron', hour=3, args=[app])

    # For easier testing, run it immediately
    app.scheduler.add_job(fix_statistics, 'date', args=[app])

    # The second kind is added dynamically by view functions while Flask is running,
    # by calling current_app.scheduler.add_job (see the sketch after this example).

    app.scheduler.start()  # non-blocking: it does not block the Flask program;
                           # the timers run in separate background threads

    return app
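
The comments above mention a second kind of job: ones added dynamically from a view through `current_app.scheduler.add_job`. A minimal sketch of what that could look like follows; the blueprint, route, and `send_reminder` job are hypothetical, and only the `app.scheduler` attribute and APScheduler's `add_job(..., 'date', run_date=..., args=...)` call mirror the example above.

# Hypothetical view that schedules a one-off job at request time,
# assuming app.scheduler was created as in the factory above.
from datetime import datetime, timedelta

from flask import Blueprint, current_app

demo_bp = Blueprint('demo', __name__)  # hypothetical blueprint


def send_reminder(user_id):
    # hypothetical job body; it runs in the scheduler's thread pool
    print('remind user', user_id)


@demo_bp.route('/users/<int:user_id>/reminders', methods=['POST'])
def create_reminder(user_id):
    # add a dynamic 'date' job that fires once, five minutes from now
    current_app.scheduler.add_job(
        send_reminder,
        'date',
        run_date=datetime.now() + timedelta(minutes=5),
        args=[user_id],
    )
    return {'message': 'OK'}, 201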
Example #6
def create_app(config, enable_config_file=False):
    """
    Create the application
    :param config: configuration object
    :param enable_config_file: whether a config file in the runtime environment may override the loaded configuration
    :return: application
    """
    app = create_flask_app(config, enable_config_file)

    # Create the Snowflake ID worker
    from utils.snowflake.id_worker import IdWorker
    app.id_worker = IdWorker(app.config['DATACENTER_ID'],
                             app.config['WORKER_ID'], app.config['SEQUENCE'])

    # Rate limiter
    from utils.limiter import limiter as lmt
    lmt.init_app(app)

    # Configure logging
    from utils.logging import create_logger
    create_logger(app)

    # Register URL converters
    from utils.converters import register_converters
    register_converters(app)

    from redis.sentinel import Sentinel
    _sentinel = Sentinel(app.config['REDIS_SENTINELS'])
    app.redis_master = _sentinel.master_for(
        app.config['REDIS_SENTINEL_SERVICE_NAME'])
    app.redis_slave = _sentinel.slave_for(
        app.config['REDIS_SENTINEL_SERVICE_NAME'])

    from rediscluster import StrictRedisCluster
    app.redis_cluster = StrictRedisCluster(
        startup_nodes=app.config['REDIS_CLUSTER'])

    # rpc
    import grpc
    app.rpc_reco_channel = grpc.insecure_channel(app.config['RPC'].RECOMMEND)
    app.rpc_reco = app.rpc_reco_channel

    # Elasticsearch
    from elasticsearch5 import Elasticsearch
    app.es = Elasticsearch(
        app.config['ES'],
        # sniff before doing anything
        sniff_on_start=True,
        # refresh nodes after a node fails to respond
        sniff_on_connection_fail=True,
        # and also every 60 seconds
        sniffer_timeout=60)

    # socket.io
    # The sio manager publishes push-notification tasks to RabbitMQ; the socket.io
    # server picks them up from the queue and pushes the messages to clients.
    import socketio
    app.sio_mgr = socketio.KombuManager(app.config['RABBITMQ'],
                                        write_only=True)

    # Initialize the MySQL database connection
    from models import db
    db.init_app(app)

    # Create the APScheduler background scheduler
    from apscheduler.schedulers.background import BackgroundScheduler
    from apscheduler.executors.pool import ThreadPoolExecutor
    executors = {'default': ThreadPoolExecutor(10)}

    app.scheduler = BackgroundScheduler(executors=executors)

    # Add the "static" scheduled job
    from .schedule.statistic import fix_statistics
    # app.scheduler.add_job(fix_statistics, 'date', args=[app])
    app.scheduler.add_job(fix_statistics, 'cron', hour=3, args=[app])

    # Start the scheduler
    app.scheduler.start()

    # Deprecated: registering error handlers this way has no effect with flask-restful
    # from utils.error_handlers import handle_redis_error, handler_mysql_error
    # app.register_error_handler(RedisError, handle_redis_error)
    # app.register_error_handler(SQLAlchemyError, handler_mysql_error)

    # Add request hooks
    from utils.middlewares import jwt_authentication
    app.before_request(jwt_authentication)

    # Register the user module blueprint
    from .resources.user import user_bp
    app.register_blueprint(user_bp)

    # Register the news module blueprint
    from .resources.news import news_bp
    app.register_blueprint(news_bp)

    # Register the notification module blueprint
    from .resources.notice import notice_bp
    app.register_blueprint(notice_bp)

    # Search module
    from .resources.search import search_bp
    app.register_blueprint(search_bp)

    return app
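
The write-only KombuManager created above is only a publisher; a separate Socket.IO server consuming the same RabbitMQ broker delivers the events to clients. A minimal sketch of emitting through it is shown below; the event name, payload, and room are assumptions, while `emit()` itself is the standard python-socketio manager API.

# Hypothetical helper that pushes a real-time notification through app.sio_mgr.
from flask import current_app


def notify_new_follower(user_id, follower_name):
    current_app.sio_mgr.emit(
        'new follower',                    # event name agreed with the client (assumed)
        data={'follower': follower_name},  # payload delivered to the client
        room=str(user_id),                 # assumes each user joins a room named by id
    )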
Example #7
def create_app(config, enable_config_file=False):
    """
    Create the application
    :param config: configuration object
    :param enable_config_file: whether a config file in the runtime environment may override the loaded configuration
    :return: application
    """
    app = create_flask_app(config, enable_config_file)

    # Create the Snowflake ID worker
    from utils.snowflake.id_worker import IdWorker
    app.id_worker = IdWorker(app.config['DATACENTER_ID'],
                             app.config['WORKER_ID'], app.config['SEQUENCE'])

    # Rate limiter
    from utils.limiter import limiter as lmt
    lmt.init_app(app)

    # Configure logging
    from utils.logging import create_logger
    create_logger(app)

    # Register URL converters
    from utils.converters import register_converters
    register_converters(app)

    from redis.sentinel import Sentinel
    _sentinel = Sentinel(app.config['REDIS_SENTINELS'])
    app.redis_master = _sentinel.master_for(
        app.config['REDIS_SENTINEL_SERVICE_NAME'])
    app.redis_slave = _sentinel.slave_for(
        app.config['REDIS_SENTINEL_SERVICE_NAME'])

    from rediscluster import StrictRedisCluster
    app.redis_cluster = StrictRedisCluster(
        startup_nodes=app.config['REDIS_CLUSTER'])

    # rpc
    import grpc
    app.rpc_reco_channel = grpc.insecure_channel(app.config['RPC'].RECOMMEND)
    # app.rpc_reco = grpc.insecure_channel(app.config['RPC'].RECOMMEND)

    # Elasticsearch
    from elasticsearch5 import Elasticsearch
    app.es = Elasticsearch(
        app.config['ES'],
        # sniff before doing anything
        sniff_on_start=True,
        # refresh nodes after a node fails to respond
        sniff_on_connection_fail=True,
        # and also every 60 seconds
        sniffer_timeout=60)

    # socket.io
    # app.sio = socketio.KombuManager(app.config['RABBITMQ'], write_only=True)

    # Initialize the MySQL database connection
    from models import db
    db.init_app(app)

    # Add request hooks
    from utils.middleware import jwt_authorization
    app.before_request(jwt_authorization)

    # Add APScheduler scheduled jobs
    from apscheduler.schedulers.background import BackgroundScheduler
    from apscheduler.executors.pool import ThreadPoolExecutor
    # Triggers
    from apscheduler.triggers import date, interval, cron
    from toutiao.schedule.statistics import fix_statistics

    # 1. Create the executors
    executors = {
        # By default jobs run on threads from this pool, with at most 10 concurrent workers
        "default": ThreadPoolExecutor(max_workers=10)
    }

    # 2. Create the scheduler, configured with the executors
    scheduler = BackgroundScheduler(executors=executors)

    # 2.1 Save the scheduler on the app so other code can add "dynamic" jobs
    #     via current_app.scheduler.add_job(...)
    app.scheduler = scheduler

    # 3. Add the "static" job that fixes the statistics
    # app.scheduler.add_job(func=<job function>, trigger=<trigger>, args=[<arguments>])
    # app.scheduler.add_job(func=fix_statistics, trigger=cron.CronTrigger(hour=4), args=[<arguments>])
    # Cron trigger: run the job at 4 a.m.
    # app.scheduler.add_job(func=fix_statistics, trigger="cron", hour=4, args=[app])
    app.scheduler.add_job(func=fix_statistics, trigger="date", args=[app])

    # 4. Start the scheduler
    app.scheduler.start()

    # Register the user module blueprint
    from .resources.user import user_bp
    app.register_blueprint(user_bp)

    # Register the news module blueprint
    from .resources.news import news_bp
    app.register_blueprint(news_bp)

    # Register the notification module blueprint
    from .resources.notice import notice_bp
    app.register_blueprint(notice_bp)

    # Search module
    from .resources.search import search_bp
    app.register_blueprint(search_bp)

    return app
Example #8
def create_app(config, enable_config_file=False):
    """
    Create the application
    :param config: configuration object
    :param enable_config_file: whether a config file in the runtime environment may override the loaded configuration
    :return: application
    """
    app = create_flask_app(config, enable_config_file)

    # Create the Snowflake ID worker
    from utils.snowflake.id_worker import IdWorker
    app.id_worker = IdWorker(app.config['DATACENTER_ID'],
                             app.config['WORKER_ID'], app.config['SEQUENCE'])

    # To generate a distributed ID inside a view:
    # id = current_app.id_worker.get_id()

    # Rate limiter
    from utils.limiter import limiter as lmt
    lmt.init_app(app)

    # Configure logging
    from utils.logging import create_logger
    create_logger(app)

    # Register URL converters
    from utils.converters import register_converters
    register_converters(app)

    from redis.sentinel import Sentinel
    _sentinel = Sentinel(app.config['REDIS_SENTINELS'])
    app.redis_master = _sentinel.master_for(
        app.config['REDIS_SENTINEL_SERVICE_NAME'])
    app.redis_slave = _sentinel.slave_for(
        app.config['REDIS_SENTINEL_SERVICE_NAME'])

    from rediscluster import StrictRedisCluster
    app.redis_cluster = StrictRedisCluster(
        startup_nodes=app.config['REDIS_CLUSTER'])

    # Usage in views:
    # current_app.redis_master.set()
    # current_app.redis_cluster.get()

    # rpc
    import grpc
    app.rpc_reco = grpc.insecure_channel(app.config['RPC'].RECOMMEND)

    # Elasticsearch
    from elasticsearch5 import Elasticsearch
    app.es = Elasticsearch(
        app.config['ES'],
        # sniff before doing anything
        sniff_on_start=True,
        # refresh nodes after a node fails to respond
        sniff_on_connection_fail=True,
        # and also every 60 seconds
        sniffer_timeout=60)

    # socket.io
    # app.sio = socketio.KombuManager(app.config['RABBITMQ'], write_only=True)

    # Initialize the MySQL database connection
    from models import db
    db.init_app(app)

    # Alternative initialization styles:
    # db = SQLAlchemy(app)
    # db = SQLAlchemy(); db.init_app(app)

    # Create the scheduler.
    # The scheduler object is saved on the Flask app so that views can add new
    # scheduled jobs at any time via current_app.scheduler.add_job().
    from apscheduler.schedulers.background import BackgroundScheduler
    from apscheduler.executors.pool import ThreadPoolExecutor
    executors = {
        'default': ThreadPoolExecutor(10),
    }

    app.scheduler = BackgroundScheduler(executors=executors)

    # Jobs added here are independent of the views; they are fixed when the
    # program starts.
    from .schedule import statistic
    # Run at 3 a.m. every day
    app.scheduler.add_job(statistic.fix_statistics, 'cron', hour=3, args=[app])

    # For easier testing, run it immediately
    # app.scheduler.add_job(statistic.fix_statistics, 'date', args=[app])

    # Further static jobs could be added here with app.scheduler.add_job()

    app.scheduler.start()

    # Add request hooks
    from utils.middlewares import jwt_authentication
    app.before_request(jwt_authentication)

    # Register the user module blueprint
    from .resources.user import user_bp
    app.register_blueprint(user_bp)

    # Register the news module blueprint
    from .resources.news import news_bp
    app.register_blueprint(news_bp)

    # Register the notification module blueprint
    from .resources.notice import notice_bp
    app.register_blueprint(notice_bp)

    # Search module
    from .resources.search import search_bp
    app.register_blueprint(search_bp)

    return app
Example #9
    def post(self):
        parser_form = reqparse.RequestParser()
        parser_form.add_argument('mobile',
                                 type=str,
                                 required=True,
                                 location='json')
        parser_form.add_argument('code',
                                 type=str,
                                 required=True,
                                 location='json')
        parser_form.add_argument('iiuv', type=str, location='json')
        args = parser_form.parse_args()
        mobile = args.mobile
        code = args.code
        iiuv_data = args.iiuv
        if not re.match(r'^\d{6}$', code):
            return {'error': '验证码格式错误'}, HttpStatus.OK.value
        if iiuv_data:
            cx = MysqlSearch().get_one(
                f"SELECT id FROM {MEMBERS_TABLE} WHERE IIUV='{iiuv_data}'")
            if cx is False:
                iiuv_data = None
        # Fetch the SMS verification code from redis
        key = 'app:code:{}'.format(mobile)
        try:
            real_code = current_app.redis_cli.get(key)
            if not real_code or real_code.decode() != code:
                return {'error': '验证码错误.'}, HttpStatus.OK.value
        except ConnectionError as e:
            current_app.logger.error(e)
        # Look the user up in the cache, or create and save a new user
        user_cache = current_app.redis_cli.hget('user_info_:{}'.format(mobile),
                                                0)
        if user_cache:
            dict_user_cache = json.loads(user_cache.decode())
            if dict_user_cache['mobile'] == mobile:
                token, refresh_token = self._generate_tokens(
                    dict_user_cache['id'], dict_user_cache['setreal'],
                    dict_user_cache['mobile'])
                token_data = [token, refresh_token]
                return {
                    'token': token_data,
                    "手机号码": int(mobile),
                    'data': '欢迎登陆',
                }, HttpStatus.OK.value
        if user_cache is None:
            sql = f"SELECT id,setreal,mobile FROM {MEMBERS_TABLE} WHERE mobile='{args.mobile}';"
            user_info = MysqlSearch().get_one(sql)
            if user_info and user_info['mobile'] == mobile:
                user_info = UserCache(mobile).get()
                token, refresh_token = self._generate_tokens(
                    user_info['id'], user_info['setreal'], user_info['mobile'])
                token_data = [token, refresh_token]
                return {
                    'token': token_data,
                    "手机号码": int(mobile)
                }, HttpStatus.OK.value
            else:
                # Store the new user in the database
                # Generate a unique nickname with the snowflake ID worker
                nickname = IdWorker(1, 2, 0).get_id()
                user_mobile = mobile
                # Pick a random avatar
                ran_data = random.randint(1, 12)
                txx = f"http://static.hfj447.com/avatars/{ran_data}.jpg"
                sql = f"INSERT INTO {MEMBERS_TABLE} (nickname,mobile,avatar) VALUES ('{nickname}','{user_mobile}','{txx}')"
                res = MysqlWrite().write(sql)
                # Look up the new user's id and generate the IIUV invitation code
                ii = MysqlSearch().get_one(
                    f"SELECT id FROM {MEMBERS_TABLE} WHERE mobile='{user_mobile}'"
                )
                member_iiuv = hashids_iivu_encode(ii['id'])
                # Generate the invitation short link
                short_link = get_short_link(member_iiuv)
                # Store the IIUV code and the short link
                iiu = MysqlWrite().write(
                    f"UPDATE {MEMBERS_TABLE} SET IIUV='{member_iiuv}',short_link='{short_link}' WHERE id='{ii['id']}'"
                )
                if res == 1 and iiuv_data is None:
                    user_info = UserCache(mobile).get()
                    MemberRelationTreeCache().tree(args.mobile, None)
                    # Return the tokens
                    token, refresh_token = self._generate_tokens(
                        user_info['id'], user_info['setreal'],
                        user_info['mobile'])
                    token_data = [token, refresh_token]
                    return {
                        'token': token_data,
                        "手机号码": int(mobile)
                    }, HttpStatus.OK.value

                elif res == 1 and iiuv_data:
                    MemberRelationTreeCache().tree(args.mobile, iiuv_data)
                    # Return the tokens
                    user_info = UserCache(mobile).get()
                    token, refresh_token = self._generate_tokens(
                        user_info['id'], user_info['setreal'],
                        user_info['mobile'])
                    token_data = [token, refresh_token]
                    return {
                        'token': token_data,
                        "手机号码": int(mobile)
                    }, HttpStatus.OK.value