Example #1
    def remove_apscheduler():
        """
        Remove the marker saved in redis
        Args:

        Returns:

        """
        rdb_ = None
        try:
            rdb_ = redis.StrictRedis(
                host=app_.config["ECLIENTS_REDIS_HOST"],
                port=app_.config["ECLIENTS_REDIS_PORT"],
                db=2,
                password=app_.config["ECLIENTS_REDIS_PASSWD"],
                decode_responses=True)
        except RedisError as err:
            aelog.exception(err)
        else:
            with ignore_error():
                rdb_.delete("apscheduler")
                aelog.info(f"当前进程{os.getpid()}清除redis[2]任务标记[apscheduler].")
        finally:
            if rdb_:
                rdb_.connection_pool.disconnect()
Example #2
 async def set_expire_data(self, name, ex=EXPIRED):
     try:
         await self.redis_db.expire(name, ex)
         return True
     except RedisError as e:
         aelog.info(RedisClientError(str(e)))
         return False
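A minimal usage sketch for the snippet above; `client` stands for an instance of the (hypothetical) wrapper class that defines set_expire_data over an initialized redis_db:

import aelog


# Hypothetical usage sketch: refresh the TTL of a cached key; `client` is
# assumed to be an instance of the class defining set_expire_data() above.
async def refresh_ttl(client, key: str) -> bool:
    ok = await client.set_expire_data(key, ex=3600)  # expire in one hour
    if not ok:
        aelog.info(f"could not refresh TTL for key {key}")
    return ok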
Example #3
    def save_session(self, session: Session, dump_responses: bool = False, ex: int = SESSION_EXPIRED) -> str:
        """
        Save the session using a hash map
        Args:
            session: a Session instance
            dump_responses: whether to dump each value to a string
            ex: expiration time in seconds
        Returns:

        """
        session_data = self.response_dumps(dump_responses, session)

        try:
            if not self.redis_db.hmset(session_data["session_id"], session_data):
                raise RedisClientError("save session failed, session_id={}".format(session_data["session_id"]))
            if not self.redis_db.expire(session_data["session_id"], ex):
                aelog.error("set session expire failed, session_id={}".format(session_data["session_id"]))
        except RedisError as e:
            aelog.exception("save session error: {}, {}".format(session.session_id, e))
            raise RedisClientError(str(e))
        else:
            # Remove the old token
            try:
                old_session_id = self.get_hash_data(self._account_key, field_name=session.account_id)
            except RedisClientError as e:
                aelog.info(f"{session.account_id} no old token token, {str(e)}")
            else:
                with ignore_error():
                    self.delete_session(old_session_id, False)
            # Update with the new token
            self.save_update_hash_data(self._account_key, field_name=session.account_id,
                                       hash_data=session.session_id, ex=LONG_EXPIRED)
            return session.session_id
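A hedged usage sketch for save_session above; `client` and `session` stand for an instance of the class defining the method and an already-built Session object, and RedisClientError is assumed to be importable from the same library:

import aelog


def persist_session(client, session):
    # Hypothetical helper: save the session and log the outcome.
    # RedisClientError is the library's own exception type (assumed in scope).
    try:
        session_id = client.save_session(session, dump_responses=True, ex=3600)
    except RedisClientError as e:
        aelog.error(f"could not persist session {session.session_id}: {e}")
        return None
    aelog.info(f"session saved, session_id={session_id}")
    return session_id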
Example #4
    def gen_model(self, model_cls, suffix: str = None, **kwargs):
        """
        Generate a new model class from an existing model class

        Mainly used for querying and inserting into sharded (per-suffix) tables
        Args:
            model_cls: the model class to shard
            suffix: suffix appended to the new model class name
            kwargs: additional arguments
        Returns:

        """
        if kwargs:
            aelog.info(kwargs)
        if not issubclass(model_cls, self.Model):
            raise ValueError("model_cls must be db.Model type.")

        table_name = f"{model_cls.__tablename__}_{suffix}"
        class_name = f"{gen_class_name(table_name)}Model"
        if getattr(model_cls, "_cache_class", None) is None:
            setattr(model_cls, "_cache_class", {})

        model_cls_ = getattr(model_cls, "_cache_class").get(class_name, None)
        if model_cls_ is None:
            model_fields = {}
            for attr_name, field in model_cls.__dict__.items():
                if isinstance(field, InstrumentedAttribute
                              ) and not attr_name.startswith("_"):
                    model_fields[attr_name] = self.Column(
                        type_=field.type,
                        primary_key=field.primary_key,
                        index=field.index,
                        nullable=field.nullable,
                        default=field.default,
                        onupdate=field.onupdate,
                        unique=field.unique,
                        autoincrement=field.autoincrement,
                        doc=field.doc)
            model_cls_ = type(
                class_name, (self.Model, ), {
                    "__doc__": model_cls.__doc__,
                    "__table_args__ ": model_cls.__table_args__ or {
                        'mysql_engine': 'InnoDB',
                        'mysql_charset': 'utf8mb4'
                    },
                    "__tablename__": table_name,
                    "__module__": model_cls.__module__,
                    **model_fields
                })
            getattr(model_cls, "_cache_class")[class_name] = model_cls_

        return model_cls_
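A sketch of how gen_model might be used for a sharded table; `db` stands for the instance defining the method, `UserModel` is a hypothetical existing model, and the db.session.query(...) call assumes a Flask-SQLAlchemy style session:

# Hypothetical usage sketch: build a model class bound to a sharded
# table and query it. `db` and `UserModel` are assumed to exist as
# described in the lead-in above.
def query_shard(db, UserModel, suffix="202001", status=1):
    shard_model = db.gen_model(UserModel, suffix=suffix)  # maps "<tablename>_<suffix>"
    # db.session.query() assumes a Flask-SQLAlchemy style session object
    return db.session.query(shard_model).filter_by(status=status).all()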
Example #5
    def callback_done(fn):
        """
        Thread callback function
        Args:

        Returns:

        """
        try:
            data = fn.result()
        except Exception as e:
            aelog.exception("error,{} return result: {}".format(task_name, e))
        else:
            aelog.info("{} return result: {}".format(task_name, data))
Example #6
def test_aelog_output_file():
    """

    Args:

    Returns:

    """
    aelog.init_app(aelog_access_file="test.log", aelog_console=True)
    aelog.debug("simple debug message", "other message", 1, [6, 2, 3])
    aelog.info("simple info message", "other message", 1, [6, 2, 3])
    aelog.warning("simple warning message", "other message", 1, [6, 2, 3])
    aelog.error("simple error message", "other message", 1, [1, 2, 3])
    aelog.critical("simple critical message", "other message", 1, [1, 2, 3])
    try:
        5 / 0
    except Exception as e:
        aelog.exception(e)
Example #7
def test_aelog_output_console():
    """

    Args:

    Returns:

    """
    aelog.init_app(aelog_console=True)
    aelog.debug("simple debug message", "other message", 1, [1, 2, 3])
    aelog.info("simple info message", "other message", 2, (1, 2, 3))
    aelog.warning("simple warning message", "other message", 3, {1, 2, 3})
    aelog.error("simple error message", "other message", 4, {1: 1, 2: 2, 3: 3})
    aelog.critical("simple critical message", "other message", 5, classmethod)
    try:
        5 / 0
    except Exception as e:
        aelog.exception(e)
Example #8
    async def save_session(self, session: Session, dump_responses=False, ex=SESSION_EXPIRED):
        """
        Save the session using a hash map
        Args:
            session: a Session instance
            dump_responses: whether to dump each value to a string
            ex: expiration time in seconds
        Returns:

        """
        session_data = dict(vars(session))
        # Optionally dump each value to a string
        if dump_responses:
            hash_data = {}
            for hash_key, hash_val in session_data.items():
                if not isinstance(hash_val, str):
                    with ignore_error():
                        hash_val = ujson.dumps(hash_val)
                hash_data[hash_key] = hash_val
            session_data = hash_data

        try:
            if not await self.redis_db.hmset(session_data["session_id"], session_data):
                raise RedisClientError("save session failed, session_id={}".format(session_data["session_id"]))
            if not await self.redis_db.expire(session_data["session_id"], ex):
                aelog.error("set session expire failed, session_id={}".format(session_data["session_id"]))
        except RedisError as e:
            aelog.exception("save session error: {}, {}".format(session.session_id, e))
            raise RedisClientError(str(e))
        else:
            # Remove the old token
            try:
                old_session_id = await self.get_hash_data(self._account_key, field_name=session.account_id)
            except RedisClientError as e:
                aelog.info(f"{session.account_id} no old token token, {str(e)}")
            else:
                with ignore_error():
                    await self.delete_session(old_session_id, False)
            # Update with the new token
            await self.save_update_hash_data(self._account_key, field_name=session.account_id,
                                             hash_data=session.session_id, ex=LONG_EXPIRED)
            return session.session_id
Example #9
 async def execute(self, sql, param=None):
     """
     Insert, update, or delete data
     Args:
         sql: a SQL query string or a sqlalchemy expression
         param: bound parameters for the statement
     Returns:
         Since the kind of statement is not known in advance, the ResultProxy instance is returned directly
     """
     async with self.aio_engine.acquire() as conn:
         async with conn.begin() as trans:
             try:
                 aelog.info("sql: %s" % sql)
                 if not param:
                     cursor = await conn.execute(sql)
                 else:
                     cursor = await conn.execute(sql, param)
             except IntegrityError as e:
                 await trans.rollback()
                 aelog.exception(e)
                 if "Duplicate" in str(e):
                     raise MysqlDuplicateKeyError(e)
                 else:
                     raise MysqlError(e)
             except (MySQLError, Error) as e:
                 await trans.rollback()
                 aelog.exception(e)
                 raise MysqlError(e)
             except Exception as e:
                 await trans.rollback()
                 aelog.exception(e)
                 raise HttpError(500,
                                 message=self.message[6][self.msg_zh],
                                 error=e)
             else:
                 await conn.execute('commit')
     return cursor
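A hedged usage sketch for execute above; `db` stands for the instance defining the method, MysqlDuplicateKeyError is assumed importable from the same library, and the %s parameter style and rowcount attribute are assumptions based on an aiomysql/SQLAlchemy-style driver:

import aelog


async def rename_user(db, user_id, new_name):
    # Hypothetical helper: run an UPDATE and report the affected rows,
    # treating a duplicate name as a non-fatal condition.
    try:
        cursor = await db.execute(
            "UPDATE users SET name = %s WHERE id = %s", (new_name, user_id))
    except MysqlDuplicateKeyError:
        aelog.info(f"name {new_name} is already taken")
        return 0
    return cursor.rowcount  # number of rows affected (ResultProxy attribute)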
Example #10
def apscheduler_start(app_: Flask,
                      scheduler,
                      is_warkup: bool = True,
                      warkup_func: Callable = None,
                      warkup_seconds: int = 3600) -> NoReturn:
    """
    Start apscheduler, using redis to avoid duplicate schedulers across multiple processes/instances

    warkup_func only needs to wrap apscheduler_warkup_job, e.g.:
    def warkup_func():
        apscheduler_warkup_job(scheduler)  # scheduler here is the global apscheduler instance after startup
    Args:
        app_: the Flask application instance
        scheduler: the apscheduler scheduler instance
        is_warkup: whether to periodically rediscover jobs, for jobs added by processes not running the scheduler
        warkup_func: the wakeup job function, which can wrap apscheduler_warkup_job
        warkup_seconds: interval in seconds between wakeups
    Returns:

    """
    def remove_apscheduler():
        """
        Remove the marker saved in redis
        Args:

        Returns:

        """
        rdb_ = None
        try:
            rdb_ = redis.StrictRedis(
                host=app_.config["ECLIENTS_REDIS_HOST"],
                port=app_.config["ECLIENTS_REDIS_PORT"],
                db=2,
                password=app_.config["ECLIENTS_REDIS_PASSWD"],
                decode_responses=True)
        except RedisError as err:
            aelog.exception(err)
        else:
            with ignore_error():
                rdb_.delete("apscheduler")
                aelog.info(f"当前进程{os.getpid()}清除redis[2]任务标记[apscheduler].")
        finally:
            if rdb_:
                rdb_.connection_pool.disconnect()

    try:
        from flask_apscheduler import APScheduler
        if not isinstance(scheduler, APScheduler):
            raise ValueError("wrong scheduler type")
    except ImportError as e:
        raise ImportError(f"please install flask_apscheduler {e}")

    rdb = None
    try:
        rdb = redis.StrictRedis(host=app_.config["ECLIENTS_REDIS_HOST"],
                                port=app_.config["ECLIENTS_REDIS_PORT"],
                                db=2,
                                password=app_.config["ECLIENTS_REDIS_PASSWD"],
                                decode_responses=True)
    except RedisError as e:
        aelog.exception(e)
    else:
        with rdb.lock("apscheduler_lock"):
            if rdb.get("apscheduler") is None:
                rdb.set("apscheduler", "apscheduler")
                scheduler.start()
                if is_warkup and callable(warkup_func):
                    scheduler.add_job("warkup",
                                      warkup_func,
                                      trigger="interval",
                                      seconds=warkup_seconds,
                                      replace_existing=True)
                atexit.register(remove_apscheduler)
                aelog.info(
                    f"当前进程{os.getpid()}启动定时任务成功,设置redis[2]任务标记[apscheduler],"
                    f"任务函数为{warkup_func.__name__}.")
            else:
                scheduler._scheduler.state = 2
                aelog.info(f"其他进程已经启动了定时任务,当前进程{os.getpid()}不再加载定时任务.")
    finally:
        if rdb:
            rdb.connection_pool.disconnect()
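A sketch of wiring apscheduler_start into a Flask app, assuming apscheduler_warkup_job is provided by the same module; the redis config values are placeholders:

from flask import Flask
from flask_apscheduler import APScheduler

app = Flask(__name__)
# placeholder connection settings; apscheduler_start reads these keys
app.config.update(ECLIENTS_REDIS_HOST="127.0.0.1",
                  ECLIENTS_REDIS_PORT=6379,
                  ECLIENTS_REDIS_PASSWD="")

scheduler = APScheduler()
scheduler.init_app(app)


def warkup_func():
    # wrap the wakeup job around the global scheduler instance,
    # as suggested in the docstring above
    apscheduler_warkup_job(scheduler)


apscheduler_start(app, scheduler, is_warkup=True,
                  warkup_func=warkup_func, warkup_seconds=3600)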