def _insert_one(self, cname: str, document: Union[List[Dict], Dict],
                insert_one: bool = True) -> Union[List[str], str]:
    """
    Insert a single document, or many documents, into a collection.

    Args:
        cname: collection name
        document: a single document dict, or a list of document dicts
            when ``insert_one`` is False
        insert_one: choose between insert_one and insert_many, default True
    Returns:
        The inserted ObjectId as a str (insert_one), or a list of
        ObjectId strs (insert_many).
    Raises:
        MongoInvalidNameError: the collection name is invalid
        MongoDuplicateKeyError: a unique index was violated
        HttpError: any other PyMongo failure
    """
    try:
        if insert_one:
            result = self.db.get_collection(cname).insert_one(document)
        else:
            result = self.db.get_collection(cname).insert_many(document)
    except InvalidName as e:
        # typo fixed: "collention" -> "collection"
        raise MongoInvalidNameError("Invalid collection name {} {}".format(cname, e))
    except DuplicateKeyError as e:
        raise MongoDuplicateKeyError("Duplicate key error, {}".format(e))
    except PyMongoError as err:
        aelog.exception("Insert one document failed, {}".format(err))
        raise HttpError(400, message=mongo_msg[100][self.msg_zh])
    else:
        # Return a list (not a one-shot generator) so callers can iterate
        # or index the ids more than once; matches the annotated type.
        return str(result.inserted_id) if insert_one else [str(val) for val in result.inserted_ids]
def execute_many(self, sql: str, args_data: List[Tuple]) -> int:
    """
    Batch-execute an insert statement with many parameter rows.

    Args:
        sql: the INSERT statement, e.g.
            INSERT INTO traffic_100 (IMEI,lbs_dict_id,app_key) VALUES(%s,%s,%s)
        args_data: parameter rows, e.g.
            [('868403022323171', None, 'EB23B21E6E1D930E850E7267E3F00095'),
             ('865072026982119', None, 'EB23B21E6E1D930E850E7267E3F00095')]
    Returns:
        number of affected rows; 0 when the statement failed
    """
    # was `count = None`: the annotated -> int contract was violated on
    # failure, so callers doing arithmetic on the result would crash
    count: int = 0
    try:
        with self.conn.cursor() as cursor:
            count = cursor.executemany(sql, args_data)
    except pymysql.Error as e:
        self.conn.rollback()
        aelog.exception(e)
    except Exception as e:
        self.conn.rollback()
        aelog.exception(e)
    else:
        self.conn.commit()
    return count
def _find_many(self, cname: str, query_key: Dict, exclude_key: Dict = None, skip: int = 0,
               limit: int = 0, sort: List[Tuple] = None) -> List[Dict]:
    """
    Query multiple documents from a collection.

    Args:
        cname: collection name
        query_key: filter used to match documents
        exclude_key: projection restricting the returned fields
        skip: number of matched documents to skip
        limit: maximum number of documents to return
        sort: sort spec, a list of (field, direction) pairs,
            e.g. [('field1', pymongo.ASCENDING)]
    Returns:
        list of matched documents, each with "_id" renamed to "id"
    """
    docs: List[Dict] = []
    try:
        cursor = self.db.get_collection(cname).find(
            query_key, projection=exclude_key, skip=skip, limit=limit, sort=sort)
        for record in cursor:
            if record.get("_id", None) is not None:
                record["id"] = str(record.pop("_id"))
            docs.append(record)
    except InvalidName as e:
        raise MongoInvalidNameError("Invalid collention name {} {}".format(cname, e))
    except PyMongoError as err:
        aelog.exception("Find many document failed, {}".format(err))
        raise HttpError(400, message=mongo_msg[104][self.msg_zh])
    return docs
async def _find_documents(self, name, query_key, filter_key=None, limit=None, skip=None, sort=None):
    """
    Query multiple documents from a collection.

    Args:
        name: collection name
        query_key: filter used to match documents
        filter_key: projection restricting the returned fields
        limit: maximum number of documents to return
        skip: number of matched documents to skip
        sort: sort spec, a list of (field, direction) pairs,
            e.g. [('field1', pymongo.ASCENDING)]
    Returns:
        list of matched documents, each with "_id" renamed to "id"
    """
    docs = []
    try:
        cursor = self.db.get_collection(name).find(
            query_key, projection=filter_key, limit=limit, skip=skip, sort=sort)
        async for record in cursor:
            if record.get("_id", None) is not None:
                record["id"] = str(record.pop("_id"))
            docs.append(record)
    except InvalidName as e:
        raise MongoInvalidNameError("Invalid collention name {} {}".format(name, e))
    except PyMongoError as err:
        aelog.exception("Find many documents failed, {}".format(err))
        raise HttpError(400, message=self.message[104][self.msg_zh], error=err)
    return docs
async def _aggregate(self, name, pipline):
    """
    Run an aggregation pipeline against a collection.

    Args:
        name: collection name
        pipline: the aggregation pipeline, one or more stages
    Returns:
        the aggregated documents, each with "_id" renamed to "id"
    """
    aggregated = []
    try:
        cursor = self.db.get_collection(name).aggregate(pipline)
        async for item in cursor:
            if item.get("_id", None) is not None:
                item["id"] = str(item.pop("_id"))
            aggregated.append(item)
    except InvalidName as e:
        raise MongoInvalidNameError("Invalid collention name {} {}".format(name, e))
    except PyMongoError as err:
        aelog.exception("Aggregate documents failed, {}".format(err))
        raise HttpError(400, message=self.message[105][self.msg_zh], error=err)
    return aggregated
def execute(session: FesSession, query: Union[FesQuery, str], params: Dict = None) -> Optional[RowProxy]:
    """
    Run an insert, update or delete statement and commit it.

    Args:
        session: session object
        query: raw SQL string or a sqlalchemy expression
        params: bind parameters for the statement
    Returns:
        The first row of the result when the statement produced rows,
        otherwise None.
    """
    rp: Optional[ResultProxy] = None
    try:
        rp = session.execute(query, params)
        session.commit()
    except IntegrityError as e:
        session.rollback()
        # duplicate-key violations get a dedicated exception type
        raise DBDuplicateKeyError(e) if "Duplicate" in str(e) else DBError(e)
    except DatabaseError as e:
        session.rollback()
        aelog.exception(e)
        raise DBError(e)
    except Exception as e:
        session.rollback()
        aelog.exception(e)
        raise HttpError(400, message=mysql_msg[2]["msg_zh"], error=e)
    else:
        return rp.fetchone() if rp.returns_rows else None
    finally:
        if rp:
            rp.close()
async def _update_document(self, name, query_key: dict, update_data: dict, upsert=False, update_one=True):
    """
    Update the document(s) matching the filter.

    Args:
        name: collection name
        query_key: filter used to match documents
        update_data: update operations applied to the matched document(s)
        upsert: insert when nothing matches, default False
        update_one: choose between update_one and update_many
    Returns:
        dict with the matched/modified counts and the upserted id,
        e.g. {"matched_count": 1, "modified_count": 1, "upserted_id": "f"}
    """
    try:
        collection = self.db.get_collection(name)
        updater = collection.update_one if update_one else collection.update_many
        result = await updater(query_key, update_data, upsert=upsert)
    except InvalidName as e:
        raise MongoInvalidNameError("Invalid collention name {} {}".format(name, e))
    except DuplicateKeyError as e:
        raise MongoDuplicateKeyError("Duplicate key error, {}".format(e))
    except PyMongoError as err:
        aelog.exception("Update documents failed, {}".format(err))
        raise HttpError(400, message=self.message[101][self.msg_zh], error=err)
    upserted = str(result.upserted_id) if result.upserted_id else None
    return {"matched_count": result.matched_count,
            "modified_count": result.modified_count,
            "upserted_id": upserted}
def save_session(self, session: Session, dump_responses: bool = False, ex: int = SESSION_EXPIRED) -> str:
    """
    Persist a session as a redis hash map, then rotate the account's token.

    Args:
        session: Session instance
        dump_responses: whether to json-dump each hash value
        ex: expiry time, in seconds
    Returns:
        the saved session id
    Raises:
        RedisClientError: the hash write failed or redis errored
    """
    session_data = self.response_dumps(dump_responses, session)
    try:
        if not self.redis_db.hmset(session_data["session_id"], session_data):
            raise RedisClientError("save session failed, session_id={}".format(session_data["session_id"]))
        if not self.redis_db.expire(session_data["session_id"], ex):
            # expire failure is logged but not fatal — the session data itself was saved
            aelog.error("set session expire failed, session_id={}".format(session_data["session_id"]))
    except RedisError as e:
        aelog.exception("save session error: {}, {}".format(session.session_id, e))
        raise RedisClientError(str(e))
    else:
        # Drop the account's previous token (best effort) so only one session stays valid
        try:
            old_session_id = self.get_hash_data(self._account_key, field_name=session.account_id)
        except RedisClientError as e:
            aelog.info(f"{session.account_id} no old token token, {str(e)}")
        else:
            with ignore_error():
                self.delete_session(old_session_id, False)
        # Map the account to the new session id (long-lived mapping)
        self.save_update_hash_data(self._account_key, field_name=session.account_id,
                                   hash_data=session.session_id, ex=LONG_EXPIRED)
        return session.session_id
def execute_many(self, sql: str, args_data: List[Tuple]) -> int:
    """
    Batch-execute an insert statement with many parameter rows.

    Args:
        sql: the INSERT statement, e.g.
            INSERT INTO traffic_100 (IMEI,lbs_dict_id,app_key) VALUES(%s,%s,%s)
        args_data: parameter rows, e.g.
            [('868403022323171', None, 'EB23B21E6E1D930E850E7267E3F00095'),
             ('865072026982119', None, 'EB23B21E6E1D930E850E7267E3F00095')]
            If args is a list or tuple, %s can be used as a placeholder in the query.
            If args is a dict, %(name)s can be used as a placeholder in the query.
    Returns:
        number of affected rows; 0 when the statement failed
    """
    affected: int = 0
    try:
        with self.conn.cursor() as cur:
            affected = cur.executemany(sql, args_data)  # type: ignore
    except pymysql.Error as err:
        self.conn.rollback()
        aelog.exception(err)
    except Exception as err:
        self.conn.rollback()
        aelog.exception(err)
    else:
        self.conn.commit()
    return affected
def insert_context(self, session: Session = None) -> Generator['FlaskAlchemy', None, None]:
    """
    Context-manager generator for insert operations: commits on clean
    exit; on error rolls back and re-raises a wrapped exception.

    Args:
        session: session object, defaults to self.session
    Returns:
        yields this instance (presumably consumed via
        contextlib.contextmanager — decorator not visible here, TODO confirm)
    Raises:
        DBDuplicateKeyError: a unique constraint was violated
        DBError: any other database error
        HttpError: any non-database failure inside the with-block
    """
    session = self.session if session is None else session
    try:
        yield self
        session.commit()
    except IntegrityError as e:
        session.rollback()
        # duplicate-key violations get a dedicated exception type
        if "Duplicate" in str(e):
            raise DBDuplicateKeyError(e)
        else:
            raise DBError(e)
    except DatabaseError as e:
        session.rollback()
        aelog.exception(e)
        raise DBError(e)
    except Exception as e:
        session.rollback()
        aelog.exception(e)
        raise HttpError(400, message=self.message[1][self.msg_zh], error=e)
def insert_query(self, insert_data: Union[List[Dict], Dict]) -> 'Query':
    """
    Build an insert query for the bound model.

    Args:
        insert_data: a row dict, or a list of row dicts for a bulk insert
    Returns:
        self, with the built insert query and its data stored on
        self._query_obj / self._insert_data for later execution
    Raises:
        QueryArgsError: the data referenced an invalid column
    """
    self._verify_model()
    try:
        insert_data_: Union[List[Dict], Dict]
        if isinstance(insert_data, dict):
            # merge model-level column defaults under the caller's values
            insert_data_ = {**self._get_model_default_value(), **insert_data}
            query = insert(self._model).values(insert_data_)
        else:
            insert_data_ = [{**self._get_model_default_value(), **one_data}
                            for one_data in insert_data]
            # presumably the executemany pattern: the statement is shaped from
            # the first row and the full list is bound at execution time via
            # _insert_data — confirm against the execute path
            query = insert(self._model).values(insert_data_[0])
    except SQLAlchemyError as e:
        aelog.exception(e)
        # typo fixed: "Cloumn" -> "Column"
        raise QueryArgsError(message="Column args error: {}".format(str(e)))
    else:
        self._query_obj, self._insert_data = query, insert_data_
        return self
def execute(self, sql: str, args_data: Tuple = None) -> int:
    """
    Execute a single statement: insert, update or delete.

    Args:
        sql: the SQL statement, e.g.
            INSERT INTO traffic_100 (IMEI,lbs_dict_id,app_key) VALUES(%s,%s,%s)
        args_data: tuple, list or dict of bind parameters, e.g.
            ('868403022323171', None, 'EB23B21E6E1D930E850E7267E3F00095')
            If args is a list or tuple, %s can be used as a placeholder in the query.
            If args is a dict, %(name)s can be used as a placeholder in the query.
    Returns:
        number of affected rows; 0 when the statement failed
    """
    affected = 0
    try:
        with self.conn.cursor() as cur:
            affected = cur.execute(sql, args_data)
    except pymysql.Error as err:
        self.conn.rollback()
        aelog.exception(err)
    except Exception as err:
        self.conn.rollback()
        aelog.exception(err)
    else:
        self.conn.commit()
    return affected
async def _find_one(self, model, query_key: dict, or_query_key: dict):
    """
    Fetch a single row matching the filters.

    Args:
        model: the model/table to select from (the previous ``: list``
            annotation was wrong — select() takes a selectable, not a list)
        query_key: AND filter conditions
        or_query_key: OR filter conditions
    Returns:
        the matched row as a dict, or None
    Raises:
        QueryArgsError: a filter referenced an invalid column
        HttpError: executing the query failed
    """
    try:
        query = select(model)
        if query_key or or_query_key:
            query = self._column_expression(model, query, query_key, or_query_key)
    except SQLAlchemyError as e:
        aelog.exception(e)
        # typo fixed: "Cloumn" -> "Column"
        raise QueryArgsError(message="Column args error: {}".format(str(e)))
    else:
        try:
            async with self.aio_engine.acquire() as conn:
                async with conn.execute(query) as cursor:
                    resp = await cursor.fetchone()
                # A transaction is implicitly started here, oddly,
                # so it must be ended explicitly.
                await conn.execute('commit')
        except (MySQLError, Error) as err:
            aelog.exception("Find one data failed, {}".format(err))
            raise HttpError(400, message=self.message[4][self.msg_zh], error=err)
        else:
            return dict(resp) if resp else None
async def _find_count(self, model, query_key: dict, or_query_key: dict):
    """
    Count the rows matching the filters.

    (The previous docstring said "query a single row"; this method
    returns a row count.)

    Args:
        model: the model/table to count over
        query_key: AND filter conditions
        or_query_key: OR filter conditions
    Returns:
        the matched row count
    Raises:
        QueryArgsError: a filter referenced an invalid column
        HttpError: executing the query failed
    """
    try:
        query = select([func.count().label("count")]).select_from(model)
        if query_key or or_query_key:
            query = self._column_expression(model, query, query_key, or_query_key)
    except SQLAlchemyError as e:
        aelog.exception(e)
        # typo fixed: "Cloumn" -> "Column"
        raise QueryArgsError(message="Column args error: {}".format(str(e)))
    else:
        try:
            async with self.aio_engine.acquire() as conn:
                async with conn.execute(query) as cursor:
                    resp = await cursor.fetchone()
                # end the implicitly started transaction
                await conn.execute('commit')
        except (MySQLError, Error) as err:
            aelog.exception("Find data failed, {}".format(err))
            raise HttpError(400, message=self.message[5][self.msg_zh], error=err)
        else:
            return resp.count
async def _find_one(self, sql, param=None): """ 查询单条数据 Args: model: 查询的model名称 query_key: 查询model的过滤条件 or_query_key: 或查询model的过滤条件 Returns: 返回匹配的数据或者None """ # cursor = await self.execute(sql, param) # resp = await cursor.fetchone() # return dict(resp) if resp else None try: async with self.aio_engine.acquire() as conn: cursor = await self.execute(sql, param) resp = await cursor.fetchone() await conn.execute('commit') # 理论上不应该加这个的,但是这里默认就会启动一个事务,很奇怪 except (MySQLError, Error) as err: aelog.exception("Find one data failed, {}".format(err)) raise HttpError(400, message=self.message[4][self.msg_zh], error=err) else: return dict(resp) if resp else None
async def _delete_data(self, sql, param=None):
    """
    Delete rows with a raw SQL statement.

    (The previous docstring said "update data"; this executes a delete
    and returns the affected row count.)

    Args:
        sql: the SQL statement to execute
        param: bind parameters for the statement
    Returns:
        number of deleted rows
    Raises:
        MysqlError: the statement failed at the database level
        HttpError: any other failure
    """
    async with self.aio_engine.acquire() as conn:
        async with conn.begin() as trans:
            try:
                cursor = await self.execute(sql, param)
            except (MySQLError, Error) as e:
                await trans.rollback()
                aelog.exception(e)
                raise MysqlError(e)
            except Exception as e:
                await trans.rollback()
                aelog.exception(e)
                raise HttpError(500, message=self.message[3][self.msg_zh], error=e)
            else:
                # NOTE(review): self.execute may use a different connection
                # than `conn`/`trans` — confirm this commit takes effect
                await conn.execute('commit')
                return cursor.rowcount
def delete_session(self, session_id: str, delete_key: bool = True) -> NoReturn:
    """
    Delete a session stored as a redis hash map, plus its related cache keys.

    Args:
        session_id: session id
        delete_key: also remove the account -> session mapping entry
    Returns:

    Raises:
        RedisClientError: the session id is invalid or redis errored
    """
    try:
        # sanity check: the hash must exist and store its own id
        session_id_ = self.redis_db.hget(session_id, "session_id")
        if session_id_ != session_id:
            raise RedisClientError("invalid session_id, session_id={}".format(session_id))
        exist_keys = []
        session_data = self.get_session(session_id, cls_flag=False)
        exist_keys.append(session_data["org_id"])
        exist_keys.append(session_data["role_id"])
        exist_keys.append(session_data["menu_id"])
        exist_keys.append(session_data["static_permission_id"])
        exist_keys.append(session_data["dynamic_permission_id"])
        with ignore_error():
            # drop the existing account-related cache keys (best effort)
            self.delete_keys(exist_keys)
        if delete_key is True:
            self.redis_db.hdel(self._account_key, session_data["account_id"])
        if not self.redis_db.delete(session_id):
            aelog.error("delete session failed, session_id={}".format(session_id))
    except RedisError as e:
        aelog.exception("delete session error: {}, {}".format(session_id, e))
        raise RedisClientError(str(e))
def execute(self, sql: str, args_data: Tuple = None) -> int:
    """
    Execute a single statement: insert, update or delete.

    Args:
        sql: the SQL statement, e.g.
            INSERT INTO traffic_100 (IMEI,lbs_dict_id,app_key) VALUES(%s,%s,%s)
        args_data: bind parameters, e.g.
            ('868403022323171', None, 'EB23B21E6E1D930E850E7267E3F00095')
    Returns:
        number of affected rows; 0 when the statement failed
    """
    # was `count = None`: a failed statement then returned None instead of
    # a row count; 0 keeps the return type consistent
    count = 0
    try:
        with self.conn.cursor() as cursor:
            count = cursor.execute(sql, args_data)
    except pymysql.Error as e:
        self.conn.rollback()
        aelog.exception(e)
    except Exception as e:
        self.conn.rollback()
        aelog.exception(e)
    else:
        self.conn.commit()
    return count
def update_context(
        self, session: FesSession) -> Generator['FastapiAlchemy', None, None]:
    """
    Context-manager generator for update operations: commits on clean
    exit; on error rolls back and re-raises a wrapped exception.

    Args:
        session: session object
    Returns:
        yields this instance
    Raises:
        DBDuplicateKeyError: a unique constraint was violated
        DBError: any other database error
        HttpError: any non-database failure inside the with-block
    """
    try:
        yield self
        session.commit()
    except IntegrityError as e:
        session.rollback()
        # duplicate-key violations get a dedicated exception type
        if "Duplicate" in str(e):
            raise DBDuplicateKeyError(e)
        else:
            raise DBError(e)
    except DatabaseError as e:
        session.rollback()
        aelog.exception(e)
        raise DBError(e)
    except Exception as e:
        session.rollback()
        aelog.exception(e)
        raise HttpError(400, message=mysql_msg[2]["msg_zh"], error=e)
async def _query_execute(self, query: Union[Select, str], params: Dict = None) -> ResultProxy:
    """
    Run a read query.

    Autocommit is enabled on the read path so SELECTs do not need an
    explicit commit. With read/write splitting, data committed on the
    write node was not visible to subsequent reads unless each read also
    committed — which felt unnecessary — and since reads dominate,
    enabling autocommit here also improves query efficiency.

    Args:
        query: raw SQL string or a sqlalchemy expression
        params: bind parameter values
    Returns:
        the ResultProxy of whatever query was executed
    Raises:
        HttpError: the query failed
    """
    conn: SAConnection = self.aio_engine.acquire()
    async with conn as conn:
        await conn.connection.autocommit(True)
        try:
            cursor = await conn.execute(query, params or {})
        except (MySQLError, Error) as e:
            aelog.exception("Find data failed, {}".format(e))
            raise HttpError(400, message=self.message[4][self.msg_zh])
        except Exception as e:
            aelog.exception(e)
            raise HttpError(400, message=self.message[4][self.msg_zh])
        return cursor
async def _find_document(self, name, query_key, filter_key=None):
    """
    Fetch a single document from a collection.

    Args:
        name: collection name
        query_key: filter used to match the document
        filter_key: projection restricting the returned fields
    Returns:
        the matched document with "_id" renamed to "id", or None
    """
    try:
        doc = await self.db.get_collection(name).find_one(query_key, projection=filter_key)
    except InvalidName as e:
        raise MongoInvalidNameError("Invalid collention name {} {}".format(name, e))
    except PyMongoError as err:
        aelog.exception("Find one document failed, {}".format(err))
        raise HttpError(400, message=self.message[103][self.msg_zh], error=err)
    if doc and doc.get("_id", None) is not None:
        doc["id"] = str(doc.pop("_id"))
    return doc
async def _delete_document(self, name, query_key, delete_one=True):
    """
    Delete the document(s) matching the filter.

    Args:
        name: collection name
        query_key: filter used to match documents
        delete_one: choose between delete_one and delete_many
    Returns:
        number of deleted documents
    """
    try:
        collection = self.db.get_collection(name)
        deleter = collection.delete_one if delete_one else collection.delete_many
        result = await deleter(query_key)
    except InvalidName as e:
        raise MongoInvalidNameError("Invalid collention name {} {}".format(name, e))
    except PyMongoError as err:
        aelog.exception("Delete documents failed, {}".format(err))
        raise HttpError(400, message=self.message[102][self.msg_zh], error=err)
    return result.deleted_count
async def update_session(self, session: Session, dump_responses=False, ex=EXPIRED):
    """
    Refresh a session stored as a redis hash map.

    Args:
        session: Session instance
        ex: expiry time, in seconds
        dump_responses: whether to json-dump each hash value
    Returns:

    Raises:
        RedisClientError: the write or expire failed, or redis errored
    """
    session_data = dict(vars(session))
    # Optionally json-dump every non-str value before writing the hash
    if dump_responses:
        hash_data = {}
        for hash_key, hash_val in session_data.items():
            if not isinstance(hash_val, str):
                # best effort: on dump failure the original value is kept
                with ignore_error():
                    hash_val = ujson.dumps(hash_val)
            hash_data[hash_key] = hash_val
        session_data = hash_data
    try:
        if not await self.redis_db.hmset(session_data["session_id"], session_data):
            raise RedisClientError("update session failed, session_id={}".format(session_data["session_id"]))
        if not await self.redis_db.expire(session_data["session_id"], ex):
            raise RedisClientError("set session expire failed, session_id={}".format(session_data["session_id"]))
    except RedisError as e:
        aelog.exception("update session error: {}, {}".format(session_data["session_id"], e))
        raise RedisClientError(str(e))
async def _insert_document(self, name, document, insert_one=True):
    """
    Insert a single document, or many documents, into a collection.

    Args:
        name: collection name
        document: a single document, or a list of documents when
            insert_one is False
        insert_one: choose between insert_one and insert_many, default True
    Returns:
        the inserted ObjectId as a str, or a generator of ObjectId strs
        when insert_many was used
    """
    try:
        collection = self.db.get_collection(name)
        if insert_one:
            result = await collection.insert_one(document)
        else:
            result = await collection.insert_many(document)
    except InvalidName as e:
        raise MongoInvalidNameError("Invalid collention name {} {}".format(name, e))
    except DuplicateKeyError as e:
        raise MongoDuplicateKeyError("Duplicate key error, {}".format(e))
    except PyMongoError as err:
        aelog.exception("Insert one document failed, {}".format(err))
        raise HttpError(400, message=self.message[100][self.msg_zh], error=err)
    if insert_one:
        return str(result.inserted_id)
    return (str(val) for val in result.inserted_ids)
def _execute(self, query: Union[Query, str], params: Dict = None, session: Session = None) -> ResultProxy:
    """
    Execute an insert, update or delete statement and commit it.

    Args:
        query: raw SQL string or a sqlalchemy expression
        params: bind parameters for the statement
        session: session object, defaults to self.session
    Returns:
        the ResultProxy of whatever query was executed
    """
    sess = self.session if session is None else session
    try:
        cursor = sess.execute(query, params)
        sess.commit()
    except IntegrityError as e:
        sess.rollback()
        # duplicate-key violations get a dedicated exception type
        raise DBDuplicateKeyError(e) if "Duplicate" in str(e) else DBError(e)
    except DatabaseError as e:
        sess.rollback()
        aelog.exception(e)
        raise DBError(e)
    except Exception as e:
        sess.rollback()
        aelog.exception(e)
        raise HttpError(400, message=self.message[2][self.msg_zh], error=e)
    return cursor
def update_context(self, session: Session = None) -> Generator['DBClient', None, None]:
    """
    Context-manager generator for update operations: commits on clean
    exit; on error rolls back and re-raises a wrapped exception.

    Args:
        session: session object, defaults to self.session
    Returns:
        yields this instance (return annotation corrected: this is a
        generator function, not one returning 'DBClient')
    Raises:
        DBDuplicateKeyError: a unique constraint was violated
        DBError: any other database error
        HttpError: any non-database failure inside the with-block
    """
    session = self.session if session is None else session
    try:
        yield self
        session.commit()
    except IntegrityError as e:
        session.rollback()
        # duplicate-key violations get a dedicated exception type
        if "Duplicate" in str(e):
            raise DBDuplicateKeyError(e)
        else:
            raise DBError(e)
    except DatabaseError as e:
        session.rollback()
        aelog.exception(e)
        raise DBError(e)
    except Exception as e:
        session.rollback()
        aelog.exception(e)
        raise HttpError(400, message=self.message[2][self.msg_zh], error=e)
async def update_session(self, session: Session, dump_responses: bool = False,
                         ex: int = SESSION_EXPIRED) -> NoReturn:
    """
    Refresh a session stored as a redis hash map.

    Args:
        session: Session instance
        dump_responses: whether to json-dump each hash value
        ex: expiry time, in seconds
    Returns:

    """
    data = await self.response_dumps(dump_responses, session)
    try:
        if not await self.redis_db.hmset(data["session_id"], data):
            raise RedisClientError("update session failed, session_id={}".format(data["session_id"]))
        if not await self.redis_db.expire(data["session_id"], ex):
            # expire failure is logged but not fatal
            aelog.error("set session expire failed, session_id={}".format(data["session_id"]))
    except RedisError as e:
        aelog.exception("update session error: {}, {}".format(data["session_id"], e))
        raise RedisClientError(str(e))
def remove_apscheduler():
    """
    Remove the apscheduler marker saved in redis (db 2).

    Args:

    Returns:

    """
    client = None
    try:
        client = redis.StrictRedis(
            host=app_.config["ECLIENTS_REDIS_HOST"],
            port=app_.config["ECLIENTS_REDIS_PORT"],
            db=2,
            password=app_.config["ECLIENTS_REDIS_PASSWD"],
            decode_responses=True)
    except RedisError as err:
        aelog.exception(err)
    else:
        # best effort: ignore any error while deleting the marker
        with ignore_error():
            client.delete("apscheduler")
            aelog.info(f"当前进程{os.getpid()}清除redis[2]任务标记[apscheduler].")
    finally:
        if client:
            client.connection_pool.disconnect()
def verify_schema(schema_cls, json_data: Union[List[Dict], Dict], required: Union[Tuple, List] = tuple(),
                  excluded: Union[Tuple, List] = tuple(), is_extends: bool = True,
                  message: Dict = None) -> Union[List[Dict], Dict]:
    """
    Validate that posted json data matches the schema's shape and types.

    Intended for in-handler validation rather than decorator-based validation.

    Args:
        schema_cls: the schema class to validate against
        json_data: json data
        required: fields to force-mark as required
        excluded: fields to drop from the validated result
        is_extends: whether to keep the schema's own required flags for
            fields not listed in `required`, default True
        message: error message mapping, defaults to schema_msg
    Returns:
        the validated (deserialized) data
    Raises:
        HttpError: validation failed
    """
    schema_obj = schema_cls(unknown=EXCLUDE)
    if required:
        for key, val in schema_obj.fields.items():
            if key in required:
                # During deserialization, mark the explicitly requested
                # fields as loadable/required
                setattr(schema_obj.fields[key], "dump_only", False)
                schema_obj.load_fields[key] = schema_obj.fields[key]
            elif not is_extends:
                setattr(schema_obj.fields[key], "required", False)
    try:
        valid_data = schema_obj.load(json_data, unknown=EXCLUDE)
        # Drop unwanted fields after load — mainly to strip fields that
        # are not allowed to be modified
        if excluded and isinstance(valid_data, dict):
            for val in excluded:
                valid_data.pop(val, None)
    except ValidationError as err:
        message = schema_msg if message is None else message
        aelog.exception('Request body validation error, please check! error={}'.format(err.messages))
        raise HttpError(400, message=message[201]["msg_zh"], error=err.messages)
    except Exception as err:
        message = schema_msg if message is None else message
        aelog.exception("Request body validation unknow error, please check!. error={}".format(str(err)))
        raise HttpError(400, message=message[202]["msg_zh"], error=str(err))
    else:
        return valid_data
def callback_done(fn):
    """
    Thread-pool completion callback: log the task's result, or the
    exception it raised.

    Args:
        fn: the finished future
    Returns:

    """
    try:
        result = fn.result()
    except Exception as err:
        aelog.exception("error,{} return result: {}".format(task_name, err))
    else:
        aelog.info("{} return result: {}".format(task_name, result))