def insert_multi_data(modelList, dataDict):
    """Seed initial data for each model in *modelList*.

    :param modelList: iterable of ORM model classes
    :param dataDict: mapping of model.__name__ -> list of row dicts

    Models whose table already contains rows are skipped, so re-running
    this on startup is idempotent.
    """
    for model in modelList:
        if model.select().count() > 0:
            # Never overwrite data that is already present.
            logging.debug(f"{model.__name__} already had data, so continue")
            continue
        # Direct dict lookup instead of scanning every (key, value) pair
        # of dataDict for each model.
        rows = dataDict.get(model.__name__)
        if rows is not None:
            insert_single_data(model, rows)
def insert_single_data(model, dataList, chunk_size=100):
    """Bulk-insert *dataList* into *model* in chunks inside one transaction.

    :param model: ORM model class to insert into
    :param dataList: list of row dicts
    :param chunk_size: rows per insert_many statement (default 100)

    Best-effort: any database error is logged and swallowed; the
    surrounding atomic() rolls the transaction back on failure.
    """
    with mysql_client.allow_sync():
        with mysql_client.atomic():
            try:
                logging.debug(f"start insert data to {model}")
                for i in range(0, len(dataList), chunk_size):
                    chunk = dataList[i:i + chunk_size]
                    logging.debug(f"data: {chunk}")
                    model.insert_many(chunk).execute()
            except Exception:
                # Was BaseException, which also swallowed
                # KeyboardInterrupt/SystemExit; Exception is narrower and
                # still covers all database errors.
                logging.error(traceback.format_exc())
async def init_db(self) -> Redis:
    """Return a cached aioredis client for this config, creating it on first use.

    The cache key is "<address>_<db>", so each distinct target gets one
    connection pool per process.
    """
    peer_conn = "_".join([self.config["address"], str(self.config["db"])])
    cached = self.__conn.get(peer_conn)
    if cached:
        self.client = cached
    else:
        self.client = await aioredis.create_redis_pool(**self.config)
        self.__conn[peer_conn] = self.client
        logging.debug(f"connect redis {peer_conn} successful")
    return self.__conn[peer_conn]
def value(self):
    """Assemble the standard response payload for this result object.

    Returns a dict with "code", "msg", "time" and "data" keys; any extra
    fields held in self.kwargs are merged on top.
    """
    payload = {
        "code": self.code,
        "msg": self.message,
        "time": str(time.time()),
        "data": self.data,
    }
    # Optional extra fields override/extend the standard keys.
    if self.kwargs:
        payload.update(self.kwargs)
    logging.debug(f"return data: {payload}")
    return payload
def app(options):
    """Build and run the HTTP server until interrupted.

    :param options: parsed options; must expose .port and .env

    Side effects: syncs routed URIs to the database, binds
    0.0.0.0:<port>, starts the IOLoop; on any interruption closes the
    controller and stops the loop.
    """
    settings = make_settings(options)
    app = BaseApp(handlers_loads(), settings)
    # Persist the currently routed URIs to the database.
    sync_uri(handlers_loads())
    server = httpserver.HTTPServer(app)
    server.listen(options.port, address="0.0.0.0")
    try:
        # Was a mixed f-string + "%"-format expression; a single f-string
        # produces the identical message and is clearer.
        logging.debug(f"start {Constant.PROJECT} {options.env} success,"
                      f" at port [{options.port}]")
        ioloop.IOLoop.instance().start()
    except BaseException:
        # BaseException is deliberate here: Ctrl-C / SystemExit must also
        # trigger the graceful shutdown below.
        settings["controller"].close()
        ioloop.IOLoop.instance().stop()
        logging.debug(f"{Constant.PROJECT} loop safe stop")
def sysnc_fetch(self, req):
    """Synchronously execute *req* and decode its JSON body.

    Best-effort: on any failure the error is logged and a generic 503
    payload is returned instead of raising.
    """
    rpc_data = {"code": CODE_503, "msg": "外部接口调用异常", "time": time.time()}
    try:
        response = HTTPClient().fetch(req)
        rpc_data = ujson.loads(response.body)
        logging.debug(f"response: {response}, type: {type(response)}")
    except Exception:
        # Was `except BaseException` with `return rpc_data` inside a
        # `finally:` block -- a return in finally also swallows any
        # exception raised by the handler itself.
        if req.method == "GET":
            logging.error("route: {} return error".format(req.url))
        else:
            # Guard the body decode so logging a malformed body cannot
            # mask the original error.
            try:
                param = ujson.loads(req.body)
            except Exception:
                param = req.body
            logging.error("route: {}, param: {} return error".format(req.url, param))
        logging.error(traceback.format_exc())
    return rpc_data
def init_db(config: dict) -> PooledMySQLDatabase:
    """Return a pooled MySQL database for *config*, reusing cached pools.

    The cache key is "<host>_<port>_<database>", so each distinct target
    gets exactly one pool per process.
    """
    peer_conn = "_".join(
        [config["host"], str(config["port"]), config["database"]])
    if not AsyncMySQLConnect.__conn.get(peer_conn):
        pool = PooledMySQLDatabase(
            database=config["database"],
            max_connections=config["max_connections"],
            host=config["host"],
            user=config["user"],
            password=config["password"],
            port=config["port"],
        )
        AsyncMySQLConnect.__conn[peer_conn] = pool
        logging.debug(f"connect mysql {peer_conn} successful")
    return AsyncMySQLConnect.__conn[peer_conn]
def init_db(self):
    """Create (or reuse) the Motor client for this mongodb config.

    Clients are cached per "<host>_<port>[_<user>]"; both the client and
    the config used to build it are stored so later callers restore them
    together.
    """
    key_parts = [self.config["host"], str(self.config["port"])]
    if self.config["user"]:
        key_parts.append(self.config["user"])
    self.peer_conn = "_".join(key_parts)
    entry = self.__conn.get(self.peer_conn)
    if entry:
        # Cache hit: restore client and config as a pair.
        self.client = entry["client"]
        self.config = entry["config"]
    else:
        self.client = AsyncIOMotorClient(
            self._connect_url(), serverSelectionTimeoutMS=5000)
        self.__conn.setdefault(
            self.peer_conn,
            {"config": self.config, "client": self.client})
        logging.debug(f"connect mongodb {self.peer_conn} successful")
def connect_redis(self):
    """Try to connect to redis, retrying up to self.retry times.

    :return: a redis.Redis client, or None if every attempt failed.
    """
    r, i = None, 0
    while i < self.retry:
        try:
            pool = redis.ConnectionPool(**self.kwags)
            r = redis.Redis(connection_pool=pool, decode_responses=True)
            if not r:
                # NOTE(review): redis.Redis() is always truthy, so this
                # branch is effectively dead; a real liveness probe would
                # be r.ping() -- confirm before changing behavior.
                logging.debug("第[%d]连接失败,继续" % i)
            else:
                # BUG FIX: was logging.debug("...: {}", format(r)) -- the
                # stray comma logged the literal "{}" placeholder.
                logging.debug("success connect redis: {}".format(r))
                break
        except Exception:
            # Exception (not BaseException) so Ctrl-C still aborts.
            logging.error(traceback.format_exc())
            time.sleep(1)
        i += 1
    return r
def wrapper(self, *args, **kwargs):
    """Validate request params, time the wrapped handler, and marshal its
    ReturnData into the matching pb2 response class."""
    started = time.time()
    request = args[0]
    pb2 = dkwargs["service_pod"]
    scheme = dkwargs["scheme"]
    response_name = dkwargs.get("response_function")
    # 校验参数 -- pull the declared fields off the request for logging.
    if scheme:
        checked = {field: getattr(request, field, None) for field in scheme}
        logging.debug("=" * 14 + f"params: {checked}" + "=" * 14)
        self.parameter = checked
    result = func(self, *args, **kwargs)
    # 指定方法返回 -- default to "<handler>Response" when no explicit
    # response class name is supplied.
    if not response_name:
        response_name = "".join([func.__name__, "Response"])
    response_cls = getattr(pb2, response_name, None)
    reply = response_cls(**result.value)
    finished = time.time()
    logging.debug(f"interface: {func.__name__} cost_time: {finished - started}")
    return reply
def complete_table():
    """Create any mysql tables whose models exist but whose tables do not.

    :return: None
    """
    missing = find_orm()
    with mysql_client.allow_sync():
        with mysql_client.atomic():
            logging.debug(f"Missing models: "
                          f"{[model.__name__ for model in missing]}")
            if missing:
                logging.debug("start create tables...")
                mysql_client.create_tables(missing)
                logging.debug("end create tables")
    logging.debug("complete_table done")
def init_db(self):
    """Return a cached redis client for "<host>_<port>_<db>", retrying the
    initial connection up to config["retry"] times."""
    peer_db = "_".join([
        self.config["host"],
        str(self.config["port"]),
        str(self.config["db"]),
    ])
    cached = self.__conn.get(peer_db)
    if cached:
        self.client = cached
        return self.client
    # "retry" is popped so the remaining config maps 1:1 onto
    # ConnectionPool keyword arguments.
    retry = self.config.pop("retry")
    self.client, attempt = None, 0
    while attempt < retry:
        try:
            pool = redis.ConnectionPool(**self.config)
            self.client = redis.Redis(connection_pool=pool)
            if self.client:
                logging.debug("redis connect successful")
                break
            logging.warning("第[%d]连接失败,继续" % attempt)
        except BaseException:
            logging.error(traceback.format_exc())
            time.sleep(1)
        attempt += 1
    self.__conn[peer_db] = self.client
    return self.client
def registerUnified(self, request, context):
    """Register a wx unified-pay account; idempotent on the derived puid.

    puid = md5(channel + appid + mch_id + secret + nonce_str); an
    already-registered puid short-circuits with "账户已注册".
    """
    display = (
        WxUnifiedConfig.status, WxUnifiedConfig.puid,
        WxUnifiedConfig.nonce_str, WxUnifiedConfig.secret,
        WxUnifiedConfig.mch_id, WxUnifiedConfig.appid,
        WxUnifiedConfig.expire_time, WxUnifiedConfig.channel,
    )
    channel = self.parameter["channel"]
    appid = self.parameter["appid"]
    mch_id = self.parameter["mch_id"]
    secret = self.parameter["secret"]
    nonce_str = self.parameter["nonce_str"]
    salt = "".join([channel, appid, mch_id, secret, nonce_str])
    puid = DealEncrypt.hash_md5_encrypt(salt)
    logging.debug(f"puid: {puid}")
    _user = WxUnifiedConfig.get_or_none(puid=puid)
    if _user:
        return ReturnData(CODE_0, msg="账户已注册", data={})
    insert_dict = {
        "puid": puid,
        "channel": channel,
        "appid": appid,
        "mch_id": mch_id,
        "secret": secret,
        "nonce_str": nonce_str,
    }
    _id = WxUnifiedConfig.insert(**insert_dict).execute()
    if not _id:
        # BUG FIX: the original built this ReturnData but fell through
        # without returning it, so a failed insert continued as success.
        return ReturnData(CODE_0, msg="注册失败, 请稍后再试", data={})
    puser = WxUnifiedConfig.get(puid=puid)
    puser = model_to_dict(puser, only=display)
    # NOTE(review): puser is passed positionally where sibling calls pass
    # msg=; confirm ReturnData's second positional parameter is the
    # intended slot for this payload.
    return ReturnData(CODE_1, puser)
async def async_wrapper(self, *args, **kwargs):
    """Run the group checks, invoke the wrapped handler, then finish the
    response with request-id and duration logging."""
    request_id = self.parameter.get("request_id", "")
    logging.debug(f"parameter: {self.parameter}")
    try:
        check_code = await group_check(self, schema, login_check, api_restrict)
        if check_code == CODE_0:
            return_data = await func(self, *args, **kwargs)
        else:
            # Checks failed -- short-circuit with the failure code.
            return_data = ReturnData(check_code)
    except Exception as e:
        logging.error(e)
        logging.error(traceback.format_exc())
        return_data = ReturnData(CODE_500, trace=str(e))
    return_data.request_id = request_id
    # Milliseconds elapsed since the request was first seen.
    elapsed_ms = round(
        (datetime_now() - self.parameter["request_time"]).total_seconds() * 1000,
        3)
    logging.debug(f"{self.request.method} {self.request.path} "
                  f"{request_id} duration: {elapsed_ms}ms")
    send_message(self, return_data.value)
    await self.finish(return_data.value)
    return
def init_db(self):
    """Connect to mongodb (optionally authenticated) and cache the client.

    :return: a pymongo.MongoClient, cached per "<host>_<port>[_<user>]".
    """
    host = self.config["host"]
    port = self.config["port"]
    user = self.config["user"]
    password = self.config["password"]
    auth_db = self.config["auth_db"]
    peer_conn = "_".join([host, str(port)])
    if user:
        peer_conn += "_" + user
    if self.__conn.get(peer_conn):
        self.client = self.__conn[peer_conn]
        return self.client
    url = "mongodb://"
    # (The original assigned this twice; once is enough.)
    domain = "{host}:{port}/".format(host=host, port=port)
    if user and password and auth_db:
        # Percent-encode credentials so special characters survive the URI.
        authentication = "{username}:{password}@".format(
            username=quote_plus(user), password=quote_plus(password))
        param = "?authSource={auth_db}".format(auth_db=auth_db)
        url = "".join([url, authentication, domain, param])
    else:
        url = "".join([url, domain])
    self.client = pymongo.MongoClient(url, serverSelectionTimeoutMS=5000)
    # BUG FIX: the original never cached the new client nor returned it,
    # so the cache branch above could never hit and first callers got None.
    self.__conn[peer_conn] = self.client
    logging.debug(f"mongodb connect successful")
    return self.client
def make_producer(self):
    """Build a KafkaProducer from the "kafka_cluster" yaml config.

    Retries (with a 1s pause) until bootstrap_connected() reports a live
    cluster; a producer that failed to connect is closed before retrying.

    :return: a connected KafkaProducer
    """
    kafka_config = Common.yaml_config("kafka_cluster")
    connect_config = {
        # Keys and values are both shipped as UTF-8 JSON.
        "key_serializer": lambda v: ujson.dumps(v).encode('utf-8'),
        "value_serializer": lambda v: ujson.dumps(v).encode('utf-8'),
        "max_block_ms": 15000,
    }
    if all([
        kafka_config["sasl_plain_username"],
        kafka_config["sasl_plain_password"]
    ]):
        # SASL credentials present: pass the full cluster config through.
        connect_config.update(kafka_config)
    else:
        connect_config.update(
            bootstrap_servers=kafka_config["bootstrap_servers"])
    while True:
        producer = KafkaProducer(**connect_config)
        if producer.bootstrap_connected():
            logging.debug(f"connect kafka cluster "
                          f"{kafka_config['bootstrap_servers']} successful")
            return producer
        # BUG FIX: the original leaked the dead producer and busy-looped;
        # close it and back off briefly before the next attempt.
        producer.close()
        logging.debug("will retry connect kafka")
        time.sleep(1)
def sync_uri(handlers: list): now = time.strftime("%Y-%m-%d %X") # 更新接口到数据库 with mysql_client.allow_sync(): with mysql_client.atomic(): existing_config = UriConfig.select().dicts() existing_path = { config["path"]: config for config in existing_config } running_path = set() for handler in handlers: path = handler.matcher._path pattern = handler.regex.pattern running_path.add(path) _config = existing_path.get(path) _last = UriConfig.select().order_by(-UriConfig.code).first() code = _last.code + 1 if _last else 10 << 10 name = handler.name description = handler.handler_class.__doc__ method = ",".join(handler.handler_class.SUPPORTED_METHODS) # 注册新接口 if not _config: insert_dict = { "code": code, "path": path, "name": name, "description": description.replace(' ', "") if description else "", "method": method, "regex": 1 if pattern else 0, "pattern": pattern } UriConfig.insert(insert_dict).execute() code += 1 else: update_dict = { "name": name, "description": description.replace(' ', "") if description else "", "method": method, "regex": 1 if pattern else 0, "pattern": pattern, "status": 1, "update_time": now, } UriConfig.update(update_dict).where( UriConfig.path == path).execute() effect_existing_path = { path for path in existing_path if existing_path[path]["status"] == 1 } disabled_path = list(effect_existing_path - running_path) if disabled_path: UriConfig.update({ "status": 0, "update_time": now }).where(UriConfig.path << disabled_path).execute() logging.debug(f"sync uri config done")
# NOTE(review): these field definitions and the Meta class appear to belong
# to an enclosing model class whose header is outside this view -- confirm
# their indentation against the full file.
qq_no = CharField(null=True, max_length=16, verbose_name="qq号")
wechat = CharField(null=True, max_length=32, verbose_name="微信号")
email = CharField(null=True, max_length=32, verbose_name="电子邮箱")

class Meta:
    verbose_name = "demo用户表"
    table_name = "demo_user"
    # Unique composite index on (uid, status).
    indexes = ((("uid", "status"), True), )


def generate_subclass(sub_model: list, list_model: list) -> list:
    """Collect the leaf subclasses reachable from *sub_model*.

    :param sub_model: model classes to walk recursively
    :param list_model: accumulator (mutated in place and returned)
    :return: deduplicated list of classes with no further subclasses
    """
    for item in sub_model:
        if item.__subclasses__():
            generate_subclass(item.__subclasses__(), list_model)
        # BUG FIX: the original tested `item.__name__ not in list_model`,
        # comparing a str against a list of classes -- the dedup check
        # never matched, so duplicates could accumulate. Compare the
        # class object itself.
        if item not in list_model and len(item.__subclasses__()) == 0:
            list_model.append(item)
    return list_model


if __name__ == '__main__':
    sub_model = BaseModel.__subclasses__()
    list_model = generate_subclass(sub_model, [])
    # Only create tables that don't exist yet and that declare verbose_name.
    list_model = [
        item for item in list_model
        if not item.table_exists() and "verbose_name" in item._meta.__dict__
    ]
    logging.debug("Start create models: " +
                  ",".join([item.__name__ for item in list_model]))
    database_proxy.create_tables(list_model)
async def fetch_json(self, method, url, **kwargs):
    """Issue an HTTP request through self.session and return the decoded
    JSON body."""
    logging.debug(f"{method} {url} {kwargs}")
    async with self.session.request(method, url, **kwargs) as resp:
        payload = await resp.json()
    return payload