class EnvSource(Source):
    """Config source that reads options from the process environment."""

    def __init__(self, prefix: str = None, aliases: Map = None):
        # prefix: namespace prefix stripped from env var names on load.
        # aliases: optional alias -> key mapping, filled via set_aliases().
        self.prefix = prefix
        self.aliases = aliases

    def load(self) -> Map:
        """Read os.environ into a nested Map keyed by lower-cased names."""
        opts = Map()
        for name, value in os.environ.items():
            opt = Option(name=name, value=value)
            key = opt.name.lower()
            if self.prefix:
                # BUGFIX: the original used key.strip(self.prefix), but
                # str.strip removes any characters from the argument *set*
                # at both ends of the string, not a literal prefix.
                p = self.prefix.lower()
                if key.startswith(p):
                    key = key[len(p):]
            merge_option(opts, key, opt.value)
        return opts

    def set_prefix(self, prefix: str = None):
        self.prefix = prefix

    def set_aliases(self, alias: str = None, key: str = None):
        """Record that alias resolves to key, creating the Map lazily."""
        if self.aliases is None:
            self.aliases = Map()
        self.aliases.set(alias, key)
def load_source(t: str, d: bytes) -> Map:
    """Parse raw config bytes of type t ("yaml"/"yml"/"json") into a Map."""
    if t in ("yaml", "yml"):
        parsed = yaml.load(d, Loader=yaml.FullLoader)
        return Map.from_dict(d=parsed)
    if t == "json":
        return Map.from_dict(d=json.loads(d))
    raise SourceTypeError("unsupported config type")
class Factory():
    """Database connection factory: lazily builds Database objects from
    the bee.data.sql configuration section."""

    name = "psd.Factory"

    def __init__(self, ):
        self.options = {}   # name -> DBOptions
        self.dbMap = Map()  # name -> Database

    def open(self, name):
        """Return the Database registered under name, initializing on first use."""
        db = self.dbMap.get(name)
        if not db:
            self.init_db(name)
            db = self.dbMap.get(name)
        return db

    def init_db(self, name):
        db = self.dbMap.get(name)
        if db is not None:
            return db
        # Load the config file; TODO(review): extract this step later.
        self._load_conf_file()

    def _load_conf_file(self):
        """Build one DBOptions per entry under bee.data.sql and register it."""
        conf = config.get("bee.data.sql")
        for key, section in conf.items():
            options = DBOptions()
            options.name = key
            options.cover(Map.from_dict(section))
            self._add_db(key, options)

    def _add_db(self, name, options=None):
        # BUGFIX: the shared mutable default DBOptions() is replaced with a
        # None sentinel; a missing options object is now a no-op.
        if options is None:
            return
        print(
            "opts=(name=%s, provider=%s, max_open_conns=%s, max_idle_conns=%s, trace=%s)"
            % (options.name, options.provider, options.max_open_conns,
               options.max_idle_conns, options.trace))
        self.options[name] = options
        self.dbMap[name] = Factory.build_database(options)

    @staticmethod
    def build_database(options=None):
        """Create a Database whose provider is selected by options.provider."""
        # BUGFIX: mutable default DBOptions() replaced with None sentinel.
        if options is None:
            return None
        database = Database(name=options.name)
        database.opts = options
        if options.provider == "mysql":
            database.p = MysqlProvider()
        elif options.provider == "mssql":
            database.p = MssqlProvider()
        elif options.provider == "sqlite":
            database.p = SqliteProvider()
        return database
def max_msg_size(self, opts: "Map"):
    """Resolve the maximum message size from opts.

    A missing, None, or 0 value falls back to
    const.default_max_message_size.
    """
    size = None
    if opts is not None:
        # Look up once instead of twice.
        raw = opts.get("max_msg_size")
        if raw is not None:
            size = int(raw)
    if not size:  # covers both None and 0, as the original did
        size = const.default_max_message_size
    return size
class Factory():
    """Thread-safe factory caching one mongo Client per configured name."""

    def __init__(self):
        self.sem = BoundedSemaphore(1)
        self.clients = Map()  # name -> Client

    def open(self, name: str) -> Client:
        """Return (creating on first use) the client for name."""
        self.sem.acquire()
        try:
            client = self.clients.get(name)
            if client is None:
                client = self.create(name)
        finally:
            # BUGFIX: release the semaphore even when create() raises;
            # previously an exception left it held forever (deadlock).
            self.sem.release()
        return client

    def create(self, name: str) -> Client:
        """Build, cache, and return a Client; None when config is missing."""
        client = self.clients.get(name)
        if client is not None:
            return client
        opts = self.load_options(name)
        if opts is None:
            return None
        # Map the configured read preference onto a pymongo mode.
        mode = Primary()
        if opts.read_preference == READ_PREFERENCE_PRIMARY_PREFERRED:
            mode = PrimaryPreferred()
        elif opts.read_preference == READ_PREFERENCE_SECONDARY:
            mode = Secondary()
        elif opts.read_preference == READ_PREFERENCE_SECONDARY_PREFERRED:
            mode = SecondaryPreferred()
        elif opts.read_preference == READ_PREFERENCE_NEAREST:
            mode = Nearest()
        kwargs = {
            "read_preference": mode,
            "maxPoolSize": opts.max_pool_size,
            "minPoolSize": opts.min_pool_size,
            "socketTimeoutMS": opts.socket_time_out,
            "connectTimeoutMS": opts.connect_time_out,
        }
        _client = MongoClient(host=opts.uri, **kwargs)
        client = Client(db=name, c=_client, opts=opts)
        self.clients.set(name, client)
        return client

    def load_options(self, name) -> Options:
        """Load Options from config key bee.data.mongo.<name>, or None."""
        key = "bee.data.mongo." + name
        if not config.exist(key):
            return None
        opts = Options()
        opts.cover(config.get(key))
        return opts
def load(self, force: bool):
    """Build self.options from env, file, and registered sources.

    Runs only once unless force is True.
    """
    if self.loaded and not force:
        return None
    self.options = Map()
    # 1) environment-variable source
    self.load_source(self.env)
    # 2) config-file sources discovered on disk
    self.load_source(*self.find_file_sources())
    # 3) explicitly registered sources
    self.load_source(*self.srcs)
    self.loaded = True
def __init__(self, opts: ServerOptions, registry: Registry = None):
    """Set up server options, registry, and service/session containers."""
    self.opts = opts
    if registry is None:
        # No registry supplied: register directly at the configured address.
        self.registry = DirectRegistry(url=opts.address.url)
    else:
        self.registry = registry
    # Container for registered service instances.
    self.services = Map()
    # Container for live sessions.
    self.sessions = SessionMap()
    # Initialize the codec builder for the configured matcher.
    self.init_codec_builder()
def new_Translator(lang: str, file_path) -> ITranslator:
    """Load a YAML translation file and wrap it in a Translator.

    :param lang: language tag the translator is registered under.
    :param file_path: path to the YAML file.
    """
    # NOTE(review): yaml.load with FullLoader is unsafe on untrusted files;
    # prefer yaml.safe_load if the path can come from outside.
    with open(file_path, "rb") as f:
        content = yaml.load(f.read(), Loader=yaml.FullLoader)
    # (The original also computed the file extension here but never used it.)
    m = Map.from_dict(d=content)
    return Translator(lang=lang, m=m)
def new_server_codec(self, sock) -> IServerCodec:
    """Create a server-side codec for sock using the configured matcher.

    :raises ServerLoadError: when no matcher is set or its codec is missing.
    """
    m = self.opts.macher
    if m is None:
        raise ServerLoadError("no macher!")
    b = get_codec(m)
    if not b:
        raise ServerLoadError("init codec builder error!")
    return b.new_server_codec(s=Channel(id=Guid().string(), sock=sock), opts=Map())
def _load_conf_file(self):
    """Build one DBOptions per entry under bee.data.sql and register it."""
    conf = config.get("bee.data.sql")
    for name, section in conf.items():
        options = DBOptions()
        options.name = name
        # cover() copies the section's fields onto the options object.
        options.cover(Map.from_dict(section))
        self._add_db(name, options)
def load(self) -> Map:
    """Read os.environ into a nested Map keyed by lower-cased names."""
    opts = Map()
    for name, value in os.environ.items():
        opt = Option(name=name, value=value)
        key = opt.name.lower()
        if self.prefix:
            # BUGFIX: the original used key.strip(self.prefix); str.strip
            # removes any characters from the argument *set* at both ends,
            # not a literal prefix. Remove the real prefix instead.
            p = self.prefix.lower()
            if key.startswith(p):
                key = key[len(p):]
        merge_option(opts, key, opt.value)
    return opts
def __init__(self):
    """Initialize an empty, not-yet-loaded configuration holder."""
    # Load state flags.
    self.loaded: bool = False
    self.autoLoad: bool = False
    # Sources: environment first, then explicitly registered ones.
    self.env: EnvSource = EnvSource()
    self.srcs: List[Source] = []
    # Merged result, built by load().
    self.options: Map = None
    # File-discovery settings.
    self.profiles: List[str] = []
    self.dirs: List[str] = []
    self.name: str = None
    # Fallback values consulted when a key is absent.
    self.defaults: Map = Map()
def init_config(self):
    """Wire the registry and per-name rpc clients from configuration."""
    registry_conf = config.get("bee.rpc.registry")
    server = Server()
    # NOTE(review): wholesale __dict__ replacement trusts the config shape
    # completely — confirm the section always matches Server's attributes.
    server.__dict__ = registry_conf
    self.registry = Builder.build(server)
    client_conf = config.get("bee.rpc.client")
    if client_conf is not None:
        for name, section in dict(client_conf).items():
            opts = ClientOptions()
            opts.name = name
            opts.cover(Map.from_dict(section))
            self._clients[name] = Client(opts=opts, registry=self.registry)
def find_list(self, query, args=None):
    """Execute query and return its columns and all rows in a Map.

    :param query: SQL using '?' placeholders (translated to '%s' for the
        underlying driver).
    :param args: bind parameters; defaults to no parameters.
    :return: Map with "cols" (cursor.description) and "data" (all rows).
    """
    # BUGFIX: mutable default list replaced with a None sentinel.
    if args is None:
        args = []
    cursor = self.conn.cursor()
    try:
        cursor.execute(query.replace("?", "%s"), args)
        result = Map()
        result.set("cols", cursor.description)
        result.set("data", cursor.fetchall())
        return result
    finally:
        # Release the cursor even when the driver raises.
        cursor.close()
def get(self, lang: str) -> ITranslator:
    """Return a Translator for the first <dir>.yml file that exists.

    Returns None when no candidate file is found.
    """
    for d in self.get_dirs():  # renamed from `dir` (shadowed a builtin)
        file_name = d + ".yml"
        if not files.exist(file_name):
            continue
        with open(file_name, "rb") as f:
            content = yaml.load(f.read(), Loader=yaml.FullLoader)
        # (Removed unused extension computation and the unreachable second
        # `return None` the original carried after the loop's return.)
        return Translator(lang=lang, m=Map.from_dict(d=content))
    return None
def __init__(
        self,
        name: str = None,
        provider: str = "mysql",
        max_open_conns: int = 100,
        max_idle_conns: int = 5,
        trace=None,
        options=None):
    """Database options with mysql-flavored defaults.

    BUGFIX: trace/options previously defaulted to a single shared Map()
    instance that every DBOptions created without arguments mutated in
    common; each instance now gets its own Map.
    """
    self.name = name
    self.provider = provider
    self.max_open_conns = max_open_conns
    self.max_idle_conns = max_idle_conns
    self.trace = trace if trace is not None else Map()
    self.options = options if options is not None else Map()
    # Connection defaults; normally overwritten by cover() from config.
    self.host = "127.0.0.1"
    self.port = 3306
    self.database = "test"
    self.user = "******"
    self.password = "******"
    self.charset = "utf8mb4"
def cover_option(opts: "Map", k: str, v):
    """Place value v at dotted key k inside the nested Map opts.

    Empty path segments (leading/trailing/double dots) are ignored.
    For multi-segment keys the leaf is written with setdefault (existing
    leaf values are kept); a single-segment key overwrites via opts.set.
    """
    # (Removed the large commented-out earlier draft and a stray debug
    # print(opts) the original left in the descend branch.)
    keys = [part for part in k.split(".") if part != ""]
    last = len(keys) - 1
    if len(keys) > 1:
        for i, key in enumerate(keys):
            if i < last:
                # Descend one level, creating intermediate Maps as needed.
                if opts.get(key) is None:
                    opts = opts.setdefault(key, Map())
                else:
                    opts = opts.get(key)
            else:
                opts.setdefault(key, v)
    else:
        opts.set(k, v)
def auto_server():
    """Start one rpc Server per entry under bee.rpc.server, registered
    with the registry described by bee.rpc.registry."""
    if config.exist("debug"):
        # NOTE(review): this binds a *local* that is never read afterwards;
        # a module-level `global is_debug` was possibly intended — confirm.
        is_debug = config.get("debug")
    registry_server = RegistryServer()
    registry_server.__dict__ = config.get("bee.rpc.registry")
    registry = Builder.build(registry_server)
    server_conf = config.get("bee.rpc.server")
    if server_conf is not None:
        for name, section in dict(server_conf).items():
            opts = ServerOptions()
            opts.name = name
            opts.cover(Map.from_dict(section))
            srv = Server(opts=opts, registry=registry)
            srv.startup()
            logging.info("bee.rpc > server start success")
class SessionMap():
    """Keeps live sessions indexed by their channel id."""

    def __init__(self):
        self.channels = Map()

    def add(self, s: "session"):
        """Index session s under its own id."""
        self.channels.set(s.id(), s)

    def get(self, id: str):
        # BUGFIX: the original looked the session up but never returned it,
        # so get() always yielded None.
        return self.channels.get(id)

    def remove(self, id: str):
        self.channels.remove(id)

    def count(self):
        """Number of tracked sessions."""
        return len(self.channels)
def call(self, service: str, method: str, args=None) -> Result:
    """Invoke a remote method over a pooled connection and return its Result.

    :param service: remote service name
    :param method: remote method name
    :param args: positional arguments forwarded to the remote method
    :raises CodedError: wrapping any transport/codec failure
    """
    # BUGFIX: mutable default [] replaced with a None sentinel.
    if args is None:
        args = []
    print("Node.call(service={}, method={}, args={})".format(service, method, str(args)))
    try:
        with self.pool.connection() as client:
            if not client.is_connected():
                client.open()
            sock = client.socket
            rh = RequestHead()
            # Monotonic per-node request id.
            rh.id = self.count
            self.count += 1
            rh.service = service
            rh.method = method
            rh.labels = []
            r = Request(head=rh, args=args)
            cc = self._cb.new_client_codec(
                s=Channel(id=Guid().string(), sock=sock), opts=Map())
            cc.encode(req=r)
            head = ResponseHead()
            cc.decode_head(head)
            rt = Result()
            cc.decode_result(rt)
            return rt
    except Exception as e:
        # Count failures; report once the threshold is reached.
        self.error_count += 1
        if self.error_count >= default_max_error_count:
            self.report_error()
        # Chain the original cause for debuggability.
        raise CodedError(code=-1, message=e.__str__(), detail=e.__str__()) from e
class Server():
    """RPC server: accepts stream connections, decodes requests, and
    dispatches them to registered service instances."""

    def __init__(self, opts: ServerOptions, registry: Registry = None):
        self.opts = opts
        if registry is None:
            # No registry supplied: register directly at the configured address.
            self.registry = DirectRegistry(url=opts.address.url)
        else:
            self.registry = registry
        # Container for registered service instances.
        self.services = Map()
        # Container for live sessions.
        self.sessions = SessionMap()
        # Initialize the codec builder for the configured matcher.
        self.init_codec_builder()

    @staticmethod
    def new_server(name: str = None, macher: str = "proto",
                   address: Address = Address(url="127.0.0.1:9000"),
                   registry: Registry = None) -> "Server":
        """Convenience constructor that builds the ServerOptions first."""
        opts = ServerOptions(name=name, macher=macher, address=address,
                             version="1.0.0")
        if is_debug:
            logging.debug("bee.rpc > init server's options")
        s = Server(opts=opts, registry=registry)
        if is_debug:
            logging.debug("bee.rpc > new server...")
        return s

    def register_service(self, ins):
        """Register an rpc service instance under its class name."""
        if not ins:
            return
        self.services.set(ins.__class__.__name__, ins)

    def handle(self, sock, address):
        """Per-connection loop: serve requests until the socket closes."""
        print("address= " + str(address))
        while True:
            if sock.closed:
                print("closed...")
                break
            self.handle_request(sock, address)
        sock.close()

    def init_codec_builder(self):
        """Ensure the codec matching self.opts.macher is registered."""
        if self.opts.macher == "http":
            pass
        elif self.opts.macher == "json":
            if not get_codec("json"):
                import bee.net.rpc.codecs.json.json as json
                json.init()
        else:  # default: proto
            if not get_codec("proto"):
                import bee.net.rpc.codecs.proto.proto as proto
                proto.init()

    def new_server_codec(self, sock) -> IServerCodec:
        """Create a server-side codec bound to sock.

        :raises ServerLoadError: when no matcher or codec is available.
        """
        m = self.opts.macher
        if m is None:
            raise ServerLoadError("no macher!")
        b = get_codec(m)
        if not b:
            raise ServerLoadError("init codec builder error!")
        return b.new_server_codec(s=Channel(id=Guid().string(), sock=sock),
                                  opts=Map())

    def add_session(self, sc) -> session:
        """Wrap the codec's stream in a session and track it."""
        s = new_session(sc.stream(), sc)
        self.sessions.add(s)
        return s

    def remove_session(self, sc):
        self.sessions.remove(sc.stream().id())

    def handle_request(self, sock, address):
        """Decode one request from sock and dispatch it."""
        rh = RequestHead()
        args = []
        sc = self.new_server_codec(sock)
        sc.decode_head(rh)
        if rh.id is None or rh.id == 0:
            # Empty/invalid head: drop the connection.
            sock.close()
        else:
            sc.decode_args(args)
            self.add_session(sc)
            self.invoke_service(sc, rh=rh, args=args)
            self.remove_session(sc)

    def invoke_service(self, sc, rh: RequestHead, args: List):
        """Look up rh.service / rh.method and encode the call's Response."""
        s = self.services.get(rh.service)
        if s is not None:
            ret = getattr(s, rh.method)(*args)
            head = ResponseHead(id=rh.id, assets=None)
            result = Result(value=ret, error=None)
            sc.encode(Response(head=head, result=result))
        else:
            print("do nothing")

    def find_codec_builder(self, macher: str) -> IServerCodec:
        return const.codecs.get(macher)

    def register(self):
        """Register this node (with a fresh guid) at the registry."""
        self.nid = Guid().string()
        self.registry.register(self.opts.name, self.nid,
                               self.opts.address.url, self.opts.version)

    def handle_signal(self):
        """Deregister from the registry on SIGINT/SIGTERM."""
        def handler(signum, frame):
            logging.info("bee.rpc > kill service={}, nid={}".format(
                self.opts.name, self.nid))
            self.registry.deregister(self.opts.name, self.nid)

        signal.signal(signal.SIGINT, handler)
        signal.signal(signal.SIGTERM, handler)

    def startup(self):
        """Start the rpc server (blocks inside serve_forever)."""
        self.handle_signal()
        pair = self.opts.address.url.split(":")
        server = StreamServer((pair[0], int(pair[1])), self.handle,
                              backlog=self.opts.backlog,
                              spawn=self.opts.max_conn_size)
        self.register()
        # BUGFIX: record the server *before* the blocking serve_forever();
        # the original assigned self.srv afterwards, so shutdown() could
        # never see the running server instance.
        self.srv = server
        server.serve_forever()

    def shutdown(self):
        """Stop serving and close the registry connection."""
        if self.registry is not None:
            self.registry.close()
        # getattr guard: startup() may never have run.
        if getattr(self, "srv", None) is not None:
            self.srv.close()

    def __call__(self, environ, start_response):
        # BUGFIX: the original omitted `self`, so any WSGI invocation of an
        # instance would raise TypeError. For gunicorn wsgi app.
        pass
from typing import List from bee.data.option import Options from bee.data.map import Map from bee.data import const const.codecs = Map() class ReadPeeker(): def read(self, p: bytes) -> int: pass def peek(self, n: int) -> bytes: pass class Stream(ReadPeeker): def id(self) -> str: pass def reader(self): pass def peek(self, n: int) -> bytes: pass def read(self, n: int) -> bytes: pass def read_bytes(self, separator=b'\n') -> bytes:
def __init__(self, ):
    """Start with an empty options table and an empty database registry."""
    self.options = {}   # name -> DBOptions
    self.dbMap = Map()  # name -> Database
class Factory():
    """Thread-safe factory caching one redis Client per configured name."""

    def __init__(self):
        self.sem = BoundedSemaphore(1)
        self.cmds = Map()  # name -> Client

    def open(self, name: str) -> Client:
        """Return (creating on first use) the client for name."""
        self.sem.acquire()
        try:
            client = self.cmds.get(name)
            if client is None:
                client = self.create(name)
        finally:
            # BUGFIX: release the semaphore even when create() raises;
            # previously an exception left it held forever (deadlock).
            self.sem.release()
        return client

    def create(self, name: str) -> Client:
        """Build, cache, and return a Client; None when config is missing."""
        client = self.cmds.get(name)
        if client is not None:
            return client
        opts = self.load_options(name)
        if opts is None:
            return None
        # Choose topology from the configured type; unknown types fall back
        # to a single-node client, exactly like TYPE_SINGLE.
        if opts.type == TYPE_SENTINEL:
            client = self.create_sentinel(opts)
        elif opts.type == TYPE_CLUSTER:
            client = self.create_cluster(opts)
        else:
            client = self.create_single(opts)
        self.cmds.set(name, client)
        return client

    def load_options(self, name) -> Options:
        """Load Options from config key bee.data.redis.<name>, or None."""
        key = "bee.data.redis." + name
        if not config.exist(key):
            # BUGFIX: the original fell through with `pass` and then read
            # the missing key anyway; return None like the other factories.
            return None
        opts = Options()
        opts.cover(config.get(key))
        return opts

    def create_single(self, opts: Options) -> Client:
        """Single-node client backed by a connection pool."""
        pair = opts.address[0].split(":")
        pool = redis.ConnectionPool(host=pair[0], port=int(pair[1]),
                                    max_connections=opts.pool_size,
                                    db=opts.db)
        r = redis.Redis(connection_pool=pool,
                        socket_timeout=opts.socket_timeout / 1000,
                        socket_connect_timeout=opts.socket_connect_timeout / 1000)
        return Client(r=r, opts=opts)

    def create_sentinel(self, opts: Options) -> Client:
        """Sentinel-based client; addresses are "host:port" strings."""
        address_array = [tuple(x.split(":")) for x in opts.address]
        sentinel = Sentinel(address_array, socket_timeout=5)
        return Client(s=sentinel, opts=opts)

    def create_cluster(self, opts: Options) -> Client:
        """Cluster client; addresses are "host:port" strings."""
        startup_nodes = []
        for v in opts.address:
            pair = v.split(":")
            startup_nodes.append({"host": str(pair[0]), "port": pair[1]})
        rc = RedisCluster(startup_nodes=startup_nodes,
                          decode_responses=True,
                          password=opts.password,
                          max_connections=opts.pool_size,
                          max_connections_per_node=20,
                          socket_timeout=opts.socket_timeout / 1000,
                          socket_connect_timeout=opts.socket_connect_timeout / 1000)
        return Client(rc=rc, opts=opts)
def __init__(self):
    """Prepare an empty client cache guarded by a one-permit semaphore."""
    self.clients = Map()            # name -> cached client
    self.sem = BoundedSemaphore(1)  # serializes cache access
def set_aliases(self, alias: str = None, key: str = None):
    """Record that alias resolves to key, creating the alias Map lazily."""
    if self.aliases is None:
        self.aliases = Map()
    self.aliases.set(alias, key)
def merge_option(opts: Map, k: str, v):
    """Merge value v into nested Map opts at dotted key k.

    Existing values win: the leaf is written with setdefault, and if any
    intermediate segment already holds a non-Map/non-dict value the merge
    is abandoned without touching it.
    """
    keys = k.split(".")
    # Drop empty segments produced by leading/trailing/double dots.
    new_keys = []
    for value in keys:
        if value != "":
            new_keys.append(value)
    keys = new_keys
    length = len(keys)
    last = length - 1
    if length > 1:
        for i in range(length):
            key = keys[i:i + 1][0]
            if i < last:
                if opts.get(key) == None:
                    # Create the intermediate level and descend into it.
                    opts = opts.setdefault(key, Map())
                else:
                    # Descend into the existing level.
                    opts = opts.get(key)
                    if opts.contains(keys[i + 1:i + 2][0]):
                        if i == last - 1:
                            # The leaf already exists: keep the old value.
                            return
                        else:
                            # A deeper path collides with an existing
                            # scalar: abort the merge entirely.
                            t = type(opts.get(keys[i + 1:i + 2][0]))
                            if t != Map and t != dict:
                                return
            else:
                # Leaf segment: only set when absent (merge, not overwrite).
                opts.setdefault(key, v)
    else:
        # Single-segment key: set only when not already present.
        if opts.contains(k) == False:
            opts.set(k, v)
def __init__(self):
    """Create the empty channel registry (id -> session)."""
    self.channels = Map()
from bee.data.map import Map

# Demo script exercising the Map API interactively.

# Construct a Map from parallel key/value tuples.
m1 = Map(('a', 'b', 'c'), (1, 2, 3))
print(m1.keys())
print(m1.values())
print(m1.items())
# Membership checks: attribute-style and via the Map API.
print(hasattr(m1, 'a'))
print(m1.contains('a'))
# m1.remove("a")
# Entries can also be removed attribute-style.
delattr(m1, "a")
print(m1.keys())
# empty() clears all entries in place.
m1.empty()
print(m1.values())
# print(m1.get(1))
# Keyword-argument construction.
m2 = Map(a=1, b=2)
print(m2)
# m3 = Map({"a" : 1})
# # print(m3)
from bee.net.rpc.codecs.proto.proto import Builder

# Manual smoke test: send four "hello" requests over a single socket
# connection and print each decoded result.
req_id = 1
sock = socket.create_connection(address=("127.0.0.1", 8080))
print(sock.closed)
while req_id < 5:
    head = RequestHead()
    head.id = req_id
    head.service = "Test"
    head.method = "hello"
    head.labels = [Option(name="test", value="test")]
    request = Request(head=head, args=["Mr. " + str(req_id)])

    codec = Builder().new_client_codec(s=Channel(id="", sock=sock), opts=Map())
    codec.encode(req=request)

    resp_head = ResponseHead()
    codec.decode_head(resp_head)
    result = Result()
    codec.decode_result(result)
    print(result.value)
    req_id += 1