def serializer(func):
    sig = signature(func)
    parameter_type = list(sig.parameters.values())[1]
    assert parameter_type.annotation != sig.empty, \
        "Serialization function must have a first argument (not counting self) of protocol buffer type!"
    parameter_type = parameter_type.annotation
    assert hasattr(parameter_type, "SerializeToString"), \
        "Serialization function seems not to have a valid protocol buffer first argument type!"
    log = get_logger("Serializer")

    @functools.wraps(func)
    async def wrapper(self, msg: Serializable):
        nonlocal log
        if msg.target_type == Serializable.TargetType.JSON:
            try:
                return json_format.MessageToJson(_process_tupple(msg.proto))
            except Exception as e:
                log.exception(
                    "Exception while trying to serialize protocol buffer to json! Because: %s",
                    str(e))
                print("Serializer got data: ", msg.proto)
        else:
            try:
                return _process_tupple(msg.proto).SerializeToString()
            except Exception as e:
                log.exception(
                    "Exception while trying to serialize protocol buffer! Because: %s",
                    str(e))
                print("Serializer got data: ", msg.proto)
        return b''
    return wrapper
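# Hedged usage sketch (added for illustration, not part of the original source):
# @serializer inspects the first non-self parameter's annotation, so a handler
# method would typically be written as below. StringValue is only a stand-in for
# a generated protocol buffer class; at call time the wrapper actually receives
# a Serializable and ignores the decorated body.
from google.protobuf.wrappers_pb2 import StringValue


class ExampleProtoSerializer:  # hypothetical class name
    @serializer
    async def serialize(self, proto: StringValue) -> bytes:
        pass  # body unused: the wrapper serializes msg.proto to JSON or bytes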
def create_client(ip: str, port: int):
    log = get_logger("Client @ {}:{}".format(ip, port))
    try:
        queue = Queue()
        response_queue = Queue()
    except Exception as e:
        log.exception(e)
        return

    @client_handler
    async def com_handler(transport: ClientTransport):
        log.info("connected to the server")
        while True:
            log.info("in loop")
            cmd = await queue.get()
            if len(cmd) != 2:
                break
            cmd, data = cmd
            if len(cmd) == 0 or cmd == "close":
                break
            log.info("Sending %s request with data: %s", cmd, str(data))
            call_id = await transport.write(data.SerializeToString(), cmd)
            response = await transport.read(call_id)
            if response.success:
                log.info("Got response: %s", str(response.data))
                await response_queue.put(response.data)
            else:
                log.warning("Error: code=%d, narrative: %s",
                            response.error_code, response.msg())
        log.info("close connection")
        transport.close()

    return functools.partial(run_client, com_handler, ip, port), queue, response_queue
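# Hedged usage sketch (assumed call pattern, not from the source): the factory
# returns a runner plus a request queue and a response queue. A caller would
# start the runner, enqueue (command, protobuf) pairs and await replies; the
# queue type, runner invocation and the search_pb2.Query message are assumptions.
#
# runner, requests, responses = create_client("127.0.0.1", 10000)
# client_task = asyncio.ensure_future(runner())
# await requests.put(("search", search_pb2.Query()))
# reply = await responses.get()
# await requests.put(("close", None))  # any "close" command ends the handler loop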
async def main(args):
    director = Director()
    log = get_logger("Main")
    if args.targets is not None and len(args.targets) > 0:
        await set_nodes(director, args.targets)
    if args.type == "location":
        await set_locations(director, args.city_num)
    elif args.type == "weather_agent":
        await set_weather_agents(director)
    elif args.type == "location_and_connection":
        await set_locations_and_connections(director, args.city_num)
    elif args.type == "json_config":
        if len(args.config_file) == 0:
            print("json_config requires a json config file!")
            return
        await config_from_json(director, args.config_file)
    elif args.type == "bootstrap":
        with open(args.bootstrap_config, "r") as f:
            config = json.load(f)
        config = update_config_from_state(config, log)
        b = Bootstrap(config["bootstrap"])
        config["bootstrap"] = b.update_config()
        update_state(config, log)
        await from_bootstrap(director, b, config)
    else:
        print("Command type {} not supported!".format(args.type))
    await director.close_all()
    await director.wait()
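# Hedged sketch (argument names inferred from main(); the actual parser is not
# shown in this excerpt, so defaults and help texts are assumptions):
#
# parser = argparse.ArgumentParser(description="Director tool")
# parser.add_argument("--type", type=str, default="location",
#                     help="location | weather_agent | location_and_connection | json_config | bootstrap")
# parser.add_argument("--targets", nargs="*", default=[], help="director targets (format assumed)")
# parser.add_argument("--city_num", type=int, default=1)
# parser.add_argument("--config_file", type=str, default="")
# parser.add_argument("--bootstrap_config", type=str, default="")
# asyncio.get_event_loop().run_until_complete(main(parser.parse_args()))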
def deserializer(func):
    sig = signature(func)
    return_type = sig.return_annotation
    assert return_type != sig.empty, \
        "Deserialization function must have a protocol buffer return type!"
    assert hasattr(return_type, "ParseFromString"), \
        "Deserialization function seems not to have a valid protocol buffer return type!"
    log = get_logger("Deserializer")

    @functools.wraps(func)
    async def wrapper(self, data):
        nonlocal log
        if isinstance(data, dict):
            try:
                return json_format.Parse(json.dumps(data), return_type())
            except Exception as e:
                log.exception(
                    "Exception while trying to parse json to protocol buffer! Because: %s",
                    str(e))
        else:
            msg = return_type()
            try:
                msg.ParseFromString(data)
                return msg
            except Exception as e:
                log.exception(
                    "Exception while trying to parse data to protocol buffer! Because: %s",
                    str(e))
            return msg
    return wrapper
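# Hedged companion sketch to the @serializer example above (illustration only):
# @deserializer checks the return annotation instead, so the decorated method is
# annotated with the protobuf type it should produce; StringValue again stands in
# for a real generated message.
from google.protobuf.wrappers_pb2 import StringValue


class ExampleProtoDeserializer:  # hypothetical class name
    @deserializer
    async def deserialize(self, data) -> StringValue:
        pass  # body unused: the wrapper parses raw bytes or a JSON dict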
def start_network(router: BackendRouter,
                  ip: str,
                  socket_port: int,
                  http_port: int = -1,
                  ssl_certificate=None,
                  html_dir: str = None,
                  *,
                  logger=None,
                  name_flag: str = "_",
                  config: dict = {}):
    if logger is None:
        logger = get_logger("NetworkInterfaceCreator")
    http_info = ""
    if http_port != -1:
        http_info = " & HTTP interface @ {}:{}".format(ip, http_port)
    logger.info("Starting network interface for router {} @ {}:{}{}".format(
        router.name, ip, socket_port, http_info))
    threads = 1
    if http_port != -1:
        logger.info("Starting thread for HTTP server.")
        threads = 2
    com = CommunicationHandler(threads)
    com.add(socket_server, ip, socket_port, "{}:{}".format(ip, socket_port))
    if http_port != -1:
        logger.info("Starting HTTP server.")
        com.add(http_server, ip, http_port, ssl_certificate,
                html_dir=html_dir, config=config)
    com.start(router)
    return com
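# Hedged usage sketch (not from the source; assumes a BackendRouter instance
# named `router` already exists): start_network() wires the router to a socket
# endpoint and, when http_port is given, also to an HTTP endpoint.
#
# com = start_network(router, "0.0.0.0", 10000,
#                     http_port=8080, html_dir="api/src/resources/website")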
def socket_handler(router: BackendRouter, name: str):
    log = get_logger("SocketHandler@" + name)

    @handler
    async def on_connection(transport: Transport):
        log.info("Got socket client")
        queue = asyncio.Queue()
        tasks = [asyncio.create_task(read_task(transport, queue, log))]
        while True:
            request = await queue.get()
            if isinstance(request, str) and request == "CLOSE":
                for task in tasks:
                    task.cancel()
                break
            tasks.append(
                asyncio.create_task(
                    handle_task(transport, request, log, router)))
        for task in tasks:
            try:
                await task
            except asyncio.CancelledError:
                log.info("Task cancelled")
        log.info("Connection lost")
        transport.close()

    return on_connection
def _run_search_node(self, config: dict, q: multiprocessing.Queue):
    from utils.src.python.Monitoring import init
    init(config["search_config"].get("search_prometheus_log_file"))
    from network_oef.src.python.SearchNode import SearchNode
    from utils.src.python.Logging import configure as configure_logging, get_logger
    configure_logging(file=config["search_log"])
    logger = get_logger("NODE_RUN: " + config["search_key"])
    search_node = SearchNode(config)
    logger.info("**** Node started @ {}:{}".format(config["host"],
                                                   config["search_port"]))
    time.sleep(1)
    try:
        while True:
            con = q.get()
            # Validate the tuple before using its elements.
            if len(con) != 3:
                logger.error(
                    "**** Stopping connection queue listening, because invalid con: %s",
                    con)
                break
            logger.info("**** SearchProcess got peer: %s @ %s ", con[2],
                        con[0] + ":" + str(con[1]))
            search_node.connect_to_search_node(*con)
        search_node.block()
    except Exception as e:
        logger.exception("Exception in run_search_node: %s", e)
    except:
        logger.exception("Exception")
    logger.error("******* EXIT SEARCH NODE")
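# Hedged sketch of the config dict this process entry point appears to expect
# (keys inferred from the function body; values are illustrative only):
#
# search_node_config = {
#     "search_key": "london-search",
#     "host": "127.0.0.1",
#     "search_port": 20000,
#     "search_log": "logs/london_search.log",
#     "search_config": {
#         "search_prometheus_log_file": "logs/london_prometheus.log",
#     },
#     # ...plus whatever SearchNode(config) itself requires.
# }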
def http_json_handler(router):
    log = get_logger("HttpJsonRequestHandler")

    def on_request(path=""):
        global _loop
        log.info("Got json request over http")
        try:
            asyncio.set_event_loop(_loop)
            response = _loop.run_until_complete(
                router.route(path, bottle.request.json))
            bottle.response.headers['Content-Type'] = 'application/json'
            return response.data
        except bottle.HTTPError as e:
            log.error("Not valid JSON request: %s", e)

    return on_request
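# Hedged sketch (the route path is an assumption, not taken from this excerpt):
# the returned callback matches bottle's route-callback signature, so it would
# typically be registered on a bottle app along these lines.
#
# app = bottle.Bottle()
# app.route("/json/<path:path>", method="POST", callback=http_json_handler(router))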
def _run_search_node(name: str, node_ip: str, node_port: int,
                     dap_port_start: int, director_api_port: int,
                     http_port: int, ssl_certificate: str,
                     q: multiprocessing.Queue, log_file: str):
    from network_oef.src.python.FullLocalSearchNode import FullSearchNone
    from utils.src.python.Logging import configure as configure_logging
    configure_logging(file=log_file)
    logger = get_logger("NODE_RUN: " + name)
    node = FullSearchNone(
        name,
        node_ip,
        node_port,
        [{
            #"run_py_dap": True,
            #"file": "ai_search_engine/src/resources/dap_config.json",
            #"port": dap_port_start,
            "run_mode": "CPP",  #PY/CPP
            "port": dap_port_start,
            "name": "in_memory_dap"
        }],
        http_port,
        ssl_certificate,
        "api/src/resources/website",
        director_api_port=director_api_port,
        log_dir=os.path.split(log_file)[0])
    logger.error("**** Node %s started", name)
    time.sleep(1)
    try:
        while True:
            con = q.get()
            # Validate the tuple before using its elements.
            if len(con) != 3:
                logger.error(
                    "**** Stopping connection queue listening, because invalid con: %s",
                    con)
                break
            logger.info("**** SearchProcess got peer: %s @ %s ", con[2],
                        con[0] + ":" + str(con[1]))
            node.add_remote_peer(*con)
        node.block()
    except Exception as e:
        logger.exception("Exception in run_search_node: %s", e)
    except:
        logger.exception("Exception")
    logger.error("******* EXIT SEARCH NODE")
def socket_message_handler(adapter: OEFSocketAdapter):
    log = get_logger("OEFSocketConnectionHandler")

    async def on_connection(reader, writer):
        log.error("Got socket client")
        transport = Transport(reader, writer)
        session = ComSession()
        while True:
            response = await transport.read()
            if not response.success:
                log.error("Error response for uri %s, code: %d, reason: %s",
                          response.uri, response.error_code, response.msg())
                break
            response = await adapter.handle_message(response.data, session)
            await transport.write(response)
        transport.close()

    return on_connection
def socket_handler(router: BackendRouter):
    log = get_logger("SocketConnectionHandler")

    @handler
    async def on_connection(transport: Transport):
        log.info("Got socket client")
        request = await transport.read()
        if not request.success:
            log.error("Error response for uri %s, code: %d, reason: %s",
                      request.uri, request.error_code, request.msg())
            return
        response = await router.route(request.uri, request.data)
        if response.success:
            await transport.write(response.data, request.uri, call_id=request.id)
        else:
            await transport.write_error(response.error_code, response.narrative,
                                        request.uri, call_id=request.id)
        transport.close()

    return on_connection
async def from_bootstrap(director: Director, b: Bootstrap, config: dict):
    log = get_logger("BootstrapDirector")
    result = b.run_nodelist()
    oef_nodes = {}
    for binfo in result:
        if not binfo["last_seen"]:
            log.info("Skipping node ({}@{}:{}) because last_seen is null: {}".format(
                binfo["public_key"], binfo["host"], binfo["port"],
                json.dumps(binfo, indent=2)))
            continue
        if abs(time.time() - time_parser.parse(binfo["last_seen"]).timestamp()
               ) > config["last_seen_threshold_in_sec"]:
            log.info(
                "Skipping node ({}@{}:{}) because last_seen was too long ago ({} s threshold): {}"
                .format(binfo["public_key"], binfo["host"], binfo["port"],
                        config["last_seen_threshold_in_sec"],
                        json.dumps(binfo, indent=2)))
            continue
        await director.add_node(binfo["host"], binfo["port"])
        info = await director.get_info(
            director.name_from_address(binfo["host"], binfo["port"]))
        city = info.search_key.replace("-search", "")
        if binfo["host"] != info.host:
            log.info(
                "Host returned by the director info endpoint differs from the bootstrap entry. Called: {}, got: {}"
                .format(binfo["host"], info.host))
        oef_nodes[city] = {
            "search_key": info.search_key,
            "core_key": info.core_key,
            "host": binfo["host"],
            "search_port": info.search_port,
            "core_port": info.core_port,
            "director_port": binfo["port"],
            "original_key": info.search_key
        }
    log.info("Got search nodes: {}".format(json.dumps(oef_nodes, indent=2)))
    map_data = get_map_data(len(oef_nodes))
    log.info("Map Data: {}".format(map_data))
    name_map = config["name_map"]
    oef_nodes_with_city_keys = {}
    for key, info in oef_nodes.items():
        right_key = key
        try:
            location, peer_list = map_data[right_key]
        except KeyError:
            try:
                right_key = name_map[info["original_key"]]
                log.info(
                    "Key %s not a valid city name! Key lookup in config resulted with "
                    "the following city: %s -> %s", key, info["original_key"],
                    right_key)
                location, peer_list = map_data[right_key]
            except KeyError:
                log.error(
                    "Key %s not a valid city name! Key lookup failed in name_map (set in config): %s!"
                    " Skipping node...", key, info["original_key"])
                continue
        dname = director.name_from_address(info['host'], info['director_port'])
        await director.set_location(dname, info["core_key"].encode("UTF-8"), location)
        oef_nodes_with_city_keys[right_key] = info
        oef_nodes_with_city_keys[right_key]["peer_list"] = peer_list
        oef_nodes_with_city_keys[right_key]["dname"] = dname
    for key, info in oef_nodes_with_city_keys.items():
        peers = [(inf["search_key"], inf["host"], inf["search_port"])
                 for inf in map(lambda name: oef_nodes_with_city_keys[name],
                                info["peer_list"])]
        await director.add_peers(info["dname"], peers)
    await director.close_all()
    await director.wait()
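# Hedged sketch of the config keys from_bootstrap() reads directly (inferred
# from the function body; values are illustrative only):
#
# bootstrap_config = {
#     "last_seen_threshold_in_sec": 300,
#     "name_map": {
#         "some-node-search": "london",  # maps a node's search_key to a city name
#     },
#     # "bootstrap": {...} is consumed by Bootstrap(config["bootstrap"]) before
#     # this coroutine runs (see main() above).
# }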
c["bootstrap"]["remote-host"] = c["host"] if c["bootstrap"]["remote-port"] < 1000: c["bootstrap"]["remote-port"] = c["director_port"] b = Bootstrap(c["bootstrap"]) try: c["bootstrap"] = b.update_config() except ValueError: logger.warning("Bootstrap state not saved because isn't configured properly!") update_state(c, logger) b.work() if __name__ == "__main__": configure_logging() logger = get_logger("FullNode") parser = argparse.ArgumentParser(description='Full PLUTO app! \n At least one of the arguments must be specified!') parser.add_argument("--config_file", required=False, default="", type=str, help="Path to the config JSON file.") parser.add_argument("--config_str", required=False, default="", type=str, help="JSON config string") parser.add_argument("--initial_sleep", required=False, default=0, type=int, help="Startup sleep in sec") args = parser.parse_args() if len(args.config_file) == 0 and len(args.config_str) == 0: parser.print_help() logger.error("No config set!") exit(1) CONFIG_REQUIRED_KEYS = [ ("host", str_value_set), ("search_key", str_value_set),