class BackEndPool(Daemon):
    """Daemon managing pools of backend processes keyed by space/model,
    controlled over a local JSON-RPC server on port 8473."""

    def __init__(self):
        # backends[space][model] -> list of BackEndProcess instances
        self.backends = {}

    def run(self):
        """Daemon entry point: expose spawn/kill over JSON-RPC and serve forever."""
        self.server = SimpleJSONRPCServer(('localhost', 8473))
        self.server.register_function(self.spawn_backends, "spawn")
        self.server.register_function(self.kill_backends, "kill")
        self.server.serve_forever()

    def spawn_backends(self, backend):
        """Create and start one backend process.

        `backend` is a dict with at least 'space' and 'model' keys; the new
        process is appended to the matching pool.
        """
        space = backend['space']
        model = backend['model']
        pool = self.backends.setdefault(space, {}).setdefault(model, [])
        proc = BackEndProcess(backend)
        pool.append(proc)
        # Fix: start only the newly created process. The original looped over
        # the whole pool and called start() on every process, which raises on
        # processes that were already started by a previous spawn call.
        proc.start()

    def kill_backends(self, backend):
        """Wait for every backend process of the given space/model to finish.

        NOTE(review): this only joins the processes; it does not signal them
        to stop first — confirm BackEndProcess terminates on its own.
        """
        for be in self.backends[backend['space']][backend['model']]:
            be.join()
def main():
    """Start a JSON-RPC server exposing StanfordCoreNLP parse functions.

    Command-line options select host/port, the CoreNLP install directory,
    and the properties file. Blocks in serve_forever until Ctrl+C.
    """
    from jsonrpclib.SimpleJSONRPCServer import SimpleJSONRPCServer
    parser = optparse.OptionParser(usage="%prog [OPTIONS]")
    parser.add_option('-p', '--port', default='8080',
                      help='Port to serve on (default 8080)')
    parser.add_option('-H', '--host', default='127.0.0.1',
                      help='Host to serve on (default localhost; 0.0.0.0 to make public)')
    parser.add_option('-q', '--quiet', action='store_false', default=True, dest='verbose',
                      help="Quiet mode, don't print status msgs to stdout")
    parser.add_option('-S', '--corenlp', default=DIRECTORY,
                      help='Stanford CoreNLP tool directory (default %s)' % DIRECTORY)
    parser.add_option('-P', '--properties', default='default.properties',
                      help='Stanford CoreNLP properties fieles (default: default.properties)')
    options, args = parser.parse_args()
    VERBOSE = options.verbose
    # server = jsonrpc.Server(jsonrpc.JsonRpc20(),
    #                         jsonrpc.TransportTcpIp(addr=(options.host, int(options.port))))
    try:
        server = SimpleJSONRPCServer((options.host, int(options.port)))
        # serving=True keeps the CoreNLP process alive for repeated requests.
        nlp = StanfordCoreNLP(options.corenlp, properties=options.properties, serving=True)
        server.register_function(nlp.parse)
        server.register_function(nlp.raw_parse)
        print 'Serving on http://%s:%s' % (options.host, options.port)
        # server.serve()
        server.serve_forever()
    except KeyboardInterrupt:
        print >>sys.stderr, "Bye."
        exit()
def start_master_daemon_func(): funcs_by_name = {} # master_daemon functions funcs_by_name['add'] = add funcs_by_name['check_connectivity'] = check_connectivity funcs_by_name['get_trex_path'] = get_trex_path funcs_by_name['update_trex'] = update_trex # trex_daemon_server funcs_by_name['is_trex_daemon_running'] = trex_daemon_server.is_running funcs_by_name['restart_trex_daemon'] = trex_daemon_server.restart funcs_by_name['start_trex_daemon'] = trex_daemon_server.start funcs_by_name['stop_trex_daemon'] = trex_daemon_server.stop # stl rpc proxy funcs_by_name['is_stl_rpc_proxy_running'] = stl_rpc_proxy.is_running funcs_by_name['restart_stl_rpc_proxy'] = stl_rpc_proxy.restart funcs_by_name['start_stl_rpc_proxy'] = stl_rpc_proxy.start funcs_by_name['stop_stl_rpc_proxy'] = stl_rpc_proxy.stop try: set_logger() register_socket(master_daemon.tag) server = SimpleJSONRPCServer(('0.0.0.0', master_daemon.port)) logging.info('Started master daemon (port %s)' % master_daemon.port) for name, func in funcs_by_name.items(): server.register_function(partial(log_usage, name, func), name) server.register_function(server.funcs.keys, 'get_methods') # should be last signal.signal(signal.SIGTSTP, stop_handler) # ctrl+z signal.signal(signal.SIGTERM, stop_handler) # kill server.serve_forever() except KeyboardInterrupt: logging.info('Ctrl+C') except Exception as e: logging.error('Closing due to error: %s' % e)
def start(host=SERVER_HOST, port=SERVER_PORT):
    """Bring up the JSON-RPC endpoint and serve requests until interrupted."""
    rpc_server = SimpleJSONRPCServer((host, port))
    exposed = (
        (add, 'add'),
        (mazeService.maze_creation, 'maze_creation'),
    )
    for func, public_name in exposed:
        rpc_server.register_function(func, public_name)
    LOGGER.info("Starting RPC server on %s:%d", host, port)
    rpc_server.serve_forever()
def server():
    """Expose the minefield operations over JSON-RPC on localhost:7002."""
    rpc = SimpleJSONRPCServer(('localhost', 7002))
    for handler in (create_mine_field, play_on_coordinates, get_positions_cleared):
        rpc.register_function(handler)
    print("Starting server")
    rpc.serve_forever()
def start_sim_index_server(port, backends=(), remote_urls=(), root=True, logRequests=True):
    """Serve a SimIndexService over RPC, backed by local and/or remote indexes.

    With no backends at all, an in-memory index is used; a single backend is
    wrapped directly; several backends form a sharded collection.
    """
    server = SimpleRPCServer(('localhost', port),
                             logRequests=logRequests,
                             requestHandler=RequestHandler)
    all_backends = list(backends)
    if remote_urls:
        all_backends.extend(RemoteSimIndex(url) for url in remote_urls)
    if not all_backends:
        index = ConcurrentSimIndex(MemorySimIndex())
    elif len(all_backends) == 1:
        index = ConcurrentSimIndex(all_backends[0])
    else:
        index = ConcurrentSimIndex(SimIndexCollection(shards=all_backends, root=root))
    index.set_query_scorer('tfidf')
    server.register_instance(SimIndexService(index))
    try:
        print('Use Control-C to exit')
        server.serve_forever()
    except KeyboardInterrupt:
        print('Exiting')
def server_thread(conn):
    """Run the JSON-RPC endpoint for this node (blocks forever)."""
    from jsonrpclib.SimpleJSONRPCServer import SimpleJSONRPCServer
    rpc = SimpleJSONRPCServer((my_host, my_port))
    for public_name, handler in (('request', process_request),
                                 ('mpk', get_mpk),
                                 ('stop', do_stop)):
        rpc.register_function(handler, public_name)
    rpc.serve_forever()
def server_thread(conn):
    """Serve the 'request' and 'stop' JSON-RPC methods until stopped."""
    from jsonrpclib.SimpleJSONRPCServer import SimpleJSONRPCServer
    endpoint = SimpleJSONRPCServer((my_host, my_port))
    endpoint.register_function(process_request, "request")
    endpoint.register_function(do_stop, "stop")
    endpoint.serve_forever()
def start_master_daemon_func():
    """Run the master daemon JSON-RPC server.

    Exposes management functions for the TRex daemon and the stateless RPC
    proxy; installs stop handlers for SIGTSTP/SIGTERM, then serves forever.
    """
    try:
        set_logger()
        register_socket(master_daemon.tag)
        server = SimpleJSONRPCServer(('0.0.0.0', master_daemon.port))
        logging.info('Started master daemon (port %s)' % master_daemon.port)
        server.register_function(add)
        server.register_function(check_connectivity)
        server.register_function(get_trex_path)
        server.register_function(update_trex)
        # trex_daemon_server
        server.register_function(trex_daemon_server.is_running, 'is_trex_daemon_running')
        server.register_function(trex_daemon_server.restart, 'restart_trex_daemon')
        server.register_function(trex_daemon_server.start, 'start_trex_daemon')
        server.register_function(trex_daemon_server.stop, 'stop_trex_daemon')
        # stl rpc proxy
        server.register_function(stl_rpc_proxy.is_running, 'is_stl_rpc_proxy_running')
        server.register_function(stl_rpc_proxy.restart, 'restart_stl_rpc_proxy')
        server.register_function(stl_rpc_proxy.start, 'start_stl_rpc_proxy')
        server.register_function(stl_rpc_proxy.stop, 'stop_stl_rpc_proxy')
        server.register_function(server.funcs.keys, 'get_methods') # should be last
        signal.signal(signal.SIGTSTP, stop_handler) # ctrl+z
        signal.signal(signal.SIGTERM, stop_handler) # kill
        server.serve_forever()
    except KeyboardInterrupt:
        logging.info('Ctrl+C')
    except Exception as e:
        logging.error('Closing due to error: %s' % e)
def start_sim_index_server(port, backends=(), remote_urls=(), root=True, logRequests=True):
    """Start an RPC server wrapping one or more similarity indexes."""
    server = SimpleRPCServer(('localhost', port),
                             logRequests=logRequests,
                             requestHandler=RequestHandler)
    shards = list(backends)
    if remote_urls:
        for url in remote_urls:
            shards.append(RemoteSimIndex(url))
    # Pick the backing index: single shard directly, many shards as a
    # collection, none at all falls back to an in-memory index.
    if len(shards) == 1:
        backing = shards[0]
    elif shards:
        backing = SimIndexCollection(shards=shards, root=root)
    else:
        backing = MemorySimIndex()
    index = ConcurrentSimIndex(backing)
    index.set_query_scorer('tfidf')
    server.register_instance(SimIndexService(index))
    try:
        print('Use Control-C to exit')
        server.serve_forever()
    except KeyboardInterrupt:
        print('Exiting')
def server_thread(conn):
    """Block forever serving this node's JSON-RPC methods."""
    from jsonrpclib.SimpleJSONRPCServer import SimpleJSONRPCServer
    endpoint = SimpleJSONRPCServer((my_host, my_port))
    handlers = [
        ('request', process_request),
        ('mpk', get_mpk),
        ('stop', do_stop),
    ]
    for rpc_name, impl in handlers:
        endpoint.register_function(impl, rpc_name)
    endpoint.serve_forever()
def LoadAndServe(host, port):
    """Start a JSON-RPC server exposing every {name: callable} entry in API.

    Each element of API is a mapping from the public RPC name to the
    function implementing it. Blocks in serve_forever.
    """
    server = SimpleJSONRPCServer((host, port))
    for entry in API:
        # Fix: iterate items() instead of indexing .values()[0]/.keys()[0],
        # which breaks on Python 3 (dict views are not indexable) and could
        # mispair key and value if a mapping ever had several entries.
        for name, func in entry.items():
            server.register_function(func, name)
    server.serve_forever()
def server_thread():
    """Serve the wallet RPC API (getnewaddress/getnum/getkey/stop) forever."""
    from SocketServer import ThreadingMixIn
    from jsonrpclib.SimpleJSONRPCServer import SimpleJSONRPCServer
    rpc = SimpleJSONRPCServer((host, port), requestHandler=authHandler)
    exported = [
        ('getnewaddress', get_new_address),
        ('getnum', get_num),
        ('getkey', get_mpk),
        ('stop', do_stop),
    ]
    for rpc_name, impl in exported:
        rpc.register_function(impl, rpc_name)
    rpc.serve_forever()
def start(host=SERVER_HOST, port=SERVER_PORT):
    """Launch the news-backend JSON-RPC server and block serving requests."""
    rpc_server = SimpleJSONRPCServer((host, port))
    for func, rpc_name in ((add, "add"),
                           (get_one_news, 'get_one_news'),
                           (log_news_click_for_user, 'logNewsClickForUser')):
        rpc_server.register_function(func, rpc_name)
    logger.info("Starting RPC server on %s: %d", host, port)
    rpc_server.serve_forever()
def _main(args):
    """Serve the requested subset of FUNCTION_TABLE over JSON-RPC.

    Names in args.functions that are not in FUNCTION_TABLE are dropped.
    """
    rpc = SimpleJSONRPCServer((args.host, args.port))
    selected = []
    for name in args.functions:
        if name in FUNCTION_TABLE:
            selected.append(name)
    _message(selected, args.port)
    for name in selected:
        rpc.register_function(FUNCTION_TABLE[name])
    rpc.serve_forever()
def servidor():
    """Start the game JSON-RPC server on localhost:7002 (blocks forever)."""
    rpc = SimpleJSONRPCServer(('localhost', 7002))
    for handler in (criar_novo_jogo, tabuleiro_show):
        rpc.register_function(handler)
    # serverRPC.register_function(efetuar_jogada)
    # serverRPC.register_function(jogadas_restantes)
    # serverRPC.register_function(retorna_tabuleiro)
    print("Starting server")
    rpc.serve_forever()
def startJsonRPC():
    """Start the wallet JSON-RPC server on localhost:6663 (blocks forever)."""
    server = SimpleJSONRPCServer(('localhost', 6663))
    # 'echo' returns its argument unchanged; useful as a connectivity check.
    server.register_function(lambda x: x, 'echo')
    server.register_function(create_wallet, 'create_wallet')
    server.register_function(import_wallet, 'import_wallet')
    server.register_function(sign_transaction, 'sign_transaction')
    server.register_function(list_wallet_addresses, 'list_wallet_addresses')
    print 'SimpleJSONRPCServer started!'
    server.serve_forever()
def start(host=SERVER_HOST, port=SERVER_PORT):
    """Start the news JSON-RPC backend and serve until interrupted."""
    rpc = SimpleJSONRPCServer((host, port))
    handlers = [
        ('add', add),
        ('get_one_news', get_one_news),
        ('get_news', get_news),
        ('log_news_click_for_user', log_news_click_for_user),
    ]
    for rpc_name, handler in handlers:
        rpc.register_function(handler, rpc_name)
    LOGGER.info("Starting RPC server on %s:%d", host, port)
    rpc.serve_forever()
def __init__(self, host, port):
    """Start a JSON-RPC game server (blocks in serve_forever).

    Registers introspection functions, this object's ping method, and a
    MissionCache instance as the main dispatch target.
    """
    srv = SimpleJSONRPCServer((host, port))
    srv.register_introspection_functions()
    srv.register_function(self.ping)
    mc = MissionCache()
    srv.register_instance(mc)
    print "[GameServerService] Up and running!"
    srv.serve_forever()
def server_thread():
    """Run the authenticated wallet RPC server until process exit."""
    from SocketServer import ThreadingMixIn
    from jsonrpclib.SimpleJSONRPCServer import SimpleJSONRPCServer
    rpc = SimpleJSONRPCServer((host, port), requestHandler=authHandler)
    rpc.register_function(get_new_address, 'getnewaddress')
    rpc.register_function(get_num, 'getnum')
    rpc.register_function(get_mpk, 'getkey')
    rpc.register_function(do_stop, 'stop')
    rpc.serve_forever()
def start(host=SERVER_HOST, port=SERVER_PORT):
    """Start the click-logging RPC server (blocks in serve_forever).

    Creates the CloudAMQP client for the click-log task queue, then serves
    JSON-RPC requests forever.
    """
    # Fix: the original placed the docstring string AFTER this statement,
    # making it a no-op expression instead of the function's docstring.
    cloudAMQP_client = CloudAMQPClient(LOG_CLICKS_TASK_QUEUE_URL,
                                       LOG_CLICKS_TASK_QUEUE_NAME)
    # NOTE(review): cloudAMQP_client is not referenced again in this function;
    # presumably the registered handlers reach the queue another way — confirm.
    server = SimpleJSONRPCServer((host, port))
    server.register_function(add, "add")
    # TODO: register your function onto the server.
    LOGGER.info("Starting RPC server on %s:%d", host, port)
    server.serve_forever()
def server_thread():
    """Serve the 'sendPost' JSON-RPC method on localhost:serverPort."""
    from jsonrpclib.SimpleJSONRPCServer import SimpleJSONRPCServer
    host = "127.0.0.1"
    port = int(serverPort)
    # http://user:[email protected]:12345
    rpc = SimpleJSONRPCServer((host, port),
                              requestHandler=SleuthSimpleJSONRPCRequestHandler,
                              logRequests=True)
    rpc.register_function(handleRequest, 'sendPost')
    rpc.serve_forever()
def start_server():
    """Expose every adb_* wrapper plus 'ping' and 'upload_apk' over JSON-RPC.

    Blocks in serve_forever.
    """
    adb = ADB(ADB_PATH)
    funcs = extract_all_the_functions(adb, 'adb_')
    server = SimpleJSONRPCServer((RPC_HOST, RPC_PORT))
    # Fix: dropped the leftover debug `print type(funcs)` and replaced the
    # Python-2-only iteritems() with values() (the key was never used).
    for func in funcs.values():
        server.register_function(func)
    # Identity function: clients use it as a connectivity check.
    server.register_function(lambda x: x, 'ping')
    server.register_function(upload_apk)
    #server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    server.serve_forever()
class JSONRPCapi(threading.Thread):
    """Background thread hosting a JSON-RPC server bound to all interfaces."""

    def __init__(self, port):
        # Requests are dispatched to the module-level apiDispatcher instance.
        self.server = SimpleJSONRPCServer(('0.0.0.0', port), logRequests=False)
        self.server.register_instance(apiDispatcher)
        threading.Thread.__init__(self)

    def run(self):
        # Thread entry point: block serving RPC requests until shutdown.
        self.server.serve_forever()

    def register_function(self, *args, **kwargs):
        # Delegate straight to the underlying server.
        self.server.register_function(*args, **kwargs)
class Server(object):
    """Demo JSON-RPC server exposing a handful of test methods."""

    _server = None
    _ip = None
    _port = None

    def __init__(self, ip='127.0.0.1', port=8000):
        self._ip = ip
        self._port = port
        self._server = SimpleJSONRPCServer((self._ip, self._port))
        self.register()

    def register(self):
        """Register multicall support plus every exported method."""
        self._server.register_multicall_functions()
        exports = (
            ('callRemote', self._callRemote),
            ('out_int', self._out_int),
            ('out_string', self._out_string),
            ('in_int', self._in_int),
            ('in_string', self._in_string),
            ('in2', self._in2),
        )
        for public_name, method in exports:
            self._server.register_function(method, public_name)

    def start(self):
        """Serve requests forever."""
        self._server.serve_forever()

    def _callRemote(self):
        print('Method has been called!!!!')

    def _out_int(self):
        return 666

    def _out_string(self):
        return 'Sucker'

    def _in_int(self, i):
        print(i)
        return i + 6

    def _in_string(self, s):
        print(s)
        return s + ' xxx'

    def _in2(self, i1, i2):
        return str(i1) + str(i2)
def __init__(self, host, port, testing=False, injector=None):
    """Start a JSON-RPC event server (blocks in serve_forever).

    When `testing` is true and an injector is given, the injector is wired
    into the EventServiceMethods instance before serving.
    """
    srv = SimpleJSONRPCServer((host, port))
    srv.register_introspection_functions()
    srv.register_function(self.ping)
    mc = EventServiceMethods()
    srv.register_instance(mc)
    if testing and injector is not None:
        injector.inject(mc)
    print "[GameServerService] Up and running!"
    srv.serve_forever()
class RPCProxyNode(BaseNode):
    """Bridge JSON-RPC calls onto the internal message bus.

    Each exposed RPC method publishes a {'method': ...} command message on
    the '/rpc/cmd' topic; the actual robot control happens in subscribers.
    """

    def __init__(self, namespace, node_name, launch_params):
        super(RPCProxyNode, self).__init__(namespace, node_name, launch_params)
        # Bind address/port come from launch params, with open defaults.
        self.server = SimpleJSONRPCServer((self.get_param('hostname', "0.0.0.0"),
                                          self.get_param('port', 8080)))
        self.server.register_instance(self)
        self.server.register_function(self.move_fwd)
        self.server.register_function(self.move_back)
        self.server.register_function(self.rotate_left)
        self.server.register_function(self.rotate_right)
        self.server.register_function(self.stop)
        self.server.register_function(self.slider_changed)
        self.server.register_function(self.enable_camera)
        self.server.register_function(self.disable_camera)
        self.server.register_function(self.talk)

    # def _dispatch(self, name, params):
    #     self.message_bus.publish('/control/cmd', {'name': name, params: params})

    def talk(self):
        self.message_bus.publish('/rpc/cmd', {'method': 'talk'})  # transform to talk

    def move_fwd(self):
        self.message_bus.publish('/rpc/cmd', {'method': 'move_fwd'})

    def move_back(self):
        self.message_bus.publish('/rpc/cmd', {'method': 'move_back'})

    def rotate_left(self):
        self.message_bus.publish('/rpc/cmd', {'method': 'rotate_left'})

    def rotate_right(self):
        self.message_bus.publish('/rpc/cmd', {'method': 'rotate_right'})

    def stop(self):
        self.message_bus.publish('/rpc/cmd', {'method': 'stop'})

    def slider_changed(self, **kwargs):
        # Forwards the slider value; assumes callers pass value=<number> — TODO confirm.
        self.message_bus.publish('/rpc/cmd', {'method': 'slider_changed', 'value': kwargs['value']})

    def enable_camera(self):
        self.message_bus.publish('/rpc/cmd', {'method': 'enable_camera'})

    def disable_camera(self):
        self.message_bus.publish('/rpc/cmd', {'method': 'disable_camera'})

    def run(self):
        # Blocks serving RPC requests forever.
        self.server.serve_forever()
class Browser2DPlot:
    """Serve plottable data to a browser-based slice plot over JSON-RPC.

    Binds to an ephemeral localhost port, opens the plot page in a new
    browser tab, then blocks in serve_forever until 'shutdown' is called.
    """

    def __init__(self, data=None):
        self.server = SimpleJSONRPCServer(("localhost", 0), encoding="utf8",
                                          requestHandler=JSONRPCRequestHandler)
        # Port 0 lets the OS pick a free port; recover the actual one here.
        self.rpc_port = self.server.socket.getsockname()[1]
        self.data = data
        self.server.register_function(self.get_plottable, "get_plottable")
        self.server.register_function(self.kill_server, "shutdown")
        webbrowser.open_new_tab("http://localhost:%d/sliceplot.html" % (self.rpc_port,))
        self.server.serve_forever()

    def get_plottable(self):
        """Return the data object handed to the constructor."""
        return self.data

    def kill_server(self):
        # shutdown() must run in another thread: it blocks until serve_forever
        # exits, and this method is called from within the serving thread.
        thread.start_new_thread(self.server.shutdown, ())
def start_jsonrpc_server():
    """Expose the sound/TTS control functions over JSON-RPC on 0.0.0.0:1775."""
    server = SimpleJSONRPCServer(('0.0.0.0', 1775))
    # Each handler is registered under its own function name.
    for handler in (tts, r2d2, play, setvolume, getvolume, voices, sounds,
                    loop_sounds, c_out, announce, enable, disable):
        server.register_function(handler, handler.__name__)
    server.serve_forever()
def main(argv):
    """Serve GPIO set/get over JSON-RPC; -h HOST and -p PORT override defaults."""
    from jsonrpclib.SimpleJSONRPCServer import SimpleJSONRPCServer
    gpio = GPIOInterface()
    gpio.configure_ports(port_numbers)
    bind_host, bind_port = '127.0.0.1', 8001
    cli_args = sys.argv
    if '-h' in cli_args:
        bind_host = cli_args[cli_args.index('-h') + 1]
    if '-p' in cli_args:
        bind_port = int(cli_args[cli_args.index('-p') + 1])
    rpc = SimpleJSONRPCServer((bind_host, bind_port))
    rpc.register_function(gpio.set_port)
    rpc.register_function(gpio.get_port)
    rpc.serve_forever()
class MonitorServer(object):
    """JSON-RPC front-end for a monitor object.

    Every monitor method marked with `is_command` is exposed; results are
    wrapped as (True, value) and MonitorExceptions as (False, message).
    """

    monitor = None
    server = None
    logger = None

    def __init__(self, monitor):
        self.logger = logging.getLogger(self.__class__.__name__)
        self.monitor = monitor

    def _register_func(self, func):
        # Wrap `func` so RPC clients get a (success, payload) pair for
        # expected MonitorException failures instead of a raw error.
        def wrap(fun, *args, **kwargs):
            try:
                return True, fun(*args, **kwargs)
            except Exception as e:
                if isinstance(e, MonitorException):
                    # NOTE(review): e.message is Python-2 only — confirm the
                    # target interpreter before porting.
                    return False, e.message
                else:
                    import traceback
                    self.logger.error("%s" % traceback.format_exc())
                    raise e
        partial_func = partial(wrap, func)
        # Copy the wrapped function's metadata so the RPC layer registers it
        # under the original name.
        update_wrapper(partial_func, func)
        self.server.register_function(partial_func)

    def start(self):
        """Create the server and register every is_command-marked method."""
        self.logger.info("Starting...")
        self.server = SimpleJSONRPCServer(('localhost', 8080), logRequests=False)
        for maybeCommand in self.monitor.__class__.__dict__.values():
            if hasattr(maybeCommand, 'is_command'):
                command_name = maybeCommand.__name__
                self.logger.debug("Registering command %s" % command_name)
                command = getattr(self.monitor, command_name)
                self._register_func(command)

    def stop(self):
        """Shut down and close the server if it was started."""
        if self.server:
            self.logger.info("Stopping server")
            self.server.shutdown()
            self.server.server_close()

    def serve_forever(self):
        """Block serving requests until shutdown."""
        self.logger.info("Entering main loop")
        self.server.serve_forever()
def main():
    """Start the TTS/sound JSON-RPC server on 0.0.0.0:1775 and serve forever."""
    #tts("julia", "c")
    server = SimpleJSONRPCServer(('0.0.0.0', 1775))
    exported = [tts, r2d2, play, setvolume, getvolume, voices, sounds,
                c_out, announce, enable, disable]
    # Each handler is registered under its own function name.
    for fn in exported:
        server.register_function(fn, fn.__name__)
    server.serve_forever()
def run_server(port=8095):
    """Start the Stateless RPC proxy server on 0.0.0.0:port.

    Exposes the native and HLTAPI method wrappers; name collisions between
    the two sets are resolved by prefixing the HLTAPI name with 'hlt_'.
    """
    native_methods = [
        'acquire',
        'connect',
        'disconnect',
        'get_stats',
        'get_warnings',
        'push_remote',
        'reset',
        'wait_on_traffic',
    ]
    hltapi_methods = [
        'connect',
        'cleanup_session',
        'interface_config',
        'traffic_config',
        'traffic_control',
        'traffic_stats',
    ]
    try:
        server = SimpleJSONRPCServer(('0.0.0.0', port))
        server.register_function(add)
        server.register_function(check_connectivity)
        server.register_function(native_proxy_init)
        server.register_function(native_proxy_del)
        server.register_function(hltapi_proxy_init)
        server.register_function(hltapi_proxy_del)
        server.register_function(native_method)
        server.register_function(hltapi_method)
        # Convenience aliases: each method name dispatches through the
        # generic native_method/hltapi_method entry points.
        for method in native_methods:
            server.register_function(partial(native_method, method), method)
        for method in hltapi_methods:
            if method in native_methods: # collision in names
                method_hlt_name = 'hlt_%s' % method
            else:
                method_hlt_name = method
            server.register_function(partial(hltapi_method, method), method_hlt_name)
        server.register_function(server.funcs.keys, 'get_methods') # should be last
        print('Started Stateless RPC proxy at port %s' % port)
        server.serve_forever()
    except KeyboardInterrupt:
        print('Done')
def main():
    """Start a JSON-RPC server exposing StanfordCoreNLP parse functions.

    Command-line options select host/port, the CoreNLP install directory,
    and the properties file. Blocks in serve_forever until Ctrl+C.
    """
    from jsonrpclib.SimpleJSONRPCServer import SimpleJSONRPCServer
    parser = optparse.OptionParser(usage="%prog [OPTIONS]")
    parser.add_option("-p", "--port", default="8080", help="Port to serve on (default 8080)")
    parser.add_option(
        "-H", "--host", default="127.0.0.1",
        help="Host to serve on (default localhost; 0.0.0.0 to make public)"
    )
    parser.add_option(
        "-q",
        "--quiet",
        action="store_false",
        default=True,
        dest="verbose",
        help="Quiet mode, don't print status msgs to stdout",
    )
    parser.add_option(
        "-S", "--corenlp", default=DIRECTORY,
        help="Stanford CoreNLP tool directory (default %s)" % DIRECTORY
    )
    parser.add_option(
        "-P",
        "--properties",
        default="default.properties",
        help="Stanford CoreNLP properties fieles (default: default.properties)",
    )
    options, args = parser.parse_args()
    VERBOSE = options.verbose
    # server = jsonrpc.Server(jsonrpc.JsonRpc20(),
    #                         jsonrpc.TransportTcpIp(addr=(options.host, int(options.port))))
    try:
        server = SimpleJSONRPCServer((options.host, int(options.port)))
        # serving=True keeps the CoreNLP process alive for repeated requests.
        nlp = StanfordCoreNLP(options.corenlp, properties=options.properties, serving=True)
        server.register_function(nlp.parse)
        server.register_function(nlp.raw_parse)
        print "Serving on http://%s:%s" % (options.host, options.port)
        # server.serve()
        server.serve_forever()
    except KeyboardInterrupt:
        print >> sys.stderr, "Bye."
        exit()
def main():
    """Start a JSON-RPC server exposing SNOMED-CT description lookups.

    Command-line options select host/port; the hierarchy data is loaded
    from a fixed dataset path. Blocks in serve_forever until Ctrl+C.
    """
    parser = optparse.OptionParser(usage="%prog [OPTIONS]")
    parser.add_option('-p', '--port', default='8082', help='Port to serve on (default 8082)')
    parser.add_option(
        '-H', '--host', default='127.0.0.1',
        help='Host to serve on (default localhost; 0.0.0.0 to make public)')
    # NOTE(review): -v uses action='store_false' with default=False, so the
    # flag never changes the value — confirm the intended polarity.
    parser.add_option('-v', '--verbose', action='store_false', default=False,
                      dest='verbose', help="Quiet mode, don't print status msgs to stdout")
    options, args = parser.parse_args()
    VERBOSE = options.verbose
    try:
        #rh = AllPathRequestHandler if options.ignorepath else SimpleJSONRPCRequestHandler
        rh = SimpleJSONRPCRequestHandler
        server = SimpleJSONRPCServer((options.host, int(options.port)), requestHandler=rh)
        #inp = '/sc01a7/users/ixamed/BaliabideSemantikoak/SnomedCT_RF1Release_INT_20150131/Terminology/Content/'
        inp = '/ixadata/users/operezdevina001/Doktoretza/kodea/txt2snomed/SnomedCT_RF1Release_eus_20150731'
        des = HierarkiakKargatu(inp)
        server.register_function(des.deskribapenakJaso)
        server.register_function(des.deskribapenArabera)
        server.register_function(des.sct2term)
        server.register_function(des.sct2desc)
        server.register_function(des.sct2hierarkiak)
        server.register_function(des.desc2sct)
        print('Serving on http://%s:%s' % (options.host, options.port))
        # server.serve()
        server.serve_forever()
    except KeyboardInterrupt:
        # Fix: the original `print(sys.stderr, "Bye.")` printed the stderr
        # object itself to stdout. Write the farewell to stderr as intended
        # (sys.stderr.write works on both Python 2 and 3).
        sys.stderr.write("Bye.\n")
        exit()
def run_server(port = 8095):
    """Start the Stateless RPC proxy server on 0.0.0.0:port.

    Registers its socket under 'trex_stl_rpc_proxy', then exposes the native
    and HLTAPI method wrappers; name collisions between the two sets are
    resolved by prefixing the HLTAPI name with 'hlt_'.
    """
    native_methods = [
        'acquire',
        'connect',
        'disconnect',
        'get_stats',
        'get_warnings',
        'push_remote',
        'reset',
        'wait_on_traffic',
    ]
    hltapi_methods = [
        'connect',
        'cleanup_session',
        'interface_config',
        'traffic_config',
        'traffic_control',
        'traffic_stats',
    ]
    try:
        register_socket('trex_stl_rpc_proxy')
        server = SimpleJSONRPCServer(('0.0.0.0', port))
        server.register_function(add)
        server.register_function(check_connectivity)
        server.register_function(native_proxy_init)
        server.register_function(native_proxy_del)
        server.register_function(hltapi_proxy_init)
        server.register_function(hltapi_proxy_del)
        server.register_function(native_method)
        server.register_function(hltapi_method)
        # Convenience aliases: each method name dispatches through the
        # generic native_method/hltapi_method entry points.
        for method in native_methods:
            server.register_function(partial(native_method, method), method)
        for method in hltapi_methods:
            if method in native_methods: # collision in names
                method_hlt_name = 'hlt_%s' % method
            else:
                method_hlt_name = method
            server.register_function(partial(hltapi_method, method), method_hlt_name)
        server.register_function(server.funcs.keys, 'get_methods') # should be last
        print('Started Stateless RPC proxy at port %s' % port)
        server.serve_forever()
    except KeyboardInterrupt:
        print('Done')
def main(): global _PIDS # Create a new JSON-RPC server on localhost:8080 s = SimpleJSONRPCServer(('localhost', 8080)) # Register the functions to be called by the PHP client s.register_function(parse_html, 'parse_html') # Creates 5 child server processes for i in range(5): # Fork current process pid = os.fork() # Child fork: if 0 == pid: # Prevent interrupt messages for signum in ( signal.SIGINT, signal.SIGTERM, ): signal.signal(signum, _gogentle) # Start server s.serve_forever() os._exit(0) # Parent: else: _PIDS.append(pid) # Handle interrupt signals quietly for signum in ( signal.SIGINT, signal.SIGTERM, ): signal.signal(signum, _kronos) # Wait for child processes while len(_PIDS): pid, rc = os.waitpid(-1, 0) _PIDS.remove(pid) return 0
def main():
    """Parse CLI options, start OpenNLP, and serve its parse() over JSON-RPC."""
    opt_parser = optparse.OptionParser(usage="%prog [OPTIONS]")
    opt_parser.add_option('-p', '--port', type="int", default=8080,
                          help="Port to bind to [8080]")
    opt_parser.add_option('--path', default=DIRECTORY,
                          help="Path to OpenNLP install [%s]" % DIRECTORY)
    options, args = opt_parser.parse_args()
    bind_addr = ('localhost', options.port)
    endpoint = 'http://%s:%s' % bind_addr
    rpc = SimpleJSONRPCServer(bind_addr)
    print("Starting OpenNLP")
    nlp = OpenNLP(options.path)
    rpc.register_function(nlp.parse)
    print("Serving on %s" % endpoint)
    rpc.serve_forever()
class RPCApi():
    """JSON-RPC API wrapper: registers named functions and serves them.

    Config keys: 'rpc_host' and 'rpc_port' (required), 'rpc_timeout'
    (default 1) and 'verbose' (default False).
    """

    def __init__(self, config):
        self.config = config
        # Fix: `functions` was a class attribute, so every RPCApi instance
        # shared (and appended to) the same list. Make it per-instance.
        self.functions = []
        self.server = SimpleJSONRPCServer(
            (self.config['rpc_host'], self.config['rpc_port']))
        self.server.timeout = self.config[
            'rpc_timeout'] if "rpc_timeout" in config else 1
        self.server.logRequests = self.config[
            'verbose'] if "verbose" in config else False
        self.register_function(self.list_functions, "list_functions")

    def register_functions(self, **kwargs):
        """Registers functions with the server."""
        for function_name in kwargs:
            function = kwargs[function_name]
            self.register_function(function, function_name)

    def register_function(self, function, function_name):
        """Registers a single function with the server."""
        self.server.register_function(function, function_name)
        self.functions.append(function_name)

    def list_functions(self):
        """An externally accessible function returning all the registered
        function names (deduplicated)."""
        return list(set(self.functions))

    def poll(self):
        """Serves one request from the waiting requests and returns."""
        self.server.handle_request()

    def run(self):
        """Blocks execution and runs the server till the program shutdown."""
        self.server.serve_forever()

    def start_thread(self):
        """Starts self.run() in a separate daemon thread."""
        self.thread = threading.Thread(target=self.run)
        self.thread.daemon = True
        self.thread.start()
def start_master_daemon_func():
    """Start the master daemon JSON-RPC server (variant without logging or
    signal handling) and serve forever."""
    register_socket(master_daemon.tag)
    server = SimpleJSONRPCServer(('0.0.0.0', master_daemon.port))
    print('Started master daemon (port %s)' % master_daemon.port)
    server.register_function(add)
    server.register_function(check_connectivity)
    server.register_function(get_trex_path)
    server.register_function(update_trex)
    # trex_daemon_server
    server.register_function(trex_daemon_server.is_running, 'is_trex_daemon_running')
    server.register_function(trex_daemon_server.restart, 'restart_trex_daemon')
    server.register_function(trex_daemon_server.start, 'start_trex_daemon')
    server.register_function(trex_daemon_server.stop, 'stop_trex_daemon')
    # stl rpc proxy
    server.register_function(stl_rpc_proxy.is_running, 'is_stl_rpc_proxy_running')
    server.register_function(stl_rpc_proxy.restart, 'restart_stl_rpc_proxy')
    server.register_function(stl_rpc_proxy.start, 'start_stl_rpc_proxy')
    server.register_function(stl_rpc_proxy.stop, 'stop_stl_rpc_proxy')
    server.register_function(server.funcs.keys, 'get_methods') # should be last
    server.serve_forever()
class MockRPCServiceProvider(Thread):
    """Implement a mock of a remote 3rd party JSON-RPC service provider.

    Runs a JSON-RPC server in a background thread; pushed messages are
    recorded and can be inspected with get_messages().
    """

    # Class-level defaults; `messages` is re-bound per instance in __init__.
    url = ('localhost', 8484)
    messages = []
    name = ''
    ident = 0

    def __init__(self, *args, **kwargs):
        Thread.__init__(self)
        self.server = SimpleJSONRPCServer(self.url)
        self.server.register_function(self.push, 'push')
        self.server.verbose = False
        self.messages = []

    def run(self):
        """Thread body: serve until shutdown; swallow any server error so the
        thread always logs its exit."""
        logging.debug("MockRPCServiceProvider thread %i start" % self.ident)
        try:
            self.server.serve_forever()
        except:
            # Deliberate best-effort: any error just ends the mock server.
            pass
        logging.debug("MockRPCServiceProvider thread %i exit" % self.ident)
        exit()

    def stop(self):
        """Shut down the server and release its socket."""
        logging.debug("Stop MockRPCServiceProvider")
        self.server.shutdown()
        self.server.server_close()

    def register_function(self, *args, **kwargs):
        # Delegate extra registrations to the wrapped server.
        return self.server.register_function(*args, **kwargs)

    def push(self, msg, *args):
        """RPC handler: record the pushed message and acknowledge."""
        logging.debug("saving message: %s" % msg)
        self.messages.append(msg)
        return "ok"

    def get_messages(self):
        """Return every message pushed so far."""
        return self.messages
class GDFANN_Node: def __init__(self, domain='0.0.0.0', port=5000): self._server = SimpleJSONRPCServer( (domain, port) ) functions = { 'run' : self.run } for name in functions.keys(): self._server.register_function(functions[name], name) self._pop = None def run(self, population): if type(population) <> list: return 'Population must be a list' if self._pop is not None: del(self._pop) self._pop = Populacao(None, verboso=True) for i in population: print i self._pop.addIndividuo(Cromossomo(9,i)) print 'Evaluating population...' self._pop.avaliarPopulacao(avaliacaoRNA) print 'Sending answers...' answersMSE = [] for i in self._pop.getIndividuos(): answersMSE.append(i.getAvaliacao()) print answersMSE return answersMSE def serve(self): try: print 'Server running. Press CTRL+C to stop...' self._server.serve_forever() except KeyboardInterrupt: print 'Exiting'
class DGFANN_Node: def __init__(self, domain='0.0.0.0', port=5000): self._server = SimpleJSONRPCServer( (domain, port) ) functions = { 'run' : self.run } for name in functions.keys(): self._server.register_function(functions[name], name) self._pop = None def run(self, population): if type(population) <> list: return 'Population must be a list' if self._pop is not None: del(self._pop) self._pop = Populacao(None, verboso=True) for i in population: print i self._pop.addIndividuo(Cromossomo(9,i)) print 'Evaluating population...' self._pop.avaliarPopulacao(avaliacaoRNA) print 'Sending answers...' answersMSE = [] for i in self._pop.getIndividuos(): answersMSE.append(i.getAvaliacao()) print answersMSE return answersMSE def serve(self): try: print 'Server running. Press CTRL+C to stop...' self._server.serve_forever() except KeyboardInterrupt: print 'Exiting'
def main():
    """Load the Wikipedia link graph and name map, then serve graph queries
    (successors/predecessors/name) over JSON-RPC on localhost:8080."""
    print 'Building graph...'
    g = WikiGraph(MAX_NODES)
    # Edge list: one "u v" pair of node ids per line.
    with open('datasets/wikipedia.abc', 'r') as fh:
        for i, line in enumerate(fh):
            u, v = line.split()
            g.insert_edge(int(u), int(v))
    print 'Building name dict...'
    # Name map: one "id name" pair per line.
    names = {}
    with open('datasets/names.map', 'r') as fh:
        for line in fh:
            k, v = line.split()
            names[int(k)] = v
    print 'Starting JSON RPC Server...'
    server = SimpleJSONRPCServer(('localhost', 8080))
    server.register_function(g.successors, 'successors')
    server.register_function(g.predecessors, 'predecessors')
    # dict.get doubles as the 'name' lookup RPC method.
    server.register_function(names.get, 'name')
    server.serve_forever()
class OverlordClientDaemon(object):
  """Overlord Client Daemon.

  Runs a local JSON-RPC control server (forked into a child process) and
  holds connection state (host, port, TLS context, selected client,
  port-forward registry) in a DaemonState instance.
  """

  def __init__(self):
    self._state = DaemonState()  # shared connection/forwarding state
    self._server = None          # created by StartRPCServer()

  def Start(self):
    self.StartRPCServer()

  def StartRPCServer(self):
    """Create the RPC server, register exports, and fork a serving child."""
    self._server = SimpleJSONRPCServer(_OVERLORD_CLIENT_DAEMON_RPC_ADDR,
                                       logRequests=False)
    exports = [
        (self.State, 'State'),
        (self.Ping, 'Ping'),
        (self.GetPid, 'GetPid'),
        (self.Connect, 'Connect'),
        (self.Clients, 'Clients'),
        (self.SelectClient, 'SelectClient'),
        (self.AddForward, 'AddForward'),
        (self.RemoveForward, 'RemoveForward'),
        (self.RemoveAllForward, 'RemoveAllForward'),
    ]
    for func, name in exports:
      self._server.register_function(func, name)
    # Fork: the child serves forever; the parent returns to the caller.
    pid = os.fork()
    if pid == 0:
      self._server.serve_forever()

  @staticmethod
  def GetRPCServer():
    """Returns the Overlord client daemon RPC server, or None if down."""
    server = jsonrpclib.Server(
        'http://%s:%d' % _OVERLORD_CLIENT_DAEMON_RPC_ADDR)
    try:
      server.Ping()  # liveness probe against the daemon
    except Exception:
      return None
    return server

  def State(self):
    return self._state

  def Ping(self):
    return True

  def GetPid(self):
    return os.getpid()

  def _GetJSON(self, path):
    # Fetch `path` from the connected Overlord server and decode as JSON.
    url = '%s:%d%s' % (self._state.host, self._state.port, path)
    return json.loads(UrlOpen(self._state, url).read())

  def _TLSEnabled(self):
    """Determine if TLS is enabled on given server address."""
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
      # Allow any certificate since we only want to check if server talks TLS.
      context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
      context.verify_mode = ssl.CERT_NONE
      sock = context.wrap_socket(sock, server_hostname=self._state.host)
      sock.settimeout(_CONNECT_TIMEOUT)
      sock.connect((self._state.host, self._state.port))
      return True
    except ssl.SSLError:
      return False
    except socket.error:  # Connect refused or timeout
      raise
    except Exception:
      return False  # For whatever reason above failed, assume False

  def _CheckTLSCertificate(self):
    """Check TLS certificate.

    Returns:
      True if the server certificate verified against either the built-in
      CA bundle or a previously saved self-signed certificate; False
      otherwise.  On success the verified SSLContext is saved into
      self._state.ssl_context.
    """
    def _DoConnect(context):
      sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
      try:
        sock.settimeout(_CONNECT_TIMEOUT)
        sock = context.wrap_socket(sock, server_hostname=self._state.host)
        sock.connect((self._state.host, self._state.port))
      except ssl.SSLError:
        return False
      finally:
        sock.close()
      # Save SSLContext for future use.
      self._state.ssl_context = context
      return True

    # First try connect with built-in certificates
    tls_context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
    if _DoConnect(tls_context):
      return True

    # Try with already saved certificate, if any.
    tls_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
    tls_context.verify_mode = ssl.CERT_REQUIRED
    tls_context.check_hostname = True
    tls_cert_path = GetTLSCertPath(self._state.host)
    if os.path.exists(tls_cert_path):
      tls_context.load_verify_locations(tls_cert_path)
      self._state.ssl_self_signed = True
    return _DoConnect(tls_context)

  def Connect(self, host, port=_OVERLORD_HTTP_PORT, ssh_pid=None,
              username=None, password=None, orig_host=None):
    """Connect to an Overlord server, negotiating TLS if it is enabled.

    Returns True on success, a tuple describing the failure mode
    ('SSLCertificateChanged' / 'SSLVerifyFailed' / 'HTTPError'), or an
    error string for other exceptions.
    """
    self._state.username = username
    self._state.password = password
    self._state.host = host
    self._state.port = port
    self._state.ssl = False
    self._state.ssl_self_signed = False
    self._state.orig_host = orig_host
    self._state.ssh_pid = ssh_pid
    self._state.selected_mid = None

    tls_enabled = self._TLSEnabled()
    if tls_enabled:
      result = self._CheckTLSCertificate()
      if not result:
        # Distinguish "saved cert no longer matches" from "never trusted".
        if self._state.ssl_self_signed:
          return ('SSLCertificateChanged', ssl.get_server_certificate(
              (self._state.host, self._state.port)))
        else:
          return ('SSLVerifyFailed', ssl.get_server_certificate(
              (self._state.host, self._state.port)))

    try:
      self._state.ssl = tls_enabled
      UrlOpen(self._state, '%s:%d' % (host, port))
    except urllib2.HTTPError as e:
      return ('HTTPError', e.getcode(), str(e), e.read().strip())
    except Exception as e:
      return str(e)
    else:
      return True

  def Clients(self):
    """Return the sorted list of connected client mids (cached briefly)."""
    if time.time() - self._state.last_list <= _LIST_CACHE_TIMEOUT:
      return self._state.listing
    mids = [client['mid'] for client in self._GetJSON('/api/agents/list')]
    self._state.listing = sorted(list(set(mids)))
    self._state.last_list = time.time()
    return self._state.listing

  def SelectClient(self, mid):
    self._state.selected_mid = mid

  def AddForward(self, mid, remote, local, pid):
    # Record a port-forward; keyed by local port.
    self._state.forwards[local] = (mid, remote, pid)

  def RemoveForward(self, local_port):
    """Kill and forget the forward bound to local_port (best effort)."""
    try:
      unused_mid, unused_remote, pid = self._state.forwards[local_port]
      KillGraceful(pid)
      del self._state.forwards[local_port]
    except (KeyError, OSError):
      pass

  def RemoveAllForward(self):
    """Kill every registered forward process and clear the registry."""
    for unused_mid, unused_remote, pid in self._state.forwards.values():
      try:
        KillGraceful(pid)
      except OSError:
        pass
    self._state.forwards = {}
# Setup watchdog observer = Observer() observer.schedule(ReloadModelHandler(), path=MODEL_DIR, recursive=False) observer.start() def classify(text): # text_tokens = word_tokenize(text) # stemmed_tokens = [stemmer.stem(w.lower()) for w in text_tokens if not w in stop_words] # norm_sentence = ' '.join(stemmed_tokens) text_series = pd.Series([text]) predict_x = np.array(list(vocab_processor.transform(text_series))) print(predict_x) y_predicted = [ p['class'] for p in classifier.predict(predict_x, as_iterable=True) ] print(y_predicted[0]) topic = news_class.class_map[str(y_predicted[0])] return topic # Threading RPC Server RPC_SERVER = SimpleJSONRPCServer((SERVER_HOST, SERVER_PORT)) RPC_SERVER.register_function(classify, 'classify') print(("Starting RPC server on %s:%d" % (SERVER_HOST, SERVER_PORT))) RPC_SERVER.serve_forever()
def daemon(self):
    """Serve classification requests over HTTP (port 3333) and, when
    jsonrpclib is available, JSON-RPC (port 3334).

    Requests are keyed by a RIPEMD-160 hash of their JSON params so the
    same request is only classified once; results are polled via _result.
    """
    network = self.network
    devices = self.devices
    classifiers = {}  # hash -> ClassificationTaskThread

    def _classify(params):
        # Start (or reuse) a classification task for `params`; returns
        # {'result': {'hash': ...}} or {'error': ...}.
        ret = { }
        output_dim = {}
        hash = hashlib.new('ripemd160')
        hash.update(json.dumps(params))
        hash = hash.hexdigest()
        for k in params:
            try:
                params[k] = numpy.asarray(params[k], dtype='float32')
                if k != 'data':
                    output_dim[k] = network.n_out[k] # = [network.n_in,2] if k == 'data' else network.n_out[k]
            except Exception:
                if k != 'data' and not k in network.n_out:
                    ret['error'] = 'unknown target: %s' % k
                else:
                    ret['error'] = 'unable to convert %s to an array from value %s' % (k,str(params[k]))
                break
        if not 'error' in ret:
            # NOTE(review): the dataset is constructed twice — once here
            # (unguarded) and again inside the try below; the first call
            # looks redundant and can raise outside the handler. Confirm
            # before removing.
            data = StaticDataset(data=[params], output_dim=output_dim)
            data.init_seq_order()
            try:
                data = StaticDataset(data=[params], output_dim=output_dim)
                data.init_seq_order()
            except Exception:
                ret['error'] = "invalid data: %s" % params
            else:
                batches = data.generate_batches(recurrent_net=network.recurrent,
                                                batch_size=sys.maxint, max_seqs=1)
                if not hash in classifiers:
                    classifiers[hash] = ClassificationTaskThread(network, devices, data, batches)
                    classifiers[hash].json_params = params
                    print >> log.v3, "classifier started:", hash
                ret['result'] = { 'hash' : hash }
        return ret

    def _result(hash):
        # Poll a previously started task; returns results once the
        # worker thread has finished.
        if not classifiers[hash].isAlive():
            return { 'result' : { k : classifiers[hash].result[k].tolist() for k in classifiers[hash].result } }
        else:
            return { 'error' : "classification in progress"}

    class RequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
        # Plain-HTTP front end: POST /classify starts a task,
        # GET /result/<hash> polls it.
        def do_POST(self):
            if len(self.path) == 0:
                self.send_response(404)
                return
            self.path = self.path[1:]
            ret = {}
            if self.path in ['classify']:
                ctype, pdict = cgi.parse_header(self.headers.getheader('content-type'))
                if ctype == 'application/json':
                    length = int(self.headers.getheader('content-length'))
                    params = cgi.parse_qs(self.rfile.read(length),keep_blank_values=1)
                    try:
                        content = params.keys()[0].decode('utf-8') # this is weird
                        params = json.loads(content)
                    except Exception:
                        ret['error'] = 'unable to decode object'
                    else:
                        ret.update(_classify(params))
                else:
                    ret['error'] = 'invalid header: %s' % ctype
            else:
                ret['error'] = 'invalid command: %s' % self.path
            self.send_response(200)
            self.send_header('Content-Type', 'application/json')
            self.wfile.write("\n")
            self.wfile.write(json.dumps(ret))
            self.end_headers()

        def do_GET(self):
            if len(self.path.replace('/', '')) == 0:
                self.send_response(200)
            else:
                if len(self.path) == 0:
                    self.send_response(404)
                    return
                ret = { 'error' : "" }
                self.path = self.path[1:].split('/')
                if self.path[0] in ['result']:
                    if self.path[1] in classifiers:
                        if not classifiers[self.path[1]].isAlive():
                            ret['result'] = { k : classifiers[self.path[1]].result[k].tolist() for k in classifiers[self.path[1]].result }
                        else:
                            ret['error'] = "classification in progress"
                    else:
                        # NOTE(review): missing %s in this format string —
                        # raises TypeError instead of reporting the hash.
                        ret['error'] = "unknown hash: " % self.path[1]
                else:
                    ret['error'] = "invalid command: %s" % self.path[0]
                self.send_response(200)
                self.send_header('Content-Type', 'application/json')
                self.wfile.write("\n")
                self.wfile.write(json.dumps(ret))
                self.end_headers()

        def log_message(self, format, *args):
            pass  # suppress per-request logging

    class ThreadingServer(SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer):
        pass

    httpd = ThreadingServer(("", 3333), RequestHandler)
    print >> log.v3, "httpd listening on port", 3333
    try:
        from jsonrpclib.SimpleJSONRPCServer import SimpleJSONRPCServer # https://pypi.python.org/pypi/jsonrpclib/0.1.6
    except Exception:
        # No jsonrpclib: serve HTTP only, in the foreground.
        httpd.serve_forever()
    else:
        # jsonrpclib available: HTTP in a background thread, JSON-RPC here.
        from thread import start_new_thread
        start_new_thread(httpd.serve_forever, ())
        server = SimpleJSONRPCServer(('localhost', 3334))
        server.register_function(_classify, 'classify')
        server.register_function(_result, 'result')
        print >> log.v3, "json-rpc listening on port", 3334
        server.serve_forever()
def daemon(self):
    """Serve classification requests over HTTP (port 3333) and, when
    jsonrpclib is available, JSON-RPC (port 3334).

    Requests are keyed by a RIPEMD-160 hash of their JSON params so the
    same request is only classified once; results are polled via _result.
    """
    network = self.network
    devices = self.devices
    classifiers = {}  # hash -> ClassificationTaskThread

    def _classify(params):
        # Start (or reuse) a classification task for `params`; returns
        # {'result': {'hash': ...}} or {'error': ...}.
        ret = {}
        output_dim = {}
        hash = hashlib.new('ripemd160')
        hash.update(json.dumps(params))
        hash = hash.hexdigest()
        for k in params:
            try:
                params[k] = numpy.asarray(params[k], dtype='float32')
                if k != 'data':
                    output_dim[k] = network.n_out[
                        k]  # = [network.n_in,2] if k == 'data' else network.n_out[k]
            except Exception:
                if k != 'data' and not k in network.n_out:
                    ret['error'] = 'unknown target: %s' % k
                else:
                    ret['error'] = 'unable to convert %s to an array from value %s' % (
                        k, str(params[k]))
                break
        if not 'error' in ret:
            # NOTE(review): the dataset is constructed twice — once here
            # (unguarded) and again inside the try below; the first call
            # looks redundant and can raise outside the handler. Confirm
            # before removing.
            data = StaticDataset(data=[params], output_dim=output_dim)
            data.init_seq_order()
            try:
                data = StaticDataset(data=[params], output_dim=output_dim)
                data.init_seq_order()
            except Exception:
                ret['error'] = "invalid data: %s" % params
            else:
                batches = data.generate_batches(
                    recurrent_net=network.recurrent,
                    batch_size=sys.maxsize,
                    max_seqs=1)
                if not hash in classifiers:
                    classifiers[hash] = ClassificationTaskThread(
                        network, devices, data, batches)
                    classifiers[hash].json_params = params
                    print >> log.v3, "classifier started:", hash
                ret['result'] = {'hash': hash}
        return ret

    def _result(hash):
        # Poll a previously started task; returns results once the
        # worker thread has finished.
        if not classifiers[hash].isAlive():
            return {
                'result': {
                    k: classifiers[hash].result[k].tolist()
                    for k in classifiers[hash].result
                }
            }
        else:
            return {'error': "classification in progress"}

    class RequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
        # Plain-HTTP front end: POST /classify starts a task,
        # GET /result/<hash> polls it.
        def do_POST(self):
            if len(self.path) == 0:
                self.send_response(404)
                return
            self.path = self.path[1:]
            ret = {}
            if self.path in ['classify']:
                ctype, pdict = cgi.parse_header(
                    self.headers.getheader('content-type'))
                if ctype == 'application/json':
                    length = int(self.headers.getheader('content-length'))
                    params = cgi.parse_qs(self.rfile.read(length),
                                          keep_blank_values=1)
                    try:
                        content = params.keys()[0].decode(
                            'utf-8')  # this is weird
                        params = json.loads(content)
                    except Exception:
                        ret['error'] = 'unable to decode object'
                    else:
                        ret.update(_classify(params))
                else:
                    ret['error'] = 'invalid header: %s' % ctype
            else:
                ret['error'] = 'invalid command: %s' % self.path
            self.send_response(200)
            self.send_header('Content-Type', 'application/json')
            self.wfile.write("\n")
            self.wfile.write(json.dumps(ret))
            self.end_headers()

        def do_GET(self):
            if len(self.path.replace('/', '')) == 0:
                self.send_response(200)
            else:
                if len(self.path) == 0:
                    self.send_response(404)
                    return
                ret = {'error': ""}
                self.path = self.path[1:].split('/')
                if self.path[0] in ['result']:
                    if self.path[1] in classifiers:
                        if not classifiers[self.path[1]].isAlive():
                            ret['result'] = {
                                k: classifiers[
                                    self.path[1]].result[k].tolist()
                                for k in classifiers[self.path[1]].result
                            }
                        else:
                            ret['error'] = "classification in progress"
                    else:
                        # NOTE(review): missing %s in this format string —
                        # raises TypeError instead of reporting the hash.
                        ret['error'] = "unknown hash: " % self.path[1]
                else:
                    ret['error'] = "invalid command: %s" % self.path[0]
                self.send_response(200)
                self.send_header('Content-Type', 'application/json')
                self.wfile.write("\n")
                self.wfile.write(json.dumps(ret))
                self.end_headers()

        def log_message(self, format, *args):
            pass  # suppress per-request logging

    class ThreadingServer(SocketServer.ThreadingMixIn,
                          BaseHTTPServer.HTTPServer):
        pass

    httpd = ThreadingServer(("", 3333), RequestHandler)
    print >> log.v3, "httpd listening on port", 3333
    try:
        from jsonrpclib.SimpleJSONRPCServer import SimpleJSONRPCServer  # https://pypi.python.org/pypi/jsonrpclib/0.1.6
    except Exception:
        # No jsonrpclib: serve HTTP only, in the foreground.
        httpd.serve_forever()
    else:
        # jsonrpclib available: HTTP in a background thread, JSON-RPC here.
        from thread import start_new_thread
        start_new_thread(httpd.serve_forever, ())
        server = SimpleJSONRPCServer(('localhost', 3334))
        server.register_function(_classify, 'classify')
        server.register_function(_result, 'result')
        print >> log.v3, "json-rpc listening on port", 3334
        server.serve_forever()
def main():
    """Expose an AudaciousProxy instance over JSON-RPC on port 8888."""
    proxy = AudaciousProxy()
    rpc = SimpleJSONRPCServer(('', 8888))  # bind all interfaces
    rpc.register_instance(proxy)
    rpc.serve_forever()
def server():
    """Run a JSON-RPC server on localhost:7002 exposing printName."""
    rpc = SimpleJSONRPCServer(('localhost', 7002))
    rpc.register_function(printName)
    print("Starting server")
    rpc.serve_forever()
class GetworkWrapper:
  """
  The main server class.  It sets up a JSON-RPC server and handles
  requests coming in.
  """

  def __init__(self, backend, host, port):
    self.backend = backend
    self.server = SimpleJSONRPCServer((host, port))

    # getwork with no argument returns fresh work; with data it submits
    # a solved share.
    def getwork(data=None):
      if data is None:
        return self.createWork()
      return self.submitWork(data)
    self.server.register_function(getwork)

    # We use our own extra nonce to not return the same work twice if
    # asked again for new work.
    self.extraNonce = 0

    # Dictionary that holds all created works so they can be retrieved when
    # necessary for matching.  The key is the (byte-order swapped) block merkle
    # hash, which is not changed by the miner when processing the work.
    # This is cleared only once a submitted block was accepted.  We do not try
    # to detect if the chain tip changes externally.
    self.works = {}

  def keyForWork(self, data):
    """
    Returns the key used in self.works for the given, hex-encoded and
    byte-swapped, getwork 'data'.
    """
    # Bytes 36..68 of the 80-byte header are the merkle root; the slice
    # indices are doubled because `data` is hex-encoded.
    return data[2 * 36:2 * 68]

  def createWork(self):
    """Build a new getwork job from the backend's aux block."""
    auxblock = self.backend.getauxblock()
    (tx, hdr) = auxpow.constructAuxpow(auxblock['hash'])
    # Stamp our extra nonce into the first four header bytes so repeated
    # requests never hand out identical work.
    en = self.extraNonce
    self.extraNonce = (self.extraNonce + 1) % (1 << 32)
    hdrBytes = bytearray(codecs.decode(hdr, 'hex_codec'))
    hdrBytes[0:4] = struct.pack('<I', en)
    formatted = auxpow.getworkByteswap(hdrBytes)
    # Pad to the 128-byte getwork block and add the SHA-256 padding marks
    # (0x80 terminator and the bit-length 0x0280 = 640 at the end).
    formatted += bytearray([0] * (128 - len(formatted)))
    formatted[83] = 0x80
    formatted[-4] = 0x80
    formatted[-3] = 0x02
    work = codecs.encode(formatted, 'hex_codec')
    self.works[self.keyForWork(work)] = {"auxblock": auxblock, "tx": tx}
    return {"data": work, "target": auxblock['_target']}

  def submitWork(self, data):
    """Match submitted 'data' to a stored work and submit it upstream.

    Returns the backend's acceptance result, or False for stale/unknown
    work or a backend protocol error.
    """
    key = self.keyForWork(data)
    if not key in self.works:
      print('Error: stale / unknown work submitted')
      return False
    w = self.works[key]
    dataBytes = codecs.decode(data, 'hex_codec')
    fixedBytes = auxpow.getworkByteswap(dataBytes[:80])
    hdrHex = codecs.encode(fixedBytes, 'hex_codec')
    auxpowHex = auxpow.finishAuxpow(w['tx'], hdrHex)
    try:
      res = self.backend.submitauxblock(w['auxblock']['hash'], auxpowHex)
    except ProtocolError as exc:
      print('Error submitting work: %s' % exc)
      return False
    # Clear cache of created works when a new block was accepted.
    if res:
      self.works = {}
    return res

  def serve(self):
    self.server.serve_forever()
class CTRexServer(object):
    """This class defines the server side of the RESTfull interaction with TRex"""
    DEFAULT_TREX_PATH = '/auto/proj-pcube-b/apps/PL-b/tools/bp_sim2/v1.55/' #'/auto/proj-pcube-b/apps/PL-b/tools/nightly/trex_latest'
    TREX_START_CMD = './t-rex-64'
    DEFAULT_FILE_PATH = '/tmp/trex_files/'

    def __init__(self, trex_path, trex_files_path, trex_host='0.0.0.0',
                 trex_daemon_port=8090, trex_zmq_port=4500):
        """
        Parameters
        ----------
        trex_path : str
            path to the TRex installation (must contain the t-rex-64 binary)
        trex_files_path : str
            directory used to store files pushed by clients
        trex_host : str
            a string of the t-rex ip address or hostname.
            default value: machine hostname as fetched from socket.gethostname()
        trex_daemon_port : int
            the port number on which the trex-daemon server can be reached
            default value: 8090
        trex_zmq_port : int
            the port number on which trex's zmq module will interact with daemon server
            default value: 4500

        Instantiate a TRex client object, and connecting it to listening daemon-server
        """
        self.TREX_PATH = os.path.abspath(os.path.dirname(trex_path + '/'))
        self.trex_files_path = os.path.abspath(
            os.path.dirname(trex_files_path + '/'))
        # Abort early (exit) when paths are unusable.
        self.__check_trex_path_validity()
        self.__check_files_path_validity()
        self.trex = CTRex()
        self.trex_version = None  # cached base64 version text, filled lazily
        self.trex_host = trex_host
        self.trex_daemon_port = trex_daemon_port
        self.trex_zmq_port = trex_zmq_port
        self.trex_server_path = "http://{hostname}:{port}".format(
            hostname=trex_host, port=trex_daemon_port)
        # Serializes reservation / run-start operations.
        self.start_lock = threading.Lock()
        self.__reservation = None
        self.zmq_monitor = ZmqMonitorSession(
            self.trex, self.trex_zmq_port
        )  # intiate single ZMQ monitor thread for server usage

    def add(self, x, y):
        # Trivial RPC used for connectivity/sanity testing.
        print "server function add ", x, y
        logger.info(
            "Processing add function. Parameters are: {0}, {1} ".format(x, y))
        return x + y
        # return Fault(-10, "")

    def push_file(self, filename, bin_data):
        """Save base64-encoded `bin_data` under `filename` in the files dir."""
        logger.info("Processing push_file() command.")
        try:
            filepath = os.path.abspath(
                os.path.join(self.trex_files_path, filename))
            with open(filepath, 'wb') as f:
                f.write(binascii.a2b_base64(bin_data))
            logger.info(
                "push_file() command finished. `{name}` was saved at {fpath}".
                format(name=filename, fpath=self.trex_files_path))
            return True
        except IOError as inst:
            logger.error("push_file method failed. " + str(inst))
            return False

    def connectivity_check(self):
        logger.info("Processing connectivity_check function.")
        return True

    def start(self):
        """This method fires up the daemon server based on initialized parameters of the class"""
        # initialize the server instance with given resources
        try:
            print "Firing up TRex REST daemon @ port {trex_port} ...\n".format(
                trex_port=self.trex_daemon_port)
            logger.info(
                "Firing up TRex REST daemon @ port {trex_port} ...".format(
                    trex_port=self.trex_daemon_port))
            logger.info("current working dir is: {0}".format(self.TREX_PATH))
            logger.info("current files dir is : {0}".format(
                self.trex_files_path))
            logger.debug(
                "Starting TRex server. Registering methods to process.")
            logger.info(self.get_trex_version(base64=False))
            self.server = SimpleJSONRPCServer(
                (self.trex_host, self.trex_daemon_port))
        except socket.error as e:
            if e.errno == errno.EADDRINUSE:
                logger.error(
                    "TRex server requested address already in use. Aborting server launching."
                )
                print "TRex server requested address already in use. Aborting server launching."
                raise socket.error(
                    errno.EADDRINUSE,
                    "TRex daemon requested address already in use. "
                    "Server launch aborted. Please make sure no other process is "
                    "using the desired server properties.")
            elif isinstance(e, socket.gaierror) and e.errno == -3:
                # handling Temporary failure in name resolution exception
                raise socket.gaierror(
                    -3, "Temporary failure in name resolution.\n"
                    "Make sure provided hostname has DNS resolving.")
            else:
                raise

        # set further functionality and peripherals to server instance
        try:
            self.server.register_function(self.add)
            self.server.register_function(self.get_trex_log)
            self.server.register_function(self.get_trex_daemon_log)
            self.server.register_function(self.get_trex_version)
            self.server.register_function(self.connectivity_check)
            self.server.register_function(self.start_trex)
            self.server.register_function(self.stop_trex)
            self.server.register_function(self.wait_until_kickoff_finish)
            self.server.register_function(self.get_running_status)
            self.server.register_function(self.is_running)
            self.server.register_function(self.get_running_info)
            self.server.register_function(self.is_reserved)
            self.server.register_function(self.get_files_path)
            self.server.register_function(self.push_file)
            self.server.register_function(self.reserve_trex)
            self.server.register_function(self.cancel_reservation)
            self.server.register_function(self.force_trex_kill)
            # Shut down cleanly on Ctrl+Z / termination.
            signal.signal(signal.SIGTSTP, self.stop_handler)
            signal.signal(signal.SIGTERM, self.stop_handler)
            self.zmq_monitor.start()
            self.server.serve_forever()
        except KeyboardInterrupt:
            logger.info("Daemon shutdown request detected.")
        finally:
            self.zmq_monitor.join()  # close ZMQ monitor thread resources
            self.server.shutdown()
        pass

    # get files from Trex server and return their content (mainly for logs)
    @staticmethod
    def _pull_file(filepath):
        try:
            with open(filepath, 'rb') as f:
                file_content = f.read()
                return binascii.b2a_base64(file_content)
        except Exception as e:
            err_str = "Can't get requested file: {0}, possibly due to TRex that did not run".format(
                filepath)
            logger.error('{0}, error: {1}'.format(err_str, e))
            return Fault(-33, err_str)

    # get Trex log /tmp/trex.txt
    def get_trex_log(self):
        logger.info("Processing get_trex_log() command.")
        return self._pull_file('/tmp/trex.txt')

    # get daemon log /var/log/trex/trex_daemon_server.log
    def get_trex_daemon_log(self):
        logger.info("Processing get_trex_daemon_log() command.")
        return self._pull_file('/var/log/trex/trex_daemon_server.log')

    # get Trex version from ./t-rex-64 --help (last 4 lines)
    def get_trex_version(self, base64=True):
        try:
            logger.info("Processing get_trex_version() command.")
            if not self.trex_version:
                help_print = subprocess.Popen(['./t-rex-64', '--help'],
                                              cwd=self.TREX_PATH,
                                              stdout=subprocess.PIPE,
                                              stderr=subprocess.PIPE)
                help_print.wait()
                help_print_stdout = help_print.stdout.read()
                # Version info is printed as the last lines of --help.
                self.trex_version = binascii.b2a_base64('\n'.join(
                    help_print_stdout.split('\n')[-5:-1]))
            if base64:
                return self.trex_version
            else:
                return binascii.a2b_base64(self.trex_version)
        except Exception as e:
            err_str = "Can't get trex version, error: {0}".format(e)
            logger.error(err_str)
            return Fault(-33, err_str)

    def stop_handler(self, signum, frame):
        """Signal handler: stop a running TRex session and exit."""
        logger.info("Daemon STOP request detected.")
        if self.is_running():
            # in case TRex process is currently running, stop it before terminating server process
            self.stop_trex(self.trex.get_seq())
        sys.exit(0)

    def is_running(self):
        run_status = self.trex.get_status()
        logger.info(
            "Processing is_running() command. Running status is: {stat}".
            format(stat=run_status))
        if run_status == TRexStatus.Running:
            return True
        else:
            return False

    def is_reserved(self):
        logger.info("Processing is_reserved() command.")
        return bool(self.__reservation)

    def get_running_status(self):
        run_status = self.trex.get_status()
        logger.info(
            "Processing get_running_status() command. Running status is: {stat}"
            .format(stat=run_status))
        return {
            'state': run_status.value,
            'verbose': self.trex.get_verbose_status()
        }

    def get_files_path(self):
        logger.info("Processing get_files_path() command.")
        return self.trex_files_path

    def reserve_trex(self, user):
        """Reserve TRex for `user`; returns True or a Fault on denial."""
        if user == "":
            logger.info(
                "TRex reservation cannot apply to empty string user. Request denied."
            )
            return Fault(
                -33,
                "TRex reservation cannot apply to empty string user. Request denied."
            )

        with self.start_lock:
            logger.info("Processing reserve_trex() command.")
            if self.is_reserved():
                if user == self.__reservation['user']:
                    # return True is the same user is asking and already has the resrvation
                    logger.info(
                        "the same user is asking and already has the resrvation. Re-reserving TRex."
                    )
                    return True
                logger.info(
                    "TRex is already reserved to another user ({res_user}), cannot reserve to another user."
                    .format(res_user=self.__reservation['user']))
                return Fault(
                    -33,
                    "TRex is already reserved to another user ({res_user}). Please make sure TRex is free before reserving it."
                    .format(res_user=self.__reservation['user']
                            ))  # raise at client TRexInUseError
            elif self.trex.get_status() != TRexStatus.Idle:
                logger.info(
                    "TRex is currently running, cannot reserve TRex unless in Idle state."
                )
                return Fault(
                    -13,
                    'TRex is currently running, cannot reserve TRex unless in Idle state. Please try again when TRex run finished.'
                )  # raise at client TRexInUseError
            else:
                logger.info(
                    "TRex is now reserved for user ({res_user}).".format(
                        res_user=user))
                self.__reservation = {'user': user, 'since': time.ctime()}
                logger.debug("Reservation details: " + str(self.__reservation))
                return True

    def cancel_reservation(self, user):
        """Cancel `user`'s reservation; only the holder may cancel."""
        with self.start_lock:
            logger.info("Processing cancel_reservation() command.")
            if self.is_reserved():
                if self.__reservation['user'] == user:
                    logger.info(
                        "TRex reservation to {res_user} has been canceled successfully."
                        .format(res_user=self.__reservation['user']))
                    self.__reservation = None
                    return True
                else:
                    logger.warning(
                        "TRex is reserved to different user than the provided one. Reservation wasn't canceled."
                    )
                    return Fault(
                        -33,
                        "Cancel reservation request is available to the user that holds the reservation. Request denied"
                    )  # raise at client TRexRequestDenied
            else:
                logger.info(
                    "TRex is not reserved to anyone. No need to cancel anything"
                )
                assert (self.__reservation is None)
                return False

    def start_trex(self, trex_cmd_options, user, block_to_success=True,
                   timeout=30):
        """Launch a TRex run for `user`; returns the run's seq number or a Fault."""
        with self.start_lock:
            logger.info("Processing start_trex() command.")
            if self.is_reserved():
                # check if this is not the user to which TRex is reserved
                if self.__reservation['user'] != user:
                    logger.info(
                        "TRex is reserved to another user ({res_user}). Only that user is allowed to initiate new runs."
                        .format(res_user=self.__reservation['user']))
                    return Fault(
                        -33,
                        "TRex is reserved to another user ({res_user}). Only that user is allowed to initiate new runs."
                        .format(res_user=self.__reservation['user']
                                ))  # raise at client TRexRequestDenied
            elif self.trex.get_status() != TRexStatus.Idle:
                logger.info(
                    "TRex is already taken, cannot create another run until done."
                )
                return Fault(-13, '')  # raise at client TRexInUseError

            try:
                server_cmd_data = self.generate_run_cmd(**trex_cmd_options)
                self.zmq_monitor.first_dump = True
                self.trex.start_trex(self.TREX_PATH, server_cmd_data)
                logger.info("TRex session has been successfully initiated.")
                if block_to_success:
                    # delay server response until TRex is at 'Running' state.
                    start_time = time.time()
                    trex_state = None
                    while (time.time() - start_time) < timeout:
                        trex_state = self.trex.get_status()
                        if trex_state != TRexStatus.Starting:
                            break
                        else:
                            time.sleep(0.5)

                    # check for TRex run started normally
                    if trex_state == TRexStatus.Starting:  # reached timeout
                        logger.warning(
                            "TimeoutError: TRex initiation outcome could not be obtained, since TRex stays at Starting state beyond defined timeout."
                        )
                        return Fault(
                            -12,
                            'TimeoutError: TRex initiation outcome could not be obtained, since TRex stays at Starting state beyond defined timeout.'
                        )  # raise at client TRexWarning
                    elif trex_state == TRexStatus.Idle:
                        return Fault(-11, self.trex.get_verbose_status()
                                     )  # raise at client TRexError

                # reach here only if TRex is at 'Running' state
                self.trex.gen_seq()
                return self.trex.get_seq(
                )  # return unique seq number to client
            except TypeError as e:
                logger.error(
                    "TRex command generation failed, probably because either -f (traffic generation .yaml file) and -c (num of cores) was not specified correctly.\nReceived params: {params}"
                    .format(params=trex_cmd_options))
                raise TypeError(
                    'TRex -f (traffic generation .yaml file) and -c (num of cores) must be specified.'
                )

    def stop_trex(self, seq):
        """Stop the run identified by `seq`; only its initiator may stop it."""
        logger.info("Processing stop_trex() command.")
        if self.trex.get_seq() == seq:
            logger.debug("Abort request legit since seq# match")
            return self.trex.stop_trex()
        else:
            if self.trex.get_status() != TRexStatus.Idle:
                logger.warning(
                    "Abort request is only allowed to process initiated the run. Request denied."
                )
                return Fault(
                    -33,
                    'Abort request is only allowed to process initiated the run. Request denied.'
                )  # raise at client TRexRequestDenied
            else:
                return False

    def force_trex_kill(self):
        logger.info(
            "Processing force_trex_kill() command. --> Killing TRex session indiscriminately."
        )
        return self.trex.stop_trex()

    def wait_until_kickoff_finish(self, timeout=40):
        # block until TRex exits Starting state
        logger.info("Processing wait_until_kickoff_finish() command.")
        trex_state = None
        start_time = time.time()
        while (time.time() - start_time) < timeout:
            trex_state = self.trex.get_status()
            if trex_state != TRexStatus.Starting:
                return
        return Fault(
            -12,
            'TimeoutError: TRex initiation outcome could not be obtained, since TRex stays at Starting state beyond defined timeout.'
        )  # raise at client TRexWarning

    def get_running_info(self):
        logger.info("Processing get_running_info() command.")
        return self.trex.get_running_info()

    def generate_run_cmd(self, f, d, iom=0, export_path="/tmp/trex.txt",
                         **kwargs):
        """ generate_run_cmd(self, trex_cmd_options, export_path) -> str

        Generates a custom running command for the kick-off of the TRex traffic generator.
        Returns a tuple of command (string) and export path (string) to be issued on the trex server

        Parameters
        ----------
        trex_cmd_options : str
            Defines the exact command to run on the t-rex
            Example: "-c 2 -m 0.500000 -d 100 -f cap2/sfr.yaml --nc -p -l 1000"
        export_path : str
            a full system path to which the results of the trex-run will be logged.
        """
        if 'results_file_path' in kwargs:
            export_path = kwargs['results_file_path']
            del kwargs['results_file_path']

        # adding additional options to the command
        trex_cmd_options = ''
        for key, value in kwargs.iteritems():
            tmp_key = key.replace('_', '-')
            # Single-letter options get one dash, long options two.
            dash = ' -' if (len(key) == 1) else ' --'
            if (value == True) and (
                    str(value) != '1'
            ):  # checking also int(value) to excape from situation that 1 translates by python to 'True'
                trex_cmd_options += (dash + tmp_key)
            else:
                trex_cmd_options += (dash +
                                     '{k} {val}'.format(k=tmp_key, val=value))

        cmd = "{run_command} -f {gen_file} -d {duration} --iom {io} {cmd_options} --no-key > {export}".format(  # -- iom 0 disables the periodic log to the screen (not needed)
            run_command=self.TREX_START_CMD,
            gen_file=f,
            duration=d,
            cmd_options=trex_cmd_options,
            io=iom,
            export=export_path)
        logger.info("TREX FULL COMMAND: {command}".format(command=cmd))
        return (cmd, export_path, long(d))

    def __check_trex_path_validity(self):
        # check for executable existance
        if not os.path.exists(self.TREX_PATH + '/t-rex-64'):
            print "The provided TRex path do not contain an executable TRex file.\nPlease check the path and retry."
            logger.error(
                "The provided TRex path do not contain an executable TRex file"
            )
            exit(-1)
        # check for executable permissions
        st = os.stat(self.TREX_PATH + '/t-rex-64')
        if not bool(st.st_mode & (stat.S_IXUSR)):
            print "The provided TRex path do not contain an TRex file with execution privileges.\nPlease check the files permissions and retry."
            logger.error(
                "The provided TRex path do not contain an TRex file with execution privileges"
            )
            exit(-1)
        else:
            return

    def __check_files_path_validity(self):
        # first, check for path existance. otherwise, try creating it with appropriate credentials
        if not os.path.exists(self.trex_files_path):
            try:
                os.makedirs(self.trex_files_path, 0660)
                return
            except os.error as inst:
                print "The provided files path does not exist and cannot be created with needed access credentials using root user.\nPlease check the path's permissions and retry."
                logger.error(
                    "The provided files path does not exist and cannot be created with needed access credentials using root user."
                )
                exit(-1)
        elif os.access(self.trex_files_path, os.W_OK):
            return
        else:
            print "The provided files path has insufficient access credentials for root user.\nPlease check the path's permissions and retry."
            logger.error(
                "The provided files path has insufficient access credentials for root user"
            )
            exit(-1)
def main():
    """Serve the predict function over JSON-RPC on localhost:1006."""
    rpc = SimpleJSONRPCServer(('localhost', 1006))
    rpc.register_function(predict)
    print("Start server")
    rpc.serve_forever()
import os

from jsonrpclib.SimpleJSONRPCServer import SimpleJSONRPCServer


def _add(x, y):
    """Return the sum of the two arguments (RPC method 'add')."""
    return x + y


def _ping(x):
    """Echo the argument back to the caller (RPC method 'ping')."""
    return x


# Listen on all interfaces; the port comes from $PORT (default 5000).
port = int(os.environ.get("PORT", 5000))
server = SimpleJSONRPCServer(('0.0.0.0', port))
server.register_function(pow)
server.register_function(_add, 'add')
server.register_function(_ping, 'ping')
server.serve_forever()
        # NOTE(review): this chunk begins mid-function — the enclosing def and
        # the `if` matching the `else:` below start before the visible source.
        # Branch 1: verbose report — name plus pretty-printed call args.
        out = [
            '\n'.join((name, pformat(mock_.call_args_list)))
            for name, mock_ in mocker.mocks.items()
        ]
        return '\n'.join(out)
    else:
        # Branch 2: terse report — name plus call count only.
        out = [
            name + ' ' + str(mock_.call_count)
            for name, mock_ in mocker.mocks.items()
        ]
        return '\n'.join(out)


def givememoney():
    # Drops into the debugger when invoked over RPC.
    import pdb
    pdb.set_trace()


# Register/unregister mock instances and functions with the mocker.
inst = ArithA()
mocker.register_instance(inst)
mocker.register_instance(ArithB())
mocker.unregister_instance(inst)
mocker.register_instance(ArithA2())
mocker.register_function(printme)
mocker.register_function(givememoney)

server = SimpleJSONRPCServer(('localhost', 8080), logRequests=False)
server.register_instance(mocker)
print('Server listening on %s:%s' % server.socket.getsockname())
server.serve_forever()