class AxonController(object):
    """Own an AxonService instance and expose it over a threaded RPyC server.

    Also drives the service's traffic generators on start/stop.
    """

    def __init__(self):
        self.axon_port = conf.AXON_PORT
        self.service = AxonService()
        self.protocol_config = self.service.RPYC_PROTOCOL_CONFIG
        self.logger = logging.getLogger(__name__)
        self.logger.setLevel(logging.WARN)
        # Thread-pool server bound to the configured axon port; reuse_addr
        # lets us rebind quickly after a restart.
        self.axon_service = ThreadPoolServer(
            self.service,
            port=self.axon_port,
            reuse_addr=True,
            protocol_config=self.protocol_config,
            logger=self.logger,
            nbThreads=50)

    def start(self):
        """Start traffic servers/clients, then serve RPC requests (blocking).

        Traffic failures are logged but do not prevent the RPC server from
        starting.
        """
        try:
            self.service.exposed_traffic.start_servers()
            self.service.exposed_traffic.start_clients()
        except Exception:
            self.logger.exception("Ooops!! Exception during Traffic Start")
        self.axon_service.start()

    def stop(self):
        """Stop traffic clients/servers (best effort), then close the RPC server."""
        try:
            self.service.exposed_traffic.stop_clients()
            self.service.exposed_traffic.stop_servers()
        except Exception:
            self.logger.exception("Ooops!! Exception during Traffic Stop")
        self.axon_service.close()
def run_worker_process(self):
    """Set up a registry client from instance config and serve requests.

    Blocks in ``ThreadPoolServer.start()`` until the server shuts down.
    Raises ``ValueError`` when a TCP registry is selected without a host.
    """
    setup_logger(self.quiet, log)
    if self.registry_type == "UDP":
        # Default to the broadcast address when no registry host was given.
        if self.registry_host is None:
            self.registry_host = "255.255.255.255"
        self.registrar = UDPRegistryClient(ip=self.registry_host,
                                           port=self.registry_port)
    else:
        if self.registry_host is None:
            raise ValueError(
                "With TCP registry, you must specify --registry-host")
        self.registrar = TCPRegistryClient(ip=self.registry_host,
                                           port=self.registry_port)
    server = ThreadPoolServer(
        ServiceEnvelope.SERVICE_CLASS,
        hostname=self.host,
        port=self.port,
        reuse_addr=True,
        ipv6=self.ipv6,
        registrar=self.registrar,
        auto_register=self.auto_register,
        nbThreads=self.max_client_count,
        requestBatchSize=self.max_requests_per_client,
        protocol_config={'exposed_prefix': '', 'log_exceptions': True})
    server.start()
def _remote():
    """Code to execute on forked process.

    Builds the SRPO service, publishes (host, port, pid) under ``name`` in
    the sqlite registry, then serves forever.
    """
    service = _create_srpo_service(obj, name, registry_path=registry_path)
    # set new process group
    server = ThreadPoolServer(
        service(),
        hostname="localhost",
        nbThreads=server_threads,
        protocol_config={"allow_all_attrs": True},
        port=port,
    )
    sql_kwargs = {
        "filename": server_registry.filename,
        "tablename": server_registry.tablename,
        "flag": "c",
    }
    # register new server
    registry = SqliteDict(**sql_kwargs)
    registry[name] = (server.host, server.port, os.getpid())
    registry.commit()
    # get a new view of the registry, make sure name is there
    assert name in SqliteDict(**sql_kwargs)
    service._server = server
    server.start()
def run_server():
    """Run the DB-interface RPyC server on localhost:12345 (never returns)."""
    print("Started.")
    rpc_logger = logging.getLogger("Main.RPyCServer")
    pool_server = ThreadPoolServer(
        service=DbInterfaceServer,
        port=12345,
        hostname="localhost",
        logger=rpc_logger,
        nbThreads=6,
    )
    pool_server.start()
def run_server():
    """Create and start the DB-interface thread-pool server (blocking)."""
    print("Started.")
    server_log = logging.getLogger("Main.RPyCServer")
    ThreadPoolServer(service=DbInterfaceServer, port=12345,
                     hostname='localhost', logger=server_log,
                     nbThreads=6).start()
def main():
    """Parse CLI options and run the harvester RPC bot as a daemon.

    Blocks in ``t.start()`` inside the daemon context; the stdout/stderr
    redirect files are guaranteed to be closed on exit.
    """
    # arguments
    parser = argparse.ArgumentParser()
    parser.add_argument('--pid', action='store', dest='pid',
                        default='/var/tmp/harvester_rpc.pid',
                        help='pid filename')
    parser.add_argument('--port', dest='port', type=int, default=18861,
                        help='the TCP port to bind to')
    parser.add_argument('--backlog', dest='backlog', type=int, default=10,
                        help='backlog for the port')
    parser.add_argument('--stdout', action='store', dest='stdout',
                        default='/var/tmp/harvester_rpc.out',
                        help='stdout filename')
    parser.add_argument('--stderr', action='store', dest='stderr',
                        default='/var/tmp/harvester_rpc.err',
                        help='stderr filename')
    options = parser.parse_args()
    # logger
    _logger = logging.getLogger('rpc_bot')
    setupLogger(_logger, pid=os.getpid())
    # BUGFIX: open the redirect targets in a `with` block so they are closed
    # even when DaemonContext or the server raises (they previously leaked
    # on any error path before the explicit close() calls).
    with open(options.stdout, 'a+') as outfile, \
            open(options.stderr, 'a+') as errfile:
        # make daemon context
        dc = daemon.DaemonContext(
            pidfile=daemon.pidfile.PIDLockFile(options.pid),
            stdout=outfile,
            stderr=errfile)
        # run thread server
        with dc:
            from rpyc.utils.server import ThreadPoolServer
            t = ThreadPoolServer(RpcBot,
                                 port=options.port,
                                 backlog=options.backlog,
                                 logger=_logger,
                                 protocol_config={"allow_all_attrs": True})
            t.start()
def start_server(service, thread=True, host='0.0.0.0', port=18861):
    """Serve ``service`` over RPyC.

    With ``thread=True`` (default) the server runs in a background daemon
    thread; otherwise the call blocks. Returns the ``(host, port)`` pair in
    both cases.
    """
    from rpyc.utils.server import ThreadedServer, ThreadPoolServer
    server = ThreadPoolServer(
        service,
        hostname=host,
        port=port,
        reuse_addr=True,
        auto_register=True,
    )
    if not thread:
        # Foreground mode: blocks until the server is shut down.
        server.start()
        return (host, port)
    import threading
    worker = threading.Thread(target=server.start)
    worker.daemon = True
    worker.start()
    return (host, port)
def start_service(service_class, port=None):
    """
    Start an rpyc service given by the provided class. Port can be
    overridden.

    :param rpyc.Service service_class: a child class of :class:`rpyc.Service`.
    :param int port: the port the service should listen for requests on. If
                     it isn't provided by the caller, we get a value from
                     :class:`combtest.config`
    :return: a handle to the resulting :class:`ThreadedServer`.
    """
    # rpyc uses logging, and we want to dump its logging somehow on errors
    _ensure_stderr_handler()
    if port is None:
        port = get_service_port()
    else:
        set_service_port(port)
    from rpyc.utils.server import ThreadPoolServer
    server = ThreadPoolServer(service_class, port=port)
    server.start()
    return server
self.hashlist[h] = block # print(self.hashlist) """ b = get_block(h) : Retrieves a block indexed by hash value h As per rpyc syntax, adding the prefix 'exposed_' will expose this method as an RPC call """ def exposed_get_block(self, h): return self.hashlist[h] """ True/False = has_block(h) : Signals whether block indexed by h exists in the BlockStore service As per rpyc syntax, adding the prefix 'exposed_' will expose this method as an RPC call """ def exposed_has_block(self, h): return bool(self.hashlist.get(h)) if __name__ == '__main__': from rpyc.utils.server import ThreadPoolServer port = int(sys.argv[1]) server = ThreadPoolServer(BlockStore(), port=port) server.start()
class PupyServer(threading.Thread):
    """Threaded pupy C2 server.

    Reads network settings from a config file, accepts SSL-authenticated
    client connections via an RPyC ThreadPoolServer, and tracks connected
    clients and background jobs.
    """

    def __init__(self, configFile="pupy.conf"):
        super(PupyServer, self).__init__()
        self.daemon = True
        self.server = None
        self.authenticator = None
        self.clients = []
        self.jobs = {}
        self.jobs_id = 1
        self.clients_lock = threading.Lock()
        self.current_id = 1
        self.config = configparser.ConfigParser()
        self.config.read(configFile)
        self.port = self.config.getint("pupyd", "port")
        self.address = self.config.get("pupyd", "address")
        self.handler = None

    def register_handler(self, instance):
        """ register the handler instance, typically a PupyCmd, and PupyWeb in the futur"""
        self.handler = instance

    def add_client(self, conn):
        """Collect identity info from a newly connected client and register it."""
        with self.clients_lock:
            # Execute a bootstrap snippet remotely to gather host/user info.
            # NOTE(review): in the payload `version` is assigned twice
            # (platform.platform() then platform.version()); kept as-is.
            conn.execute(textwrap.dedent(
                """
                import platform
                import getpass
                import uuid
                import sys
                import os
                def get_uuid():
                    user=None
                    node=None
                    plat=None
                    release=None
                    version=None
                    machine=None
                    macaddr=None
                    pid=None
                    proc_arch=None
                    proc_path=sys.executable
                    try:
                        user=getpass.getuser()
                    except Exception:
                        pass
                    try:
                        node=platform.node()
                    except Exception:
                        pass
                    try:
                        version=platform.platform()
                    except Exception:
                        pass
                    try:
                        plat=platform.system()
                    except Exception:
                        pass
                    try:
                        release=platform.release()
                    except Exception:
                        pass
                    try:
                        version=platform.version()
                    except Exception:
                        pass
                    try:
                        machine=platform.machine()
                    except Exception:
                        pass
                    try:
                        pid=os.getpid()
                    except Exception:
                        pass
                    try:
                        proc_arch=platform.architecture()[0]
                    except Exception:
                        pass
                    try:
                        macaddr=uuid.getnode()
                        macaddr=':'.join(("%012X" % macaddr)[i:i+2] for i in range(0, 12, 2))
                    except Exception:
                        pass
                    return (user, node, plat, release, version, machine, macaddr, pid, proc_arch, proc_path)
                """))
            l = conn.namespace["get_uuid"]()
            self.clients.append(PupyClient.PupyClient({
                "id": self.current_id,
                "conn": conn,
                "user": l[0],
                "hostname": l[1],
                "platform": l[2],
                "release": l[3],
                "version": l[4],
                "os_arch": l[5],
                "proc_arch": l[8],
                "exec_path": l[9],
                "macaddr": l[6],
                "pid": l[7],
                "address": conn._conn._config['connid'].split(':')[0],
            }, self))
            if self.handler:
                addr = conn.modules['pupy'].get_connect_back_host()
                server_ip, server_port = addr.rsplit(':', 1)
                client_ip, client_port = conn._conn._config['connid'].split(':')
                self.handler.display_srvinfo("Session {} opened ({}:{} <- {}:{})".format(self.current_id, server_ip, server_port, client_ip, client_port))
            self.current_id += 1

    def remove_client(self, client):
        """Drop the client owning the given connection and notify the handler."""
        with self.clients_lock:
            for i, c in enumerate(self.clients):
                if c.conn is client:
                    if self.handler:
                        self.handler.display_srvinfo('Session {} closed'.format(self.clients[i].desc['id']))
                    del self.clients[i]
                    break

    def get_clients(self, search_criteria):
        """ return a list of clients corresponding to the search criteria. ex: platform:*win* """
        # if the criteria is a simple id we return the good client
        try:
            index = int(search_criteria)
            for c in self.clients:
                if int(c.desc["id"]) == index:
                    return [c]
            return []
        except Exception:
            pass
        matches = set([])
        if search_criteria == "*":
            return self.clients
        for c in self.clients:
            take = False
            for sc in search_criteria.split():
                tab = sc.split(":", 1)
                # BUGFIX: dict.iterkeys()/iteritems() and the `unicode` type
                # do not exist on Python 3 (this module already relies on the
                # py3-only `configparser` name); use plain membership,
                # .items() and str() instead.
                if len(tab) == 2 and tab[0] in c.desc:
                    # a field is specified: search the value in that field only
                    take = True
                    if not tab[1].lower() in str(c.desc[tab[0]]).lower():
                        take = False
                        break
                elif len(tab) != 2:
                    # no field specified: search every field for at least one match
                    take = False
                    for k, v in c.desc.items():
                        if tab[0].lower() in str(v).lower():
                            take = True
                            break
                    if not take:
                        break
            if take:
                matches.add(c)
        return list(matches)

    def get_clients_list(self):
        """Return the full list of connected clients."""
        return self.clients

    def list_modules(self):
        """Return (name, docstring) pairs for every module in the modules package."""
        result = []
        for loader, module_name, is_pkg in pkgutil.iter_modules(modules.__path__):
            module = self.get_module(module_name)
            result.append((module_name, module.__doc__))
        return result

    def get_module(self, name):
        """Load the named pupy module and return the class named by __class_name__."""
        script_found = False
        for loader, module_name, is_pkg in pkgutil.iter_modules(modules.__path__):
            if module_name == name:
                script_found = True
                module = loader.find_module(module_name).load_module(module_name)
                class_name = None
                if hasattr(module, "__class_name__"):
                    class_name = module.__class_name__
                    # BUGFIX: the error message referenced an undefined
                    # `script_name` variable (NameError); use module_name.
                    if not hasattr(module, class_name):
                        logging.error("script %s has a class_name=\"%s\" global variable defined but this class does not exists in the script !" % (module_name, class_name))
                if not class_name:
                    # TODO automatically search the class name in the file
                    pass
                return getattr(module, class_name)

    def module_parse_args(self, module_name, args):
        """ This method is used by the PupyCmd class to verify validity of arguments passed to a specific module """
        module = self.get_module(module_name)
        ps = module(None, None)
        return ps.arg_parser.parse_args(args)

    def del_job(self, job_id):
        """Remove the job with the given id, ignoring unknown/None ids."""
        if job_id is not None:
            job_id = int(job_id)
            if job_id in self.jobs:
                del self.jobs[job_id]

    def add_job(self, job):
        """Assign the next job id to `job` and store it."""
        job.id = self.jobs_id
        self.jobs[self.jobs_id] = job
        self.jobs_id += 1

    def get_job(self, job_id):
        """Return the job with the given id; raise PupyModuleError otherwise."""
        try:
            job_id = int(job_id)
        except ValueError:
            raise PupyModuleError("job id must be an integer !")
        if job_id not in self.jobs:
            raise PupyModuleError("%s: no such job !" % job_id)
        return self.jobs[job_id]

    def run(self):
        """Build the SSL authenticator from config and serve PupyService forever."""
        self.authenticator = SSLAuthenticator(
            self.config.get("pupyd", "keyfile").replace("\\", os.sep).replace("/", os.sep),
            self.config.get("pupyd", "certfile").replace("\\", os.sep).replace("/", os.sep),
            ciphers="SHA256+AES256:SHA1+AES256:@STRENGTH")
        self.server = ThreadPoolServer(PupyService.PupyService, port=self.port, hostname=self.address, authenticator=self.authenticator)
        self.server.start()
        # Tail of the enclosing query method (its def is outside this view):
        # cancel the pending timer, then fire the query on a worker thread.
        self.queryTimer.cancel()
        self.queryTimer = None
        if self.leaderHost == "":
            print("no server connection")
            return
        #print("connecting", self.leaderHost, self.leaderPort)
        queryStr = 'SELECT RoomID, Type, Floor from RoomInfo where Type = 4'
        args = (self.leaderHost, self.leaderPort, queryStr)
        t = threading.Thread(target=self.connectAndQuery, args=args)
        t.start()

    def insert(self):
        """Insert a new type-4 room row via the leader, on a worker thread."""
        # Cancel any pending booking timer before issuing a new insert.
        if self.bookTimer != None:
            self.bookTimer.cancel()
            self.bookTimer = None
        if self.leaderHost == "":
            print("no server connection")
            return
        #print("connecting", self.leaderHost, self.leaderPort)
        inserStr = f'INSERT INTO RoomInfo (RoomID,Type,Floor) VALUES ({self.roomStartId+1}, 4, 3)'
        # Bump the local id counter so the next insert uses a fresh RoomID.
        self.roomStartId += 1
        args = (self.leaderHost, self.leaderPort, inserStr)
        t = threading.Thread(target=self.connectAndInsert, args=args)
        t.start()


if __name__ == '__main__':
    from rpyc.utils.server import ThreadPoolServer
    # Expose the Client itself as an RPyC service on port 4999.
    client = ThreadPoolServer(Client(), port=4999)
    client.start()
# Top-level driver: start the printer/runner helpers, serve heartbeat RPC
# requests until interrupted, then tear down all node watchers.
PRINT_VERB.append("xstats")
PRINT_VERB.append("master")
# Idiom fix: plain loop instead of a throwaway side-effect list comprehension.
for b in BACKBONES:
    PRINT_VERB.append(b)
printtime(("Threaded heartbeat server listening on port %d\n"
           "press Ctrl-C to stop\n") % RPC_PORT)
FINISHED_EVENT.set()
printer = Printer(goOnEvent=FINISHED_EVENT)
printer.start()
runner = Runner()
runner.start()
try:
    t = ThreadPoolServer(MasterService, port=RPC_PORT)
    t.start()
except Exception as e:  # BUGFIX: "except Exception, e" is Python-2-only syntax
    printtime("%s" % e)
FINISHED_EVENT.clear()
printtime("Master_Server killing all clients")
for host in NODE_WATCHERS:
    NODE_WATCHERS[host].clearFinish()
    NODE_WATCHERS[host].clearGoOn()
# Wait for the watchers to unregister themselves before exiting.
while len(NODE_WATCHERS):
    time.sleep(1)
printtime("Exiting, please wait...")
printer.join()
            # Tail of the enclosing planemo-test method (its def is outside
            # this view). NOTE(review): branch structure reconstructed —
            # confirm the `else` pairing against the original file.
            if not res:
                res = "no response from planemo test...."
                allres.append(res)
            else:
                with open(planemo_log, 'w') as replog:
                    replog.write(res)
        # Package the tested tool and copy it into the galaxy tool tree.
        res = self.run_cmd(f"tar -cvz -f {ptooldir}/{toolname}_tested.toolshed.gz --directory {pwork} {toolname}")
        allres.append(res)
        res = self.run_cmd(f"cp -r {ptooldir} /galaxy-central/tools/TFtools/")
        allres.append(res)
        for fname in os.listdir(prepdir):
            print('fname', fname, collection)
            if fname.endswith('.json'):
                # if this is included, the html disappears. Go figure.
                continue
            res = self.run_cmd(f"cp {prepdir}/{fname} {collection}/{fname}")
        self.run_rsync(f"{ptooldir}/{toolname}_tested.toolshed.gz", toolwork)
        self.run_rsync(f"{ptooldir}/{toolname}_tested.toolshed.gz", galtooldir)
        res = self.run_cmd(f"chown -R galaxy:galaxy {pwork} {pworkrep} {galtooldir} {collection}")
        allres.append(res)
        # Collate every non-empty command output into the method's result.
        return '\n'.join([x for x in allres if len(x) > 0])


if __name__ == "__main__":
    logger = logging.getLogger()
    logging.basicConfig(level='INFO')
    t = ThreadPoolServer(planemo_run, port=9999, logger=logger, nbThreads=1)
    # single thread experiment to see if planemo/conda behave better. Many condas spoil the conga.
    t.start()
def run(self):
    """Launch the OSS RPC thread-pool server on port 20200 (blocks forever)."""
    # To-Do: Might be meaningful to have debug info for start
    logging.debug("Invoking main thread.")
    from rpyc.utils.server import ThreadPoolServer
    rpc_server = ThreadPoolServer(OSSRpc, port=20200, nbThreads=100)
    rpc_server.start()
class PupyServer(threading.Thread):
    """Threaded pupy C2 server (variant with remote-side encoding handling).

    Reads network settings from a config file, accepts SSL-authenticated
    client connections via an RPyC ThreadPoolServer, and tracks connected
    clients and background jobs.
    """

    def __init__(self, configFile="pupy.conf"):
        super(PupyServer, self).__init__()
        self.daemon = True
        self.server = None
        self.authenticator = None
        self.clients = []
        self.jobs = {}
        self.jobs_id = 1
        self.clients_lock = threading.Lock()
        self.current_id = 1
        self.config = configparser.ConfigParser()
        self.config.read(configFile)
        self.port = self.config.getint("pupyd", "port")
        self.address = self.config.get("pupyd", "address")
        self.handler = None

    def register_handler(self, instance):
        """ register the handler instance, typically a PupyCmd, and PupyWeb in the futur"""
        self.handler = instance

    def add_client(self, conn):
        """Collect identity info from a newly connected client and register it.

        The bootstrap payload normalizes user/host names from the remote
        machine's preferred encoding to UTF-8 (it runs on the client side,
        which may be Python 2 — do not modernize the payload here).
        """
        with self.clients_lock:
            conn.execute(textwrap.dedent(
                """
                import platform
                import getpass
                import uuid
                import sys
                import os
                import locale
                os_encoding = locale.getpreferredencoding() or "utf8"
                def get_uuid():
                    user=None
                    node=None
                    plat=None
                    release=None
                    version=None
                    machine=None
                    macaddr=None
                    pid=None
                    proc_arch=None
                    proc_path=sys.executable
                    try:
                        user=getpass.getuser().decode(encoding=os_encoding).encode("utf8")
                    except Exception:
                        pass
                    try:
                        node=platform.node().decode(encoding=os_encoding).encode("utf8")
                    except Exception:
                        pass
                    try:
                        version=platform.platform()
                    except Exception:
                        pass
                    try:
                        plat=platform.system()
                    except Exception:
                        pass
                    try:
                        release=platform.release()
                    except Exception:
                        pass
                    try:
                        version=platform.version()
                    except Exception:
                        pass
                    try:
                        machine=platform.machine()
                    except Exception:
                        pass
                    try:
                        pid=os.getpid()
                    except Exception:
                        pass
                    try:
                        proc_arch=platform.architecture()[0]
                    except Exception:
                        pass
                    try:
                        macaddr=uuid.getnode()
                        macaddr=':'.join(("%012X" % macaddr)[i:i+2] for i in range(0, 12, 2))
                    except Exception:
                        pass
                    return (user, node, plat, release, version, machine, macaddr, pid, proc_arch, proc_path)
                """))
            l = conn.namespace["get_uuid"]()
            self.clients.append(PupyClient.PupyClient({
                "id": self.current_id,
                "conn": conn,
                "user": l[0],
                "hostname": l[1],
                "platform": l[2],
                "release": l[3],
                "version": l[4],
                "os_arch": l[5],
                "proc_arch": l[8],
                "exec_path": l[9],
                "macaddr": l[6],
                "pid": l[7],
                "address": conn._conn._config['connid'].split(':')[0],
            }, self))
            if self.handler:
                addr = conn.modules['pupy'].get_connect_back_host()
                server_ip, server_port = addr.rsplit(':', 1)
                client_ip, client_port = conn._conn._config['connid'].split(':')
                self.handler.display_srvinfo("Session {} opened ({}:{} <- {}:{})".format(self.current_id, server_ip, server_port, client_ip, client_port))
            self.current_id += 1

    def remove_client(self, client):
        """Drop the client owning the given connection and notify the handler."""
        with self.clients_lock:
            for i, c in enumerate(self.clients):
                if c.conn is client:
                    if self.handler:
                        self.handler.display_srvinfo('Session {} closed'.format(self.clients[i].desc['id']))
                    del self.clients[i]
                    break

    def get_clients(self, search_criteria):
        """ return a list of clients corresponding to the search criteria. ex: platform:*win* """
        # if the criteria is a simple id we return the good client
        try:
            index = int(search_criteria)
            for c in self.clients:
                if int(c.desc["id"]) == index:
                    return [c]
            return []
        except Exception:
            pass
        matches = set([])
        if search_criteria == "*":
            return self.clients
        for c in self.clients:
            take = False
            for sc in search_criteria.split():
                tab = sc.split(":", 1)
                # BUGFIX: dict.iterkeys()/iteritems() and the `unicode` type
                # do not exist on Python 3 (this module already relies on the
                # py3-only `configparser` name); use plain membership,
                # .items() and str() instead.
                if len(tab) == 2 and tab[0] in c.desc:
                    # a field is specified: search the value in that field only
                    take = True
                    if not tab[1].lower() in str(c.desc[tab[0]]).lower():
                        take = False
                        break
                elif len(tab) != 2:
                    # no field specified: search every field for at least one match
                    take = False
                    for k, v in c.desc.items():
                        if tab[0].lower() in str(v).lower():
                            take = True
                            break
                    if not take:
                        break
            if take:
                matches.add(c)
        return list(matches)

    def get_clients_list(self):
        """Return the full list of connected clients."""
        return self.clients

    def list_modules(self):
        """Return (name, docstring) pairs for every module in the modules package."""
        result = []
        for loader, module_name, is_pkg in pkgutil.iter_modules(modules.__path__):
            module = self.get_module(module_name)
            result.append((module_name, module.__doc__))
        return result

    def get_module(self, name):
        """Load the named pupy module and return the class named by __class_name__."""
        script_found = False
        for loader, module_name, is_pkg in pkgutil.iter_modules(modules.__path__):
            if module_name == name:
                script_found = True
                module = loader.find_module(module_name).load_module(module_name)
                class_name = None
                if hasattr(module, "__class_name__"):
                    class_name = module.__class_name__
                    # BUGFIX: the error message referenced an undefined
                    # `script_name` variable (NameError); use module_name.
                    if not hasattr(module, class_name):
                        logging.error("script %s has a class_name=\"%s\" global variable defined but this class does not exists in the script !" % (module_name, class_name))
                if not class_name:
                    # TODO automatically search the class name in the file
                    pass
                return getattr(module, class_name)

    def module_parse_args(self, module_name, args):
        """ This method is used by the PupyCmd class to verify validity of arguments passed to a specific module """
        module = self.get_module(module_name)
        ps = module(None, None)
        return ps.arg_parser.parse_args(args)

    def del_job(self, job_id):
        """Remove the job with the given id, ignoring unknown/None ids."""
        if job_id is not None:
            job_id = int(job_id)
            if job_id in self.jobs:
                del self.jobs[job_id]

    def add_job(self, job):
        """Assign the next job id to `job` and store it."""
        job.id = self.jobs_id
        self.jobs[self.jobs_id] = job
        self.jobs_id += 1

    def get_job(self, job_id):
        """Return the job with the given id; raise PupyModuleError otherwise."""
        try:
            job_id = int(job_id)
        except ValueError:
            raise PupyModuleError("job id must be an integer !")
        if job_id not in self.jobs:
            raise PupyModuleError("%s: no such job !" % job_id)
        return self.jobs[job_id]

    def run(self):
        """Build the SSL authenticator from config and serve PupyService forever."""
        self.authenticator = SSLAuthenticator(
            self.config.get("pupyd", "keyfile").replace("\\", os.sep).replace("/", os.sep),
            self.config.get("pupyd", "certfile").replace("\\", os.sep).replace("/", os.sep),
            ciphers="SHA256+AES256:SHA1+AES256:@STRENGTH")
        self.server = ThreadPoolServer(PupyService.PupyService, port=self.port, hostname=self.address, authenticator=self.authenticator)
        self.server.start()