def run(self):
    """Thread entry point: wire a ZMQ SUB input and a Tornado HTTP server to one IO loop, then run it."""
    try:
        logger.debug("in thread")
        # data storage
        self.clients = set()        # connected websocket clients (broadcast targets)
        self.plots = OrderedDict()  # plot state keyed by label
        # tornado and zmq play nice?
        self.ioloop = ZMQIOLoop()
        # zmq
        self.context = zmq.Context()
        self.socket_in = self.context.socket(zmq.SUB)
        self.socket_in.connect("tcp://0.0.0.0:"+str(self.zmq_port))
        self.socket_in.setsockopt_string(zmq.SUBSCRIBE, "")  # empty prefix = all topics
        # input handler
        self.stream = zmqstream.ZMQStream(self.socket_in, io_loop=self.ioloop)
        self.stream.on_recv(self.on_recv)
        # tornado
        self.application = Application(io_loop=self.ioloop)
        self.application._server = self  # back-reference so request handlers can reach this server
        self.server = HTTPServer(self.application)
        self.server.listen(self.http_port)
        logger.debug("starting IOLoop")
        self.ioloop.start()  # blocks until ioloop.stop() is called
        logger.debug("done thread")
    except Exception as e:
        # capture exceptions from daemonic thread to log file
        import traceback as tb
        logger.error("Exception in server thread:\n" + str(e) + str(tb.format_exc()))
def main():
    """Start the data service on APP_INTERFACE and run the ZMQ-aware IO loop until interrupted."""
    ds = DataService(AppDataHandler)
    ds.bind(APP_INTERFACE)
    try:
        ZMQIOLoop.instance().start()  # blocks; Ctrl-C raises KeyboardInterrupt
    except KeyboardInterrupt:
        # print() call form works on both Python 2 and 3
        # (original used a Python-2-only print statement)
        print('Interrupted')
def on_response(message):
    # NOTE(review): `self` is referenced but is not a parameter -- this
    # function only works as a closure inside a method (compare the identical
    # nested version in `_run` elsewhere in this file); standalone it raises
    # NameError.
    response = msgpack.unpackb(message[0], use_list=False)  # (status, payload[, error])
    if response[0] == 'OK':
        self.result = response[1]
    elif response[0] == 'ERR':
        # NOTE(review): raising here runs inside the IO-loop callback, so the
        # stop() below is never reached on the error path and the loop hangs
        # -- confirm intent.
        raise Exception(response[2])
    ZMQIOLoop.instance().stop()
def _run(self, request):
    """Send *request* on the stream and block on the IO loop until a reply arrives.

    Returns the unpacked payload on 'OK'.
    Raises Exception with the server's error text on 'ERR'.
    """
    # Error captured by the callback and re-raised after the loop stops, so it
    # propagates to our caller instead of dying inside the IO-loop callback.
    error = [None]

    def on_response(message):
        response = msgpack.unpackb(message[0], use_list=False)
        if response[0] == 'OK':
            self.result = response[1]
        elif response[0] == 'ERR':
            # Original raised here, so stop() below was never reached on the
            # error path and the loop hung; record the error instead and
            # always stop the loop.
            error[0] = Exception(response[2])
        ZMQIOLoop.instance().stop()

    self.stream.send(msgpack.packb(request))
    self.stream.on_recv(on_response)
    ZMQIOLoop.instance().start()  # blocks until on_response stops the loop
    if error[0] is not None:
        raise error[0]
    return self.result
def start(self, delay_s=0):
    """Serve scripted replies on the REP socket: 'start' -> 'started', 'notify_completion' -> 'completed'.

    :param float delay_s: seconds to sleep before acknowledging completion.
    """
    self.reset_socks()
    loop = ZMQIOLoop()
    stream = ZMQStream(self.socks['rep'], io_loop=loop)

    def respond(msg):
        # print() call form is py2/3 compatible (original used a py2 print statement)
        print('got %s' % msg)
        request = msg[0]
        if request == 'start':
            stream.send('started')
            self._start_time = datetime.now()
        elif request == 'notify_completion':
            # NOTE: blocking sleep inside the IO-loop callback stalls the whole
            # loop; acceptable for this test harness.
            time.sleep(delay_s)
            stream.send('completed')

    stream.on_recv(respond)
    loop.start()  # blocks
def __init__(self, sftp):
    """Bind a REP socket on port 4444 and stream incoming requests to the receive callback."""
    io_loop = ZMQIOLoop.instance()
    context = zmq.Context.instance()
    rep_socket = context.socket(zmq.REP)
    rep_socket.bind("tcp://*:4444")
    self.sftp_connection_stream = ZMQStream(rep_socket, io_loop)
    self.sftp_connection_stream.on_recv(
        self._sftp_connection_stream_receive_callback)
    self.sftp_connection_manager = sftp
def on_event(self, event):
    """SSE callback: fire test events while any remain, then schedule shutdown; validate each received frame."""
    if self.events_to_fire > 0:
        payload = {'foo': 'bar', 'baz': 'qux'}
        self.application.event_listener.event.fire_event(payload, 'salt/netapi/test')
        self.events_to_fire -= 1
    else:
        # once we've fired all the events, lets call it a day
        # wait so that we can ensure that the next future is ready to go
        # to make sure we don't explode if the next one is ready
        ZMQIOLoop.current().add_timeout(time.time() + 0.5, self._stop)

    event = event.strip()
    if event == 'retry: 400':
        # server retry hint -- nothing to validate
        return
    tag, data = event.splitlines()
    self.assertTrue(tag.startswith('tag: '))
    self.assertTrue(data.startswith('data: '))
def perform(self, request):
    """Send *request* on a pooled stream; return a Future resolved with the
    Response, or failed with TimeoutError after 2 seconds (or with the remote
    exception on ERROR_EXCEPTION responses)."""
    def on_timeout():
        io_loop.remove_timeout(to)
        self.pool.put_stream(stream)
        # set_exception instead of raise: an exception raised inside an
        # IO-loop callback never reaches the caller and would leave the
        # future unresolved forever.
        future.set_exception(TimeoutError("timeout"))

    def on_response(message):
        io_loop.remove_timeout(to)
        self.pool.put_stream(stream)
        response = ResponseFactory().loads(message[0])
        if response.error == ERROR_EXCEPTION:
            # deliver the remote exception through the future (original
            # raised here, which made set_result unreachable and lost the
            # error inside the callback)
            future.set_exception(response.result)
        else:
            future.set_result(response)

    future = Future()
    stream = self.pool.get_stream()
    stream.send(RequestFactory().dumps(request))
    # Fixed: original called ZMQIOLoop().instance(), constructing a throwaway
    # loop just to reach the singleton accessor.
    io_loop = ZMQIOLoop.instance()
    wait_time = datetime.timedelta(seconds=2)
    to = io_loop.add_timeout(wait_time, on_timeout)
    stream.on_recv(on_response)
    return future
class service:
    # Hub thread "one": periodically publishes its identity to threads two and
    # three over ZMQ pub/sub and consumes their replies.
    # NOTE: Python-2-only module (print statements).

    def __init__(self):
        self.ioloop = ZMQIOLoop()
        self.ioloop.install()  # make this loop the global IOLoop instance
        return

    def process_message_two(self, msg):
        # Callback for messages arriving from thread two.
        print "get thread two message"
        print "processing .....", msg
        return

    def process_message_three(self, msg):
        # Callback for messages arriving from thread three.
        print "get thread three message"
        print "processing......", msg
        return

    def timeout(self):
        # Periodic (3 s) broadcast of this thread's identity to two and three.
        print "thread one timeout"
        data = {}
        data['thread'] = 'one'
        self.socket_to_others.send(zmqconfig.one_to_two_subject, zmq.SNDMORE)
        self.socket_to_others.send(json.dumps(data))
        self.socket_to_others.send(zmqconfig.one_to_three_subject, zmq.SNDMORE)
        self.socket_to_others.send(json.dumps(data))
        self.ioloop.add_timeout(time.time() + 3, self.timeout)  # reschedule
        return

    def run(self):
        # PUB socket for outgoing messages.
        self.socket_to_others = zmqconfig.context.socket(zmq.PUB)
        self.socket_to_others.bind(zmqconfig.one_zmq_addr)
        # SUB socket receiving from thread two.
        self.socket_from_two = zmqconfig.context.socket(zmq.SUB)
        self.socket_from_two.connect(zmqconfig.two_zmq_addr)
        self.socket_from_two.setsockopt(zmq.SUBSCRIBE, zmqconfig.two_to_one_subject)
        self.stream_from_two_sub = zmqstream.ZMQStream(self.socket_from_two)
        self.stream_from_two_sub.on_recv(self.process_message_two)
        # SUB socket receiving from thread three.
        self.socket_from_three = zmqconfig.context.socket(zmq.SUB)
        self.socket_from_three.connect(zmqconfig.three_zmq_addr)
        self.socket_from_three.setsockopt(zmq.SUBSCRIBE, zmqconfig.three_to_one_subject)
        self.socket_from_three_sub = zmqstream.ZMQStream(
            self.socket_from_three)
        self.socket_from_three_sub.on_recv(self.process_message_three)
        self.ioloop.add_timeout(time.time(), self.timeout)  # kick off the heartbeat
        application = tornado.web.Application(urls)
        application.listen(8887)
        self.ioloop.start()  # blocks
        return
def _start_ipython(self):
    """Start an in-process IPython kernel whose ZMQ IO loop runs on a background thread; return the IPKernelApp."""
    from IPython import get_ipython
    if get_ipython() is not None:
        raise RuntimeError("Cannot start IPython, it's already running.")

    from zmq.eventloop.ioloop import ZMQIOLoop
    from ipykernel.kernelapp import IPKernelApp
    # save the global IOLoop instance
    # since IPython relies on it, but we are going to put it in a thread.
    save_inst = IOLoop.instance()
    IOLoop.clear_instance()
    zmq_loop = ZMQIOLoop()
    zmq_loop.install()  # IPython will pick this up as the global loop

    # start IPython, disabling its signal handlers that won't work due to running in a thread:
    app = self._ipython_kernel = IPKernelApp.instance(log=logger)
    # Don't connect to the history database
    app.config.HistoryManager.hist_file = ':memory:'
    # listen on all interfaces, so remote clients can connect:
    app.ip = self.ip
    # signal handlers only work on the main thread -- neutralize them
    app.init_signal = lambda : None
    app.initialize([])
    app.kernel.pre_handler_hook = lambda : None
    app.kernel.post_handler_hook = lambda : None
    app.kernel.start()

    # save self in the IPython namespace as 'worker'
    app.kernel.shell.user_ns['worker'] = self

    # start IPython's IOLoop in a thread
    zmq_loop_thread = Thread(target=zmq_loop.start)
    zmq_loop_thread.start()

    # put the global IOLoop instance back:
    IOLoop.clear_instance()
    save_inst.install()
    return app
def perform(self, request):
    """Send *request* on a pooled stream; return a Future resolved with the
    Response, or failed with TimeoutError / the remote exception."""
    def on_timeout():
        io_loop.remove_timeout(to)
        self.pool.put_stream(stream)  # return the stream to the pool even on timeout
        if not future.done():
            # Fail the future instead of raising inside the IO-loop callback:
            # a raise there never reaches the caller, and the original also
            # resolved the future with an empty placeholder Response first.
            future.set_exception(TimeoutError("timeout"))

    def on_response(message):
        io_loop.remove_timeout(to)
        if not future.done():
            response = ResponseFactory().loads(message[0])
            if response.error == ERROR_EXCEPTION:
                # deliver the remote exception through the future
                future.set_exception(response.result)
            else:
                future.set_result(response)
        self.pool.put_stream(stream)

    future = Future()
    stream = self.pool.get_stream()
    stream.send(RequestFactory().dumps(request))
    # Fixed: original called ZMQIOLoop().instance(), constructing a throwaway
    # loop just to reach the singleton accessor.
    io_loop = ZMQIOLoop.instance()
    wait_time = datetime.timedelta(seconds=2)
    to = io_loop.add_timeout(wait_time, on_timeout)
    stream.on_recv(on_response)
    return future
class service:
    # Hub thread "one": periodically publishes its identity to threads two and
    # three over ZMQ pub/sub and consumes their replies.
    # NOTE: Python-2-only module (print statements).

    def __init__(self):
        self.ioloop = ZMQIOLoop()
        self.ioloop.install()  # make this loop the global IOLoop instance
        return

    def process_message_two(self, msg):
        # Callback for messages arriving from thread two.
        print "get thread two message"
        print "processing .....", msg
        return

    def process_message_three(self, msg):
        # Callback for messages arriving from thread three.
        print "get thread three message"
        print "processing......", msg
        return

    def timeout(self):
        # Periodic (3 s) broadcast of this thread's identity to two and three.
        print "thread one timeout"
        data = {}
        data['thread'] = 'one'
        self.socket_to_others.send(zmqconfig.one_to_two_subject, zmq.SNDMORE)
        self.socket_to_others.send(json.dumps(data))
        self.socket_to_others.send(zmqconfig.one_to_three_subject, zmq.SNDMORE)
        self.socket_to_others.send(json.dumps(data))
        self.ioloop.add_timeout(time.time() + 3, self.timeout)  # reschedule
        return

    def run(self):
        # PUB socket for outgoing messages.
        self.socket_to_others = zmqconfig.context.socket(zmq.PUB)
        self.socket_to_others.bind(zmqconfig.one_zmq_addr)
        # SUB socket receiving from thread two.
        self.socket_from_two = zmqconfig.context.socket(zmq.SUB)
        self.socket_from_two.connect(zmqconfig.two_zmq_addr)
        self.socket_from_two.setsockopt(zmq.SUBSCRIBE, zmqconfig.two_to_one_subject)
        self.stream_from_two_sub = zmqstream.ZMQStream(self.socket_from_two)
        self.stream_from_two_sub.on_recv(self.process_message_two)
        # SUB socket receiving from thread three.
        self.socket_from_three = zmqconfig.context.socket(zmq.SUB)
        self.socket_from_three.connect(zmqconfig.three_zmq_addr)
        self.socket_from_three.setsockopt(zmq.SUBSCRIBE, zmqconfig.three_to_one_subject)
        self.socket_from_three_sub = zmqstream.ZMQStream(self.socket_from_three)
        self.socket_from_three_sub.on_recv(self.process_message_three)
        self.ioloop.add_timeout(time.time(), self.timeout)  # kick off the heartbeat
        application = tornado.web.Application(urls)
        application.listen(8887)
        self.ioloop.start()  # blocks
        return
def __init__(self, addr="tcp://*:", port=config.GAME_MANAGER_PORT, worker_port=6000):
    """Bind ROUTER sockets for clients and game workers and wire them to a GameManager.

    :param str addr: transport/interface prefix that *port* is appended to.
    :param int port: client-facing port.
    :param int worker_port: worker-facing port (previously hard-coded to 6000;
        default preserves the old behavior).
    """
    super(GameManagerServer, self).__init__()
    self.context = zmq.Context()
    self.io_loop = ZMQIOLoop.instance()

    self.client_router_sock = self.context.socket(zmq.ROUTER)
    self.address = addr + str(port)
    self.client_router_sock.bind(self.address)

    self.worker_router_sock = self.context.socket(zmq.ROUTER)
    self.worker_router_sock.bind("tcp://*:" + str(worker_port))

    # The socket attributes are rebound to their ZMQStream wrappers; the raw
    # sockets remain reachable via the streams.
    self.client_router_sock = ZMQStream(self.client_router_sock)
    self.client_router_sock.on_recv(self.recv_from_client)
    self.worker_router_sock = ZMQStream(self.worker_router_sock)
    self.worker_router_sock.on_recv(self.recv_from_game)

    self.manager = GameManager(self.send_to_client, self.send_to_game)
def __init__(self):
    """Create a ZMQ-aware IO loop and install it as the global IOLoop instance."""
    loop = ZMQIOLoop()
    loop.install()
    self.ioloop = loop
# coding: utf8 import getopt import tornado.ioloop import tornado.web from tornado.log import app_log from zmq.eventloop.zmqstream import ZMQStream from zmq.eventloop.ioloop import ZMQIOLoop loop = ZMQIOLoop() loop.install() from lib.autoconf import * from lib.path import * from lib.log import * from biz.core import * ''' conf_drawer是一个配置管理器的实例,类型Config定义在lib.autoconf中,register_my_setup 装饰器会在加载文件的时候执行,把各个文件的setup函数append到其setups列表中去,所以理论上它 先于main函数的执行,而main函数会调用conf_drawer的setup函数,其中就是遍历注册的setups列 执行注册过来的各种setup函数 ''' @conf_drawer.register_my_setup(look='push') def all_start(pcc): files_list = os.listdir(BIZ_PATH) files_list = set(['biz.' + x[:x.rfind(".")] for x in files_list if x.endswith(".py")]) map(__import__, files_list) Hubber(pcc['ihq']) # subscirbe class Hubber(object):
class ConsoleServer(threading.Thread):
    # Daemon thread bridging a ZMQ SUB socket (plot commands) to websocket
    # clients through a Tornado HTTP server.

    def __init__(self, zmq_port, http_port, killable=False):
        super(ConsoleServer,self).__init__()
        self.name = "ConsoleServer Thread"
        self.daemon = True  # don't keep the process alive on interpreter exit
        self.http_port = http_port
        self.zmq_port = zmq_port
        self.killable = killable  # allow the remote 'die' command to stop the server

    def run(self):
        try:
            logger.debug("in thread")
            # data storage
            self.clients = set()        # connected websocket clients
            self.plots = OrderedDict()  # plot state keyed by label
            # tornado and zmq play nice?
            self.ioloop = ZMQIOLoop()
            # zmq
            self.context = zmq.Context()
            self.socket_in = self.context.socket(zmq.SUB)
            self.socket_in.connect("tcp://0.0.0.0:"+str(self.zmq_port))
            self.socket_in.setsockopt_string(zmq.SUBSCRIBE, "")  # all topics
            # input handler
            self.stream = zmqstream.ZMQStream(self.socket_in, io_loop=self.ioloop)
            self.stream.on_recv(self.on_recv)
            # tornado
            self.application = Application(io_loop=self.ioloop)
            self.application._server = self  # back-reference for request handlers
            self.server = HTTPServer(self.application)
            self.server.listen(self.http_port)
            logger.debug("starting IOLoop")
            self.ioloop.start()  # blocks until stopped
            logger.debug("done thread")
        except Exception as e:
            # capture exceptions from daemonic thread to log file
            import traceback as tb
            logger.error("Exception in server thread:\n" + str(e) + str(tb.format_exc()))

    # receiver code
    def on_recv(self, msg):
        # Each frame is a JSON command; apply it to self.plots, then broadcast
        # the raw frame to every connected client.
        for s in msg:
            # logger.debug("msg: %s" % s)
            json_data = json.loads(s.decode())
            label = json_data.get('label', '')
            cmd = json_data.get('cmd', '')
            if cmd == 'create_plot':
                self.plots[label] = {}
            elif cmd == 'remove_plot':
                self.plots.pop(label)
            elif cmd == 'clear_plots':
                self.plots.clear()
            elif cmd == 'set_title':
                self.plots[label]['title'] = json_data['title']
            elif cmd == 'set_vega':
                self.plots[label]['spec'] = json_data['spec']
            elif cmd == 'set_svg':
                self.plots[label]['svg'] = json_data['svg']
                self.plots[label]['css'] = json_data['css']
            # broadcast to clients
            for c in self.clients:
                c.write_message(s)
            if self.killable and cmd == 'die':
                # remote shutdown: stop accepting HTTP and end the IO loop
                self.server.stop()
                self.ioloop.stop()
# NOTE(review): this chunk starts mid-handler -- the call wrapping
# json.dumps(...) (presumably self.write) is cut off above.
        json.dumps(
            dict(status='error', message='Session error,please refresh')))
        self.finish()


if __name__ == '__main__':
    options.define("p", default=7777, help="run on the given port", type=int)
    options.parse_command_line()
    config = Configurations()
    port = options.options.p
    logger = get_logger("server", logging.DEBUG)
    # Install the ZMQ-aware loop as the global IOLoop before creating streams.
    loop = ZMQIOLoop()
    loop.install()
    context = zmq.Context()
    # PUB socket: broadcast out to workers.
    zmq_publish = context.socket(zmq.PUB)
    zmq_publish.bind("tcp://127.0.0.1:%s" % str(config.get_configuration("zmqPublish")))
    # REP socket: synchronous dispatch endpoint.
    zmq_dispatch = context.socket(zmq.REP)
    zmq_dispatch.bind("tcp://127.0.0.1:%s" % str(config.get_configuration("zmqDispatch")))
    # PULL socket: results pushed back by workers, handled by on_worker_data_in.
    zmq_result = context.socket(zmq.PULL)
    zmq_result.bind("tcp://127.0.0.1:%s" % str(config.get_configuration("zmqResult")))
    receiver = ZMQStream(zmq_result)
    receiver.on_recv(on_worker_data_in)
    cli_device_dict = {}
from zmq.eventloop.ioloop import ZMQIOLoop

from lib.log import Log


def prepare(conf_file):
    """Parse *conf_file* and pipe the result into the conf_drawer setup chain."""
    cpff = ConfigParserFromFile()
    conf_file | E(cpff.parseall) | E(conf_drawer.setup)


if __name__ == "__main__":
    includes = None
    opts, argvs = getopt.getopt(sys.argv[1:], "c:h")
    for op, value in opts:
        if op == '-c':
            # explicit config file; remember its directory as the etc path
            includes = value
            path._ETC_PATH = os.path.dirname(os.path.abspath(value))
        elif op == '-h':
            # single-argument print() calls are py2/3 compatible
            # (original used py2-only print statements)
            print(u'''使用参数启动:
        usage: [-c]
        -c <file> ******加载配置文件
        ''')
            sys.exit(0)
    if not includes:
        # fall back to the development include file
        includes = os.path.join(path._ETC_PATH, 'includes_dev.json')
        print("no configuration found!,will use [%s] instead" % includes)
    prepare(includes)
    Log.rose_log().info("starting...")
    ZMQIOLoop.instance().start()  # blocks
def run():
    """Run the global ZMQ IO loop until interrupted with Ctrl-C."""
    try:
        ZMQIOLoop.instance().start()  # blocks
    except KeyboardInterrupt:
        # print() call form works on both Python 2 and 3
        # (original used a Python-2-only print statement)
        print('Interrupted')
from zmq.eventloop.ioloop import ZMQIOLoop

from lib.log import Log


def prepare(conf_file):
    """Parse *conf_file* and pipe the result into the conf_drawer setup chain."""
    cpff = ConfigParserFromFile()
    conf_file | when(cpff.parseall) | when(conf_drawer.setup)


if __name__ == "__main__":
    includes = None
    opts, argvs = getopt.getopt(sys.argv[1:], "c:h")
    for op, value in opts:
        if op == '-c':
            # explicit config file; remember its directory as the etc path
            includes = value
            path._ETC_PATH = os.path.dirname(os.path.abspath(value))
        elif op == '-h':
            # single-argument print() calls are py2/3 compatible
            # (original used py2-only print statements)
            print(u'''使用参数启动:
        usage: [-c]
        -c <file> ******加载配置文件
        ''')
            sys.exit(0)
    if not includes:
        # fall back to the development include file
        includes = os.path.join(path._ETC_PATH, 'includes_dev.json')
        print("no configuration found!,will use [%s] instead" % includes)
    prepare(includes)
    Log.rose_log().info("starting...")
    ZMQIOLoop.instance().start()  # blocks
# coding: utf8 import getopt from tornado.log import app_log from zmq.eventloop.zmqstream import ZMQStream from zmq.eventloop.ioloop import ZMQIOLoop loop = ZMQIOLoop() loop.install() from lib.autoconf import * from lib.path import * from lib.log import * from biz.core import * ''' conf_drawer是一个配置管理器的实例,类型Config定义在lib.autoconf中,register_my_setup 装饰器会在加载文件的时候执行,把各个文件的setup函数append到其setups列表中去,所以理论上它 先于main函数的执行,而main函数会调用conf_drawer的setup函数,其中就是遍历注册的setups列 执行注册过来的各种setup函数 ''' @conf_drawer.register_my_setup(look='push') def all_start(pcc): files_list = os.listdir(BIZ_PATH) files_list = set(['biz.' + x[:x.rfind(".")] for x in files_list if x.endswith(".py")]) map(__import__, files_list) Hubber(pcc['ihq']) # subscirbe class Hubber(object): ''' 此类为一个本地的pub/sub-HUB,任何新websocket连接进来都会通过一个zmqsocket订阅到HUB
def __init__(self):
    """Install a global ZMQ IO loop and initialise client-tracking state."""
    loop = ZMQIOLoop()
    loop.install()
    self.ioloop = loop
    self.clients = {}          # per-client bookkeeping, keyed by ip
    self.in_client = "server"  # current target of console input
def run_message_sequence(filename, heartbeat):
    """Run through a list of scripted messages.

    :param str filename: CSV sequence filename.
    :param bool heartbeat: Send heartbeats every second once connected.
    """
    done = Event()
    with socket() as sock:

        @gen.coroutine
        def send_msg(mtype):
            # Serialize and send a message of the named type.
            yield sock.send(get_message_type(mtype)().jsonize())

        @gen.coroutine
        def recv():
            # Drain incoming messages forever; cancelled by main() at the end.
            while True:
                incoming = yield sock.recv()
                logger.debug("Received %s" % incoming)

        @gen.coroutine
        def wait_for_connection():
            # Block until the peer sends anything, then announce ourselves.
            logger.info("Waiting for connection ")
            incoming = yield sock.recv()
            logger.debug("Received %s", incoming)
            logger.info("Connected!")
            yield send_msg("CONNECTED")

        @gen.coroutine
        def wait_for_start():
            # Currently unused (call in main() is commented out).
            ready = False
            logger.info("Waiting for start message...")
            while not ready:
                incoming = yield sock.recv()
                msg = json.loads(incoming)
                logger.debug("%s", incoming)
                if msg["type"] != "START":
                    continue
                ready = True
                logger.info("Got start")

        @gen.coroutine
        def send_sequence(sequence):
            # Replay each scripted entry after its per-entry delay.
            for entry in sequence:
                logger.info("Delaying for %.3f s...", entry.delay)
                yield gen.sleep(entry.delay)
                jsonized = entry.msg.jsonize()
                logger.info("Sending %s", jsonized)
                # NOTE(review): not yielded, unlike the other sends -- the
                # send future is fire-and-forget; confirm intentional.
                sock.send(jsonized)

        @gen.coroutine
        def send_heartbeats():
            # Periodic HEARTBEAT until `done` is set by main().
            if heartbeat:
                logger.info("Sending heartbeats...")
                while not done.is_set():
                    yield send_msg("HEARTBEAT")
                    yield gen.sleep(1)

        @gen.coroutine
        def main():
            sequence = parse_csv_file(filename)
            yield wait_for_connection()
            send_heartbeats()  # runs concurrently; stopped via `done` below
            # yield wait_for_start()
            recv_future = recv()
            yield send_sequence(sequence)
            yield send_msg("EXIT")
            recv_future.cancel()
            done.set()

        loop = ZMQIOLoop.instance()
        loop.run_sync(main)
def start_ipython(ip=None, ns=None, log=None):
    """Start an IPython kernel in a thread

    Parameters
    ----------
    ip: str
        The IP address to listen on (likely the parent object's ip).
    ns: dict
        Any names that should be injected into the IPython namespace.
    log: logger instance
        Hook up IPython's logging to an existing logger instead of the default.
    """
    from IPython import get_ipython
    if get_ipython() is not None:
        raise RuntimeError("Cannot start IPython, it's already running.")

    from zmq.eventloop.ioloop import ZMQIOLoop
    from ipykernel.kernelapp import IPKernelApp
    # save the global IOLoop instance
    # since IPython relies on it, but we are going to put it in a thread.
    save_inst = IOLoop.instance()
    IOLoop.clear_instance()
    zmq_loop = ZMQIOLoop()
    zmq_loop.install()

    # start IPython, disabling its signal handlers that won't work due to running in a thread:
    app = IPKernelApp.instance(log=log)
    # Don't connect to the history database
    app.config.HistoryManager.hist_file = ':memory:'
    # listen on all interfaces, so remote clients can connect:
    if ip:
        app.ip = ip

    # disable some signal handling, logging
    # (plain def instead of an assigned lambda, per PEP 8 E731 and consistent
    # with the sibling variant of this function)
    def noop():
        return None

    app.init_signal = noop
    app.log_connection_info = noop

    # start IPython in a thread
    # initialization happens in the thread to avoid threading problems
    # with the sqlite history
    evt = Event()

    def _start():
        app.initialize([])
        app.kernel.pre_handler_hook = noop
        app.kernel.post_handler_hook = noop
        app.kernel.start()
        app.kernel.loop = IOLoop.instance()
        # inject things into the IPython namespace
        if ns:
            app.kernel.shell.user_ns.update(ns)
        evt.set()
        zmq_loop.start()

    zmq_loop_thread = Thread(target=_start)
    zmq_loop_thread.daemon = True
    zmq_loop_thread.start()
    assert evt.wait(timeout=5), "IPython didn't start in a reasonable amount of time."

    # put the global IOLoop instance back:
    IOLoop.clear_instance()
    save_inst.install()
    return app
def start_ipython(ip=None, ns=None, log=None):
    """Start an IPython kernel in a thread

    Parameters
    ----------
    ip: str
        The IP address to listen on (likely the parent object's ip).
    ns: dict
        Any names that should be injected into the IPython namespace.
    log: logger instance
        Hook up IPython's logging to an existing logger instead of the default.
    """
    from IPython import get_ipython
    if get_ipython() is not None:
        raise RuntimeError("Cannot start IPython, it's already running.")

    from zmq.eventloop.ioloop import ZMQIOLoop
    from ipykernel.kernelapp import IPKernelApp
    # save the global IOLoop instance
    # since IPython relies on it, but we are going to put it in a thread.
    save_inst = IOLoop.instance()
    IOLoop.clear_instance()
    zmq_loop = ZMQIOLoop()
    zmq_loop.install()  # IPython will pick this up as the global loop

    # start IPython, disabling its signal handlers that won't work due to running in a thread:
    app = IPKernelApp.instance(log=log)
    # Don't connect to the history database
    app.config.HistoryManager.hist_file = ":memory:"
    # listen on all interfaces, so remote clients can connect:
    if ip:
        app.ip = ip

    # disable some signal handling, logging
    def noop():
        return None

    app.init_signal = noop
    app.log_connection_info = noop

    # start IPython in a thread
    # initialization happens in the thread to avoid threading problems
    # with the sqlite history
    evt = Event()

    def _start():
        app.initialize([])
        app.kernel.pre_handler_hook = noop
        app.kernel.post_handler_hook = noop
        app.kernel.start()
        app.kernel.loop = IOLoop.instance()
        # inject things into the IPython namespace
        if ns:
            app.kernel.shell.user_ns.update(ns)
        evt.set()  # signal the parent thread that init finished
        zmq_loop.start()  # blocks for the thread's lifetime

    zmq_loop_thread = Thread(target=_start)
    zmq_loop_thread.daemon = True
    zmq_loop_thread.start()
    assert evt.wait(timeout=5), "IPython didn't start in a reasonable amount of time."

    # put the global IOLoop instance back:
    IOLoop.clear_instance()
    save_inst.install()
    return app
class service:
    # Console-server hub: relays commands between an "input" console and
    # remote clients over ZMQ pub/sub, tracking client liveness via heartbeats.

    def __init__(self):
        self.ioloop = ZMQIOLoop()
        self.ioloop.install()  # become the global IOLoop
        self.clients = {}          # ip -> {"time": last heartbeat, "tag": routing tag}
        self.in_client = "server"  # current target of console input
        return

    def process_message_client(self, msg):
        # msg[0] = sender tag frame, msg[1] = JSON body frame.
        body = json.loads(bytes.decode(msg[1]))
        # from client heart
        if body["type"] == "heart":
            # heartbeat: record/refresh the client's last-seen time and tag
            clients = json.loads(bytes.decode(msg[0]))
            ip = clients["ip"]
            if ip not in self.clients:
                self.clients[ip] = {}
                self.clients[ip]["time"] = time.time()
                self.clients[ip]["tag"] = bytes.decode(msg[0])
            else:
                self.clients[ip]["time"] = time.time()
            pass
        # from input t
        elif body["type"] == "cmd":
            # a = os.system(body["cmd"])
            ip_cmd = body["ip_cmd"]
            if self.in_client == "server":
                # input to server
                temp_ip_cmd = body["ip_cmd"].split(" ", 1)
                if temp_ip_cmd[0] == "ls":
                    # list known clients, zeroing out stale heartbeat times
                    message = {}
                    message["type"] = "server_message"
                    if self.clients != {}:
                        # del overdate
                        for ip in self.clients:
                            if time.time() - self.clients[ip]["time"] > float(
                                    server_config.delay):
                                self.clients[ip]["time"] = 0
                        message["clients"] = self.clients
                    else:
                        message["clients"] = {}
                    self.socket_to_others.send_string(
                        server_config.server_to_input_subject, zmq.SNDMORE)
                    self.socket_to_others.send_string(json.dumps(message))
                elif temp_ip_cmd[0] == "ssh":
                    # switch input target to the named client, if known
                    if temp_ip_cmd[1] in self.clients:
                        self.in_client = self.clients[temp_ip_cmd[1]]['tag']
                        message = {}
                        message["type"] = "state"
                        message["state"] = temp_ip_cmd[1]
                        self.socket_to_others.send_string(
                            server_config.server_to_input_subject, zmq.SNDMORE)
                        self.socket_to_others.send_string(json.dumps(message))
                    else:
                        message = {}
                        message["type"] = "error"
                        message["error"] = "no client is %s" % temp_ip_cmd[1]
                        self.socket_to_others.send_string(
                            server_config.server_to_input_subject, zmq.SNDMORE)
                        self.socket_to_others.send_string(json.dumps(message))
                else:
                    message = {}
                    message["type"] = "error"
                    message["error"] = "sorry, server no this cmd"
                    self.socket_to_others.send_string(
                        server_config.server_to_input_subject, zmq.SNDMORE)
                    self.socket_to_others.send_string(json.dumps(message))
                return
            else:
                # input to client
                temp_ip_cmd = body["ip_cmd"].split(" ", 1)
                if temp_ip_cmd[0] == "exit":
                    # drop back to addressing the server itself
                    self.in_client = "server"
                    message = {}
                    message["type"] = "state"
                    message["state"] = "server"
                    self.socket_to_others.send_string(
                        server_config.server_to_input_subject, zmq.SNDMORE)
                    self.socket_to_others.send_string(json.dumps(message))
                else:
                    try:
                        # forward the raw command to the selected client's tag
                        tag = self.in_client
                        self.socket_to_others.send_string(tag, zmq.SNDMORE)
                        self.socket_to_others.send_string(
                            json.dumps({
                                "type": "cmd",
                                "cmd": ip_cmd
                            }))
                    except Exception as err:
                        message = {}
                        message["type"] = "error"
                        message["error"] = "unexcept error is occur!"
                        self.socket_to_others.send_string(
                            server_config.server_to_input_subject, zmq.SNDMORE)
                        self.socket_to_others.send_string(json.dumps(message))
        # from client cmd_result
        elif body["type"] == "cmd_result":
            # pass the result straight back to the input console
            self.socket_to_others.send_string(
                server_config.server_to_input_subject, zmq.SNDMORE)
            self.socket_to_others.send_string(json.dumps(body))
        return

    def timeout(self):
        # self.socket_to_others.send_string(zmqconfig.server_to_client_subject, zmq.SNDMORE)
        # self.socket_to_others.send_string(json.dumps(""))
        self.ioloop.add_timeout(time.time() + 3, self.timeout)  # keep rescheduling
        return

    def run(self):
        # PUB socket for outgoing messages to the input console / clients.
        self.socket_to_others = server_config.context.socket(zmq.PUB)
        self.socket_to_others.bind(server_config.server_zmq_addr)
        # SUB socket receiving from all peers on different machines
        # (translated from the original Chinese comment).
        self.socket_from_others = server_config.context.socket(zmq.SUB)
        self.socket_from_others.setsockopt_string(zmq.SUBSCRIBE, "")
        self.socket_from_others.bind(server_config.server_zmq_addr_accept)
        self.stream_from_others_sub = zmqstream.ZMQStream(
            self.socket_from_others)
        self.stream_from_others_sub.on_recv(self.process_message_client)
        self.ioloop.add_timeout(time.time(), self.timeout)
        # application = tornado.web.Application(urls)
        # application.listen(8887)
        self.ioloop.start()  # blocks
        return