def __init__(self, server_host, server_port, timeout=3, encoding="utf-8",
             skip_translations=False, translation_cache=None):
    """
    Create a client instance which can be used to get translations.

    A client will close all resources on exit. If resources should be
    freed before that, use the close function.

    :type server_host: str
    :type server_port: int
    :param timeout: Timeout for a request, in seconds.
    :type timeout: int
    :type encoding: str
    """
    self._timeout = timeout or 0.001  # seconds
    self._encoding = encoding
    self._context = Context(1)
    self._socket = self._context.socket(DEALER)
    self._socket.set(LINGER, 0)  # do not wait for anything when closing
    self._socket.connect("tcp://{}:{}".format(server_host, server_port))
    self._poller = Poller()
    self._poller.register(self._socket, POLLIN)
    self._skip_translations = skip_translations
    self._translation_cache = translation_cache
    if self._translation_cache and not isinstance(self._translation_cache,
                                                  TranslationCache):
        raise TypeError(
            "The cache needs to be a subclass of TranslationCache")
def __init__(self, addr, stype):
    self._context = Context()
    self._socket = self._context.socket(stype)
    if stype in [REP, PUB]:
        self._socket.bind(addr)
    else:
        self._socket.connect(addr)
class Listener(Thread):

    def __init__(self):
        super(Listener, self).__init__(name="Listener")
        self._shutdown = False
        self.context = Context()
        self.sub = self.context.socket(SUB)
        self.sub.bind('tcp://*:7000')
        self.sub.setsockopt(SUBSCRIBE, "")
        self.poller = Poller()
        self.poller.register(self.sub, POLLIN)

    def cleanup(self):
        self.sub.close()
        self.context.term()

    def run(self):
        while True:
            socks = dict(self.poller.poll(timeout=1))
            if socks.get(self.sub) == POLLIN:
                msg = self.sub.recv(flags=NOBLOCK)
                print(msg)
            if self._shutdown:
                break
        self.cleanup()
class Leatherneck(Thread):

    def __init__(self):
        super(Leatherneck, self).__init__(name="Leatherneck")
        self.context = Context()
        self.pull = self.context.socket(PULL)
        self.pull.connect("tcp://localhost:7000")
        self.push = self.context.socket(PUSH)
        self.push.connect("tcp://localhost:7001")
        self.poller = Poller()
        self.poller.register(self.pull, POLLIN)
        self._shutdown = False

    def cleanup(self):
        self.push.close()
        self.pull.close()
        self.context.term()

    def run(self):
        while True:
            socks = dict(self.poller.poll(timeout=1))
            if socks.get(self.pull) == POLLIN:
                msg = self.pull.recv()
                msg += " WORK COMPLETE, " + str(time())
                self.push.send(msg)
            if self._shutdown:
                break
        self.cleanup()
def main(name):
    map = generate_map(name)
    context = Context()
    pub_socket = context.socket(PUB)
    pub_socket.bind('tcp://0.0.0.0:20000')
    map['pub_server'] = pub_socket
    rep_socket = context.socket(REP)
    rep_socket.bind('tcp://0.0.0.0:20001')
    map['rep_server'] = rep_socket
    manager = MissionManager()
    rep_socket = map['rep_server']
    manager.start_mission(LocMission(map, None))
    # Early return: the request loop below is currently unreachable.
    return
    while True:
        req = MissionMessage()
        req.ParseFromString(rep_socket.recv())
        if req.type == 6:
            req.type = 7
            try:
                if req.missionType == MissionMessage.CONNECTION:
                    manager.start_mission(PlaceMission(map, (0.0, 0.0), req))
                elif req.missionType == MissionMessage.LOCALIZATION:
                    manager.start_mission(LocMission(map, req))
                else:
                    raise ValueError('unknown mission type')
                req.status = MissionMessage.ACTIVE
            except RuntimeError:
                req.status = MissionMessage.REJECTED
        rep_socket.send(req.SerializeToString())
def run(port):
    """
    Run a translations server at a specific port.
    It always listens on all available network devices!
    """
    context = Context(1)
    sync_socket = context.socket(ROUTER)
    sync_socket.bind(_SYNC_ENDPOINT)
    frontend = context.socket(ROUTER)
    frontend.bind("tcp://*:{}".format(port))
    # Socket facing services
    backend = context.socket(DEALER)
    backend.bind(_REQUEST_ENDPOINT)
    try:
        worker_threads, worker_identities = _start_workers(
            context, sync_socket, config.WORKERS, 1000)
        _LOG.debug("Running device...")
        try:
            proxy(frontend, backend)
        except KeyboardInterrupt:
            print("\rShutting down...")
            frontend.close()
            frontend = None
            _shut_down_workers(sync_socket, worker_threads,
                               worker_identities, 5)
    finally:
        if frontend is not None:
            frontend.close()
        backend.close()
        sync_socket.close()
    _LOG.debug("Done")
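# A minimal sketch of a worker that could sit behind the DEALER backend in
# run() above, assuming _REQUEST_ENDPOINT is the same endpoint string and
# that workers use REP sockets (zmq.REP, imported like ROUTER/DEALER) with a
# simple echo reply; the real _start_workers/_shut_down_workers helpers are
# not shown in this snippet, so this is illustrative only.
def _echo_worker(context, request_endpoint):
    sock = context.socket(REP)
    sock.connect(request_endpoint)
    while True:
        request = sock.recv()
        sock.send(request)  # echo the request back through the proxy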
class HomeBase(Thread):

    def __init__(self):
        super(HomeBase, self).__init__(name="HomeBase")
        self.context = Context()
        self.pull = self.context.socket(PULL)
        self.pull.bind("tcp://*:7001")
        self._shutdown = False
        self.poller = Poller()
        self.poller.register(self.pull, POLLIN)

    def cleanup(self):
        self.pull.close()
        self.context.term()

    def run(self):
        while True:
            socks = dict(self.poller.poll(timeout=1))
            if socks.get(self.pull) == POLLIN:
                msg = self.pull.recv()
                msg += ", WORK RECEIVED "
                print(msg)
            if self._shutdown:
                break
        self.cleanup()
class wrapped_dispatcher(object):

    def __init__(self, enqueued=None, on_load=None):
        self.queue = Queue()
        kwargs = {'queue': self.queue}
        if enqueued:
            kwargs['enqueued_tasks'] = enqueued
        if on_load:
            kwargs['on_daemon_load'] = on_load
        self.dispatcher = WrappedDispatcher(**kwargs)
        self.context = None
        self.sockets = {}

    def __enter__(self):
        self.dispatcher.start()
        self.context = Context()
        self.sockets['in'] = self.context.socket(PUSH)
        self.sockets['out'] = self.context.socket(PULL)
        self.sockets['in'].connect(settings.ZTASKD_URL)
        self.sockets['out'].connect(settings.ZTASK_WORKER_URL)
        return (self.queue, self.sockets['in'], self.sockets['out'])

    def __exit__(self, exc_type, exc_value, traceback):
        self.dispatcher.terminate()
        self.context.destroy()
        self.queue.close()
class Leatherneck(Thread):

    def __init__(self):
        super(Leatherneck, self).__init__(name="Leatherneck")
        self.context = Context()
        self.pull = self.context.socket(PULL)
        self.pull.connect("tcp://localhost:7000")
        self.push = self.context.socket(PUSH)
        self.push.connect("tcp://localhost:7001")
        self.poller = Poller()
        self.poller.register(self.pull, POLLIN)
        self._shutdown = False
        for th in t_enum():
            if th.name == "MainThread":
                self.mainthread = th

    def cleanup(self):
        print("Workers exiting...")
        self.push.close()
        self.pull.close()
        self.context.term()

    def run(self):
        while True:
            if not self.mainthread.is_alive():
                self._shutdown = True
                break
            socks = dict(self.poller.poll(timeout=1))
            if socks.get(self.pull) == POLLIN:
                msg = self.pull.recv(flags=NOBLOCK)
                msg += " WORK COMPLETE, " + str(time())
                self.push.send(msg, flags=NOBLOCK)
            if self._shutdown:
                break
        self.cleanup()
class HomeBase(Thread):

    def __init__(self):
        super(HomeBase, self).__init__(name="HomeBase")
        self.context = Context()
        self.pull = self.context.socket(PULL)
        self.pull.bind("tcp://*:7001")
        self._shutdown = False
        self.poller = Poller()
        self.poller.register(self.pull, POLLIN)
        for th in t_enum():
            if th.name == "MainThread":
                self.mainthread = th

    def cleanup(self):
        print("Home exiting...")
        self.pull.close()
        self.context.term()

    def run(self):
        while True:
            if not self.mainthread.is_alive():
                self._shutdown = True
                break
            socks = dict(self.poller.poll(timeout=1))
            if socks.get(self.pull) == POLLIN:
                msg = self.pull.recv(flags=NOBLOCK)
                msg += ", WORK RECEIVED "
                print(msg)
            if self._shutdown:
                break
        self.cleanup()
def initialize_zmq_socket(host, port):
    logger.info("Initializing ZMQ consumer socket: Host: %s, Port: %d",
                host, port)
    context = Context()
    zmq_socket = context.socket(PULL)
    zmq_socket.connect("tcp://{0}:{1}".format(host, port))
    logger.info("ZMQ consumer socket initialized.")
    return zmq_socket
class DrillingWell(Thread):

    def __init__(self):
        super(DrillingWell, self).__init__(name="DrillingWell")
        self.context = Context()
        self.push = self.context.socket(PUSH)
        self.push.bind("tcp://*:7000")
        self._shutdown = False
        for th in t_enum():
            if th.name == "MainThread":
                self.mainthread = th

    def cleanup(self):
        print("Producer exiting...")
        self.push.close()
        self.context.term()

    def run(self):
        count = 0
        while True:
            if not self.mainthread.is_alive():
                self._shutdown = True
                break
            sleep(0.01)
            count += 1
            self.push.send("SOMETHING " + str(count))
            if self._shutdown:
                break
        self.cleanup()
def __init__(self, **kwargs):
    self._input_sock = None
    self._output_sock = None
    self._control_sock = None

    # determine if outgoing messages should enable NOBLOCK on send;
    # default behaviour is to block on a send call till a receiver is present
    self.no_block_send = False

    # configure the interrupt handling
    self._stop = True
    signal.signal(signal.SIGINT, self._signal_interrupt_handler)

    # a regular heartbeat interval must be set to the default.
    self.heartbeat = 3  # seconds

    # create the zmq context
    self.zmq_ctx = Context()

    # set the default input receive handler, if none has been assigned
    if not hasattr(self, 'input_recv_handler'):
        self.input_recv_handler = self._default_recv_handler

    # set the default command handler, if none has been assigned
    if not hasattr(self, '_command_handler'):
        self._command_handler = self._default_command_handler

    # construct the poller
    self._poll = Poller()

    # monitoring of the message stream is off by default
    self.monitor_stream = False

    Scaffold.__init__(self, **kwargs)
class Zmq_broker(BaseModule):

    context = None
    s_pub = None
    pub_endpoint = None
    serialize_to = None
    serialize = None

    def __init__(self, mod_conf, pub_endpoint, serialize_to):
        from zmq import Context, PUB
        BaseModule.__init__(self, mod_conf)
        self.pub_endpoint = pub_endpoint
        self.serialize_to = serialize_to
        logger.info("[Zmq Broker] Binding to endpoint " + self.pub_endpoint)

        # This doesn't work properly in init();
        # sometimes it ends up being called several
        # times and the address becomes already in use.
        self.context = Context()
        self.s_pub = self.context.socket(PUB)
        self.s_pub.bind(self.pub_endpoint)

        # Load the correct serialization function
        # depending on the serialization method
        # chosen in the configuration.
        if self.serialize_to == "msgpack":
            from msgpack import Packer
            packer = Packer(default=encode_monitoring_data)
            self.serialize = lambda msg: packer.pack(msg)
        elif self.serialize_to == "json":
            self.serialize = lambda msg: json.dumps(msg, cls=SetEncoder)
        else:
            raise Exception("[Zmq Broker] No valid serialization method "
                            "defined (Got " + str(self.serialize_to) + ")!")

    # Called by the Broker to say 'let's prepare yourself guy'
    def init(self):
        logger.info("[Zmq Broker] Initialization of the Zmq broker module")

    # Publish to the ZeroMQ socket
    # using the chosen serialization method
    def publish(self, msg, topic=""):
        from zmq import SNDMORE
        data = self.serialize(msg)
        self.s_pub.send(topic, SNDMORE)
        self.s_pub.send(data)

    # A host check has just arrived; we UPDATE data info with this
    def manage_brok(self, b):
        logger.debug("[Zmq Broker] Got broker update: " + str(b.data))
        # Publish update data to the ZeroMQ endpoint.
        msg = b.data
        self.publish(msg, b.type)

    # Properly close down this thing.
    def do_stop(self):
        self.s_pub.close()
        self.context.term()
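# A minimal subscriber sketch for the broker above, assuming the "json"
# serialization method and the same pub_endpoint; the endpoint value below
# is illustrative, not taken from the original module configuration.
import json
from zmq import Context, SUB, SUBSCRIBE

def consume_broks(pub_endpoint="tcp://127.0.0.1:5599"):
    context = Context()
    sub = context.socket(SUB)
    sub.connect(pub_endpoint)
    sub.setsockopt(SUBSCRIBE, b"")  # subscribe to every topic
    while True:
        # publish() sends two frames: the topic, then the serialized payload
        topic, payload = sub.recv_multipart()
        print(topic, json.loads(payload))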
async def get(socket_id: SocketStruct, msg: bytes, ctx: zmq.Context,
              timeout=500, linger=2000, retries=10, dealer=False):
    if retries < 0:
        return None

    if dealer:
        socket = ctx.socket(zmq.DEALER)
    else:
        socket = ctx.socket(zmq.REQ)

    socket.setsockopt(zmq.LINGER, linger)
    try:
        # Allow passing an existing socket to save time on initializing
        # a new one and waiting for connection.
        socket.connect(str(socket_id))

        await socket.send(msg)

        event = await socket.poll(timeout=timeout, flags=zmq.POLLIN)
        if event:
            response = await socket.recv()
            socket.close()
            return response
        else:
            socket.close()
            return None
    except Exception as e:
        socket.close()
        return await get(socket_id, msg, ctx, timeout, linger, retries - 1)
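# A minimal usage sketch for the coroutine above. It assumes ctx is a
# zmq.asyncio.Context (required for the awaitable send/poll/recv calls) and
# that str(socket_id) yields a plain endpoint string; the endpoint below is
# illustrative.
import asyncio
import zmq.asyncio

async def _demo():
    ctx = zmq.asyncio.Context()
    reply = await get("tcp://127.0.0.1:9999", b"ping", ctx, timeout=500, retries=2)
    print(reply)  # None if nothing answered within the timeout/retries
    ctx.destroy()

# asyncio.run(_demo())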
class WorkerTest(TestCase):
    """Ensures the worker correctly handles messages
    """

    def setUp(self):
        self.queue = Queue()
        self.context = Context()
        self.socket = self.context.socket(PUSH)
        self.socket.bind(settings.ZTASK_WORKER_URL)
        self.worker = WrappedWorker(queue=self.queue)
        self.worker.start()

    def tearDown(self):
        self.worker.terminate()
        self.context.destroy()

    def test_exec(self):
        """Tests executing a task
        """
        uuid = str(uuid4())
        self.socket.send_pyobj((uuid,))
        self.assertEqual(self.queue.get(), uuid)
        self.assertTrue(self.queue.get())
        self.queue.close()
def on_message(self, message):
    tcp = HSD.get_tcp()
    poller = zmq.Poller()
    ctx1 = Context()
    sub_socket = ctx1.socket(zmq.SUB)
    sub_socket.connect('tcp://{}:6868'.format(tcp))
    sub_socket.setsockopt_unicode(zmq.SUBSCRIBE, '')
    poller.register(sub_socket, zmq.POLLIN)
    while 1:
        # push data to each connected client in a loop
        for handler in WebSocketHandler.socket_handlers:
            ticker = sub_socket.recv_pyobj()
            this_time = ticker.TickerTime
            objArr = cache.get("objArr")
            times, opens, high, low, close, vol = objArr if objArr else (
                ticker.TickerTime * 1000, ticker.Price, ticker.Price,
                ticker.Price, ticker.Price, ticker.Qty)
            GetRealTimeData(ticker.TickerTime, ticker.Price, ticker.Qty)
            # print(times, opens, high, low, close)
            self.zs = 0
            if time.localtime(this_time).tm_min != time.localtime(times / 1000).tm_min:
                tm = time.localtime(times / 1000)
                tm = datetime.datetime(tm.tm_year, tm.tm_mon, tm.tm_mday,
                                       tm.tm_hour, tm.tm_min)
                self.zs = self.zbjs.send((tm, opens, high, low, close))
                self.zs = self.zs[tm]['datetimes'][-1][1] if self.zs[tm]['datetimes'] else 0
            if this_time * 1000 != times:
                data = {'times': str(times), 'opens': str(opens),
                        'high': str(high), 'low': str(low),
                        'close': str(close), 'vol': str(vol),
                        'zs': str(self.zs)}
                data = json.dumps(data).encode()
                handler.write_message(data)
def __init__(self, mod_conf, pub_endpoint, serialize_to):
    from zmq import Context, PUB
    BaseModule.__init__(self, mod_conf)
    self.pub_endpoint = pub_endpoint
    self.serialize_to = serialize_to
    logger.info("[Zmq Broker] Binding to endpoint " + self.pub_endpoint)

    # This doesn't work properly in init();
    # sometimes it ends up being called several
    # times and the address becomes already in use.
    self.context = Context()
    self.s_pub = self.context.socket(PUB)
    self.s_pub.bind(self.pub_endpoint)

    # Load the correct serialization function
    # depending on the serialization method
    # chosen in the configuration.
    if self.serialize_to == "msgpack":
        from msgpack import Packer
        packer = Packer(default=encode_monitoring_data)
        self.serialize = lambda msg: packer.pack(msg)
    elif self.serialize_to == "json":
        self.serialize = lambda msg: json.dumps(msg, cls=SetEncoder)
    else:
        raise Exception(
            "[Zmq Broker] No valid serialization method defined (Got "
            + str(self.serialize_to) + ")!")
def __init__(self, context: zmq.Context):
    self.pull_socket = context.socket(zmq.PULL)
    self.pull_socket.connect(self.pull_address)

    self.sub_socket = context.socket(zmq.SUB)
    self.sub_socket.connect(self.sub_address)
    self.sub_socket.setsockopt(zmq.SUBSCRIBE, b"10001")
class Zmq_broker(BaseModule):

    context = None
    s_pub = None
    pub_endpoint = None
    serialize_to = None
    serialize = None

    def __init__(self, mod_conf, pub_endpoint, serialize_to):
        from zmq import Context, PUB
        BaseModule.__init__(self, mod_conf)
        self.pub_endpoint = pub_endpoint
        self.serialize_to = serialize_to
        logger.info("[Zmq Broker] Binding to endpoint " + self.pub_endpoint)

        # This doesn't work properly in init();
        # sometimes it ends up being called several
        # times and the address becomes already in use.
        self.context = Context()
        self.s_pub = self.context.socket(PUB)
        self.s_pub.bind(self.pub_endpoint)

        # Load the correct serialization function
        # depending on the serialization method
        # chosen in the configuration.
        if self.serialize_to == "msgpack":
            from msgpack import Packer
            packer = Packer(default=encode_monitoring_data)
            self.serialize = lambda msg: packer.pack(msg)
        elif self.serialize_to == "json":
            self.serialize = lambda msg: json.dumps(msg, cls=SetEncoder)
        else:
            raise Exception(
                "[Zmq Broker] No valid serialization method defined (Got "
                + str(self.serialize_to) + ")!")

    # Called by the Broker to say 'let's prepare yourself guy'
    def init(self):
        logger.info("[Zmq Broker] Initialization of the Zmq broker module")

    # Publish to the ZeroMQ socket
    # using the chosen serialization method
    def publish(self, msg, topic=""):
        from zmq import SNDMORE
        data = self.serialize(msg)
        self.s_pub.send(topic, SNDMORE)
        self.s_pub.send(data)

    # A host check has just arrived; we UPDATE data info with this
    def manage_brok(self, b):
        logger.debug("[Zmq Broker] Got broker update: " + str(b.data))
        # Publish update data to the ZeroMQ endpoint.
        msg = b.data
        self.publish(msg, b.type)

    # Properly close down this thing.
    def do_stop(self):
        self.s_pub.close()
        self.context.term()
class ZmqFactory(object):
    """
    I control individual ZeroMQ connections.

    Factory creates and destroys ZeroMQ context.

    :var reactor: reference to Twisted reactor used by all the connections
    :var ioThreads: number of IO threads ZeroMQ will be using for this context
    :vartype ioThreads: int
    :var lingerPeriod: number of milliseconds to block when closing the socket
        (terminating the context), when there are some messages pending to be sent
    :vartype lingerPeriod: int
    :var connections: set of instantiated :class:`ZmqConnection`
    :vartype connections: set
    :var context: ZeroMQ context
    """

    reactor = reactor
    ioThreads = 1
    lingerPeriod = 100

    def __init__(self):
        """
        Constructor.

        Create ZeroMQ context.
        """
        self.connections = set()
        self.context = Context(self.ioThreads)

    def __repr__(self):
        return "ZmqFactory()"

    def shutdown(self):
        """
        Shutdown factory.

        This is shutting down all created connections and terminating
        ZeroMQ context. Also cleans up Twisted reactor.
        """
        for connection in self.connections.copy():
            connection.shutdown()

        self.connections = None

        self.context.term()
        self.context = None

    def registerForShutdown(self):
        """
        Register factory to be automatically shut down on reactor shutdown.

        It is recommended that this method is called on any created factory.
        """
        reactor.addSystemEventTrigger('during', 'shutdown', self.shutdown)
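# A minimal usage sketch for ZmqFactory, assuming the txZMQ-style API shown
# above, where ZmqConnection subclasses register themselves against the
# factory (such connection classes are assumed to exist alongside it):
factory = ZmqFactory()
factory.registerForShutdown()  # terminate the context when the reactor stops
# Connections created with this factory end up in factory.connections and
# are closed by factory.shutdown().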
def __init__(self, prodcode, addr=f'tcp://{server_IP}:6869'):
    self._sub_socket = Context().socket(zmq.SUB)
    self._sub_socket.set_string(zmq.SUBSCRIBE, '')
    self._addr = addr
    self._prodcode = prodcode
    self._is_active = False
    self._is_sub = False
    self._thread = Thread()
def __init__(self):
    """
    Constructor.

    Create ZeroMQ context.
    """
    self.connections = set()
    self.context = Context(self.ioThreads)
def __init__(self):
    super(HomeBase, self).__init__(name="HomeBase")
    self.context = Context()
    self.pull = self.context.socket(PULL)
    self.pull.bind("tcp://*:7001")
    self._shutdown = False
    self.poller = Poller()
    self.poller.register(self.pull, POLLIN)
def run(self):
    context = Context()
    socket = context.socket(self.socket_type)
    socket.bind(self.socket_url)
    data = socket.recv_pyobj()
    self.queue.put(data)
    context.destroy()
    self.queue.close()
def __init__(self, context: zmq.Context, tasks_to_send: int):
    self.tasks_to_send = tasks_to_send

    self.socket = context.socket(zmq.PUSH)
    self.socket.bind(self.address)

    self.sink_socket = context.socket(zmq.PUSH)
    self.sink_socket.connect(self.sink_address)
def __init__(self, host, port):
    self._host = host
    self._port = port
    self._context = Context()
    self._socket = self._context.socket(REQ)
    self._socket.setsockopt(LINGER, 1)
    self._socket.connect("tcp://" + host + ":" + str(port))
    self._poller = Poller()
    self._poller.register(self._socket, POLLIN)
def test_onload(self):
    """Tests onload calls
    """
    context = Context()
    socket = context.socket(PULL)
    socket.bind('tcp://127.0.0.1:5560')
    with wrapped_dispatcher(on_load=['django_ztaskq.tests.dummy_onload']) \
            as __:
        self.assertTrue(socket.recv_pyobj())
def __init__(self):
    super(DrillingWell, self).__init__(name="DrillingWell")
    self.context = Context()
    self.push = self.context.socket(PUSH)
    self.push.bind("tcp://*:7000")
    self._shutdown = False
    for th in t_enum():
        if th.name == "MainThread":
            self.mainthread = th
class TestEchoService(WindmillTestCase):

    def setUp(self):
        self.zmq_ctx = Context()
        d = 'test_out'
        if not os.path.exists(d):
            os.makedirs(d)

    def tearDown(self):
        pass

    def test_echo_service_default_behavior(self):
        req_out_sock = self.zmq_ctx.socket(REQ)
        req_out_sock.bind('tcp://*:8889')

        t = thread_wrap_windmill('EchoService')
        try:
            t.start()
            self.assertTrue(t.is_alive(),
                            'The EchoService instance should have started.')

            req_out_sock.send("echo, echo, echo")
            msg = req_out_sock.recv()
            self.assertEqual("echo, echo, echo", msg)
        finally:
            t.windmill.kill()
            t.join(3)
            self.assertFalse(t.is_alive(),
                             'The EchoService instance should have shut down.')
            req_out_sock.close()

    def test_echo_service_options(self):
        req_out_sock = self.zmq_ctx.socket(REQ)
        req_out_sock.bind('tcp://*:8899')

        argv = ['-m', 'pong', '--reply_sock_url', 'tcp://localhost:8899']
        t = thread_wrap_windmill('EchoService', argv=argv)
        try:
            t.start()
            self.assertTrue(t.is_alive(),
                            'The EchoService instance should have started.')

            req_out_sock.send('ping')
            msg = req_out_sock.recv()
            self.assertEqual('pong', msg)
        finally:
            t.windmill.kill()
            t.join(3)
            self.assertFalse(t.is_alive(),
                             'The EchoService instance should have shut down.')
            req_out_sock.close()
def initialize_zmq_socket(interface, port):
    logger.info("Initializing ZMQ producer socket: Host: %s, Port: %d",
                interface, port)
    context = Context()
    zmq_socket = context.socket(PUSH)
    zmq_socket.bind("tcp://{0}:{1}".format(interface, port))
    logger.info("ZMQ producer socket initialized.")
    return zmq_socket
def Main():
    context = Context()
    print("Connecting to hello world server...")
    socket = context.socket(REQ)
    # Use tcp here: ZeroMQ does not provide a plain udp:// transport for
    # request/reply sockets.
    socket.connect("tcp://localhost:15555")

    for i in range(10):
        socket.send(b"Hello")
        message = socket.recv()
        print("Received reply %s [ %s ]" % (i, message))
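# A minimal matching reply-server sketch for the client above; the bind
# address mirrors the port used in Main() and the b"World" payload is
# illustrative.
from zmq import Context, REP

def reply_server():
    context = Context()
    socket = context.socket(REP)
    socket.bind("tcp://*:15555")
    while True:
        request = socket.recv()   # wait for "Hello"
        socket.send(b"World")     # answer every request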
def close_connections(ctx: zmq.Context):
    """
    Function used when stopping the tokendealer.
    """
    ctx.destroy()
    if ctx.closed:
        print("Connection successfully closed.")
        exit()
    else:
        exit(-1)
def __init__(self):
    super(Leatherneck, self).__init__(name="Leatherneck")
    self.context = Context()
    self.pull = self.context.socket(PULL)
    self.pull.connect("tcp://localhost:7000")
    self.push = self.context.socket(PUSH)
    self.push.connect("tcp://localhost:7001")
    self.poller = Poller()
    self.poller.register(self.pull, POLLIN)
    self._shutdown = False
class ZMQ():
    """ZMQ client / server base class"""

    def __init__(self, mode, proto, addr, port):
        """Initializes the ZMQ handler"""
        self.state = None
        self._mode = mode
        self._socket = None
        self._proto = proto.lower()
        self._addr = addr
        self.port = None if port is None else int(port)

    def __str__(self):
        """Print configuration details"""
        return '{0} {1} {2} {3} {4}'.format(
            self._mode, self._proto, self._socket, self._addr, self.port)

    @property
    def url(self):
        """Returns the URL"""
        if self.port:
            return '{proto}://{addr}:{port}'.format(
                proto=self._proto, addr=self._addr, port=self.port)
        else:
            return '{proto}://{addr}'.format(
                proto=self._proto, addr=self._addr)

    def _initsock(self):
        """Sets the socket"""
        if self._socket is None:
            self._socket = Context().socket(self._mode)

    def close(self):
        """Closes the socket"""
        # self._socket.close()
        pass

    def receive(self):
        """Receives a byte stream from the socket"""
        data = self._socket.recv()
        self.state = ZMQState.SEND
        return data

    def send(self, data, autoencode=True):
        """Sends a string to the socket"""
        if autoencode:
            with suppress(AttributeError):
                data = data.encode()
        self._socket.send(data)
        self.state = ZMQState.RECV
class SubPrice:

    def __init__(self, prodcode, addr=f'tcp://{server_IP}:6869'):
        self._ctx = Context()
        self._sub_socket = self._ctx.socket(zmq.SUB)
        self._sub_socket.set_string(zmq.SUBSCRIBE, '')
        self._sub_socket.setsockopt(zmq.RCVTIMEO, 5000)
        self._req_price_socket = self._ctx.socket(zmq.REQ)
        self._req_price_socket.connect(f'tcp://{server_IP}:6870')
        self._addr = addr
        self._prodcode = prodcode
        self.__is_active = False
        self.__is_sub = False
        self.__thread = Thread()
        self._spfunc = SpFunc()

    def __run(self, func):
        while self.__is_active:
            try:
                price = self._sub_socket.recv_pyobj()
                func(price)
            except zmq.ZMQError:
                ...

    def __call__(self, func):
        self._func = func
        return self

    def start(self):
        if not self.__is_active:
            self.__is_active = True
            self._sub_socket.connect(self._addr)
            self.__thread = Thread(target=self.__run, args=(self._func,))
            self.__thread.setDaemon(True)
            self.__thread.start()

    def stop(self):
        self.__is_active = False
        self.__thread.join()
        self._sub_socket.disconnect(self._addr)

    def is_alive(self):
        return self.__is_active

    def sub(self):
        self._spfunc.sub_price(self._prodcode)
        self.__is_sub = True

    def unsub(self):
        self._spfunc.unsub_price(self._prodcode)
        self.__is_sub = False

    def get_price(self):
        self._req_price_socket.send_string(self._prodcode)
        price = self._req_price_socket.recv_pyobj()
        return price
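# A minimal usage sketch for the SubPrice decorator class above; the product
# code 'HSIF5' is a made-up example and server_IP is assumed to be configured
# elsewhere in the module.
@SubPrice('HSIF5')
def on_price(price):
    print(price)

on_price.sub()    # ask the feed to start publishing this product
on_price.start()  # spawn the background thread that calls on_price(price)
# ... later:
# on_price.stop()
# on_price.unsub()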
def __init__(self, prodcode, addr=f'tcp://{server_IP}:6868'):
    self._ctx = Context()
    self._sub_socket = self._ctx.socket(zmq.SUB)
    self._sub_socket.set_string(zmq.SUBSCRIBE, '')
    self._sub_socket.setsockopt(zmq.RCVTIMEO, 5000)
    self._addr = addr
    self._prodcode = prodcode
    self.__is_active = False
    self.__is_sub = False
    self.__thread = Thread()
    self._spfunc = SpFunc()
def __init__(self):
    """
    Constructor.

    Create ZeroMQ context.
    """
    if not self.initialized:
        self.initialized = True
        self.connections = set()
        self.context = Context(self.ioThreads)
        reactor.addSystemEventTrigger('during', 'shutdown', self.shutdown)
def __init__(self, context: zmq.Context):
    self.pull_socket = context.socket(zmq.PULL)
    self.pull_socket.connect(self.pull_address)

    self.sub_socket = context.socket(zmq.SUB)
    self.sub_socket.connect(self.sub_address)
    self.sub_socket.setsockopt(zmq.SUBSCRIBE, b"10001")

    self.poller = zmq.Poller()
    self.poller.register(self.pull_socket, zmq.POLLIN)
    self.poller.register(self.sub_socket, zmq.POLLIN)
class Master(object):

    def __init__(self, full_socket_address):
        self.context = Context()
        self.workers = OrderedDict()
        self.overflow_launch = False
        self.stats = False
        self.full_socket_address = full_socket_address
        self.socket_address, self.socket_port = full_socket_address.split(':')

    @property
    @zeroMQError
    def init_pubsocket(self):
        '''
        Initialize the socket used to launch the benchmark via a publish.
        '''
        self.pubsocket = self.context.socket(PUB)
        self.pubsocket.bind('tcp://{}'.format(self.full_socket_address))

    @property
    @zeroMQError
    def init_repsocket(self):
        '''
        Initialize the socket used to reply to a new worker that joins
        dynamically; by default this port is fixed (55555).
        '''
        self.repsocket = self.context.socket(REP)
        self.repsocket.bind('tcp://{}:55555'.format(self.socket_address))

    @property
    def wait_workers(self):
        '''
        Allow workers to join while waiting for the message that launches
        the benchmark.
        '''
        while not self.overflow_launch:
            message = loads(self.repsocket.recv_json())
            # workers
            if '_id' in message:
                self.workers[message['_id']] = 'ready'
                self.repsocket.send('ok')
                sys.stdout.write('worker {} is ready\n'.format(message['_id']))
            # overflow signals
            elif 'overflow' in message:
                self.repsocket.send('ok')
                sys.stdout.write('master: launch overflow for {}\n'.format(
                    self.workers.keys()))
                self.launch_benchmark

    @property
    def launch_benchmark(self):
        '''
        Trigger the benchmark.
        '''
        self.pubsocket.send('OVERFLOW')
        self.workers = OrderedDict()
def inicializar_puertos(self):
    from zmq import Context, SUB, SUBSCRIBE
    if self.modo == "cinematico":
        context = Context()
        self.socket_referencias = context.socket(SUB)
        self.socket_referencias.connect(
            "tcp://localhost:" + self.puerto_referencias)
        self.socket_referencias.setsockopt(SUBSCRIBE, b'')
    if self.modo == "dinamico":
        raise NotImplementedError
    if self.modo == "matematico":
        raise NotImplementedError
def forward_request(self, address, message):
    """Forward a request to another server.
    """
    if address not in self.mirrors:
        context = Context()
        socket = context.socket(REQ)
        socket.setsockopt(LINGER, 1)
        socket.connect(address)
        self.mirrors[address] = socket
    else:
        socket = self.mirrors[address]
    socket.send(str(message))
    return socket.recv()
def forward_request(self, address, message):
    """Forward a request to another server.
    """
    # FIXME: shouldn't we have an open socket at all times ?
    if address not in self.mirrors:
        context = Context()
        socket = context.socket(REQ)
        socket.connect(address)
        self.mirrors[address] = socket
    else:
        socket = self.mirrors[address]
    socket.send(str(message))
    return socket.recv()
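# A standalone sketch of the same REQ-socket caching pattern, assuming a
# module-level cache, a shared pyzmq context and byte-encoded payloads
# (the names below are illustrative, not taken from the class above):
from zmq import Context, REQ, LINGER

_mirrors = {}

def forward(address, message):
    socket = _mirrors.get(address)
    if socket is None:
        socket = Context.instance().socket(REQ)
        socket.setsockopt(LINGER, 1)
        socket.connect(address)
        _mirrors[address] = socket
    socket.send(str(message).encode())  # REQ: exactly one send, then one recv
    return socket.recv()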
def __init__(self):
    # pygame
    self.screen = pygame.display.set_mode((self.WIDTH, self.HEIGHT),
                                          HWSURFACE | DOUBLEBUF, 32)
    pygame.display.set_caption('kbsim - 0.0s')
    self.clock = pygame.time.Clock()
    self.world = world(gravity=(0, 0), doSleep=True)

    # zmq
    context = Context()
    self.socket = context.socket(PAIR)
    self.socket.connect('tcp://localhost:{}'.format(self.ZMQ_PORT))
def setUp(self):
    self.queue = Queue()
    self.context = Context()
    self.socket = self.context.socket(PUSH)
    self.socket.bind(settings.ZTASK_WORKER_URL)
    self.worker = WrappedWorker(queue=self.queue)
    self.worker.start()
def __init__(self, full_socket_address):
    self.context = Context()
    self.workers = OrderedDict()
    self.overflow_launch = False
    self.stats = False
    self.full_socket_address = full_socket_address
    self.socket_address, self.socket_port = full_socket_address.split(':')
def __init__(self):
    # pygame
    self.screen = pygame.display.set_mode((self.WIDTH, self.HEIGHT),
                                          HWSURFACE | DOUBLEBUF, 32)
    pygame.display.set_caption('kbsim')
    self.clock = pygame.time.Clock()

    # pybox2d
    self.world = world(gravity=(0, 0), doSleep=True)
    self.maze = Labyrinth(self.world, self.SCALE_REAL_TO_SIM,
                          self.SCALE_REAL_TO_VIS)

    # zmq
    context = Context()
    self.socket = context.socket(PAIR)
    self.socket.connect('tcp://localhost:{}'.format(self.ZMQ_PORT))
def __init__(self, mod_conf, pub_endpoint, serialize_to):
    from zmq import Context, PUB
    BaseModule.__init__(self, mod_conf)
    self.pub_endpoint = pub_endpoint
    self.serialize_to = serialize_to
    logger.info("[Zmq Broker] Binding to endpoint " + self.pub_endpoint)

    # This doesn't work properly in init();
    # sometimes it ends up being called several
    # times and the address becomes already in use.
    self.context = Context()
    self.s_pub = self.context.socket(PUB)
    self.s_pub.bind(self.pub_endpoint)

    # Load the correct serialization function
    # depending on the serialization method
    # chosen in the configuration.
    if self.serialize_to == "msgpack":
        from msgpack import Packer
        packer = Packer(default=encode_monitoring_data)
        self.serialize = lambda msg: packer.pack(msg)
    elif self.serialize_to == "json":
        self.serialize = lambda msg: json.dumps(msg, cls=SetEncoder)
    else:
        raise Exception(
            "[Zmq Broker] No valid serialization method defined (Got "
            + str(self.serialize_to) + ")!")
def _init(self):
    self._context = Context(1)
    self._queue = BrokerQueue()
    self._init_frontend()
    self._init_backend()
    self._init_pollers()
    self._active = True