class Client(object):
    """Blocking RPC client: sends msgpack-encoded request tuples over a
    ZMQ REQ socket and spins the IO loop until the reply arrives.
    """

    def __init__(self):
        self.stream = None   # ZMQStream wrapping the REQ socket
        self.result = None   # payload of the last successful call
        self._error = None   # error text captured inside the IO loop callback

    def connect(self, port):
        """Connect the REQ socket to *port* (a full ZMQ endpoint URI)."""
        context = zmq.Context()
        socket = context.socket(zmq.REQ)
        socket.connect(port)
        self.stream = ZMQStream(socket)

    def __getattr__(self, item):
        # Any unknown attribute becomes a remote call: client.foo(1, 2)
        # sends the tuple ('foo', 1, 2).
        def wrapper(*args):
            return self._run((item,) + args)
        return wrapper

    def _run(self, request):
        """Send *request*, run the IO loop until a reply arrives, then
        return the payload.

        :raises Exception: if the server replies with an 'ERR' message.

        BUG FIX: the original raised inside the on_recv callback *before*
        stopping the IO loop, so an 'ERR' reply left the loop running and
        the exception was swallowed by the event loop.  We now record the
        error, always stop the loop, and raise after the loop returns.
        """
        self.result = None
        self._error = None

        def on_response(message):
            response = msgpack.unpackb(message[0], use_list=False)
            if response[0] == 'OK':
                self.result = response[1]
            elif response[0] == 'ERR':
                self._error = response[2]
            ZMQIOLoop.instance().stop()

        self.stream.send(msgpack.packb(request))
        self.stream.on_recv(on_response)
        ZMQIOLoop.instance().start()
        if self._error is not None:
            raise Exception(self._error)
        return self.result

    def disconnect(self):
        """Close the underlying stream (and its socket)."""
        self.stream.close()
class IpcTornadoChannel(IpcChannel):
    """Inter-process communication channel class for use with Tornado IO Loop.

    Wraps the base channel's socket in a ZMQStream so received messages and
    socket monitor events are delivered asynchronously via callbacks.
    """

    # Aliases for the two ZMQ monitor events this channel reports.
    CONNECTED = IpcChannel.EVENT_ACCEPTED
    DISCONNECTED = IpcChannel.EVENT_DISCONNECTED

    def __init__(self, channel_type, endpoint=None, context=None, identity=None):
        """Initialise the IpcChannel object.

        :param channel_type: ZeroMQ socket type, using CHANNEL_TYPE_xxx constants
        :param endpoint: URI of channel endpoint, can be specified later
        :param context: ZeroMQ context, will be initialised if not given
        :param identity: channel identity for DEALER type sockets
        """
        super(IpcTornadoChannel, self).__init__(channel_type, endpoint, context, identity)
        self._callback = None          # user receive callback
        self._monitor_callback = None  # user monitor-event callback
        self._stream = None            # lazily-created ZMQStream

    def register_callback(self, callback):
        """Register a callback with this IpcChannel.

        This will result in the construction of a ZMQStream and the callback
        will be registered with the stream object.

        :param callback: callable invoked with each received message
        """
        self._callback = callback
        if not self._stream:
            self._stream = ZMQStream(self.socket)
        self._stream.on_recv(callback)

    def send(self, data):
        """Send data to the IpcChannel.

        :param data: data to send on channel
        """
        # If a Stream is registered send the data out on the tornado IO Loop
        if self._stream:
            self._stream.send(data)
        else:
            super(IpcTornadoChannel, self).send(data)

    def register_monitor(self, callback):
        """Register a callback for socket monitor (connect/disconnect) events.

        :param callback: callable invoked with the parsed monitor message
        """
        self._monitor_callback = callback
        self._monitor_socket = self.socket.get_monitor_socket(
            IpcChannel.EVENT_ACCEPTED | IpcChannel.EVENT_DISCONNECTED)
        # Create the socket
        self._monitor_stream = ZMQStream(self._monitor_socket)
        self._monitor_stream.on_recv(self._internal_monitor_callback)

    def _internal_monitor_callback(self, msg):
        # Decode the raw monitor frames before handing them to the user callback.
        if self._monitor_callback is not None:
            self._monitor_callback(parse_monitor_message(msg))
class DevPort(object):
    """Simulated audio device port.

    Publishes recording progress and (silent) peak levels over two ZMQ PUB
    sockets, rescheduling itself on the Tornado IO loop.
    """

    def __init__(self, card, depth, rate):
        self.card = card
        self.depth = depth
        self.rate = rate
        self.running = False
        ctx = zmq.Context()
        # Progress updates go to an existing collector endpoint.
        self.prog_socket = ctx.socket(zmq.PUB)
        self.prog_socket.connect('ipc:///tmp/progressIn.ipc')
        self.prog_stream = ZMQStream(self.prog_socket, IOLoop.instance())
        self.scheduleProgress()
        # Peak levels are served from our own endpoint.
        self.peak_socket = ctx.socket(zmq.PUB)
        self.peak_socket.bind('ipc:///tmp/peaks.ipc')
        self.peak_stream = ZMQStream(self.peak_socket, IOLoop.instance())
        self.sendPeaks()

    def startRecording(self, path):
        """Begin a (simulated) recording to *path*."""
        self.curPath = path
        self.started = time()
        self.running = True

    def stopRecording(self):
        """Mark the recording as stopped."""
        self.running = False

    def resetPeaks(self):
        """No-op on the dev port."""
        pass

    def stop(self):
        """Alias for stopRecording()."""
        self.stopRecording()

    def waitTillFinished(self):
        """No-op: the dev port never blocks."""
        pass

    def gotSignal(self):
        """The dev port always reports signal present."""
        return True

    def sendProgress(self):
        """Publish one progress message and reschedule in ~1/3 second."""
        duration = 0
        mode = 0
        if self.running:
            duration = int((time() - self.started) * self.rate)
            mode = 1
        payload = json.dumps({'t': duration, 'm': mode, 'b': 0})
        self.prog_stream.send(payload)
        IOLoop.instance().add_timeout(time() + .333, self.sendProgress)

    def sendPeaks(self):
        """Publish silent peaks for four channels, rescheduling every 0.1 s."""
        self.peak_stream.send(json.dumps([-140, -140, -140, -140]))
        IOLoop.instance().add_timeout(time() + .1, self.sendPeaks)

    def scheduleProgress(self):
        """Arm the first progress-update timeout."""
        IOLoop.instance().add_timeout(time() + .333, self.sendProgress)
class TheWorker(object):
    """Worker that exchanges heartbeats with a queue over a ZMQ stream and
    echoes 3-frame requests back, backing off while the queue is silent."""

    def __init__(self, context):
        #context = zmq.Context(1)
        self.frontend = ZMQStream(worker_socket(context))
        self.frontend.on_recv(self.handle_frontend)
        self.liveness = HEARTBEAT_LIVENESS     # stored; not otherwise read here
        self.heartbeat = HEARTBEAT_INTERVAL    # seconds between heartbeats
        self.interval = INTERVAL_INIT          # base multiplier for retry delay
        self.loop = IOLoop.instance()
        self.time = self.interval * self.heartbeat  # current retry delay
        # Deadline: if nothing is heard by then, the queue is considered dead.
        self.heartbeat_at = time.time() + self.heartbeat * HEARTBEAT_LIVENESS
        self.callback = None        # handle of the pending retry timeout
        self.timed_out = False      # True while in back-off mode
        self.start()

    def start(self):
        """Arm the first heartbeat timeout on the IO loop."""
        self.loop.add_timeout(time.time()+self.heartbeat, self.send_heartbeat)
        # try:
        #     IOLoop.instance().start()
        # except KeyboardInterrupt:
        #     times_str('ctrlc')

    def send_heartbeat(self):
        """Send a heartbeat, or enter exponential back-off if the queue
        has been silent past the heartbeat_at deadline."""
        if time.time() > self.heartbeat_at:
            # Double the retry delay while below INTERVAL_MAX (x1 once capped).
            self.time *= 2 if self.time < INTERVAL_MAX else 1
            times_str('W: Timed out.. Retrying in {} seconds..'.format(self.time))
            # Remember the timeout handle so it can be cancelled on recovery.
            self.callback = self.loop.add_timeout(time.time()+self.time*1, self.send_heartbeat)
            self.timed_out = True
            return
        times_str('W: Sending Heartbeat..')
        self.frontend.send(PPP_HEARTBEAT)
        self.loop.add_timeout(time.time()+self.heartbeat, self.send_heartbeat)

    def handle_frontend(self, msg):
        """Handle frames from the queue: 1 frame = heartbeat, 3 frames = work.

        Any traffic refreshes the liveness deadline (heartbeat_at).
        """
        m = msg[:]
        if len(m) == 1:
            times_str('W: Received Heartbeat')
            if self.timed_out:
                # Queue came back: resume normal heartbeating, cancel the
                # pending back-off retry and reset the retry delay.
                self.loop.add_timeout(time.time()+self.heartbeat, self.send_heartbeat)
                self.timed_out = False
                self.loop.remove_timeout(self.callback)
                self.time = self.interval * self.heartbeat
        elif len(m) == 3:
            times_str('Received: '+str(m))
            # Simulated work: blocks the IO loop for 10 seconds on purpose.
            time.sleep(10)
            times_str('Sending it back..')
            self.frontend.send_multipart(m)
        self.heartbeat_at = time.time() + self.heartbeat * HEARTBEAT_LIVENESS
class TornadoAsyncClient(object):
    """Fire-and-forget JSON call proxy over a ZMQ PUSH socket.

    Attribute access yields a function that serialises the call and pushes
    it down the stream; no reply is expected.
    """

    def __init__(self, ip, port):
        from zmq.eventloop.zmqstream import ZMQStream
        self.client = context.socket(zmq.PUSH)
        self.client.connect("tcp://{0}:{1}".format(ip, port))
        self.stream = ZMQStream(self.client)

    def __getattr__(self, attr):
        # Treat any unknown attribute as a remote function name.
        def proxy(*arg):
            return self.send(attr, *arg)
        return proxy

    def send(self, func, *arg):
        """Encode the call as {'func', 'arg'} JSON and send it."""
        payload = json.dumps({'func': func, 'arg': arg})
        self.stream.send(payload)
class TornadoAsyncClient(object):
    """One-way RPC proxy: every attribute lookup becomes a callable that
    pushes a JSON-encoded {'func', 'arg'} message over a ZMQ PUSH socket."""

    def __init__(self, ip, port):
        from zmq.eventloop.zmqstream import ZMQStream
        self.client = context.socket(zmq.PUSH)
        self.client.connect("tcp://{0}:{1}".format(ip, port))
        self.stream = ZMQStream(self.client)

    def __getattr__(self, attr):
        # Unknown attributes become remote-call proxies.
        return lambda *arg: self.send(attr, *arg)

    def send(self, func, *arg):
        """Serialise the call as JSON and send it down the stream."""
        self.stream.send(json.dumps({'func': func, 'arg': arg}))
class ZQueue(object):
    """Minimal queue endpoint: announces readiness on its socket and
    prints whatever multipart messages come back."""

    def __init__(self, backend_socket):
        self.frontend = ZMQStream(backend_socket)
        self.frontend.on_recv(self.handle_backend)
        self.loop = IOLoop.instance()

    def handle_backend(self, msg):
        """Print a shallow copy of the received frames."""
        frames = msg[:]
        print(frames)

    def run(self):
        """Send the READY marker and block in the IO loop until Ctrl-C."""
        try:
            self.frontend.send(PPP_READY)
            self.loop.start()
        except KeyboardInterrupt:
            helpers.Helpers.times_str('ctrlc')
class PubQueue(BaseQueue):
    """Outbound queue that binds a socket and flushes after every send.

    NOTE(review): despite the name, the socket type is zmq.PUSH, not
    zmq.PUB -- confirm this matches BaseQueue's expectations.
    """

    def __init__(self, bind, ioloop=None):
        super().__init__(zmq.PUSH)
        self.socket.bind(bind)
        self.stream = ZMQStream(self.socket, ioloop)
        self.stream.on_send(self.__log_send)

    def __log_send(self, msq, status):
        # Debug trace of every frame that leaves the stream.
        print(msq, status)

    def send(self, data):
        """Queue *data* on the stream and flush it out immediately."""
        self.stream.send(data)
        self.stream.flush()

    def send_string(self, data):
        """Like send(), but for text payloads."""
        self.stream.send_string(data)
        self.stream.flush()
class TornadoClient(object):
    """JSON request/response client over a ZMQ REQ socket.

    Remote functions are exposed as attributes; replies are delivered to
    the callback supplied to send().
    """

    def __init__(self, ip, port):
        from zmq.eventloop.zmqstream import ZMQStream
        self.client = context.socket(zmq.REQ)
        self.client.connect("tcp://{0}:{1}".format(ip, port))
        self.stream = ZMQStream(self.client)

    def __getattr__(self, attr):
        # Any unknown attribute becomes a remote call proxy.
        def proxy(*arg):
            return self.send(attr, *arg)
        return proxy

    def send(self, func, callback, *arg, **kwargs):
        """Send the call and register *callback* for the reply."""
        self.callback = callback
        self.stream.send(json.dumps({'func': func, 'arg': arg}))
        self.stream.on_recv(self._recv_msg, True)

    def _recv_msg(self, msg):
        # Decode the first frame and hand result/error to the stored callback.
        reply = json.loads(msg[0])
        self.callback(reply['main'], reply['error'])
class ZeroMQHandler(tornado.websocket.WebSocketHandler):
    """WebSocket <-> ZeroMQ bridge.

    Each websocket connection opens its own REQ socket and relays JSON
    messages between the browser and the ZMQ backend.
    """

    def __init__(self, *args, **kwargs):
        super(ZeroMQHandler, self).__init__(*args, **kwargs)
        self.socket = None  # REQ socket, created in open()
        self.stream = None  # ZMQStream wrapping the socket

    def open(self):
        # Connect a fresh REQ socket per websocket connection, using the
        # context/url/ioloop stored in the application settings.
        settings = self.application.settings
        self.socket = settings['zeromq']['context'].socket(REQ)
        self.socket.connect(settings['zeromq']['url'])
        self.stream = ZMQStream(self.socket, settings['ioloop'])
        self.stream.on_recv(self.on_dispatch)

    def on_message(self, message):
        # Validate the incoming websocket payload before forwarding it;
        # invalid requests are rejected back to the client.
        request = load_message(message)
        if request:
            data = message.encode('utf8')
            self.stream.send(data)
        else:
            self.write_message(ERROR_INVALID_REQUEST)

    def on_dispatch(self, messages):
        # Forward each ZMQ reply frame back over the websocket.
        # NOTE(review): .encode('utf8') assumes the frames arrive as text;
        # on Python 3 pyzmq delivers bytes, which have no .encode -- confirm
        # the target Python version / stream decoding.
        for message in messages:
            data = message.encode('utf8')
            self.write_message(data)

    def on_close(self):
        # Tear down the per-connection stream and socket.
        self.stream.close()
        self.socket.close()

    def check_origin(self, origin):
        # Accept websocket connections from any origin.
        return True

    def data_received(self, chunk):
        # Streamed request bodies are not used by this handler.
        pass
class TornadoClient(object):
    """Callback-based JSON RPC client over a ZMQ REQ socket."""

    def __init__(self, ip, port):
        from zmq.eventloop.zmqstream import ZMQStream
        self.client = context.socket(zmq.REQ)
        self.client.connect("tcp://{0}:{1}".format(ip, port))
        self.stream = ZMQStream(self.client)

    def __getattr__(self, attr):
        # Missing attributes resolve to remote-call proxies.
        return lambda *arg: self.send(attr, *arg)

    def send(self, func, callback, *arg, **kwargs):
        """Dispatch the call; *callback* receives (result, error) later."""
        request = {'func': func, 'arg': arg}
        self.callback = callback
        self.stream.send(json.dumps(request))
        self.stream.on_recv(self._recv_msg, True)

    def _recv_msg(self, msg):
        # The reply's first frame carries {'main': ..., 'error': ...}.
        decoded = json.loads(msg[0])
        self.callback(decoded['main'], decoded['error'])
class AsyncCircusClient(object):
    """Asynchronous circus control client over a ZMQ DEALER socket.

    Commands are JSON-encoded with a unique 'id'; replies are matched back
    to the caller by that id inside tornado coroutines.
    """

    def __init__(self, context=None, endpoint=DEFAULT_ENDPOINT_DEALER,
                 timeout=5.0, ssh_server=None, ssh_keyfile=None):
        self._init_context(context)
        self.endpoint = endpoint
        # Random identity so the router can address this client.
        self._id = to_bytes(uuid.uuid4().hex)
        self.socket = self.context.socket(zmq.DEALER)
        self.socket.setsockopt(zmq.IDENTITY, self._id)
        self.socket.setsockopt(zmq.LINGER, 0)
        get_connection(self.socket, endpoint, ssh_server, ssh_keyfile)
        self._timeout = timeout
        self.timeout = timeout * 1000  # milliseconds variant
        self.stream = ZMQStream(self.socket, tornado.ioloop.IOLoop.current())

    def _init_context(self, context):
        # Fall back to the process-wide ZMQ context if none is supplied.
        self.context = context or zmq.Context.instance()

    def stop(self):
        """Stop receiving, disconnect and close the stream."""
        self.stream.stop_on_recv()
        # only supported by libzmq >= 3
        if hasattr(self.socket, 'disconnect'):
            self.socket.disconnect(self.endpoint)
        self.stream.close()

    @tornado.gen.coroutine
    def send_message(self, command, **props):
        """Build a message for *command* and yield its reply."""
        res = yield self.call(make_message(command, **props))
        raise tornado.gen.Return(res)

    @tornado.gen.coroutine
    def call(self, cmd):
        """Send the mapping *cmd* and yield the reply matching its id.

        :raises CallError: on JSON encode/decode failure or ZMQ send error
        """
        if isinstance(cmd, str):
            raise DeprecationWarning('call() takes a mapping')
        call_id = uuid.uuid4().hex
        cmd['id'] = call_id
        try:
            cmd = json.dumps(cmd)
        except ValueError as e:
            raise CallError(str(e))
        try:
            # Resolve the future once the message has actually been sent.
            future = concurrent.Future()

            def cb(msg, status):
                future.set_result(msg)

            self.stream.send(cmd, callback=cb)
            yield future
        except zmq.ZMQError as e:
            raise CallError(str(e))
        # Keep reading until the reply carrying our call_id shows up;
        # replies for other in-flight calls are skipped.
        while True:
            future = concurrent.Future()
            self.stream.on_recv(future.set_result)
            messages = yield future
            for message in messages:
                try:
                    res = json.loads(message)
                    if res.get('id') != call_id:
                        # we got the wrong message
                        continue
                    raise tornado.gen.Return(res)
                except ValueError as e:
                    raise CallError(str(e))
class Channel(object):
    """Mother of all channels. Defines the interface.

    Callbacks:
        The callbacks will receive the channel as first parameter and the
        message as second parameter. The error callback will get the stream
        where the error occured as second parameter.

    Attributes:
        * stream_in, stream_out : the streams for eventloop handling
        * serializer : the serializer used
    """

    def __init__(self, socket_in, socket_out, serializer):
        self.stream_in = ZMQStream(socket_in)
        self.stream_out = ZMQStream(socket_out)
        self.serializer = serializer
        self._cb_receive = None
        self._cb_send = None
        self._cb_error = None
        # Identity-based id used for hashing/equality.
        self._chan_id = id(self)
        return

    def on_receive(self, callback):
        """Set callback to invoke when a message was received."""
        self.stream_in.stop_on_recv()
        self._cb_receive = callback
        if callback:
            self.stream_in.on_recv(self._on_recv)
        return

    def on_send(self, callback):
        """Set callback to invoke when a message was sent."""
        self.stream_out.stop_on_send()
        self._cb_send = callback
        if callback:
            self.stream_out.on_send(self._on_send)
        return

    def on_error(self, callback):
        """Set callback to invoke when an error event occured."""
        self.stream_in.stop_on_err()
        self.stream_out.stop_on_err()
        self._cb_error = callback
        if callback:
            self.stream_in.on_err(self._on_err_in)
            self.stream_out.on_err(self._on_err_out)
        return

    def send(self, message):
        """Send given message."""
        m = self.serializer.serialize(message)
        if self.serializer.multipart:
            self.stream_out.send_multipart(m)
        else:
            self.stream_out.send(m)
        return

    def _on_recv(self, msg):
        """Helper interfacing w/ streams."""
        if self.serializer.multipart:
            msg = self.serializer.deserialize(msg)
        else:
            msg = self.serializer.deserialize(msg[0])
        self._cb_receive(self, msg)
        return

    def _on_send(self, sent, _):
        """Helper interfacing w/ streams.

        BUG FIX: the original first took ``sent[0]`` and then indexed
        ``[0]`` again in the single-part branch, deserializing only the
        first element of the first frame.  Now mirrors _on_recv: the frame
        list for multipart serializers, the first frame otherwise.
        """
        if self.serializer.multipart:
            msg = self.serializer.deserialize(sent)
        else:
            msg = self.serializer.deserialize(sent[0])
        self._cb_send(self, msg)
        return

    def _on_err_in(self):
        self._cb_error(self, self.stream_in)
        return

    def _on_err_out(self):
        self._cb_error(self, self.stream_out)
        return

    def __hash__(self):
        return self._chan_id

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self._chan_id == other._chan_id

    # BUG FIX: the special method for "!=" is __ne__, not __neq__; the
    # original __neq__ was never invoked by the interpreter.
    def __ne__(self, other):
        if not isinstance(other, self.__class__):
            return True
        return self._chan_id != other._chan_id
class TheQueue(object):
    """Paranoid-Pirate style broker relaying between a frontend peer and a
    pool of workers on a ROUTER backend, with heartbeat bookkeeping.
    """

    def __init__(self, context, front, back, top=True):
        """
        :param context: shared zmq.Context
        :param front: frontend address fragment passed to url_str()
        :param back: backend address fragment passed to url_str()
        :param top: True binds a ROUTER frontend (top of the chain);
                    False connects a DEALER frontend to an upstream queue
        """
        self.workers = WorkerQueue()
        self.loop = IOLoop.instance()
        self.liveness = HEARTBEAT_LIVENESS
        self.heartbeat = HEARTBEAT_INTERVAL
        self.interval = INTERVAL_INIT
        self.time = self.interval * self.heartbeat   # current retry delay
        self.heartbeat_at = time.time() + self.heartbeat * HEARTBEAT_LIVENESS
        self.callback = None      # pending retry timeout handle
        self.timed_out = False    # True while the peer is considered dead
        self.hearbeats = 0        # count of heartbeats seen on the backend
        if top:
            self.frontend_socket = context.socket(zmq.ROUTER)
            self.frontend_socket.bind(url_str(front, True))
        else:
            self.frontend_socket = context.socket(zmq.DEALER)
            self.frontend_socket.connect(url_str(front))
            # BUG FIX: the ZMQStream self.frontend does not exist yet at this
            # point; the original called self.frontend.send() here, raising
            # AttributeError.  Announce READY on the raw socket instead.
            self.frontend_socket.send(PPP_READY)
        self.backend_socket = context.socket(zmq.ROUTER)
        self.backend_socket.bind(url_str(back, True))
        self.frontend = ZMQStream(self.frontend_socket)
        self.backend = ZMQStream(self.backend_socket)
        self.start()

    def start(self):
        """Register recv handlers, start periodic worker purging and
        announce READY on the frontend."""
        self.frontend.on_recv(self.handle_frontend)
        self.backend.on_recv(self.handle_backend)
        self.period = PeriodicCallback(self.workers.purge, self.heartbeat * 1000)
        self.period.start()
        try:
            self.frontend.send(PPP_READY)
            #self.loop.start()
        except KeyboardInterrupt:
            times_str('ctrlc1')

    def handle_frontend(self, msg):
        """Handle heartbeats (1 frame) and requests (3 frames) from the
        frontend; any traffic refreshes the liveness deadline."""
        m = msg[:]
        if len(m) == 1:
            times_str('Q: Received Heartbeat')
            if self.timed_out:
                # Peer came back: resume heartbeating, cancel pending retry.
                self.loop.add_timeout(time.time() + self.heartbeat, self.send_heartbeat)
                self.timed_out = False
                self.loop.remove_timeout(self.callback)
        elif len(m) == 3:
            # Route the request to the least-recently-used worker.
            times_str('Received: ' + str(m))
            address, worker = self.workers.getLRU()
            worker.working = True
            m.insert(0, address)
            self.backend.send_multipart(m)
        self.heartbeat_at = time.time() + self.heartbeat * self.liveness

    def handle_backend(self, msg):
        """Handle worker replies/heartbeats arriving on the backend."""
        m = msg[:]
        address = m[0]
        #times_str('Backend Received: {}'.format(m))
        # The worker spoke, so it is alive: requeue it and heartbeat back.
        self.workers.ready(WorkerModel(address))
        self.backend.send_multipart([address, PPP_HEARTBEAT])
        mm = m[1:]
        if len(mm) == 1:
            if mm[0] == PPP_HEARTBEAT:
                self.hearbeats += 1
                times_str('Got hearbeat {}'.format(self.hearbeats))
        else:
            # A real reply: forward it to the frontend.
            times_str('Sending it back..')
            self.frontend.send_multipart(mm)

    def send_heartbeat(self):
        """Heartbeat the frontend peer, backing off exponentially while it
        stays silent past the heartbeat_at deadline."""
        if time.time() > self.heartbeat_at:
            # Double the retry delay while below INTERVAL_MAX (x1 once capped).
            self.time *= 2 if self.time < INTERVAL_MAX else 1
            times_str('Timed out.. Retrying in {} seconds..'.format(self.time))
            self.callback = self.loop.add_timeout(time.time() + self.time, self.send_heartbeat)
            self.timed_out = True
            return
        self.time = self.interval * self.heartbeat
        times_str('Q: Sending Heartbeat..')
        self.frontend.send(PPP_HEARTBEAT)
        self.loop.add_timeout(time.time() + self.heartbeat, self.send_heartbeat)
class Client:
    """JSON-RPC client for a MoleQueue server over a ZMQ DEALER socket.

    Requests are matched to responses by packet id; callers block on a
    condition variable until their response arrives or a timeout elapses.
    """

    def __init__(self):
        self._current_packet_id = 0       # monotonically increasing request id
        self._request_response_map = {}   # packet_id -> response (None = pending)
        self._new_response_condition = Condition()
        self._packet_id_lock = Lock()
        self._notification_callbacks = []

    def connect_to_server(self, server):
        """Connect to the named local server over an ipc:// endpoint and
        start the background event loop."""
        self.context = zmq.Context()
        self.socket = self.context.socket(zmq.DEALER)
        tmpdir = tempfile.gettempdir()
        connection_string = 'ipc://%s/%s_%s' % (tmpdir, 'zmq', server)
        self.socket.connect(connection_string)
        io_loop = ioloop.IOLoop(ioloop.ZMQPoller())
        self.stream = ZMQStream(self.socket, io_loop)
        # create partial function that has self as first argument
        callback = partial(_on_recv, self)
        self.stream.on_recv(callback)
        self.event_loop = EventLoop(io_loop)
        self.event_loop.start()

    def disconnect(self):
        """Flush pending sends and tear the connection down."""
        self.stream.flush()
        self.event_loop.stop()
        self.socket.close()

    def register_notification_callback(self, callback):
        """Register *callback* to be invoked for each server notification."""
        # check a valid function has been passed
        assert callable(callback)
        self._notification_callbacks.append(callback)

    def request_queue_list_update(self):
        pass

    def submit_job_request(self, request, timeout=None):
        """Submit a job request; return the MoleQueue id, or None on timeout.

        :raises JobRequestException: if the server replies with an error
        """
        params = JsonRpc.jobrequest_to_json_params(request)
        packet_id = self._next_packet_id()
        jsonrpc = JsonRpc.generate_request(packet_id, 'submitJob', params)
        self._send_request(packet_id, jsonrpc)
        response = self._wait_for_response(packet_id, timeout)
        # Timeout
        if response is None:
            return None
        # if an error occurred then throw an exception
        if 'error' in response:
            raise JobRequestException(response['id'],
                                      response['error']['code'],
                                      response['error']['message'])
        # otherwise return the molequeue id
        return response['result']['moleQueueId']

    def cancel_job(self):
        # TODO
        pass

    def lookup_job(self, molequeue_id, timeout=None):
        """Look up a job by MoleQueue id; return a jobrequest, or None on
        timeout.

        :raises JobRequestInformationException: if the server replies with
            an error
        """
        params = {'moleQueueId': molequeue_id}
        packet_id = self._next_packet_id()
        jsonrpc = JsonRpc.generate_request(packet_id, 'lookupJob', params)
        self._send_request(packet_id, jsonrpc)
        response = self._wait_for_response(packet_id, timeout)
        # Timeout
        if response is None:
            return None
        # if an error occurred then throw an exception
        if 'error' in response:
            # BUG FIX: the original referenced the misspelled name
            # 'reponse' here, raising NameError instead of the intended
            # exception.
            raise JobRequestInformationException(response['id'],
                                                 response['error']['data'],
                                                 response['error']['code'],
                                                 response['error']['message'])
        jobrequest = JsonRpc.json_to_jobrequest(response)
        return jobrequest

    def _on_response(self, packet_id, msg):
        # Stash the response and wake any thread waiting on it.
        if packet_id in self._request_response_map:
            self._new_response_condition.acquire()
            self._request_response_map[packet_id] = msg
            # notify any threads waiting that their response may have arrived
            self._new_response_condition.notify_all()
            self._new_response_condition.release()

    # TODO Convert raw JSON into a Python class
    def _on_notification(self, msg):
        for callback in self._notification_callbacks:
            callback(msg)

    def _next_packet_id(self):
        """Return the next unique packet id (thread-safe)."""
        with self._packet_id_lock:
            self._current_packet_id += 1
            next_id = self._current_packet_id
        return next_id

    def _send_request(self, packet_id, jsonrpc):
        # add to request map so we know we are waiting on a response for
        # this packet id
        self._request_response_map[packet_id] = None
        self.stream.send(str(jsonrpc))
        self.stream.flush()

    def _wait_for_response(self, packet_id, timeout):
        """Block until the response for *packet_id* arrives or *timeout*
        seconds elapse; return the response (or None on timeout)."""
        try:
            start = time.time()
            # wait for the response to come in
            self._new_response_condition.acquire()
            while self._request_response_map[packet_id] is None:
                # need to set a wait time otherwise the wait can't be
                # interrupted. See http://bugs.python.org/issue8844
                # (sys.maxsize replaces the Python-2-only sys.maxint)
                wait_time = sys.maxsize
                if timeout is not None:
                    wait_time = timeout - (time.time() - start)
                    if wait_time <= 0:
                        break
                self._new_response_condition.wait(wait_time)
            response = self._request_response_map.pop(packet_id)
            self._new_response_condition.release()
            return response
        except KeyboardInterrupt:
            self.event_loop.stop()
            raise
class BaseServer(App):
    """Base class for a ZMQ server object that manages a ROUTER socket.

    When a new message arrives on the socket, ZMQ calls the _dispatch()
    method. After validating the message, _dispatch() looks for a method on
    the class whose name corresponds to the 'msg_type' (in the message's
    header). This method then gets called as method(header, content).
    The method should respond on the stream by calling send_message().
    """

    zmq_port = Int(12345, config=True, help='ZeroMQ port to serve on')
    db_path = Unicode('db.sqlite', config=True, help='''
        Path to the database (sqlite3 file)''')

    def start(self):
        """Bind the ROUTER socket, register the dispatch callback and
        connect to the sqlite database."""
        url = 'tcp://*:%s' % int(self.zmq_port)
        self.uuid = str(uuid.uuid4())
        self.ctx = zmq.Context()
        s = self.ctx.socket(zmq.ROUTER)
        s.bind(url)
        self._stream = ZMQStream(s)
        self._stream.on_recv(self._dispatch)
        connect_to_sqlite_db(self.db_path)

    def send_message(self, client_id, msg_type, content=None):
        """Send a message out to a client

        Parameters
        ----------
        client_id : uuid
            Who do you want to send the message to? This is a string that
            identifies the client to the ZMQ routing layer. Within our
            messaging protocol, when the server receives a message, it can
            get the id of the sender from within the message's header --
            but this is dependent on the fact that the device.Device() code
            puts the same string in the message header that it uses to
            identify the socket to zeromq, in the line
            setsockopt(zmq.IDENTITY, str(self.uuid))
        msg_type : str
            The type of the message
        content : dict
            Content of the message

        Notes
        -----
        For details on the messaging protocol, refer to message.py
        """
        if content is None:
            content = {}
        msg = pack_message(msg_type, self.uuid, content)
        self.log.info('SENDING MESSAGE: %s', msg)
        # ROUTER sockets need the client identity frame, then an empty
        # delimiter frame, then the payload.
        self._stream.send(client_id, zmq.SNDMORE)
        self._stream.send('', zmq.SNDMORE)
        self._stream.send_json(msg)

    def _validate_msg_dict(self, msg_dict):
        """Raise ValueError unless *msg_dict* has the required envelope keys."""
        if 'header' not in msg_dict:
            raise ValueError('msg does not contain "header"')
        if 'content' not in msg_dict:
            raise ValueError('msg does not contain "content"')
        if 'sender_id' not in msg_dict['header']:
            raise ValueError('msg header does not contain "sender_id"')
        if 'msg_type' not in msg_dict['header']:
            raise ValueError('msg header does not contain "msg_type"')

    def _dispatch(self, frames):
        """Callback that responds to new messages on the stream

        This is the first point where new messages enter the system.
        Basically we just pack them up and send them on to the correct
        responder.

        Parameters
        ----------
        frames : list
            A list of message frames that have arrived on self._stream
        """
        # for some reason we seem to be getting double logging
        # inside the event loop, but not outside it. i'm not sure
        # if this is a tornado thing or what, but disabling
        # the parent log handler seems to fix it.
        self.log.parent.handlers = []
        if not len(frames) == 3:
            self.log.error(
                'invalid message received. messages are expected to '
                'contain only three frames: %s', str(frames))
            # BUG FIX: the original fell through and unpacked the frames
            # anyway, raising ValueError on malformed input and taking the
            # dispatcher down.
            return
        client, _, raw_msg = frames
        # using the PyYaml loader is a hack force regular strings
        # instead of unicode, since you can't send unicode over zmq
        # since json is a subset of yaml, this works
        # NOTE(review): yaml.load on untrusted input can construct
        # arbitrary Python objects -- prefer yaml.safe_load if senders
        # are not trusted.
        msg_dict = yaml.load(raw_msg)
        try:
            self._validate_msg_dict(msg_dict)
        except ValueError:
            # if we receive an invalid message, we log it to the error
            # stream and then return from this function, so it won't take
            # the server down
            self.log.exception('Invalid message: %s', msg_dict)
            return
        msg = Message(msg_dict)
        self.log.info('RECEIVING MESSAGE: %s', msg)
        try:
            responder = getattr(self, msg.header.msg_type)
        except AttributeError:
            self.log.critical('RESPONDER NOT FOUND FOR MESSAGE: %s',
                              msg.header.msg_type)
            return
        responder(msg.header, msg.content)
class MidgardDaemon:
    """Daemon answering JSON 'query'/'update' requests received on a ZMQ
    REP socket, backed by a Midgard connection and an RDF mapper."""

    def __init__(self, addr):
        self.init_midgard()
        self.init_rdf_mapper()
        self.init_zmq(addr)

    def init_midgard(self):
        """Open the Midgard connection."""
        print("Connecting to midgard")
        Midgard.init()
        self.mgd = Midgard.Connection()
        self.mgd.open_config(DaemonConfig())
        print("... DONE")

    def init_rdf_mapper(self):
        """Parse the RDF mapping information."""
        print("Parsing RDF mapping info")
        self.rm = RdfMapper(self.mgd)
        print("... DONE")

    def init_zmq(self, addr):
        """Bind the REP socket and register the request handler."""
        print("starting 0MQ thread")
        context = zmq.Context()
        socket = context.socket(zmq.REP)
        socket.bind(addr)
        self.loop = ioloop.IOLoop.instance()
        self.stream = ZMQStream(socket, self.loop)
        self.stream.on_recv(self.handler)
        print("... DONE")

    def handler(self, message):
        """Decode one request frame, dispatch it and send exactly one reply
        (as the REP socket requires)."""
        msg = str(message[0], 'utf8')
        try:
            data = json.loads(msg)
            if 'query' in data:
                response = self.handleQuery(data['query'])
            elif 'update' in data:
                response = self.handleUpdate(data['update'])
            else:
                # BUG FIX: the original left `response` unbound for requests
                # with neither key, raising UnboundLocalError at send time.
                resp_obj = {"status": {"code": -128,
                                       "error": "Invalid request. Expected 'query' or 'update'."}}
                response = json.dumps(resp_obj)
        except (TypeError, ValueError) as e:
            resp_obj = {"status": {"code": -128, "error": "Invalid request. %s" % (e)}}
            response = json.dumps(resp_obj)
        except gobject.GError as e:
            resp_obj = {"status": {"code": e.code, "error": "Invalid request. %s" % (e.message)}}
            response = json.dumps(resp_obj)
        self.stream.send(bytes(response, 'utf8'))

    def handleQuery(self, fields):
        """Execute a query request and return its serialized result."""
        handler = QueryHandler(self.mgd, self.rm, fields)
        return handler.handle()

    def handleUpdate(self, fields):
        """Execute an update request and return its serialized result."""
        handler = UpdateHandler(self.mgd, self.rm, fields)
        return handler.handle()

    def run(self):
        """Start serving requests (blocks in the IO loop)."""
        print("\nwaiting for requests...")
        self.loop.start()
class CloneServer(object):
    """Clone-pattern key-value server (Python 2 variant): serves full-state
    snapshots on a ROUTER socket, publishes updates on PUB and collects
    client updates on PULL, expiring entries carrying a 'ttl' property."""

    # Our server is defined by these properties
    ctx = None                  # Context wrapper
    kvmap = None                # Key-value store
    loop = None                 # IOLoop reactor
    port = None                 # Main port we're working on
    sequence = 0                # How many updates we're at
    snapshot = None             # Handle snapshot requests
    publisher = None            # Publish updates to clients
    collector = None            # Collect updates from clients

    def __init__(self, port=5556):
        self.port = port
        self.ctx = zmq.Context()
        self.kvmap = {}
        self.loop = IOLoop.instance()
        # Set up our clone server sockets (snapshot, snapshot+1, snapshot+2)
        self.snapshot = self.ctx.socket(zmq.ROUTER)
        self.publisher = self.ctx.socket(zmq.PUB)
        self.collector = self.ctx.socket(zmq.PULL)
        self.snapshot.bind("tcp://*:%d" % self.port)
        self.publisher.bind("tcp://*:%d" % (self.port + 1))
        self.collector.bind("tcp://*:%d" % (self.port + 2))
        # Wrap sockets in ZMQStreams for IOLoop handlers
        self.snapshot = ZMQStream(self.snapshot)
        self.publisher = ZMQStream(self.publisher)
        self.collector = ZMQStream(self.collector)
        # Register our handlers with reactor
        self.snapshot.on_recv(self.handle_snapshot)
        self.collector.on_recv(self.handle_collect)
        self.flush_callback = PeriodicCallback(self.flush_ttl, 1000)
        # basic log formatting:
        logging.basicConfig(format="%(asctime)s %(message)s",
                            datefmt="%Y-%m-%d %H:%M:%S", level=logging.INFO)

    def start(self):
        # Run reactor until process interrupted
        self.flush_callback.start()
        try:
            self.loop.start()
        except KeyboardInterrupt:
            pass

    def handle_snapshot(self, msg):
        """snapshot requests"""
        # A valid request is [identity, "ICANHAZ?", subtree].
        if len(msg) != 3 or msg[1] != "ICANHAZ?":
            print "E: bad request, aborting"
            dump(msg)
            self.loop.stop()
            return
        identity, request, subtree = msg
        if subtree:
            # Send state snapshot to client
            route = Route(self.snapshot, identity, subtree)
            # For each entry in kvmap, send kvmsg to client
            for k,v in self.kvmap.items():
                send_single(k,v,route)
            # Now send END message with sequence number
            logging.info("I: Sending state shapshot=%d" % self.sequence)
            self.snapshot.send(identity, zmq.SNDMORE)
            kvmsg = KVMsg(self.sequence)
            kvmsg.key = "KTHXBAI"
            kvmsg.body = subtree
            kvmsg.send(self.snapshot)

    def handle_collect(self, msg):
        """Collect updates from clients"""
        kvmsg = KVMsg.from_msg(msg)
        # Stamp the update with the next global sequence number and
        # republish it before storing.
        self.sequence += 1
        kvmsg.sequence = self.sequence
        kvmsg.send(self.publisher)
        ttl = float(kvmsg.get('ttl', 0))
        if ttl:
            # Rewrite the relative TTL as an absolute expiry timestamp.
            kvmsg['ttl'] = time.time() + ttl
        kvmsg.store(self.kvmap)
        logging.info("I: publishing update=%d", self.sequence)

    def flush_ttl(self):
        """Purge ephemeral values that have expired"""
        # NOTE: dict.items() returns a list on Python 2, so deleting
        # entries inside flush_single is safe here.
        for key,kvmsg in self.kvmap.items():
            self.flush_single(kvmsg)

    def flush_single(self, kvmsg):
        """If key-value pair has expired, delete it and publish the
        fact to listening clients."""
        ttl = float(kvmsg.get('ttl', 0))
        if ttl and ttl <= time.time():
            # Publish an empty-body delete with a fresh sequence number.
            kvmsg.body = ""
            self.sequence += 1
            kvmsg.sequence = self.sequence
            kvmsg.send(self.publisher)
            del self.kvmap[kvmsg.key]
            logging.info("I: publishing delete=%d", self.sequence)
class MainHandler(tornado.web.RequestHandler):
    """Web handler bridging the browser to the game server: pushes Hello /
    Input messages over a ZMQ PUSH socket and reacts to Welcome / Goodbye /
    GameState protobuf messages arriving on a SUB socket."""

    # Most-recently-initialised handler instance (set in initialize()).
    handler = None

    def initialize(self):
        # import ipdb; ipdb.set_trace()
        print(os.getcwd())
        self._loader = tornado.template.Loader("data")
        broadcast = Broadcast()
        print(broadcast.push_address + " / " + broadcast.subscribe_address)
        self._push_address = broadcast.push_address
        self._subscribe_address = broadcast.subscribe_address
        self._context = zmq.Context.instance()
        # Outbound commands go over PUSH; inbound game traffic over SUB.
        push_socket = self._context.socket(zmq.PUSH)
        push_socket.connect(self._push_address)
        self._push_stream = ZMQStream(push_socket)
        subscribe_socket = self._context.socket(zmq.SUB)
        subscribe_socket.connect(self._subscribe_address)
        subscribe_socket.setsockopt(zmq.SUBSCRIBE, "")
        self._subscribe_stream = ZMQStream(subscribe_socket)
        self._subscribe_stream.on_recv(self._handle_message_parts)
        # Placeholder until the server's Welcome supplies a real id.
        self._routing_id = "temporary_id_" + str(RANDOM.randint(0, 32768))
        MainHandler.handler = self

    @tornado.web.asynchronous
    def get(self):
        # Render the page immediately, then greet the game server; the
        # request is finished later from _handle_welcome/_handle_goodbye.
        content = self._loader.load("index.html").generate(
            videofeed="/test",
            status="well let's say pending")
        self.write(content)
        hello = self._build_hello()
        print("Send Hello: " + repr(hello))
        # self._subscribe_stream.send(hello)
        self._push_stream.send(hello)

    @tornado.web.asynchronous
    def _handle_message_parts(self, message_parts):
        # print('message received: %s' % map(repr, message_parts))
        # Each frame is "<recipient> <type> <payload>".
        for message in message_parts:
            recipient, _, typed_payload = message.partition(' ')
            message_type, _, payload = typed_payload.partition(' ')
            self._handle_message(recipient, message_type, payload)

    def _handle_message(self, recipient, message_type, payload):
        # Dispatch by message type; unknown types are logged and dropped.
        if (self._destination_matches(recipient)):
            if ("Welcome" == message_type):
                self._handle_welcome(payload)
            elif ("Goodbye" == message_type):
                self._handle_goodbye(payload)
            elif ("GameState" == message_type):
                self._handle_game_state(payload)
            else:
                print("Message ignored: " + message_type)

    def _destination_matches(self, recipient):
        return True  # for now

    def _handle_welcome(self, payload):
        """Adopt the server-assigned id and announce the video feed URL."""
        message = pb_server_game.Welcome()
        message.ParseFromString(payload)
        print(
            "Welcome ; id = " + str(message.id) +
            " ; video_address = " + message.video_address +
            " ; video_port = " + str(message.video_port))
        self._routing_id = str(message.id)
        if (message.game_state):
            print("playing ? " + str(message.game_state.playing))
            print("time left: " + str(message.game_state.seconds))
            for team in message.game_state.teams:
                print(team.name + " (" + str(team.num_players) + ") -> " +
                      str(team.score))
        videofeed = message.video_address + ":" + str(message.video_port)
        print("videofeed =", videofeed, ";",
              len(OrwellConnection.all_connections))
        video_url = "/video?address={}&port={}".format(
            message.video_address,
            str(message.video_port))
        # video_url = "/test?address={}&port={}".format(
        #     message.video_address,
        #     str(message.video_port))
        json_str = json.dumps({"videofeed": video_url})
        OrwellConnection.data_to_send.append(json_str)
        # for connection in OrwellConnection.all_connections:
        #     print("send videofeed(" + json_str + ") to", connection)
        #     connection.send(json_str)
        print("_handle_welcome - finish")
        self.finish()

    def _handle_goodbye(self, payload):
        """Acknowledge the server's Goodbye and finish the request."""
        message = pb_server_game.Goodbye()
        message.ParseFromString(payload)
        print("Goodbye ...")
        print("_handle_goodbye - finish")
        self.finish()

    def _handle_game_state(self, payload):
        """Broadcast a human-readable game status to all websocket clients."""
        message = pb_server_game.GameState()
        message.ParseFromString(payload)
        if (message.HasField("winner")):
            status = "Game won by team " + message.winner
        else:
            if (message.playing):
                status = "Game running"
                if (message.HasField("seconds")):
                    status += " ({} second(s) left)".format(message.seconds)
            else:
                status = "Game NOT running"
        print(status)
        for connection in OrwellConnection.all_connections:
            connection.send(json.dumps({"status": status}))

    def _build_hello(self):
        """Serialise a Hello message prefixed with our routing id."""
        pb_message = pb_controller.Hello()
        name = "JAMBON"
        pb_message.name = name
        payload = pb_message.SerializeToString()
        return self._routing_id + ' Hello ' + payload

    def send_input(self, data):
        """Translate a direction/fire keyword into an Input protobuf and
        push it to the game server."""
        factor = 0.5
        left = 0
        right = 0
        fire_weapon1 = False
        fire_weapon2 = False
        # Tank-style steering: opposite track speeds turn in place.
        if ("LEFT" == data):
            left = -1 * factor
            right = 1 * factor
        elif ("FORWARD" == data):
            left = 1 * factor
            right = 1 * factor
        elif ("RIGHT" == data):
            left = 1 * factor
            right = -1 * factor
        elif ("BACKWARD" == data):
            left = -1 * factor
            right = -1 * factor
        elif ("FIRE1" == data):
            fire_weapon1 = True
        elif ("FIRE2" == data):
            fire_weapon2 = True
        pb_input = pb_controller.Input()
        pb_input.move.left = left
        pb_input.move.right = right
        pb_input.fire.weapon1 = fire_weapon1
        pb_input.fire.weapon2 = fire_weapon2
        payload = pb_input.SerializeToString()
        message = self._routing_id + ' Input ' + payload
        # self._subscribe_stream.send(hello)
        self._push_stream.send(message)
class CloneServer(object):
    """Clone-pattern key-value server driven by the tornado IOLoop.

    Serves full state snapshots over a ROUTER socket (port), publishes
    incremental updates over a PUB socket (port+1), and collects updates
    from clients on a PULL socket (port+2).  Ephemeral entries carrying a
    'ttl' property are purged by a periodic callback.
    """

    # Our server is defined by these properties
    ctx = None        # Context wrapper
    kvmap = None      # Key-value store
    loop = None       # IOLoop reactor
    port = None       # Main port we're working on
    sequence = 0      # How many updates we're at
    snapshot = None   # Handle snapshot requests
    publisher = None  # Publish updates to clients
    collector = None  # Collect updates from clients

    def __init__(self, port=5556):
        self.port = port
        self.ctx = zmq.Context()
        self.kvmap = {}
        self.loop = IOLoop.instance()

        # Set up our clone server sockets
        self.snapshot = self.ctx.socket(zmq.ROUTER)
        self.publisher = self.ctx.socket(zmq.PUB)
        self.collector = self.ctx.socket(zmq.PULL)
        self.snapshot.bind(f"tcp://*:{self.port:d}")
        self.publisher.bind(f"tcp://*:{self.port + 1:d}")
        self.collector.bind(f"tcp://*:{self.port + 2:d}")

        # Wrap sockets in ZMQStreams for IOLoop handlers
        self.snapshot = ZMQStream(self.snapshot)
        self.publisher = ZMQStream(self.publisher)
        self.collector = ZMQStream(self.collector)

        # Register our handlers with reactor
        self.snapshot.on_recv(self.handle_snapshot)
        self.collector.on_recv(self.handle_collect)
        self.flush_callback = PeriodicCallback(self.flush_ttl, 1000)

        # basic log formatting:
        logging.basicConfig(format="%(asctime)s %(message)s",
                            datefmt="%Y-%m-%d %H:%M:%S",
                            level=logging.INFO)

    def start(self):
        """Run the reactor until the process is interrupted."""
        self.flush_callback.start()
        try:
            self.loop.start()
        except KeyboardInterrupt:
            pass

    def handle_snapshot(self, msg):
        """Answer an "ICANHAZ?" snapshot request with the full kvmap
        followed by a KTHXBAI end marker carrying the current sequence."""
        if len(msg) != 3 or msg[1] != b"ICANHAZ?":
            print("E: bad request, aborting")
            dump(msg)
            self.loop.stop()
            return
        [identity, request, subtree] = msg
        if subtree:
            # Send state snapshot to client
            route = Route(self.snapshot, identity, subtree)

            # For each entry in kvmap, send kvmsg to client
            for k, v in self.kvmap.items():
                send_single(k, v, route)

            # Now send END message with sequence number
            # BUG FIX: log message previously read "shapshot"
            logging.info(f"I: Sending state snapshot={self.sequence:d}")
            self.snapshot.send(identity, zmq.SNDMORE)
            kvmsg = KVMsg(self.sequence)
            kvmsg.key = b"KTHXBAI"
            kvmsg.body = subtree
            kvmsg.send(self.snapshot)

    def handle_collect(self, msg):
        """Collect an update from a client, stamp it with the next
        sequence number, store it and re-publish it to all clients."""
        kvmsg = KVMsg.from_msg(msg)
        self.sequence += 1
        kvmsg.sequence = self.sequence
        kvmsg.send(self.publisher)
        ttl = float(kvmsg.get(b'ttl', 0))
        if ttl:
            # Convert the relative ttl into an absolute expiry timestamp.
            kvmsg[b'ttl'] = b'%f' % (time.time() + ttl)
        kvmsg.store(self.kvmap)
        logging.info(f"I: publishing update={self.sequence:d}")

    def flush_ttl(self):
        """Purge ephemeral values that have expired."""
        # used list() to exhaust the iterator before deleting from the dict
        for key, kvmsg in list(self.kvmap.items()):
            self.flush_single(kvmsg)

    def flush_single(self, kvmsg):
        """If key-value pair has expired, delete it and publish the fact
        to listening clients."""
        ttl = float(kvmsg.get(b'ttl', 0))
        if ttl and ttl <= time.time():
            # Empty body signals deletion to subscribers.
            kvmsg.body = b""
            self.sequence += 1
            kvmsg.sequence = self.sequence
            kvmsg.send(self.publisher)
            del self.kvmap[kvmsg.key]
            logging.info(f"I: publishing delete={self.sequence:d}")
class BaseServer(App):
    """Base class for a ZMQ server object that manages a ROUTER socket.

    When a new message arrives on the socket, ZMQ calls the _dispatch()
    method.  After validating the message, _dispatch() looks for a method
    on the class whose name corresponds to the 'msg_type' (in the
    message's header).  That method is then called as
    method(header, content).

    The method should respond on the stream by calling send_message().
    """

    # ZeroMQ port this server binds on.
    zmq_port = Int(12345, config=True, help='ZeroMQ port to serve on')
    # Location of the sqlite3 database file.
    db_path = Unicode('db.sqlite', config=True, help='''
        Path to the database (sqlite3 file)''')

    def start(self):
        """Bind the ROUTER socket, register the dispatch callback and
        connect to the database."""
        url = 'tcp://*:%s' % int(self.zmq_port)
        self.uuid = str(uuid.uuid4())
        self.ctx = zmq.Context()
        s = self.ctx.socket(zmq.ROUTER)
        s.bind(url)
        self._stream = ZMQStream(s)
        self._stream.on_recv(self._dispatch)
        connect_to_sqlite_db(self.db_path)

    def send_message(self, client_id, msg_type, content=None):
        """Send a message out to a client.

        Parameters
        ----------
        client_id : uuid
            Who do you want to send the message to? This is a string that
            identifies the client to the ZMQ routing layer.

            Within our messaging protocol, when the server receives a
            message, it can get the id of the sender from within the
            message's header -- but this is dependent on the fact that the
            device.Device() code puts the same string in the message
            header that it uses to identify the socket to zeromq, in the
            line setsockopt(zmq.IDENTITY, str(self.uuid))
        msg_type : str
            The type of the message
        content : dict
            Content of the message

        Notes
        -----
        For details on the messaging protocol, refer to message.py
        """
        if content is None:
            content = {}

        msg = pack_message(msg_type, self.uuid, content)
        self.log.info('SENDING MESSAGE: %s', msg)
        # ROUTER envelope: [identity, empty delimiter, body].
        # NOTE(review): '' is a unicode string; pyzmq on Python 3 requires
        # bytes frames -- confirm target interpreter before changing.
        self._stream.send(client_id, zmq.SNDMORE)
        self._stream.send('', zmq.SNDMORE)
        self._stream.send_json(msg)

    def _validate_msg_dict(self, msg_dict):
        """Raise ValueError unless msg_dict has the required protocol
        fields: 'header' (with 'sender_id' and 'msg_type') and 'content'."""
        if 'header' not in msg_dict:
            raise ValueError('msg does not contain "header"')
        if 'content' not in msg_dict:
            raise ValueError('msg does not contain "content"')
        if 'sender_id' not in msg_dict['header']:
            raise ValueError('msg header does not contain "sender_id"')
        if 'msg_type' not in msg_dict['header']:
            raise ValueError('msg header does not contain "msg_type"')

    def _dispatch(self, frames):
        """Callback that responds to new messages on the stream.

        This is the first point where new messages enter the system.
        Basically we just pack them up and send them on to the correct
        responder.

        Parameters
        ----------
        frames : list
            The raw message frames that have arrived:
            [client_id, empty delimiter, serialized message]
        """
        # for some reason we seem to be getting double logging inside the
        # event loop, but not outside it. i'm not sure if this is a
        # tornado thing or what, but disabling the parent log handler
        # seems to fix it.
        self.log.parent.handlers = []

        if len(frames) != 3:
            self.log.error('invalid message received. messages are '
                           'expected to contain only three frames: %s',
                           str(frames))
            # BUG FIX: previously fell through and the 3-way unpack below
            # raised ValueError, taking down the callback.
            return

        client, _, raw_msg = frames

        # using the PyYaml loader is a hack to force regular strings
        # instead of unicode, since you can't send unicode over zmq;
        # since json is a subset of yaml, this works.
        # SECURITY: safe_load instead of load -- raw_msg comes off the
        # wire and yaml.load can construct arbitrary Python objects.
        msg_dict = yaml.safe_load(raw_msg)
        try:
            self._validate_msg_dict(msg_dict)
        except ValueError:
            # if we receive an invalid message, we log it to the error
            # stream and then return from this function, so it won't take
            # the server down
            self.log.exception('Invalid message: %s', msg_dict)
            return

        msg = Message(msg_dict)
        self.log.info('RECEIVING MESSAGE: %s', msg)
        try:
            responder = getattr(self, msg.header.msg_type)
        except AttributeError:
            self.log.critical('RESPONDER NOT FOUND FOR MESSAGE: %s',
                              msg.header.msg_type)
            return
        responder(msg.header, msg.content)
class Client:
    """JSON-RPC client for a MoleQueue server over a ZeroMQ DEALER socket.

    Requests are correlated with responses by packet id; calling threads
    block on a shared condition variable until the matching response
    arrives or a timeout expires.
    """

    def __init__(self):
        self._current_packet_id = 0
        # packet id -> response (None until the response arrives)
        self._request_response_map = {}
        self._new_response_condition = Condition()
        self._packet_id_lock = Lock()
        self._notification_callbacks = []

    def connect_to_server(self, server):
        """Connect to the named server's ipc endpoint and start the event
        loop that services the stream."""
        self.context = zmq.Context()
        self.socket = self.context.socket(zmq.DEALER)
        tmpdir = tempfile.gettempdir()
        connection_string = 'ipc://%s/%s_%s' % (tmpdir, 'zmq', server)
        self.socket.connect(connection_string)
        io_loop = ioloop.IOLoop(ioloop.ZMQPoller())
        self.stream = ZMQStream(self.socket, io_loop)
        # create partial function that has self as first argument
        callback = partial(_on_recv, self)
        self.stream.on_recv(callback)
        self.event_loop = EventLoop(io_loop)
        self.event_loop.start()

    def disconnect(self):
        """Flush pending messages, stop the event loop, close the socket."""
        self.stream.flush()
        self.event_loop.stop()
        self.socket.close()

    def register_notification_callback(self, callback):
        """Register a callable invoked for each server notification."""
        # check a valid function has been passed
        assert callable(callback)
        self._notification_callbacks.append(callback)

    def request_queue_list_update(self):
        pass

    def submit_job_request(self, request, timeout=None):
        """Submit a job request.

        Returns the MoleQueue id of the submitted job, or None on timeout.
        Raises JobRequestException if the server reports an error.
        """
        params = JsonRpc.jobrequest_to_json_params(request)
        packet_id = self._next_packet_id()
        jsonrpc = JsonRpc.generate_request(packet_id, 'submitJob', params)
        self._send_request(packet_id, jsonrpc)
        response = self._wait_for_response(packet_id, timeout)
        # Timeout
        if response is None:
            return None
        # if an error occurred then throw an exception
        if 'error' in response:
            raise JobRequestException(response['id'],
                                      response['error']['code'],
                                      response['error']['message'])
        # otherwise return the molequeue id
        return response['result']['moleQueueId']

    def cancel_job(self):
        # TODO
        pass

    def lookup_job(self, molequeue_id, timeout=None):
        """Look up a job by its MoleQueue id.

        Returns a jobrequest object, or None on timeout.
        Raises JobRequestInformationException if the server reports an
        error.
        """
        params = {'moleQueueId': molequeue_id}
        packet_id = self._next_packet_id()
        jsonrpc = JsonRpc.generate_request(packet_id, 'lookupJob', params)
        self._send_request(packet_id, jsonrpc)
        response = self._wait_for_response(packet_id, timeout)
        # Timeout
        if response is None:
            return None
        # if an error occurred then throw an exception
        # BUG FIX: these reads were misspelt "reponse", which raised
        # NameError whenever the server returned an error.
        if 'error' in response:
            raise JobRequestInformationException(
                response['id'],
                response['error']['data'],
                response['error']['code'],
                response['error']['message'])
        jobrequest = JsonRpc.json_to_jobrequest(response)
        return jobrequest

    def _on_response(self, packet_id, msg):
        """Record an incoming response and wake all waiting threads."""
        if packet_id in self._request_response_map:
            self._new_response_condition.acquire()
            self._request_response_map[packet_id] = msg
            # notify any threads waiting that their response may have
            # arrived
            self._new_response_condition.notify_all()
            self._new_response_condition.release()

    # TODO Convert raw JSON into a Python class
    def _on_notification(self, msg):
        for callback in self._notification_callbacks:
            callback(msg)

    def _next_packet_id(self):
        """Return the next request id (thread-safe, monotonic)."""
        with self._packet_id_lock:
            self._current_packet_id += 1
            # renamed from "next": don't shadow the builtin
            packet_id = self._current_packet_id
        return packet_id

    def _send_request(self, packet_id, jsonrpc):
        # add to request map so we know we are waiting on a response for
        # this packet id
        self._request_response_map[packet_id] = None
        self.stream.send(str(jsonrpc))
        self.stream.flush()

    def _wait_for_response(self, packet_id, timeout):
        """Block until the response for packet_id arrives.

        Returns the response, or None if the timeout expired first.
        """
        try:
            start = time.time()
            # wait for the response to come in
            self._new_response_condition.acquire()
            while self._request_response_map[packet_id] is None:
                # need to set a wait time otherwise the wait can't be
                # interrupted. See http://bugs.python.org/issue8844
                # BUG FIX: sys.maxint does not exist on Python 3;
                # sys.maxsize is available on both 2 and 3.
                wait_time = sys.maxsize
                if timeout is not None:
                    wait_time = timeout - (time.time() - start)
                    if wait_time <= 0:
                        break
                self._new_response_condition.wait(wait_time)
            response = self._request_response_map.pop(packet_id)
            self._new_response_condition.release()
            return response
        except KeyboardInterrupt:
            self.event_loop.stop()
            raise
class ZQueue(object):
    """Paranoid-pirate style broker: forwards requests from a frontend
    stream to least-recently-used workers on a backend stream, exchanges
    heartbeats in both directions, and purges expired workers."""

    def __init__(self, frontend_socket, backend_socket):
        self.queue = WorkerQueue()
        self.frontend = ZMQStream(frontend_socket)
        self.backend = ZMQStream(backend_socket)
        self.liveness = HEARTBEAT_LIVENESS
        self.heartbeat = HEARTBEAT_INTERVAL
        self.interval = INTERVAL_INIT
        self.loop = IOLoop.instance()
        # [sic] heartbeat counter -- original spelling kept so any
        # external reader of this attribute still works.
        self.hearbeats = 0
        self.time = self.interval * self.heartbeat
        self.heartbeat_at = time.time() + self.heartbeat * HEARTBEAT_LIVENESS
        self.callback = None
        self.timed_out = False
        self.frontend.on_recv(self.handle_frontend)
        self.backend.on_recv(self.handle_backend)
        self.period = PeriodicCallback(self.purge, HEARTBEAT_INTERVAL * 1000)
        self.period.start()

    def handle_frontend(self, msg):
        """Handle traffic from clients: a single frame is a heartbeat,
        three frames are a request to route to a worker."""
        frames = msg[:]
        if len(frames) == 1:
            times_str('Received heartbeat')
            if self.timed_out:
                # Peer came back: resume regular heartbeating and cancel
                # the pending back-off retry.
                self.loop.add_timeout(time.time() + HEARTBEAT_INTERVAL,
                                      self.send_heartbeat)
                self.timed_out = False
                self.loop.remove_timeout(self.callback)
        elif len(frames) == 3:
            times_str('Received: ' + str(frames))
            # Route the request to the least-recently-used worker.
            address, worker = self.queue.getLRU()
            worker.working = True
            frames.insert(0, address)
            self.backend.send_multipart(frames)
            self.heartbeat_at = (time.time() +
                                 HEARTBEAT_INTERVAL * HEARTBEAT_LIVENESS)

    def handle_backend(self, msg):
        """Handle traffic from workers: count heartbeats, forward replies
        back to the frontend, and mark the sender as ready."""
        frames = msg[:]
        address = frames[0]
        times_str('Backend Received: {}'.format(frames))
        # The sender is alive: requeue it and heartbeat it back.
        self.queue.ready(WorkerModel(address))
        self.backend.send_multipart([address, PPP_HEARTBEAT])
        payload = frames[1:]
        if len(payload) == 1:
            if payload[0] == PPP_HEARTBEAT:
                self.hearbeats += 1
                times_str('Got hearbeat {}'.format(self.hearbeats))
        else:
            # A real reply: send it back to the client side.
            times_str('Sending it back..')
            self.frontend.send_multipart(payload)
        if not self.queue.empty():
            self.frontend.on_recv(self.handle_frontend)

    def purge(self):
        """Periodic callback: drop expired workers and stop polling the
        frontend while no worker is available."""
        self.queue.purge()
        if self.queue.empty():
            self.frontend.stop_on_recv()

    def send_heartbeat(self):
        """Send a heartbeat to the frontend, backing off exponentially
        (capped at INTERVAL_MAX) while the peer stays silent."""
        if time.time() > self.heartbeat_at:
            # Deadline missed: double the retry interval and reschedule.
            self.time = self.time * (2 if self.time < INTERVAL_MAX else 1)
            times_str('Timed out.. Retrying in {} seconds..'.format(self.time))
            self.callback = self.loop.add_timeout(time.time() + self.time * 1,
                                                  self.send_heartbeat)
            self.timed_out = True
            return
        self.time = self.interval * self.heartbeat
        times_str('sending heartbeat..')
        self.frontend.send(PPP_HEARTBEAT)
        self.loop.add_timeout(time.time() + self.heartbeat,
                              self.send_heartbeat)

    def run(self):
        """Announce readiness and spin the IO loop until interrupted."""
        try:
            self.frontend.send(PPP_READY)
            self.loop.start()
        except KeyboardInterrupt:
            times_str('ctrlc')