def ans_port_send(self, msg, is_pyobj):
    try:
        sendMsg = [self.identity]                   # Identity is already a frame
        if is_pyobj:
            payload = zmq.Frame(pickle.dumps(msg))  # Pickle python payload
        else:
            payload = zmq.Frame(msg)                # Take bytearray
        sendMsg += [payload]
        if self.isTimed:
            now = time.time()
            now = struct.pack("d", now)
            nowFrame = zmq.Frame(now)
            sendMsg += [nowFrame]
        self.socket.send_multipart(sendMsg)
    except zmq.error.ZMQError as e:
        raise PortError("send error (%d)" % e.errno, e.errno) from e
    return True

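# Hedged sketch, not part of the original source: a possible receiving side for the
# framing built by ans_port_send above -- [identity frame, payload, optional packed
# timestamp]. The name ans_port_recv and the returned (payload, timestamp) pair are
# assumptions; only the frame layout is taken from the sender.
def ans_port_recv(self, is_pyobj=True):
    try:
        frames = self.socket.recv_multipart()
    except zmq.error.ZMQError as e:
        raise PortError("recv error (%d)" % e.errno, e.errno) from e
    payload = frames[1]                  # frames[0] is the sender identity
    sendTime = None
    if self.isTimed and len(frames) > 2:
        (sendTime,) = struct.unpack("d", frames[2])   # timestamp packed as a double
    if is_pyobj:
        return pickle.loads(payload), sendTime
    return payload, sendTime
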
def make_outgoing_message(self):
    """ we assume get_batch has set self.imgdata_dict to contain image data arrays.
    we also assume that the worker knows the data that's coming to it.
    an application requires creating both the client and worker.
    this is because I do not want to deal with generating a generic message protocol right now.
    """
    tmsg = time.time()
    msg = []
    totmsgsize = 0
    totcompsize = 0
    ibatch = 0
    for (ktype, name), plane_img_v in self.imgdata_dict.items():
        meta_v = self.imgmeta_dict[name]
        rse = self.batch2rse[0]
        for p, data in enumerate(plane_img_v):
            meta = meta_v[p]
            strrse = "(%d,%d,%d)" % rse
            x_enc = msgpack.packb(data, default=m.encode)
            x_comp = zlib.compress(x_enc, self.compression_level)
            if self.print_msg_sizes:
                frmsg = zmq.Frame(data=x_enc)
                cmmsg = zmq.Frame(data=x_comp)
                msg_size = len(frmsg.bytes)
                com_size = len(cmmsg.bytes)
                totmsgsize += msg_size
                totcompsize += com_size
            msg.append(name)
            msg.append(meta.dump().strip() + ":" + strrse)
            msg.append(x_comp)
            print "CaffeLArCV1Client[{}] sending array name=\"{}\" shape={} meta={}".format(
                self._identity, name, data.shape, msg[-2])
            ibatch += 1
    if self.print_msg_sizes:
        print "CaffeLArCV1Client[{}] created msg of size (numpy arrays only): {} MB".format(
            self._identity, totmsgsize / 1.0e6)
        print "CaffeLArCV1Client[{}] created compressed msg of size (numpy arrays only): {} MB".format(
            self._identity, totcompsize / 1.0e6)
    tmsg = time.time() - tmsg
    self._ttracker["makemessage::total"] += tmsg
    return msg

def test_unicode(self):
    """Test the unicode representations of the Frames."""
    s = u('asdf')
    self.assertRaises(TypeError, zmq.Frame, s)
    for i in range(16):
        s = (2**i) * u('§')
        m = zmq.Frame(s.encode('utf8'))
        self.assertEqual(s, unicode(m.bytes, 'utf8'))

def toframe(self):
    ''' Return self as a frame. '''
    frame = zmq.Frame(self.encode())
    if self.routing_id:
        frame.routing_id = self.routing_id
    return frame

def test_multi_tracker(self):
    m = zmq.Frame(b'asdf', copy=False, track=True)
    m2 = zmq.Frame(b'whoda', copy=False, track=True)
    mt = zmq.MessageTracker(m, m2)
    self.assertFalse(m.tracker.done)
    self.assertFalse(mt.done)
    self.assertRaises(zmq.NotDone, mt.wait, 0.1)
    del m
    for i in range(3):
        gc.collect()
    self.assertRaises(zmq.NotDone, mt.wait, 0.1)
    self.assertFalse(mt.done)
    del m2
    for i in range(3):
        gc.collect()
    assert mt.wait(0.1) is None
    assert mt.done

def test_buffer_out(self):
    """receiving buffered output"""
    ins = b("§§¶•ªº˜µ¬˚…∆˙åß∂©œ∑´†≈ç√")
    m = zmq.Frame(ins)
    outb = m.buffer
    self.assertTrue(isinstance(outb, memoryview))
    assert outb is m.buffer
    assert m.buffer is m.buffer

def test_str(self):
    """Test the str representations of the Frames."""
    for i in range(16):
        s = (2 ** i) * x
        m = zmq.Frame(s)
        m_str = str(m)
        m_str_b = b(m_str)  # py3compat
        self.assertEqual(s, m_str_b)

def test_memoryview(self):
    """test messages from memoryview"""
    s = b'carrotjuice'
    v = memoryview(s)
    m = zmq.Frame(s)
    buf = m.buffer
    s2 = buf.tobytes()
    self.assertEqual(s2, s)
    self.assertEqual(m.bytes, s)

def _distribute(self, frames, user_id):
    """
    Distribute the message to all subscribers subscribed to the same bus and topic.
    Check if the topic is protected before distributing the message; for protected
    topics, only authorized publishers may publish.

    :param frames: list of frames making up the publish message
        (publisher, receiver, proto, user id, message id, subsystem, operation,
        topic, and data)
    :type frames: list
    :param user_id: identity of the publishing agent
    :type user_id: str
    :returns: number of subscribers to whom the message was sent
    :rtype: int
    """
    publisher, receiver, proto, _, msg_id, subsystem, op, topic, data = frames[0:9]
    # Check if peer is authorized to publish the topic
    errmsg = self._check_if_protected_topic(bytes(user_id), bytes(topic))
    # Send error message as peer is not authorized to publish to the topic
    if errmsg is not None:
        try:
            frames = [publisher, b'', proto, user_id, msg_id, b'error',
                      zmq.Frame(bytes(UNAUTHORIZED)), zmq.Frame(str(errmsg)), b'', subsystem]
        except ValueError:
            self._logger.debug("Value error")
        self._send(frames, publisher)
        return 0
    # First: try to send to internal platform subscribers
    internal_count = self._distribute_internal(frames)
    # Second: try to send to external platform subscribers
    external_count = self._distribute_external(frames)
    return internal_count + external_count

def _external_to_local_publish(self, frames):
    """
    Publish an external pubsub message to local subscribers.

    :param frames: frames containing the publish message
    :return: count of local subscribers; if there are none, an error message is
        sent back to the source platform
    """
    results = []
    subscribers_count = 0
    # Check if destination is local VIP -- Todo
    if len(frames) > 8:
        publisher, receiver, proto, user_id, msg_id, subsystem, op, topic, data = frames[0:9]
        data = frames[8].bytes
        msg = jsonapi.loads(data)
        # Check if peer is authorized to publish the topic
        errmsg = self._check_if_protected_topic(bytes(user_id), bytes(topic))
        # Peer is not authorized to publish to the topic; send error message to the peer
        if errmsg is not None:
            try:
                frames = [publisher, b'', proto, user_id, msg_id, subsystem, b'error',
                          zmq.Frame(bytes(UNAUTHORIZED)), zmq.Frame(str(errmsg))]
                self._ext_router.send_external(publisher, frames)
                return
            except ValueError:
                self._logger.debug("Value error")
        # Make it an internal publish
        frames[6] = 'publish'
        subscribers_count = self._distribute_internal(frames)
        # There are no subscribers; send error message back to the source platform
        if subscribers_count == 0:
            try:
                errmsg = 'NO SUBSCRIBERS'
                frames = [publisher, b'', proto, user_id, msg_id, subsystem,
                          zmq.Frame(b'error'), zmq.Frame(bytes(INVALID_REQUEST)), topic]
                self._ext_router.send_external(publisher, frames)
            except ValueError:
                self._logger.debug("Value error")
    else:
        self._logger.debug("Incorrect frames {}".format(len(frames)))
    return subscribers_count

def test_02_01_pipeline_info(self):
    pipeline = cpp.Pipeline()
    load_images = LoadImages()
    load_images.module_num = 1
    load_images.add_imagecb()
    load_images.images[0].channels[0].image_name.value = "Foo"
    load_images.images[1].channels[0].image_name.value = "Bar"
    pipeline.add_module(load_images)
    identify = IdentifyPrimaryObjects()
    identify.module_num = 2
    identify.x_name.value = "Foo"
    identify.y_name.value = "dizzy"
    pipeline.add_module(identify)
    pipeline_txt = StringIO()
    pipeline.savetxt(pipeline_txt)
    message = [
        zmq.Frame(self.session_id),
        zmq.Frame(),
        zmq.Frame(PIPELINE_INFO_REQ_1),
        zmq.Frame(pipeline_txt.getvalue())
    ]
    self.socket.send_multipart(message)
    message = self.socket.recv_multipart()
    self.assertEqual(message.pop(0), self.session_id)
    self.assertEqual(message.pop(0), "")
    self.assertEqual(message.pop(0), PIPELINE_INFO_REPLY_1)
    body = json.loads(message.pop(0))
    self.assertEqual(len(body), 3)
    channels, type_names, measurements = body
    self.assertTrue("Foo" in channels)
    self.assertTrue("Bar" in channels)
    self.assertTrue("dizzy" in measurements)
    found_location = False
    found_object_number = False
    for feature, idx in measurements['dizzy']:
        if feature == "Location_Center_X":
            self.assertEqual('java.lang.Double', type_names[idx])
            found_location = True
        elif feature == "Number_Object_Number":
            self.assertEqual('java.lang.Integer', type_names[idx])
            found_object_number = True
    self.assertTrue(found_location)
    self.assertTrue(found_object_number)

def test_above_30(self):
    """Messages above 30 bytes are never copied by 0MQ."""
    for i in range(5, 16):  # 32, 64, ..., 32768
        s = (2**i) * x
        self.assertEqual(grc(s), 2)
        m = zmq.Frame(s)
        self.assertEqual(grc(s), 4)
        del m
        self.assertEqual(grc(s), 2)
        del s

def test_02_03_clean_pipeline(self):
    pipeline = cpp.Pipeline()
    load_images = LoadImages()
    load_images.module_num = 1
    load_images.add_imagecb()
    load_images.images[0].channels[0].image_name.value = "Foo"
    load_images.images[1].channels[0].image_name.value = "Bar"
    pipeline.add_module(load_images)
    identify = IdentifyPrimaryObjects()
    identify.module_num = 2
    identify.x_name.value = "Foo"
    identify.y_name.value = "dizzy"
    pipeline.add_module(identify)
    saveimages = SaveImages()
    saveimages.module_num = 3
    saveimages.image_name.value = "Foo"
    pipeline.add_module(saveimages)
    measureobjectsizeshape = MeasureObjectSizeShape()
    measureobjectsizeshape.module_num = 4
    measureobjectsizeshape.object_groups[0].name.value = "dizzy"
    pipeline.add_module(measureobjectsizeshape)
    pipeline_txt = StringIO()
    pipeline.savetxt(pipeline_txt)
    module_names = json.dumps([SaveImages.module_name])
    message = [
        zmq.Frame(self.session_id),
        zmq.Frame(),
        zmq.Frame(CLEAN_PIPELINE_REQ_1),
        zmq.Frame(pipeline_txt.getvalue()),
        zmq.Frame(module_names)
    ]
    self.socket.send_multipart(message)
    message = self.socket.recv_multipart()
    self.assertEqual(message.pop(0), self.session_id)
    self.assertEqual(message.pop(0), "")
    self.assertEqual(message.pop(0), CLEAN_PIPELINE_REPLY_1)
    pipeline_txt = message.pop(0)
    pipeline = cpp.Pipeline()
    pipeline.loadtxt(StringIO(pipeline_txt))
    self.assertEqual(len(pipeline.modules()), 3)
    self.assertIsInstance(pipeline.modules()[0], LoadImages)
    self.assertIsInstance(pipeline.modules()[1], IdentifyPrimaryObjects)
    self.assertIsInstance(pipeline.modules()[2], MeasureObjectSizeShape)

def test_frame_more(self):
    """test Frame.more attribute"""
    frame = zmq.Frame(b"hello")
    self.assertFalse(frame.more)
    sa, sb = self.create_bound_pair(zmq.PAIR, zmq.PAIR)
    sa.send_multipart([b'hi', b'there'])
    frame = self.recv(sb, copy=False)
    self.assertTrue(frame.more)
    frame = self.recv(sb, copy=False)
    self.assertFalse(frame.more)

def test_buffer_out(self):
    """receiving buffered output"""
    if unicode is str:
        ins = "§§¶•ªº˜µ¬˚…∆˙åß∂©œ∑´†≈ç√".encode('utf8')
    else:
        ins = "§§¶•ªº˜µ¬˚…∆˙åß∂©œ∑´†≈ç√"
    m = zmq.Frame(ins)
    outb = m.buffer
    self.assertTrue(isinstance(outb, view))
    self.assert_(outb is m.buffer)
    self.assert_(m.buffer is m.buffer)

def test_bytes(self):
    """Test the Frame.bytes property."""
    for i in range(1, 16):
        s = (2**i) * x
        m = zmq.Frame(s)
        b = m.bytes
        self.assertEquals(s, m.bytes)
        # check that it copies
        self.assert_(b is not s)
        # check that it copies only once
        self.assert_(b is m.bytes)

def test_unicode(self):
    """Test the unicode representations of the Frames."""
    s = unicode('asdf')
    self.assertRaises(TypeError, zmq.Frame, s)
    u = '§'
    if str is not unicode:
        u = u.decode('utf8')
    for i in range(16):
        s = (2**i) * u
        m = zmq.Frame(s.encode('utf8'))
        self.assertEquals(s, unicode(m.bytes, 'utf8'))

def _distribute_internal(self, frames):
    """
    Distribute the publish message to local subscribers

    :param frames: list of frames
    :return: number of local subscribers
    """
    publisher = bytes(frames[0])
    topic = bytes(frames[7])
    data = bytes(frames[8])
    try:
        msg = jsonapi.loads(data)
        bus = msg['bus']
    except KeyError as exc:
        self._logger.error(
            "Missing key in _peer_publish message {}".format(exc))
        return 0
    except ValueError:
        self._logger.error("JSON decode error. Invalid character")
        return 0
    all_subscriptions = dict()
    subscriptions = dict()
    subs = dict()
    # Get subscriptions for all platforms
    try:
        all_subscriptions = self._peer_subscriptions['all'][bus]
    except KeyError:
        pass
    try:
        subscriptions = self._peer_subscriptions['internal'][bus]
    except KeyError:
        pass
    subs.update(all_subscriptions)
    subs.update(subscriptions)
    subscribers = set()
    # Check for local subscribers
    for prefix, subscription in subs.iteritems():
        if subscription and topic.startswith(prefix):
            subscribers |= subscription
    if subscribers:
        for subscriber in subscribers:
            frames[0] = zmq.Frame(subscriber)
            try:
                # Send the message to the subscriber
                for sub in self._send(frames, publisher):
                    # Drop the subscriber if unreachable
                    self.peer_drop(sub)
            except ZMQError:
                raise
    return len(subscribers)

def test_tracker(self):
    m = zmq.Frame(b'asdf', track=True)
    self.assertFalse(m.tracker.done)
    pm = zmq.MessageTracker(m)
    self.assertFalse(pm.done)
    del m
    for i in range(10):
        if pm.done:
            break
        time.sleep(0.1)
    self.assertTrue(pm.done)

def test_memoryview_shape(self):
    """memoryview shape info"""
    data = b("§§¶•ªº˜µ¬˚…∆˙åß∂©œ∑´†≈ç√")
    n = len(data)
    f = zmq.Frame(data)
    view1 = f.buffer
    self.assertEqual(view1.ndim, 1)
    self.assertEqual(view1.shape, (n,))
    self.assertEqual(view1.tobytes(), data)
    view2 = memoryview(f)
    self.assertEqual(view2.ndim, 1)
    self.assertEqual(view2.shape, (n,))
    self.assertEqual(view2.tobytes(), data)

def test_memoryview(self):
    """test messages from memoryview"""
    major, minor = sys.version_info[:2]
    if not (major >= 3 or (major == 2 and minor >= 7)):
        raise SkipTest("memoryviews only in python >= 2.7")
    s = b'carrotjuice'
    v = memoryview(s)
    m = zmq.Frame(s)
    buf = m.buffer
    s2 = buf.tobytes()
    self.assertEqual(s2, s)
    self.assertEqual(m.bytes, s)

def generate_reply(self):
    """ our job is to return our data set, then load another """
    batch_data = self.fetch_data()
    reply = [self._identity]
    totmsgsize = 0.0
    totcompsize = 0.0
    tstart = time.time()
    for ibatch, batch in enumerate(batch_data):
        for key, arr in batch.items():
            if len(batch_data) > 0:
                name = "{}__b{}".format(key, ibatch)
            else:
                name = key
            # encode
            x_enc = msgpack.packb(arr, default=m.encode)
            x_comp = zlib.compress(x_enc, self.compression_level)
            # for debug: inspect compression gains (usually a reduction to 1% or less of the original size)
            if self.print_msg_size:
                encframe = zmq.Frame(x_enc)
                comframe = zmq.Frame(x_comp)
                totmsgsize += len(encframe.bytes)
                totcompsize += len(comframe.bytes)
            # message is an alternating series of name and numpy array
            reply.append(name.encode('utf-8'))
            reply.append(x_comp)
    if self._verbosity > 1:
        if self.print_msg_size:
            print "LArCVServerWorker[{}]: size of array portion={} MB (uncompressed {} MB)".format(
                self._identity, totcompsize / 1.0e6, totmsgsize / 1.0e6)
        print "LArCVServerWorker[{}]: generate msg in {} secs".format(
            self._identity, time.time() - tstart)
    return reply

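# Hedged sketch, not from the original source: how a receiver might unpack the
# alternating [name, compressed msgpack array] pairs built by generate_reply above.
# It assumes the same msgpack_numpy helper `m` used for encoding; the function name
# decode_reply is hypothetical.
def decode_reply(reply):
    arrays = {}
    # reply[0] is the worker identity; (name, compressed array) pairs follow
    for name, x_comp in zip(reply[1::2], reply[2::2]):
        x_enc = zlib.decompress(x_comp)
        arrays[name] = msgpack.unpackb(x_enc, object_hook=m.decode)
    return arrays
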
def test_codec():
    lil_data = b"ddd"
    big_data = b"D" * 512
    lil_frame = zmq.Frame(data=b"fff")
    big_frame = zmq.Frame(data=b"F" * 512)
    mmsg = [lil_data, lil_frame, big_data, big_frame]
    enc = encode_message(mmsg)
    assert len(enc) == 2 * (1 + 3) + 2 * (5 + 512)
    ptr = 0
    assert enc[ptr] == 3
    ptr += 4
    assert enc[ptr] == 3
    ptr += 4
    assert enc[ptr] == 0xFF
    ptr += 1
    siz = struct.unpack('=I', enc[ptr:ptr + 4])[0]
    ptr += 4
    print('big data size', siz)
    assert siz == 512
    assert enc[ptr:ptr + siz] == big_data
    ptr += 512
    assert enc[ptr] == 0xFF
    ptr += 1
    siz = struct.unpack('=I', enc[ptr:ptr + 4])[0]
    assert siz == 512
    print('big frame size', siz)
    ptr += 4
    assert enc[ptr:ptr + siz] == big_frame.bytes
    mmsg2 = decode_message(enc)
    for part, (m1, m2) in enumerate(zip(mmsg, mmsg2)):
        if isinstance(m1, zmq.Frame):
            m1 = m1.bytes
        assert m1 == m2

def clientish_send(sock, msg, *args, **kwds):
    '''Send a message via a clientish socket'''
    if not isinstance(msg, list):
        msg = [msg]
    if sock.type == zmq.CLIENT:
        frame = zmq.Frame(data=encode_message(msg))
        return sock.send(frame, *args, **kwds)
    if sock.type == zmq.DEALER:
        msg = [b''] + msg
        return sock.send_multipart(msg, *args, **kwds)
    raise ValueError(f'unsupported socket type {sock.type}')

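# Hedged sketch, an assumption rather than part of the original source: a possible
# receive-side counterpart to clientish_send, reusing the decode_message helper
# exercised in test_codec above. The DEALER branch assumes the empty delimiter frame
# added on send is echoed back and should be dropped.
def clientish_recv(sock, *args, **kwds):
    '''Receive a message via a clientish socket (sketch).'''
    if sock.type == zmq.CLIENT:
        data = sock.recv(*args, **kwds)
        return decode_message(data)
    if sock.type == zmq.DEALER:
        msg = sock.recv_multipart(*args, **kwds)
        return msg[1:]          # drop the empty delimiter frame
    raise ValueError(f'unsupported socket type {sock.type}')
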
def test_frame_more(self):
    """test Frame.more attribute"""
    frame = zmq.Frame(b"hello")
    self.assertFalse(frame.more)
    sa, sb = self.create_bound_pair(zmq.PAIR, zmq.PAIR)
    sa.send_multipart([b'hi', b'there'])
    frame = self.recv(sb, copy=False)
    self.assertTrue(frame.more)
    if zmq.zmq_version_info()[0] >= 3 and not PYPY:
        self.assertTrue(frame.get(zmq.MORE))
    frame = self.recv(sb, copy=False)
    self.assertFalse(frame.more)
    if zmq.zmq_version_info()[0] >= 3 and not PYPY:
        self.assertFalse(frame.get(zmq.MORE))

def test_buffer_numpy(self):
    """test non-copying numpy array messages"""
    try:
        import numpy
    except ImportError:
        raise SkipTest("numpy required")
    rand = numpy.random.randint
    shapes = [rand(2, 16) for i in range(5)]
    for i in range(1, len(shapes) + 1):
        shape = shapes[:i]
        A = numpy.random.random(shape)
        m = zmq.Frame(A)
        self.assertEqual(memoryview(A), m.buffer)
        B = numpy.array(m.buffer, dtype=A.dtype).reshape(A.shape)
        self.assertEqual((A == B).all(), True)

def test_memoryview_shape(self):
    """memoryview shape info"""
    if sys.version_info < (3,):
        raise SkipTest("only test memoryviews on Python 3")
    data = b("§§¶•ªº˜µ¬˚…∆˙åß∂©œ∑´†≈ç√")
    n = len(data)
    f = zmq.Frame(data)
    view1 = f.buffer
    self.assertEqual(view1.ndim, 1)
    self.assertEqual(view1.shape, (n,))
    self.assertEqual(view1.tobytes(), data)
    view2 = memoryview(f)
    self.assertEqual(view2.ndim, 1)
    self.assertEqual(view2.shape, (n,))
    self.assertEqual(view2.tobytes(), data)

def pipeline_info(self, session_id, message_type, message):
    """Handle the pipeline info message"""
    logging.info("Handling pipeline info request")
    pipeline_txt = message.pop(0).bytes
    pipeline = cellprofiler_core.pipeline.Pipeline()
    try:
        pipeline.loadtxt(StringIO(pipeline_txt))
    except Exception as e:
        logging.warning(
            "Failed to load pipeline: sending pipeline exception")
        self.raise_pipeline_exception(session_id, str(e))
        return
    input_modules, other_modules = self.split_pipeline(pipeline)
    channels = self.find_channels(input_modules)
    type_names, measurements = self.find_measurements(
        other_modules, pipeline)
    body = json.dumps([channels, type_names, measurements])
    msg_out = [
        zmq.Frame(session_id),
        zmq.Frame(),
        zmq.Frame(PIPELINE_INFO_REPLY_1),
        zmq.Frame(body),
    ]
    self.socket.send_multipart(msg_out)

def serialize(self, obj: Any) -> Iterator[zmq.Frame]:
    """ Serialize a single object of type *type_* to a zmq.Frame stream """
    type_ = type(obj)
    logger.debug("serialize(%r)", type_)
    entry = self._entry_by_type.get(type(obj))
    if not entry:
        raise NotImplementedError(
            f"Serialization protocol not implemented for {type_}")
    logger.debug("Using %r serializer", entry.serializer)
    yield zmq.Frame(b"T:%s" % entry.tag)
    yield from entry.serializer.serialize(obj)

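# Hedged usage sketch (assumptions: `registry` is an object exposing the serialize()
# method above, `sock` is a connected zmq socket, `obj` is a registered type): the
# generated Frame stream can be collected and sent as one multipart message.
frames = list(registry.serialize(obj))
sock.send_multipart(frames, copy=False)
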
def test_04_01_run_group(self):
    pipeline = cpp.Pipeline()
    load_images = LoadImages()
    load_images.module_num = 1
    load_images.images[0].channels[0].image_name.value = "Foo"
    pipeline.add_module(load_images)
    identify = IdentifyPrimaryObjects()
    identify.use_advanced.value = True
    identify.module_num = 2
    identify.x_name.value = "Foo"
    identify.y_name.value = "dizzy"
    identify.threshold.threshold_scope.value = TS_GLOBAL
    identify.threshold.global_operation.value = TM_MANUAL
    identify.threshold.manual_threshold.value = .5
    identify.exclude_size.value = False
    pipeline.add_module(identify)
    pipeline_txt = StringIO()
    pipeline.savetxt(pipeline_txt)
    image = np.zeros((2, 11, 17))
    image[0, 2:-2, 2:-2] = 1
    image[1, 2:-2, 2:7] = 1
    image[1, 2:-2, 10:-2] = 1
    image_metadata = [[
        "Foo",
        [["Z", image.shape[0], image.strides[0] / 8],
         ["Y", image.shape[1], image.strides[1] / 8],
         ["X", image.shape[2], image.strides[2] / 8]]
    ]]
    message = [
        zmq.Frame(self.session_id),
        zmq.Frame(),
        zmq.Frame(RUN_GROUP_REQ_1),
        zmq.Frame(pipeline_txt.getvalue()),
        zmq.Frame(json.dumps(image_metadata)),
        zmq.Frame(image)
    ]
    self.socket.send_multipart(message)
    response = self.socket.recv_multipart()
    self.assertEqual(response.pop(0), self.session_id)
    self.assertEqual(response.pop(0), "")
    self.assertEqual(response.pop(0), RUN_REPLY_1)
    metadata = json.loads(response.pop(0))
    data = response.pop(0)
    measurements = self.decode_measurements(metadata, data)
    self.assertEqual(len(measurements[cpmeas.IMAGE][cpmeas.IMAGE_NUMBER]), 2)
    self.assertEqual(measurements[cpmeas.IMAGE]["Count_dizzy"][0], 1)
    self.assertEqual(measurements[cpmeas.IMAGE]["Count_dizzy"][1], 2)
    self.assertEqual(measurements["dizzy"]["Location_Center_Y"][0], 5)