def on_start(self):
    '''
    Binds the publisher to the transform
    '''
    super(TransformStreamPublisher, self).on_start()
    self.stream_id = self.CFG.get_safe('process.stream_id', '')
    self.exchange_point = self.CFG.get_safe('process.exchange_point', 'science_data')
    self.routing_key = self.CFG.get_safe('process.routing_key', '')

    # We do not want processes to make service calls
    # A StreamPublisher has a behavior built-in to create a stream
    # if no stream_id and route are specified.
    # We will use the container attached endpoints instead of making a new stream
    if not (self.stream_id or self.routing_key):
        output_streams = copy(self.CFG.get_safe('process.publish_streams'))
        first_stream = output_streams.popitem()
        try:
            self.publisher = getattr(self, first_stream[0])
        except AttributeError:
            log.warning('no publisher endpoint located')
            self.publisher = None
    else:
        self.publisher = StreamPublisher(process=self,
                                         stream_id=self.stream_id,
                                         exchange_point=self.exchange_point,
                                         routing_key=self.routing_key)
def test_stream_pub_sub(self):
    self.verified = Event()
    self.route = StreamRoute(routing_key='stream_name')

    def verify(message, route, stream):
        self.assertEquals(message, 'test')
        self.assertEquals(route.routing_key, self.route.routing_key)
        self.assertTrue(route.exchange_point.startswith(get_sys_name()))
        self.assertEquals(stream, 'stream_name')
        self.verified.set()

    sub_proc = SimpleProcess()
    sub_proc.container = self.container
    sub1 = StreamSubscriber(process=sub_proc, exchange_name='stream_name', callback=verify)
    sub1.add_stream_subscription("stream_name")
    sub1.start()
    self.queue_cleanup.append('data.stream_name')

    pub_proc = SimpleProcess()
    pub_proc.container = self.container
    pub1 = StreamPublisher(process=pub_proc, stream=self.route)

    sub1.xn.bind(self.route.routing_key, pub1.xp)

    pub1.publish('test')
    self.assertTrue(self.verified.wait(2))
class TransformStreamPublisher(TransformStreamProcess):
    '''
    Transforms which publish on a stream.

    Parameters:
      process.stream_id      Outgoing stream identifier.
      process.exchange_point Route's exchange point.
      process.routing_key    Route's routing key.

    Either the stream_id or both the exchange_point and routing_key need to be provided.
    '''
    def __init__(self):
        super(TransformStreamPublisher, self).__init__()

    def on_start(self):
        '''
        Binds the publisher to the transform
        '''
        super(TransformStreamPublisher, self).on_start()
        self.stream_id = self.CFG.get_safe('process.stream_id', '')
        self.exchange_point = self.CFG.get_safe('process.exchange_point', 'science_data')
        self.routing_key = self.CFG.get_safe('process.routing_key', '')

        self.publisher = StreamPublisher(process=self,
                                         stream_id=self.stream_id,
                                         exchange_point=self.exchange_point,
                                         routing_key=self.routing_key)

    def publish(self, msg, to_name):
        '''
        To be implemented by the transform developer.
        '''
        raise NotImplementedError('Method publish not implemented')

    def on_quit(self):
        self.publisher.close()
        super(TransformStreamPublisher, self).on_quit()
def test_stream_pub_sub(self):
    self.verified = Event()
    self.route = StreamRoute(exchange_point='xp_test', routing_key='route')

    def verify(message, route, stream):
        self.assertEquals(message, 'test')
        self.assertEquals(route, self.route)
        self.assertEquals(stream, '')
        self.verified.set()

    sub_proc = SimpleProcess()
    sub_proc.container = self.container
    sub1 = StreamSubscriber(process=sub_proc, exchange_name='sub1', callback=verify)
    sub1.start()
    self.queue_cleanup.append('sub1')

    pub_proc = SimpleProcess()
    pub_proc.container = self.container
    pub1 = StreamPublisher(process=pub_proc, stream_route=self.route)

    sub1.xn.bind(self.route.routing_key, pub1.xp)

    pub1.publish('test')
    self.assertTrue(self.verified.wait(2))
class TransformStreamPublisher(TransformStreamProcess):
    '''
    Transforms which publish on a stream.

    Parameters:
      process.stream_id      Outgoing stream identifier.
      process.exchange_point Route's exchange point.
      process.routing_key    Route's routing key.

    Either the stream_id or both the exchange_point and routing_key need to be provided.
    '''
    def __init__(self):
        super(TransformStreamPublisher, self).__init__()

    def on_start(self):
        '''
        Binds the publisher to the transform
        '''
        super(TransformStreamPublisher, self).on_start()
        self.stream_id = self.CFG.get_safe('process.stream_id', '')
        self.exchange_point = self.CFG.get_safe('process.exchange_point', 'science_data')
        self.routing_key = self.CFG.get_safe('process.routing_key', '')

        # We do not want processes to make service calls
        # A StreamPublisher has a behavior built-in to create a stream
        # if no stream_id and route are specified.
        # We will use the container attached endpoints instead of making a new stream
        if not (self.stream_id or self.routing_key):
            output_streams = copy(self.CFG.get_safe('process.publish_streams'))
            first_stream = output_streams.popitem()
            try:
                self.publisher = getattr(self, first_stream[0])
            except AttributeError:
                log.warning('no publisher endpoint located')
                self.publisher = None
        else:
            self.publisher = StreamPublisher(process=self,
                                             stream_id=self.stream_id,
                                             exchange_point=self.exchange_point,
                                             routing_key=self.routing_key)

    def publish(self, msg, to_name):
        '''
        To be implemented by the transform developer.
        '''
        raise NotImplementedError('Method publish not implemented')

    def on_quit(self):
        if self.publisher:
            self.publisher.close()
        super(TransformStreamPublisher, self).on_quit()
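A concrete transform would subclass this and supply `publish`. The fragment below is only an illustrative sketch, not part of the library: `ExampleTransform` is a hypothetical name, and it assumes the `self.publisher` endpoint created in `on_start` above (which may be None when the fallback branch finds no endpoint).

class ExampleTransform(TransformStreamPublisher):
    # Hypothetical subclass: forward messages to the endpoint bound in on_start().
    def publish(self, msg, to_name):
        if self.publisher:
            self.publisher.publish(msg)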
def test_stream_transforms(self):
    self.verified = Event()
    input_route = StreamRoute('test_exchange', 'input')
    output_route = StreamRoute('test_exchange', 'output')

    def verify(m, route, stream_id):
        self.assertEquals(route, output_route)
        self.assertEquals(m, 'test')
        self.verified.set()

    # Create I/O Processes
    #--------------------------------------------------------------------------------
    pub_proc = TransformBase()
    pub_proc.container = self.container
    publisher = StreamPublisher(process=pub_proc, stream_route=input_route)

    transform = self.container.spawn_process(
        'transform',
        'ion.core.process.test.test_transform',
        'EmptyDataProcess',
        {'process': {'queue_name': 'transform_input',
                     'exchange_point': output_route.exchange_point,
                     'routing_key': output_route.routing_key}},
        'transformpid')
    transform = self.container.proc_manager.procs[transform]

    sub_proc = TransformBase()
    sub_proc.container = self.container
    subscriber = StreamSubscriber(process=sub_proc, exchange_name='subscriber', callback=verify)

    # Bind the transports
    #--------------------------------------------------------------------------------
    transform.subscriber.xn.bind(input_route.routing_key, publisher.xp)
    subscriber.xn.bind(output_route.routing_key, transform.publisher.xp)

    subscriber.start()
    self.addCleanup(subscriber.stop)

    publisher.publish('test')
    self.assertTrue(self.verified.wait(4))
def on_start(self):
    '''
    Binds the publisher to the transform
    '''
    super(TransformStreamPublisher, self).on_start()
    self.stream_id = self.CFG.get_safe('process.stream_id', '')
    self.exchange_point = self.CFG.get_safe('process.exchange_point', 'science_data')
    self.routing_key = self.CFG.get_safe('process.routing_key', '')

    self.publisher = StreamPublisher(process=self,
                                     stream_id=self.stream_id,
                                     exchange_point=self.exchange_point,
                                     routing_key=self.routing_key)
def create_publisher(self, dataprocess_id, dataprocess_details):
    #todo: create correct publisher type for the transform type
    #todo: DataMonitor, Event Monitor get EventPublishers
    #todo: DataProcess, EventProcess get stream publishers
    out_stream_route = dataprocess_details.get('out_stream_route', '')
    out_stream_id = dataprocess_details.get('out_stream_id', '')

    publisher = StreamPublisher(process=self, stream_id=out_stream_id,
                                stream_route=out_stream_route)
    self._publisher_map[dataprocess_id] = publisher
def _set_publisher_endpoints(self, process_instance, publisher_streams=None):

    publisher_streams = publisher_streams or {}
    names = []

    for name, stream_id in publisher_streams.iteritems():
        # problem is here
        pub = StreamPublisher(process=process_instance, stream_id=stream_id)
        setattr(process_instance, name, pub)
        names.append(name)

    return names
def on_start(self):  #pragma no cover
    log.info('Starting Demuxer')
    TransformStreamListener.on_start(self)
    log.info('----------')
    self.output_streams = self.CFG.get_safe('process.out_streams')
    if self.output_streams is None or not isinstance(self.output_streams, list):
        log.error('(%s) Failed to demux, I/O configuration is incorrect: (%s)',
                  self.id, self.output_streams)
        return
    self.publishers = []
    for stream in self.output_streams:
        log.info(" -> %s", stream)
        self.publishers.append(StreamPublisher(process=self, stream_id=stream))
def _construct_publishers(self, stream_info):
    for (stream_name, stream_config) in stream_info.iteritems():
        try:
            exchange_point = stream_config['exchange_point']
            routing_key = stream_config['routing_key']
            route = StreamRoute(exchange_point=exchange_point, routing_key=routing_key)
            stream_id = stream_config['stream_id']
            publisher = StreamPublisher(process=self._agent, stream_id=stream_id,
                                        stream_route=route)
            self._publishers[stream_name] = publisher
            self._stream_greenlets[stream_name] = None
            self._stream_buffers[stream_name] = []

        except Exception as e:
            errmsg = 'Instrument agent %s ' % self._agent._proc_name
            errmsg += 'error constructing publisher for stream %s. ' % stream_name
            errmsg += str(e)
            log.error(errmsg)
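The `stream_info` mapping above is keyed by stream name, and each entry must carry at least the three keys the loop reads. A hypothetical example of the expected shape (the stream name, routing key, and id are made up for illustration):

stream_info = {
    'parsed': {
        'exchange_point': 'science_data',  # hypothetical exchange point
        'routing_key': 'parsed.data',      # hypothetical routing key
        'stream_id': 'stream_id_1',        # hypothetical stream identifier
    },
}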
def _on_done(self):
    '''
    Callback for the thread when the query is complete. Iterate through the entries
    and publish each post and comment(s) on an independent stream
    '''
    num = 0
    for entry in self.entries:
        """
        We are skipping create stream & register producer here.

        Create stream should not be called by a stream producing process such as an
        instrument driver or data agent. It should be called at a higher level.
        For this reason we will pretend it has already been called and use an
        unregistered stream.
        """
        p = StreamPublisher(name=(self.XP, '%s.%s' % (num, "data")),
                            process=self,
                            node=self.container.node)

        p.publish(msg=entry['post'])
        log.debug('Published post id %s' % entry['post'].post_id)

        for comment in entry['comments']:
            p.publish(msg=comment)

        num += 1

    log.info('Completed Publishing Blog Results for Blog %s' % self.feed.blog)
def _create_publisher(self, stream_id, stream_route):
    publisher = StreamPublisher(process=self._agent, stream_id=stream_id,
                                stream_route=stream_route)
    return publisher
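Taken together, the examples boil down to one small pattern: build a StreamRoute, hand it to a StreamPublisher along with the owning process, and call publish. A minimal sketch, assuming the constructor signatures used in the tests above (the process and container references are obtained the same way the tests obtain them):

proc = SimpleProcess()
proc.container = container  # assumed to be available, as in the tests above
route = StreamRoute(exchange_point='xp_test', routing_key='route')
pub = StreamPublisher(process=proc, stream_route=route)
pub.publish('test')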