Code example #1
    def test_subscribe(self):
        # Test Subscriber.
        # The goal of this test is to get messages routed to the callback mock.
        cbmock = Mock()
        sub = Subscriber(node=self._node, from_name="testsub", callback=cbmock)

        # tell the subscriber to create this as the main listening channel
        listen_channel_mock = self._setup_mock_channel(
            ch_type=SubscriberChannel, value="subbed", error_message="")
        sub.node.channel.return_value = listen_channel_mock

        # tell our channel to return itself when accepted
        listen_channel_mock.accept.return_value = listen_channel_mock

        # we're ready! call listen
        sub.listen()

        # make sure we got our message
        cbmock.assert_called_once_with(
            'subbed', {
                'conv-id': sentinel.conv_id,
                'status_code': 200,
                'error_message': '',
                'op': None
            })
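Note: the test relies on a _setup_mock_channel helper defined elsewhere in the test class. A minimal sketch of what such a helper could look like follows, assuming Mock and sentinel are already imported by the test module and that a channel's recv() hands back a (body, headers, delivery tag) triple; both the helper body and that recv contract are assumptions, not pyon's documented API.

    def _setup_mock_channel(self, ch_type=None, value=None, error_message="", op=None):
        # Hypothetical sketch: a Mock standing in for a channel of type
        # ch_type, preloaded so a single recv() yields a message whose
        # headers match the assertion in test_subscribe above.
        ch = Mock(spec=ch_type)
        ch.recv.return_value = (value,
                                {'conv-id': sentinel.conv_id,
                                 'status_code': 200,
                                 'error_message': error_message,
                                 'op': op},
                                sentinel.delivery_tag)
        return ch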
Code example #2
    def test_create_endpoint(self):
        def mycb(msg, headers):
            return "test"

        sub = Subscriber(node=self._node, from_name="testsub", callback=mycb)
        e = sub.create_endpoint()

        self.assertEquals(e._callback, mycb)
Code example #3
    def __init__(self, process=None):
        self.process = process

        self.async_res = AsyncResult()
        self.wait_name = "asyncresult_" + create_simple_unique_id()
        if self.process:
            self.wait_name = self.wait_name + "_" + self.process.id
        # TODO: Use same mechanism as pooled RPC response endpoint (without the request)
        self.wait_sub = Subscriber(from_name=self.wait_name,
                                   callback=self._result_callback,
                                   auto_delete=True)
        self.activated = False
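The constructor above leaves activated False and never starts wait_sub, so the class presumably pairs it with an activate step and a blocking wait. A sketch of what those could look like, reusing the spawn(sub.listen) pattern from the transform example below and AsyncResult.get(timeout=...) as used in the replay test; the method and attribute names here are assumptions, not the project's actual API.

    def activate(self):
        # Hypothetical: start the listener greenlet so _result_callback
        # can fire and set the AsyncResult (spawn as in the on_start
        # example below; assumes spawn is imported in this module).
        if not self.activated:
            self._listen_gl = spawn(self.wait_sub.listen)
            self.activated = True

    def wait(self, timeout=None):
        # Hypothetical: block the caller until a result message arrives.
        return self.async_res.get(timeout=timeout)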
Code example #4
File: transform.py  Project: oldpatricka/pyon
    def on_start(self):
        TransformDataProcess.on_start(self)

        # set up subscriber to *
        self._bt_sub = Subscriber(callback=lambda m, h: self.call_process(m),
                                  from_name=NameTrio(get_sys_name(),
                                                     'bench_queue', '*'))

        # spawn listener
        self._sub_gl = spawn(self._bt_sub.listen)

        # set up publisher to anything!
        self._bt_pub = Publisher(to_name=NameTrio(get_sys_name(),
                                                  str(uuid.uuid4())[0:6]))
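The same wildcard wiring works outside a TransformDataProcess. A standalone sketch, assembled only from calls that appear elsewhere on this page (make_node, NameTrio, spawning listen, Publisher.publish); the exchange name, routing key, the NameTrio import path, and Publisher accepting the same node= argument the Subscriber does are assumptions.

from pyon.net.endpoint import Publisher, Subscriber
from pyon.net.messaging import make_node
from pyon.net.transport import NameTrio  # assumed import path
from gevent import spawn

node, iowat = make_node()


def on_msg(m, h):
    print "got message:", m

# bind a queue to every single-word routing key on the exchange
sub = Subscriber(node=node,
                 from_name=NameTrio('bench_xs', 'bench_queue', '*'),
                 callback=on_msg)
sub_gl = spawn(sub.listen)

# publish with a one-word routing key so the '*' binding matches
pub = Publisher(node=node, to_name=NameTrio('bench_xs', 'bench'))
pub.publish("hello")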
Code example #5
    def get_realtime_visualization_data(self, query_token='', callback='', tqx=""):
        """This operation returns a block of visualization data for displaying data product in real time. This operation requires a
        user specific token which was provided from a previous request to the init_realtime_visualization operation.

        @param query_token    str
        @retval datatable    str
        @throws NotFound    Throws if specified query_token or its visualization product does not exist
        """

        log.debug("Query token : " + query_token + " CB : " + callback + "TQX : " + tqx)

        reqId = 0
        # If a reqId was passed in tqx, extract it
        if tqx:
            tqx_param_list = tqx.split(";")
            for param in tqx_param_list:
                key, value = param.split(":")
                if key == 'reqId':
                    reqId = value

        ret_val = []
        if not query_token:
            raise BadRequest("The query_token parameter is missing")

        # Taking advantage of idempotency
        xq = self.container.ex_manager.create_xn_queue(query_token)

        subscriber = Subscriber(from_name=xq)
        subscriber.initialize()

        msgs = subscriber.get_all_msgs(timeout=2)
        for msg in msgs:
            msg.ack()

        subscriber.close()

        # Different messages should get processed differently. Ret val will be decided by the viz product type
        ret_val = self._process_visualization_message(msgs, callback, reqId)

        # TODO - replace as need be to return valid GDT data
        return ret_val
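The key, value = param.split(":") line above raises ValueError on any tqx parameter that does not contain exactly one colon. A more defensive extractor, as a sketch (the helper name is mine, not part of the service):

def extract_req_id(tqx, default=0):
    # Tolerate empty segments and values that themselves contain ':'
    # by splitting only on the first colon and skipping malformed parts.
    for param in (tqx or "").split(";"):
        if ":" not in param:
            continue
        key, value = param.split(":", 1)
        if key == 'reqId':
            return value
    return default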
Code example #6
    def get_realtime_visualization_data(self, query_token=''):
        """This operation returns a block of visualization data for displaying data product in real time. This operation requires a
        user specific token which was provided from a previous request to the init_realtime_visualization operation.

        @param query_token    str
        @retval datatable    str
        @throws NotFound    Throws if specified query_token or its visualization product does not exist
        """
        log.debug("get_realtime_visualization_data Vis worker: %s", self.id)

        ret_val = []
        if not query_token:
            raise BadRequest("The query_token parameter is missing")

        subscriber = None
        try:
            # Taking advantage of idempotency
            queue_name = '-'.join([USER_VISUALIZATION_QUEUE, query_token])
            xq = self.container.ex_manager.create_xn_queue(queue_name)

            subscriber = Subscriber(from_name=xq)
            subscriber.initialize()

        except:
            # Close the subscriber if it was created before the failure
            if subscriber:
                subscriber.close()

            raise BadRequest("Could not subscribe to the real-time queue")

        msgs = subscriber.get_all_msgs(timeout=2)
        for msg in msgs:
            msg.ack()

        subscriber.close()

        # Different messages should get processed differently. Ret val will be decided by the viz product type
        ret_val = self._process_visualization_message(msgs)

        return ret_val
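The fetch-and-ack sequence repeats in several examples on this page; a small helper could factor it out. A sketch, assuming only the get_all_msgs()/ack() API the examples already use:

def drain_and_ack(subscriber, timeout=2):
    # Pull everything currently queued for this subscriber, ack each
    # message so the broker can discard it, and hand the batch back.
    msgs = subscriber.get_all_msgs(timeout=timeout)
    for msg in msgs:
        msg.ack()
    return msgs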
Code example #7
#!/usr/bin/env python

from pyon.net.endpoint import Subscriber
from pyon.net.messaging import make_node
import gevent
import time

node, iowat = make_node()


def msg_recv(msg, h):
    global counter
    counter += 1


sub = Subscriber(node=node, name="hassan", callback=msg_recv)

counter = 0
st = time.time()


def tick():
    global counter, st
    while True:
        time.sleep(2)
        ct = time.time()
        elapsed_s = ct - st

        mps = counter / elapsed_s

        print counter, "messages, per sec:", mps


# spawn the stats printer in the background, then block in the listen loop
gevent.spawn(tick)
sub.listen()
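Nothing on this page shows the sending side of that benchmark. A hypothetical companion script, reusing the name= form of Publisher from the replay example further down and assuming Publisher accepts node= the same way Subscriber does above:

#!/usr/bin/env python
# Hypothetical companion: flood the "hassan" endpoint so the
# subscriber's counter has something to count.
from pyon.net.endpoint import Publisher
from pyon.net.messaging import make_node

node, iowat = make_node()
pub = Publisher(node=node, name="hassan")

while True:
    pub.publish("ping")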
Code example #8
File: test_exchange.py  Project: oldpatricka/pyon
    def test_consume_one_message_at_a_time(self):
        # see also pyon.net.test.test_channel:TestChannelInt.test_consume_one_message_at_a_time

        pub3 = Publisher(to_name=(self.container.ex_manager.default_xs.exchange, 'routed.3'))
        pub5 = Publisher(to_name=(self.container.ex_manager.default_xs.exchange, 'routed.5'))

        #
        # SETUP COMPLETE, BEGIN TESTING OF EXCHANGE OBJECTS
        #

        xq = self.container.ex_manager.create_xn_queue('random_queue')
        self.addCleanup(xq.delete)

        # recv'd messages from the subscriber
        self.recv_queue = Queue()

        sub = Subscriber(from_name=xq, callback=lambda m, h: self.recv_queue.put((m, h)))
        sub.prepare_listener()

        # publish 10 messages - we're not bound yet, so they'll just disappear
        for x in xrange(10):
            pub3.publish("3,%s" % str(x))

        # no messages yet
        self.assertFalse(sub.get_one_msg(timeout=0))

        # now, we'll bind the xq
        xq.bind('routed.3')

        # even though we are consuming, there are no messages - the previously published ones all disappeared
        self.assertFalse(sub.get_one_msg(timeout=0))

        # publish those messages again
        for x in xrange(10):
            pub3.publish("3,%s" % str(x))

        # NOW we have messages!
        for x in xrange(10):
            self.assertTrue(sub.get_one_msg(timeout=0))
            m, h = self.recv_queue.get(timeout=0)
            self.assertEquals(m, "3,%s" % str(x))

        # we've cleared it all
        self.assertFalse(sub.get_one_msg(timeout=0))

        # bind a wildcard and publish on both
        xq.bind('routed.*')

        for x in xrange(10):
            time.sleep(0.3)
            pub3.publish("3,%s" % str(x))
            time.sleep(0.3)
            pub5.publish("5,%s" % str(x))

        # should get all 20, interleaved
        for x in xrange(10):
            self.assertTrue(sub.get_one_msg(timeout=0))
            m, h = self.recv_queue.get(timeout=0)
            self.assertEquals(m, "3,%s" % str(x))

            self.assertTrue(sub.get_one_msg(timeout=0))
            m, h = self.recv_queue.get(timeout=0)
            self.assertEquals(m, "5,%s" % str(x))

        # add the routed.5 binding, remove all other bindings
        xq.bind('routed.5')
        xq.unbind('routed.3')
        xq.unbind('routed.*')

        # try publishing to 3, shouldn't arrive anymore
        pub3.publish("3")

        self.assertFalse(sub.get_one_msg(timeout=0))

        # let's turn off the consumer and let things build up a bit
        sub._chan.stop_consume()

        for x in xrange(10):
            pub5.publish("5,%s" % str(x))

        # 10 messages in the queue, no consumers
        self.assertTupleEqual((10, 0), sub._chan.get_stats())

        # drain queue
        sub._chan.start_consume()
        time.sleep(1)       # yield to allow delivery

        for x in xrange(10):
            self.assertTrue(sub.get_one_msg(timeout=0))
            self.recv_queue.get(timeout=0)

        sub.close()
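The time.sleep(1) above just hopes delivery has finished, and the get_stats() assertion can likewise race a slow broker. A polling helper, sketched on top of the same get_stats() call the test already uses (the helper is mine, not part of the test suite):

def wait_for_queued(chan, n, attempts=20, delay=0.1):
    # Poll the channel until at least n messages are waiting, instead
    # of asserting immediately or sleeping a fixed interval.
    for _ in xrange(attempts):
        queued, _consumers = chan.get_stats()
        if queued >= n:
            return True
        time.sleep(delay)
    return False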
Code example #9
    def test_visualization_queue(self):

        # The list of data product streams to monitor
        data_product_stream_ids = list()

        # Create the input data product
        ctd_stream_id, ctd_parsed_data_product_id = self.create_ctd_input_stream_and_data_product()
        data_product_stream_ids.append(ctd_stream_id)

        user_queue_name = USER_VISUALIZATION_QUEUE

        xq = self.container.ex_manager.create_xn_queue(user_queue_name)

        salinity_subscription_id = self.pubsubclient.create_subscription(
            stream_ids=data_product_stream_ids,
            exchange_name=user_queue_name,
            name="user visualization queue"
        )

        subscriber = Subscriber(from_name=xq)
        subscriber.initialize()

        # after the queue has been created it is safe to activate the subscription
        self.pubsubclient.activate_subscription(subscription_id=salinity_subscription_id)

        ctd_sim_pid = self.start_simple_input_stream_process(ctd_stream_id)
        gevent.sleep(10.0)  # Send some messages - don't care how many

        msg_count, _ = xq.get_stats()
        log.info('Messages in user queue 1: %s' % msg_count)

        # Validate the data from each of the messages along the way
        msgs = subscriber.get_all_msgs(timeout=2)
        for msg in msgs:
            msg.ack()
            self.validate_messages(msg)

        # Should be zero after pulling all of the messages.
        msg_count, _ = xq.get_stats()
        log.info('Messages in user queue 2: %s' % msg_count)

        # Continue to receive messages in the queue
        gevent.sleep(5.0)  # Send some messages - don't care how many

        # Turn the simulator off only now - more representative of an always-on stream of data
        self.process_dispatcher.cancel_process(ctd_sim_pid)  # kill the ctd simulator process - that is enough data

        # Should see more messages in the queue
        msg_count, _ = xq.get_stats()
        log.info('Messages in user queue 3: %s' % msg_count)

        msgs = subscriber.get_all_msgs(timeout=2)
        for msg in msgs:
            msg.ack()
            self.validate_messages(msg)

        # Should be zero after pulling all of the messages.
        msg_count, _ = xq.get_stats()
        log.info('Messages in user queue 4: %s' % msg_count)

        subscriber.close()
        self.container.ex_manager.delete_xn(xq)
Code example #10
    def test_multiple_visualization_queue(self):

        # set up a workflow with the salinity transform and the doubler. We will direct the original stream and the doubled stream to queues
        # and test to make sure the subscription to the queues is working correctly
        assertions = self.assertTrue

        # Build the workflow definition
        workflow_def_obj = IonObject(RT.WorkflowDefinition, name='Viz_Test_Workflow', description='A workflow to test collection of multiple data products in queues')

        workflow_data_product_name = 'TEST-Workflow_Output_Product'  # Set a specific output product name
        #-------------------------------------------------------------------------------------------------------------------------
        # Add a transformation process definition for salinity
        #-------------------------------------------------------------------------------------------------------------------------

        ctd_L2_salinity_dprocdef_id = self.create_salinity_data_process_definition()
        workflow_step_obj = IonObject('DataProcessWorkflowStep', data_process_definition_id=ctd_L2_salinity_dprocdef_id, persist_process_output_data=False)  # Don't persist the intermediate data product
        configuration = {'stream_name' : 'salinity'}
        workflow_step_obj.configuration = configuration
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        # Create it in the resource registry
        workflow_def_id = self.workflowclient.create_workflow_definition(workflow_def_obj)

        aids = self.rrclient.find_associations(workflow_def_id, PRED.hasDataProcessDefinition)
        assertions(len(aids) == 1)

        # The list of data product streams to monitor
        data_product_stream_ids = list()

        # Create the input data product
        ctd_stream_id, ctd_parsed_data_product_id = self.create_ctd_input_stream_and_data_product()
        data_product_stream_ids.append(ctd_stream_id)

        # Create and start the workflow
        workflow_id, workflow_product_id = self.workflowclient.create_data_process_workflow(workflow_def_id, ctd_parsed_data_product_id, timeout=30)

        workflow_output_ids, _ = self.rrclient.find_subjects(RT.Workflow, PRED.hasOutputProduct, workflow_product_id, True)
        assertions(len(workflow_output_ids) == 1)

        # Walk the associations to find the appropriate output data streams to validate the messages
        workflow_dp_ids, _ = self.rrclient.find_objects(workflow_id, PRED.hasDataProduct, RT.DataProduct, True)
        assertions(len(workflow_dp_ids) == 1)

        for dp_id in workflow_dp_ids:
            stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, None, True)
            assertions(len(stream_ids) == 1)
            data_product_stream_ids.append(stream_ids[0])

        # Now for each of the data_product_stream_ids create a queue and pipe their data to the queue

        user_queue_name1 = USER_VISUALIZATION_QUEUE + '1'
        user_queue_name2 = USER_VISUALIZATION_QUEUE + '2'

        # use idempotency to create queues
        xq1 = self.container.ex_manager.create_xn_queue(user_queue_name1)
        self.addCleanup(xq1.delete)
        xq2 = self.container.ex_manager.create_xn_queue(user_queue_name2)
        self.addCleanup(xq2.delete)
        xq1.purge()
        xq2.purge()

        # the create_subscription call takes a list of stream_ids, so wrap each stream id in its own list
        dp_stream_id1 = [data_product_stream_ids[0]]
        dp_stream_id2 = [data_product_stream_ids[1]]

        salinity_subscription_id1 = self.pubsubclient.create_subscription(stream_ids=dp_stream_id1,
            exchange_name=user_queue_name1, name="user visualization queue1")

        salinity_subscription_id2 = self.pubsubclient.create_subscription(stream_ids=dp_stream_id2,
            exchange_name=user_queue_name2, name="user visualization queue2")

        # Create subscribers for the output of the queue
        subscriber1 = Subscriber(from_name=xq1)
        subscriber1.initialize()
        subscriber2 = Subscriber(from_name=xq2)
        subscriber2.initialize()

        # after the queue has been created it is safe to activate the subscription
        self.pubsubclient.activate_subscription(subscription_id=salinity_subscription_id1)
        self.pubsubclient.activate_subscription(subscription_id=salinity_subscription_id2)

        # Start input stream and wait for some time
        ctd_sim_pid = self.start_simple_input_stream_process(ctd_stream_id)
        gevent.sleep(5.0)  # Send some messages - don't care how many

        msg_count, _ = xq1.get_stats()
        log.info('Messages in user queue 1: %s' % msg_count)
        msg_count, _ = xq2.get_stats()
        log.info('Messages in user queue 2: %s' % msg_count)

        msgs1 = subscriber1.get_all_msgs(timeout=2)
        msgs2 = subscriber2.get_all_msgs(timeout=2)

        for msg1, msg2 in zip(msgs1, msgs2):
            msg1.ack()
            msg2.ack()
            self.validate_multiple_vis_queue_messages(msg1.body, msg2.body)

        # kill the ctd simulator process - that is enough data
        self.process_dispatcher.cancel_process(ctd_sim_pid)

        # close the subscription and queues
        subscriber1.close()
        subscriber2.close()

        return
Code example #11
    def test_replay_integration(self):
        '''
        test_replay_integration
        '''
        import numpy as np
        # Keep this import: it's used in the vector comparison below even though PyCharm flags it as unused.

        cc = self.container
        XP = self.XP
        assertions = self.assertTrue

        ### Everything below here can be run as a script:
        log.debug('Got it')

        pubsub_management_service = PubsubManagementServiceClient(node=cc.node)
        ingestion_management_service = IngestionManagementServiceClient(
            node=cc.node)
        dataset_management_service = DatasetManagementServiceClient(
            node=cc.node)
        data_retriever_service = DataRetrieverServiceClient(node=cc.node)

        datastore_name = 'dm_test_replay_integration'

        producer = Publisher(name=(XP, 'stream producer'))

        ingestion_configuration_id = ingestion_management_service.create_ingestion_configuration(
            exchange_point_id=XP,
            couch_storage=CouchStorage(datastore_name=datastore_name,
                                       datastore_profile='SCIDATA'),
            hdf_storage=HdfStorage(),
            number_of_workers=1)

        ingestion_management_service.activate_ingestion_configuration(
            ingestion_configuration_id=ingestion_configuration_id)

        definition = SBE37_CDM_stream_definition()
        data_stream_id = definition.data_stream_id
        encoding_id = definition.identifiables[data_stream_id].encoding_id
        element_count_id = definition.identifiables[
            data_stream_id].element_count_id

        stream_def_id = pubsub_management_service.create_stream_definition(
            container=definition)
        stream_id = pubsub_management_service.create_stream(
            stream_definition_id=stream_def_id)

        dataset_id = dataset_management_service.create_dataset(
            stream_id=stream_id,
            datastore_name=datastore_name,
            view_name='datasets/dataset_by_id')
        ingestion_management_service.create_dataset_configuration(
            dataset_id=dataset_id,
            archive_data=True,
            archive_metadata=True,
            ingestion_configuration_id=ingestion_configuration_id)
        definition.stream_resource_id = stream_id

        packet = _create_packet(definition)
        input_file = FileSystem.mktemp()
        input_file.write(packet.identifiables[data_stream_id].values)
        input_file_path = input_file.name
        input_file.close()

        fields = [
            'conductivity', 'height', 'latitude', 'longitude', 'pressure',
            'temperature', 'time'
        ]

        input_vectors = acquire_data([input_file_path], fields, 2).next()

        producer.publish(msg=packet, to_name=(XP, '%s.data' % stream_id))

        replay_id, replay_stream_id = data_retriever_service.define_replay(
            dataset_id)
        ar = gevent.event.AsyncResult()

        def sub_listen(msg, headers):
            assertions(isinstance(msg, StreamGranuleContainer),
                       'replayed message is not a granule.')
            hdf_string = msg.identifiables[data_stream_id].values
            sha1 = hashlib.sha1(hdf_string).hexdigest().upper()
            assertions(sha1 == msg.identifiables[encoding_id].sha1,
                       'Checksum failed.')
            assertions(
                msg.identifiables[element_count_id].value == 1,
                'record replay count is incorrect %d.' %
                msg.identifiables[element_count_id].value)
            output_file = FileSystem.mktemp()
            output_file.write(msg.identifiables[data_stream_id].values)
            output_file_path = output_file.name
            output_file.close()
            output_vectors = acquire_data([output_file_path], fields, 2).next()
            for field in fields:
                comparison = (input_vectors[field]['values'] ==
                              output_vectors[field]['values'])
                assertions(
                    comparison.all(), 'vector mismatch: %s vs %s' %
                    (input_vectors[field]['values'],
                     output_vectors[field]['values']))
            FileSystem.unlink(output_file_path)
            ar.set(True)

        subscriber = Subscriber(name=(XP, 'replay listener'),
                                callback=sub_listen)

        g = gevent.Greenlet(subscriber.listen,
                            binding='%s.data' % replay_stream_id)
        g.start()

        data_retriever_service.start_replay(replay_id)

        ar.get(timeout=10)

        FileSystem.unlink(input_file_path)
Code example #12
    def start_listener(self, stream_id, callback):

        sub = Subscriber(name=(self.XP, 'replay_listener'), callback=callback)
        g = gevent.Greenlet(sub.listen, binding='%s.data' % stream_id)
        g.start()
        self.thread_pool.append(g)
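A matching teardown is not shown; one possible sketch that kills every listener greenlet collected in thread_pool (the method name is hypothetical, the gevent calls are standard):

    def stop_listeners(self):
        # Hypothetical counterpart to start_listener: kill each spawned
        # listener greenlet, wait for all of them to exit, then reset.
        for g in self.thread_pool:
            g.kill()
        gevent.joinall(self.thread_pool)
        self.thread_pool = []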