Example #1
    def _start_data_subscribers(self, count, raw_count):
        """
        """
        # Create a pubsub client to create streams.
        pubsub_client = PubsubManagementServiceClient(node=self.container.node)

        # Create streams and subscriptions for each stream named in driver.
        self._data_subscribers = []
        self._samples_received = []
        self._raw_samples_received = []
        self._async_sample_result = AsyncResult()
        self._async_raw_sample_result = AsyncResult()

        # A callback for processing subscribed-to data.
        def recv_data(message, stream_route, stream_id):
            log.info('Received parsed data on %s (%s,%s)', stream_id,
                     stream_route.exchange_point, stream_route.routing_key)
            self._samples_received.append(message)
            if len(self._samples_received) == count:
                self._async_sample_result.set()

        def recv_raw_data(message, stream_route, stream_id):
            log.info('Received raw data on %s (%s,%s)', stream_id,
                     stream_route.exchange_point, stream_route.routing_key)
            self._raw_samples_received.append(message)
            if len(self._raw_samples_received) == raw_count:
                self._async_raw_sample_result.set()

        from pyon.util.containers import create_unique_identifier

        stream_name = 'parsed'
        parsed_config = self._stream_config[stream_name]
        stream_id = parsed_config['stream_id']
        exchange_name = create_unique_identifier("%s_queue" % stream_name)
        self._purge_queue(exchange_name)
        sub = StandaloneStreamSubscriber(exchange_name, recv_data)
        sub.start()
        self._data_subscribers.append(sub)
        sub_id = pubsub_client.create_subscription(name=exchange_name,
                                                   stream_ids=[stream_id],
                                                   timeout=120.2)
        pubsub_client.activate_subscription(sub_id, timeout=120.3)
        sub.subscription_id = sub_id  # Bind the subscription to the standalone subscriber (easier cleanup, not good in real practice)

        stream_name = 'raw'
        parsed_config = self._stream_config[stream_name]
        stream_id = parsed_config['stream_id']
        exchange_name = create_unique_identifier("%s_queue" % stream_name)
        self._purge_queue(exchange_name)
        sub = StandaloneStreamSubscriber(exchange_name, recv_raw_data)
        sub.start()
        self._data_subscribers.append(sub)
        sub_id = pubsub_client.create_subscription(name=exchange_name,
                                                   stream_ids=[stream_id],
                                                   timeout=120.4)
        pubsub_client.activate_subscription(sub_id, timeout=120.5)
        sub.subscription_id = sub_id  # Bind the subscription to the standalone subscriber (easier cleanup, not good in real practice)
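The subscription_id stashed on each subscriber above is what makes teardown a single pass over self._data_subscribers. A minimal teardown sketch under that assumption (the method name _stop_data_subscribers and the hasattr guard are illustrative, not part of the example source):

    def _stop_data_subscribers(self):
        """Illustrative cleanup: undo the subscriptions created above."""
        pubsub_client = PubsubManagementServiceClient(node=self.container.node)
        for sub in self._data_subscribers:
            if hasattr(sub, 'subscription_id'):
                # Deactivate first, mirroring the create/activate order used above.
                pubsub_client.deactivate_subscription(sub.subscription_id)
                pubsub_client.delete_subscription(sub.subscription_id)
            sub.stop()
        self._data_subscribers = []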
Example #2
    def _start_data_subscribers(self, count, raw_count):
        """
        """        
        # Create a pubsub client to create streams.
        pubsub_client = PubsubManagementServiceClient(node=self.container.node)
                
        # Create streams and subscriptions for each stream named in driver.
        self._data_subscribers = []
        self._samples_received = []
        self._raw_samples_received = []
        self._async_sample_result = AsyncResult()
        self._async_raw_sample_result = AsyncResult()

        # A callback for processing subscribed-to data.
        def recv_data(message, stream_route, stream_id):
            log.info('Received parsed data on %s (%s,%s)', stream_id, stream_route.exchange_point, stream_route.routing_key)
            self._samples_received.append(message)
            if len(self._samples_received) == count:
                self._async_sample_result.set()

        def recv_raw_data(message, stream_route, stream_id):
            log.info('Received raw data on %s (%s,%s)', stream_id, stream_route.exchange_point, stream_route.routing_key)
            self._raw_samples_received.append(message)
            if len(self._raw_samples_received) == raw_count:
                self._async_raw_sample_result.set()

        from pyon.util.containers import create_unique_identifier

        stream_name = 'parsed'
        parsed_config = self._stream_config[stream_name]
        stream_id = parsed_config['stream_id']
        exchange_name = create_unique_identifier("%s_queue" %
                    stream_name)
        self._purge_queue(exchange_name)
        sub = StandaloneStreamSubscriber(exchange_name, recv_data)
        sub.start()
        self._data_subscribers.append(sub)
        sub_id = pubsub_client.create_subscription(name=exchange_name, stream_ids=[stream_id])
        pubsub_client.activate_subscription(sub_id)
        sub.subscription_id = sub_id # Bind the subscription to the standalone subscriber (easier cleanup, not good in real practice)
        
        stream_name = 'raw'
        parsed_config = self._stream_config[stream_name]
        stream_id = parsed_config['stream_id']
        exchange_name = create_unique_identifier("%s_queue" %
                    stream_name)
        self._purge_queue(exchange_name)
        sub = StandaloneStreamSubscriber(exchange_name, recv_raw_data)
        sub.start()
        self._data_subscribers.append(sub)
        sub_id = pubsub_client.create_subscription(name=exchange_name, stream_ids=[stream_id])
        pubsub_client.activate_subscription(sub_id)
        sub.subscription_id = sub_id # Bind the subscription to the standalone subscriber (easier cleanup, not good in real practice)
Example #3
    def _start_data_subscribers(self, count):
        """
        """
        # Create a pubsub client to create streams.
        pubsub_client = PubsubManagementServiceClient(node=self.container.node)

        # Create streams and subscriptions for each stream named in driver.
        self._data_subscribers = []
        self._samples_received = []
        #self._async_data_result = AsyncResult()

        strXterm = "xterm -T InstrumentScienceData -sb -rightbar "
        pOpenString = strXterm + " -e tail -f " + PIPE_PATH
        subprocess.Popen([
            'xterm', '-T', 'InstrumentScienceData', '-e', 'tail', '-f',
            PIPE_PATH
        ])

        #subprocess.Popen(pOpenString)

        #self.pipeData = open(PIPE_PATH, "w", 1)

        # A callback for processing subscribed-to data.
        def recv_data(message, stream_route, stream_id):
            print 'Received message on ' + str(stream_id) + ' (' + str(
                stream_route.exchange_point) + ',' + str(
                    stream_route.routing_key) + ')'
            log.info('Received message on %s (%s,%s)', stream_id,
                     stream_route.exchange_point, stream_route.routing_key)

            self.pipeData = open(PIPE_PATH, "w", 1)
            self.pipeData.write(str(message))
            self.pipeData.flush()
            self.pipeData.close()

            self._samples_received.append(message)
            #if len(self._samples_received) == count:
            #self._async_data_result.set()

        for (stream_name, stream_config) in self._stream_config.iteritems():

            stream_id = stream_config['stream_id']

            # Create subscriptions for each stream.

            exchange_name = '%s_queue' % stream_name
            self._purge_queue(exchange_name)
            sub = StandaloneStreamSubscriber(exchange_name, recv_data)
            sub.start()
            self._data_subscribers.append(sub)
            print 'stream_id: %s' % stream_id
            sub_id = pubsub_client.create_subscription(name=exchange_name,
                                                       stream_ids=[stream_id])
            pubsub_client.activate_subscription(sub_id)
            sub.subscription_id = sub_id  # Bind the subscription to the standalone subscriber (easier cleanup, not good in real practice)
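Both the xterm running tail -f and the recv_data callback open PIPE_PATH, so the constant is assumed to name a FIFO created elsewhere in the fixture. A sketch of that setup (the method name is illustrative; the real PIPE_PATH constant is defined outside this excerpt):

    def _create_science_data_pipe(self):
        """Illustrative: ensure the FIFO that recv_data writes to exists."""
        import os
        if not os.path.exists(PIPE_PATH):
            os.mkfifo(PIPE_PATH)  # named pipe read by the xterm's tail -f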
Example #4
    def _start_data_subscriber(self, config, callback):
        """
        Set up and start a data subscriber.
        """
        exchange_point = config['exchange_point']
        stream_id = config['stream_id']

        sub = StandaloneStreamSubscriber(exchange_point, callback)
        sub.start()
        self._data_subscribers.append(sub)

        pubsub_client = PubsubManagementServiceClient(node=self.container.node)
        sub_id = pubsub_client.create_subscription(name=exchange_point, stream_ids=[stream_id])
        pubsub_client.activate_subscription(sub_id)
        sub.subscription_id = sub_id # Bind the subscription to the standalone subscriber (easier cleanup, not good in real practice)
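A hypothetical call site for this helper, assuming the 'parsed' entry of self._stream_config carries the exchange_point and stream_id keys the helper reads, and that the test blocks on a gevent AsyncResult; every name other than _start_data_subscriber is illustrative:

    def _subscribe_and_wait_for_sample(self):
        """Illustrative usage: subscribe to one stream and wait for a sample."""
        from gevent.event import AsyncResult

        self._data_subscribers = []
        self._samples_received = []
        self._async_sample_result = AsyncResult()

        def on_sample(message, stream_route, stream_id):
            self._samples_received.append(message)
            self._async_sample_result.set()

        self._start_data_subscriber(self._stream_config['parsed'], on_sample)
        self._async_sample_result.get(timeout=60)  # blocks until on_sample fires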
Example #5
    def _start_data_subscriber(self, config, callback):
        """
        Set up and start a data subscriber.
        """
        exchange_point = config['exchange_point']
        stream_id = config['stream_id']

        sub = StandaloneStreamSubscriber(exchange_point, callback)
        sub.start()
        self._data_subscribers.append(sub)

        pubsub_client = PubsubManagementServiceClient(node=self.container.node)
        sub_id = pubsub_client.create_subscription(name=exchange_point,
                                                   stream_ids=[stream_id])
        pubsub_client.activate_subscription(sub_id)
        sub.subscription_id = sub_id  # Bind the subscription to the standalone subscriber (easier cleanup, not good in real practice)
Example #6
    def _start_data_subscribers(self, count):
        """
        """        
        # Create a pubsub client to create streams.
        pubsub_client = PubsubManagementServiceClient(node=self.container.node)
                
        # Create streams and subscriptions for each stream named in driver.
        self._data_subscribers = []
        self._samples_received = []
        #self._async_data_result = AsyncResult()

        strXterm = "xterm -T InstrumentScienceData -sb -rightbar "
        pOpenString = strXterm + " -e tail -f " + PIPE_PATH
        subprocess.Popen(['xterm', '-T', 'InstrumentScienceData', '-e', 'tail', '-f', PIPE_PATH])        
        #subprocess.Popen(pOpenString)
                
        #self.pipeData = open(PIPE_PATH, "w", 1) 

        # A callback for processing subscribed-to data.
        def recv_data(message, stream_route, stream_id):
            print 'Received message on ' + str(stream_id) + ' (' + str(stream_route.exchange_point) + ',' + str(stream_route.routing_key) + ')'
            log.info('Received message on %s (%s,%s)', stream_id, stream_route.exchange_point, stream_route.routing_key)
             
            self.pipeData = open(PIPE_PATH, "w", 1) 
            self.pipeData.write(str(message))
            self.pipeData.flush()
            self.pipeData.close()      

            self._samples_received.append(message)
            #if len(self._samples_received) == count:
                #self._async_data_result.set()

        for (stream_name, stream_config) in self._stream_config.iteritems():
            
            stream_id = stream_config['stream_id']
            
            # Create subscriptions for each stream.

            exchange_name = '%s_queue' % stream_name
            self._purge_queue(exchange_name)
            sub = StandaloneStreamSubscriber(exchange_name, recv_data)
            sub.start()
            self._data_subscribers.append(sub)
            print 'stream_id: %s' % stream_id
            sub_id = pubsub_client.create_subscription(name=exchange_name, stream_ids=[stream_id])
            pubsub_client.activate_subscription(sub_id)
            sub.subscription_id = sub_id # Bind the subscription to the standalone subscriber (easier cleanup, not good in real practice)
Example #7
    def _start_data_subscribers(self):
        """
        """

        # Create streams and subscriptions for each stream named in driver.
        self._stream_config = {}
        self._data_subscribers = []

        #
        # TODO retrieve appropriate stream definitions; for the moment, using
        # adhoc_get_stream_names
        #

        # A callback for processing subscribed-to data.
        def consume_data(message, stream_route, stream_id):
            log.info('Subscriber received data message: %s.' % str(message))
            self._samples_received.append(message)
            self._async_data_result.set()

        for stream_name in adhoc_get_stream_names():
            log.info('creating stream %r ...', stream_name)

            # TODO use appropriate exchange_point
            stream_id, stream_route = self._pubsub_client.create_stream(
                name=stream_name, exchange_point='science_data')

            log.info('create_stream(%r): stream_id=%r, stream_route=%s',
                     stream_name, stream_id, str(stream_route))

            pdict = adhoc_get_parameter_dictionary(stream_name)
            stream_config = dict(stream_route=stream_route.routing_key,
                                 stream_id=stream_id,
                                 parameter_dictionary=pdict.dump())

            self._stream_config[stream_name] = stream_config
            log.info('_stream_config[%r]= %r', stream_name, stream_config)

            # Create subscriptions for each stream.
            exchange_name = '%s_queue' % stream_name
            self._purge_queue(exchange_name)
            sub = StandaloneStreamSubscriber(exchange_name, consume_data)
            sub.start()
            self._data_subscribers.append(sub)
            sub_id = self._pubsub_client.create_subscription(
                name=exchange_name, stream_ids=[stream_id])
            self._pubsub_client.activate_subscription(sub_id)
            sub.subscription_id = sub_id
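Unlike the earlier examples, this variant also creates the streams it subscribes to, so a matching teardown would need to remove the streams as well as the subscriptions. A sketch under that assumption (method name illustrative; the client calls mirror the ones used above):

    def _stop_data_subscribers(self):
        """Illustrative teardown for the subscriptions and streams created above."""
        for sub in self._data_subscribers:
            if hasattr(sub, 'subscription_id'):
                self._pubsub_client.deactivate_subscription(sub.subscription_id)
                self._pubsub_client.delete_subscription(sub.subscription_id)
            sub.stop()
        for stream_config in self._stream_config.itervalues():
            self._pubsub_client.delete_stream(stream_config['stream_id'])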
Example #8
    def _start_data_subscribers(self):
        """
        """

        # Create streams and subscriptions for each stream named in driver.
        self._stream_config = {}
        self._data_subscribers = []

        #
        # TODO retrieve appropriate stream definitions; for the moment, using
        # adhoc_get_stream_names
        #

        # A callback for processing subscribed-to data.
        def consume_data(message, stream_route, stream_id):
            log.info('Subscriber received data message: %s.' % str(message))
            self._samples_received.append(message)
            self._async_data_result.set()

        for stream_name in adhoc_get_stream_names():
            log.info('creating stream %r ...', stream_name)

            # TODO use appropriate exchange_point
            stream_id, stream_route = self._pubsub_client.create_stream(
                name=stream_name, exchange_point='science_data')

            log.info('create_stream(%r): stream_id=%r, stream_route=%s',
                     stream_name, stream_id, str(stream_route))

            pdict = adhoc_get_parameter_dictionary(stream_name)
            stream_config = dict(stream_route=stream_route.routing_key,
                                 stream_id=stream_id,
                                 parameter_dictionary=pdict.dump())

            self._stream_config[stream_name] = stream_config
            log.info('_stream_config[%r]= %r', stream_name, stream_config)

            # Create subscriptions for each stream.
            exchange_name = '%s_queue' % stream_name
            self._purge_queue(exchange_name)
            sub = StandaloneStreamSubscriber(exchange_name, consume_data)
            sub.start()
            self._data_subscribers.append(sub)
            sub_id = self._pubsub_client.create_subscription(name=exchange_name, stream_ids=[stream_id])
            self._pubsub_client.activate_subscription(sub_id)
            sub.subscription_id = sub_id
Example #9
    def start_data_subscribers(self):
        """
        """
        # Create a pubsub client to create streams.
        pubsub_client = PubsubManagementServiceClient(node=self.container.node)

        # Create streams and subscriptions for each stream named in driver.
        self.data_subscribers = {}
        self.samples_received = {}

        # A callback for processing subscribed-to data.
        def recv_data(message, stream_route, stream_id):
            log.info('Received message on %s (%s,%s)', stream_id,
                     stream_route.exchange_point, stream_route.routing_key)

            name = None

            for (stream_name, stream_config) in self.stream_config.iteritems():
                if stream_route.routing_key == stream_config['routing_key']:
                    log.debug("NAME: %s %s == %s" %
                              (stream_name, stream_route.routing_key,
                               stream_config['routing_key']))
                    name = stream_name
                    if (not self.samples_received.get(stream_name)):
                        self.samples_received[stream_name] = []
                    self.samples_received[stream_name].append(message)
                    log.debug("Add message to stream '%s' value: %s" %
                              (stream_name, message))

        for (stream_name, stream_config) in self.stream_config.iteritems():
            stream_id = stream_config['stream_id']

            # Create subscriptions for each stream.
            exchange_name = '%s_queue' % stream_name
            self._purge_queue(exchange_name)
            sub = StandaloneStreamSubscriber(exchange_name, recv_data)
            sub.start()
            self.data_subscribers[stream_name] = sub
            sub_id = pubsub_client.create_subscription(name=exchange_name,
                                                       stream_ids=[stream_id])
            pubsub_client.activate_subscription(sub_id)
            sub.subscription_id = sub_id  # Bind the subscription to the standalone subscriber (easier cleanup, not good in real practice)
Example #10
    def start_data_subscribers(self):
        """
        """
        # Create a pubsub client to create streams.
        pubsub_client = PubsubManagementServiceClient(node=self.container.node)

        # Create streams and subscriptions for each stream named in driver.
        self.data_subscribers = {}
        self.samples_received = {}

        # A callback for processing subscribed-to data.
        def recv_data(message, stream_route, stream_id):
            log.info("Received message on %s (%s,%s)", stream_id, stream_route.exchange_point, stream_route.routing_key)

            name = None

            for (stream_name, stream_config) in self.stream_config.iteritems():
                if stream_route.routing_key == stream_config["routing_key"]:
                    log.debug(
                        "NAME: %s %s == %s" % (stream_name, stream_route.routing_key, stream_config["routing_key"])
                    )
                    name = stream_name
                    if not self.samples_received.get(stream_name):
                        self.samples_received[stream_name] = []
                    self.samples_received[stream_name].append(message)
                    log.debug("Add message to stream '%s' value: %s" % (stream_name, message))

        for (stream_name, stream_config) in self.stream_config.iteritems():
            stream_id = stream_config["stream_id"]

            # Create subscriptions for each stream.
            exchange_name = "%s_queue" % stream_name
            self._purge_queue(exchange_name)
            sub = StandaloneStreamSubscriber(exchange_name, recv_data)
            sub.start()
            self.data_subscribers[stream_name] = sub
            sub_id = pubsub_client.create_subscription(name=exchange_name, stream_ids=[stream_id])
            pubsub_client.activate_subscription(sub_id)
            sub.subscription_id = (
                sub_id
            )  # Bind the subscription to the standalone subscriber (easier cleanup, not good in real practice)
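Because samples here are collected per stream name rather than signalled through an AsyncResult, a test built on this fixture typically polls the dictionary. A hypothetical helper under that assumption (its name, poll interval, and timeout are illustrative):

    def _wait_for_samples(self, stream_name, count, timeout=60):
        """Illustrative: block until count messages have arrived on stream_name."""
        import time
        import gevent
        end_time = time.time() + timeout
        while len(self.samples_received.get(stream_name, [])) < count:
            if time.time() > end_time:
                raise AssertionError('timed out waiting for %d samples on %r' % (count, stream_name))
            gevent.sleep(1)  # yield so the subscriber greenlets can run
        return self.samples_received[stream_name]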
Example #11
    def _start_data_subscribers(self, count):
        """
        """        
        # Create a pubsub client to create streams.
        pubsub_client = PubsubManagementServiceClient(node=self.container.node)
                
        # Create streams and subscriptions for each stream named in driver.
        self._data_subscribers = []
        self._samples_received = []
        self._async_sample_result = AsyncResult()

        # A callback for processing subscribed-to data.
        def recv_data(message, stream_route, stream_id):
            log.info('Received message on %s (%s,%s)', stream_id, stream_route.exchange_point, stream_route.routing_key)
            #print '######################## stream message:'
            #print str(message)
            self._samples_received.append(message)
            if len(self._samples_received) == count:
                self._async_sample_result.set()

        for (stream_name, stream_config) in self._stream_config.iteritems():
            
            if stream_name == 'parsed':
                
                # Create subscription for parsed stream only.

                stream_id = stream_config['stream_id']
                
                from pyon.util.containers import create_unique_identifier
                # exchange_name = '%s_queue' % stream_name
                exchange_name = create_unique_identifier("%s_queue" %
                        stream_name)
                self._purge_queue(exchange_name)
                sub = StandaloneStreamSubscriber(exchange_name, recv_data)
                sub.start()
                self._data_subscribers.append(sub)
                print 'stream_id: %s' % stream_id
                sub_id = pubsub_client.create_subscription(name=exchange_name, stream_ids=[stream_id])
                pubsub_client.activate_subscription(sub_id)
                sub.subscription_id = sub_id # Bind the subscription to the standalone subscriber (easier cleanup, not good in real practice)
Example #12
    def _start_data_subscribers(self, count):
        """
        """
        # Create a pubsub client to create streams.
        pubsub_client = PubsubManagementServiceClient(node=self.container.node)

        # Create streams and subscriptions for each stream named in driver.
        self._data_subscribers = []
        self._samples_received = []
        self._async_sample_result = AsyncResult()

        # A callback for processing subscribed-to data.
        def recv_data(message, stream_route, stream_id):
            log.info("Received message on %s (%s,%s)", stream_id, stream_route.exchange_point, stream_route.routing_key)
            self._samples_received.append(message)
            if len(self._samples_received) == count:
                self._async_sample_result.set()

        for (stream_name, stream_config) in self._stream_config.iteritems():

            stream_id = stream_config["stream_id"]

            # Create subscriptions for each stream.

            from pyon.util.containers import create_unique_identifier

            # exchange_name = '%s_queue' % stream_name
            exchange_name = create_unique_identifier("%s_queue" % stream_name)
            self._purge_queue(exchange_name)
            sub = StandaloneStreamSubscriber(exchange_name, recv_data)
            sub.start()
            self._data_subscribers.append(sub)
            print "stream_id: %s" % stream_id
            sub_id = pubsub_client.create_subscription(name=exchange_name, stream_ids=[stream_id])
            pubsub_client.activate_subscription(sub_id)
            sub.subscription_id = (
                sub_id
            )  # Bind the subscription to the standalone subscriber (easier cleanup, not good in real practice)
Example #13
    def _start_data_subscriber(self, stream_name, stream_id):
        """
        Starts a data subscriber for the given stream_name and stream_id.
        """

        def consume_data(message, stream_route, stream_id):
            # A callback for processing subscribed-to data.
            log.info('Subscriber received data message: %s.', str(message))
            self._samples_received.append(message)
            self._async_data_result.set()

        log.info('_start_data_subscriber stream_name=%r stream_id=%r',
                 stream_name, stream_id)

        # Create subscription for the stream
        exchange_name = '%s_queue' % stream_name
        self.container.ex_manager.create_xn_queue(exchange_name).purge()
        sub = StandaloneStreamSubscriber(exchange_name, consume_data)
        sub.start()
        self._data_subscribers.append(sub)
        sub_id = self.PSC.create_subscription(name=exchange_name, stream_ids=[stream_id])
        self.PSC.activate_subscription(sub_id)
        sub.subscription_id = sub_id
Example #14
    def _start_data_subscriber(self, stream_name, stream_id):
        """
        Starts a data subscriber for the given stream_name and stream_id.
        """
        def consume_data(message, stream_route, stream_id):
            # A callback for processing subscribed-to data.
            log.info('Subscriber received data message: %s.', str(message))
            self._samples_received.append(message)
            self._async_data_result.set()

        log.info('_start_data_subscriber stream_name=%r stream_id=%r',
                 stream_name, stream_id)

        # Create subscription for the stream
        exchange_name = '%s_queue' % stream_name
        self.container.ex_manager.create_xn_queue(exchange_name).purge()
        sub = StandaloneStreamSubscriber(exchange_name, consume_data)
        sub.start()
        self._data_subscribers.append(sub)
        sub_id = self.PSC.create_subscription(name=exchange_name,
                                              stream_ids=[stream_id])
        self.PSC.activate_subscription(sub_id)
        sub.subscription_id = sub_id
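Examples #13 and #14 rely on a pubsub client stored as self.PSC plus the collection attributes used by consume_data. A plausible fixture fragment, mirroring the client construction in the other examples (everything here is illustrative rather than taken from the source):

    def setUp(self):
        """Illustrative fixture bits assumed by the two examples above."""
        from gevent.event import AsyncResult
        self.PSC = PubsubManagementServiceClient(node=self.container.node)
        self._data_subscribers = []
        self._samples_received = []
        self._async_data_result = AsyncResult()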