Code example #1
    def setUp(self):
        super(TransformPrototypeIntTest, self).setUp()

        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.rrc = ResourceRegistryServiceClient()
        self.dataset_management = DatasetManagementServiceClient()
        self.pubsub_management = PubsubManagementServiceClient()
        self.ssclient = SchedulerServiceClient()
        self.event_publisher = EventPublisher()
        self.user_notification = UserNotificationServiceClient()
        self.process_dispatcher = ProcessDispatcherServiceClient()

        self.exchange_names = []
        self.exchange_points = []
Code example #2
    def setUp(self):
        DMTestCase.setUp(self)

        process_definition = ProcessDefinition(
            name='qc_post_processor',
            executable={
                'module': 'ion.processes.data.transforms.qc_post_processing',
                'class': 'QCPostProcessing'
            })
        self.process_definition_id = self.process_dispatcher.create_process_definition(
            process_definition)
        self.addCleanup(self.process_dispatcher.delete_process_definition,
                        self.process_definition_id)

        self.process_id = self.process_dispatcher.create_process(
            self.process_definition_id)
        self.scheduler_service = SchedulerServiceClient()
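        # Note: create_process here appears to only allocate a process id; the process is
        # actually started later via schedule_process (see sync_launch in Code example #9 below).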
Code example #3
    def setUp(self):
        super(TransformPrototypeIntTest, self).setUp()

        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.rrc = ResourceRegistryServiceClient()
        self.ssclient = SchedulerServiceClient()
        self.event_publisher = EventPublisher()
Code example #4
    def setUp(self):
        DMTestCase.setUp(self)

        process_definition = ProcessDefinition(name='qc_post_processor',
                executable={'module':'ion.processes.data.transforms.qc_post_processing', 'class':'QCPostProcessing'})
        self.process_definition_id = self.process_dispatcher.create_process_definition(process_definition)
        self.addCleanup(self.process_dispatcher.delete_process_definition, self.process_definition_id)

        self.process_id = self.process_dispatcher.create_process(self.process_definition_id)
        self.scheduler_service = SchedulerServiceClient()
Code example #5
    def setUp(self):
        super(TransformPrototypeIntTest, self).setUp()

        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.rrc = ResourceRegistryServiceClient()
        self.dataset_management = DatasetManagementServiceClient()
        self.pubsub_management = PubsubManagementServiceClient()
        self.ssclient = SchedulerServiceClient()
        self.event_publisher = EventPublisher()

        self.exchange_names = []
        self.exchange_points = []
Code example #6
class TransformPrototypeIntTest(IonIntegrationTestCase):
    def setUp(self):
        super(TransformPrototypeIntTest, self).setUp()

        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.rrc = ResourceRegistryServiceClient()
        self.dataset_management = DatasetManagementServiceClient()
        self.pubsub_management = PubsubManagementServiceClient()
        self.ssclient = SchedulerServiceClient()
        self.event_publisher = EventPublisher()

        self.exchange_names = []
        self.exchange_points = []

    def tearDown(self):

        for xn in self.exchange_names:
            xni = self.container.ex_manager.create_xn_queue(xn)
            xni.delete()
        for xp in self.exchange_points:
            xpi = self.container.ex_manager.create_xp(xp)
            xpi.delete()

    def now_utc(self):
        return time.mktime(datetime.datetime.utcnow().timetuple())
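    # Note: time.mktime interprets the time tuple as local time, so now_utc is only an
    # approximation of UTC epoch seconds; Code example #8 below uses time.time() instead.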

    def _create_interval_timer_with_end_time(self, timer_interval=None, end_time=None):
        '''
        A convenience method to set up an interval timer with an end time
        '''
        self.timer_received_time = 0
        self.timer_interval = timer_interval

        start_time = self.now_utc()
        if not end_time:
            end_time = start_time + 2 * timer_interval + 1

        log.debug("got the end time here!! %s" % end_time)

        # Set up the interval timer. The scheduler will publish event with origin set as "Interval Timer"
        sid = self.ssclient.create_interval_timer(start_time="now" ,
            interval=self.timer_interval,
            end_time=end_time,
            event_origin="Interval Timer",
            event_subtype="")

        def cleanup_timer(scheduler, schedule_id):
            """
            Do a friendly cancel of the scheduled event.
            If it fails, it's ok.
            """
            try:
                scheduler.cancel_timer(schedule_id)
            except Exception:
                log.warn("Could not cancel the timer")

        self.addCleanup(cleanup_timer, self.ssclient, sid)

        return sid
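    # A minimal sketch (assumed usage, mirroring the EventSubscriber pattern in the tests
    # below) of how the timer events scheduled by this helper could be observed directly:
    #
    #     timer_events = gevent.queue.Queue()
    #     timer_sub = EventSubscriber(origin="Interval Timer",
    #                                 callback=lambda message, headers: timer_events.put(message))
    #     timer_sub.start()
    #     self.addCleanup(timer_sub.stop)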

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_event_processing(self):
        '''
        Test that events are processed by the transforms according to a provided algorithm
        '''


        #-------------------------------------------------------------------------------------
        # Set up the scheduler for an interval timer with an end time
        #-------------------------------------------------------------------------------------
        timer_id = self._create_interval_timer_with_end_time(timer_interval=2)
        self.assertIsNotNone(timer_id)

        #-------------------------------------------------------------------------------------
        # Create an event alert transform....
        # The configuration for the Event Alert Transform... set up the event types to listen to
        #-------------------------------------------------------------------------------------
        configuration = {
            'process':{
                'event_type': 'ResourceEvent',
                'timer_origin': 'Interval Timer',
                'instrument_origin': 'My_favorite_instrument'
            }
        }

        #-------------------------------------------------------------------------------------
        # Create the process
        #-------------------------------------------------------------------------------------
        pid = TransformPrototypeIntTest.create_process(name='event_alert_transform',
            module='ion.processes.data.transforms.event_alert_transform',
            class_name='EventAlertTransform',
            configuration=configuration)

        self.assertIsNotNone(pid)

        #-------------------------------------------------------------------------------------
        # Publish events and make assertions about alerts
        #-------------------------------------------------------------------------------------

        queue = gevent.queue.Queue()

        def event_received(message, headers):
            queue.put(message)

        event_subscriber = EventSubscriber( origin="EventAlertTransform",
            event_type="DeviceEvent",
            callback=event_received)

        event_subscriber.start()
        self.addCleanup(event_subscriber.stop)

        # Publish events at short intervals; no alert should be raised while the instrument is heard from

        for i in xrange(5):
            self.event_publisher.publish_event(event_type='ExampleDetectableEvent',
                origin="My_favorite_instrument",
                voltage=5,
                telemetry=10,
                temperature=20)
            gevent.sleep(0.1)
            self.assertTrue(queue.empty())



        # Now stay silent for longer than the timer interval (2 seconds), so the
        # transform stops hearing from the instrument and publishes an alert
        gevent.sleep(5)

        #-------------------------------------------------------------------------------------
        # Make assertions about the alert event published by the EventAlertTransform
        #-------------------------------------------------------------------------------------

        event = queue.get(timeout=10)

        log.debug("Alarm event received from the EventAertTransform %s" % event)

        self.assertEquals(event.type_, "DeviceEvent")
        self.assertEquals(event.origin, "EventAlertTransform")

        #------------------------------------------------------------------------------------------------
        # Now clear the event queue being populated by alarm events and publish normally once again
        #------------------------------------------------------------------------------------------------

        queue.queue.clear()

        for i in xrange(5):
            self.event_publisher.publish_event(event_type='ExampleDetectableEvent',
                origin="My_favorite_instrument",
                voltage=5,
                telemetry=10,
                temperature=20)
            gevent.sleep(0.1)
            self.assertTrue(queue.empty())

        log.debug("This completes the requirement that the EventAlertTransform publishes \
                    an alarm event when it does not hear from the instrument for some time.")


    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_stream_processing(self):
        #--------------------------------------------------------------------------------
        # Test that streams are processed by the transforms according to a provided algorithm
        #--------------------------------------------------------------------------------

        # TODO: In this simple implementation we only check whether the stream contains the
        # word PUBLISH and whether VALUE=<number> is present with a number below a threshold.

        # TODO: Later on, more sophisticated algorithms will make this prototype more powerful.

        #-------------------------------------------------------------------------------------
        # Start a subscriber to listen for an alert event from the Stream Alert Transform
        #-------------------------------------------------------------------------------------

        queue = gevent.queue.Queue()

        def event_received(message, headers):
            queue.put(message)

        event_subscriber = EventSubscriber( origin="StreamAlertTransform",
            event_type="DeviceEvent",
            callback=event_received)

        event_subscriber.start()
        self.addCleanup(event_subscriber.stop)

        #-------------------------------------------------------------------------------------
        # The configuration for the Stream Alert Transform... set up the event types to listen to
        #-------------------------------------------------------------------------------------
        config = {
            'process':{
                'queue_name': 'a_queue',
                'value': 10,
                'event_type':'DeviceEvent'
            }
        }

        #-------------------------------------------------------------------------------------
        # Create the process
        #-------------------------------------------------------------------------------------
        pid = TransformPrototypeIntTest.create_process(name='transform_data_process',
            module='ion.processes.data.transforms.event_alert_transform',
            class_name='StreamAlertTransform',
            configuration=config)

        self.assertIsNotNone(pid)

        #-------------------------------------------------------------------------------------
        # Publish streams and make assertions about alerts
        #-------------------------------------------------------------------------------------
        exchange_name = 'a_queue'
        exchange_point = 'test_exchange'
        routing_key = 'stream_id.stream'
        stream_route = StreamRoute(exchange_point, routing_key)

        xn = self.container.ex_manager.create_xn_queue(exchange_name)
        xp = self.container.ex_manager.create_xp(exchange_point)
        xn.bind('stream_id.stream', xp)

        pub = StandaloneStreamPublisher('stream_id', stream_route)

        message = "A dummy example message containing the word PUBLISH, and with VALUE = 5 . This message" +\
                  " will trigger an alert event from the StreamAlertTransform because the value provided is "\
                  "less than 10 that was passed in through the config."

        pub.publish(message)

        event = queue.get(timeout=10)
        self.assertEquals(event.type_, "DeviceEvent")
        self.assertEquals(event.origin, "StreamAlertTransform")

    #        self.purge_queues(exchange_name)

    #    def purge_queues(self, exchange_name):
    #        xn = self.container.ex_manager.create_xn_queue(exchange_name)
    #        xn.purge()

    @staticmethod
    def create_process(name='', module='', class_name='', configuration=None):
        '''
        A helper method to create and schedule a process
        '''

        producer_definition = ProcessDefinition(name=name)
        producer_definition.executable = {
            'module': module,
            'class': class_name
        }

        process_dispatcher = ProcessDispatcherServiceClient()

        procdef_id = process_dispatcher.create_process_definition(process_definition=producer_definition)
        pid = process_dispatcher.schedule_process(process_definition_id=procdef_id, configuration=configuration)

        return pid
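    # Example call (arguments taken from test_event_processing above):
    #
    #     pid = TransformPrototypeIntTest.create_process(
    #         name='event_alert_transform',
    #         module='ion.processes.data.transforms.event_alert_transform',
    #         class_name='EventAlertTransform',
    #         configuration={'process': {'event_type': 'ResourceEvent',
    #                                    'timer_origin': 'Interval Timer',
    #                                    'instrument_origin': 'My_favorite_instrument'}})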

    def test_demo_stream_granules_processing(self):
        """
        Test that the Demo Stream Alert Transform is functioning. The transform coordinates with the scheduler.
        It is configured to listen to a source that publishes granules. It publishes a DeviceStatusEvent if it
        receives a granule with bad data or a DeviceCommsEvent if no granule has arrived between two timer events.

        The transform is configured at launch using a config dictionary.
        """
        #-------------------------------------------------------------------------------------
        # Start a subscriber to listen for an alert event from the Stream Alert Transform
        #-------------------------------------------------------------------------------------

        queue_bad_data = gevent.queue.Queue()
        queue_no_data = gevent.queue.Queue()

        def bad_data(message, headers):
            if message.type_ == "DeviceStatusEvent":
                queue_bad_data.put(message)

        def no_data(message, headers):
            queue_no_data.put(message)

        event_subscriber_bad_data = EventSubscriber( origin="DemoStreamAlertTransform",
            event_type="DeviceStatusEvent",
            callback=bad_data)

        event_subscriber_no_data = EventSubscriber( origin="DemoStreamAlertTransform",
            event_type="DeviceCommsEvent",
            callback=no_data)

        event_subscriber_bad_data.start()
        event_subscriber_no_data.start()

        self.addCleanup(event_subscriber_bad_data.stop)
        self.addCleanup(event_subscriber_no_data.stop)

        #-------------------------------------------------------------------------------------
        # The configuration for the Stream Alert Transform... set up the event types to listen to
        #-------------------------------------------------------------------------------------
        self.valid_values = [-100, 100]
        self.timer_interval = 5
        self.queue_name = 'a_queue'

        config = {
            'process':{
                'timer_interval': self.timer_interval,
                'queue_name': self.queue_name,
                'variable_name': 'input_voltage',
                'time_field_name': 'preferred_timestamp',
                'valid_values': self.valid_values,
                'timer_origin': 'Interval Timer'
            }
        }

        #-------------------------------------------------------------------------------------
        # Create the process
        #-------------------------------------------------------------------------------------
        pid = TransformPrototypeIntTest.create_process( name= 'DemoStreamAlertTransform',
            module='ion.processes.data.transforms.event_alert_transform',
            class_name='DemoStreamAlertTransform',
            configuration= config)

        self.assertIsNotNone(pid)

        #-------------------------------------------------------------------------------------
        # Publish streams and make assertions about alerts
        #-------------------------------------------------------------------------------------

        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(name='platform_eng_parsed', id_only=True)

        stream_def_id = self.pubsub_management.create_stream_definition('demo_stream', parameter_dictionary_id=pdict_id)
        stream_id, stream_route = self.pubsub_management.create_stream( name='test_demo_alert',
            exchange_point='exch_point_1',
            stream_definition_id=stream_def_id)

        sub_1 = self.pubsub_management.create_subscription(name='sub_1', stream_ids=[stream_id], exchange_points=['exch_point_1'], exchange_name=self.queue_name)
        self.pubsub_management.activate_subscription(sub_1)
        self.exchange_names.append('sub_1')
        self.exchange_points.append('exch_point_1')

        #-------------------------------------------------------------------------------------
        # publish a *GOOD* granule
        #-------------------------------------------------------------------------------------
        self.length = 2
        val = numpy.array([random.uniform(0,50)  for l in xrange(self.length)])
        self._publish_granules(stream_id=stream_id, stream_route=stream_route, number=1, values=val, length=self.length)

        self.assertTrue(queue_bad_data.empty())

        #-------------------------------------------------------------------------------------
        # publish a few *BAD* granules
        #-------------------------------------------------------------------------------------
        self.length = 2
        self.number = 2
        val = numpy.array([random.uniform(110,200)  for l in xrange(self.length)])
        self._publish_granules(stream_id=stream_id, stream_route=stream_route, number=self.number, values=val, length=self.length)

        for i in xrange(self.length * self.number):
            event = queue_bad_data.get(timeout=10)
            self.assertEquals(event.type_, "DeviceStatusEvent")
            self.assertEquals(event.origin, "DemoStreamAlertTransform")
            self.assertEquals(event.state, DeviceStatusType.OUT_OF_RANGE)
            self.assertEquals(event.valid_values, self.valid_values)
            self.assertEquals(event.sub_type, 'input_voltage')
            self.assertIsNotNone(event.value)
            self.assertIsNotNone(event.time_stamp)

        # Ensure that only the bad values generated alert events; the queue should be empty now
        self.assertEquals(queue_bad_data.qsize(), 0)

        #-------------------------------------------------------------------------------------
        # Do not publish any granules for some time. This should generate a DeviceCommsEvent for the communication status
        #-------------------------------------------------------------------------------------
        event = queue_no_data.get(timeout=15)

        self.assertEquals(event.type_, "DeviceCommsEvent")
        self.assertEquals(event.origin, "DemoStreamAlertTransform")
        self.assertEquals(event.state, DeviceCommsType.DATA_DELIVERY_INTERRUPTION)
        self.assertEquals(event.sub_type, 'input_voltage')

        #-------------------------------------------------------------------------------------
        # Empty the queues and repeat tests
        #-------------------------------------------------------------------------------------
        queue_bad_data.queue.clear()
        queue_no_data.queue.clear()

        #-------------------------------------------------------------------------------------
        # publish a *GOOD* granule again
        #-------------------------------------------------------------------------------------
        self.length = 2
        val = numpy.array([random.uniform(0,50)  for l in xrange(self.length)])
        self._publish_granules(stream_id=stream_id, stream_route=stream_route, number=1, values=val, length=self.length)

        self.assertTrue(queue_bad_data.empty())

        #-------------------------------------------------------------------------------------
        # Again do not publish any granules for some time. This should generate a DeviceCommsEvent for the communication status
        #-------------------------------------------------------------------------------------

        event = queue_no_data.get(timeout=20)

        self.assertEquals(event.type_, "DeviceCommsEvent")
        self.assertEquals(event.origin, "DemoStreamAlertTransform")
        self.assertEquals(event.state, DeviceCommsType.DATA_DELIVERY_INTERRUPTION)
        self.assertEquals(event.sub_type, 'input_voltage')

    def _publish_granules(self, stream_id=None, stream_route=None, values=None, number=None, length=None):

        pub = StandaloneStreamPublisher(stream_id, stream_route)

        stream_def = self.pubsub_management.read_stream_definition(stream_id=stream_id)
        stream_def_id = stream_def._id
        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)

        for i in xrange(number):
            rdt['input_voltage'] = values
            rdt['preferred_timestamp'] = numpy.array([random.uniform(0,1000)  for l in xrange(length)])
            g = rdt.to_granule()
            pub.publish(g)
Code example #7
class TransformPrototypeIntTest(IonIntegrationTestCase):
    def setUp(self):
        super(TransformPrototypeIntTest, self).setUp()

        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.rrc = ResourceRegistryServiceClient()
        self.ssclient = SchedulerServiceClient()
        self.event_publisher = EventPublisher()

    def now_utc(self):
        return time.mktime(datetime.datetime.utcnow().timetuple())

    def _create_interval_timer_with_end_time(self, interval_timer_interval=None):
        '''
        A convenience method to set up an interval timer with an end time
        '''
        self.interval_timer_count = 0
        self.interval_timer_received_time = 0
        self.interval_timer_interval = interval_timer_interval

        start_time = self.now_utc()
        self.interval_timer_end_time = start_time + 5

        # Set up the interval timer. The scheduler will publish events with origin "Interval Timer".
        sid = self.ssclient.create_interval_timer(start_time="now" , interval=self.interval_timer_interval,
            end_time=self.interval_timer_end_time,
            event_origin="Interval Timer", event_subtype="")

        self.interval_timer_sent_time = datetime.datetime.utcnow()

        def cleanup_timer(scheduler, schedule_id):
            """
            Do a friendly cancel of the scheduled event.
            If it fails, it's ok.
            """
            try:
                scheduler.cancel_timer(schedule_id)
            except Exception:
                log.warn("Could not cancel the timer")


        self.addCleanup(cleanup_timer, self.ssclient, sid)

        log.debug("Got the id here!! %s" % sid)

        return sid

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_event_processing(self):
        '''
        Test that events are processed by the transforms according to a provided algorithm
        '''


        #-------------------------------------------------------------------------------------
        # Set up the scheduler for an interval timer with an end time
        #-------------------------------------------------------------------------------------
        timer_id = self._create_interval_timer_with_end_time(interval_timer_interval=2)
        self.assertIsNotNone(timer_id)

        #-------------------------------------------------------------------------------------
        # Create an event alert transform....
        # The configuration for the Event Alert Transform... set up the event types to listen to
        #-------------------------------------------------------------------------------------
        configuration = {
                            'process':{
                                'event_type': 'ResourceEvent',
                                'timer_origin': 'Interval Timer',
                                'instrument_origin': 'My_favorite_instrument'
                            }
                        }

        #-------------------------------------------------------------------------------------
        # Create the process
        #-------------------------------------------------------------------------------------
        pid = TransformPrototypeIntTest.create_process(name='event_alert_transform',
            module='ion.processes.data.transforms.event_alert_transform',
            class_name='EventAlertTransform',
            configuration=configuration)

        self.assertIsNotNone(pid)

        #-------------------------------------------------------------------------------------
        # Publish events and make assertions about alerts
        #-------------------------------------------------------------------------------------

        queue = gevent.queue.Queue()

        def event_received(message, headers):
            queue.put(message)

        event_subscriber = EventSubscriber( origin="EventAlertTransform",
                                            event_type="DeviceEvent",
                                            callback=event_received)

        event_subscriber.start()
        self.addCleanup(event_subscriber.stop)

        # Publish events at short intervals; no alert should be raised while the instrument is heard from

        for i in xrange(5):
            self.event_publisher.publish_event(event_type='ExampleDetectableEvent',
                origin="My_favorite_instrument",
                voltage=5,
                telemetry=10,
                temperature=20)
            gevent.sleep(0.1)
            self.assertTrue(queue.empty())



        # Now stay silent for longer than the timer interval (2 seconds), so the
        # transform stops hearing from the instrument and publishes an alert
        gevent.sleep(5)

        #-------------------------------------------------------------------------------------
        # Make assertions about the alert event published by the EventAlertTransform
        #-------------------------------------------------------------------------------------

        event = queue.get(timeout=10)

        log.debug("Alarm event received from the EventAertTransform %s" % event)

        self.assertEquals(event.type_, "DeviceEvent")
        self.assertEquals(event.origin, "EventAlertTransform")

        #------------------------------------------------------------------------------------------------
        # Now clear the event queue being populated by alarm events and publish normally once again
        #------------------------------------------------------------------------------------------------

        queue.queue.clear()

        for i in xrange(5):
            self.event_publisher.publish_event(event_type='ExampleDetectableEvent',
                origin="My_favorite_instrument",
                voltage=5,
                telemetry=10,
                temperature=20)
            gevent.sleep(0.1)
            self.assertTrue(queue.empty())

        log.debug("This completes the requirement that the EventAlertTransform publishes \
                    an alarm event when it does not hear from the instrument for some time.")


    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_stream_processing(self):
        #--------------------------------------------------------------------------------
        # Test that streams are processed by the transforms according to a provided algorithm
        #--------------------------------------------------------------------------------

        # TODO: In this simple implementation we only check whether the stream contains the
        # word PUBLISH and whether VALUE=<number> is present with a number below a threshold.

        # TODO: Later on, more sophisticated algorithms will make this prototype more powerful.

        #-------------------------------------------------------------------------------------
        # Start a subscriber to listen for an alert event from the Stream Alert Transform
        #-------------------------------------------------------------------------------------

        queue = gevent.queue.Queue()

        def event_received(message, headers):
            queue.put(message)

        event_subscriber = EventSubscriber( origin="StreamAlertTransform",
            event_type="DeviceEvent",
            callback=event_received)

        event_subscriber.start()
        self.addCleanup(event_subscriber.stop)

        #-------------------------------------------------------------------------------------
        # The configuration for the Stream Alert Transform... set up the event types to listen to
        #-------------------------------------------------------------------------------------
        config = {
            'process':{
                'queue_name': 'a_queue',
                'value': 10,
                'event_type':'DeviceEvent'
            }
        }

        #-------------------------------------------------------------------------------------
        # Create the process
        #-------------------------------------------------------------------------------------
        pid = TransformPrototypeIntTest.create_process(name='transform_data_process',
            module='ion.processes.data.transforms.event_alert_transform',
            class_name='StreamAlertTransform',
            configuration=config)

        self.assertIsNotNone(pid)

        #-------------------------------------------------------------------------------------
        # Publish streams and make assertions about alerts
        #-------------------------------------------------------------------------------------
        exchange_name = 'a_queue'
        exchange_point = 'test_exchange'
        routing_key = 'stream_id.stream'
        stream_route = StreamRoute(exchange_point, routing_key)

        xn = self.container.ex_manager.create_xn_queue(exchange_name)
        xp = self.container.ex_manager.create_xp(exchange_point)
        xn.bind('stream_id.stream', xp)

        pub = StandaloneStreamPublisher('stream_id', stream_route)

        message = "A dummy example message containing the word PUBLISH, and with VALUE = 5 . This message" + \
                    " will trigger an alert event from the StreamAlertTransform because the value provided is " \
                    "less than 10 that was passed in through the config."

        pub.publish(message)

        event = queue.get(timeout=10)
        self.assertEquals(event.type_, "DeviceEvent")
        self.assertEquals(event.origin, "StreamAlertTransform")

        self.purge_queues(exchange_name)

    def purge_queues(self, exchange_name):
        xn = self.container.ex_manager.create_xn_queue(exchange_name)
        xn.purge()

    @staticmethod
    def create_process(name='', module='', class_name='', configuration=None):
        '''
        A helper method to create and schedule a process
        '''

        producer_definition = ProcessDefinition(name=name)
        producer_definition.executable = {
            'module': module,
            'class': class_name
        }

        process_dispatcher = ProcessDispatcherServiceClient()

        procdef_id = process_dispatcher.create_process_definition(process_definition=producer_definition)
        pid = process_dispatcher.schedule_process(process_definition_id=procdef_id, configuration=configuration)

        return pid
Code example #8
class TransformPrototypeIntTest(IonIntegrationTestCase):
    def setUp(self):
        super(TransformPrototypeIntTest, self).setUp()

        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.rrc = ResourceRegistryServiceClient()
        self.dataset_management = DatasetManagementServiceClient()
        self.pubsub_management = PubsubManagementServiceClient()
        self.ssclient = SchedulerServiceClient()
        self.event_publisher = EventPublisher()
        self.user_notification = UserNotificationServiceClient()
        self.process_dispatcher = ProcessDispatcherServiceClient()

        self.exchange_names = []
        self.exchange_points = []

    def tearDown(self):

        for xn in self.exchange_names:
            xni = self.container.ex_manager.create_xn_queue(xn)
            xni.delete()
        for xp in self.exchange_points:
            xpi = self.container.ex_manager.create_xp(xp)
            xpi.delete()

    def now_utc(self):
        return time.time()

    def _create_interval_timer_with_end_time(self, timer_interval=None, end_time=None):
        '''
        A convenience method to set up an interval timer with an end time
        '''
        self.timer_received_time = 0
        self.timer_interval = timer_interval

        start_time = self.now_utc()
        if not end_time:
            end_time = start_time + 2 * timer_interval + 1

        log.debug("got the end time here!! %s" % end_time)

        # Set up the interval timer. The scheduler will publish event with origin set as "Interval Timer"
        sid = self.ssclient.create_interval_timer(start_time="now" ,
            interval=self.timer_interval,
            end_time=end_time,
            event_origin="Interval Timer",
            event_subtype="")

        def cleanup_timer(scheduler, schedule_id):
            """
            Do a friendly cancel of the scheduled event.
            If it fails, it's ok.
            """
            try:
                scheduler.cancel_timer(schedule_id)
            except Exception:
                log.warn("Could not cancel the timer")

        self.addCleanup(cleanup_timer, self.ssclient, sid)

        return sid

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_event_processing(self):
        '''
        Test that events are processed by the transforms according to a provided algorithm
        '''


        #-------------------------------------------------------------------------------------
        # Set up the scheduler for an interval timer with an end time
        #-------------------------------------------------------------------------------------
        timer_id = self._create_interval_timer_with_end_time(timer_interval=2)
        self.assertIsNotNone(timer_id)

        #-------------------------------------------------------------------------------------
        # Create an event alert transform....
        # The configuration for the Event Alert Transform... set up the event types to listen to
        #-------------------------------------------------------------------------------------
        configuration = {
            'process':{
                'event_type': 'ResourceEvent',
                'timer_origin': 'Interval Timer',
                'instrument_origin': 'My_favorite_instrument'
            }
        }

        #-------------------------------------------------------------------------------------
        # Create the process
        #-------------------------------------------------------------------------------------
        pid = TransformPrototypeIntTest.create_process(name='event_alert_transform',
            module='ion.processes.data.transforms.event_alert_transform',
            class_name='EventAlertTransform',
            configuration=configuration)
        self.addCleanup(self.process_dispatcher.cancel_process, pid)
        self.assertIsNotNone(pid)

        #-------------------------------------------------------------------------------------
        # Publish events and make assertions about alerts
        #-------------------------------------------------------------------------------------

        queue = gevent.queue.Queue()

        def event_received(message, headers):
            queue.put(message)

        event_subscriber = EventSubscriber( origin="EventAlertTransform",
            event_type="DeviceEvent",
            callback=event_received)

        event_subscriber.start()
        self.addCleanup(event_subscriber.stop)

        # Publish events at short intervals; no alert should be raised while the instrument is heard from

        for i in xrange(5):
            self.event_publisher.publish_event(event_type='ExampleDetectableEvent',
                origin="My_favorite_instrument",
                voltage=5,
                telemetry=10,
                temperature=20)
            gevent.sleep(0.1)
            self.assertTrue(queue.empty())



        # Now stay silent for longer than the timer interval (2 seconds), so the
        # transform stops hearing from the instrument and publishes an alert
        gevent.sleep(5)

        #-------------------------------------------------------------------------------------
        # Make assertions about the alert event published by the EventAlertTransform
        #-------------------------------------------------------------------------------------

        event = queue.get(timeout=10)

        log.debug("Alarm event received from the EventAertTransform %s" % event)

        self.assertEquals(event.type_, "DeviceEvent")
        self.assertEquals(event.origin, "EventAlertTransform")

        #------------------------------------------------------------------------------------------------
        # Now clear the event queue being populated by alarm events and publish normally once again
        #------------------------------------------------------------------------------------------------

        queue.queue.clear()

        for i in xrange(5):
            self.event_publisher.publish_event(event_type='ExampleDetectableEvent',
                origin="My_favorite_instrument",
                voltage=5,
                telemetry=10,
                temperature=20)
            gevent.sleep(0.1)
            self.assertTrue(queue.empty())

        log.debug("This completes the requirement that the EventAlertTransform publishes \
                    an alarm event when it does not hear from the instrument for some time.")


    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_stream_processing(self):
        #--------------------------------------------------------------------------------
        # Test that streams are processed by the transforms according to a provided algorithm
        #--------------------------------------------------------------------------------

        # TODO: In this simple implementation we only check whether the stream contains the
        # word PUBLISH and whether VALUE=<number> is present with a number below a threshold.

        # TODO: Later on, more sophisticated algorithms will make this prototype more powerful.

        #-------------------------------------------------------------------------------------
        # Start a subscriber to listen for an alert event from the Stream Alert Transform
        #-------------------------------------------------------------------------------------

        queue = gevent.queue.Queue()

        def event_received(message, headers):
            queue.put(message)

        event_subscriber = EventSubscriber( origin="StreamAlertTransform",
            event_type="DeviceEvent",
            callback=event_received)

        event_subscriber.start()
        self.addCleanup(event_subscriber.stop)

        #-------------------------------------------------------------------------------------
        # The configuration for the Stream Alert Transform... set up the event types to listen to
        #-------------------------------------------------------------------------------------
        config = {
            'process':{
                'queue_name': 'a_queue',
                'value': 10,
                'event_type':'DeviceEvent'
            }
        }

        #-------------------------------------------------------------------------------------
        # Create the process
        #-------------------------------------------------------------------------------------
        pid = TransformPrototypeIntTest.create_process(name='transform_data_process',
            module='ion.processes.data.transforms.event_alert_transform',
            class_name='StreamAlertTransform',
            configuration=config)
        self.addCleanup(self.process_dispatcher.cancel_process, pid)
        self.assertIsNotNone(pid)

        #-------------------------------------------------------------------------------------
        # Publish streams and make assertions about alerts
        #-------------------------------------------------------------------------------------
        exchange_name = 'a_queue'
        exchange_point = 'test_exchange'
        routing_key = 'stream_id.stream'
        stream_route = StreamRoute(exchange_point, routing_key)

        xn = self.container.ex_manager.create_xn_queue(exchange_name)
        xp = self.container.ex_manager.create_xp(exchange_point)
        xn.bind('stream_id.stream', xp)

        pub = StandaloneStreamPublisher('stream_id', stream_route)

        message = "A dummy example message containing the word PUBLISH, and with VALUE = 5 . This message" +\
                  " will trigger an alert event from the StreamAlertTransform because the value provided is "\
                  "less than 10 that was passed in through the config."

        pub.publish(message)

        event = queue.get(timeout=10)
        self.assertEquals(event.type_, "DeviceEvent")
        self.assertEquals(event.origin, "StreamAlertTransform")

    #        self.purge_queues(exchange_name)

    #    def purge_queues(self, exchange_name):
    #        xn = self.container.ex_manager.create_xn_queue(exchange_name)
    #        xn.purge()

    @staticmethod
    def create_process(name='', module='', class_name='', configuration=None):
        '''
        A helper method to create and schedule a process
        '''

        producer_definition = ProcessDefinition(name=name)
        producer_definition.executable = {
            'module': module,
            'class': class_name
        }

        process_dispatcher = ProcessDispatcherServiceClient()

        procdef_id = process_dispatcher.create_process_definition(process_definition=producer_definition)
        pid = process_dispatcher.schedule_process(process_definition_id=procdef_id, configuration=configuration)

        return pid

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_demo_stream_granules_processing(self):
        """
        Test that the Demo Stream Alert Transform is functioning. The transform coordinates with the scheduler.
        It is configured to listen to a source that publishes granules. It publishes a DeviceStatusEvent if it
        receives a granule with bad data or a DeviceCommsEvent if no granule has arrived between two timer events.

        The transform is configured at launch using a config dictionary.
        """
        #-------------------------------------------------------------------------------------
        # Start a subscriber to listen for an alert event from the Stream Alert Transform
        #-------------------------------------------------------------------------------------

        queue_bad_data = gevent.queue.Queue()
        queue_no_data = gevent.queue.Queue()

        def bad_data(message, headers):
            log.debug("Got a BAD data event: %s" % message)
            if message.type_ == "DeviceStatusEvent":
                queue_bad_data.put(message)

        def no_data(message, headers):
            log.debug("Got a NO data event: %s" % message)
            queue_no_data.put(message)

        event_subscriber_bad_data = EventSubscriber( origin="instrument_1",
            event_type="DeviceStatusEvent",
            callback=bad_data)

        event_subscriber_no_data = EventSubscriber( origin="instrument_1",
            event_type="DeviceCommsEvent",
            callback=no_data)

        event_subscriber_bad_data.start()
        event_subscriber_no_data.start()

        self.addCleanup(event_subscriber_bad_data.stop)
        self.addCleanup(event_subscriber_no_data.stop)

        #-------------------------------------------------------------------------------------
        # The configuration for the Stream Alert Transform... set up the event types to listen to
        #-------------------------------------------------------------------------------------
        self.valid_values = [-100, 100]
        self.timer_interval = 5
        self.queue_name = 'a_queue'

        config = {
            'process':{
                'timer_interval': self.timer_interval,
                'queue_name': self.queue_name,
                'variable_name': 'input_voltage',
                'time_field_name': 'preferred_timestamp',
                'valid_values': self.valid_values,
                'timer_origin': 'Interval Timer',
                'event_origin': 'instrument_1'
            }
        }

        #-------------------------------------------------------------------------------------
        # Create the process
        #-------------------------------------------------------------------------------------
        pid = TransformPrototypeIntTest.create_process(name='DemoStreamAlertTransform',
            module='ion.processes.data.transforms.event_alert_transform',
            class_name='DemoStreamAlertTransform',
            configuration=config)
        self.addCleanup(self.process_dispatcher.cancel_process, pid)
        self.assertIsNotNone(pid)

        #-------------------------------------------------------------------------------------
        # Publish streams and make assertions about alerts
        #-------------------------------------------------------------------------------------

        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(name='platform_eng_parsed', id_only=True)

        stream_def_id = self.pubsub_management.create_stream_definition('demo_stream', parameter_dictionary_id=pdict_id)
        stream_id, stream_route = self.pubsub_management.create_stream( name='test_demo_alert',
            exchange_point='exch_point_1',
            stream_definition_id=stream_def_id)

        sub_1 = self.pubsub_management.create_subscription(name='sub_1', stream_ids=[stream_id], exchange_points=['exch_point_1'], exchange_name=self.queue_name)
        self.pubsub_management.activate_subscription(sub_1)
        self.exchange_names.append('sub_1')
        self.exchange_points.append('exch_point_1')

        #-------------------------------------------------------------------------------------
        # publish a *GOOD* granule
        #-------------------------------------------------------------------------------------
        self.length = 2
        val = numpy.array([random.uniform(0,50)  for l in xrange(self.length)])
        self._publish_granules(stream_id=stream_id, stream_route=stream_route, number=1, values=val)

        self.assertTrue(queue_bad_data.empty())

        #-------------------------------------------------------------------------------------
        # publish a few *BAD* granules
        #-------------------------------------------------------------------------------------
        self.number = 2
        val = numpy.array([(110 + l)  for l in xrange(self.length)])
        self._publish_granules(stream_id=stream_id, stream_route=stream_route, number=self.number, values=val)

        for number in xrange(self.number):
            event = queue_bad_data.get(timeout=40)
            self.assertEquals(event.type_, "DeviceStatusEvent")
            self.assertEquals(event.origin, "instrument_1")
            self.assertEquals(event.state, DeviceStatusType.STATUS_WARNING)
            self.assertEquals(event.valid_values, self.valid_values)
            self.assertEquals(event.sub_type, 'input_voltage')
            self.assertTrue(set(event.values) ==  set(val))

            s = set(event.time_stamps)
            cond = s in [set(numpy.array([1  for l in xrange(self.length)]).tolist()), set(numpy.array([2  for l in xrange(self.length)]).tolist())]
            self.assertTrue(cond)

        # Ensure that only the bad values generated alert events; the queue should be empty now
        self.assertEquals(queue_bad_data.qsize(), 0)

        #-------------------------------------------------------------------------------------
        # Do not publish any granules for some time. This should generate a DeviceCommsEvent for the communication status
        #-------------------------------------------------------------------------------------
        event = queue_no_data.get(timeout=15)

        self.assertEquals(event.type_, "DeviceCommsEvent")
        self.assertEquals(event.origin, "instrument_1")
        self.assertEquals(event.origin_type, "PlatformDevice")
        self.assertEquals(event.state, DeviceCommsType.DATA_DELIVERY_INTERRUPTION)
        self.assertEquals(event.sub_type, 'input_voltage')

        #-------------------------------------------------------------------------------------
        # Empty the queues and repeat tests
        #-------------------------------------------------------------------------------------
        queue_bad_data.queue.clear()
        queue_no_data.queue.clear()

        #-------------------------------------------------------------------------------------
        # publish a *GOOD* granule again
        #-------------------------------------------------------------------------------------
        val = numpy.array([(l + 20)  for l in xrange(self.length)])
        self._publish_granules(stream_id=stream_id, stream_route=stream_route, number=1, values=val)

        self.assertTrue(queue_bad_data.empty())

        #-------------------------------------------------------------------------------------
        # Again do not publish any granules for some time. This should generate a DeviceCommsEvent for the communication status
        #-------------------------------------------------------------------------------------

        event = queue_no_data.get(timeout=20)

        self.assertEquals(event.type_, "DeviceCommsEvent")
        self.assertEquals(event.origin, "instrument_1")
        self.assertEquals(event.origin_type, "PlatformDevice")
        self.assertEquals(event.state, DeviceCommsType.DATA_DELIVERY_INTERRUPTION)
        self.assertEquals(event.sub_type, 'input_voltage')

        #-------------------------------------------------------------------------------------
        # Poll the datastore until the events published above have been persisted, then verify them
        #-------------------------------------------------------------------------------------

        ar = gevent.event.AsyncResult()
        def poller(ar, method, *args):
            events_in_db = method(*args)
            if len(events_in_db) > 0:
                ar.set(events_in_db)
                return True
            else:
                return False

        poll(poller, ar, self.user_notification.find_events, 'instrument_1')

#        events_in_db = self.user_notification.find_events(origin='instrument_1')

        events_in_db = ar.get(10)
        log.debug("events::: %s" % events_in_db)

        bad_data_events = []
        no_data_events = []

        for event in events_in_db:
            if event.type_ == 'DeviceStatusEvent':
                bad_data_events.append(event)
                self.assertEquals(event.origin, "instrument_1")
                self.assertEquals(event.status, DeviceStatusType.STATUS_WARNING)
                self.assertEquals(event.valid_values, self.valid_values)
                self.assertEquals(event.sub_type, 'input_voltage')
            elif event.type_ == 'DeviceCommsEvent':
                no_data_events.append(event)
                self.assertEquals(event.origin, "instrument_1")
                self.assertEquals(event.origin_type, "PlatformDevice")
                self.assertEquals(event.status, DeviceCommsType.DATA_DELIVERY_INTERRUPTION)
                self.assertEquals(event.sub_type, 'input_voltage')

        self.assertTrue(len(bad_data_events) > 0)
        self.assertTrue(len(no_data_events) > 0)

        log.debug("This satisfies L4-CI-SA-RQ-114 : 'Marine facility shall monitor marine infrastructure usage by instruments.'"
                  " The req is satisfied because the stream alert transform"
                  "is able to send device status and communication events over selected time intervals. This capability will be "
                  "augmented in the future.")

    def _publish_granules(self, stream_id=None, stream_route=None, values=None, number=None):

        pub = StandaloneStreamPublisher(stream_id, stream_route)

        stream_def = self.pubsub_management.read_stream_definition(stream_id=stream_id)
        stream_def_id = stream_def._id
        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)

        times = numpy.array([number  for l in xrange(self.length)])

        for i in xrange(number):
            rdt['input_voltage'] = values
            rdt['preferred_timestamp'] = ['time' for l in xrange(len(times))]
            rdt['time'] = times

            g = rdt.to_granule()
            g.data_producer_id = 'instrument_1'

            log.debug("granule #%s published by instrument:: %s" % ( number,g))

            pub.publish(g)

    @staticmethod
    def makeEpochTime(date_time = None):
        """
        provides the seconds since epoch give a python datetime object.

        @param date_time Python datetime object
        @retval seconds_since_epoch int
        """
        seconds_since_epoch = calendar.timegm(date_time.timetuple())

        return seconds_since_epoch
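        # Example: the start of the Unix epoch maps to zero.
        #
        #     >>> TransformPrototypeIntTest.makeEpochTime(datetime.datetime(1970, 1, 1))
        #     0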
Code example #9
class TestQCPostProcessing(DMTestCase):
    '''
    ion/processes/data/transforms/test/test_qc_post_processing.py:TestQCPostProcessing
    '''
    def setUp(self):
        DMTestCase.setUp(self)

        process_definition = ProcessDefinition(name='qc_post_processor',
                executable={'module':'ion.processes.data.transforms.qc_post_processing', 'class':'QCPostProcessing'})
        self.process_definition_id = self.process_dispatcher.create_process_definition(process_definition)
        self.addCleanup(self.process_dispatcher.delete_process_definition, self.process_definition_id)

        self.process_id = self.process_dispatcher.create_process(self.process_definition_id)
        self.scheduler_service = SchedulerServiceClient()


    def populate_qc_tables(self):
        svm = StoredValueManager(self.container)
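        # The keys appear to follow the convention <qc_test>_<reference_designator>_<parameter>;
        # 'QCTEST' matches the reference_designator used in make_data_product below.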
        svm.stored_value_cas('grt_QCTEST_TEMPWAT', {'grt_min_value':-2., 'grt_max_value':40.})
        svm.stored_value_cas('svt_QCTEST_TEMPWAT', {'svt_resolution':0.001, 'svt_n': 4})
        svm.stored_value_cas('spike_QCTEST_TEMPWAT', {'acc': 0.1, 'spike_n':5, 'spike_l':5})
    
    def sync_launch(self, config):
        self.process_dispatcher.schedule_process(self.process_definition_id, process_id=self.process_id, configuration=config)

        gate = ProcessStateGate(self.process_dispatcher.read_process,
                self.process_id,
                ProcessStateEnum.RUNNING)
        self.assertTrue(gate.await(30))
        self.addCleanup(self.process_dispatcher.cancel_process, self.process_id)
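    # A minimal usage sketch (mirroring process_execution below):
    #
    #     config = DotDict()
    #     config.process.interval_key = interval_key
    #     config.process.qc_params = ['glblrng_qc']
    #     self.sync_launch(config)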

    def make_data_product(self):
        ph = ParameterHelper(self.dataset_management, self.addCleanup)
        pdict_id = ph.create_simple_qc_pdict()

        stream_def_id = self.create_stream_definition('global range', parameter_dictionary_id=pdict_id, stream_configuration={'reference_designator':'QCTEST'})

        self.populate_qc_tables()

        dp_id = self.create_data_product('qc data product', stream_def_id=stream_def_id)
        self.activate_data_product(dp_id)
        dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(dp_id)
        return dp_id, dataset_id, stream_def_id

    def make_large_dataset(self, temp_vector):

        monitor_queue = Queue()
        # Make 3 hours of data (three one-hour granules)
        ph = ParameterHelper(self.dataset_management, self.addCleanup)
        data_product_id, dataset_id, stream_def_id = self.make_data_product()
        es = EventSubscriber(event_type=OT.DatasetModified, origin=dataset_id, auto_delete=True, callback = lambda *args, **kwargs : monitor_queue.put(1))
        es.start()
        self.addCleanup(es.stop)
        for rdt in self.populate_vectors(stream_def_id, 3, temp_vector):
            ph.publish_rdt_to_data_product(data_product_id, rdt)

        try:
            for i in xrange(3):
                monitor_queue.get(timeout=10)
        except Empty:
            raise AssertionError('Failed to populate dataset in time')

        return data_product_id

    def populate_vectors(self, stream_def_id, hours, temp_vector):
        now = time.time()
        ntp_now = now + 2208988800  # offset between the NTP epoch (1900) and the Unix epoch (1970)

        for i in xrange(hours):
            rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
            st = ntp_now - (3600 * (hours-i))
            et = ntp_now - (3600 * (hours - (i+1)))
            rdt['time'] = np.arange(st, et)
            rdt['temp'] = temp_vector(3600)
            yield rdt

    
    def process_execution(self, temp_vector, qc_params, bad_times):
        interval_key = uuid4().hex
        data_product_id = self.make_large_dataset(temp_vector)
        async_queue = Queue()

        def cb(event, *args, **kwargs):
            if '_'.join(event.qc_parameter.split('_')[1:]) not in qc_params:
                # Not a QC parameter under test; ignore this event
                return
            times = event.temporal_values
            self.assertEquals(len(times), bad_times)
            async_queue.put(1)


        es = EventSubscriber(event_type=OT.ParameterQCEvent, origin=data_product_id, callback=cb, auto_delete=True)
        es.start()
        self.addCleanup(es.stop)
        config = DotDict()
        config.process.interval_key = interval_key
        config.process.qc_params = qc_params
        self.sync_launch(config)

        # The process is now running; publish a TimerEvent to trigger one QC pass
        ep = EventPublisher(event_type='TimerEvent')
        ep.publish_event(origin=interval_key)

        try:
            async_queue.get(timeout=120)
        except Empty:
            raise AssertionError('QC was not flagged in time')

    def test_glblrng_qc_processing(self):
        def temp_vector(size):
            return [41] + [39]*(size-1)

        self.process_execution(temp_vector, ['glblrng_qc'], 1)

    def test_stuckvl_qc_processing(self):
        def temp_vector(size):
            assert size > 7
            return [20] * 6 + range(size-6)

        self.process_execution(temp_vector, ['stuckvl_qc'], 6)

    def test_spketst_qc_processing(self):
        def temp_vector(size):
            assert size > 8
            return [-1, 3, 40, -1, 1, -6, -6, 1] + [5] * (size-8)

        self.process_execution(temp_vector, ['spketst_qc'], 1)


    
    def test_qc_interval_integration(self):

        # 1. Make a dataset that has exactly one discrete QC violation
        # 2. Launch the process
        # 3. Set up the scheduler to trigger it a few times
        # 4. Get the events and verify the data

        #--------------------------------------------------------------------------------
        # Make a dataset that has only one discrete QC violation
        #--------------------------------------------------------------------------------

        dp_id, dataset_id, stream_def_id = self.make_data_product()
        ph = ParameterHelper(self.dataset_management, self.addCleanup)
        monitor = DatasetMonitor(dataset_id)
        self.addCleanup(monitor.stop)
        for rdt in self.populate_vectors(stream_def_id, 1, lambda x : [41] + [39] * (x-1)):
            ph.publish_rdt_to_data_product(dp_id, rdt)
        self.assertTrue(monitor.event.wait(10)) # Give it 10 seconds to populate


        #--------------------------------------------------------------------------------
        # Launch the process
        #--------------------------------------------------------------------------------

        interval_key = uuid4().hex
        config = DotDict()
        config.process.interval_key = interval_key
        config.process.qc_params = ['glblrng_qc'] # The others are tested in other tests for completeness
        self.sync_launch(config)

        async_queue = Queue()
        def callback(event, *args, **kwargs):
            times = event.temporal_values
            self.assertEquals(len(times), 1)
            async_queue.put(1)
        es = EventSubscriber(event_type=OT.ParameterQCEvent, origin=dp_id, callback=callback, auto_delete=True)
        es.start()
        self.addCleanup(es.stop)

        #--------------------------------------------------------------------------------
        # Setup the scheduler
        #--------------------------------------------------------------------------------


        timer_id = self.scheduler_service.create_interval_timer(start_time=time.time(),
                end_time=time.time()+13,
                interval=5,
                event_origin=interval_key)


        #--------------------------------------------------------------------------------
        # Get the events and verify them
        #--------------------------------------------------------------------------------

        try:
            for i in xrange(2):  # expect at least two timer-driven QC events
                async_queue.get(timeout=10)
        except Empty:
            raise AssertionError('QC Events not raised')
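
The tests above hinge on a simple contract: the QC post-processor listens for TimerEvents whose origin matches the interval_key passed in its configuration. A test can therefore drive it either by faking the timer (publishing the event directly, as process_execution does) or by creating a real interval timer (as test_qc_interval_integration does). A minimal sketch of both paths, assuming a running container and the same imports as the examples above; the 20-second window is an arbitrary choice:

from uuid import uuid4
import time

interval_key = uuid4().hex

# Option 1: fake the scheduler by publishing the TimerEvent directly.
EventPublisher(event_type='TimerEvent').publish_event(origin=interval_key)

# Option 2: let the scheduler service publish TimerEvents every
# 5 seconds until end_time is reached.
scheduler = SchedulerServiceClient()
timer_id = scheduler.create_interval_timer(start_time=time.time(),
        end_time=time.time() + 20,
        interval=5,
        event_origin=interval_key)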
コード例 #10
class TestQCPostProcessing(DMTestCase):
    '''
    ion/processes/data/transforms/test/test_qc_post_processing.py:TestQCPostProcessing
    '''
    def setUp(self):
        DMTestCase.setUp(self)

        process_definition = ProcessDefinition(
            name='qc_post_processor',
            executable={
                'module': 'ion.processes.data.transforms.qc_post_processing',
                'class': 'QCPostProcessing'
            })
        self.process_definition_id = self.process_dispatcher.create_process_definition(
            process_definition)
        self.addCleanup(self.process_dispatcher.delete_process_definition,
                        self.process_definition_id)

        self.process_id = self.process_dispatcher.create_process(
            self.process_definition_id)
        self.scheduler_service = SchedulerServiceClient()

    def populate_qc_tables(self):
        # Seed the stored-value lookup tables consumed by the QC algorithms:
        # global range (grt), stuck value (svt), and spike (spike)
        svm = StoredValueManager(self.container)
        svm.stored_value_cas('grt_QCTEST_TEMPWAT', {
            'grt_min_value': -2.,
            'grt_max_value': 40.
        })
        svm.stored_value_cas('svt_QCTEST_TEMPWAT', {
            'svt_resolution': 0.001,
            'svt_n': 4
        })
        svm.stored_value_cas('spike_QCTEST_TEMPWAT', {
            'acc': 0.1,
            'spike_n': 5,
            'spike_l': 5
        })

    def sync_launch(self, config):
        self.process_dispatcher.schedule_process(self.process_definition_id,
                                                 process_id=self.process_id,
                                                 configuration=config)

        gate = ProcessStateGate(self.process_dispatcher.read_process,
                                self.process_id, ProcessStateEnum.RUNNING)
        self.assertTrue(gate.await(30))
        self.addCleanup(self.process_dispatcher.cancel_process,
                        self.process_id)

    def make_data_product(self):
        ph = ParameterHelper(self.dataset_management, self.addCleanup)
        pdict_id = ph.create_simple_qc_pdict()

        stream_def_id = self.create_stream_definition(
            'global range',
            parameter_dictionary_id=pdict_id,
            stream_configuration={'reference_designator': 'QCTEST'})

        self.populate_qc_tables()

        dp_id = self.create_data_product('qc data product',
                                         stream_def_id=stream_def_id)
        self.activate_data_product(dp_id)
        dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(
            dp_id)
        return dp_id, dataset_id, stream_def_id

    def make_large_dataset(self, temp_vector):

        monitor_queue = Queue()
        # Make 3 hours of data (three one-hour granules)
        ph = ParameterHelper(self.dataset_management, self.addCleanup)
        data_product_id, dataset_id, stream_def_id = self.make_data_product()
        es = EventSubscriber(
            event_type=OT.DatasetModified,
            origin=dataset_id,
            auto_delete=True,
            callback=lambda *args, **kwargs: monitor_queue.put(1))
        es.start()
        self.addCleanup(es.stop)
        for rdt in self.populate_vectors(stream_def_id, 3, temp_vector):
            ph.publish_rdt_to_data_product(data_product_id, rdt)

        try:
            for i in xrange(3):
                monitor_queue.get(timeout=10)
        except Empty:
            raise AssertionError('Failed to populate dataset in time')

        return data_product_id

    def populate_vectors(self, stream_def_id, hours, temp_vector):
        now = time.time()
        ntp_now = now + 2208988800  # offset between the NTP epoch (1900) and the Unix epoch (1970)

        for i in xrange(hours):
            rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
            st = ntp_now - (3600 * (hours - i))
            et = ntp_now - (3600 * (hours - (i + 1)))
            rdt['time'] = np.arange(st, et)
            rdt['temp'] = temp_vector(3600)
            yield rdt

    def process_execution(self, temp_vector, qc_params, bad_times):
        interval_key = uuid4().hex
        data_product_id = self.make_large_dataset(temp_vector)
        async_queue = Queue()

        def cb(event, *args, **kwargs):
            if '_'.join(event.qc_parameter.split('_')[1:]) not in qc_params:
                # Not a QC parameter under test; ignore this event
                return
            times = event.temporal_values
            self.assertEquals(len(times), bad_times)
            async_queue.put(1)

        es = EventSubscriber(event_type=OT.ParameterQCEvent,
                             origin=data_product_id,
                             callback=cb,
                             auto_delete=True)
        es.start()
        self.addCleanup(es.stop)
        config = DotDict()
        config.process.interval_key = interval_key
        config.process.qc_params = qc_params
        self.sync_launch(config)

        # The process is now running; publish a TimerEvent to trigger one QC pass
        ep = EventPublisher(event_type='TimerEvent')
        ep.publish_event(origin=interval_key)

        try:
            async_queue.get(timeout=120)
        except Empty:
            raise AssertionError('QC was not flagged in time')

    def test_glblrng_qc_processing(self):
        def temp_vector(size):
            return [41] + [39] * (size - 1)

        self.process_execution(temp_vector, ['glblrng_qc'], 1)

    def test_stuckvl_qc_processing(self):
        def temp_vector(size):
            assert size > 7
            return [20] * 6 + range(size - 6)

        self.process_execution(temp_vector, ['stuckvl_qc'], 6)

    def test_spketst_qc_processing(self):
        def temp_vector(size):
            assert size > 8
            return [-1, 3, 40, -1, 1, -6, -6, 1] + [5] * (size - 8)

        self.process_execution(temp_vector, ['spketst_qc'], 1)

    def test_qc_interval_integration(self):

        # 1. Make a dataset that has exactly one discrete QC violation
        # 2. Launch the process
        # 3. Set up the scheduler to trigger it a few times
        # 4. Get the events and verify the data

        #--------------------------------------------------------------------------------
        # Make a dataset that has only one discrete QC violation
        #--------------------------------------------------------------------------------

        dp_id, dataset_id, stream_def_id = self.make_data_product()
        ph = ParameterHelper(self.dataset_management, self.addCleanup)
        monitor = DatasetMonitor(dataset_id)
        self.addCleanup(monitor.stop)
        for rdt in self.populate_vectors(stream_def_id, 1,
                                         lambda x: [41] + [39] * (x - 1)):
            ph.publish_rdt_to_data_product(dp_id, rdt)
        self.assertTrue(
            monitor.event.wait(10))  # Give it 10 seconds to populate

        #--------------------------------------------------------------------------------
        # Launch the process
        #--------------------------------------------------------------------------------

        interval_key = uuid4().hex
        config = DotDict()
        config.process.interval_key = interval_key
        config.process.qc_params = [
            'glblrng_qc'
        ]  # The others are tested in other tests for completeness
        self.sync_launch(config)

        async_queue = Queue()

        def callback(event, *args, **kwargs):
            times = event.temporal_values
            self.assertEquals(len(times), 1)
            async_queue.put(1)

        es = EventSubscriber(event_type=OT.ParameterQCEvent,
                             origin=dp_id,
                             callback=callback,
                             auto_delete=True)
        es.start()
        self.addCleanup(es.stop)

        #--------------------------------------------------------------------------------
        # Setup the scheduler
        #--------------------------------------------------------------------------------

        timer_id = self.scheduler_service.create_interval_timer(
            start_time=time.time(),
            end_time=time.time() + 13,
            interval=5,
            event_origin=interval_key)

        #--------------------------------------------------------------------------------
        # Get the events and verify them
        #--------------------------------------------------------------------------------

        try:
            for i in xrange(2):  # expect at least two timer-driven QC events
                async_queue.get(timeout=10)
        except Empty:
            raise AssertionError('QC Events not raised')
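
The expected flag counts in these tests follow from the lookup tables seeded in populate_qc_tables: the global range test flags samples outside [-2.0, 40.0] (one sample in [41] + [39]*(size-1)), the stuck-value test flags runs of at least svt_n identical values (the six leading 20s), and the spike test flags isolated excursions (presumably the 40 in the spike vector). A simplified, pure-numpy illustration of the global range check (not the QCPostProcessing implementation) shows why test_glblrng_qc_processing expects exactly one bad time:

import numpy as np

def global_range_flags(values, grt_min_value=-2.0, grt_max_value=40.0):
    # Flag every sample outside the inclusive [min, max] window,
    # mirroring the grt_QCTEST_TEMPWAT table seeded above.
    data = np.asarray(values, dtype=float)
    return (data < grt_min_value) | (data > grt_max_value)

temp = [41] + [39] * 9           # same shape as temp_vector(10)
assert global_range_flags(temp).sum() == 1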