Example #1
    def test_pub_on_different_subtypes(self):
        ar = event.AsyncResult()
        gq = queue.Queue()
        self.count = 0

        def cb(event, *args, **kwargs):
            self.count += 1
            gq.put(event)
            if event.description == "end":
                ar.set()

        sub = EventSubscriber(event_type="ResourceModifiedEvent",
                              sub_type="st1",
                              callback=cb)
        sub.activate()

        pub1 = EventPublisher(event_type="ResourceModifiedEvent")
        pub2 = EventPublisher(event_type="ContainerLifecycleEvent")

        pub1.publish_event(origin="two", sub_type="st2", description="2")
        pub2.publish_event(origin="three", sub_type="st1", description="3")
        pub1.publish_event(origin="one", sub_type="st1", description="1")
        pub1.publish_event(origin="four", sub_type="st1", description="end")

        ar.get(timeout=5)
        sub.deactivate()

        res = []
        for x in xrange(self.count):
            res.append(gq.get(timeout=5))

        self.assertEquals(len(res), 2)
        self.assertEquals(res[0].description, "1")
Example #2
    def test_base_subscriber_as_catchall(self):
        ar = event.AsyncResult()
        gq = queue.Queue()
        self.count = 0

        def cb(*args, **kwargs):
            self.count += 1
            gq.put(args[0])
            if self.count == 2:
                ar.set()

        sub = EventSubscriber(callback=cb)
        pub1 = EventPublisher(event_type="ResourceEvent")
        pub2 = EventPublisher(event_type="ContainerLifecycleEvent")

        self._listen(sub)

        pub1.publish_event(origin="some", description="1")
        pub2.publish_event(origin="other", description="2")

        ar.get(timeout=5)

        res = []
        for x in xrange(self.count):
            res.append(gq.get(timeout=5))

        self.assertEquals(len(res), 2)
        self.assertEquals(res[0].description, "1")
        self.assertEquals(res[1].description, "2")
Example #3
    def _acquire_sample(cls, config, publisher, unlock_new_data_callback,
                        update_new_data_check_attachment):
        """
        Ensures required keys (such as stream_id) are available from config, configures the publisher and then calls:
             BaseDataHandler._constraints_for_new_request (only if config does not contain 'constraints')
             BaseDataHandler._publish_data passing BaseDataHandler._get_data as a parameter
        @param config Dict containing configuration parameters, may include constraints, formatters, etc
        @param publisher the publisher used to publish data
        @param unlock_new_data_callback BaseDataHandler callback function to allow conditional unlocking of the BaseDataHandler._semaphore
        @param update_new_data_check_attachment classmethod to update the external dataset resources file list attachment
        @throws InstrumentParameterException if the data constraints are not a dictionary
        @retval None
        """
        log.debug('start _acquire_sample: config={0}'.format(config))

        cls._init_acquisition_cycle(config)

        constraints = get_safe(config, 'constraints')
        if not constraints:
            gevent.getcurrent().link(unlock_new_data_callback)
            try:
                constraints = cls._constraints_for_new_request(config)
            except NoNewDataWarning as nndw:
                #log.info(nndw.message)
                if get_safe(config, 'TESTING'):
                    #log.debug('Publish TestingFinished event')
                    pub = EventPublisher('DeviceCommonLifecycleEvent')
                    pub.publish_event(origin='BaseDataHandler._acquire_sample',
                                      description='TestingFinished')
                return

            if constraints is None:
                raise InstrumentParameterException(
                    "Data constraints returned from _constraints_for_new_request cannot be None"
                )
            config['constraints'] = constraints
        elif isinstance(constraints, dict):
            addnl_constr = cls._constraints_for_historical_request(config)
            if addnl_constr is not None and isinstance(addnl_constr, dict):
                constraints.update(addnl_constr)
        else:
            raise InstrumentParameterException(
                "Data constraints must be of type 'dict': {0}".format(
                    constraints))

        cls._publish_data(publisher, cls._get_data(config), config,
                          update_new_data_check_attachment)

        # Publish a 'TestingFinished' event
        if get_safe(config, 'TESTING'):
            #log.debug('Publish TestingFinished event')
            pub = EventPublisher(OT.DeviceCommonLifecycleEvent)
            pub.publish_event(origin='BaseDataHandler._acquire_sample',
                              description='TestingFinished')
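The hooks above define a small contract: _constraints_for_new_request must return a dict describing what to fetch, or raise NoNewDataWarning when the external dataset has nothing new. A minimal sketch of a hypothetical subclass, assuming the same module context as the example (BaseDataHandler, get_safe, NoNewDataWarning); the config keys are invented:

    class SketchDataHandler(BaseDataHandler):  # hypothetical
        @classmethod
        def _constraints_for_new_request(cls, config):
            # Compare the last harvested point against what the external
            # dataset currently offers ('last_timestamp' and
            # 'newest_available' are made-up config keys).
            last_ts = get_safe(config, 'last_timestamp', 0)
            newest = get_safe(config, 'newest_available', 0)
            if newest <= last_ts:
                raise NoNewDataWarning()
            return {'start_time': last_ts, 'end_time': newest}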
Example #4
    def test_find_events(self):
        # publish some events for the event repository
        rle_publisher = EventPublisher("ResourceLifecycleEvent")
        de_publisher = EventPublisher("DataEvent")
        rle_publisher.publish_event(origin='Some_Resource_Agent_ID1',
                                    description="RLE test event1")
        rle_publisher.publish_event(origin='Some_Resource_Agent_ID1',
                                    description="RLE test event2")
        rle_publisher.publish_event(origin='Some_Resource_Agent_ID1',
                                    description="RLE test event3")
        de_publisher.publish_event(origin='Some_Resource_Agent_ID2',
                                   description="DE test event1")
        de_publisher.publish_event(origin='Some_Resource_Agent_ID2',
                                   description="DE test event2")
        de_publisher.publish_event(origin='Some_Resource_Agent_ID2',
                                   description="DE test event3")

        # find all events for the originator 'Some_Resource_Agent_ID1'
        events = self.unsc.find_events(origin='Some_Resource_Agent_ID1')
        if len(events) != 3:
            self.fail("failed to find all events")
        for event in events:
            log.debug("event=" + str(event))
            if event[1][0] != 'Some_Resource_Agent_ID1':
                self.fail("failed to find correct events")

        # find all events of type 'DataEvent'
        events = self.unsc.find_events(type='DataEvent')
        if len(events) != 3:
            self.fail("failed to find all events")
        for event in events:
            log.debug("event=" + str(event))
            if event[1][0] != 'Some_Resource_Agent_ID2':
                self.fail("failed to find correct events")

        # find 2 events for the originator 'Some_Resource_Agent_ID2'
        events = self.unsc.find_events(origin='Some_Resource_Agent_ID2',
                                       limit=2)
        if len(events) != 2:
            self.fail("failed to find all events")
        for event in events:
            log.debug("event=" + str(event))
            if event[1][0] != 'Some_Resource_Agent_ID2':
                self.fail("failed to find correct events")

        # find all events for the originator 'Some_Resource_Agent_ID1' in reverse time order
        events = self.unsc.find_events(origin='Some_Resource_Agent_ID1',
                                       descending=True)
        if len(events) != 3:
            self.fail("failed to find all events")
        for event in events:
            log.debug("event=" + str(event))
            if event[1][0] != 'Some_Resource_Agent_ID1':
                self.fail("failed to find correct events")
Example #5
def process_oms_event():

    json_params = {}

    # oms direct request
    if request.data:
        json_params = simplejson.loads(str(request.data))
        log.debug('ServiceGatewayService:process_oms_event request.data:  %s',
                  json_params)

    #validate payload
    if 'platform_id' not in json_params or 'message' not in json_params:
        log.warning('Invalid OMS event format. payload_data: %s', json_params)
        #return gateway_json_response(OMS_BAD_REQUEST_RESPONSE)

    #prepare the event information
    try:
        #create a publisher to relay OMS events into the system as DeviceEvents
        event_publisher = EventPublisher()

        event_publisher.publish_event(
            event_type='OMSDeviceStatusEvent',
            origin_type='OMS Platform',
            origin=json_params.get('platform_id', 'NOT PROVIDED'),
            sub_type='',
            description=json_params.get('message', ''),
            status_details=json_params)
    except Exception as e:
        log.error('Could not publish OMS event: %s. Event data: %s',
                  e.message, json_params)
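For reference, a sketch of the JSON body this handler accepts: only platform_id and message are checked for, and the whole payload is passed through as status_details (all values here are invented):

    example_payload = {
        "platform_id": "Node1A",           # required by the validation above
        "message": "port 2 over-current",  # required by the validation above
        "severity": 3,                     # extra keys ride along in status_details
    }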
Example #6
    def on_start(self):  # pragma: no cover
        #super(TransformWorker,self).on_start()
        #--------------------------------------------------------------------------------
        # Explicit on_start
        #--------------------------------------------------------------------------------

        # Skip TransformStreamListener and go to StreamProcess to avoid the subscriber being created
        # We want explicit management of the thread and subscriber object for ingestion
        #todo: check how to manage multi queue subscription (transform scenario 3)

        TransformStreamProcess.on_start(self)

        #todo: can the subscription be changed or updated when new dataprocesses are added ?
        self.queue_name = self.CFG.get_safe('process.queue_name', self.id)
        self.subscriber = StreamSubscriber(process=self,
                                           exchange_name=self.queue_name,
                                           callback=self.receive_callback)
        self.thread_lock = RLock()

        self._rpc_server = self.container.proc_manager._create_listening_endpoint(
            from_name=self.id, process=self)
        self.add_endpoint(self._rpc_server)

        self.start_listener()

        #todo: determine and publish appropriate set of status events
        self.event_publisher = EventPublisher(OT.DataProcessStatusEvent)
Example #7
    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        #container = Container()
        #print 'starting container'
        #container.start()
        #print 'started container'

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.RR = ResourceRegistryServiceClient(node=self.container.node)
        self.RR2 = EnhancedResourceRegistryClient(self.RR)
        self.OMS = ObservatoryManagementServiceClient(node=self.container.node)
        self.org_management_service = OrgManagementServiceClient(
            node=self.container.node)
        self.IMS = InstrumentManagementServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.pubsubcli = PubsubManagementServiceClient(
            node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()
        #print 'TestObservatoryManagementServiceIntegration: started services'

        self.event_publisher = EventPublisher()
Example #8
    def on_start(self):
        log.debug('EventAlertTransform.on_start()')
        super(EventAlertTransform, self).on_start()

        #- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # get the algorithm to use
        #- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

        self.timer_origin = self.CFG.get_safe('process.timer_origin', 'Interval Timer')
        self.instrument_origin = self.CFG.get_safe('process.instrument_origin', '')

        self.event_times = []

        #-------------------------------------------------------------------------------------
        # Set up a listener for instrument events
        #-------------------------------------------------------------------------------------

        self.instrument_event_queue = gevent.queue.Queue()

        def instrument_event_received(message, headers):
            log.debug("EventAlertTransform received an instrument event here::: %s" % message)
            self.instrument_event_queue.put(message)

        self.instrument_event_subscriber = EventSubscriber(origin=self.instrument_origin,
                                                           callback=instrument_event_received)

        self.instrument_event_subscriber.start()

        #-------------------------------------------------------------------------------------
        # Create the publisher that will publish the Alert message
        #-------------------------------------------------------------------------------------

        self.event_publisher = EventPublisher()
Example #9
    def _acquire_data(cls, config, publisher, unlock_new_data_callback):
        """
        Ensures required keys (such as stream_id) are available from config, configures the publisher and then calls:
             BaseDataHandler._new_data_constraints (only if config does not contain 'constraints')
             BaseDataHandler._publish_data passing BaseDataHandler._get_data as a parameter
        @param config Dict containing configuration parameters, may include constraints, formatters, etc
        @param unlock_new_data_callback BaseDataHandler callback function to allow conditional unlocking of the BaseDataHandler._semaphore
        """
        log.debug('start _acquire_data: config={0}'.format(config))

        cls._init_acquisition_cycle(config)

        constraints = get_safe(config, 'constraints')
        if not constraints:
            gevent.getcurrent().link(unlock_new_data_callback)
            constraints = cls._new_data_constraints(config)
            if constraints is None:
                raise InstrumentParameterException(
                    "Data constraints returned from _new_data_constraints cannot be None"
                )
            config['constraints'] = constraints

        cls._publish_data(publisher, cls._get_data(config))

        # Publish a 'TestingFinished' event
        if get_safe(config, 'TESTING'):
            log.debug('Publish TestingFinished event')
            pub = EventPublisher('DeviceCommonLifecycleEvent')
            pub.publish_event(origin='BaseDataHandler._acquire_data',
                              description='TestingFinished')
Example #10
    def test_pub_with_event_repo(self):
        pub = EventPublisher(event_type="ResourceEvent",
                             node=self.container.node)
        pub.publish_event(origin="specifics", description="hallo")

        evs = self.container.event_repository.find_events(origin='specifics')
        self.assertEquals(len(evs), 1)
Example #11
    def test_pub_on_different_origins(self):
        ar = event.AsyncResult()
        gq = queue.Queue()
        self.count = 0

        def cb(*args, **kwargs):
            self.count += 1
            gq.put(args[0])
            if self.count == 3:
                ar.set()

        sub = EventSubscriber(event_type="ResourceEvent", callback=cb)
        pub = EventPublisher(event_type="ResourceEvent")

        self._listen(sub)

        pub.publish_event(origin="one", description="1")
        pub.publish_event(origin="two", description="2")
        pub.publish_event(origin="three", description="3")

        ar.get(timeout=5)

        res = []
        for x in xrange(self.count):
            res.append(gq.get(timeout=5))

        self.assertEquals(len(res), 3)
        self.assertEquals(res[0].description, "1")
        self.assertEquals(res[1].description, "2")
        self.assertEquals(res[2].description, "3")
Example #12
def publish_event_for_diagnostics():  # pragma: no cover
    """
    Convenient method to do the publication of the event to generate diagnostic
    information about the statuses kept in each running platform agent.

    ><> from ion.agents.platform.status_manager import publish_event_for_diagnostics
    ><> publish_event_for_diagnostics()

    and something like the following will be logged out:

2013-05-17 17:25:16,076 INFO Dummy-247 ion.agents.platform.status_manager:760 'MJ01C': (99cb3e71302a4e5ca0c137292103e357) statuses:
                                           AGGREGATE_COMMS     AGGREGATE_DATA      AGGREGATE_LOCATION  AGGREGATE_POWER
        d231ccba8d674b4691b039ceecec8d95 : STATUS_UNKNOWN      STATUS_UNKNOWN      STATUS_UNKNOWN      STATUS_UNKNOWN
        40c787fc727a4734b219fde7c8df7543 : STATUS_UNKNOWN      STATUS_UNKNOWN      STATUS_UNKNOWN      STATUS_UNKNOWN
        55ee7225435444e3a862d7ceaa9d1875 : STATUS_OK           STATUS_OK           STATUS_OK           STATUS_OK
        1d27e0c2723149cc9692488dced7dd95 : STATUS_UNKNOWN      STATUS_UNKNOWN      STATUS_UNKNOWN      STATUS_UNKNOWN
                               aggstatus : STATUS_OK           STATUS_OK           STATUS_OK           STATUS_OK
                           rollup_status : STATUS_OK           STATUS_OK           STATUS_OK           STATUS_OK
    """

    from pyon.event.event import EventPublisher
    ep = EventPublisher()
    evt = dict(event_type='DeviceStatusEvent',
               sub_type='diagnoser',
               origin='command_line')
    print("publishing: %s" % str(evt))
    ep.publish_event(**evt)
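The helper only publishes the trigger; each running platform agent's status manager does the logging. A hedged companion sketch for watching the same event from another shell, using the subscriber API seen in the other examples:

    from pyon.event.event import EventSubscriber

    def on_diag(evt, headers):
        print("diagnostics trigger received from %s" % evt.origin)

    sub = EventSubscriber(event_type='DeviceStatusEvent',
                          sub_type='diagnoser',
                          callback=on_diag)
    sub.start()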
Example #13
    def __init__(self,
                 orgname=None,
                 datastore_manager=None,
                 events_enabled=False):
        # Get an instance of datastore configured as directory.
        datastore_manager = datastore_manager or bootstrap.container_instance.datastore_manager
        self.dir_store = datastore_manager.get_datastore(
            DataStore.DS_DIRECTORY)

        self.orgname = orgname or CFG.system.root_org
        self.is_root = (self.orgname == CFG.system.root_org)

        self.events_enabled = events_enabled
        self.event_pub = None
        self.event_sub = None

        # Create directory root entry (for current org) if not existing
        if CFG.system.auto_bootstrap:
            root_de = self.register("/",
                                    "DIR",
                                    sys_name=bootstrap.get_sys_name())
            if root_de is None:
                # We created this directory just now
                pass

        if self.events_enabled:
            # init change event publisher
            self.event_pub = EventPublisher()

            # Register to receive directory changes
            self.event_sub = EventSubscriber(
                event_type="ContainerConfigModifiedEvent",
                origin="Directory",
                callback=self.receive_directory_change_event)
Example #14
    def test_sub(self):

        #start interaction observer
        io = InteractionObserver()
        io.start()

        #publish an event
        ev_pub = EventPublisher(event_type="ResourceEvent")
        ev_pub.publish_event(origin="specific", description="event")


        # publish a message
        msg_pub = Publisher()
        msg_pub.publish(to_name='anyone', msg="msg")

        # give 2 seconds for the messages to arrive
        time.sleep(2)

        #verify that two messages (an event and a message) are seen
        self.assertEquals(len(io.msg_log), 2)

        #iterate through the messages observed
        for item in io.msg_log:
            # if event
            if item[2]:
                #verify that the origin is what we sent
                self.assertEquals(item[1]['origin'], 'specific')
        dump = io._get_data(io.msg_log, {})
        sump = dump
Example #15
    def setUp(self):
        # Start container
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.pubsubcli = PubsubManagementServiceClient(
            node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.datasetclient = DatasetManagementServiceClient(
            node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(
            node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(
            node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.dataretrieverclient = DataRetrieverServiceClient(
            node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()

        # set up listener vars
        self._data_greenlets = []
        self._no_samples = None
        self._samples_received = []

        self.event_publisher = EventPublisher()
Example #16
def publish_link_event(up_down, terrestrial_remote=2):
    """
    """
    status = TelemetryStatusType.AVAILABLE if up_down \
        else TelemetryStatusType.UNAVAILABLE
    platform_id = tcaa_args['terrestrial_platform_id'] if terrestrial_remote \
        else tcaa_args['remote_platform_id']

    pub = EventPublisher()
    if terrestrial_remote == 0:
        pub.publish_event(event_type='PlatformTelemetryEvent',
                          origin=tcaa_args['terrestrial_platform_id'],
                          status=status)

    elif terrestrial_remote == 1:
        pub.publish_event(event_type='PlatformTelemetryEvent',
                          origin=tcaa_args['remote_platform_id'],
                          status=status)

    elif terrestrial_remote == 2:
        pub.publish_event(event_type='PlatformTelemetryEvent',
                          origin=tcaa_args['terrestrial_platform_id'],
                          status=status)
        pub.publish_event(event_type='PlatformTelemetryEvent',
                          origin=tcaa_args['remote_platform_id'],
                          status=status)

    else:
        raise ValueError('terrestrial_remote must be in range [0,2].')
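Usage sketch, assuming tcaa_args has already been populated elsewhere in the module:

    publish_link_event(True)       # default 2: both platforms marked AVAILABLE
    publish_link_event(False, 0)   # terrestrial platform marked UNAVAILABLE
    publish_link_event(True, 1)    # remote platform marked AVAILABLE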
Example #17
    def subscription_callback(self, message, headers):
        filter_field = self.notification._res_obj.delivery_config.processing[
            'filter_field']
        condition = self.notification._res_obj.delivery_config.processing[
            'condition']
        comparator = self.notification._res_obj.delivery_config.processing.get(
            'comparator')
        try:
            comparator_func = DetectionEventProcessor.comparators[comparator]
        except KeyError:
            # comparator is bound even when missing (None), so building the
            # error message below cannot raise NameError
            raise BadRequest(
                "Bad comparator specified in Detection filter: '%s'" %
                comparator)

        field_val = getattr(message, filter_field)
        if field_val is not None and comparator_func(field_val, condition):
            log.info('Detected an event')
            event_publisher = EventPublisher("DetectionEvent")

            message = str(self.notification._res_obj.delivery_config)

            #@David What should the origin and origin type be for Detection Events
            event_publisher.publish_event(
                origin='DetectionEventProcessor',
                message="Event Detected by DetectionEventProcessor",
                description="Event was detected by DetectionEventProcessor",
                condition=message,  # the stringified delivery_config from above
                original_origin=self.notification._res_obj.origin,
                original_type=self.notification._res_obj.origin_type)
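The comparator lookup above implies a name-to-function table. A sketch of what DetectionEventProcessor.comparators could look like, built on the standard operator module (the real table may differ):

    import operator

    comparators = {
        '>':  operator.gt,
        '<':  operator.lt,
        '>=': operator.ge,
        '<=': operator.le,
        '==': operator.eq,
    }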
Example #18
    def test_event_in_stream_out_transform(self):
        """
        Test the event-in/stream-out transform
        """

        stream_id, _ = self.pubsub.create_stream('test_stream', exchange_point='science_data')
        self.exchange_cleanup.append('science_data')

        #---------------------------------------------------------------------------------------------
        # Launch a ctd transform
        #---------------------------------------------------------------------------------------------
        # Create the process definition
        process_definition = ProcessDefinition(
            name='EventToStreamTransform',
            description='For testing an event-in/stream-out transform')
        process_definition.executable['module'] = 'ion.processes.data.transforms.event_in_stream_out_transform'
        process_definition.executable['class'] = 'EventToStreamTransform'
        proc_def_id = self.process_dispatcher.create_process_definition(process_definition=process_definition)

        # Build the config
        config = DotDict()
        config.process.queue_name = 'test_queue'
        config.process.exchange_point = 'science_data'
        config.process.publish_streams.output = stream_id
        config.process.event_type = 'ExampleDetectableEvent'
        config.process.variables = ['voltage', 'temperature']

        # Schedule the process
        pid = self.process_dispatcher.schedule_process(process_definition_id=proc_def_id, configuration=config)
        self.addCleanup(self.process_dispatcher.cancel_process, pid)

        #---------------------------------------------------------------------------------------------
        # Create a subscriber for testing
        #---------------------------------------------------------------------------------------------

        ar_cond = gevent.event.AsyncResult()
        def subscriber_callback(m, r, s):
            ar_cond.set(m)
        sub = StandaloneStreamSubscriber('sub', subscriber_callback)
        self.addCleanup(sub.stop)
        sub_id = self.pubsub.create_subscription('subscription_cond',
            stream_ids=[stream_id],
            exchange_name='sub')
        self.pubsub.activate_subscription(sub_id)
        self.queue_cleanup.append(sub.xn.queue)
        sub.start()

        gevent.sleep(4)

        #---------------------------------------------------------------------------------------------
        # Publish an event. The transform has been configured to receive this event
        #---------------------------------------------------------------------------------------------

        event_publisher = EventPublisher("ExampleDetectableEvent")
        event_publisher.publish_event(origin='fake_origin', voltage='5', temperature='273')

        # Assert that the transform processed the event and published data on the output stream
        result_cond = ar_cond.get(timeout=10)
        self.assertTrue(result_cond)
Example #19
    def __init__(self, datastore_manager=None):

        # Get an instance of datastore configured as resource registry.
        # May be persistent or mock, forced clean, with indexes
        datastore_manager = datastore_manager or bootstrap.container_instance.datastore_manager
        self.rr_store = datastore_manager.get_datastore("resources", DataStore.DS_PROFILE.RESOURCES)

        self.event_pub = EventPublisher()
Example #20
    def on_init(self):
        self.event_pub = EventPublisher()

        self.policy_event_subscriber = EventSubscriber(
            event_type="ResourceModifiedEvent",
            origin_type="Policy",
            callback=self.policy_event_callback)
        self.policy_event_subscriber.activate()
Example #21
 def publish_alert(self):
     """
     Publishes the alert to ION.
     """
     event_data = self.make_event_data()
     log.trace("publishing alert: %s", event_data)
     pub = EventPublisher()
     pub.publish_event(**event_data)
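make_event_data() is not shown; publish_event(**event_data) only requires that it return keyword arguments EventPublisher understands. A hypothetical shape, with keys borrowed from the other examples on this page:

    def make_event_data(self):
        # All keys here are assumptions modeled on the publish_event()
        # calls elsewhere in these examples.
        return dict(event_type='DeviceStatusAlertEvent',
                    origin=self._resource_id,
                    description='alert condition triggered')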
Example #22
    def on_start(self):
        self.event_pub = EventPublisher()

        self.policy_event_subscriber = ProcessEventSubscriber(
            event_type="ResourceModifiedEvent",
            origin_type="Policy",
            callback=self._policy_event_callback,
            process=self)
        self._process.add_endpoint(self.policy_event_subscriber)
Example #23
    def on_start(self):
        self.ION_NOTIFICATION_EMAIL_ADDRESS = CFG.get_safe(
            'server.smtp.sender')

        # Create an event processor
        self.event_processor = EmailEventProcessor()

        # Dictionaries that maintain information about users and their subscribed notifications
        self.user_info = {}

        # The reverse_user_info is calculated from the user_info dictionary
        self.reverse_user_info = {}

        # Get the clients
        # @TODO: Why are these not dependencies in the service YML???
        self.discovery = DiscoveryServiceClient()
        self.process_dispatcher = ProcessDispatcherServiceClient()

        self.event_publisher = EventPublisher()
        self.datastore = self.container.datastore_manager.get_datastore(
            'events')

        self.start_time = get_ion_ts()

        #------------------------------------------------------------------------------------
        # Create an event subscriber for Reload User Info events
        #------------------------------------------------------------------------------------

        def reload_user_info(event_msg, headers):
            """
            Callback method for the subscriber to ReloadUserInfoEvent
            """

            notification_id = event_msg.notification_id
            log.debug(
                "(UNS instance) received a ReloadNotificationEvent. The relevant notification_id is %s"
                % notification_id)

            try:
                self.user_info = self.load_user_info()
            except NotFound:
                log.warning("ElasticSearch has not yet loaded the user_index.")

            self.reverse_user_info = calculate_reverse_user_info(
                self.user_info)

            log.debug("(UNS instance) After a reload, the user_info: %s" %
                      self.user_info)
            log.debug("(UNS instance) The recalculated reverse_user_info: %s" %
                      self.reverse_user_info)

        # the subscriber for the ReloadUserInfoEvent
        self.reload_user_info_subscriber = EventSubscriber(
            event_type=OT.ReloadUserInfoEvent,
            origin='UserNotificationService',
            callback=reload_user_info)
        self.add_endpoint(self.reload_user_info_subscriber)
Example #24
    def _init_change_notification(self):

        # init change event publisher
        self.event_pub = EventPublisher()

        # Register to receive directory changes
        self.event_sub = EventSubscriber(
            event_type="ContainerConfigModifiedEvent",
            origin="Directory",
            callback=self.receive_directory_change_event)
Example #25
 def call_process(self, packet):
     try:
         self.process(packet)
     except Exception as e:
         log.exception('Unhandled exception caught in transform process')
         event_publisher = EventPublisher()
         event_publisher.publish_event(origin=self._transform_id,
                                       event_type='ExceptionEvent',
                                       exception_type=str(type(e)),
                                       exception_message=e.message)
Example #26
    def on_start(self):  # pragma: no cover
        super(DiscoveryService, self).on_start()

        self.use_es = CFG.get_safe('system.elasticsearch', False)

        self.elasticsearch_host = CFG.get_safe('server.elasticsearch.host', 'localhost')
        self.elasticsearch_port = CFG.get_safe('server.elasticsearch.port', '9200')

        self.ep = EventPublisher(event_type='SearchBufferExceededEvent')
        self.heuristic_cutoff = 4
Example #27
    def on_start(self):
        super(IngestionManagementService, self).on_start()
        self.event_publisher = EventPublisher(
            event_type="DatasetIngestionConfigurationEvent")

        res_list, _ = self.clients.resource_registry.find_resources(
            restype=RT.ProcessDefinition,
            name='ingestion_worker_process',
            id_only=True)
        if len(res_list):
            self.process_definition_id = res_list[0]
Example #28
    def test_two_sided_interval(self):
        """
        test_two_sided_interval
        Test interval alarm and alarm event publishing for a closed
        interval.
        """

        kwargs = {
            'name': 'temp_high_warning',
            'stream_name': 'fakestreamname',
            'value_id': 'temp',
            'message': 'Temperature is above normal range.',
            'type': StreamAlarmType.WARNING,
            'lower_bound': 10.0,
            'lower_rel_op': '<',
            'upper_bound': 20.0,
            'upper_rel_op': '<'
        }

        # Create alarm object.
        alarm = IonObject('IntervalAlarmDef', **kwargs)
        alarm = construct_alarm_expression(alarm)

        # This sequence will produce 5 alarms.
        # 5.5 warning
        # 10.2 all clear
        # 23.3 warning
        # 17.5 all clear
        # 8.8 warning
        self._event_count = 5
        test_vals = [
            5.5, 5.5, 5.4, 4.6, 4.5, 10.2, 10.3, 10.5, 15.5, 23.3, 23.3, 24.8,
            17.5, 16.5, 12.5, 8.8, 7.7
        ]

        pub = EventPublisher(event_type="StreamAlarmEvent",
                             node=self.container.node)

        for x in test_vals:
            event_data = None
            eval_alarm(alarm, x)
            if alarm.first_time == 1:
                event_data = make_event_data(alarm)

            elif alarm.first_time > 1:
                if alarm.status != alarm.old_status:
                    event_data = make_event_data(alarm)

            if event_data:
                pub.publish_event(origin=self._resource_id, **event_data)

        self._async_event_result.get(timeout=30)
        """ 
def validate_salinity_array(a, context={}):
    from pyon.agent.agent import ResourceAgentState
    from pyon.event.event import EventPublisher
    from pyon.public import OT

    stream_id = context['stream_id']
    dataprocess_id = context['dataprocess_id']

    event_publisher = EventPublisher(OT.DeviceStatusAlertEvent)

    event_publisher.publish_event(origin=stream_id,
                                  values=[dataprocess_id],
                                  description="Invalid value for salinity")
Example #30
 def mixin_on_init(self):
     """
     """
     self._server = None
     self._client = None
     self._other_host = self.CFG.other_host
     self._other_port = self.CFG.other_port
     self._this_port = self.CFG.this_port
     self._platform_resource_id = self.CFG.platform_resource_id
     self._link_status = TelemetryStatusType.UNAVAILABLE
     self._event_subscriber = None
     self._server_greenlet = None
     self._publisher = EventPublisher()