Code Example #1
    def setUp(self):
        super(DiscoveryIntTest, self).setUp()

        self._start_container()
        self.addCleanup(DiscoveryIntTest.es_cleanup)
        self.container.start_rel_from_url('res/deploy/r2dm.yml')

        self.discovery = DiscoveryServiceClient()
        self.catalog   = CatalogManagementServiceClient()
        self.ims       = IndexManagementServiceClient()
        self.rr        = ResourceRegistryServiceClient()

        if use_es:
            self.es_host   = CFG.get_safe('server.elasticsearch.host', 'localhost')
            self.es_port   = CFG.get_safe('server.elasticsearch.port', '9200')
            CFG.server.elasticsearch.shards         = 1
            CFG.server.elasticsearch.replicas       = 0
            CFG.server.elasticsearch.river_shards   = 1
            CFG.server.elasticsearch.river_replicas = 0
            self.es = ep.ElasticSearch(
                host=self.es_host,
                port=self.es_port,
                timeout=10,
                verbose=True
            )
            op = DotDict(CFG)
            op.op = 'clean_bootstrap'
            self.container.spawn_process('index_bootstrap','ion.processes.bootstrap.index_bootstrap','IndexBootStrap', op)
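The es_cleanup hook registered with addCleanup above is not shown in this snippet. A minimal sketch of what such a hook could do, assuming the test only needs to drop its ElasticSearch indices between runs (the index names are hypothetical, and plain HTTP via requests is used instead of the elasticpy client seen above):

    @staticmethod
    def es_cleanup():
        # Best-effort removal of the test indices after the run (sketch only).
        import requests
        es_host = CFG.get_safe('server.elasticsearch.host', 'localhost')
        es_port = CFG.get_safe('server.elasticsearch.port', '9200')
        for index in ('resources_index', 'events_index'):  # hypothetical index names
            try:
                requests.delete('http://%s:%s/%s' % (es_host, es_port, index), timeout=10)
            except requests.RequestException:
                pass  # the index may not exist; ignore cleanup failures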
Code Example #2
    def on_start(self):
        self.ION_NOTIFICATION_EMAIL_ADDRESS = CFG.get_safe(
            'server.smtp.sender')

        # Create an event processor
        self.event_processor = EmailEventProcessor()

        # Dictionaries that maintain information about users and their subscribed notifications
        self.user_info = {}

        # The reverse_user_info is calculated from the user_info dictionary
        self.reverse_user_info = {}

        # Get the clients
        # @TODO: Why are these not dependencies in the service YML???
        self.discovery = DiscoveryServiceClient()
        self.process_dispatcher = ProcessDispatcherServiceClient()

        self.event_publisher = EventPublisher()
        self.datastore = self.container.datastore_manager.get_datastore(
            'events')

        self.start_time = get_ion_ts()

        #------------------------------------------------------------------------------------
        # Create an event subscriber for Reload User Info events
        #------------------------------------------------------------------------------------

        def reload_user_info(event_msg, headers):
            """
            Callback method for the subscriber to ReloadUserInfoEvent
            """

            notification_id = event_msg.notification_id
            log.debug(
                "(UNS instance) received a ReloadUserInfoEvent. The relevant notification_id is %s"
                % notification_id)

            try:
                self.user_info = self.load_user_info()
            except NotFound:
                log.warning("ElasticSearch has not yet loaded the user_index.")

            self.reverse_user_info = calculate_reverse_user_info(
                self.user_info)

            log.debug("(UNS instance) After a reload, the user_info: %s" %
                      self.user_info)
            log.debug("(UNS instance) The recalculated reverse_user_info: %s" %
                      self.reverse_user_info)

        # the subscriber for the ReloadUserInfoEvent
        self.reload_user_info_subscriber = EventSubscriber(
            event_type=OT.ReloadUserInfoEvent,
            origin='UserNotificationService',
            callback=reload_user_info)
        self.add_endpoint(self.reload_user_info_subscriber)
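calculate_reverse_user_info is imported from elsewhere in the codebase. Conceptually it inverts the user_info dictionary so that an incoming event can be matched back to the users who subscribed to it; a rough sketch of that idea (the returned structure and key names are assumptions, not the actual implementation):

    def calculate_reverse_user_info(user_info):
        # Sketch: map notification origins and event types back to the user ids
        # that subscribed to them.
        event_origin_to_users = {}
        event_type_to_users = {}
        for user_id, value in user_info.iteritems():
            for notification in value['notifications']:
                event_origin_to_users.setdefault(notification.origin, set()).add(user_id)
                event_type_to_users.setdefault(notification.event_type, set()).add(user_id)
        return {'event_origin': event_origin_to_users, 'event_type': event_type_to_users}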
Code Example #3
    def on_start(self):

        #---------------------------------------------------------------------------------------------------
        # Get the event Repository
        #---------------------------------------------------------------------------------------------------

        self.event_repo = self.container.instance.event_repository

        self.smtp_client = setting_up_smtp_client()

        self.ION_NOTIFICATION_EMAIL_ADDRESS = '*****@*****.**'

        #---------------------------------------------------------------------------------------------------
        # Create an event processor
        #---------------------------------------------------------------------------------------------------

        self.event_processor = EmailEventProcessor(self.smtp_client)

        #---------------------------------------------------------------------------------------------------
        # load event originators, types, and table
        #---------------------------------------------------------------------------------------------------

        self.notifications = {}

        #---------------------------------------------------------------------------------------------------
        # Get the clients
        #---------------------------------------------------------------------------------------------------

        self.discovery = DiscoveryServiceClient()
        self.process_dispatcher = ProcessDispatcherServiceClient()
        self.event_publisher = EventPublisher()

        self.start_time = UserNotificationService.makeEpochTime(self.__now())
Code Example #4
    def on_start(self):

        #---------------------------------------------------------------------------------------------------
        # Get the event Repository
        #---------------------------------------------------------------------------------------------------

        self.event_repo = self.container.instance.event_repository

        self.smtp_client = setting_up_smtp_client()

        self.ION_NOTIFICATION_EMAIL_ADDRESS = '*****@*****.**'

        #---------------------------------------------------------------------------------------------------
        # Create an event processor
        #---------------------------------------------------------------------------------------------------

        self.event_processor = EmailEventProcessor(self.smtp_client)

        #---------------------------------------------------------------------------------------------------
        # load event originators, types, and table
        #---------------------------------------------------------------------------------------------------

        self.event_types = CFG.event.types
        self.event_table = {}

        #---------------------------------------------------------------------------------------------------
        # Get the clients
        #---------------------------------------------------------------------------------------------------

        self.discovery = DiscoveryServiceClient()
        self.process_dispatcher = ProcessDispatcherServiceClient()
        self.datastore_manager = DatastoreManager()

        self.event_publisher = EventPublisher()
        self.scheduler_service = SchedulerService()
Code Example #5
    def setUp(self):
        super(DiscoveryIntTest, self).setUp()

        self._start_container()
        self.addCleanup(DiscoveryIntTest.es_cleanup)
        self.container.start_rel_from_url('res/deploy/r2dm.yml')

        self.discovery = DiscoveryServiceClient()
        self.catalog   = CatalogManagementServiceClient()
        self.ims       = IndexManagementServiceClient()
        self.rr        = ResourceRegistryServiceClient()

        if use_es:
            self.es_host   = CFG.get_safe('server.elasticsearch.host', 'localhost')
            self.es_port   = CFG.get_safe('server.elasticsearch.port', '9200')
            CFG.server.elasticsearch.shards         = 1
            CFG.server.elasticsearch.replicas       = 0
            CFG.server.elasticsearch.river_shards   = 1
            CFG.server.elasticsearch.river_replicas = 0
            self.es = ep.ElasticSearch(
                host=self.es_host,
                port=self.es_port,
                timeout=10,
                verbose=True
            )
            op = DotDict(CFG)
            op.op = 'clean_bootstrap'
            self.container.spawn_process('index_bootstrap','ion.processes.bootstrap.index_bootstrap','IndexBootStrap', op)
Code Example #6
    def on_start(self):
        self.ION_NOTIFICATION_EMAIL_ADDRESS = CFG.get_safe('server.smtp.sender')

        # Create an event processor
        self.event_processor = EmailEventProcessor()

        # Event originators, types, and table
        self.notifications = {}

        # Dictionaries that maintain information about users and their subscribed notifications
        self.user_info = {}

        # The reverse_user_info is calculated from the user_info dictionary
        self.reverse_user_info = {}

        # Get the clients
        # @TODO: Why are these not dependencies in the service YML???
        self.discovery = DiscoveryServiceClient()
        self.process_dispatcher = ProcessDispatcherServiceClient()

        self.event_publisher = EventPublisher()
        self.datastore = self.container.datastore_manager.get_datastore('events')

        self.start_time = get_ion_ts()

        #------------------------------------------------------------------------------------
        # Create an event subscriber for Reload User Info events
        #------------------------------------------------------------------------------------

        def reload_user_info(event_msg, headers):
            '''
            Callback method for the subscriber to ReloadUserInfoEvent
            '''

            notification_id = event_msg.notification_id
            log.debug("(UNS instance) received a ReloadUserInfoEvent. The relevant notification_id is %s" % notification_id)

            try:
                self.user_info = self.load_user_info()
            except NotFound:
                log.warning("ElasticSearch has not yet loaded the user_index.")

            self.reverse_user_info =  calculate_reverse_user_info(self.user_info)

            log.debug("(UNS instance) After a reload, the user_info: %s" % self.user_info)
            log.debug("(UNS instance) The recalculated reverse_user_info: %s" % self.reverse_user_info)

        # the subscriber for the ReloadUserInfoEvent
        self.reload_user_info_subscriber = EventSubscriber(
            event_type="ReloadUserInfoEvent",
            origin='UserNotificationService',
            callback=reload_user_info
        )
        self.add_endpoint(self.reload_user_info_subscriber)
Code Example #7
    def setUp(self):
        super(DiscoveryIntTest, self).setUp()
        config = DotDict()
        config.bootstrap.use_es = True

        self._start_container()
        self.addCleanup(DiscoveryIntTest.es_cleanup)
        self.container.start_rel_from_url('res/deploy/r2dm.yml', config)

        self.discovery = DiscoveryServiceClient()
        self.catalog   = CatalogManagementServiceClient()
        self.ims       = IndexManagementServiceClient()
        self.rr        = ResourceRegistryServiceClient()
Code Example #8
    def setUp(self):
        raise SkipTest("Not yet ported to Postgres")

        super(DiscoveryIntTest, self).setUp()
        config = DotDict()

        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml', config)

        self.discovery               = DiscoveryServiceClient()
        self.catalog                 = CatalogManagementServiceClient()
        self.ims                     = IndexManagementServiceClient()
        self.rr                      = ResourceRegistryServiceClient()
        self.dataset_management      = DatasetManagementServiceClient()
        self.pubsub_management       = PubsubManagementServiceClient()
        self.data_product_management = DataProductManagementServiceClient()
Code Example #9
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.resource_registry = self.container.resource_registry
        self.RR2 = EnhancedResourceRegistryClient(self.resource_registry)
        self.data_acquisition_management = DataAcquisitionManagementServiceClient()
        self.pubsub_management = PubsubManagementServiceClient()
        self.instrument_management = InstrumentManagementServiceClient()
        self.discovery = DiscoveryServiceClient()
        self.dataset_management = DatasetManagementServiceClient()
        self.process_dispatcher = ProcessDispatcherServiceClient()
        self.data_process_management = DataProcessManagementServiceClient()
        self.data_product_management = DataProductManagementServiceClient()
        self.data_retriever = DataRetrieverServiceClient()
        self.user_notification = UserNotificationServiceClient()
        self.observatory_management = ObservatoryManagementServiceClient()
        self.visualization = VisualizationServiceClient()
        self.ph = ParameterHelper(self.dataset_management, self.addCleanup)
        self.ctd_count = 0
Code Example #10
class UserNotificationService(BaseUserNotificationService):
    """
    A service that provides users with an API for CRUD methods for notifications.
    """

    def __init__(self, *args, **kwargs):
        self._schedule_ids = []
        BaseUserNotificationService.__init__(self, *args, **kwargs)

    def on_start(self):

        #---------------------------------------------------------------------------------------------------
        # Get the event Repository
        #---------------------------------------------------------------------------------------------------

#        self.ION_NOTIFICATION_EMAIL_ADDRESS = '*****@*****.**'
        self.ION_NOTIFICATION_EMAIL_ADDRESS = CFG.get_safe('server.smtp.sender')


        #---------------------------------------------------------------------------------------------------
        # Create an event processor
        #---------------------------------------------------------------------------------------------------

        self.event_processor = EmailEventProcessor()

        #---------------------------------------------------------------------------------------------------
        # load event originators, types, and table
        #---------------------------------------------------------------------------------------------------

        self.notifications = {}

        #---------------------------------------------------------------------------------------------------
        # Dictionaries that maintain information about users and their subscribed notifications
        # The reverse_user_info is calculated from the user_info dictionary
        #---------------------------------------------------------------------------------------------------
        self.user_info = {}
        self.reverse_user_info = {}

        #---------------------------------------------------------------------------------------------------
        # Get the clients
        #---------------------------------------------------------------------------------------------------

        self.discovery = DiscoveryServiceClient()
        self.process_dispatcher = ProcessDispatcherServiceClient()
        self.event_publisher = EventPublisher()
        self.datastore = self.container.datastore_manager.get_datastore('events')

        self.start_time = get_ion_ts()

        #------------------------------------------------------------------------------------
        # Create an event subscriber for Reload User Info events
        #------------------------------------------------------------------------------------

        def reload_user_info(event_msg, headers):
            '''
            Callback method for the subscriber to ReloadUserInfoEvent
            '''

            notification_id = event_msg.notification_id
            log.debug("(UNS instance) received a ReloadUserInfoEvent. The relevant notification_id is %s" % notification_id)

            try:
                self.user_info = self.load_user_info()
            except NotFound:
                log.warning("ElasticSearch has not yet loaded the user_index.")

            self.reverse_user_info =  calculate_reverse_user_info(self.user_info)

            log.debug("(UNS instance) After a reload, the user_info: %s" % self.user_info)
            log.debug("(UNS instance) The recalculated reverse_user_info: %s" % self.reverse_user_info)

        # the subscriber for the ReloadUserInfoEvent
        self.reload_user_info_subscriber = EventSubscriber(
            event_type="ReloadUserInfoEvent",
            origin='UserNotificationService',
            callback=reload_user_info
        )
        self.add_endpoint(self.reload_user_info_subscriber)

    def on_quit(self):
        """
        Handles stop/terminate.

        Cleans up subscribers spawned here, terminates any scheduled tasks to the scheduler.
        """
        for sid in self._schedule_ids:
            try:
                self.clients.scheduler.cancel_timer(sid)
            except IonException as ex:
                log.info("Ignoring exception while cancelling schedule id (%s): %s: %s", sid, ex.__class__.__name__, ex)

        super(UserNotificationService, self).on_quit()

    def __now(self):
        """
        This method defines what the UNS uses as its "current" time
        """
        return datetime.utcnow()

    def set_process_batch_key(self, process_batch_key = ''):
        """
        This method allows an operator to set the process_batch_key, a string.
        Once this method is used by the operator, the UNS will start listening for timer events
        published by the scheduler with origin = process_batch_key.

        @param process_batch_key str
        """


        def process(event_msg, headers):
            self.end_time = get_ion_ts()

            # run the process_batch() method
            self.process_batch(start_time=self.start_time, end_time=self.end_time)
            self.start_time = self.end_time

        # The subscriber for batch processing.
        # To trigger the batch notification, have the scheduler create a timer
        # with event_origin = process_batch_key.
        self.batch_processing_subscriber = EventSubscriber(
            event_type="TimerEvent",
            origin=process_batch_key,
            callback=process
        )
        self.add_endpoint(self.batch_processing_subscriber)

    def create_notification(self, notification=None, user_id=''):
        """
        Persists the provided NotificationRequest object for the specified Origin id.
        Associates the Notification resource with the user_id string.
        The returned id is the internal id by which the NotificationRequest will be identified
        in the data store.

        @param notification        NotificationRequest
        @param user_id             str
        @retval notification_id    str
        @throws BadRequest    if object passed has _id or _rev attribute

        """

        if not user_id:
            raise BadRequest("User id not provided.")

        log.debug("Create notification called for user_id: %s, and notification: %s", user_id, notification)

        #---------------------------------------------------------------------------------------------------
        # Persist Notification object as a resource if it has already not been persisted
        #---------------------------------------------------------------------------------------------------

        # if the notification has already been registered, simply use the old id
        notification_id = self._notification_in_notifications(notification, self.notifications)

        # since the notification has not been registered yet, register it and get the id

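        # TemporalBounds acts as a soft-delete marker: an empty end_datetime means the
        # notification is active; delete_notification later stamps end_datetime to retire
        # it without removing the resource.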
        temporal_bounds = TemporalBounds()
        temporal_bounds.start_datetime = get_ion_ts()
        temporal_bounds.end_datetime = ''

        if not notification_id:
            notification.temporal_bounds = temporal_bounds
            notification_id, _ = self.clients.resource_registry.create(notification)
            self.notifications[notification_id] = notification
        else:
            log.debug("Notification object has already been created in resource registry before. No new id to be generated. notification_id: %s", notification_id)
            # Read the old notification already in the resource registry
            notification = self.clients.resource_registry.read(notification_id)

            # Update the temporal bounds of the old notification resource
            notification.temporal_bounds = temporal_bounds

            # Update the notification in the resource registry
            self.clients.resource_registry.update(notification)

            log.debug("The temporal bounds for this resubscribed notification object with id: %s, is: %s", notification_id,notification.temporal_bounds)


        # Link the user and the notification with a hasNotification association
        assocs= self.clients.resource_registry.find_associations(subject=user_id,
                                                                    predicate=PRED.hasNotification,
                                                                    object=notification_id,
                                                                    id_only=True)
        if assocs:
            log.debug("Got an already existing association: %s, between user_id: %s, and notification_id: %s", assocs,user_id,notification_id)
            return notification_id
        else:
            log.debug("Creating association between user_id: %s, and notification_id: %s", user_id, notification_id )
            self.clients.resource_registry.create_association(user_id, PRED.hasNotification, notification_id)

        # read the registered notification request object because this has an _id and is more useful
        notification = self.clients.resource_registry.read(notification_id)

        # Update the user info object with the notification
        self.event_processor.add_notification_for_user(new_notification=notification, user_id=user_id)

        #-------------------------------------------------------------------------------------------------------------------
        # Generate an event that can be picked by a notification worker so that it can update its user_info dictionary
        #-------------------------------------------------------------------------------------------------------------------
        log.debug("(create notification) Publishing ReloadUserInfoEvent for notification_id: %s", notification_id)

        self.event_publisher.publish_event( event_type= "ReloadUserInfoEvent",
            origin="UserNotificationService",
            description= "A notification has been created.",
            notification_id = notification_id)

        return notification_id

    def update_notification(self, notification=None, user_id = ''):
        """Updates the provided NotificationRequest object.  Throws NotFound exception if
        an existing version of NotificationRequest is not found.  Throws Conflict if
        the provided NotificationRequest object is not based on the latest persisted
        version of the object.

        @param notification     NotificationRequest
        @throws BadRequest      if object does not have _id or _rev attribute
        @throws NotFound        object with specified id does not exist
        @throws Conflict        object not based on latest persisted object version
        """

        raise NotImplementedError("This method needs to be worked out in terms of implementation")

#        #-------------------------------------------------------------------------------------------------------------------
#        # Get the old notification
#        #-------------------------------------------------------------------------------------------------------------------
#
#        old_notification = self.clients.resource_registry.read(notification._id)
#
#        #-------------------------------------------------------------------------------------------------------------------
#        # Update the notification in the notifications dict
#        #-------------------------------------------------------------------------------------------------------------------
#
#
#        self._update_notification_in_notifications_dict(new_notification=notification,
#                                                        notifications=self.notifications)
#        #-------------------------------------------------------------------------------------------------------------------
#        # Update the notification in the registry
#        #-------------------------------------------------------------------------------------------------------------------
#        '''
#        Since one user should not be able to update the notification request resource without the knowledge of other users
#        who have subscribed to the same notification request, we do not update the resource in the resource registry
#        '''
#
##        self.clients.resource_registry.update(notification)
#
#        #-------------------------------------------------------------------------------------------------------------------
#        # reading up the notification object to make sure we have the newly registered notification request object
#        #-------------------------------------------------------------------------------------------------------------------
#
#        notification_id = notification._id
#        notification = self.clients.resource_registry.read(notification_id)
#
#        #------------------------------------------------------------------------------------
#        # Update the UserInfo object
#        #------------------------------------------------------------------------------------
#
#        user = self.update_user_info_object(user_id, notification)
#
#        #-------------------------------------------------------------------------------------------------------------------
#        # Generate an event that can be picked by notification workers so that they can update their user_info dictionary
#        #-------------------------------------------------------------------------------------------------------------------
#        log.info("(update notification) Publishing ReloadUserInfoEvent for updated notification")
#
#        self.event_publisher.publish_event( event_type= "ReloadUserInfoEvent",
#            origin="UserNotificationService",
#            description= "A notification has been updated.",
#            notification_id = notification_id
#        )

    def read_notification(self, notification_id=''):
        """Returns the NotificationRequest object for the specified notification id.
        Throws exception if id does not match any persisted NotificationRequest
        objects.

        @param notification_id    str
        @retval notification    NotificationRequest
        @throws NotFound    object with specified id does not exist
        """
        notification = self.clients.resource_registry.read(notification_id)

        return notification

    def delete_notification(self, notification_id=''):
        """For now, permanently deletes NotificationRequest object with the specified
        id. Throws exception if id does not match any persisted NotificationRequest.

        @param notification_id    str
        @throws NotFound    object with specified id does not exist
        """

        #-------------------------------------------------------------------------------------------------------------------
        # Stop the event subscriber for the notification
        #-------------------------------------------------------------------------------------------------------------------
        notification_request = self.clients.resource_registry.read(notification_id)

        #-------------------------------------------------------------------------------------------------------------------
        # Update the resource registry
        #-------------------------------------------------------------------------------------------------------------------

        notification_request.temporal_bounds.end_datetime = get_ion_ts()

        self.clients.resource_registry.update(notification_request)

        #-------------------------------------------------------------------------------------------------------------------
        # Find users who are interested in the notification and update the notification in the list maintained by the UserInfo object
        #-------------------------------------------------------------------------------------------------------------------
        user_ids, _ = self.clients.resource_registry.find_subjects(RT.UserInfo, PRED.hasNotification, notification_id, True)

        for user_id in user_ids:
            self.update_user_info_object(user_id, notification_request)

        #-------------------------------------------------------------------------------------------------------------------
        # Generate an event that can be picked by a notification worker so that it can update its user_info dictionary
        #-------------------------------------------------------------------------------------------------------------------
        log.info("(delete notification) Publishing ReloadUserInfoEvent for notification_id: %s", notification_id)

        self.event_publisher.publish_event( event_type= "ReloadUserInfoEvent",
            origin="UserNotificationService",
            description= "A notification has been deleted.",
            notification_id = notification_id)

#    def delete_notification_from_user_info(self, notification_id):
#        """
#        Helper method to delete the notification from the user_info dictionary
#
#        @param notification_id str
#        """
#
#        user_ids, assocs = self.clients.resource_registry.find_subjects(object=notification_id, predicate=PRED.hasNotification, id_only=True)
#
#        for assoc in assocs:
#            self.clients.resource_registry.delete_association(assoc)
#
#        for user_id in user_ids:
#
#            value = self.user_info[user_id]
#
#            for notif in value['notifications']:
#                if notification_id == notif._id:
#                    # remove the notification
#                    value['notifications'].remove(notif)
#
#        self.reverse_user_info = calculate_reverse_user_info(self.user_info)

    def find_events(self, origin='', type='', min_datetime=0, max_datetime=0, limit= -1, descending=False):
        """
        This method leverages couchdb view and simple filters. It does not use elastic search.

        Returns a list of events that match the specified search criteria. Throws a NotFound exception
        if no events exist for the given parameters.

        @param origin         str
        @param type           str
        @param min_datetime   int  seconds
        @param max_datetime   int  seconds
        @param limit          int         (integer limiting the number of results (0 means unlimited))
        @param descending     boolean     (if True, reverse order (of production time) is applied, e.g. most recent first)
        @retval event_list    []
        @throws NotFound    object with specified parameters does not exist
        """
        event_tuples = []

        try:
            event_tuples = self.container.event_repository.find_events(event_type=type, origin=origin, start_ts=min_datetime, end_ts=max_datetime, limit=limit, descending=descending)
        except Exception as exc:
            log.warning("The UNS find_events operation for event origin = %s and type = %s failed. Error message = %s", origin, type, exc.message)

        events = [item[2] for item in event_tuples]
        log.debug("(find_events) UNS found the following relevant events: %s", events)

        return events


    #todo Uses Elastic Search. Later extend this to a larger search criteria
    def find_events_extended(self, origin='', type='', min_time= 0, max_time=0, limit=-1, descending=False):
        """Uses Elastic Search. Returns a list of events that match the specified search criteria. Will throw a not NotFound exception
        if no events exist for the given parameters.

        @param origin         str
        @param type           str
        @param min_time   int seconds
        @param max_time   int seconds
        @param limit          int         (integer limiting the number of results (0 means unlimited))
        @param descending     boolean     (if True, reverse order (of production time) is applied, e.g. most recent first)
        @retval event_list    []
        @throws NotFound    object with specified parameters does not exist
        @throws NotFound    object with specified parameters does not exist
        """

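        # Assemble a Discovery query: each clause below searches the 'events_index';
        # multiple clauses are joined with ' and ' before being handed to discovery.parse().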
        query = []

        if min_time and max_time:
            query.append( "SEARCH 'ts_created' VALUES FROM %s TO %s FROM 'events_index'" % (min_time, max_time))

        if origin:
            query.append( 'search "origin" is "%s" from "events_index"' % origin)

        if type:
            query.append( 'search "type_" is "%s" from "events_index"' % type)

        search_string = ' and '.join(query)


        # get the list of ids corresponding to the events
        ret_vals = self.discovery.parse(search_string)
        if len(query) > 1:
            events = self.datastore.read_mult(ret_vals)
        else:
            events = [i['_source'] for i in ret_vals]

        log.debug("(find_events_extended) Discovery search returned the following event ids: %s", ret_vals)


        log.debug("(find_events_extended) UNS found the following relevant events: %s", events)

        if limit > 0:
            return events[:limit]

        #todo implement time ordering: ascending or descending

        return events

    def publish_event_object(self, event=None):
        """
        This service operation publishes the given Event object.

        @param event    !Event
        @retval event   !Event
        """
        event = self.event_publisher.publish_event_object(event_object=event)
        log.info("The publish_event_object(event) method of UNS was used to publish the event: %s", event )

        return event

    def publish_event(self, event_type='', origin='', origin_type='', sub_type='', description='', event_attrs=None):
        """
        This service operation assembles a new Event object based on event_type
        (e.g. via the pyon Event publisher) with optional additional attributes from an event_attrs
        dict of arbitrary attributes.

        @param event_type   str
        @param origin       str
        @param origin_type  str
        @param sub_type     str
        @param description  str
        @param event_attrs  dict
        @retval event       !Event
        """
        event_attrs = event_attrs or {}

        event = self.event_publisher.publish_event(
            event_type = event_type,
            origin = origin,
            origin_type = origin_type,
            sub_type = sub_type,
            description = description,
            **event_attrs
            )
        log.info("The publish_event() method of UNS was used to publish an event: %s", event)

        return event

    def get_recent_events(self, resource_id='', limit = 100):
        """
        Get recent events for use in extended resource computed attribute
        @param resource_id str
        @param limit int
        @retval ComputedEventListValue with value being the list of matching Event objects
        """

        now = get_ion_ts()
        events = self.find_events(origin=resource_id, limit=limit, max_datetime=now, descending=True)

        ret = IonObject(OT.ComputedEventListValue)
        if events:
            ret.value = events
            ret.computed_list = [self._get_event_computed_attributes(event) for event in events]
            ret.status = ComputedValueAvailability.PROVIDED
        else:
            ret.status = ComputedValueAvailability.NOTAVAILABLE

        return ret

    def _get_event_computed_attributes(self, event):
        """
        @param event any Event to compute attributes for
        @retval an EventComputedAttributes object for given event
        """
        evt_computed = IonObject(OT.EventComputedAttributes)
        evt_computed.event_id = event._id
        evt_computed.ts_computed = get_ion_ts()

        try:
            summary = self._get_event_summary(event)
            evt_computed.event_summary = summary

            spc_attrs = ["%s:%s" % (k, str(getattr(event, k))[:50]) for k in sorted(event.__dict__.keys()) if k not in ['_id', '_rev', 'type_', 'origin', 'origin_type', 'ts_created', 'base_types']]
            evt_computed.special_attributes = ", ".join(spc_attrs)

            evt_computed.event_attributes_formatted = pprint.pformat(event.__dict__)
        except Exception as ex:
            log.exception("Error computing EventComputedAttributes for event %s" % event)

        return evt_computed

    def _get_event_summary(self, event):
        event_types = [event.type_] + event.base_types
        summary = ""
        if "ResourceLifecycleEvent" in event_types:
            summary = "%s lifecycle state change: %s_%s" % (event.origin_type, event.lcstate, event.availability)
        elif "ResourceModifiedEvent" in event_types:
            summary = "%s modified: %s" % (event.origin_type, event.sub_type)
        elif "ResourceIssueReportedEvent" in event_types:
            summary = "Issue created: %s" % (event.description)

        elif "ResourceAgentStateEvent" in event_types:
            summary = "%s agent state change: %s" % (event.origin_type, event.state)
        elif "ResourceAgentResourceStateEvent" in event_types:
            summary = "%s agent resource state change: %s" % (event.origin_type, event.state)
        elif "ResourceAgentConfigEvent" in event_types:
            summary = "%s agent config set: %s" % (event.origin_type, event.config)
        elif "ResourceAgentResourceConfigEvent" in event_types:
            summary = "%s agent resource config set: %s" % (event.origin_type, event.config)
        elif "ResourceAgentCommandEvent" in event_types:
            summary = "%s agent command '%s(%s)' succeeded: %s" % (event.origin_type, event.command, event.execute_command, "" if event.result is None else event.result)
        elif "ResourceAgentErrorEvent" in event_types:
            summary = "%s agent command '%s(%s)' failed: %s:%s (%s)" % (event.origin_type, event.command, event.execute_command, event.error_type, event.error_msg, event.error_code)
        elif "ResourceAgentAsyncResultEvent" in event_types:
            summary = "%s agent async command '%s(%s)' succeeded: %s" % (event.origin_type, event.command, event.desc, "" if event.result is None else event.result)

        elif "ResourceAgentResourceCommandEvent" in event_types:
            summary = "%s agent resource command '%s(%s)' executed: %s" % (event.origin_type, event.command, event.execute_command, "OK" if event.result is None else event.result)
        elif "DeviceStatusEvent" in event_types:
            summary = "%s '%s' status change: %s" % (event.origin_type, event.sub_type, DeviceStatusType._str_map.get(event.status,"???"))
        elif "DeviceOperatorEvent" in event_types or "ResourceOperatorEvent" in event_types:
            summary = "Operator entered: %s" % event.description

        elif "OrgMembershipGrantedEvent" in event_types:
            summary = "Joined Org '%s' as member" % (event.org_name)
        elif "OrgMembershipCancelledEvent" in event_types:
            summary = "Cancelled Org '%s' membership" % (event.org_name)
        elif "UserRoleGrantedEvent" in event_types:
            summary = "Granted %s in Org '%s'" % (event.role_name, event.org_name)
        elif "UserRoleRevokedEvent" in event_types:
            summary = "Revoked %s in Org '%s'" % (event.role_name, event.org_name)
        elif "ResourceSharedEvent" in event_types:
            summary = "%s shared in Org: '%s'" % (event.sub_type, event.org_name)
        elif "ResourceUnsharedEvent" in event_types:
            summary = "%s unshared in Org: '%s'" % (event.sub_type, event.org_name)
        elif "ResourceCommitmentCreatedEvent" in event_types:
            summary = "%s commitment created in Org: '%s'" % (event.commitment_type, event.org_name)
        elif "ResourceCommitmentReleasedEvent" in event_types:
            summary = "%s commitment released in Org: '%s'" % (event.commitment_type, event.org_name)


#        if event.description and summary:
#            summary = summary + ". " + event.description
#        elif event.description:
#            summary = event.description
        return summary

    def get_user_notifications(self, user_info_id=''):
        """
        Get the notification request objects that are subscribed to by the user

        @param user_info_id str

        @retval notifications list of NotificationRequest objects
        """

        if self.user_info.has_key(user_info_id):
            notifications = self.user_info[user_info_id]['notifications']

            log.debug("Got %s notifications, for the user: %s", len(notifications), user_info_id)

            for notif in list(notifications):  # iterate over a copy since items may be removed
                # remove notifications that have expired
                if notif.temporal_bounds.end_datetime != '':
                    log.debug("removing notification: %s", notif)
                    notifications.remove(notif)

            return notifications

#            ret = IonObject(OT.ComputedListValue)
#
#            if notifications:
#                ret.value = notifications
#                ret.status = ComputedValueAvailability.PROVIDED
#            else:
#                ret.status = ComputedValueAvailability.NOTAVAILABLE
#            return ret
#        else:
#            return None

    def create_worker(self, number_of_workers=1):
        """
        Creates notification workers

        @param number_of_workers int
        @retval pids list

        """

        pids = []

        for n in xrange(number_of_workers):

            process_definition = ProcessDefinition( name='notification_worker_%s' % n)

            process_definition.executable = {
                'module': 'ion.processes.data.transforms.notification_worker',
                'class':'NotificationWorker'
            }
            process_definition_id = self.process_dispatcher.create_process_definition(process_definition=process_definition)

            # ------------------------------------------------------------------------------------
            # Process Spawning
            # ------------------------------------------------------------------------------------

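            # create_process reserves a process id up front; schedule_process below
            # then spawns the worker onto that id with the supplied configuration.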
            pid2 = self.process_dispatcher.create_process(process_definition_id)

            #@todo put in a configuration
            configuration = {}
            configuration['process'] = dict({
                'name': 'notification_worker_%s' % n,
                'type':'simple',
                'queue_name': 'notification_worker_queue'
            })

            pid  = self.process_dispatcher.schedule_process(
                process_definition_id,
                configuration = configuration,
                process_id=pid2
            )

            pids.append(pid)

        return pids

    def process_batch(self, start_time = '', end_time = ''):
        """
        This method is launched when a process_batch event is received. The user info dictionary maintained
        by the User Notification Service is used to query the event repository for all events for a particular
        user that have occurred in a provided time interval, and then an email is sent to the user containing
        the digest of all the events.

        @param start_time int milliseconds
        @param end_time int milliseconds
        """
        self.smtp_client = setting_up_smtp_client()

        if end_time <= start_time:
            return

        for user_id, value in self.user_info.iteritems():

            notifications = value['notifications']
            notifications_disabled = value['notifications_disabled']
            notifications_daily_digest = value['notifications_daily_digest']


            # Ignore users who do NOT want batch notifications or who have disabled the delivery switch.
            # If notification preferences have not been set for the user, the defaults apply and no batch email is sent.
            if notifications_disabled or not notifications_daily_digest:
                continue

            events_for_message = []

            search_time = "SEARCH 'ts_created' VALUES FROM %s TO %s FROM 'events_index'" % (start_time, end_time)

            for notification in notifications:
                # If the notification request has expired, then do not use it in the search
                if notification.temporal_bounds.end_datetime:
                    continue

                if notification.origin:
                    search_origin = 'search "origin" is "%s" from "events_index"' % notification.origin
                else:
                    search_origin = 'search "origin" is "*" from "events_index"'

                if notification.origin_type:
                    search_origin_type= 'search "origin_type" is "%s" from "events_index"' % notification.origin_type
                else:
                    search_origin_type= 'search "origin_type" is "*" from "events_index"'

                if notification.event_type:
                    search_event_type = 'search "type_" is "%s" from "events_index"' % notification.event_type
                else:
                    search_event_type = 'search "type_" is "*" from "events_index"'

                search_string = search_time + ' and ' + search_origin + ' and ' + search_origin_type + ' and ' + search_event_type

                # get the list of ids corresponding to the events
                log.debug('process_batch  search_string: %s', search_string)
                ret_vals = self.discovery.parse(search_string)

                events_for_message.extend(self.datastore.read_mult(ret_vals))

            log.debug("Found following events of interest to user, %s: %s", user_id, events_for_message)

            # send a notification email to each user using a _send_email() method
            if events_for_message:
                self.format_and_send_email(events_for_message = events_for_message,
                                            user_id = user_id,
                                            smtp_client=self.smtp_client)

        self.smtp_client.quit()


    def format_and_send_email(self, events_for_message = None, user_id = None, smtp_client = None):
        """
        Format the message for a particular user containing information about the events they are to be notified about

        @param events_for_message list
        @param user_id str
        """

        message = str(events_for_message)
        log.debug("The user, %s, will get the following events in his batch notification email: %s", user_id, message)

        msg_body = ''
        count = 1

        for event in events_for_message:

            ts_created = _convert_to_human_readable(event.ts_created)

            msg_body += string.join(("\r\n",
                                     "Event %s: %s" %  (count, event),
                                     "",
                                     "Originator: %s" %  event.origin,
                                     "",
                                     "Description: %s" % event.description or "Not provided",
                                     "",
                                     "ts_created: %s" %  ts_created,
                                     "\r\n",
                                     "------------------------"
                                     "\r\n"))
            count += 1

        msg_body += "You received this notification from ION because you asked to be " +\
                    "notified about this event from this source. " +\
                    "To modify or remove notifications about this event, " +\
                    "please access My Notifications Settings in the ION Web UI. " +\
                    "Do not reply to this email.  This email address is not monitored " +\
                    "and the emails will not be read. \r\n "


        log.debug("The email has the following message body: %s", msg_body)

        msg_subject = "(SysName: " + get_sys_name() + ") ION event "

        self.send_batch_email(  msg_body = msg_body,
            msg_subject = msg_subject,
            msg_recipient=self.user_info[user_id]['user_contact'].email,
            smtp_client=smtp_client )


    def send_batch_email(self, msg_body = None, msg_subject = None, msg_recipient = None, smtp_client = None):
        """
        Send the email

        @param msg_body str
        @param msg_subject str
        @param msg_recipient str
        @param smtp_client object
        """

        msg = MIMEText(msg_body)
        msg['Subject'] = msg_subject
        msg['From'] = self.ION_NOTIFICATION_EMAIL_ADDRESS
        msg['To'] = msg_recipient
        log.debug("UNS sending batch (digest) email from %s to %s" , self.ION_NOTIFICATION_EMAIL_ADDRESS, msg_recipient)

        smtp_sender = CFG.get_safe('server.smtp.sender')

        smtp_client.sendmail(smtp_sender, [msg_recipient], msg.as_string())

    def update_user_info_object(self, user_id, new_notification):
        """
        Update the UserInfo object so that it carries the latest version of the given notification

        @param user_id str
        @param new_notification NotificationRequest
        """

        #------------------------------------------------------------------------------------
        # read the user
        #------------------------------------------------------------------------------------

        user = self.clients.resource_registry.read(user_id)

        if not user:
            raise BadRequest("No user with the provided user_id: %s" % user_id)

        for item in user.variables:
            if type(item) is dict and item.has_key('name') and item['name'] == 'notifications':
                for notif in item['value']:
                    if notif._id == new_notification._id:
                        log.debug("came here for updating notification")
                        notifications = item['value']
                        notifications.remove(notif)
                        notifications.append(new_notification)
                break
            else:
                log.warning('Invalid variables attribute on UserInfo instance. UserInfo: %s', user)


        #------------------------------------------------------------------------------------
        # update the resource registry
        #------------------------------------------------------------------------------------

        log.debug("user.variables::: %s", user.variables)

        self.clients.resource_registry.update(user)

        return user


    def _get_subscriptions(self, resource_id='', include_nonactive=False):
        """
        This method is used to get the subscriptions to a data product. It returns a list of NotificationRequest
        objects whose origin is set to this data product, so that all the users interested in events from this
        data product, and all of their subscriptions, can be determined.

        @param resource_id
        @param include_nonactive
        @return notification_requests []

        """

        search_origin = 'search "origin" is "%s" from "resources_index"' % resource_id
        ret_vals = self.discovery.parse(search_origin)

        log.debug("Using discovery with search_string: %s", search_origin)
        log.debug("_get_subscriptions() got ret_vals: %s", ret_vals )

        notifications_all = set()
        notifications_active = set()

        object_ids = []
        for item in ret_vals:
            if item['_type'] == 'NotificationRequest':
                object_ids.append(item['_id'])

        notifs = self.clients.resource_registry.read_mult(object_ids)

        log.debug("Got %s notifications here. But they include both active and past notifications", len(notifs))

        if include_nonactive:
            # Add active or retired notification
            notifications_all.update(notifs)
        else:
            for notif in notifs:
                log.debug("Got the end_datetime here: notif.temporal_bounds.end_datetime = %s", notif.temporal_bounds.end_datetime)
                if notif.temporal_bounds.end_datetime == '':
                    log.debug("removing the notification: %s", notif._id)
                    # Add the active notification
                    notifications_active.add(notif)

        if include_nonactive:
            return list(notifications_all)
        else:
            return list(notifications_active)

    def get_subscriptions(self, resource_id='', user_id = '', include_nonactive=False):
        """
        This method takes the user_id as an input parameter. The logic first finds all notification requests for this resource;
        then, if a user_id is provided, it filters to those notifications that this user is associated with.
        """

        # Get the notifications whose origin field has the provided resource_id
        notifs = self._get_subscriptions(resource_id=resource_id, include_nonactive=include_nonactive)

        log.debug("For include_nonactive= %s, UNS fetched the following the notifications subscribed to the resource_id: %s --> %s. "
                      "They are %s in number", include_nonactive,resource_id, notifs, len(notifs))

        if not user_id:
            return notifs

        notifications = []

        # Now find the users who subscribed to the above notifications
        #todo Right now looking at assocs in a loop which is not efficient to find the users linked to these notifications
        # todo(contd) Need to use a more efficient way later
        for notif in notifs:
            notif_id = notif._id
            # Find if the user is associated with this notification request
            ids, _ = self.clients.resource_registry.find_subjects( subject_type = RT.UserInfo, object=notif_id, predicate=PRED.hasNotification, id_only=True)
            log.debug("Got the following users: %s, associated with the notification: %s", ids, notif_id)

            if ids and user_id in ids:
                notifications.append(notif)

        log.debug("For include_nonactive = %s, UNS fetched the following %s notifications subscribed to %s --> %s", include_nonactive,len(notifications),user_id, notifications)

        return notifications

    def get_subscriptions_attribute(self, resource_id='', user_id = '', include_nonactive=False):
        retval = self.get_subscriptions(resource_id=resource_id, user_id=user_id, include_nonactive=include_nonactive)
        container = ComputedListValue(value=retval)
        return container


#    def get_users_who_subscribed(self, resource_id='', include_nonactive=False):
#
#        # Get the notifications whose origin field has the provided resource_id
#        notifications = self.get_subscriptions(resource_id, include_nonactive)
#
#        # Now find the users who subscribed to the above notifications
#        #todo Right now looking at assocs in a loop which is not efficient to find the users linked to these notifications
#        # todo(contd) Need to use a more efficient way later
#
#        user_ids = set()
#        for notif in notifications:
#            notif_id = notif._id
#            # Find the users who are associated with this notification request
#            ids, _ = self.clients.resource_registry.find_subjects( subject_type = RT.UserInfo, object=notif_id, predicate=PRED.hasNotification, id_only=True)
#            user_ids.add(ids)
#
#        return user_ids

    def _notification_in_notifications(self, notification = None, notifications = None):

        for id, notif in notifications.iteritems():
            if notif.name == notification.name and \
            notif.origin == notification.origin and \
            notif.origin_type == notification.origin_type and \
            notif.event_type == notification.event_type:
                return id
        return None

    def _update_notification_in_notifications_dict(self, new_notification = None, notifications = None ):

        for id, notif in notifications.iteritems():
            if id == new_notification._id:
                notifications.pop(id)
                notifications[id] = new_notification
                break


    def load_user_info(self):
        '''
        Method to load the user info dictionary used by the notification workers and the UNS

        @retval user_info dict
        '''

        users, _ = self.clients.resource_registry.find_resources(restype= RT.UserInfo)

        user_info = {}

        if not users:
            return {}

        for user in users:
            notifications = []
            notifications_disabled = False
            notifications_daily_digest = False


            for variable in user.variables:
                if type(variable) is dict and variable.has_key('name'):
                    if variable['name'] == 'notifications':
                        notifications = variable['value']

                    if variable['name'] == 'notifications_daily_digest':
                        notifications_daily_digest = variable['value']

                    if variable['name'] == 'notifications_disabled':
                        notifications_disabled = variable['value']

                else:
                    log.warning('Invalid variables attribute on UserInfo instance. UserInfo: %s', user)

            user_info[user._id] = { 'user_contact' : user.contact, 'notifications' : notifications,
                                    'notifications_daily_digest' : notifications_daily_digest, 'notifications_disabled' : notifications_disabled}

        return user_info
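For orientation, the dictionary built by load_user_info() above has the shape sketched below (a minimal illustration; the UserInfo id, contact value and notification entries are placeholders). The reverse lookup map used elsewhere in the service is then derived from it via calculate_reverse_user_info().

# Illustrative shape of the dict returned by load_user_info(); keys are UserInfo
# resource ids and the values mirror the fields assembled above.
user_info = {
    'user_info_id_1': {
        'user_contact': None,                 # actually the user's ContactInformation object
        'notifications': [],                  # the user's NotificationRequest objects
        'notifications_daily_digest': True,   # value of the 'notifications_daily_digest' variable
        'notifications_disabled': False,      # value of the 'notifications_disabled' variable
    },
}

# The notification workers and the UNS work from the reverse map derived from it:
# reverse_user_info = calculate_reverse_user_info(user_info)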
Code example #11
0
class UserNotificationService(BaseUserNotificationService):
    """
    A service that provides users with an API for CRUD methods for notifications.
    """
    def __init__(self, *args, **kwargs):
        self._schedule_ids = []
        BaseUserNotificationService.__init__(self, *args, **kwargs)

    def on_start(self):
        self.ION_NOTIFICATION_EMAIL_ADDRESS = CFG.get_safe('server.smtp.sender')

        # Create an event processor
        self.event_processor = EmailEventProcessor()

        # Dictionaries that maintain information about users and their subscribed notifications
        self.user_info = {}

        # The reverse_user_info is calculated from the user_info dictionary
        self.reverse_user_info = {}

        # Get the clients
        # @TODO: Why are these not dependencies in the service YML???
        self.discovery = DiscoveryServiceClient()
        self.process_dispatcher = ProcessDispatcherServiceClient()

        self.event_publisher = EventPublisher()
        self.datastore = self.container.datastore_manager.get_datastore('events')

        self.start_time = get_ion_ts()

        #------------------------------------------------------------------------------------
        # Create an event subscriber for Reload User Info events
        #------------------------------------------------------------------------------------

        def reload_user_info(event_msg, headers):
            """
            Callback method for the subscriber to ReloadUserInfoEvent
            """

            notification_id =  event_msg.notification_id
            log.debug("(UNS instance received a ReloadNotificationEvent. The relevant notification_id is %s" % notification_id)

            try:
                self.user_info = self.load_user_info()
            except NotFound:
                log.warning("ElasticSearch has not yet loaded the user_index.")

            self.reverse_user_info =  calculate_reverse_user_info(self.user_info)

            log.debug("(UNS instance) After a reload, the user_info: %s" % self.user_info)
            log.debug("(UNS instance) The recalculated reverse_user_info: %s" % self.reverse_user_info)

        # the subscriber for the ReloadUserInfoEvent
        self.reload_user_info_subscriber = EventSubscriber(
            event_type=OT.ReloadUserInfoEvent,
            origin='UserNotificationService',
            callback=reload_user_info
        )
        self.add_endpoint(self.reload_user_info_subscriber)

    def on_quit(self):
        """
        Handles stop/terminate.

        Cleans up subscribers spawned here, terminates any scheduled tasks to the scheduler.
        """
        for sid in self._schedule_ids:
            try:
                self.clients.scheduler.cancel_timer(sid)
            except IonException as ex:
                log.info("Ignoring exception while cancelling schedule id (%s): %s: %s", sid, ex.__class__.__name__, ex)

        super(UserNotificationService, self).on_quit()

    def set_process_batch_key(self, process_batch_key = ''):
        """
        This method allows an operator to set the process_batch_key, a string.
        Once this method is used by the operator, the UNS will start listening for timer events
        published by the scheduler with origin = process_batch_key.

        @param process_batch_key str
        """
        def process(event_msg, headers):
            self.end_time = get_ion_ts()

            # run the process_batch() method
            self.process_batch(start_time=self.start_time, end_time=self.end_time)
            self.start_time = self.end_time

        # the subscriber for the batch processing
        # To trigger the batch notification, have the scheduler create a timer with event_origin = process_batch_key
        self.batch_processing_subscriber = EventSubscriber(
            event_type=OT.TimerEvent,
            origin=process_batch_key,
            callback=process
        )
        self.add_endpoint(self.batch_processing_subscriber)
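    # Illustrative wiring of the batch key (a sketch, not part of the original code): an
    # operator first tells the UNS which origin to listen on, then asks the scheduler to
    # publish TimerEvents with that origin. The scheduler call below is an assumed client
    # API and may differ in name or signature.
    #
    #   uns_client.set_process_batch_key(process_batch_key='uns_daily_digest')
    #   scheduler_client.create_interval_timer(start_time=get_ion_ts(),
    #                                          interval=24 * 60 * 60,
    #                                          event_origin='uns_daily_digest')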

    def create_notification(self, notification=None, user_id=''):
        """
        Persists the provided NotificationRequest object for the specified Origin id.
        Associate the Notification resource with the user_id string.
        returned id is the internal id by which NotificationRequest will be identified
        in the data store.

        @param notification        NotificationRequest
        @param user_id             str
        @retval notification_id    str
        @throws BadRequest    if object passed has _id or _rev attribute
        """
        if not user_id:
            raise BadRequest("User id not provided.")

        log.debug("Create notification called for user_id: %s, and notification: %s", user_id, notification)

        #---------------------------------------------------------------------------------------------------
        # Persist the Notification object as a resource if it has not already been persisted
        #---------------------------------------------------------------------------------------------------

        notification_id = None
        # if the notification has already been registered, simply use the old id
        existing_user_notifications = self.get_user_notifications(user_info_id=user_id)
        if existing_user_notifications:
            notification_id = self._notification_in_notifications(notification, existing_user_notifications)

        # If the notification has not been registered yet, register it now and get a new id

        temporal_bounds = TemporalBounds()
        temporal_bounds.start_datetime = get_ion_ts()
        temporal_bounds.end_datetime = ''

        if not notification_id:
            notification.temporal_bounds = temporal_bounds
            notification_id, rev = self.clients.resource_registry.create(notification)
        else:
            log.debug("Notification object has already been created in resource registry before. No new id to be generated. notification_id: %s", notification_id)
            # Read the old notification already in the resource registry
            notification = self.clients.resource_registry.read(notification_id)

            # Update the temporal bounds of the old notification resource
            notification.temporal_bounds = temporal_bounds

            # Update the notification in the resource registry
            self.clients.resource_registry.update(notification)

            log.debug("The temporal bounds for this resubscribed notification object with id: %s, is: %s", notification._id,notification.temporal_bounds)


        # Link the user and the notification with a hasNotification association
        assocs= self.clients.resource_registry.find_associations(subject=user_id,
                                                                    predicate=PRED.hasNotification,
                                                                    object=notification_id,
                                                                    id_only=True)


        if assocs:
            log.debug("Got an already existing association: %s, between user_id: %s, and notification_id: %s", assocs,user_id,notification_id)
            return notification_id
        else:
            log.debug("Creating association between user_id: %s, and notification_id: %s", user_id, notification_id )
            self.clients.resource_registry.create_association(user_id, PRED.hasNotification, notification_id)

        # read the registered notification request object because this has an _id and is more useful
        notification = self.clients.resource_registry.read(notification_id)

        #-------------------------------------------------------------------------------------------------------------------
        # Generate an event that can be picked by a notification worker so that it can update its user_info dictionary
        #-------------------------------------------------------------------------------------------------------------------
        log.debug("(create notification) Publishing ReloadUserInfoEvent for notification_id: %s", notification_id)

        self.event_publisher.publish_event( event_type= OT.ReloadUserInfoEvent,
            origin="UserNotificationService",
            description= "A notification has been created.",
            notification_id = notification_id)

        return notification_id
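    # Illustrative call sequence (a sketch, not part of the original code): subscribing a
    # user to events from a resource. The field values are placeholders; the fields shown
    # are exactly the ones compared by _notification_in_notifications() when deciding
    # whether an equivalent NotificationRequest already exists and can be reused.
    #
    #   notification = NotificationRequest(name='device_events',
    #                                      origin=instrument_device_id,
    #                                      origin_type='InstrumentDevice',
    #                                      event_type='ResourceLifecycleEvent')
    #   notification_id = uns_client.create_notification(notification=notification,
    #                                                    user_id=user_info_id)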

    def update_notification(self, notification=None, user_id = ''):
        """Updates the provided NotificationRequest object.  Throws NotFound exception if
        an existing version of NotificationRequest is not found.  Throws Conflict if
        the provided NotificationRequest object is not based on the latest persisted
        version of the object.

        @param notification     NotificationRequest
        @throws BadRequest      if object does not have _id or _rev attribute
        @throws NotFound        object with specified id does not exist
        @throws Conflict        object not based on latest persisted object version
        """

        raise NotImplementedError("This method needs to be worked out in terms of implementation")

#        #-------------------------------------------------------------------------------------------------------------------
#        # Get the old notification
#        #-------------------------------------------------------------------------------------------------------------------
#
#        old_notification = self.clients.resource_registry.read(notification._id)
#
#        #-------------------------------------------------------------------------------------------------------------------
#        # Update the notification in the notifications dict
#        #-------------------------------------------------------------------------------------------------------------------
#
#
#        self._update_notification_in_notifications_dict(new_notification=notification,
#                                                        notifications=self.notifications)
#        #-------------------------------------------------------------------------------------------------------------------
#        # Update the notification in the registry
#        #-------------------------------------------------------------------------------------------------------------------
#        '''
#        Since one user should not be able to update the notification request resource without the knowledge of other users
#        who have subscribed to the same notification request, we do not update the resource in the resource registry
#        '''
#
##        self.clients.resource_registry.update(notification)
#
#        #-------------------------------------------------------------------------------------------------------------------
#        # reading up the notification object to make sure we have the newly registered notification request object
#        #-------------------------------------------------------------------------------------------------------------------
#
#        notification_id = notification._id
#        notification = self.clients.resource_registry.read(notification_id)
#
#        #------------------------------------------------------------------------------------
#        # Update the UserInfo object
#        #------------------------------------------------------------------------------------
#
#        user = self.update_user_info_object(user_id, notification)
#
#        #-------------------------------------------------------------------------------------------------------------------
#        # Generate an event that can be picked by notification workers so that they can update their user_info dictionary
#        #-------------------------------------------------------------------------------------------------------------------
#        log.info("(update notification) Publishing ReloadUserInfoEvent for updated notification")
#
#        self.event_publisher.publish_event( event_type= "ReloadUserInfoEvent",
#            origin="UserNotificationService",
#            description= "A notification has been updated.",
#            notification_id = notification_id
#        )

    def read_notification(self, notification_id=''):
        """Returns the NotificationRequest object for the specified notification id.
        Throws exception if id does not match any persisted NotificationRequest
        objects.

        @param notification_id    str
        @retval notification    NotificationRequest
        @throws NotFound    object with specified id does not exist
        """
        notification = self.clients.resource_registry.read(notification_id)

        return notification

    def delete_notification(self, notification_id=''):
        """For now, permanently deletes NotificationRequest object with the specified
        id. Throws exception if id does not match any persisted NotificationRequest.

        @param notification_id    str
        @throws NotFound    object with specified id does not exist
        """

        #-------------------------------------------------------------------------------------------------------------------
        # Stop the event subscriber for the notification
        #-------------------------------------------------------------------------------------------------------------------
        notification_request = self.clients.resource_registry.read(notification_id)

        #-------------------------------------------------------------------------------------------------------------------
        # Update the resource registry
        #-------------------------------------------------------------------------------------------------------------------

        notification_request.temporal_bounds.end_datetime = get_ion_ts()

        self.clients.resource_registry.update(notification_request)

        #-------------------------------------------------------------------------------------------------------------------
        # Find users who are interested in the notification and update the notification in the list maintained by the UserInfo object
        #-------------------------------------------------------------------------------------------------------------------
#        user_ids, _ = self.clients.resource_registry.find_subjects(RT.UserInfo, PRED.hasNotification, notification_id, True)
#
#        for user_id in user_ids:
#            self.update_user_info_object(user_id, notification_request)

        #-------------------------------------------------------------------------------------------------------------------
        # Generate an event that can be picked by a notification worker so that it can update its user_info dictionary
        #-------------------------------------------------------------------------------------------------------------------
        log.info("(delete notification) Publishing ReloadUserInfoEvent for notification_id: %s", notification_id)

        self.event_publisher.publish_event( event_type= OT.ReloadUserInfoEvent,
            origin="UserNotificationService",
            description= "A notification has been deleted.",
            notification_id = notification_id)

#    def delete_notification_from_user_info(self, notification_id):
#        """
#        Helper method to delete the notification from the user_info dictionary
#
#        @param notification_id str
#        """
#
#        user_ids, assocs = self.clients.resource_registry.find_subjects(object=notification_id, predicate=PRED.hasNotification, id_only=True)
#
#        for assoc in assocs:
#            self.clients.resource_registry.delete_association(assoc)
#
#        for user_id in user_ids:
#
#            value = self.user_info[user_id]
#
#            for notif in value['notifications']:
#                if notification_id == notif._id:
#                    # remove the notification
#                    value['notifications'].remove(notif)
#
#        self.reverse_user_info = calculate_reverse_user_info(self.user_info)

    def find_events(self, origin='', type='', min_datetime=0, max_datetime=0, limit=-1, descending=False):
        """
        This method leverages CouchDB views and simple filters. It does not use ElasticSearch.

        Returns a list of events that match the specified search criteria. Will throw a NotFound exception
        if no events exist for the given parameters.

        @param origin         str
        @param type           str
        @param min_datetime   int  seconds
        @param max_datetime   int  seconds
        @param limit          int         (integer limiting the number of results (0 means unlimited))
        @param descending     boolean     (if True, reverse order (of production time) is applied, e.g. most recent first)
        @retval event_list    []
        @throws NotFound    object with specified parameters does not exist
        """
        event_tuples = []

        try:
            event_tuples = self.container.event_repository.find_events(event_type=type, origin=origin, start_ts=min_datetime, end_ts=max_datetime, limit=limit, descending=descending)
        except Exception as exc:
            log.warning("The UNS find_events operation for event origin = %s and type = %s failed. Error message = %s", origin, type, exc.message)

        events = [item[2] for item in event_tuples]
        log.debug("(find_events) UNS found the following relevant events: %s", events)

        return events
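    # Illustrative usage (a sketch): fetch the most recent events published by a resource,
    # newest first, capped at 10 results.
    #
    #   events = uns_client.find_events(origin=resource_id, limit=10, descending=True)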


    # TODO: Uses ElasticSearch. Later extend this to support a broader set of search criteria
    def find_events_extended(self, origin='', type='', min_time= 0, max_time=0, limit=-1, descending=False):
        """Uses Elastic Search. Returns a list of events that match the specified search criteria. Will throw a not NotFound exception
        if no events exist for the given parameters.

        @param origin         str
        @param type           str
        @param min_time   int seconds
        @param max_time   int seconds
        @param limit          int         (integer limiting the number of results (0 means unlimited))
        @param descending     boolean     (if True, reverse order (of production time) is applied, e.g. most recent first)
        @retval event_list    []
        @throws NotFound    object with specified parameters does not exist
        @throws NotFound    object with specified parameters does not exist
        """

        query = []

        if min_time and max_time:
            query.append( "SEARCH 'ts_created' VALUES FROM %s TO %s FROM 'events_index'" % (min_time, max_time))

        if origin:
            query.append( 'search "origin" is "%s" from "events_index"' % origin)

        if type:
            query.append( 'search "type_" is "%s" from "events_index"' % type)

        search_string = ' and '.join(query)


        # get the list of ids corresponding to the events
        ret_vals = self.discovery.parse(search_string)
        if len(query) > 1:
            events = self.datastore.read_mult(ret_vals)
        else:
            events = [i['_source'] for i in ret_vals]

        log.debug("(find_events_extended) Discovery search returned the following event ids: %s", ret_vals)


        log.debug("(find_events_extended) UNS found the following relevant events: %s", events)

        if limit > 0:
            return events[:limit]

        #todo implement time ordering: ascending or descending

        return events
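    # For example, a call such as
    #   find_events_extended(origin='abc', type='ResourceModifiedEvent', min_time=10, max_time=20)
    # composes the following discovery query (the three clauses joined with ' and '):
    #   SEARCH 'ts_created' VALUES FROM 10 TO 20 FROM 'events_index'
    #   and search "origin" is "abc" from "events_index"
    #   and search "type_" is "ResourceModifiedEvent" from "events_index"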

    def publish_event_object(self, event=None):
        """
        This service operation would publish the given event from an event object.

        @param event    !Event
        @retval event   !Event
        """
        event = self.event_publisher.publish_event_object(event_object=event)
        log.info("The publish_event_object(event) method of UNS was used to publish the event: %s", event )

        return event

    def publish_event(self, event_type='', origin='', origin_type='', sub_type='', description='', event_attrs=None):
        """
        This service operation assembles a new Event object based on event_type
        (e.g. via the pyon Event publisher) with optional additional attributes from an event_attrs
        dict of arbitrary attributes.

        @param event_type   str
        @param origin       str
        @param origin_type  str
        @param sub_type     str
        @param description  str
        @param event_attrs  dict
        @retval event       !Event
        """
        event_attrs = event_attrs or {}

        event = self.event_publisher.publish_event(
            event_type = event_type,
            origin = origin,
            origin_type = origin_type,
            sub_type = sub_type,
            description = description,
            **event_attrs
            )
        log.info("The publish_event() method of UNS was used to publish an event: %s", event)

        return event
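    # Illustrative usage (a sketch): publish a simple event through the UNS. Any extra
    # attributes passed via event_attrs must exist on the chosen event type; 'new_state'
    # below is a hypothetical attribute used only for illustration.
    #
    #   event = uns_client.publish_event(event_type='ResourceEvent',
    #                                    origin=resource_id,
    #                                    description='manually published event',
    #                                    event_attrs={'new_state': 'DEPLOYED'})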

    def get_recent_events(self, resource_id='', limit=100):
        """
        Get recent events for use in extended resource computed attribute
        @param resource_id str
        @param limit int
        @retval ComputedListValue with value list of 4-tuple with Event objects
        """

        now = get_ion_ts()
        events = self.find_events(origin=resource_id, limit=limit, max_datetime=now, descending=True)

        ret = IonObject(OT.ComputedEventListValue)
        if events:
            ret.value = events
            ret.computed_list = [get_event_computed_attributes(event) for event in events]
            ret.status = ComputedValueAvailability.PROVIDED
        else:
            ret.status = ComputedValueAvailability.NOTAVAILABLE

        return ret


    def get_user_notifications(self, user_info_id=''):
        """
        Get the notification request objects that are subscribed to by the user

        @param user_info_id str

        @retval notifications list of NotificationRequest objects
        """
        notifications = []
        user_notif_req_objs, _ = self.clients.resource_registry.find_objects(
            subject=user_info_id, predicate=PRED.hasNotification, object_type=RT.NotificationRequest, id_only=False)

        log.debug("Got %s notifications, for the user: %s", len(user_notif_req_objs), user_info_id)

        for notif in user_notif_req_objs:
            # do not include notifications that have expired
            if notif.temporal_bounds.end_datetime == '':
                notifications.append(notif)

        return notifications


    def create_worker(self, number_of_workers=1):
        """
        Creates notification workers

        @param number_of_workers int
        @retval pids list

        """

        pids = []

        for n in xrange(number_of_workers):

            process_definition = ProcessDefinition( name='notification_worker_%s' % n)

            process_definition.executable = {
                'module': 'ion.processes.data.transforms.notification_worker',
                'class':'NotificationWorker'
            }
            process_definition_id = self.process_dispatcher.create_process_definition(process_definition=process_definition)

            # ------------------------------------------------------------------------------------
            # Process Spawning
            # ------------------------------------------------------------------------------------

            pid2 = self.process_dispatcher.create_process(process_definition_id)

            #@todo put in a configuration
            configuration = {}
            configuration['process'] = dict({
                'name': 'notification_worker_%s' % n,
                'type':'simple',
                'queue_name': 'notification_worker_queue'
            })

            pid  = self.process_dispatcher.schedule_process(
                process_definition_id,
                configuration = configuration,
                process_id=pid2
            )

            pids.append(pid)

        return pids
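    # Illustrative usage (a sketch): spawn two notification workers; the returned pids
    # identify the processes scheduled through the process dispatcher.
    #
    #   pids = uns_client.create_worker(number_of_workers=2)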

    def process_batch(self, start_time = '', end_time = ''):
        """
        This method is launched when a process_batch timer event is received. The user info dictionary maintained
        by the User Notification Service is used to query the event repository for all events for a particular
        user that have occurred in a provided time interval, and then an email is sent to the user containing
        the digest of all the events.

        @param start_time int milliseconds
        @param end_time int milliseconds
        """
        self.smtp_client = setting_up_smtp_client()

        if end_time <= start_time:
            return

        for user_id, value in self.user_info.iteritems():

            notifications = self.get_user_notifications(user_info_id=user_id)
            notifications_disabled = value['notifications_disabled']
            notifications_daily_digest = value['notifications_daily_digest']


            # Ignore users who do NOT want batch notifications or who have disabled the delivery switch
            # However, if notification preferences have not been set for the user, use the default mechanism and do not bother
            if notifications_disabled or not notifications_daily_digest:
                continue

            events_for_message = []

            search_time = "SEARCH 'ts_created' VALUES FROM %s TO %s FROM 'events_index'" % (start_time, end_time)

            for notification in notifications:
                # If the notification request has expired, then do not use it in the search
                if notification.temporal_bounds.end_datetime:
                    continue

                if CFG_ELASTIC_SEARCH:
                    if notification.origin:
                        search_origin = 'search "origin" is "%s" from "events_index"' % notification.origin
                    else:
                        search_origin = 'search "origin" is "*" from "events_index"'

                    if notification.origin_type:
                        search_origin_type= 'search "origin_type" is "%s" from "events_index"' % notification.origin_type
                    else:
                        search_origin_type= 'search "origin_type" is "*" from "events_index"'

                    if notification.event_type:
                        search_event_type = 'search "type_" is "%s" from "events_index"' % notification.event_type
                    else:
                        search_event_type = 'search "type_" is "*" from "events_index"'

                    search_string = search_time + ' and ' + search_origin + ' and ' + search_origin_type + ' and ' + search_event_type

                    # get the list of ids corresponding to the events
                    log.debug('process_batch  search_string: %s', search_string)
                    ret_vals = self.discovery.parse(search_string)

                    events_for_message.extend(self.datastore.read_mult(ret_vals))

                else:
                    # Adding a branch
                    event_tuples = self.container.event_repository.find_events(
                        origin=notification.origin,
                        event_type=notification.event_type,
                        start_ts=start_time,
                        end_ts=end_time)
                    events = [item[2] for item in event_tuples]

                    events_for_message.extend(events)

            log.debug("Found following events of interest to user, %s: %s", user_id, events_for_message)

            # send a digest notification email to the user via format_and_send_email()
            if events_for_message:
                self.format_and_send_email(events_for_message = events_for_message,
                                            user_id = user_id,
                                            smtp_client=self.smtp_client)

        self.smtp_client.quit()


    def format_and_send_email(self, events_for_message=None, user_id=None, smtp_client=None):
        """
        Format the message for a particular user containing information about the events he is to be notified about

        @param events_for_message list
        @param user_id str
        """
        message = str(events_for_message)
        log.debug("The user, %s, will get the following events in his batch notification email: %s", user_id, message)

        msg = convert_events_to_email_message(events_for_message, self.clients.resource_registry)
        msg["Subject"] = "(SysName: " + get_sys_name() + ") ION event "
        msg["To"] = self.user_info[user_id]['user_contact'].email
        self.send_batch_email(msg, smtp_client)


    def send_batch_email(self, msg=None, smtp_client=None):
        """
        Send the email

        @param msg MIMEText object of email message
        @param smtp_client object
        """

        if msg is None: msg = {}
        for f in ["Subject", "To"]:
            if not f in msg: raise BadRequest("'%s' not in msg %s" % (f, msg))

        msg_subject = msg["Subject"]
        msg_recipient = msg["To"]

        msg['From'] = self.ION_NOTIFICATION_EMAIL_ADDRESS
        log.debug("UNS sending batch (digest) email from %s to %s",
                  self.ION_NOTIFICATION_EMAIL_ADDRESS,
                  msg_recipient)

        smtp_sender = CFG.get_safe('server.smtp.sender')

        smtp_client.sendmail(smtp_sender, [msg_recipient], msg.as_string())

    def update_user_info_object(self, user_id, new_notification):
        """
        Update the UserInfo object. If the passed-in parameter, old_notification, is None, there is no old notification to remove

        @param user_id str
        @param new_notification NotificationRequest
        """

        # this is not necessary since notifications are not stored in the UserInfo object
        raise NotImplementedError("This method is not necessary because Notifications are not stored in the UserInfo object")

        #------------------------------------------------------------------------------------
        # read the user
        #------------------------------------------------------------------------------------

#        user = self.clients.resource_registry.read(user_id)
#
#        if not user:
#            raise BadRequest("No user with the provided user_id: %s" % user_id)
#
#        for item in user.variables:
#            if type(item) is dict and item.has_key('name') and item['name'] == 'notifications':
#                for notif in item['value']:
#                    if notif._id == new_notification._id:
#                        log.debug("came here for updating notification")
#                        notifications = item['value']
#                        notifications.remove(notif)
#                        notifications.append(new_notification)
#                break
#            else:
#                log.warning('Invalid variables attribute on UserInfo instance. UserInfo: %s', user)
#
#
#        #------------------------------------------------------------------------------------
#        # update the resource registry
#        #------------------------------------------------------------------------------------
#
#        log.debug("user.variables::: %s", user.variables)
#
#        self.clients.resource_registry.update(user)
#
#        return user


    def get_subscriptions(self, resource_id='', user_id = '', include_nonactive=False):
        """
        @param resource_id  a resource id (or other origin) that is the origin of events for notifications
        @param user_id  a UserInfo ID that owns the NotificationRequest
        @param include_nonactive  if False, filter to active NotificationRequest only
        Return all NotificationRequest resources where origin is given resource_id.
        """
        notif_reqs, _ = self.clients.resource_registry.find_resources_ext(
            restype=RT.NotificationRequest, attr_name="origin", attr_value=resource_id, id_only=False)

        log.debug("Got %s active and past NotificationRequests for resource/origin %s", len(notif_reqs), resource_id)

        if not include_nonactive:
            notif_reqs = [nr for nr in notif_reqs if nr.temporal_bounds.end_datetime == '']
            log.debug("Filtered to %s active NotificationRequests", len(notif_reqs))

        if user_id:
            # Get all NotificationRequests (ID) that are associated to given UserInfo_id
            user_notif_req_ids, _ = self.clients.resource_registry.find_objects(
                subject=user_id, predicate=PRED.hasNotification, object_type=RT.NotificationRequest, id_only=True)

            notif_reqs = [nr for nr in notif_reqs if nr._id in user_notif_req_ids]
            log.debug("Filtered to %s NotificationRequests associated to user %s", len(notif_reqs), user_id)

        return notif_reqs
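    # Illustrative usage (a sketch): list only the active NotificationRequests on a resource
    # that a particular user is associated with; pass include_nonactive=True to also include
    # retired requests (those whose temporal_bounds.end_datetime has been set).
    #
    #   notif_reqs = uns_client.get_subscriptions(resource_id=resource_id,
    #                                             user_id=user_info_id,
    #                                             include_nonactive=False)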

    def get_subscriptions_attribute(self, resource_id='', user_id = '', include_nonactive=False):
        retval = self.get_subscriptions(resource_id=resource_id, user_id=user_id, include_nonactive=include_nonactive)
        container = ComputedListValue(value=retval)
        return container


    def _notification_in_notifications(self, notification = None, notifications = None):
        """
        Return the id of a NotificationRequest in the given list that matches the provided
        notification on name, origin, origin_type and event_type, or None if there is no match.
        """
        for notif in notifications:
            if notif.name == notification.name and \
            notif.origin == notification.origin and \
            notif.origin_type == notification.origin_type and \
            notif.event_type == notification.event_type:
                return notif._id
        return None



    def load_user_info(self):
        """
        Method to load the user info dictionary used by the notification workers and the UNS

        @retval user_info dict
        """
        users, _ = self.clients.resource_registry.find_resources(restype=RT.UserInfo)

        user_info = {}

        if not users:
            return {}

        for user in users:
            notifications = []
            notifications_disabled = False
            notifications_daily_digest = False

            # retrieve all the active notifications associated to this user
            notifications = self.get_user_notifications(user_info_id=user)
            log.debug('load_user_info notifications:   %s', notifications)

            for variable in user.variables:
                if type(variable) is dict and variable.has_key('name'):

                    if variable['name'] == 'notifications_daily_digest':
                        notifications_daily_digest = variable['value']

                    if variable['name'] == 'notifications_disabled':
                        notifications_disabled = variable['value']

                else:
                    log.warning('Invalid variables attribute on UserInfo instance. UserInfo: %s', user)

            user_info[user._id] = { 'user_contact' : user.contact, 'notifications' : notifications,
                                    'notifications_daily_digest' : notifications_daily_digest, 'notifications_disabled' : notifications_disabled}

        return user_info


    ##
    ##
    ##  GOVERNANCE FUNCTIONS
    ##
    ##


    def check_subscription_policy(self, process, message, headers):

        try:
            gov_values = GovernanceHeaderValues(headers=headers, process=process, resource_id_required=False)

        except Inconsistent, ex:
            return False, ex.message

        if gov_values.op == 'delete_notification':
            return True, ''

        notification = message['notification']
        resource_id = notification.origin

        if notification.origin_type == RT.Org:
            org = self.clients.resource_registry.read(resource_id)
            if has_org_role(gov_values.actor_roles, org.org_governance_name, [ORG_MEMBER_ROLE]):
                return True, ''
        else:
            orgs,_ = self.clients.resource_registry.find_subjects(subject_type=RT.Org, predicate=PRED.hasResource, object=resource_id, id_only=False)
            for org in orgs:
                if (has_org_role(gov_values.actor_roles, org.org_governance_name, [ORG_MEMBER_ROLE])):
                    return True, ''

        return False, '%s(%s) has been denied since the user is not a member of any org to which the resource id %s belongs' % (process.name, gov_values.op, resource_id)
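A note on the notification lifecycle implied by the code above: NotificationRequest resources are never physically removed. delete_notification() retires a request by stamping temporal_bounds.end_datetime, and the read paths (get_user_notifications, get_subscriptions, process_batch) treat an empty end_datetime as "still active". A minimal sketch of that check, with a hypothetical request object:

# Minimal sketch (illustrative only): how "active" is decided throughout the service.
def is_active(notification_request):
    # an empty end_datetime means the request has not been retired yet
    return notification_request.temporal_bounds.end_datetime == ''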
Code example #12
0
class DiscoveryIntTest(IonIntegrationTestCase):
    def setUp(self):
        super(DiscoveryIntTest, self).setUp()
        config = DotDict()
        config.bootstrap.use_es = True

        self._start_container()
        self.addCleanup(DiscoveryIntTest.es_cleanup)
        self.container.start_rel_from_url('res/deploy/r2deploy.yml', config)

        self.discovery               = DiscoveryServiceClient()
        self.catalog                 = CatalogManagementServiceClient()
        self.ims                     = IndexManagementServiceClient()
        self.rr                      = ResourceRegistryServiceClient()
        self.dataset_management      = DatasetManagementServiceClient()
        self.pubsub_management       = PubsubManagementServiceClient()
        self.data_product_management = DataProductManagementServiceClient()

    @staticmethod
    def es_cleanup():
        es_host = CFG.get_safe('server.elasticsearch.host', 'localhost')
        es_port = CFG.get_safe('server.elasticsearch.port', '9200')
        es = ep.ElasticSearch(
            host=es_host,
            port=es_port,
            timeout=10
        )
        indexes = STD_INDEXES.keys()
        indexes.append('%s_resources_index' % get_sys_name().lower())
        indexes.append('%s_events_index' % get_sys_name().lower())

        for index in indexes:
            IndexManagementService._es_call(es.river_couchdb_delete,index)
            IndexManagementService._es_call(es.index_delete,index)


    def poll(self, tries, callback, *args, **kwargs):
        '''
        Polling wrapper for queries
        Elasticsearch may not index and cache the changes right away, so we may need
        a couple of tries and a little time before the results show up.
        '''
        for i in xrange(tries):
            retval = callback(*args, **kwargs)
            if retval:
                return retval
            time.sleep(0.2)
        return None


    def test_traversal(self):
        dp        = DataProcess()
        transform = Transform()
        pd        = ProcessDefinition()

        dp_id, _        = self.rr.create(dp)
        transform_id, _ = self.rr.create(transform)
        pd_id, _        = self.rr.create(pd)

        self.rr.create_association(subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.rr.create_association(subject=transform_id, object=pd_id, predicate=PRED.hasProcessDefinition)

        results = self.discovery.traverse(dp_id)
        results.sort()
        correct = [pd_id, transform_id]
        correct.sort()
        self.assertTrue(results == correct, '%s' % results)

    def test_iterative_traversal(self):
        dp        = DataProcess()
        transform = Transform()
        pd        = ProcessDefinition()

        dp_id, _        = self.rr.create(dp)
        transform_id, _ = self.rr.create(transform)
        pd_id, _        = self.rr.create(pd)

        self.rr.create_association(subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.rr.create_association(subject=transform_id, object=pd_id, predicate=PRED.hasProcessDefinition)

        results = self.discovery.iterative_traverse(dp_id)
        results.sort()
        correct = [transform_id]
        self.assertTrue(results == correct)

        results = self.discovery.iterative_traverse(dp_id, 1)
        results.sort()
        correct = [transform_id, pd_id]
        correct.sort()
        self.assertTrue(results == correct)
    
    @skipIf(not use_es, 'No ElasticSearch')
    def test_view_crud(self):
        view_id = self.discovery.create_view('big_view',fields=['name'])
        catalog_id = self.discovery.list_catalogs(view_id)[0]
        index_ids = self.catalog.list_indexes(catalog_id)
        self.assertTrue(len(index_ids))

        view = self.discovery.read_view(view_id)
        self.assertIsInstance(view,View)
        self.assertTrue(view.name == 'big_view')

        view.name = 'not_so_big_view'

        self.discovery.update_view(view)

        view = self.discovery.read_view(view_id)
        self.assertTrue(view.name == 'not_so_big_view')

        self.discovery.delete_view(view_id)
        with self.assertRaises(NotFound):
            self.discovery.read_view(view_id)

    def test_view_best_match(self):
        #---------------------------------------------------------------
        # Matches the best catalog available OR creates a new one
        #---------------------------------------------------------------
        catalog_id = self.catalog.create_catalog('dev', keywords=['name','model'])
        view_id    = self.discovery.create_view('exact_view', fields=['name','model'])
        catalog_ids = self.discovery.list_catalogs(view_id)
        self.assertTrue(catalog_ids == [catalog_id])

        view_id = self.discovery.create_view('another_view', fields=['name','model'])
        catalog_ids = self.discovery.list_catalogs(view_id)
        self.assertTrue(catalog_ids == [catalog_id])

        view_id = self.discovery.create_view('big_view', fields=['name'])
        catalog_ids = self.discovery.list_catalogs(view_id)
        self.assertTrue(catalog_ids != [catalog_id])

    @skipIf(not use_es, 'No ElasticSearch')
    def test_basic_searching(self):

        #- - - - - - - - - - - - - - - - - 
        # set up the fake resources
        #- - - - - - - - - - - - - - - - - 

        instrument_pool = [
            InstrumentDevice(name='sonobuoy1', firmware_version='1'),
            InstrumentDevice(name='sonobuoy2', firmware_version='2'),
            InstrumentDevice(name='sonobuoy3', firmware_version='3')
        ]
        for instrument in instrument_pool:
            self.rr.create(instrument)

        view_id = self.discovery.create_view('devices', fields=['firmware_version'])

        search_string = "search 'firmware_version' is '2' from '%s'"%view_id
        results = self.poll(5, self.discovery.parse,search_string)
        result  = results[0]['_source']
        self.assertIsInstance(result, InstrumentDevice)
        self.assertTrue(result.name == 'sonobuoy2')
        self.assertTrue(result.firmware_version == '2')


    @skipIf(not use_es, 'No ElasticSearch')
    def test_associative_searching(self):

        dp_id,_ = self.rr.create(DataProduct('test_foo'))
        ds_id,_ = self.rr.create(Dataset('test_bar', registered=True))
        self.rr.create_association(subject=dp_id, object=ds_id, predicate='hasDataset')

        search_string = "search 'type_' is 'Dataset' from 'resources_index' and belongs to '%s'" % dp_id

        results = self.poll(5, self.discovery.parse,search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(ds_id in results)

    def test_iterative_associative_searching(self):
        #--------------------------------------------------------------------------------
        # Tests the ability to limit the iterations
        #--------------------------------------------------------------------------------
        dp        = DataProcess()
        transform = Transform()
        pd        = ProcessDefinition()

        dp_id, _        = self.rr.create(dp)
        transform_id, _ = self.rr.create(transform)
        pd_id, _        = self.rr.create(pd)

        self.rr.create_association(subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.rr.create_association(subject=transform_id, object=pd_id, predicate=PRED.hasProcessDefinition)

        search_string = "belongs to '%s' depth 1" % dp_id
        results = self.poll(5, self.discovery.parse,search_string)
        results = list([i._id for i in results])
        correct = [transform_id]
        self.assertTrue(results == correct, '%s' % results)

        search_string = "belongs to '%s' depth 2" % dp_id
        results = self.poll(5, self.discovery.parse,search_string)
        results = list([i._id for i in results])
        results.sort()
        correct = [transform_id, pd_id]
        correct.sort()
        self.assertTrue(results == correct)


    @skipIf(not use_es, 'No ElasticSearch')
    def test_ranged_value_searching(self):
        discovery = self.discovery
        rr        = self.rr
        
        view_id = discovery.create_view('bank_view', fields=['cash_balance'])
        bank_id, _ = rr.create(BankAccount(name='broke', cash_balance=10))

        search_string = "search 'cash_balance' values from 0 to 100 from '%s'" % view_id

        results = self.poll(5, discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == bank_id)
        
        bank_id, _ = rr.create(BankAccount(name='broke', cash_balance=90))

        search_string = "search 'cash_balance' values from 80 from '%s'" % view_id

        results = self.poll(5, discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == bank_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_collections_searching(self):

        site_id, _ = self.rr.create(Site(name='black_mesa'))
        view_id    = self.discovery.create_view('big', fields=['name'])

        # Add the site to a new collection
        collection_id = self.ims.create_collection('resource_collection', [site_id])

        search_string = "search 'name' is '*' from '%s' and in '%s'" %(view_id, collection_id)

        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0] == site_id, '%s' % results)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_search_by_name(self):
        inst_dev = InstrumentDevice(name='test_dev',serial_number='ABC123')

        dev_id, _ = self.rr.create(inst_dev)
        self.discovery.create_view('devs',fields=['name','serial_number'])

        search_string = "search 'serial_number' is 'abc*' from 'devs'"
        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dev_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_search_by_name_index(self):
        inst_dev = InstrumentDevice(name='test_dev',serial_number='ABC123')

        dev_id, _ = self.rr.create(inst_dev)
        search_string = "search 'serial_number' is 'abc*' from 'resources_index'"
        
        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dev_id)

        bank_acc = BankAccount(name='blah', cash_balance=10)
        res_id , _ = self.rr.create(bank_acc)

        search_string = "search 'cash_balance' values from 0 to 100 from 'resources_index'"

        results = self.poll(9, self.discovery.parse,search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == res_id)

    #@skipIf(not use_es, 'No ElasticSearch')
    @skip('Skip until time to refactor, data_format is removed from DataProduct resource')
    def test_data_product_search(self):

        # Create the dataproduct
        dp = DataProduct(name='test_product')
        dp.data_format.name = 'test_signal'
        dp.data_format.description = 'test signal'
        dp.data_format.character_set = 'utf8'
        dp.data_format.nominal_sampling_rate_maximum = '44000'
        dp.data_format.nominal_sampling_rate_minimum = '44000'
        dp.CDM_data_type = 'basic'
        dp_id, _ = self.rr.create(dp)

        search_string = "search 'data_format.name' is 'test_signal' from 'data_products_index'"
        results = self.poll(9, self.discovery.parse, search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)

        search_string = "search 'CDM_data_type' is 'basic' from 'data_products_index'"
        results = self.poll(9, self.discovery.parse, search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)
        
        search_string = "search 'data_format.character_set' is 'utf8' from 'data_products_index'"
        results = self.poll(9, self.discovery.parse, search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_events_search(self):
        # Create a resource to force a new event

        dp = DataProcess()
        dp_id, rev = self.rr.create(dp)

        search_string = "SEARCH 'origin' IS '%s' FROM 'events_index'" % dp_id

        results = self.poll(9, self.discovery.parse,search_string)
        origin_type = results[0]['_source'].origin_type
        origin_id = results[0]['_source'].origin

        self.assertTrue(origin_type == RT.DataProcess)
        self.assertTrue(origin_id == dp_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_geo_distance_search(self):

        pd = PlatformDevice(name='test_dev')

        pd_id, _ = self.rr.create(pd)

        search_string = "search 'index_location' geo distance 20 km from lat 0 lon 0 from 'devices_index'"

        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')

        self.assertTrue(results[0]['_id'] == pd_id)
        self.assertTrue(results[0]['_source'].name == 'test_dev')
   
    @skipIf(not use_es, 'No ElasticSearch')
    def test_geo_bbox_search(self):

        pd = PlatformDevice(name='test_dev')
        pd.index_location.lat = 5
        pd.index_location.lon = 5

        pd_id, _ = self.rr.create(pd)

        search_string = "search 'index_location' geo box top-left lat 10 lon 0 bottom-right lat 0 lon 10 from 'devices_index'"

        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')

        self.assertTrue(results[0]['_id'] == pd_id)
        self.assertTrue(results[0]['_source'].name == 'test_dev')


    @skipIf(not use_es, 'No ElasticSearch')
    def test_time_search(self):
        today     = date.today()
        yesterday = today - timedelta(days=1)
        tomorrow  = today + timedelta(days=1)

        data_product = DataProduct()
        dp_id, _ = self.rr.create(data_product)
        
        search_string = "search 'ts_created' time from '%s' to '%s' from 'data_products_index'" % (yesterday, tomorrow)

        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results,'Results not found')

        self.assertTrue(results[0]['_id'] == dp_id)
        
        search_string = "search 'ts_created' time from '%s' from 'data_products_index'" % yesterday

        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results,'Results not found')

        self.assertTrue(results[0]['_id'] == dp_id)

        
    @skipIf(not use_es, 'No ElasticSearch')
    def test_user_search(self):
        user = UserInfo()
        user.name = 'test'
        user.contact.phones.append('5551212')

        user_id, _ = self.rr.create(user)

        search_string = 'search "name" is "test" from "users_index"'

        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')

        self.assertTrue(results[0]['_id'] == user_id)
        self.assertTrue(results[0]['_source'].name == 'test')

        search_string = 'search "contact.phones" is "5551212" from "users_index"'
        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')

        self.assertTrue(results[0]['_id'] == user_id)
        self.assertTrue(results[0]['_source'].name == 'test')


    @skipIf(not use_es, 'No ElasticSearch')
    def test_subobject_search(self):
        contact = ContactInformation()
        contact.email = '*****@*****.**'
        contact.individual_name_family = 'Tester'
        contact.individual_names_given = 'Intern'

        dp = DataProduct(name='example')
        dp.contacts.append(contact)

        dp_id,_ = self.rr.create(dp)

        #--------------------------------------------------------------------------------
        # Example using the full field name
        #--------------------------------------------------------------------------------
        search_string = 'search "contacts.email" is "*****@*****.**" from "data_products"'
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)
        self.assertEquals(results[0]['_source'].name, 'example')

        #--------------------------------------------------------------------------------
        # Example using a sub-object's field name (ambiguous searching)
        #--------------------------------------------------------------------------------
        search_string = 'search "individual_names_given" is "Intern" from "data_products"'
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)
        self.assertEquals(results[0]['_source'].name, 'example')

    @skipIf(not use_es, 'No ElasticSearch')
    def test_descriptive_phrase_search(self):
        dp = DataProduct(name='example', description='This is simply a description for this data product')
        dp_id, _ = self.rr.create(dp)

        search_string = 'search "description" like "description for" from "data_products"'
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)
        self.assertEquals(results[0]['_source'].name, 'example')


    @skipIf(not use_es, 'No ElasticSearch')
    def test_ownership_searching(self):
        # Create two data products so that there is competition in the search: one is parsed
        # (with conductivity as a parameter) and the other is raw
        dp = DataProduct(name='example dataproduct')
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict')
        stream_def_id = self.pubsub_management.create_stream_definition('ctd parsed', parameter_dictionary_id=pdict_id)
        tdom, sdom = time_series_domain()
        dp.spatial_domain = sdom.dump()
        dp.temporal_domain = tdom.dump()
        dp_id = self.data_product_management.create_data_product(dp, stream_definition_id=stream_def_id, exchange_point='xp1')

        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_raw_param_dict')
        stream_def_id = self.pubsub_management.create_stream_definition('ctd raw', parameter_dictionary_id=pdict_id)
        dp = DataProduct(name='WRONG')
        dp.spatial_domain = sdom.dump()
        dp.temporal_domain = tdom.dump()
        self.data_product_management.create_data_product(dp, stream_definition_id=stream_def_id, exchange_point='xp1')

        parameter_search = 'search "name" is "conductivity" from "resources_index"'
        results = self.poll(9, self.discovery.parse, parameter_search)
        param_id = results[0]['_id']

        data_product_search = 'search "name" is "*" from "data_products_index" and has "%s"' % param_id
        results = self.poll(9, self.discovery.parse, data_product_search)
        print results
        self.assertEquals(results[0], dp_id)
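
        # Illustrative note (a reading of the query above, not documented here): the
        # trailing 'and has "<id>"' clause in data_product_search asks the Discovery
        # service to keep only "data_products_index" hits associated with the parameter
        # resource found by the first query, so the fully assembled string would look
        # roughly like:
        #
        #   'search "name" is "*" from "data_products_index" and has "abc123"'
        #
        # where "abc123" stands in for the hypothetical parameter resource id.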
Code example #13
class UserNotificationService(BaseUserNotificationService):
    """
    A service that provides users with an API for CRUD methods for notifications.
    """

    def on_start(self):

        #---------------------------------------------------------------------------------------------------
        # Get the event Repository
        #---------------------------------------------------------------------------------------------------

        self.event_repo = self.container.instance.event_repository

        self.smtp_client = setting_up_smtp_client()

        self.ION_NOTIFICATION_EMAIL_ADDRESS = '*****@*****.**'

        #---------------------------------------------------------------------------------------------------
        # Create an event processor
        #---------------------------------------------------------------------------------------------------

        self.event_processor = EmailEventProcessor(self.smtp_client)

        #---------------------------------------------------------------------------------------------------
        # load event originators, types, and table
        #---------------------------------------------------------------------------------------------------

        self.event_types = CFG.event.types
        self.event_table = {}

        #---------------------------------------------------------------------------------------------------
        # Get the clients
        #---------------------------------------------------------------------------------------------------

        self.discovery = DiscoveryServiceClient()
        self.process_dispatcher = ProcessDispatcherServiceClient()
        self.datastore_manager = DatastoreManager()

        self.event_publisher = EventPublisher()
        self.scheduler_service = SchedulerService()

    def on_quit(self):

        pass

    def create_notification(self, notification=None, user_id=''):
        """
        Persists the provided NotificationRequest object for the specified Origin id.
        Associate the Notification resource with the user_id string.
        The returned id is the internal id by which the NotificationRequest will be identified
        in the data store.

        @param notification        NotificationRequest
        @param user_id             str
        @retval notification_id    str
        @throws BadRequest    if object passed has _id or _rev attribute

        """

        if not user_id:
            raise BadRequest("User id not provided.")

        #---------------------------------------------------------------------------------------------------
        # Persist Notification object as a resource if it has not already been persisted
        #---------------------------------------------------------------------------------------------------

        # find all notifications in the system
        notifs, _ = self.clients.resource_registry.find_resources(restype = RT.NotificationRequest)

        # if the notification has already been registered, simply use the old id
        if notification in notifs:
            log.warning("Notification object has already been created in resource registry before for another user. No new id to be generated.")
            notification_id = notification._id
        else:
            # since the notification has not been registered yet, register it and get the id
            notification_id, _ = self.clients.resource_registry.create(notification)

        #-------------------------------------------------------------------------------------------------------------------
        # read the registered notification request object because this has an _id and is more useful
        #-------------------------------------------------------------------------------------------------------------------

        notification = self.clients.resource_registry.read(notification_id)

        #-----------------------------------------------------------------------------------------------------------
        # Create an event processor for user. This sets up callbacks etc.
        # As a side effect this updates the UserInfo object and also the user info and reverse user info dictionaries.
        #-----------------------------------------------------------------------------------------------------------

        user = self.event_processor.add_notification_for_user(notification_request=notification, user_id=user_id)

        #-------------------------------------------------------------------------------------------------------------------
        # Allow the indexes to be updated for ElasticSearch
        # We publish the event only after this so that the notification workers
        # can reload the user info properly
        #-------------------------------------------------------------------------------------------------------------------

        # todo: This is to allow time for the indexes to be created before publishing ReloadUserInfoEvent for notification workers.
        # todo: When things are more refined, it would be nice to have an event generated when the
        # indexes are updated, so that a subscriber here can publish the reload user info event
        # when it receives that event.
        time.sleep(4)
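
        # Hedged sketch of the event-driven alternative described in the todos above
        # (the IndexUpdatedEvent type name is hypothetical and not part of the current system):
        #
        #   def on_index_updated(event_msg, headers):
        #       self.event_publisher.publish_event(event_type="ReloadUserInfoEvent",
        #                                          origin="UserNotificationService")
        #
        #   index_subscriber = EventSubscriber(event_type="IndexUpdatedEvent",
        #                                      callback=on_index_updated)
        #   index_subscriber.start()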

        #-------------------------------------------------------------------------------------------------------------------
        # Generate an event that can be picked by a notification worker so that it can update its user_info dictionary
        #-------------------------------------------------------------------------------------------------------------------
        log.debug("(create notification) Publishing ReloadUserInfoEvent for notification_id: %s" % notification_id)

        self.event_publisher.publish_event( event_type= "ReloadUserInfoEvent",
                                            origin="UserNotificationService",
                                            description= "A notification has been created.",
                                            notification_id = notification_id)

        return notification_id

    def update_notification(self, notification=None, user_id = ''):
        """Updates the provided NotificationRequest object.  Throws NotFound exception if
        an existing version of NotificationRequest is not found.  Throws Conflict if
        the provided NotificationRequest object is not based on the latest persisted
        version of the object.

        @param notification     NotificationRequest
        @throws BadRequest      if object does not have _id or _rev attribute
        @throws NotFound        object with specified id does not exist
        @throws Conflict        object not based on latest persisted object version
        """

        #-------------------------------------------------------------------------------------------------------------------
        # Get the old notification
        #-------------------------------------------------------------------------------------------------------------------

        old_notification = self.clients.resource_registry.read(notification._id)

        #-------------------------------------------------------------------------------------------------------------------
        # Update the notification
        #-------------------------------------------------------------------------------------------------------------------

        self.clients.resource_registry.update(notification)

        #-------------------------------------------------------------------------------------------------------------------
        # Read the notification object again to make sure we have the newly registered notification request object
        #-------------------------------------------------------------------------------------------------------------------

        notification_id = notification._id
        notification = self.clients.resource_registry.read(notification_id)

        #------------------------------------------------------------------------------------
        # Update the UserInfo object
        #------------------------------------------------------------------------------------

        user = self.update_user_info_object(user_id, notification, old_notification)

        #------------------------------------------------------------------------------------
        # Update the user_info dictionary maintained by UNS
        #------------------------------------------------------------------------------------

        self.update_user_info_dictionary(user, notification, old_notification)

        #-------------------------------------------------------------------------------------------------------------------
        # Generate an event that can be picked by notification workers so that they can update their user_info dictionary
        #-------------------------------------------------------------------------------------------------------------------
        log.info("(update notification) Publishing ReloadUserInfoEvent for updated notification")

        self.event_publisher.publish_event( event_type= "ReloadUserInfoEvent",
                                            origin="UserNotificationService",
                                            description= "A notification has been updated."
                                            )

    def read_notification(self, notification_id=''):
        """Returns the NotificationRequest object for the specified notification id.
        Throws exception if id does not match any persisted NotificationRequest
        objects.

        @param notification_id    str
        @retval notification    NotificationRequest
        @throws NotFound    object with specified id does not exist
        """
        notification = self.clients.resource_registry.read(notification_id)

        return notification

    def delete_notification(self, notification_id=''):
        """For now, permanently deletes NotificationRequest object with the specified
        id. Throws exception if id does not match any persisted NotificationRequest.

        @param notification_id    str
        @throws NotFound    object with specified id does not exist
        """

        #-------------------------------------------------------------------------------------------------------------------
        # Stop the event subscriber for the notification
        #-------------------------------------------------------------------------------------------------------------------
        notification_request = self.clients.resource_registry.read(notification_id)

        self.event_processor.stop_notification_subscriber(notification_request=notification_request)

        #-------------------------------------------------------------------------------------------------------------------
        # delete the notification from the user_info and reverse_user_info dictionaries
        #-------------------------------------------------------------------------------------------------------------------

        self.delete_notification_from_user_info(notification_id)

        #-------------------------------------------------------------------------------------------------------------------
        # delete from the resource registry
        #-------------------------------------------------------------------------------------------------------------------

        self.clients.resource_registry.delete(notification_id)

        #-------------------------------------------------------------------------------------------------------------------
        # Generate an event that can be picked by a notification worker so that it can update its user_info dictionary
        #-------------------------------------------------------------------------------------------------------------------
        log.info("(delete notification) Publishing ReloadUserInfoEvent for notification_id: %s" % notification_id)

        self.event_publisher.publish_event( event_type= "ReloadUserInfoEvent",
                                            origin="UserNotificationService",
                                            description= "A notification has been deleted.",
                                            notification_id = notification_id)

    def delete_notification_from_user_info(self, notification_id):
        '''
        Helper method to delete the notification from the user_info dictionary
        '''

        for user_name, value in self.event_processor.user_info.iteritems():
            for notif in value['notifications']:
                if notification_id == notif._id:
                    # remove the notification
                    value['notifications'].remove(notif)
                    # remove the notification_subscription
                    self.event_processor.user_info[user_name]['notification_subscriptions'].pop(notification_id)

        self.event_processor.reverse_user_info = calculate_reverse_user_info(self.event_processor.user_info)

    def find_events(self, origin='', type='', min_datetime='', max_datetime='', limit=-1, descending=False):
        """Returns a list of events that match the specified search criteria. Will throw a not NotFound exception
        if no events exist for the given parameters.

        @param origin         str
        @param type           str
        @param min_datetime   str
        @param max_datetime   str
        @param limit          int         (integer limiting the number of results (0 means unlimited))
        @param descending     boolean     (if True, reverse order (of production time) is applied, e.g. most recent first)
        @retval event_list    []
        @throws NotFound    object with specified parameters does not exist
        """

        if min_datetime and max_datetime:
            search_time = "SEARCH 'ts_created' VALUES FROM %s TO %s FROM 'events_index'" % (min_datetime, max_datetime)
        else:
            search_time = 'search "ts_created" is "*" from "events_index"'

        if origin:
            search_origin = 'search "origin" is "%s" from "events_index"' % origin
        else:
            search_origin = 'search "origin" is "*" from "events_index"'

        if type:
            search_type = 'search "type_" is "%s" from "events_index"' % type
        else:
            search_type = 'search "type_" is "*" from "events_index"'

        search_string = search_time + ' and ' + search_origin + ' and ' + search_type

        # get the list of ids corresponding to the events
        ret_vals = self.discovery.parse(search_string)
        log.debug("(find_events) Discovery search returned the following event ids: %s" % ret_vals)

        events = []
        for event_id in ret_vals:
            datastore = self.datastore_manager.get_datastore('events')
            event_obj = datastore.read(event_id)
            events.append(event_obj)

        log.debug("(find_events) UNS found the following relevant events: %s" % events)

        if limit > 0:
            return events[:limit]

        #todo implement time ordering: ascending or descending

        return events
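
    # Illustrative sketch (hypothetical values, not produced by the service): with
    # origin='instrument_1', type='ResourceLifecycleEvent' and no time bounds, the
    # compound query assembled by find_events() above reads:
    #
    #   search "ts_created" is "*" from "events_index" and search "origin" is
    #   "instrument_1" from "events_index" and search "type_" is
    #   "ResourceLifecycleEvent" from "events_index"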

    def publish_event(self, event=None, scheduler_entry= None):
        '''
        Publish a general event at a certain time using the UNS

        @param event Event
        @param scheduler_entry SchedulerEntry This object is created through Scheduler Service
        '''

        log.debug("UNS to publish on schedule the event: %s" % event)

        #--------------------------------------------------------------------------------
        # Set up a subscriber to get the nod from the scheduler to publish the event
        #--------------------------------------------------------------------------------
        def publish(message, headers):
            self.event_publisher._publish_event( event_msg = event,
                                            origin=event.origin,
                                            event_type = event.type_)
            log.info("UNS published an event in response to a nod from the Scheduler Service.")

        event_subscriber = EventSubscriber( event_type = "ResourceEvent", callback=publish)
        event_subscriber.start()

        # Use the scheduler to set up a timer
        self.scheduler_service.create_timer(scheduler_entry)

    def create_worker(self, number_of_workers=1):
        '''
        Creates notification workers

        @param number_of_workers int
        @retval pids list

        '''

        pids = []

        for n in xrange(number_of_workers):

            process_definition = ProcessDefinition( name='notification_worker_%s' % n)

            process_definition.executable = {
                'module': 'ion.processes.data.transforms.notification_worker',
                'class':'NotificationWorker'
            }
            process_definition_id = self.process_dispatcher.create_process_definition(process_definition=process_definition)

            # ------------------------------------------------------------------------------------
            # Process Spawning
            # ------------------------------------------------------------------------------------

            pid2 = self.process_dispatcher.create_process(process_definition_id)

            #@todo put in a configuration
            configuration = {}
            configuration['process'] = dict({
                'name': 'notification_worker_%s' % n,
                'type':'simple'
            })

            pid  = self.process_dispatcher.schedule_process(
                process_definition_id,
                configuration = configuration,
                process_id=pid2
            )

            pids.append(pid)

        return pids
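
    # Hedged usage sketch (caller-side, not part of the service): an operator holding
    # a reference to this service instance could spawn two workers and keep the
    # returned process ids, e.g.
    #
    #   pids = uns.create_worker(number_of_workers=2)
    #
    # where 'uns' is a hypothetical reference to a UserNotificationService instance.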


    def process_batch(self, start_time = 0, end_time = 10):
        '''
        This method is launched when a process_batch event is received. The user info dictionary maintained
        by the User Notification Service is used to query the event repository for all events for a particular
        user that have occurred in a provided time interval, and then an email is sent to the user containing
        the digest of all the events.
        '''
        for user_name, value in self.event_processor.user_info.iteritems():

            notifications = value['notifications']

            events_for_message = []

            search_time = "SEARCH 'ts_created' VALUES FROM %s TO %s FROM 'events_index'" % (start_time, end_time)

            for notification in notifications:

                if notification.origin:
                    search_origin = 'search "origin" is "%s" from "events_index"' % notification.origin
                else:
                    search_origin = 'search "origin" is "*" from "events_index"'

                if notification.origin_type:
                    search_origin_type= 'search "origin_type" is "%s" from "events_index"' % notification.origin_type
                else:
                    search_origin_type= 'search "origin_type" is "*" from "events_index"'

                if notification.event_type:
                    search_event_type = 'search "type_" is "%s" from "events_index"' % notification.event_type
                else:
                    search_event_type = 'search "type_" is "*" from "events_index"'

                search_string = search_time + ' and ' + search_origin + ' and ' + search_origin_type + ' and ' + search_event_type

                # get the list of ids corresponding to the events
                ret_vals = self.discovery.parse(search_string)

                for event_id in ret_vals:
                    datastore = self.datastore_manager.get_datastore('events')
                    event_obj = datastore.read(event_id)
                    events_for_message.append(event_obj)

            log.debug("Found following events of interest to user, %s: %s" % (user_name, events_for_message))

            # send a notification email to each user using a _send_email() method
            if events_for_message:
                self.format_and_send_email(events_for_message, user_name)

    def format_and_send_email(self, events_for_message, user_name):
        '''
        Format the message for a particular user containing information about the events they are to be notified about
        '''

        message = str(events_for_message)
        log.info("The user, %s, will get the following events in his batch notification email: %s" % (user_name, message))

        msg_body = ''
        count = 1
        for event in events_for_message:
            # build the email from the event content
            msg_body += string.join(("\r\n",
                                     "Event %s: %s" %  (count, event),
                                    "",
                                    "Originator: %s" %  event.origin,
                                    "",
                                    "Description: %s" % event.description ,
                                    "",
                                    "Event time stamp: %s" %  event.ts_created,
                                    "\r\n",
                                    "------------------------"
                                    "\r\n"))
            count += 1

        msg_body += "You received this notification from ION because you asked to be " + \
                    "notified about this event from this source. " + \
                    "To modify or remove notifications about this event, " + \
                    "please access My Notifications Settings in the ION Web UI. " + \
                    "Do not reply to this email.  This email address is not monitored " + \
                    "and the emails will not be read. \r\n "


        log.debug("The email has the following message body: %s" % msg_body)

        msg_subject = "(SysName: " + get_sys_name() + ") ION event "

        self.send_batch_email(  msg_body = msg_body,
                                msg_subject = msg_subject,
                                msg_recipient=self.event_processor.user_info[user_name]['user_contact'].email,
                                smtp_client=self.smtp_client )

    def send_batch_email(self, msg_body, msg_subject, msg_recipient, smtp_client):
        '''
        Send the email
        '''

        msg = MIMEText(msg_body)
        msg['Subject'] = msg_subject
        msg['From'] = self.ION_NOTIFICATION_EMAIL_ADDRESS
        msg['To'] = msg_recipient
        log.debug("EventProcessor.subscription_callback(): sending email to %s"\
        %msg_recipient)

        smtp_sender = CFG.get_safe('server.smtp.sender')

        smtp_client.sendmail(smtp_sender, msg_recipient, msg.as_string())
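
    # Minimal standalone sketch of the same sending step with the standard library,
    # assuming the client is a plain smtplib.SMTP connection (the real client comes
    # from setting_up_smtp_client(), whose behaviour is not shown here); addresses
    # are hypothetical:
    #
    #   import smtplib
    #   from email.mime.text import MIMEText
    #
    #   msg = MIMEText("event digest body")
    #   msg['Subject'] = "(SysName: dev) ION event "
    #   msg['From'] = "uns-sender@example.com"
    #   msg['To'] = "user@example.com"
    #   smtplib.SMTP('localhost').sendmail(msg['From'], [msg['To']], msg.as_string())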

    def update_user_info_object(self, user_id, new_notification, old_notification):
        '''
        Update the UserInfo object. If the passed in parameter, old_notification, is None, it does not need to remove the old notification
        '''

        #------------------------------------------------------------------------------------
        # read the user
        #------------------------------------------------------------------------------------

        user = self.clients.resource_registry.read(user_id)

        if not user:
            raise BadRequest("No user with the provided user_id: %s" % user_id)

        notifications = []
        for item in user.variables:
            if item['name'] == 'notifications':
                if old_notification and old_notification in item['value']:

                    notifications = item['value']
                    # remove the old notification
                    notifications.remove(old_notification)

                # put in the new notification
                notifications.append(new_notification)

                item['value'] = notifications

                break

        #------------------------------------------------------------------------------------
        # update the resource registry
        #------------------------------------------------------------------------------------

        self.clients.resource_registry.update(user)

        return user

    def update_user_info_dictionary(self, user, new_notification, old_notification):

        #------------------------------------------------------------------------------------
        # Remove the old notifications
        #------------------------------------------------------------------------------------

        if old_notification in self.event_processor.user_info[user.name]['notifications']:

            # remove from notifications list
            self.event_processor.user_info[user.name]['notifications'].remove(old_notification)

            #------------------------------------------------------------------------------------
            # update the notification subscription object
            #------------------------------------------------------------------------------------

            # get the old notification_subscription
            notification_subscription = self.event_processor.user_info[user.name]['notification_subscriptions'].pop(old_notification._id)

            # update that old notification subscription
            notification_subscription._res_obj = new_notification

            # feed the updated notification subscription back into the user info dictionary
            self.event_processor.user_info[user.name]['notification_subscriptions'][old_notification._id] = notification_subscription

        #------------------------------------------------------------------------------------
        # find the already existing notifications for the user
        #------------------------------------------------------------------------------------

        notifications = self.event_processor.user_info[user.name]['notifications']
        notifications.append(new_notification)

        #------------------------------------------------------------------------------------
        # update the user info - contact information, notifications
        #------------------------------------------------------------------------------------

        self.event_processor.user_info[user.name]['user_contact'] = user.contact
        self.event_processor.user_info[user.name]['notifications'] = notifications

        self.event_processor.reverse_user_info = calculate_reverse_user_info(self.event_processor.user_info)
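

# Illustrative sketch (not part of the service above): the shape of the user_info
# dictionary as the code in this example indexes it. Only the key names
# ('user_contact', 'notifications', 'notification_subscriptions') are taken from
# the code; the helper and its values are hypothetical.

def _sketch_user_info_entry(user_name, contact, notifications):
    # Build one user_info entry with the same keys the service reads and writes.
    return {
        user_name: {
            'user_contact': contact,                # e.g. a ContactInformation with an .email
            'notifications': list(notifications),   # NotificationRequest objects the user subscribed to
            'notification_subscriptions': {},       # keyed by notification _id
        }
    }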
Code example #14
class UserNotificationService(BaseUserNotificationService):
    """
    A service that provides users with an API for CRUD methods for notifications.
    """
    def __init__(self, *args, **kwargs):
        self._schedule_ids = []
        BaseUserNotificationService.__init__(self, *args, **kwargs)

    def on_start(self):
        self.ION_NOTIFICATION_EMAIL_ADDRESS = CFG.get_safe(
            'server.smtp.sender')

        # Create an event processor
        self.event_processor = EmailEventProcessor()

        # Dictionaries that maintain information about users and their subscribed notifications
        self.user_info = {}

        # The reverse_user_info is calculated from the user_info dictionary
        self.reverse_user_info = {}

        # Get the clients
        # @TODO: Why are these not dependencies in the service YML???
        self.discovery = DiscoveryServiceClient()
        self.process_dispatcher = ProcessDispatcherServiceClient()

        self.event_publisher = EventPublisher()
        self.datastore = self.container.datastore_manager.get_datastore(
            'events')

        self.start_time = get_ion_ts()

        #------------------------------------------------------------------------------------
        # Create an event subscriber for Reload User Info events
        #------------------------------------------------------------------------------------

        def reload_user_info(event_msg, headers):
            """
            Callback method for the subscriber to ReloadUserInfoEvent
            """

            notification_id = event_msg.notification_id
            log.debug(
                "(UNS instance) Received a ReloadUserInfoEvent. The relevant notification_id is %s"
                % notification_id)

            try:
                self.user_info = self.load_user_info()
            except NotFound:
                log.warning("ElasticSearch has not yet loaded the user_index.")

            self.reverse_user_info = calculate_reverse_user_info(
                self.user_info)

            log.debug("(UNS instance) After a reload, the user_info: %s" %
                      self.user_info)
            log.debug("(UNS instance) The recalculated reverse_user_info: %s" %
                      self.reverse_user_info)

        # the subscriber for the ReloadUserInfoEvent
        self.reload_user_info_subscriber = EventSubscriber(
            event_type=OT.ReloadUserInfoEvent,
            origin='UserNotificationService',
            callback=reload_user_info)
        self.add_endpoint(self.reload_user_info_subscriber)

    def on_quit(self):
        """
        Handles stop/terminate.

        Cleans up subscribers spawned here, terminates any scheduled tasks to the scheduler.
        """
        for sid in self._schedule_ids:
            try:
                self.clients.scheduler.cancel_timer(sid)
            except IonException as ex:
                log.info(
                    "Ignoring exception while cancelling schedule id (%s): %s: %s",
                    sid, ex.__class__.__name__, ex)

        super(UserNotificationService, self).on_quit()

    def set_process_batch_key(self, process_batch_key=''):
        """
        This method allows an operator to set the process_batch_key, a string.
        Once this method is used by the operator, the UNS will start listening for timer events
        published by the scheduler with origin = process_batch_key.

        @param process_batch_key str
        """
        def process(event_msg, headers):
            self.end_time = get_ion_ts()

            # run the process_batch() method
            self.process_batch(start_time=self.start_time,
                               end_time=self.end_time)
            self.start_time = self.end_time

        # the subscriber for the batch processing
        # To trigger the batch notification, have the scheduler create a timer with event_origin = process_batch_key
        self.batch_processing_subscriber = EventSubscriber(
            event_type=OT.TimerEvent,
            origin=process_batch_key,
            callback=process)
        self.add_endpoint(self.batch_processing_subscriber)
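
    # Hedged usage note (operator-side, the key name is hypothetical): call
    # set_process_batch_key('daily_digest') and then have the Scheduler Service create
    # a timer whose event_origin is that same key; each TimerEvent fired with that
    # origin triggers one process_batch() pass over [self.start_time, self.end_time].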

    def create_notification(self, notification=None, user_id=''):
        """
        Persists the provided NotificationRequest object for the specified Origin id.
        Associate the Notification resource with the user_id string.
        The returned id is the internal id by which the NotificationRequest will be identified
        in the data store.

        @param notification        NotificationRequest
        @param user_id             str
        @retval notification_id    str
        @throws BadRequest    if object passed has _id or _rev attribute
        """
        if not user_id:
            raise BadRequest("User id not provided.")

        log.debug(
            "Create notification called for user_id: %s, and notification: %s",
            user_id, notification)

        #---------------------------------------------------------------------------------------------------
        # Persist Notification object as a resource if it has not already been persisted
        #---------------------------------------------------------------------------------------------------

        notification_id = None
        # if the notification has already been registered, simply use the old id
        existing_user_notifications = self.get_user_notifications(
            user_info_id=user_id)
        if existing_user_notifications:
            notification_id = self._notification_in_notifications(
                notification, existing_user_notifications)

        # since the notification has not been registered yet, register it and get the id

        temporal_bounds = TemporalBounds()
        temporal_bounds.start_datetime = get_ion_ts()
        temporal_bounds.end_datetime = ''

        if not notification_id:
            notification.temporal_bounds = temporal_bounds
            notification_id, rev = self.clients.resource_registry.create(
                notification)
        else:
            log.debug(
                "Notification object has already been created in the resource registry. No new id will be generated. notification_id: %s",
                notification_id)
            # Read the old notification already in the resource registry
            notification = self.clients.resource_registry.read(notification_id)

            # Update the temporal bounds of the old notification resource
            notification.temporal_bounds = temporal_bounds

            # Update the notification in the resource registry
            self.clients.resource_registry.update(notification)

            log.debug(
                "The temporal bounds for this resubscribed notification object with id: %s, is: %s",
                notification._id, notification.temporal_bounds)

        # Link the user and the notification with a hasNotification association
        assocs = self.clients.resource_registry.find_associations(
            subject=user_id,
            predicate=PRED.hasNotification,
            object=notification_id,
            id_only=True)

        if assocs:
            log.debug(
                "Got an already existing association: %s, between user_id: %s, and notification_id: %s",
                assocs, user_id, notification_id)
            return notification_id
        else:
            log.debug(
                "Creating association between user_id: %s, and notification_id: %s",
                user_id, notification_id)
            self.clients.resource_registry.create_association(
                user_id, PRED.hasNotification, notification_id)

        # read the registered notification request object because this has an _id and is more useful
        notification = self.clients.resource_registry.read(notification_id)

        #-------------------------------------------------------------------------------------------------------------------
        # Generate an event that can be picked by a notification worker so that it can update its user_info dictionary
        #-------------------------------------------------------------------------------------------------------------------
        log.debug(
            "(create notification) Publishing ReloadUserInfoEvent for notification_id: %s",
            notification_id)

        self.event_publisher.publish_event(
            event_type=OT.ReloadUserInfoEvent,
            origin="UserNotificationService",
            description="A notification has been created.",
            notification_id=notification_id)

        return notification_id

    def update_notification(self, notification=None, user_id=''):
        """Updates the provided NotificationRequest object.  Throws NotFound exception if
        an existing version of NotificationRequest is not found.  Throws Conflict if
        the provided NotificationRequest object is not based on the latest persisted
        version of the object.

        @param notification     NotificationRequest
        @throws BadRequest      if object does not have _id or _rev attribute
        @throws NotFound        object with specified id does not exist
        @throws Conflict        object not based on latest persisted object version
        """

        raise NotImplementedError(
            "This method needs to be worked out in terms of implementation")

#        #-------------------------------------------------------------------------------------------------------------------
#        # Get the old notification
#        #-------------------------------------------------------------------------------------------------------------------
#
#        old_notification = self.clients.resource_registry.read(notification._id)
#
#        #-------------------------------------------------------------------------------------------------------------------
#        # Update the notification in the notifications dict
#        #-------------------------------------------------------------------------------------------------------------------
#
#
#        self._update_notification_in_notifications_dict(new_notification=notification,
#                                                        notifications=self.notifications)
#        #-------------------------------------------------------------------------------------------------------------------
#        # Update the notification in the registry
#        #-------------------------------------------------------------------------------------------------------------------
#        '''
#        Since one user should not be able to update the notification request resource without the knowledge of other users
#        who have subscribed to the same notification request, we do not update the resource in the resource registry
#        '''
#
##        self.clients.resource_registry.update(notification)
#
#        #-------------------------------------------------------------------------------------------------------------------
#        # reading up the notification object to make sure we have the newly registered notification request object
#        #-------------------------------------------------------------------------------------------------------------------
#
#        notification_id = notification._id
#        notification = self.clients.resource_registry.read(notification_id)
#
#        #------------------------------------------------------------------------------------
#        # Update the UserInfo object
#        #------------------------------------------------------------------------------------
#
#        user = self.update_user_info_object(user_id, notification)
#
#        #-------------------------------------------------------------------------------------------------------------------
#        # Generate an event that can be picked by notification workers so that they can update their user_info dictionary
#        #-------------------------------------------------------------------------------------------------------------------
#        log.info("(update notification) Publishing ReloadUserInfoEvent for updated notification")
#
#        self.event_publisher.publish_event( event_type= "ReloadUserInfoEvent",
#            origin="UserNotificationService",
#            description= "A notification has been updated.",
#            notification_id = notification_id
#        )

    def read_notification(self, notification_id=''):
        """Returns the NotificationRequest object for the specified notification id.
        Throws exception if id does not match any persisted NotificationRequest
        objects.

        @param notification_id    str
        @retval notification    NotificationRequest
        @throws NotFound    object with specified id does not exist
        """
        notification = self.clients.resource_registry.read(notification_id)

        return notification

    def delete_notification(self, notification_id=''):
        """For now, permanently deletes NotificationRequest object with the specified
        id. Throws exception if id does not match any persisted NotificationRequest.

        @param notification_id    str
        @throws NotFound    object with specified id does not exist
        """

        #-------------------------------------------------------------------------------------------------------------------
        # Stop the event subscriber for the notification
        #-------------------------------------------------------------------------------------------------------------------
        notification_request = self.clients.resource_registry.read(
            notification_id)

        #-------------------------------------------------------------------------------------------------------------------
        # Update the resource registry
        #-------------------------------------------------------------------------------------------------------------------

        notification_request.temporal_bounds.end_datetime = get_ion_ts()

        self.clients.resource_registry.update(notification_request)

        #-------------------------------------------------------------------------------------------------------------------
        # Find users who are interested in the notification and update the notification in the list maintained by the UserInfo object
        #-------------------------------------------------------------------------------------------------------------------
        #        user_ids, _ = self.clients.resource_registry.find_subjects(RT.UserInfo, PRED.hasNotification, notification_id, True)
        #
        #        for user_id in user_ids:
        #            self.update_user_info_object(user_id, notification_request)

        #-------------------------------------------------------------------------------------------------------------------
        # Generate an event that can be picked by a notification worker so that it can update its user_info dictionary
        #-------------------------------------------------------------------------------------------------------------------
        log.info(
            "(delete notification) Publishing ReloadUserInfoEvent for notification_id: %s",
            notification_id)

        self.event_publisher.publish_event(
            event_type=OT.ReloadUserInfoEvent,
            origin="UserNotificationService",
            description="A notification has been deleted.",
            notification_id=notification_id)

#    def delete_notification_from_user_info(self, notification_id):
#        """
#        Helper method to delete the notification from the user_info dictionary
#
#        @param notification_id str
#        """
#
#        user_ids, assocs = self.clients.resource_registry.find_subjects(object=notification_id, predicate=PRED.hasNotification, id_only=True)
#
#        for assoc in assocs:
#            self.clients.resource_registry.delete_association(assoc)
#
#        for user_id in user_ids:
#
#            value = self.user_info[user_id]
#
#            for notif in value['notifications']:
#                if notification_id == notif._id:
#                    # remove the notification
#                    value['notifications'].remove(notif)
#
#        self.reverse_user_info = calculate_reverse_user_info(self.user_info)

    def find_events(self,
                    origin='',
                    type='',
                    min_datetime=0,
                    max_datetime=0,
                    limit=-1,
                    descending=False):
        """
        This method leverages CouchDB views and simple filters. It does not use ElasticSearch.

        Returns a list of events that match the specified search criteria. Will throw a NotFound exception
        if no events exist for the given parameters.

        @param origin         str
        @param min_datetime   int  seconds
        @param max_datetime   int  seconds
        @param limit          int         (integer limiting the number of results (0 means unlimited))
        @param descending     boolean     (if True, reverse order (of production time) is applied, e.g. most recent first)
        @retval event_list    []
        @throws NotFound    object with specified parameters does not exist
        """
        event_tuples = []

        try:
            event_tuples = self.container.event_repository.find_events(
                event_type=type,
                origin=origin,
                start_ts=min_datetime,
                end_ts=max_datetime,
                limit=limit,
                descending=descending)
        except Exception as exc:
            log.warning(
                "The UNS find_events operation for event origin = %s and type = %s failed. Error message = %s",
                origin, type, exc.message)

        events = [item[2] for item in event_tuples]
        log.debug("(find_events) UNS found the following relevant events: %s",
                  events)

        return events

    #todo Uses Elastic Search. Later extend this to a larger search criteria
    def find_events_extended(self,
                             origin='',
                             type='',
                             min_time=0,
                             max_time=0,
                             limit=-1,
                             descending=False):
        """Uses Elastic Search. Returns a list of events that match the specified search criteria. Will throw a not NotFound exception
        if no events exist for the given parameters.

        @param origin         str
        @param type           str
        @param min_time   int seconds
        @param max_time   int seconds
        @param limit          int         (integer limiting the number of results (0 means unlimited))
        @param descending     boolean     (if True, reverse order (of production time) is applied, e.g. most recent first)
        @retval event_list    []
        @throws NotFound    object with specified parameters does not exist
        """

        query = []

        if min_time and max_time:
            query.append(
                "SEARCH 'ts_created' VALUES FROM %s TO %s FROM 'events_index'"
                % (min_time, max_time))

        if origin:
            query.append('search "origin" is "%s" from "events_index"' %
                         origin)

        if type:
            query.append('search "type_" is "%s" from "events_index"' % type)

        search_string = ' and '.join(query)

        # get the list of ids corresponding to the events
        ret_vals = self.discovery.parse(search_string)
        if len(query) > 1:
            events = self.datastore.read_mult(ret_vals)
        else:
            events = [i['_source'] for i in ret_vals]
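
        # Note on the branch above (a reading of this code, not a documented Discovery
        # contract): compound queries appear to return bare event ids that must be
        # re-read from the events datastore, while a single sub-query already carries
        # each event body under '_source'.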

        log.debug(
            "(find_events_extended) Discovery search returned the following event ids: %s",
            ret_vals)

        log.debug(
            "(find_events_extended) UNS found the following relevant events: %s",
            events)

        if limit > 0:
            return events[:limit]

        #todo implement time ordering: ascending or descending

        return events

    def publish_event_object(self, event=None):
        """
        This service operation would publish the given event from an event object.

        @param event    !Event
        @retval event   !Event
        """
        event = self.event_publisher.publish_event_object(event_object=event)
        log.info(
            "The publish_event_object(event) method of UNS was used to publish the event: %s",
            event)

        return event

    def publish_event(self,
                      event_type='',
                      origin='',
                      origin_type='',
                      sub_type='',
                      description='',
                      event_attrs=None):
        """
        This service operation assembles a new Event object based on event_type
        (e.g. via the pyon Event publisher) with optional additional attributes from an event_attrs
        dict of arbitrary attributes.
        
        
        @param event_type   str
        @param origin       str
        @param origin_type  str
        @param sub_type     str
        @param description  str
        @param event_attrs  dict
        @retval event       !Event
        """
        event_attrs = event_attrs or {}

        event = self.event_publisher.publish_event(event_type=event_type,
                                                   origin=origin,
                                                   origin_type=origin_type,
                                                   sub_type=sub_type,
                                                   description=description,
                                                   **event_attrs)
        log.info(
            "The publish_event() method of UNS was used to publish an event: %s",
            event)

        return event

    def get_recent_events(self, resource_id='', limit=100):
        """
        Get recent events for use in extended resource computed attribute
        @param resource_id str
        @param limit int
        @retval ComputedListValue with value list of 4-tuple with Event objects
        """

        now = get_ion_ts()
        events = self.find_events(origin=resource_id,
                                  limit=limit,
                                  max_datetime=now,
                                  descending=True)

        ret = IonObject(OT.ComputedEventListValue)
        if events:
            ret.value = events
            ret.computed_list = [
                get_event_computed_attributes(event) for event in events
            ]
            ret.status = ComputedValueAvailability.PROVIDED
        else:
            ret.status = ComputedValueAvailability.NOTAVAILABLE

        return ret

    def get_user_notifications(self, user_info_id=''):
        """
        Get the notification request objects that are subscribed to by the user

        @param user_info_id str

        @retval notifications list of NotificationRequest objects
        """
        notifications = []
        user_notif_req_objs, _ = self.clients.resource_registry.find_objects(
            subject=user_info_id,
            predicate=PRED.hasNotification,
            object_type=RT.NotificationRequest,
            id_only=False)

        log.debug("Got %s notifications, for the user: %s",
                  len(user_notif_req_objs), user_info_id)

        for notif in user_notif_req_objs:
            # do not include notifications that have expired
            if notif.temporal_bounds.end_datetime == '':
                notifications.append(notif)

        return notifications

    def create_worker(self, number_of_workers=1):
        """
        Creates notification workers

        @param number_of_workers int
        @retval pids list

        """

        pids = []

        for n in xrange(number_of_workers):

            process_definition = ProcessDefinition(
                name='notification_worker_%s' % n)

            process_definition.executable = {
                'module': 'ion.processes.data.transforms.notification_worker',
                'class': 'NotificationWorker'
            }
            process_definition_id = self.process_dispatcher.create_process_definition(
                process_definition=process_definition)

            # ------------------------------------------------------------------------------------
            # Process Spawning
            # ------------------------------------------------------------------------------------

            pid2 = self.process_dispatcher.create_process(
                process_definition_id)

            #@todo put in a configuration
            configuration = {}
            configuration['process'] = dict({
                'name': 'notification_worker_%s' % n,
                'type': 'simple',
                'queue_name': 'notification_worker_queue'
            })

            pid = self.process_dispatcher.schedule_process(
                process_definition_id,
                configuration=configuration,
                process_id=pid2)

            pids.append(pid)

        return pids

    def process_batch(self, start_time='', end_time=''):
        """
        This method is launched when a process_batch event is received. The user info dictionary maintained
        by the User Notification Service is used to query the event repository for all events for a particular
        user that have occurred in a provided time interval, and then an email is sent to the user containing
        the digest of all the events.

        @param start_time int milliseconds
        @param end_time int milliseconds
        """
        self.smtp_client = setting_up_smtp_client()

        if end_time <= start_time:
            return

        for user_id, value in self.user_info.iteritems():

            notifications = self.get_user_notifications(user_info_id=user_id)
            notifications_disabled = value['notifications_disabled']
            notifications_daily_digest = value['notifications_daily_digest']

            # Skip users who have disabled notification delivery or who have not opted into the daily digest.
            # If notification preferences have not been set for the user, the defaults apply and no digest is sent.
            if notifications_disabled or not notifications_daily_digest:
                continue

            events_for_message = []

            search_time = "SEARCH 'ts_created' VALUES FROM %s TO %s FROM 'events_index'" % (
                start_time, end_time)

            for notification in notifications:
                # If the notification request has expired, then do not use it in the search
                if notification.temporal_bounds.end_datetime:
                    continue

                if CFG_ELASTIC_SEARCH:
                    if notification.origin:
                        search_origin = 'search "origin" is "%s" from "events_index"' % notification.origin
                    else:
                        search_origin = 'search "origin" is "*" from "events_index"'

                    if notification.origin_type:
                        search_origin_type = 'search "origin_type" is "%s" from "events_index"' % notification.origin_type
                    else:
                        search_origin_type = 'search "origin_type" is "*" from "events_index"'

                    if notification.event_type:
                        search_event_type = 'search "type_" is "%s" from "events_index"' % notification.event_type
                    else:
                        search_event_type = 'search "type_" is "*" from "events_index"'

                    search_string = search_time + ' and ' + search_origin + ' and ' + search_origin_type + ' and ' + search_event_type

                    # get the list of ids corresponding to the events
                    log.debug('process_batch  search_string: %s',
                              search_string)
                    ret_vals = self.discovery.parse(search_string)

                    events_for_message.extend(
                        self.datastore.read_mult(ret_vals))

                else:
                    # Fall back to the event repository when ElasticSearch is not configured
                    event_tuples = self.container.event_repository.find_events(
                        origin=notification.origin,
                        event_type=notification.event_type,
                        start_ts=start_time,
                        end_ts=end_time)
                    events = [item[2] for item in event_tuples]

                    events_for_message.extend(events)

            log.debug("Found following events of interest to user, %s: %s",
                      user_id, events_for_message)

            # send a notification email to each user using a _send_email() method
            if events_for_message:
                self.format_and_send_email(
                    events_for_message=events_for_message,
                    user_id=user_id,
                    smtp_client=self.smtp_client)

        self.smtp_client.quit()

    def format_and_send_email(self,
                              events_for_message=None,
                              user_id=None,
                              smtp_client=None):
        """
        Format the message for a particular user containing information about the events they are to be notified about

        @param events_for_message list
        @param user_id str
        """
        message = str(events_for_message)
        log.debug(
            "The user, %s, will get the following events in his batch notification email: %s",
            user_id, message)

        msg = convert_events_to_email_message(events_for_message,
                                              self.clients.resource_registry)
        msg["Subject"] = "(SysName: " + get_sys_name() + ") ION event "
        msg["To"] = self.user_info[user_id]['user_contact'].email
        self.send_batch_email(msg, smtp_client)

    def send_batch_email(self, msg=None, smtp_client=None):
        """
        Send the email

        @param msg MIMEText object of email message
        @param smtp_client object
        """

        if msg is None: msg = {}
        for f in ["Subject", "To"]:
            if f not in msg: raise BadRequest("'%s' not in msg %s" % (f, msg))

        msg_subject = msg["Subject"]
        msg_recipient = msg["To"]

        msg['From'] = self.ION_NOTIFICATION_EMAIL_ADDRESS
        log.debug("UNS sending batch (digest) email from %s to %s",
                  self.ION_NOTIFICATION_EMAIL_ADDRESS, msg_recipient)

        smtp_sender = CFG.get_safe('server.smtp.sender')

        smtp_client.sendmail(smtp_sender, [msg_recipient], msg.as_string())

    def update_user_info_object(self, user_id, new_notification):
        """
        Update the UserInfo object. If the passed-in old notification is None, there is no old notification to remove

        @param user_id str
        @param new_notification NotificationRequest
        """

        # This is not necessary because notifications are not stored in the UserInfo object
        raise NotImplementedError(
            "This method is not necessary because Notifications are not stored in the userinfo object"
        )

        #------------------------------------------------------------------------------------
        # read the user
        #------------------------------------------------------------------------------------

#        user = self.clients.resource_registry.read(user_id)
#
#        if not user:
#            raise BadRequest("No user with the provided user_id: %s" % user_id)
#
#        for item in user.variables:
#            if type(item) is dict and item.has_key('name') and item['name'] == 'notifications':
#                for notif in item['value']:
#                    if notif._id == new_notification._id:
#                        log.debug("came here for updating notification")
#                        notifications = item['value']
#                        notifications.remove(notif)
#                        notifications.append(new_notification)
#                break
#            else:
#                log.warning('Invalid variables attribute on UserInfo instance. UserInfo: %s', user)
#
#
#        #------------------------------------------------------------------------------------
#        # update the resource registry
#        #------------------------------------------------------------------------------------
#
#        log.debug("user.variables::: %s", user.variables)
#
#        self.clients.resource_registry.update(user)
#
#        return user

    def get_subscriptions(self,
                          resource_id='',
                          user_id='',
                          include_nonactive=False):
        """
        Return all NotificationRequest resources whose origin is the given resource_id.

        @param resource_id  a resource id (or other origin) that is the origin of events for notifications
        @param user_id  a UserInfo ID that owns the NotificationRequest
        @param include_nonactive  if False, filter to active NotificationRequests only
        """
        notif_reqs, _ = self.clients.resource_registry.find_resources_ext(
            restype=RT.NotificationRequest,
            attr_name="origin",
            attr_value=resource_id,
            id_only=False)

        log.debug(
            "Got %s active and past NotificationRequests for resource/origin %s",
            len(notif_reqs), resource_id)

        if not include_nonactive:
            notif_reqs = [
                nr for nr in notif_reqs
                if nr.temporal_bounds.end_datetime == ''
            ]
            log.debug("Filtered to %s active NotificationRequests",
                      len(notif_reqs))

        if user_id:
            # Get all NotificationRequests (ID) that are associated to given UserInfo_id
            user_notif_req_ids, _ = self.clients.resource_registry.find_objects(
                subject=user_id,
                predicate=PRED.hasNotification,
                object_type=RT.NotificationRequest,
                id_only=True)

            notif_reqs = [
                nr for nr in notif_reqs if nr._id in user_notif_req_ids
            ]
            log.debug(
                "Filtered to %s NotificationRequests associated to user %s",
                len(notif_reqs), user_id)

        return notif_reqs

    def get_subscriptions_attribute(self,
                                    resource_id='',
                                    user_id='',
                                    include_nonactive=False):
        retval = self.get_subscriptions(resource_id=resource_id,
                                        user_id=user_id,
                                        include_nonactive=include_nonactive)
        container = ComputedListValue(value=retval)
        return container

    def _notification_in_notifications(self,
                                       notification=None,
                                       notifications=None):

        for notif in notifications:
            if notif.name == notification.name and \
            notif.origin == notification.origin and \
            notif.origin_type == notification.origin_type and \
            notif.event_type == notification.event_type:
                return notif._id
        return None

    def load_user_info(self):
        """
        Method to load the user info dictionary used by the notification workers and the UNS

        @retval user_info dict
        """
        users, _ = self.clients.resource_registry.find_resources(
            restype=RT.UserInfo)

        user_info = {}

        if not users:
            return {}

        for user in users:
            notifications = []
            notifications_disabled = False
            notifications_daily_digest = False

            #retrieve all the active notifications assoc to this user
            notifications = self.get_user_notifications(user_info_id=user)
            log.debug('load_user_info notifications:   %s', notifications)

            for variable in user.variables:
                if type(variable) is dict and 'name' in variable:

                    if variable['name'] == 'notifications_daily_digest':
                        notifications_daily_digest = variable['value']

                    if variable['name'] == 'notifications_disabled':
                        notifications_disabled = variable['value']

                else:
                    log.warning(
                        'Invalid variables attribute on UserInfo instance. UserInfo: %s',
                        user)

            user_info[user._id] = {
                'user_contact': user.contact,
                'notifications': notifications,
                'notifications_daily_digest': notifications_daily_digest,
                'notifications_disabled': notifications_disabled
            }

        return user_info

    ##
    ##
    ##  GOVERNANCE FUNCTIONS
    ##
    ##

    def check_subscription_policy(self, process, message, headers):

        try:
            gov_values = GovernanceHeaderValues(headers=headers,
                                                process=process,
                                                resource_id_required=False)

        except Inconsistent, ex:
            return False, ex.message

        if gov_values.op == 'delete_notification':
            return True, ''

        notification = message['notification']
        resource_id = notification.origin

        if notification.origin_type == RT.Org:
            org = self.clients.resource_registry.read(resource_id)
            if (has_org_role(gov_values.actor_roles, org.org_governance_name,
                             [ORG_MEMBER_ROLE])):
                return True, ''
        else:
            orgs, _ = self.clients.resource_registry.find_subjects(
                subject_type=RT.Org,
                predicate=PRED.hasResource,
                object=resource_id,
                id_only=False)
            for org in orgs:
                if (has_org_role(gov_values.actor_roles,
                                 org.org_governance_name, [ORG_MEMBER_ROLE])):
                    return True, ''

        return False, '%s(%s) has been denied since the user is not a member in any org to which the resource id %s belongs ' % (
            process.name, gov_values.op, resource_id)
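
    # A minimal sketch (assumed message/header shapes, for illustration only) of how
    # check_subscription_policy() above is driven: the governance interceptor passes the
    # operation name and actor roles in the headers, and the notification being created
    # rides in the message dict.
    #
    #   headers = {'op': 'create_notification',
    #              'ion-actor-id': actor_id,                        # assumed header key
    #              'ion-actor-roles': {'MyOrg': [ORG_MEMBER_ROLE]}} # assumed header key
    #   message = {'notification': notification_request}
    #   allowed, reason = self.check_subscription_policy(process, message, headers)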
Code Example #15
class UserNotificationService(BaseUserNotificationService):
    """
    A service that provides users with an API for CRUD methods for notifications.
    """

    def __init__(self, *args, **kwargs):
        self._subscribers = []
        self._schedule_ids = []
        BaseUserNotificationService.__init__(self, *args, **kwargs)

    def on_start(self):

        #---------------------------------------------------------------------------------------------------
        # Get the event Repository
        #---------------------------------------------------------------------------------------------------

#        self.ION_NOTIFICATION_EMAIL_ADDRESS = '*****@*****.**'
        self.ION_NOTIFICATION_EMAIL_ADDRESS = CFG.get_safe('server.smtp.sender')


        #---------------------------------------------------------------------------------------------------
        # Create an event processor
        #---------------------------------------------------------------------------------------------------

        self.event_processor = EmailEventProcessor()

        #---------------------------------------------------------------------------------------------------
        # load event originators, types, and table
        #---------------------------------------------------------------------------------------------------

        self.notifications = {}

        #---------------------------------------------------------------------------------------------------
        # Dictionaries that maintain information about users and their subscribed notifications
        # The reverse_user_info is calculated from the user_info dictionary
        #---------------------------------------------------------------------------------------------------
        self.user_info = {}
        self.reverse_user_info = {}

        #---------------------------------------------------------------------------------------------------
        # Get the clients
        #---------------------------------------------------------------------------------------------------

        self.discovery = DiscoveryServiceClient()
        self.process_dispatcher = ProcessDispatcherServiceClient()
        self.event_publisher = EventPublisher()
        self.datastore = self.container.datastore_manager.get_datastore('events')

        self.start_time = get_ion_ts()

        #------------------------------------------------------------------------------------
        # Create an event subscriber for Reload User Info events
        #------------------------------------------------------------------------------------

        def reload_user_info(event_msg, headers):
            '''
            Callback method for the subscriber to ReloadUserInfoEvent
            '''

            notification_id =  event_msg.notification_id
            log.debug("(UNS instance received a ReloadNotificationEvent. The relevant notification_id is %s" % notification_id)

            try:
                self.user_info = self.load_user_info()
            except NotFound:
                log.warning("ElasticSearch has not yet loaded the user_index.")

            self.reverse_user_info =  calculate_reverse_user_info(self.user_info)

            log.debug("(UNS instance) After a reload, the user_info: %s" % self.user_info)
            log.debug("(UNS instance) The recalculated reverse_user_info: %s" % self.reverse_user_info)

        # the subscriber for the ReloadUserInfoEvent
        self.reload_user_info_subscriber = EventSubscriber(
            event_type="ReloadUserInfoEvent",
            origin='UserNotificationService',
            callback=reload_user_info
        )
        self.reload_user_info_subscriber.start()
        # For cleanup of the subscriber
        self._subscribers.append(self.reload_user_info_subscriber)

    def on_quit(self):
        """
        Handles stop/terminate.

        Cleans up subscribers spawned here, terminates any scheduled tasks to the scheduler.
        """
        for sub in self._subscribers:
            sub.stop()

        for sid in self._schedule_ids:
            try:
                self.clients.scheduler.cancel_timer(sid)
            except IonException as ex:
                log.info("Ignoring exception while cancelling schedule id (%s): %s: %s", sid, ex.__class__.__name__, ex)

        super(UserNotificationService, self).on_quit()

    def __now(self):
        """
        This method defines what the UNS uses as its "current" time
        """
        return datetime.utcnow()

    def set_process_batch_key(self, process_batch_key = ''):
        """
        This method allows an operator to set the process_batch_key, a string.
        Once this method is used by the operator, the UNS will start listening for timer events
        published by the scheduler with origin = process_batch_key.

        @param process_batch_key str
        """

        def process(event_msg, headers):
            self.end_time = get_ion_ts()

            log.debug("process_batch being called with start_time = %s, end_time = %s", self.start_time, self.end_time)

            # run the process_batch() method
            self.process_batch(start_time=self.start_time, end_time=self.end_time)
            self.start_time = self.end_time

        # The subscriber for the batch processing.
        # To trigger the batch notification, have the scheduler create a timer with event_origin = process_batch_key
        self.batch_processing_subscriber = EventSubscriber(
            event_type="ResourceEvent",
            origin=process_batch_key,
            queue_name='user_notification',
            callback=process
        )
        self.batch_processing_subscriber.start()
        self._subscribers.append(self.batch_processing_subscriber)
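
    # A minimal sketch (assumed scheduler API, for illustration only) of how an operator
    # would wire up the batch digest: create a scheduler timer whose event_origin matches
    # the process_batch_key, so the subscriber registered above fires periodically.
    #
    #   uns_client.set_process_batch_key(process_batch_key='batch_processing_timer')
    #   scheduler_client.create_interval_timer(start_time="now",
    #                                          interval=24 * 60 * 60,
    #                                          event_origin='batch_processing_timer',
    #                                          event_subtype='')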

    def create_notification(self, notification=None, user_id=''):
        """
        Persists the provided NotificationRequest object for the specified Origin id.
        Associate the Notification resource with the user_id string.
        returned id is the internal id by which NotificationRequest will be identified
        in the data store.

        @param notification        NotificationRequest
        @param user_id             str
        @retval notification_id    str
        @throws BadRequest    if object passed has _id or _rev attribute

        """

        if not user_id:
            raise BadRequest("User id not provided.")

        log.debug("Create notification called for user_id: %s, and notification: %s", user_id, notification)

        #---------------------------------------------------------------------------------------------------
        # Persist Notification object as a resource if it has already not been persisted
        #---------------------------------------------------------------------------------------------------

        # if the notification has already been registered, simply use the old id
        notification_id = self._notification_in_notifications(notification, self.notifications)

        # since the notification has not been registered yet, register it and get the id

        temporal_bounds = TemporalBounds()
        temporal_bounds.start_datetime = get_ion_ts()
        temporal_bounds.end_datetime = ''

        if not notification_id:
            notification.temporal_bounds = temporal_bounds
            notification_id, _ = self.clients.resource_registry.create(notification)
            self.notifications[notification_id] = notification
        else:
            log.debug("Notification object has already been created in resource registry before. No new id to be generated. notification_id: %s", notification_id)
            # Read the old notification already in the resource registry
            notification = self.clients.resource_registry.read(notification_id)

            # Update the temporal bounds of the old notification resource
            notification.temporal_bounds = temporal_bounds

            # Update the notification in the resource registry
            self.clients.resource_registry.update(notification)

            log.debug("The temporal bounds for this resubscribed notification object with id: %s, is: %s", notification_id,notification.temporal_bounds)


        # Link the user and the notification with a hasNotification association
        assocs= self.clients.resource_registry.find_associations(subject=user_id,
                                                                    predicate=PRED.hasNotification,
                                                                    object=notification_id,
                                                                    id_only=True)
        if assocs:
            log.debug("Got an already existing association: %s, between user_id: %s, and notification_id: %s", assocs,user_id,notification_id)
            return notification_id
        else:
            log.debug("Creating association between user_id: %s, and notification_id: %s", user_id, notification_id )
            self.clients.resource_registry.create_association(user_id, PRED.hasNotification, notification_id)

        # read the registered notification request object because this has an _id and is more useful
        notification = self.clients.resource_registry.read(notification_id)

        # Update the user info object with the notification
        self.event_processor.add_notification_for_user(new_notification=notification, user_id=user_id)

        #-------------------------------------------------------------------------------------------------------------------
        # Generate an event that can be picked by a notification worker so that it can update its user_info dictionary
        #-------------------------------------------------------------------------------------------------------------------
        log.debug("(create notification) Publishing ReloadUserInfoEvent for notification_id: %s", notification_id)

        self.event_publisher.publish_event( event_type= "ReloadUserInfoEvent",
            origin="UserNotificationService",
            description= "A notification has been created.",
            notification_id = notification_id)

        return notification_id
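
    # Illustrative usage (hypothetical ids and field values): register interest in
    # lifecycle events from a device for a given user.
    #
    #   notification_request = NotificationRequest(name='temp_alerts',
    #                                              origin=instrument_device_id,
    #                                              origin_type='InstrumentDevice',
    #                                              event_type='ResourceLifecycleEvent')
    #   notification_id = uns_client.create_notification(notification=notification_request,
    #                                                    user_id=user_info_id)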

    def update_notification(self, notification=None, user_id = ''):
        """Updates the provided NotificationRequest object.  Throws NotFound exception if
        an existing version of NotificationRequest is not found.  Throws Conflict if
        the provided NotificationRequest object is not based on the latest persisted
        version of the object.

        @param notification     NotificationRequest
        @throws BadRequest      if object does not have _id or _rev attribute
        @throws NotFound        object with specified id does not exist
        @throws Conflict        object not based on latest persisted object version
        """

        raise NotImplementedError("This method needs to be worked out in terms of implementation")

#        #-------------------------------------------------------------------------------------------------------------------
#        # Get the old notification
#        #-------------------------------------------------------------------------------------------------------------------
#
#        old_notification = self.clients.resource_registry.read(notification._id)
#
#        #-------------------------------------------------------------------------------------------------------------------
#        # Update the notification in the notifications dict
#        #-------------------------------------------------------------------------------------------------------------------
#
#
#        self._update_notification_in_notifications_dict(new_notification=notification,
#                                                        notifications=self.notifications)
#        #-------------------------------------------------------------------------------------------------------------------
#        # Update the notification in the registry
#        #-------------------------------------------------------------------------------------------------------------------
#        '''
#        Since one user should not be able to update the notification request resource without the knowledge of other users
#        who have subscribed to the same notification request, we do not update the resource in the resource registry
#        '''
#
##        self.clients.resource_registry.update(notification)
#
#        #-------------------------------------------------------------------------------------------------------------------
#        # reading up the notification object to make sure we have the newly registered notification request object
#        #-------------------------------------------------------------------------------------------------------------------
#
#        notification_id = notification._id
#        notification = self.clients.resource_registry.read(notification_id)
#
#        #------------------------------------------------------------------------------------
#        # Update the UserInfo object
#        #------------------------------------------------------------------------------------
#
#        user = self.update_user_info_object(user_id, notification)
#
#        #-------------------------------------------------------------------------------------------------------------------
#        # Generate an event that can be picked by notification workers so that they can update their user_info dictionary
#        #-------------------------------------------------------------------------------------------------------------------
#        log.info("(update notification) Publishing ReloadUserInfoEvent for updated notification")
#
#        self.event_publisher.publish_event( event_type= "ReloadUserInfoEvent",
#            origin="UserNotificationService",
#            description= "A notification has been updated.",
#            notification_id = notification_id
#        )

    def read_notification(self, notification_id=''):
        """Returns the NotificationRequest object for the specified notification id.
        Throws exception if id does not match any persisted NotificationRequest
        objects.

        @param notification_id    str
        @retval notification    NotificationRequest
        @throws NotFound    object with specified id does not exist
        """
        notification = self.clients.resource_registry.read(notification_id)

        return notification

    def delete_notification(self, notification_id=''):
        """For now, permanently deletes NotificationRequest object with the specified
        id. Throws exception if id does not match any persisted NotificationRequest.

        @param notification_id    str
        @throws NotFound    object with specified id does not exist
        """

        #-------------------------------------------------------------------------------------------------------------------
        # Stop the event subscriber for the notification
        #-------------------------------------------------------------------------------------------------------------------
        notification_request = self.clients.resource_registry.read(notification_id)

        #-------------------------------------------------------------------------------------------------------------------
        # Update the resource registry
        #-------------------------------------------------------------------------------------------------------------------

        notification_request.temporal_bounds.end_datetime = get_ion_ts()

        self.clients.resource_registry.update(notification_request)

        #-------------------------------------------------------------------------------------------------------------------
        # Find users who are interested in the notification and update the notification in the list maintained by the UserInfo object
        #-------------------------------------------------------------------------------------------------------------------
        user_ids, _ = self.clients.resource_registry.find_subjects(RT.UserInfo, PRED.hasNotification, notification_id, True)

        for user_id in user_ids:
            self.update_user_info_object(user_id, notification_request)

        #-------------------------------------------------------------------------------------------------------------------
        # Generate an event that can be picked by a notification worker so that it can update its user_info dictionary
        #-------------------------------------------------------------------------------------------------------------------
        log.info("(delete notification) Publishing ReloadUserInfoEvent for notification_id: %s", notification_id)

        self.event_publisher.publish_event( event_type= "ReloadUserInfoEvent",
            origin="UserNotificationService",
            description= "A notification has been deleted.",
            notification_id = notification_id)

#    def delete_notification_from_user_info(self, notification_id):
#        """
#        Helper method to delete the notification from the user_info dictionary
#
#        @param notification_id str
#        """
#
#        user_ids, assocs = self.clients.resource_registry.find_subjects(object=notification_id, predicate=PRED.hasNotification, id_only=True)
#
#        for assoc in assocs:
#            self.clients.resource_registry.delete_association(assoc)
#
#        for user_id in user_ids:
#
#            value = self.user_info[user_id]
#
#            for notif in value['notifications']:
#                if notification_id == notif._id:
#                    # remove the notification
#                    value['notifications'].remove(notif)
#
#        self.reverse_user_info = calculate_reverse_user_info(self.user_info)

    def find_events(self, origin='', type='', min_datetime=0, max_datetime=0, limit= -1, descending=False):
        """
        This method leverages couchdb view and simple filters. It does not use elastic search.

        Returns a list of events that match the specified search criteria. Will throw a NotFound exception
        if no events exist for the given parameters.

        @param origin         str
        @param type           str
        @param min_datetime   int  seconds
        @param max_datetime   int  seconds
        @param limit          int         (integer limiting the number of results (0 means unlimited))
        @param descending     boolean     (if True, reverse order (of production time) is applied, e.g. most recent first)
        @retval event_list    []
        @throws NotFound    object with specified parameters does not exist
        """
        event_tuples = []

        try:
            event_tuples = self.container.event_repository.find_events(event_type=type, origin=origin, start_ts=min_datetime, end_ts=max_datetime, limit=limit, descending=descending)
        except Exception as exc:
            log.warning("The UNS find_events operation for event origin = %s and type = %s failed. Error message = %s", origin, type, exc.message)

        events = [item[2] for item in event_tuples]
        log.debug("(find_events) UNS found the following relevant events: %s", events)

        return events


    #todo Uses Elastic Search. Later extend this to a larger search criteria
    def find_events_extended(self, origin='', type='', min_time= 0, max_time=0, limit=-1, descending=False):
        """Uses Elastic Search. Returns a list of events that match the specified search criteria. Will throw a not NotFound exception
        if no events exist for the given parameters.

        @param origin         str
        @param type           str
        @param min_time   int seconds
        @param max_time   int seconds
        @param limit          int         (integer limiting the number of results (0 means unlimited))
        @param descending     boolean     (if True, reverse order (of production time) is applied, e.g. most recent first)
        @retval event_list    []
        @throws NotFound    object with specified parameters does not exist
        @throws NotFound    object with specified parameters does not exist
        """

        query = []

        if min_time and max_time:
            query.append( "SEARCH 'ts_created' VALUES FROM %s TO %s FROM 'events_index'" % (min_time, max_time))

        if origin:
            query.append( 'search "origin" is "%s" from "events_index"' % origin)

        if type:
            query.append( 'search "type_" is "%s" from "events_index"' % type)

        search_string = ' and '.join(query)


        # get the list of ids corresponding to the events
        ret_vals = self.discovery.parse(search_string)
        if len(query) > 1:
            events = self.datastore.read_mult(ret_vals)
        else:
            events = [i['_source'] for i in ret_vals]

        log.debug("(find_events_extended) Discovery search returned the following event ids: %s", ret_vals)


        log.debug("(find_events_extended) UNS found the following relevant events: %s", events)

        if limit > 0:
            return events[:limit]

        #todo implement time ordering: ascending or descending

        return events
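
    # Illustrative usage (hypothetical origin id and timestamps): search the events index
    # for recent events of a given type from one origin.
    #
    #   events = uns_client.find_events_extended(origin=instrument_device_id,
    #                                            type='ResourceModifiedEvent',
    #                                            min_time=start_ts,
    #                                            max_time=end_ts,
    #                                            limit=10)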

    def publish_event_object(self, event=None):
        """
        This service operation would publish the given event from an event object.

        @param event    !Event
        @retval event   !Event
        """
        event = self.event_publisher.publish_event_object(event_object=event)
        log.info("The publish_event_object(event) method of UNS was used to publish the event: %s", event )

        return event

    def publish_event(self, event_type='', origin='', origin_type='', sub_type='', description='', event_attrs=None):
        """
        This service operation assembles a new Event object based on event_type
        (e.g. via the pyon Event publisher) with optional additional attributes from an event_attrs
        dict of arbitrary attributes.

        @param event_type   str
        @param origin       str
        @param origin_type  str
        @param sub_type     str
        @param description  str
        @param event_attrs  dict
        @retval event       !Event
        """
        event_attrs = event_attrs or {}

        event = self.event_publisher.publish_event(
            event_type = event_type,
            origin = origin,
            origin_type = origin_type,
            sub_type = sub_type,
            description = description,
            **event_attrs
            )
        log.info("The publish_event() method of UNS was used to publish an event: %s", event)

        return event

    def get_recent_events(self, resource_id='', limit = 100):
        """
        Get recent events for use in extended resource computed attribute
        @param resource_id str
        @param limit int
        @retval ComputedListValue with value list of 4-tuple with Event objects
        """

        now = get_ion_ts()
        events = self.find_events(origin=resource_id, limit=limit, max_datetime=now, descending=True)

        ret = IonObject(OT.ComputedEventListValue)
        if events:
            ret.value = events
            ret.computed_list = [self._get_event_computed_attributes(event) for event in events]
            ret.status = ComputedValueAvailability.PROVIDED
        else:
            ret.status = ComputedValueAvailability.NOTAVAILABLE

        return ret

    def _get_event_computed_attributes(self, event):
        """
        @param event any Event to compute attributes for
        @retval an EventComputedAttributes object for given event
        """
        evt_computed = IonObject(OT.EventComputedAttributes)
        evt_computed.event_id = event._id
        evt_computed.ts_computed = get_ion_ts()

        try:
            summary = self._get_event_summary(event)
            evt_computed.event_summary = summary

            spc_attrs = ["%s:%s" % (k, str(getattr(event, k))[:50]) for k in sorted(event.__dict__.keys()) if k not in ['_id', '_rev', 'type_', 'origin', 'origin_type', 'ts_created', 'base_types']]
            evt_computed.special_attributes = ", ".join(spc_attrs)

            evt_computed.event_attributes_formatted = pprint.pformat(event.__dict__)
        except Exception as ex:
            log.exception("Error computing EventComputedAttributes for event %s" % event)

        return evt_computed

    def _get_event_summary(self, event):
        event_types = [event.type_] + event.base_types
        summary = ""
        if "ResourceLifecycleEvent" in event_types:
            summary = "%s lifecycle state change: %s_%s" % (event.origin_type, event.lcstate, event.availability)
        elif "ResourceModifiedEvent" in event_types:
            summary = "%s modified: %s" % (event.origin_type, event.sub_type)

        elif "ResourceAgentStateEvent" in event_types:
            summary = "%s agent state change: %s" % (event.origin_type, event.state)
        elif "ResourceAgentResourceStateEvent" in event_types:
            summary = "%s agent resource state change: %s" % (event.origin_type, event.state)
        elif "ResourceAgentConfigEvent" in event_types:
            summary = "%s agent config set: %s" % (event.origin_type, event.config)
        elif "ResourceAgentResourceConfigEvent" in event_types:
            summary = "%s agent resource config set: %s" % (event.origin_type, event.config)
        elif "ResourceAgentCommandEvent" in event_types:
            summary = "%s agent command '%s(%s)' succeeded: %s" % (event.origin_type, event.command, event.execute_command, "" if event.result is None else event.result)
        elif "ResourceAgentErrorEvent" in event_types:
            summary = "%s agent command '%s(%s)' failed: %s:%s (%s)" % (event.origin_type, event.command, event.execute_command, event.error_type, event.error_msg, event.error_code)
        elif "ResourceAgentAsyncResultEvent" in event_types:
            summary = "%s agent async command '%s(%s)' succeeded: %s" % (event.origin_type, event.command, event.desc, "" if event.result is None else event.result)

        elif "ResourceAgentResourceCommandEvent" in event_types:
            summary = "%s agent resource command '%s(%s)' executed: %s" % (event.origin_type, event.command, event.execute_command, "OK" if event.result is None else event.result)
        elif "DeviceStatusEvent" in event_types:
            summary = "%s '%s' status change: %s" % (event.origin_type, event.sub_type, DeviceStatusType._str_map.get(event.state,"???"))
        elif "DeviceOperatorEvent" in event_types or "ResourceOperatorEvent" in event_types:
            summary = "Operator entered: %s" % event.description

        elif "OrgMembershipGrantedEvent" in event_types:
            summary = "Joined Org '%s' as member" % (event.org_name)
        elif "OrgMembershipCancelledEvent" in event_types:
            summary = "Cancelled Org '%s' membership" % (event.org_name)
        elif "UserRoleGrantedEvent" in event_types:
            summary = "Granted %s in Org '%s'" % (event.role_name, event.org_name)
        elif "UserRoleRevokedEvent" in event_types:
            summary = "Revoked %s in Org '%s'" % (event.role_name, event.org_name)
        elif "ResourceSharedEvent" in event_types:
            summary = "%s shared in Org: '%s'" % (event.sub_type, event.org_name)
        elif "ResourceUnsharedEvent" in event_types:
            summary = "%s unshared in Org: '%s'" % (event.sub_type, event.org_name)
        elif "ResourceCommitmentCreatedEvent" in event_types:
            summary = "%s commitment created in Org: '%s'" % (event.commitment_type, event.org_name)
        elif "ResourceCommitmentReleasedEvent" in event_types:
            summary = "%s commitment released in Org: '%s'" % (event.commitment_type, event.org_name)

#        if event.description and summary:
#            summary = summary + ". " + event.description
#        elif event.description:
#            summary = event.description
        return summary

    def get_user_notifications(self, user_info_id=''):
        """
        Get the notification request objects that are subscribed to by the user

        @param user_info_id str

        @retval notifications list of NotificationRequest objects
        """

        if user_info_id in self.user_info:
            notifications = self.user_info[user_info_id]['notifications']

            log.debug("Got %s notifications, for the user: %s", len(notifications), user_info_id)

            # remove notifications that have expired
            # (iterate over a copy so that removing items does not skip entries)
            for notif in list(notifications):
                if notif.temporal_bounds.end_datetime != '':
                    log.debug("removing notification: %s", notif)
                    notifications.remove(notif)

            return notifications

#            ret = IonObject(OT.ComputedListValue)
#
#            if notifications:
#                ret.value = notifications
#                ret.status = ComputedValueAvailability.PROVIDED
#            else:
#                ret.status = ComputedValueAvailability.NOTAVAILABLE
#            return ret
#        else:
#            return None

    def create_worker(self, number_of_workers=1):
        """
        Creates notification workers

        @param number_of_workers int
        @retval pids list

        """

        pids = []

        for n in xrange(number_of_workers):

            process_definition = ProcessDefinition( name='notification_worker_%s' % n)

            process_definition.executable = {
                'module': 'ion.processes.data.transforms.notification_worker',
                'class':'NotificationWorker'
            }
            process_definition_id = self.process_dispatcher.create_process_definition(process_definition=process_definition)

            # ------------------------------------------------------------------------------------
            # Process Spawning
            # ------------------------------------------------------------------------------------

            pid2 = self.process_dispatcher.create_process(process_definition_id)

            #@todo put in a configuration
            configuration = {}
            configuration['process'] = dict({
                'name': 'notification_worker_%s' % n,
                'type':'simple',
                'queue_name': 'notification_worker_queue'
            })

            pid  = self.process_dispatcher.schedule_process(
                process_definition_id,
                configuration = configuration,
                process_id=pid2
            )

            pids.append(pid)

        return pids

    def process_batch(self, start_time = '', end_time = ''):
        """
        This method is launched when a process_batch event is received. The user info dictionary maintained
        by the User Notification Service is used to query the event repository for all events for a particular
        user that have occurred in a provided time interval, and then an email is sent to the user containing
        the digest of all the events.

        @param start_time int milliseconds
        @param end_time int milliseconds
        """
        self.smtp_client = setting_up_smtp_client()

        if end_time <= start_time:
            return

        for user_id, value in self.user_info.iteritems():

            notifications = value['notifications']
            notification_preferences = value['notification_preferences']

            # Skip users who do NOT want batch notifications or who have disabled delivery.
            # If notification preferences have not been set for the user, do not skip them (the default mechanism applies)
            if notification_preferences:
                if notification_preferences.delivery_mode != NotificationDeliveryModeEnum.BATCH \
                    or not notification_preferences.delivery_enabled:
                    continue

            events_for_message = []

            search_time = "SEARCH 'ts_created' VALUES FROM %s TO %s FROM 'events_index'" % (start_time, end_time)

            for notification in notifications:

                # If the notification request has expired, then do not use it in the search
                if notification.temporal_bounds.end_datetime:
                    continue

                if notification.origin:
                    search_origin = 'search "origin" is "%s" from "events_index"' % notification.origin
                else:
                    search_origin = 'search "origin" is "*" from "events_index"'

                if notification.origin_type:
                    search_origin_type= 'search "origin_type" is "%s" from "events_index"' % notification.origin_type
                else:
                    search_origin_type= 'search "origin_type" is "*" from "events_index"'

                if notification.event_type:
                    search_event_type = 'search "type_" is "%s" from "events_index"' % notification.event_type
                else:
                    search_event_type = 'search "type_" is "*" from "events_index"'

                search_string = search_time + ' and ' + search_origin + ' and ' + search_origin_type + ' and ' + search_event_type

                # get the list of ids corresponding to the events
                ret_vals = self.discovery.parse(search_string)

                events_for_message.extend(self.datastore.read_mult(ret_vals))

            log.debug("Found following events of interest to user, %s: %s", user_id, events_for_message)

            # send a notification email to each user using a _send_email() method
            if events_for_message:
                self.format_and_send_email(events_for_message = events_for_message,
                                            user_id = user_id,
                                            smtp_client=self.smtp_client)

        self.smtp_client.quit()


    def format_and_send_email(self, events_for_message = None, user_id = None, smtp_client = None):
        """
        Format the digest message for a particular user, containing information about the events they are to be notified about

        @param events_for_message list
        @param user_id str
        @param smtp_client object
        """

        message = str(events_for_message)
        log.debug("The user, %s, will get the following events in his batch notification email: %s", user_id, message)

        msg_body = ''
        count = 1

        for event in events_for_message:

            ts_created = _convert_to_human_readable(event.ts_created)

            msg_body += string.join(("\r\n",
                                     "Event %s: %s" % (count, event),
                                     "",
                                     "Originator: %s" % event.origin,
                                     "",
                                     "Description: %s" % (event.description or "Not provided"),
                                     "",
                                     "ts_created: %s" % ts_created,
                                     "\r\n",
                                     "------------------------",
                                     "\r\n"))
            count += 1

        msg_body += "You received this notification from ION because you asked to be " +\
                    "notified about this event from this source. " +\
                    "To modify or remove notifications about this event, " +\
                    "please access My Notifications Settings in the ION Web UI. " +\
                    "Do not reply to this email.  This email address is not monitored " +\
                    "and the emails will not be read. \r\n "


        log.debug("The email has the following message body: %s", msg_body)

        msg_subject = "(SysName: " + get_sys_name() + ") ION event "

        self.send_batch_email(  msg_body = msg_body,
            msg_subject = msg_subject,
            msg_recipient=self.user_info[user_id]['user_contact'].email,
            smtp_client=smtp_client )


    def send_batch_email(self, msg_body = None, msg_subject = None, msg_recipient = None, smtp_client = None):
        """
        Send the email

        @param msg_body str
        @param msg_subject str
        @param msg_recipient str
        @param smtp_client object
        """

        msg = MIMEText(msg_body)
        msg['Subject'] = msg_subject
        msg['From'] = self.ION_NOTIFICATION_EMAIL_ADDRESS
        msg['To'] = msg_recipient
        log.debug("UNS sending batch (digest) email from %s to %s" , self.ION_NOTIFICATION_EMAIL_ADDRESS, msg_recipient)

        smtp_sender = CFG.get_safe('server.smtp.sender')

        smtp_client.sendmail(smtp_sender, [msg_recipient], msg.as_string())

    def update_user_info_object(self, user_id, new_notification):
        """
        Update the notification stored on the UserInfo object with the passed-in new_notification

        @param user_id str
        @param new_notification NotificationRequest
        """

        #------------------------------------------------------------------------------------
        # read the user
        #------------------------------------------------------------------------------------

        user = self.clients.resource_registry.read(user_id)

        if not user:
            raise BadRequest("No user with the provided user_id: %s" % user_id)

        for item in user.variables:
            if item['name'] == 'notifications':
                for notif in item['value']:
                    if notif._id == new_notification._id:
                        log.debug("came here for updating notification")
                        notifications = item['value']
                        notifications.remove(notif)
                        notifications.append(new_notification)

                break

        #------------------------------------------------------------------------------------
        # update the resource registry
        #------------------------------------------------------------------------------------

        log.debug("user.variables::: %s", user.variables)

        self.clients.resource_registry.update(user)

        return user


    def _get_subscriptions(self, resource_id='', include_nonactive=False):
        """
        This method is used to get the subscriptions to a data product. The method will return a list of NotificationRequest
        objects for whom the origin is set to this data product. This way all the users who were interested in listening to
        events with origin equal to this data product, will be known and all their subscriptions will be known.

        @param resource_id
        @param include_nonactive
        @return notification_requests []

        """

        search_origin = 'search "origin" is "%s" from "resources_index"' % resource_id
        ret_vals = self.discovery.parse(search_origin)

        log.debug("Using discovery with search_string: %s", search_origin)
        log.debug("_get_subscriptions() got ret_vals: %s", ret_vals )

        notifications_all = set()
        notifications_active = set()

        object_ids = []
        for item in ret_vals:
            if item['_type'] == 'NotificationRequest':
                object_ids.append(item['_id'])

        notifs = self.clients.resource_registry.read_mult(object_ids)

        log.debug("Got %s notifications here. But they include both active and past notifications", len(notifs))

        if include_nonactive:
            # Add active or retired notification
            notifications_all.update(notifs)
        else:
            for notif in notifs:
                log.debug("Got the end_datetime here: notif.temporal_bounds.end_datetime = %s", notif.temporal_bounds.end_datetime)
                if notif.temporal_bounds.end_datetime == '':
                    log.debug("removing the notification: %s", notif._id)
                    # Add the active notification
                    notifications_active.add(notif)

        if include_nonactive:
            return list(notifications_all)
        else:
            return list(notifications_active)

    def get_subscriptions(self, resource_id='', user_id = '', include_nonactive=False):
        """
        This method first finds all notification requests whose origin is the given resource_id.
        Then, if a user_id is provided, it filters to those notification requests associated with that user.
        """

        # Get the notifications whose origin field has the provided resource_id
        notifs = self._get_subscriptions(resource_id=resource_id, include_nonactive=include_nonactive)

        log.debug("For include_nonactive= %s, UNS fetched the following the notifications subscribed to the resource_id: %s --> %s. "
                      "They are %s in number", include_nonactive,resource_id, notifs, len(notifs))

        if not user_id:
            return notifs

        notifications = []

        # Now find the users who subscribed to the above notifications
        #todo Right now looking at assocs in a loop which is not efficient to find the users linked to these notifications
        # todo(contd) Need to use a more efficient way later
        for notif in notifs:
            notif_id = notif._id
            # Find if the user is associated with this notification request
            ids, _ = self.clients.resource_registry.find_subjects( subject_type = RT.UserInfo, object=notif_id, predicate=PRED.hasNotification, id_only=True)
            log.debug("Got the following users: %s, associated with the notification: %s", ids, notif_id)

            if ids and user_id in ids:
                notifications.append(notif)

        log.debug("For include_nonactive = %s, UNS fetched the following %s notifications subscribed to %s --> %s", include_nonactive,len(notifications),user_id, notifications)

        return notifications

    def get_subscriptions_attribute(self, resource_id='', user_id = '', include_nonactive=False):
        retval = self.get_subscriptions(resource_id=resource_id, user_id=user_id, include_nonactive=include_nonactive)
        container = ComputedListValue(value=retval)
        return container


#    def get_users_who_subscribed(self, resource_id='', include_nonactive=False):
#
#        # Get the notifications whose origin field has the provided resource_id
#        notifications = self.get_subscriptions(resource_id, include_nonactive)
#
#        # Now find the users who subscribed to the above notifications
#        #todo Right now looking at assocs in a loop which is not efficient to find the users linked to these notifications
#        # todo(contd) Need to use a more efficient way later
#
#        user_ids = set()
#        for notif in notifications:
#            notif_id = notif._id
#            # Find the users who are associated with this notification request
#            ids, _ = self.clients.resource_registry.find_subjects( subject_type = RT.UserInfo, object=notif_id, predicate=PRED.hasNotification, id_only=True)
#            user_ids.add(ids)
#
#        return user_ids

    def _notification_in_notifications(self, notification = None, notifications = None):

        for id, notif in notifications.iteritems():
            if notif.name == notification.name and \
            notif.origin == notification.origin and \
            notif.origin_type == notification.origin_type and \
            notif.event_type == notification.event_type:
                return id
        return None

    def _update_notification_in_notifications_dict(self, new_notification = None, notifications = None ):

        for id, notif in notifications.iteritems():
            if id == new_notification._id:
                notifications.pop(id)
                notifications[id] = new_notification
                break


    def load_user_info(self):
        '''
        Method to load the user info dictionary used by the notification workers and the UNS

        @retval user_info dict
        '''

        users, _ = self.clients.resource_registry.find_resources(restype= RT.UserInfo)

        user_info = {}

        if not users:
            return {}

        for user in users:
            notifications = []
            notification_preferences = None
            for variable in user.variables:
                if variable['name'] == 'notifications':
                    notifications = variable['value']

                if variable['name'] == 'notification_preferences':
                    notification_preferences = variable['value']

            user_info[user._id] = { 'user_contact' : user.contact, 'notifications' : notifications, 'notification_preferences' : notification_preferences}

        return user_info
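
    # Illustrative shape of the returned dictionary (hypothetical ids), as built above:
    #
    #   {user_info_id: {'user_contact': <ContactInformation>,
    #                   'notifications': [<NotificationRequest>, ...],
    #                   'notification_preferences': <NotificationPreferences or None>}}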
Code Example #16
    def on_start(self):

        #---------------------------------------------------------------------------------------------------
        # Get the event Repository
        #---------------------------------------------------------------------------------------------------

#        self.ION_NOTIFICATION_EMAIL_ADDRESS = '*****@*****.**'
        self.ION_NOTIFICATION_EMAIL_ADDRESS = CFG.get_safe('server.smtp.sender')


        #---------------------------------------------------------------------------------------------------
        # Create an event processor
        #---------------------------------------------------------------------------------------------------

        self.event_processor = EmailEventProcessor()

        #---------------------------------------------------------------------------------------------------
        # load event originators, types, and table
        #---------------------------------------------------------------------------------------------------

        self.notifications = {}

        #---------------------------------------------------------------------------------------------------
        # Dictionaries that maintain information about users and their subscribed notifications
        # The reverse_user_info is calculated from the user_info dictionary
        #---------------------------------------------------------------------------------------------------
        self.user_info = {}
        self.reverse_user_info = {}

        #---------------------------------------------------------------------------------------------------
        # Get the clients
        #---------------------------------------------------------------------------------------------------

        self.discovery = DiscoveryServiceClient()
        self.process_dispatcher = ProcessDispatcherServiceClient()
        self.event_publisher = EventPublisher()
        self.datastore = self.container.datastore_manager.get_datastore('events')

        self.start_time = get_ion_ts()

        #------------------------------------------------------------------------------------
        # Create an event subscriber for Reload User Info events
        #------------------------------------------------------------------------------------

        def reload_user_info(event_msg, headers):
            '''
            Callback method for the subscriber to ReloadUserInfoEvent
            '''

            notification_id =  event_msg.notification_id
            log.debug("(UNS instance received a ReloadNotificationEvent. The relevant notification_id is %s" % notification_id)

            try:
                self.user_info = self.load_user_info()
            except NotFound:
                log.warning("ElasticSearch has not yet loaded the user_index.")

            self.reverse_user_info =  calculate_reverse_user_info(self.user_info)

            log.debug("(UNS instance) After a reload, the user_info: %s" % self.user_info)
            log.debug("(UNS instance) The recalculated reverse_user_info: %s" % self.reverse_user_info)

        # the subscriber for the ReloadUserInfoEvent
        self.reload_user_info_subscriber = EventSubscriber(
            event_type="ReloadUserInfoEvent",
            origin='UserNotificationService',
            callback=reload_user_info
        )
        self.reload_user_info_subscriber.start()
        # For cleanup of the subscriber
        self._subscribers.append(self.reload_user_info_subscriber)
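    # A minimal sketch of how the callback above gets exercised: any process may publish a
    # ReloadUserInfoEvent with origin 'UserNotificationService' (as the notification CRUD
    # operations of this service do elsewhere), and this subscriber rebuilds user_info and
    # reverse_user_info. The notification_id value below is purely illustrative.
    #
    #   EventPublisher().publish_event(event_type='ReloadUserInfoEvent',
    #                                  origin='UserNotificationService',
    #                                  description='A notification has been created.',
    #                                  notification_id=notification_id)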
Code Example #17
class DiscoveryIntTest(IonIntegrationTestCase):
    def setUp(self):
        super(DiscoveryIntTest, self).setUp()

        self._start_container()
        self.addCleanup(DiscoveryIntTest.es_cleanup)
        self.container.start_rel_from_url('res/deploy/r2dm.yml')

        self.discovery = DiscoveryServiceClient()
        self.catalog   = CatalogManagementServiceClient()
        self.ims       = IndexManagementServiceClient()
        self.rr        = ResourceRegistryServiceClient()

        if use_es:
            self.es_host   = CFG.get_safe('server.elasticsearch.host', 'localhost')
            self.es_port   = CFG.get_safe('server.elasticsearch.port', '9200')
            CFG.server.elasticsearch.shards         = 1
            CFG.server.elasticsearch.replicas       = 0
            CFG.server.elasticsearch.river_shards   = 1
            CFG.server.elasticsearch.river_replicas = 0
            self.es = ep.ElasticSearch(
                host=self.es_host,
                port=self.es_port,
                timeout=10,
                verbose=True
            )
            op = DotDict(CFG)
            op.op = 'clean_bootstrap'
            self.container.spawn_process('index_bootstrap','ion.processes.bootstrap.index_bootstrap','IndexBootStrap', op)

    @staticmethod
    def es_cleanup():
        es_host = CFG.get_safe('server.elasticsearch.host', 'localhost')
        es_port = CFG.get_safe('server.elasticsearch.port', '9200')
        es = ep.ElasticSearch(
            host=es_host,
            port=es_port,
            timeout=10
        )
        indexes = STD_INDEXES.keys()
        indexes.append('%s_resources_index' % get_sys_name().lower())
        indexes.append('%s_events_index' % get_sys_name().lower())

        for index in indexes:
            IndexManagementService._es_call(es.river_couchdb_delete,index)
            IndexManagementService._es_call(es.index_delete,index)


    def poll(self, tries, callback, *args, **kwargs):
        '''
        Polling wrapper for queries
        Elasticsearch may not index and cache the changes right away so we may need 
        a couple of tries and a little time to go by before the results show.
        '''
        for i in xrange(tries):
            retval = callback(*args, **kwargs)
            if retval:
                return retval
            time.sleep(0.2)
        return None
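    # A minimal usage sketch for the polling wrapper above; the view id and search string are
    # illustrative and mirror the tests that follow:
    #
    #   results = self.poll(5, self.discovery.parse,
    #                       "search 'name' is 'sonobuoy*' from '%s'" % view_id)
    #   self.assertIsNotNone(results, 'Results not found')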


    def test_traversal(self):
        dp        = DataProcess()
        transform = Transform()
        pd        = ProcessDefinition()

        dp_id, _        = self.rr.create(dp)
        transform_id, _ = self.rr.create(transform)
        pd_id, _        = self.rr.create(pd)

        self.rr.create_association(subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.rr.create_association(subject=transform_id, object=pd_id, predicate=PRED.hasProcessDefinition)

        results = self.discovery.traverse(dp_id)
        results.sort()
        correct = [pd_id, transform_id]
        correct.sort()
        self.assertTrue(results == correct, '%s' % results)

    def test_iterative_traversal(self):
        dp        = DataProcess()
        transform = Transform()
        pd        = ProcessDefinition()

        dp_id, _        = self.rr.create(dp)
        transform_id, _ = self.rr.create(transform)
        pd_id, _        = self.rr.create(pd)

        self.rr.create_association(subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.rr.create_association(subject=transform_id, object=pd_id, predicate=PRED.hasProcessDefinition)

        results = self.discovery.iterative_traverse(dp_id)
        results.sort()
        correct = [transform_id]
        self.assertTrue(results == correct)

        results = self.discovery.iterative_traverse(dp_id, 1)
        results.sort()
        correct = [transform_id, pd_id]
        correct.sort()
        self.assertTrue(results == correct)
    
    @skipIf(not use_es, 'No ElasticSearch')
    def test_view_crud(self):
        view_id = self.discovery.create_view('big_view',fields=['name'])
        catalog_id = self.discovery.list_catalogs(view_id)[0]
        index_ids = self.catalog.list_indexes(catalog_id)
        self.assertTrue(len(index_ids))

        view = self.discovery.read_view(view_id)
        self.assertIsInstance(view,View)
        self.assertTrue(view.name == 'big_view')

        view.name = 'not_so_big_view'

        self.discovery.update_view(view)

        view = self.discovery.read_view(view_id)
        self.assertTrue(view.name == 'not_so_big_view')

        self.discovery.delete_view(view_id)
        with self.assertRaises(NotFound):
            self.discovery.read_view(view_id)

    def test_view_best_match(self):
        #---------------------------------------------------------------
        # Matches the best catalog available OR creates a new one
        #---------------------------------------------------------------
        catalog_id = self.catalog.create_catalog('dev', keywords=['name','model'])
        view_id    = self.discovery.create_view('exact_view', fields=['name','model'])
        catalog_ids = self.discovery.list_catalogs(view_id)
        self.assertTrue(catalog_ids == [catalog_id])

        view_id = self.discovery.create_view('another_view', fields=['name','model'])
        catalog_ids = self.discovery.list_catalogs(view_id)
        self.assertTrue(catalog_ids == [catalog_id])

        view_id = self.discovery.create_view('big_view', fields=['name'])
        catalog_ids = self.discovery.list_catalogs(view_id)
        self.assertTrue(catalog_ids != [catalog_id])

    @skipIf(not use_es, 'No ElasticSearch')
    def test_basic_searching(self):

        #- - - - - - - - - - - - - - - - - 
        # set up the fake resources
        #- - - - - - - - - - - - - - - - - 

        instrument_pool = [
            InstrumentDevice(name='sonobuoy1', hardware_version='1'),
            InstrumentDevice(name='sonobuoy2', hardware_version='2'),
            InstrumentDevice(name='sonobuoy3', hardware_version='3')
        ]
        for instrument in instrument_pool:
            self.rr.create(instrument)

        view_id = self.discovery.create_view('devices', fields=['hardware_version'])

        search_string = "search 'hardware_version' is '2' from '%s'"%view_id
        results = self.poll(5, self.discovery.parse,search_string)
        result  = results[0]['_source']
        self.assertIsInstance(result, InstrumentDevice)
        self.assertTrue(result.name == 'sonobuoy2')
        self.assertTrue(result.hardware_version == '2')


    @skipIf(not use_es, 'No ElasticSearch')
    def test_associative_searching(self):

        view_id = self.discovery.create_view('devices', fields=['model'])
        site_id,_ = self.rr.create(Site('my_site'))
        pd_id, _  = self.rr.create(PlatformDevice('my_device', model='abc123'))
        self.rr.create_association(subject=site_id, object=pd_id, predicate=PRED.hasDevice)

        search_string = "search 'model' is 'abc*' from '%s' and belongs to '%s'"%(view_id, site_id)

        results = self.poll(5, self.discovery.parse,search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(pd_id in results)

    def test_iterative_associative_searching(self):
        #--------------------------------------------------------------------------------
        # Tests the ability to limit the iterations
        #--------------------------------------------------------------------------------
        dp        = DataProcess()
        transform = Transform()
        pd        = ProcessDefinition()

        dp_id, _        = self.rr.create(dp)
        transform_id, _ = self.rr.create(transform)
        pd_id, _        = self.rr.create(pd)

        self.rr.create_association(subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.rr.create_association(subject=transform_id, object=pd_id, predicate=PRED.hasProcessDefinition)

        search_string = "belongs to '%s' depth 1" % dp_id
        results = self.poll(5, self.discovery.parse,search_string)
        results = list([i._id for i in results])
        correct = [transform_id]
        self.assertTrue(results == correct, '%s' % results)

        search_string = "belongs to '%s' depth 2" % dp_id
        results = self.poll(5, self.discovery.parse,search_string)
        results = list([i._id for i in results])
        results.sort()
        correct = [transform_id, pd_id]
        correct.sort()
        self.assertTrue(results == correct)


    @skipIf(not use_es, 'No ElasticSearch')
    def test_ranged_value_searching(self):
        discovery = self.discovery
        rr        = self.rr
        
        view_id = discovery.create_view('bank_view', fields=['cash_balance'])
        bank_id, _ = rr.create(BankAccount(name='broke', cash_balance=10))

        search_string = "search 'cash_balance' values from 0 to 100 from '%s'" % view_id

        results = self.poll(5, discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == bank_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_collections_searching(self):

        site_id, _ = self.rr.create(Site(name='black_mesa'))
        view_id    = self.discovery.create_view('big', fields=['name'])

        # Add the site to a new collection
        collection_id = self.ims.create_collection('resource_collection', [site_id])

        search_string = "search 'name' is '*' from '%s' and in '%s'" %(view_id, collection_id)

        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0] == site_id, '%s' % results)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_search_by_name(self):
        inst_dev = InstrumentDevice(name='test_dev',serial_number='ABC123')

        dev_id, _ = self.rr.create(inst_dev)
        self.discovery.create_view('devs',fields=['name','serial_number'])

        search_string = "search 'serial_number' is 'abc*' from 'devs'"
        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dev_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_search_by_name_index(self):
        inst_dev = InstrumentDevice(name='test_dev',serial_number='ABC123')

        dev_id, _ = self.rr.create(inst_dev)
        search_string = "search 'serial_number' is 'abc*' from 'resources_index'"
        
        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dev_id)

        bank_acc = BankAccount(name='blah', cash_balance=10)
        res_id , _ = self.rr.create(bank_acc)

        search_string = "search 'cash_balance' values from 0 to 100 from 'resources_index'"

        results = self.poll(9, self.discovery.parse,search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == res_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_data_product_search(self):

        # Create the dataproduct
        dp = DataProduct(name='test_product')
        dp.data_format.name = 'test_signal'
        dp.data_format.description = 'test signal'
        dp.data_format.character_set = 'utf8'
        dp.data_format.nominal_sampling_rate_maximum = '44000'
        dp.data_format.nominal_sampling_rate_minimum = '44000'
        dp.data_product_level = 'basic'
        dp_id, _ = self.rr.create(dp)

        search_string = "search 'data_format.name' is 'test_signal' from 'data_products_index'"
        results = self.poll(9, self.discovery.parse, search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)

        search_string = "search 'data_product_level' is 'basic' from 'data_products_index'"
        results = self.poll(9, self.discovery.parse, search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)
        
        search_string = "search 'data_format.character_set' is 'utf8' from 'data_products_index'"
        results = self.poll(9, self.discovery.parse, search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_events_search(self):
        # Create a resource to force a new event

        dp = DataProcess()
        dp_id, rev = self.rr.create(dp)

        search_string = "SEARCH 'origin' IS '%s' FROM 'events_index'" % dp_id

        results = self.poll(9, self.discovery.parse,search_string)
        origin_type = results[0]['_source'].origin_type
        origin_id = results[0]['_source'].origin

        self.assertTrue(origin_type == RT.DataProcess)
        self.assertTrue(origin_id == dp_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_geo_distance_search(self):

        pd = PlatformDevice(name='test_dev')

        pd_id, _ = self.rr.create(pd)

        search_string = "search 'nominal_location' geo distance 20 km from lat 0 lon 0 from 'devices_index'"

        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')

        self.assertTrue(results[0]['_id'] == pd_id)
        self.assertTrue(results[0]['_source'].name == 'test_dev')
   
    @skipIf(not use_es, 'No ElasticSearch')
    def test_geo_bbox_search(self):

        pd = PlatformDevice(name='test_dev')
        pd.nominal_location.lat = 5
        pd.nominal_location.lon = 5

        pd_id, _ = self.rr.create(pd)

        search_string = "search 'nominal_location' geo box top-left lat 10 lon 0 bottom-right lat 0 lon 10 from 'devices_index'"

        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')

        self.assertTrue(results[0]['_id'] == pd_id)
        self.assertTrue(results[0]['_source'].name == 'test_dev')
Code Example #18
class UserNotificationService(BaseUserNotificationService):
    """
    A service that provides users with an API for CRUD methods for notifications.
    """

    def __init__(self, *args, **kwargs):
        self._subscribers = []
        self._schedule_ids = []
        BaseUserNotificationService.__init__(self, *args, **kwargs)

    def on_start(self):

        #---------------------------------------------------------------------------------------------------
        # Get the event Repository
        #---------------------------------------------------------------------------------------------------

        self.event_repo = self.container.instance.event_repository

        self.smtp_client = setting_up_smtp_client()

        self.ION_NOTIFICATION_EMAIL_ADDRESS = '*****@*****.**'

        #---------------------------------------------------------------------------------------------------
        # Create an event processor
        #---------------------------------------------------------------------------------------------------

        self.event_processor = EmailEventProcessor(self.smtp_client)

        #---------------------------------------------------------------------------------------------------
        # load event originators, types, and table
        #---------------------------------------------------------------------------------------------------

        self.notifications = {}

        #---------------------------------------------------------------------------------------------------
        # Get the clients
        #---------------------------------------------------------------------------------------------------

        self.discovery = DiscoveryServiceClient()
        self.process_dispatcher = ProcessDispatcherServiceClient()
        self.event_publisher = EventPublisher()

        self.start_time = UserNotificationService.makeEpochTime(self.__now())

    def on_quit(self):
        """
        Handles stop/terminate.

        Cleans up subscribers spawned here and cancels any timers registered with the scheduler.
        """
        for sub in self._subscribers:
            sub.stop()

        for sid in self._schedule_ids:
            try:
                self.clients.scheduler.cancel_timer(sid)
            except IonException as ex:
                log.info("Ignoring exception while cancelling schedule id (%s): %s: %s", sid, ex.__class__.__name__, ex)

        # Clean up the notification subscriptions' subscribers created in EmailEventProcessor object
        self.event_processor.cleanup()

        super(UserNotificationService, self).on_quit()

    def __now(self):
        """
        This method defines what the UNS uses as its "current" time
        """
        return datetime.utcnow()

    def set_process_batch_key(self, process_batch_key = ''):
        """
        This method allows an operator to set the process_batch_key, a string.
        Once this method is used by the operator, the UNS will start listening for timer events
        published by the scheduler with origin = process_batch_key.

        @param process_batch_key str
        """

        def process(event_msg, headers):
            assert event_msg.origin == process_batch_key

            self.end_time = UserNotificationService.makeEpochTime(self.__now())

            # run the process_batch() method
            self.process_batch(start_time=self.start_time, end_time=self.end_time)
            self.start_time = self.end_time

        # The subscriber for batch processing.
        # To trigger the batch notification, have the scheduler create a timer with event_origin = process_batch_key.
        self.batch_processing_subscriber = EventSubscriber(
            event_type="ResourceEvent",
            origin=process_batch_key,
            callback=process
        )
        self.batch_processing_subscriber.start()
        self._subscribers.append(self.batch_processing_subscriber)
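    # A minimal sketch of how the batch subscriber above is expected to be driven: publish (or
    # have the scheduler publish) a ResourceEvent whose origin equals the process_batch_key.
    # The names used here (uns, the key value) are illustrative.
    #
    #   uns.set_process_batch_key(process_batch_key='batch_processing_timer')
    #   EventPublisher().publish_event(event_type='ResourceEvent',
    #                                  origin='batch_processing_timer')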

    def create_notification(self, notification=None, user_id=''):
        """
        Persists the provided NotificationRequest object for the specified origin id and
        associates the Notification resource with the user_id string. The returned id is the
        internal id by which the NotificationRequest will be identified in the data store.

        @param notification        NotificationRequest
        @param user_id             str
        @retval notification_id    str
        @throws BadRequest    if object passed has _id or _rev attribute

        """

        if not user_id:
            raise BadRequest("User id not provided.")

        #---------------------------------------------------------------------------------------------------
        # Persist Notification object as a resource if it has not already been persisted
        #---------------------------------------------------------------------------------------------------

        # if the notification has already been registered, simply use the old id

        id = self._notification_in_notifications(notification, self.notifications)

        if id:
            log.debug("Notification object has already been created in resource registry before. No new id to be generated.")
            notification_id = id
        else:

            # since the notification has not been registered yet, register it and get the id
            notification.temporal_bounds = TemporalBounds()
            notification.temporal_bounds.start_datetime = self.makeEpochTime(self.__now())
            notification.temporal_bounds.end_datetime = ''

            notification_id, _ = self.clients.resource_registry.create(notification)
            self.notifications[notification_id] = notification

            # Link the user and the notification with a hasNotification association
            self.clients.resource_registry.create_association(user_id, PRED.hasNotification, notification_id)

        #-------------------------------------------------------------------------------------------------------------------
        # read the registered notification request object because this has an _id and is more useful
        #-------------------------------------------------------------------------------------------------------------------

        notification = self.clients.resource_registry.read(notification_id)

        #-----------------------------------------------------------------------------------------------------------
        # Create an event processor for user. This sets up callbacks etc.
        # As a side effect this updates the UserInfo object and also the user info and reverse user info dictionaries.
        #-----------------------------------------------------------------------------------------------------------

        user = self.event_processor.add_notification_for_user(notification_request=notification, user_id=user_id)

        #-------------------------------------------------------------------------------------------------------------------
        # Generate an event that can be picked by a notification worker so that it can update its user_info dictionary
        #-------------------------------------------------------------------------------------------------------------------
        log.debug("(create notification) Publishing ReloadUserInfoEvent for notification_id: %s" % notification_id)

        self.event_publisher.publish_event( event_type= "ReloadUserInfoEvent",
            origin="UserNotificationService",
            description= "A notification has been created.",
            notification_id = notification_id)

        return notification_id
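    # A minimal usage sketch (uns_client, the names and ids are illustrative). The fields shown
    # are the ones this service later compares in _notification_in_notifications():
    #
    #   request = NotificationRequest(name='low_battery_alert',
    #                                 origin=instrument_device_id,
    #                                 origin_type='InstrumentDevice',
    #                                 event_type='ResourceLifecycleEvent')
    #   notification_id = uns_client.create_notification(notification=request, user_id=user_id)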

    def update_notification(self, notification=None, user_id = ''):
        """Updates the provided NotificationRequest object.  Throws NotFound exception if
        an existing version of NotificationRequest is not found.  Throws Conflict if
        the provided NotificationRequest object is not based on the latest persisted
        version of the object.

        @param notification     NotificationRequest
        @throws BadRequest      if object does not have _id or _rev attribute
        @throws NotFound        object with specified id does not exist
        @throws Conflict        object not based on latest persisted object version
        """

        #-------------------------------------------------------------------------------------------------------------------
        # Get the old notification
        #-------------------------------------------------------------------------------------------------------------------

        old_notification = self.clients.resource_registry.read(notification._id)

        #-------------------------------------------------------------------------------------------------------------------
        # Update the notification in the notifications dict
        #-------------------------------------------------------------------------------------------------------------------


        self._update_notification_in_notifications_dict(new_notification=notification,
                                                        old_notification=old_notification,
                                                        notifications=self.notifications)
        #-------------------------------------------------------------------------------------------------------------------
        # Update the notification in the registry
        #-------------------------------------------------------------------------------------------------------------------

        self.clients.resource_registry.update(notification)

        #-------------------------------------------------------------------------------------------------------------------
        # read the notification object again to make sure we have the newly registered notification request object
        #-------------------------------------------------------------------------------------------------------------------

        notification_id = notification._id
        notification = self.clients.resource_registry.read(notification_id)

        #------------------------------------------------------------------------------------
        # Update the UserInfo object
        #------------------------------------------------------------------------------------

        user = self.update_user_info_object(user_id, notification, old_notification)

        #------------------------------------------------------------------------------------
        # Update the user_info dictionary maintained by UNS
        #------------------------------------------------------------------------------------

        self.update_user_info_dictionary(user_id, notification, old_notification)

        #-------------------------------------------------------------------------------------------------------------------
        # Generate an event that can be picked by notification workers so that they can update their user_info dictionary
        #-------------------------------------------------------------------------------------------------------------------
        log.info("(update notification) Publishing ReloadUserInfoEvent for updated notification")

        self.event_publisher.publish_event( event_type= "ReloadUserInfoEvent",
            origin="UserNotificationService",
            description= "A notification has been updated."
        )

    def read_notification(self, notification_id=''):
        """Returns the NotificationRequest object for the specified notification id.
        Throws exception if id does not match any persisted NotificationRequest
        objects.

        @param notification_id    str
        @retval notification    NotificationRequest
        @throws NotFound    object with specified id does not exist
        """
        notification = self.clients.resource_registry.read(notification_id)

        return notification

    def delete_notification(self, notification_id=''):
        """For now, permanently deletes NotificationRequest object with the specified
        id. Throws exception if id does not match any persisted NotificationRequest.

        @param notification_id    str
        @throws NotFound    object with specified id does not exist
        """

        #-------------------------------------------------------------------------------------------------------------------
        # Stop the event subscriber for the notification
        #-------------------------------------------------------------------------------------------------------------------
        notification_request = self.clients.resource_registry.read(notification_id)
        old_notification = notification_request

        self.event_processor.stop_notification_subscriber(notification_request=notification_request)

        #-------------------------------------------------------------------------------------------------------------------
        # Update the resource registry
        #-------------------------------------------------------------------------------------------------------------------

        notification_request.temporal_bounds.end_datetime = self.makeEpochTime(self.__now())

        self.clients.resource_registry.update(notification_request)

        #-------------------------------------------------------------------------------------------------------------------
        # Update the user info dictionaries
        #-------------------------------------------------------------------------------------------------------------------

        for user_id in self.event_processor.user_info.iterkeys():
            self.update_user_info_dictionary(user_id, notification_request, old_notification)

        #-------------------------------------------------------------------------------------------------------------------
        # Generate an event that can be picked by a notification worker so that it can update its user_info dictionary
        #-------------------------------------------------------------------------------------------------------------------
        log.info("(delete notification) Publishing ReloadUserInfoEvent for notification_id: %s" % notification_id)

        self.event_publisher.publish_event( event_type= "ReloadUserInfoEvent",
            origin="UserNotificationService",
            description= "A notification has been deleted.",
            notification_id = notification_id)

    def delete_notification_from_user_info(self, notification_id):
        """
        Helper method to delete the notification from the user_info dictionary

        @param notification_id str
        """

        user_ids, assocs = self.clients.resource_registry.find_subjects(object=notification_id, predicate=PRED.hasNotification, id_only=True)

        for assoc in assocs:
            self.clients.resource_registry.delete_association(assoc)

        for user_id in user_ids:

            value = self.event_processor.user_info[user_id]

            for notif in value['notifications']:
                if notification_id == notif._id:
                    # remove the notification
                    value['notifications'].remove(notif)
                    # remove the notification_subscription
                    self.event_processor.user_info[user_id]['notification_subscriptions'].pop(notification_id)

        self.event_processor.reverse_user_info = calculate_reverse_user_info(self.event_processor.user_info)

    def find_events(self, origin='', type='', min_datetime=0, max_datetime=0, limit= -1, descending=False):
        """
        This method leverages a couchdb view and simple filters. It does not use ElasticSearch.

        Returns a list of events that match the specified search criteria. Throws a NotFound exception
        if no events exist for the given parameters.

        @param origin         str
        @param type           str
        @param min_datetime   int  seconds
        @param max_datetime   int  seconds
        @param limit          int         (integer limiting the number of results (0 means unlimited))
        @param descending     boolean     (if True, reverse order (of production time) is applied, e.g. most recent first)
        @retval event_list    []
        @throws NotFound    object with specified parameters does not exist
        """
        datastore = self.container.datastore_manager.get_datastore('events')


        # The reason for the if-else below is that couchdb query_view does not support passing in Null or -1 for limit.
        # If the operator does not want to limit the search results in find_events and therefore does not provide a
        # limit, it has to be omitted from the opts dictionary passed into the query_view() method.
        # Passing a null or negative limit to query_view through opts results in a ServerError, so we cannot do that.
        if limit > -1:
            opts = dict(
                start_key = [origin, type or 0, min_datetime or 0],
                end_key   = [origin, type or {}, max_datetime or {}],
                descending = descending,
                limit = limit,
                include_docs = True
            )

        else:
            opts = dict(
                start_key = [origin, type or 0, min_datetime or 0],
                end_key   = [origin, type or {}, max_datetime or {}],
                descending = descending,
                include_docs = True
            )
        if descending:
            t = opts['start_key']
            opts['start_key'] = opts['end_key']
            opts['end_key'] = t

        results = datastore.query_view('event/by_origintype',opts=opts)

        events = []
        for res in results:
            event_obj = res['doc']
            events.append(event_obj)

        log.debug("(find_events) UNS found the following relevant events: %s" % events)

        if -1 < limit < len(events):
            return events[:limit]

        return events
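    # A minimal usage sketch (uns_client, the origin id and event type are illustrative); this path
    # queries the couchdb 'event/by_origintype' view directly and does not use ElasticSearch:
    #
    #   events = uns_client.find_events(origin=instrument_device_id,
    #                                   type='ResourceModifiedEvent',
    #                                   min_datetime=start_ts,
    #                                   max_datetime=end_ts,
    #                                   limit=10,
    #                                   descending=True)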


    #todo Uses Elastic Search. Later extend this to a larger search criteria
    def find_events_extended(self, origin='', type='', min_time= 0, max_time=0, limit=-1, descending=False):
        """Uses Elastic Search. Returns a list of events that match the specified search criteria. Will throw a not NotFound exception
        if no events exist for the given parameters.

        @param origin         str
        @param type           str
        @param min_time   int seconds
        @param max_time   int seconds
        @param limit          int         (integer limiting the number of results (0 means unlimited))
        @param descending     boolean     (if True, reverse order (of production time) is applied, e.g. most recent first)
        @retval event_list    []
        @throws NotFound    object with specified parameters does not exist
        """

        if min_time and max_time:
            search_time = "SEARCH 'ts_created' VALUES FROM %s TO %s FROM 'events_index'" % (min_time, max_time)
        else:
            search_time = 'search "ts_created" is "*" from "events_index"'

        if origin:
            search_origin = 'search "origin" is "%s" from "events_index"' % origin
        else:
            search_origin = 'search "origin" is "*" from "events_index"'

        if type:
            search_type = 'search "type_" is "%s" from "events_index"' % type
        else:
            search_type = 'search "type_" is "*" from "events_index"'

        search_string = search_time + ' and ' + search_origin + ' and ' + search_type

        # get the list of ids corresponding to the events
        ret_vals = self.discovery.parse(search_string)
        log.debug("(find_events_extended) Discovery search returned the following event ids: %s" % ret_vals)

        events = []
        datastore = self.container.datastore_manager.get_datastore('events')
        for event_id in ret_vals:
            event_obj = datastore.read(event_id)
            events.append(event_obj)

        log.debug("(find_events_extended) UNS found the following relevant events: %s" % events)

        if limit > -1:
            return events[:limit]

        #todo implement time ordering: ascending or descending

        return events

    def publish_event(self, event=None):
        """
        Publish a general event at a certain time using the UNS

        @param event Event
        """

        self.event_publisher._publish_event( event_msg = event,
            origin=event.origin,
            event_type = event.type_)
        log.info("The publish_event() method of UNS was used to publish an event.")

    def get_recent_events(self, resource_id='', limit = 100):
        """
        Get recent events

        @param resource_id str
        @param limit int

        @retval events list of Event objects
        """

        now = self.makeEpochTime(datetime.utcnow())
        events = self.find_events(origin=resource_id,limit=limit, max_datetime=now, descending=True)

        ret = IonObject(OT.ComputedListValue)
        if events:
            ret.value = events
            ret.status = ComputedValueAvailability.PROVIDED
        else:
            ret.status = ComputedValueAvailability.NOTAVAILABLE

        return ret
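    # A minimal usage sketch; ComputedValueAvailability is the same object used above, and
    # uns_client and the resource id are illustrative:
    #
    #   ret = uns_client.get_recent_events(resource_id=instrument_device_id, limit=20)
    #   if ret.status == ComputedValueAvailability.PROVIDED:
    #       for event in ret.value:
    #           log.info("recent event: %s", event)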

    def get_user_notifications(self, user_id=''):
        """
        Get the notification request objects that are subscribed to by the user

        @param user_id str

        @retval notifications list of NotificationRequest objects
        """

        if self.event_processor.user_info.has_key(user_id):
            notifications = self.event_processor.user_info[user_id]['notifications']
            ret = IonObject(OT.ComputedListValue)

            if notifications:
                ret.value = notifications
                ret.status = ComputedValueAvailability.PROVIDED
            else:
                ret.status = ComputedValueAvailability.NOTAVAILABLE
            return ret
        else:
            return None

    def create_worker(self, number_of_workers=1):
        """
        Creates notification workers

        @param number_of_workers int
        @retval pids list

        """

        pids = []

        for n in xrange(number_of_workers):

            process_definition = ProcessDefinition( name='notification_worker_%s' % n)

            process_definition.executable = {
                'module': 'ion.processes.data.transforms.notification_worker',
                'class':'NotificationWorker'
            }
            process_definition_id = self.process_dispatcher.create_process_definition(process_definition=process_definition)

            # ------------------------------------------------------------------------------------
            # Process Spawning
            # ------------------------------------------------------------------------------------

            pid2 = self.process_dispatcher.create_process(process_definition_id)

            #@todo put in a configuration
            configuration = {}
            configuration['process'] = dict({
                'name': 'notification_worker_%s' % n,
                'type':'simple'
            })

            pid  = self.process_dispatcher.schedule_process(
                process_definition_id,
                configuration = configuration,
                process_id=pid2
            )

            pids.append(pid)

        return pids
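    # A minimal usage sketch: spawn two notification workers and keep the returned pids so the
    # processes can later be cancelled through the process dispatcher (uns_client is illustrative).
    #
    #   pids = uns_client.create_worker(number_of_workers=2)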

    @staticmethod
    def makeEpochTime(date_time):
        """
        Provides the seconds since epoch given a Python datetime object.

        @param date_time Python datetime object
        @retval seconds_since_epoch int
        """
        date_time = date_time.isoformat().split('.')[0].replace('T',' ')
        #'2009-07-04 18:30:47'
        pattern = '%Y-%m-%d %H:%M:%S'
        seconds_since_epoch = int(time.mktime(time.strptime(date_time, pattern)))

        return seconds_since_epoch
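    # A minimal usage sketch. Note that time.mktime() interprets the struct_time as local time,
    # so the returned value is a true Unix epoch only when the host clock runs in UTC.
    #
    #   ts = UserNotificationService.makeEpochTime(datetime.utcnow())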


    def process_batch(self, start_time = 0, end_time = 0):
        """
        This method is launched when a process_batch event is received. The user info dictionary maintained
        by the User Notification Service is used to query the event repository for all events for a particular
        user that have occurred in a provided time interval, and then an email is sent to the user containing
        the digest of all the events.

        @param start_time int
        @param end_time int
        """

        if end_time <= start_time:
            return

        for user_id, value in self.event_processor.user_info.iteritems():

            notifications = value['notifications']

            events_for_message = []

            search_time = "SEARCH 'ts_created' VALUES FROM %s TO %s FROM 'events_index'" % (start_time, end_time)

            for notification in notifications:

                # If the notification request has expired, then do not use it in the search
                if notification.temporal_bounds.end_datetime:
                    continue

                if notification.origin:
                    search_origin = 'search "origin" is "%s" from "events_index"' % notification.origin
                else:
                    search_origin = 'search "origin" is "*" from "events_index"'

                if notification.origin_type:
                    search_origin_type= 'search "origin_type" is "%s" from "events_index"' % notification.origin_type
                else:
                    search_origin_type= 'search "origin_type" is "*" from "events_index"'

                if notification.event_type:
                    search_event_type = 'search "type_" is "%s" from "events_index"' % notification.event_type
                else:
                    search_event_type = 'search "type_" is "*" from "events_index"'

                search_string = search_time + ' and ' + search_origin + ' and ' + search_origin_type + ' and ' + search_event_type

                # get the list of ids corresponding to the events
                ret_vals = self.discovery.parse(search_string)

                datastore = self.container.datastore_manager.get_datastore('events')
                for event_id in ret_vals:
                    event_obj = datastore.read(event_id)
                    events_for_message.append(event_obj)

            log.debug("Found following events of interest to user, %s: %s" % (user_id, events_for_message))

            # send a notification email to each user using a _send_email() method
            if events_for_message:
                self.format_and_send_email(events_for_message, user_id)

    def format_and_send_email(self, events_for_message, user_id):
        """
        Format the message for a particular user containing information about the events he is to be notified about

        @param events_for_message list
        @param user_id str
        """

        message = str(events_for_message)
        log.debug("The user, %s, will get the following events in his batch notification email: %s" % (user_id, message))

        msg_body = ''
        count = 1
        for event in events_for_message:
            # build the email from the event content
            msg_body += string.join(("\r\n",
                                     "Event %s: %s" %  (count, event),
                                     "",
                                     "Originator: %s" %  event.origin,
                                     "",
                                     "Description: %s" % event.description ,
                                     "",
                                     "Event time stamp: %s" %  event.ts_created,
                                     "\r\n",
                                     "------------------------"
                                     "\r\n"))
            count += 1

        msg_body += "You received this notification from ION because you asked to be " +\
                    "notified about this event from this source. " +\
                    "To modify or remove notifications about this event, " +\
                    "please access My Notifications Settings in the ION Web UI. " +\
                    "Do not reply to this email.  This email address is not monitored " +\
                    "and the emails will not be read. \r\n "


        log.debug("The email has the following message body: %s" % msg_body)

        msg_subject = "(SysName: " + get_sys_name() + ") ION event "

        self.send_batch_email(  msg_body = msg_body,
            msg_subject = msg_subject,
            msg_recipient=self.event_processor.user_info[user_id]['user_contact'].email,
            smtp_client=self.smtp_client )

    def send_batch_email(self, msg_body, msg_subject, msg_recipient, smtp_client):
        """
        Send the email

        @param msg_body str
        @param msg_subject str
        @param msg_recipient str
        @param smtp_client object
        """

        msg = MIMEText(msg_body)
        msg['Subject'] = msg_subject
        msg['From'] = self.ION_NOTIFICATION_EMAIL_ADDRESS
        msg['To'] = msg_recipient
        log.debug("EventProcessor.subscription_callback(): sending email to %s"\
                  %msg_recipient)

        smtp_sender = CFG.get_safe('server.smtp.sender')

        smtp_client.sendmail(smtp_sender, msg_recipient, msg.as_string())
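    # A minimal sketch of a compatible smtp_client, assuming setting_up_smtp_client() returns an
    # object with the smtplib.SMTP interface; the host config key and recipient are illustrative.
    #
    #   import smtplib
    #   smtp_client = smtplib.SMTP(CFG.get_safe('server.smtp.host', 'localhost'))
    #   self.send_batch_email(msg_body, msg_subject, 'user@example.com', smtp_client)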

    def update_user_info_object(self, user_id, new_notification, old_notification):
        """
        Update the UserInfo object. If the passed-in parameter, old_notification, is None, the old notification does not need to be removed.

        @param user_id str
        @param new_notification NotificationRequest
        @param old_notification NotificationRequest
        """

        #------------------------------------------------------------------------------------
        # read the user
        #------------------------------------------------------------------------------------

        user = self.clients.resource_registry.read(user_id)

        if not user:
            raise BadRequest("No user with the provided user_id: %s" % user_id)

        notifications = []
        for item in user.variables:
            if item['name'] == 'notifications':
                if old_notification and old_notification in item['value']:

                    notifications = item['value']
                    # remove the old notification
                    notifications.remove(old_notification)

                # put in the new notification
                notifications.append(new_notification)

                item['value'] = notifications

                break

        #------------------------------------------------------------------------------------
        # update the resource registry
        #------------------------------------------------------------------------------------

        self.clients.resource_registry.update(user)

        return user

    def update_user_info_dictionary(self, user_id, new_notification, old_notification):

        #------------------------------------------------------------------------------------
        # Remove the old notifications
        #------------------------------------------------------------------------------------

        if old_notification in self.event_processor.user_info[user_id]['notifications']:

            # remove from notifications list
            self.event_processor.user_info[user_id]['notifications'].remove(old_notification)

            #------------------------------------------------------------------------------------
            # update the notification subscription object
            #------------------------------------------------------------------------------------

            # get the old notification_subscription
            notification_subscription = self.event_processor.user_info[user_id]['notification_subscriptions'].pop(old_notification._id)

            # update that old notification subscription
            notification_subscription._res_obj = new_notification

            # feed the updated notification subscription back into the user info dictionary
            self.event_processor.user_info[user_id]['notification_subscriptions'][old_notification._id] = notification_subscription

        #------------------------------------------------------------------------------------
        # find the already existing notifications for the user
        #------------------------------------------------------------------------------------

        notifications = self.event_processor.user_info[user_id]['notifications']
        notifications.append(new_notification)

        #------------------------------------------------------------------------------------
        # update the user info - contact information, notifications
        #------------------------------------------------------------------------------------
        user = self.clients.resource_registry.read(user_id)

        self.event_processor.user_info[user_id]['user_contact'] = user.contact
        self.event_processor.user_info[user_id]['notifications'] = notifications

        self.event_processor.reverse_user_info = calculate_reverse_user_info(self.event_processor.user_info)

    def get_subscriptions(self, resource_id='', include_nonactive=False):
        """
        This method is used to get the subscriptions to a data product. It returns a list of NotificationRequest
        objects whose origin is set to this data product. This way, all the users interested in listening to
        events originating from this data product are known, along with all of their subscriptions.

        @param resource_id
        @param include_nonactive
        @return notification_requests []

        """

        search_origin = 'search "origin" is "%s" from "resources_index"' % resource_id
        ret_vals = self.discovery.parse(search_origin)
        log.debug("Returned results: %s" % ret_vals)

        notifications_all = set()
        notifications_active = set()

        for item in ret_vals:

            if item['_type'] == 'NotificationRequest':
                notif = self.clients.resource_registry.read(item['_id'])

                if include_nonactive:
                    # Add active or retired notification
                    notifications_all.add(notif)

                elif notif.temporal_bounds.end_datetime == '':
                    # Add the active notification
                    notifications_active.add(notif)

        if include_nonactive:
            return list(notifications_all)
        else:
            return list(notifications_active)


    def _notification_in_notifications(self, notification = None, notifications = None):

        for id, notif in notifications.iteritems():
            if notif.name == notification.name and \
            notif.origin == notification.origin and \
            notif.origin_type == notification.origin_type and \
            notif.event_type == notification.event_type:
                return id
        return None

    def _update_notification_in_notifications_dict(self, new_notification = None, old_notification = None, notifications = None ):

        for id, notif in notifications.iteritems():
            if notif.name == old_notification.name and\
               notif.origin == old_notification.origin and\
               notif.origin_type == old_notification.origin_type and\
               notif.event_type == old_notification.event_type:
                notifications.pop(id)
                notifications[id] = new_notification
                break
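    # A minimal sketch of how the two helpers above cooperate with the self.notifications cache,
    # mirroring the logic in create_notification(); the request object is illustrative:
    #
    #   existing_id = self._notification_in_notifications(request, self.notifications)
    #   if existing_id is None:
    #       notification_id, _ = self.clients.resource_registry.create(request)
    #       self.notifications[notification_id] = request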
Code Example #19
class DiscoveryIntTest(IonIntegrationTestCase):
    def setUp(self):
        super(DiscoveryIntTest, self).setUp()

        self._start_container()
        self.addCleanup(DiscoveryIntTest.es_cleanup)
        self.container.start_rel_from_url('res/deploy/r2dm.yml')

        self.discovery = DiscoveryServiceClient()
        self.catalog   = CatalogManagementServiceClient()
        self.ims       = IndexManagementServiceClient()
        self.rr        = ResourceRegistryServiceClient()

        if use_es:
            self.es_host   = CFG.get_safe('server.elasticsearch.host', 'localhost')
            self.es_port   = CFG.get_safe('server.elasticsearch.port', '9200')
            CFG.server.elasticsearch.shards         = 1
            CFG.server.elasticsearch.replicas       = 0
            CFG.server.elasticsearch.river_shards   = 1
            CFG.server.elasticsearch.river_replicas = 0
            self.es = ep.ElasticSearch(
                host=self.es_host,
                port=self.es_port,
                timeout=10,
                verbose=True
            )
            op = DotDict(CFG)
            op.op = 'clean_bootstrap'
            self.container.spawn_process('index_bootstrap','ion.processes.bootstrap.index_bootstrap','IndexBootStrap', op)

    @staticmethod
    def es_cleanup():
        es_host = CFG.get_safe('server.elasticsearch.host', 'localhost')
        es_port = CFG.get_safe('server.elasticsearch.port', '9200')
        es = ep.ElasticSearch(
            host=es_host,
            port=es_port,
            timeout=10
        )
        indexes = STD_INDEXES.keys()
        indexes.append('%s_resources_index' % get_sys_name().lower())
        indexes.append('%s_events_index' % get_sys_name().lower())

        for index in indexes:
            IndexManagementService._es_call(es.river_couchdb_delete,index)
            IndexManagementService._es_call(es.index_delete,index)


    def poll(self, tries, callback, *args, **kwargs):
        '''
        Polling wrapper for queries
        Elasticsearch may not index and cache the changes right away so we may need 
        a couple of tries and a little time to go by before the results show.
        '''
        for i in xrange(tries):
            retval = callback(*args, **kwargs)
            if retval:
                return retval
            time.sleep(0.2)
        return None


    def test_traversal(self):
        dp        = DataProcess()
        transform = Transform()
        pd        = ProcessDefinition()

        dp_id, _        = self.rr.create(dp)
        transform_id, _ = self.rr.create(transform)
        pd_id, _        = self.rr.create(pd)

        self.rr.create_association(subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.rr.create_association(subject=transform_id, object=pd_id, predicate=PRED.hasProcessDefinition)

        results = self.discovery.traverse(dp_id)
        results.sort()
        correct = [pd_id, transform_id]
        correct.sort()
        self.assertTrue(results == correct, '%s' % results)

    def test_iterative_traversal(self):
        dp        = DataProcess()
        transform = Transform()
        pd        = ProcessDefinition()

        dp_id, _        = self.rr.create(dp)
        transform_id, _ = self.rr.create(transform)
        pd_id, _        = self.rr.create(pd)

        self.rr.create_association(subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.rr.create_association(subject=transform_id, object=pd_id, predicate=PRED.hasProcessDefinition)

        results = self.discovery.iterative_traverse(dp_id)
        results.sort()
        correct = [transform_id]
        self.assertTrue(results == correct)

        results = self.discovery.iterative_traverse(dp_id, 1)
        results.sort()
        correct = [transform_id, pd_id]
        correct.sort()
        self.assertTrue(results == correct)
    
    @skipIf(not use_es, 'No ElasticSearch')
    def test_view_crud(self):
        view_id = self.discovery.create_view('big_view',fields=['name'])
        catalog_id = self.discovery.list_catalogs(view_id)[0]
        index_ids = self.catalog.list_indexes(catalog_id)
        self.assertTrue(len(index_ids))

        view = self.discovery.read_view(view_id)
        self.assertIsInstance(view,View)
        self.assertTrue(view.name == 'big_view')

        view.name = 'not_so_big_view'

        self.discovery.update_view(view)

        view = self.discovery.read_view(view_id)
        self.assertTrue(view.name == 'not_so_big_view')

        self.discovery.delete_view(view_id)
        with self.assertRaises(NotFound):
            self.discovery.read_view(view_id)

    def test_view_best_match(self):
        #---------------------------------------------------------------
        # Matches the best catalog available OR creates a new one
        #---------------------------------------------------------------
        catalog_id = self.catalog.create_catalog('dev', keywords=['name','model'])
        view_id    = self.discovery.create_view('exact_view', fields=['name','model'])
        catalog_ids = self.discovery.list_catalogs(view_id)
        self.assertTrue(catalog_ids == [catalog_id])

        view_id = self.discovery.create_view('another_view', fields=['name','model'])
        catalog_ids = self.discovery.list_catalogs(view_id)
        self.assertTrue(catalog_ids == [catalog_id])

        view_id = self.discovery.create_view('big_view', fields=['name'])
        catalog_ids = self.discovery.list_catalogs(view_id)
        self.assertTrue(catalog_ids != [catalog_id])

    @skipIf(not use_es, 'No ElasticSearch')
    def test_basic_searching(self):

        #- - - - - - - - - - - - - - - - - 
        # set up the fake resources
        #- - - - - - - - - - - - - - - - - 

        instrument_pool = [
            InstrumentDevice(name='sonobuoy1', hardware_version='1'),
            InstrumentDevice(name='sonobuoy2', hardware_version='2'),
            InstrumentDevice(name='sonobuoy3', hardware_version='3')
        ]
        for instrument in instrument_pool:
            self.rr.create(instrument)

        view_id = self.discovery.create_view('devices', fields=['hardware_version'])

        search_string = "search 'hardware_version' is '2' from '%s'"%view_id
        results = self.poll(5, self.discovery.parse,search_string)
        result  = results[0]['_source']
        self.assertIsInstance(result, InstrumentDevice)
        self.assertTrue(result.name == 'sonobuoy2')
        self.assertTrue(result.hardware_version == '2')
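
    # The self.poll(...) helper used throughout these tests is defined elsewhere
    # in the test module and is not shown here. A minimal, illustrative sketch of
    # such a retry helper (an assumption about its behaviour, not the actual
    # implementation) might look like the following:
    def poll_until_results(self, tries, callback, *args, **kwargs):
        # Call `callback` up to `tries` times, pausing between attempts, and
        # return the first non-empty result (or None if nothing ever comes back).
        from gevent import sleep
        for _ in xrange(tries):
            results = callback(*args, **kwargs)
            if results:
                return results
            sleep(1)
        return None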


    @skipIf(not use_es, 'No ElasticSearch')
    def test_associative_searching(self):

        view_id = self.discovery.create_view('devices', fields=['model'])
        site_id,_ = self.rr.create(Site('my_site'))
        pd_id, _  = self.rr.create(PlatformDevice('my_device', model='abc123'))
        self.rr.create_association(subject=site_id, object=pd_id, predicate=PRED.hasDevice)

        search_string = "search 'model' is 'abc*' from '%s' and belongs to '%s'"%(view_id, site_id)

        results = self.poll(5, self.discovery.parse,search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(pd_id in results)

    def test_iterative_associative_searching(self):
        #--------------------------------------------------------------------------------
        # Tests the ability to limit the iterations
        #--------------------------------------------------------------------------------
        dp        = DataProcess()
        transform = Transform()
        pd        = ProcessDefinition()

        dp_id, _        = self.rr.create(dp)
        transform_id, _ = self.rr.create(transform)
        pd_id, _        = self.rr.create(pd)

        self.rr.create_association(subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.rr.create_association(subject=transform_id, object=pd_id, predicate=PRED.hasProcessDefinition)

        search_string = "belongs to '%s' depth 1" % dp_id
        results = self.poll(5, self.discovery.parse, search_string)
        results = [i._id for i in results]
        correct = [transform_id]
        self.assertTrue(results == correct, '%s' % results)

        search_string = "belongs to '%s' depth 2" % dp_id
        results = self.poll(5, self.discovery.parse, search_string)
        results = [i._id for i in results]
        results.sort()
        correct = [transform_id, pd_id]
        correct.sort()
        self.assertTrue(results == correct)


    @skipIf(not use_es, 'No ElasticSearch')
    def test_ranged_value_searching(self):
        discovery = self.discovery
        rr        = self.rr
        
        view_id = discovery.create_view('bank_view', fields=['cash_balance'])
        bank_id, _ = rr.create(BankAccount(name='broke', cash_balance=10))

        search_string = "search 'cash_balance' values from 0 to 100 from '%s'" % view_id

        results = self.poll(5, discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == bank_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_collections_searching(self):

        site_id, _ = self.rr.create(Site(name='black_mesa'))
        view_id    = self.discovery.create_view('big', fields=['name'])

        # Add the site to a new collection
        collection_id = self.ims.create_collection('resource_collection', [site_id])

        search_string = "search 'name' is '*' from '%s' and in '%s'" %(view_id, collection_id)

        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0] == site_id, '%s' % results)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_search_by_name(self):
        inst_dev = InstrumentDevice(name='test_dev',serial_number='ABC123')

        dev_id, _ = self.rr.create(inst_dev)
        self.discovery.create_view('devs',fields=['name','serial_number'])

        search_string = "search 'serial_number' is 'abc*' from 'devs'"
        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dev_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_search_by_name_index(self):
        inst_dev = InstrumentDevice(name='test_dev',serial_number='ABC123')

        dev_id, _ = self.rr.create(inst_dev)
        search_string = "search 'serial_number' is 'abc*' from 'resources_index'"
        
        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dev_id)

        bank_acc = BankAccount(name='blah', cash_balance=10)
        res_id , _ = self.rr.create(bank_acc)

        search_string = "search 'cash_balance' values from 0 to 100 from 'resources_index'"

        results = self.poll(9, self.discovery.parse,search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == res_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_data_product_search(self):

        # Create the dataproduct
        dp = DataProduct(name='test_product')
        dp.data_format.name = 'test_signal'
        dp.data_format.description = 'test signal'
        dp.data_format.character_set = 'utf8'
        dp.data_format.nominal_sampling_rate_maximum = '44000'
        dp.data_format.nominal_sampling_rate_minimum = '44000'
        dp.data_product_level = 'basic'
        dp_id, _ = self.rr.create(dp)

        search_string = "search 'data_format.name' is 'test_signal' from 'data_products_index'"
        results = self.poll(9, self.discovery.parse, search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)

        search_string = "search 'data_product_level' is 'basic' from 'data_products_index'"
        results = self.poll(9, self.discovery.parse, search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)
        
        search_string = "search 'data_format.character_set' is 'utf8' from 'data_products_index'"
        results = self.poll(9, self.discovery.parse, search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_events_search(self):
        # Create a resource to force a new event

        dp = DataProcess()
        dp_id, rev = self.rr.create(dp)

        search_string = "SEARCH 'origin' IS '%s' FROM 'events_index'" % dp_id

        results = self.poll(9, self.discovery.parse,search_string)
        origin_type = results[0]['_source'].origin_type
        origin_id = results[0]['_source'].origin

        self.assertTrue(origin_type == RT.DataProcess)
        self.assertTrue(origin_id == dp_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_geo_distance_search(self):

        pd = PlatformDevice(name='test_dev')

        pd_id, _ = self.rr.create(pd)

        search_string = "search 'nominal_location' geo distance 20 km from lat 0 lon 0 from 'devices_index'"

        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')

        self.assertTrue(results[0]['_id'] == pd_id)
        self.assertTrue(results[0]['_source'].name == 'test_dev')
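
    # The assertion above depends on the PlatformDevice's default nominal_location
    # falling within 20 km of lat 0 / lon 0; the location is never set explicitly,
    # so that default is an assumption inferred from the expected result.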
   
    @skipIf(not use_es, 'No ElasticSearch')
    def test_geo_bbox_search(self):

        pd = PlatformDevice(name='test_dev')
        pd.nominal_location.lat = 5
        pd.nominal_location.lon = 5

        pd_id, _ = self.rr.create(pd)

        search_string = "search 'nominal_location' geo box top-left lat 10 lon 0 bottom-right lat 0 lon 10 from 'devices_index'"

        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')

        self.assertTrue(results[0]['_id'] == pd_id)
        self.assertTrue(results[0]['_source'].name == 'test_dev')
        
    @skipIf(not use_es, 'No ElasticSearch')
    def test_user_search(self):
        user = UserInfo()
        user.name = 'test'
        user.contact.phone = '5551212'

        user_id, _ = self.rr.create(user)

        search_string = 'search "name" is "test" from "users_index"'

        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')

        self.assertTrue(results[0]['_id'] == user_id)
        self.assertTrue(results[0]['_source'].name == 'test')

        search_string = 'search "contact.phone" is "5551212" from "users_index"'
        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')

        self.assertTrue(results[0]['_id'] == user_id)
        self.assertTrue(results[0]['_source'].name == 'test')
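
    # For reference, the query-string shapes exercised by the tests in this class
    # (placeholders in angle brackets are illustrative; the grammar itself belongs
    # to the Discovery service parser, not to this test module):
    EXAMPLE_QUERY_FORMS = [
        "search '<field>' is '<value-or-prefix*>' from '<view-or-index>'",
        "search '<field>' values from <low> to <high> from '<view-or-index>'",
        "search '<field>' geo distance <d> km from lat <lat> lon <lon> from '<index>'",
        "search '<field>' geo box top-left lat <lat> lon <lon> bottom-right lat <lat> lon <lon> from '<index>'",
        "search '<field>' is '<value>' from '<view>' and belongs to '<resource_id>'",
        "search '<field>' is '*' from '<view>' and in '<collection_id>'",
        "belongs to '<resource_id>' depth <n>",
    ]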
Code Example #20
0
class DiscoveryIntTest(IonIntegrationTestCase):
    def setUp(self):
        raise SkipTest("Not yet ported to Postgres")
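
        # NOTE: the raise above skips every test in this class, so the container
        # start-up and client wiring below never run until the port to Postgres
        # is completed.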

        super(DiscoveryIntTest, self).setUp()
        config = DotDict()

        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml', config)

        self.discovery               = DiscoveryServiceClient()
        self.catalog                 = CatalogManagementServiceClient()
        self.ims                     = IndexManagementServiceClient()
        self.rr                      = ResourceRegistryServiceClient()
        self.dataset_management      = DatasetManagementServiceClient()
        self.pubsub_management       = PubsubManagementServiceClient()
        self.data_product_management = DataProductManagementServiceClient()


    def test_geo_distance_search(self):

        pd = PlatformDevice(name='test_dev')

        pd_id, _ = self.rr.create(pd)

        search_string = "search 'index_location' geo distance 20 km from lat 0 lon 0 from 'devices_index'"

        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')

        self.assertTrue(results[0]['_id'] == pd_id)
        self.assertTrue(results[0]['_source'].name == 'test_dev')


    def test_ranged_value_searching(self):
        discovery = self.discovery
        rr        = self.rr
        
        view_id = discovery.create_view('bank_view', fields=['cash_balance'])
        bank_id, _ = rr.create(BankAccount(name='broke', cash_balance=10))

        search_string = "search 'cash_balance' values from 0 to 100 from '%s'" % view_id

        results = self.poll(5, discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == bank_id)
        
        bank_id, _ = rr.create(BankAccount(name='broke', cash_balance=90))

        search_string = "search 'cash_balance' values from 80 from '%s'" % view_id

        results = self.poll(5, discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == bank_id)

    def test_collections_searching(self):
        site_id, _ = self.rr.create(Site(name='black_mesa'))
        view_id    = self.discovery.create_view('big', fields=['name'])

        # Add the site to a new collection
        collection_id = self.ims.create_collection('resource_collection', [site_id])

        search_string = "search 'name' is '*' from '%s' and in '%s'" %(view_id, collection_id)

        results = self.poll(9, self.discovery.parse,search_string,id_only=True)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0] == site_id, '%s' % results)

    def test_search_by_name_index(self):
        inst_dev = InstrumentDevice(name='test_dev',serial_number='ABC123')

        dev_id, _ = self.rr.create(inst_dev)
        search_string = "search 'serial_number' is 'abc*' from 'resources_index'"
        
        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dev_id)

        bank_acc = BankAccount(name='blah', cash_balance=10)
        res_id , _ = self.rr.create(bank_acc)

        search_string = "search 'cash_balance' values from 0 to 100 from 'resources_index'"

        results = self.poll(9, self.discovery.parse,search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == res_id)

    def test_data_product_search(self):
        # Create the dataproduct
        dp = DataProduct(name='test_product')
        dp.data_format.name = 'test_signal'
        dp.data_format.description = 'test signal'
        dp.data_format.character_set = 'utf8'
        dp.data_format.nominal_sampling_rate_maximum = '44000'
        dp.data_format.nominal_sampling_rate_minimum = '44000'
        dp.CDM_data_type = 'basic'
        dp_id, _ = self.rr.create(dp)

        search_string = "search 'data_format.name' is 'test_signal' from 'data_products_index'"
        results = self.poll(9, self.discovery.parse, search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)

        search_string = "search 'CDM_data_type' is 'basic' from 'data_products_index'"
        results = self.poll(9, self.discovery.parse, search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)
        
        search_string = "search 'data_format.character_set' is 'utf8' from 'data_products_index'"
        results = self.poll(9, self.discovery.parse, search_string)

        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)


    def test_events_search(self):
        # Create a resource to force a new event

        dp = DataProcess()
        dp_id, rev = self.rr.create(dp)

        search_string = "SEARCH 'origin' IS '%s' FROM 'events_index'" % dp_id

        results = self.poll(9, self.discovery.parse,search_string)
        origin_type = results[0]['_source'].origin_type
        origin_id = results[0]['_source'].origin

        self.assertTrue(origin_type == RT.DataProcess)
        self.assertTrue(origin_id == dp_id)



    def test_time_search(self):
        today  = date.today()
        past   = today - timedelta(days=2)
        future = today + timedelta(days=2)

        data_product = DataProduct()
        dp_id, _ = self.rr.create(data_product)
        
        search_string = "search 'type_' is 'DataProduct' from 'data_products_index' and search 'ts_created' time from '%s' to '%s' from 'data_products_index'" % (past, future)

        results = self.poll(9, self.discovery.parse,search_string,id_only=True)

        self.assertIsNotNone(results,'Results not found')

        self.assertIn(dp_id, results)
        
        search_string = "search 'type_' is 'DataProduct' from 'data_products_index' and search 'ts_created' time from '%s' from 'data_products_index'" % past

        results = self.poll(9, self.discovery.parse,search_string,id_only=True)

        self.assertIsNotNone(results,'Results not found')

        self.assertIn(dp_id, results)

        
    def test_user_search(self):
        user = UserInfo()
        user.name = 'test'
        user.contact.phones.append('5551212')

        user_id, _ = self.rr.create(user)

        search_string = 'search "name" is "test" from "users_index"'

        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')

        self.assertTrue(results[0]['_id'] == user_id)
        self.assertTrue(results[0]['_source'].name == 'test')

        search_string = 'search "contact.phones" is "5551212" from "users_index"'
        results = self.poll(9, self.discovery.parse,search_string)

        self.assertIsNotNone(results, 'Results not found')

        self.assertTrue(results[0]['_id'] == user_id)
        self.assertTrue(results[0]['_source'].name == 'test')


    def test_subobject_search(self):
        contact = ContactInformation()
        contact.email = '*****@*****.**'
        contact.individual_name_family = 'Tester'
        contact.individual_names_given = 'Intern'

        dp = DataProduct(name='example')
        dp.contacts.append(contact)

        dp_id,_ = self.rr.create(dp)

        #--------------------------------------------------------------------------------
        # Example using the full field name
        #--------------------------------------------------------------------------------
        search_string = 'search "contacts.email" is "*****@*****.**" from "data_products"'
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)
        self.assertEquals(results[0]['_source'].name, 'example')

        #--------------------------------------------------------------------------------
        # Example using a sub-object's field name (ambiguous searching)
        #--------------------------------------------------------------------------------
        search_string = 'search "individual_names_given" is "Intern" from "data_products"'
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)
        self.assertEquals(results[0]['_source'].name, 'example')

    def test_descriptive_phrase_search(self):
        dp = DataProduct(name='example', description='This is simply a description for this data product')
        dp_id, _ = self.rr.create(dp)

        search_string = 'search "description" like "description for" from "data_products"'
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)
        self.assertEquals(results[0]['_source'].name, 'example')
    
    def test_match_search(self):
        dp = DataProduct(name='example', description='This is simply a description for this data product')
        dp_id, _ = self.rr.create(dp)

        search_string = 'search "description" match "this data product" from "data_products"'
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)
        self.assertEquals(results[0]['_source'].name, 'example')
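
    # The two tests above exercise the free-text operators used in this class:
    # "like" is shown matching a partial phrase inside a description field and
    # "match" a multi-word phrase; their exact semantics live in the Discovery
    # parser and are only inferred here from the assertions.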

    def test_expected_match_results(self):
        names = [
            'Instrument for site1',
            'Instrument for simulator',
            'CTD1',
            'SBE37',
            'SSN-719',
            'Submersible Expendable Bathythermograph',
            'VELPT',
            'VELO',
            'Safire2 169'
            ]
        for name in names:
            res_id, _ = self.rr.create(InstrumentDevice(name=name))
            self.addCleanup(self.rr.delete, res_id)

        search_string = 'search "name" match "expendable" from "devices"'
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')

        self.assertEquals(len(results),1)
        self.assertEquals(results[0]['_source'].name, 'Submersible Expendable Bathythermograph')


        search_string = 'search "name" match "instrument for" from "devices"'
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')

        self.assertEquals(len(results),2)
        self.assertTrue('Instrument for' in results[0]['_source'].name)
        self.assertTrue('Instrument for' in results[1]['_source'].name)


        search_string = 'search "name" match "velo for" from "devices"'
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')

        self.assertEquals(len(results),1)
        self.assertEquals(results[0]['_source'].name, 'VELO')


    def test_ownership_searching(self):
        # Create two data products so that there is competition for the search: one is parsed
        # (with conductivity as a parameter) and the other is raw
        dp = DataProduct(name='example dataproduct')
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict')
        stream_def_id = self.pubsub_management.create_stream_definition('ctd parsed', parameter_dictionary_id=pdict_id)
        dp_id = self.data_product_management.create_data_product(dp, stream_definition_id=stream_def_id, exchange_point='xp1')

        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_raw_param_dict')
        stream_def_id = self.pubsub_management.create_stream_definition('ctd raw', parameter_dictionary_id=pdict_id)
        dp = DataProduct(name='WRONG')
        self.data_product_management.create_data_product(dp, stream_definition_id=stream_def_id, exchange_point='xp1')

        parameter_search = 'search "name" is "conductivity" from "resources_index"'
        results = self.poll(9, self.discovery.parse, parameter_search)
        param_id = results[0]['_id']

        data_product_search = 'search "name" is "*" from "data_products_index" and has "%s"' % param_id
        results = self.poll(9, self.discovery.parse, data_product_search,id_only=True)
        self.assertIn(dp_id, results)
        #self.assertEquals(results[0], dp_id)
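
    # Beyond the forms used in the ElasticSearch-backed class above, the tests in
    # this class also use "search ... time from '<start>' to '<end>' ...",
    # "... and has '<associated_resource_id>'", and the id_only=True keyword on
    # discovery.parse to get bare resource ids back instead of full documents.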

    def test_associative_searching(self):
        dp_id,_ = self.rr.create(DataProduct('test_foo'))
        ds_id,_ = self.rr.create(Dataset('test_bar', registered=True))
        self.rr.create_association(subject=dp_id, object=ds_id, predicate='hasDataset')

        search_string = "search 'type_' is 'Dataset' from 'resources_index' and belongs to '%s'" % dp_id

        results = self.poll(5, self.discovery.parse, search_string, id_only=True)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(ds_id in results)

    def test_iterative_associative_searching(self):
        #--------------------------------------------------------------------------------
        # Tests the ability to limit the iterations
        #--------------------------------------------------------------------------------
        dp        = DataProcess()
        transform = Transform()
        pd        = ProcessDefinition()

        dp_id, _        = self.rr.create(dp)
        transform_id, _ = self.rr.create(transform)
        pd_id, _        = self.rr.create(pd)

        self.rr.create_association(subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.rr.create_association(subject=transform_id, object=pd_id, predicate=PRED.hasProcessDefinition)

        search_string = "belongs to '%s' depth 1" % dp_id
        results = self.poll(5, self.discovery.parse, search_string)
        results = [i._id for i in results]
        correct = [transform_id]
        self.assertTrue(results == correct, '%s' % results)

        search_string = "belongs to '%s' depth 2" % dp_id
        results = self.poll(5, self.discovery.parse, search_string)
        results = [i._id for i in results]
        results.sort()
        correct = [transform_id, pd_id]
        correct.sort()
        self.assertTrue(results == correct)