class TestDataProductManagementServiceIntegration(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
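        # starts every service declared in the r2 deployment so the service clients below have live endpoints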

        self.dpsc_cli           = DataProductManagementServiceClient()
        self.rrclient           = ResourceRegistryServiceClient()
        self.damsclient         = DataAcquisitionManagementServiceClient()
        self.pubsubcli          = PubsubManagementServiceClient()
        self.ingestclient       = IngestionManagementServiceClient()
        self.process_dispatcher = ProcessDispatcherServiceClient()
        self.dataset_management = DatasetManagementServiceClient()
        self.unsc               = UserNotificationServiceClient()
        self.data_retriever     = DataRetrieverServiceClient()
        self.identcli           = IdentityManagementServiceClient()

        #------------------------------------------
        # Create the environment
        #------------------------------------------

        self.stream_def_id = self.pubsubcli.create_stream_definition(name='SBE37_CDM')

        self.process_definitions  = {}
        ingestion_worker_definition = ProcessDefinition(name='ingestion worker')
        ingestion_worker_definition.executable = {
            'module':'ion.processes.data.ingestion.science_granule_ingestion_worker',
            'class' :'ScienceGranuleIngestionWorker'
        }
        process_definition_id = self.process_dispatcher.create_process_definition(process_definition=ingestion_worker_definition)
        self.process_definitions['ingestion_worker'] = process_definition_id

        self.pids = []
        self.exchange_points = []
        self.exchange_names = []

        #------------------------------------------------------------------------------------------------
        # First launch the ingestors
        #------------------------------------------------------------------------------------------------
        self.exchange_space       = 'science_granule_ingestion'
        self.exchange_point       = 'science_data'
        config = DotDict()
        config.process.datastore_name = 'datasets'
        config.process.queue_name = self.exchange_space
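        # the ingestion worker consumes granules from the queue named after the
        # exchange space and persists them into the 'datasets' datastore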

        self.exchange_names.append(self.exchange_space)
        self.exchange_points.append(self.exchange_point)

        pid = self.process_dispatcher.schedule_process(self.process_definitions['ingestion_worker'], configuration=config)
        log.debug("the ingestion worker process id: %s", pid)
        self.pids.append(pid)

        self.addCleanup(self.cleaning_up)

    def cleaning_up(self):
        log.debug("number of pids to be terminated: %s", len(self.pids))
        for pid in self.pids:
            try:
                self.process_dispatcher.cancel_process(pid)
                log.debug("Terminated the process: %s", pid)
            except Exception:
                log.debug("could not terminate the process id: %s", pid)
        IngestionManagementIntTest.clean_subscriptions()

        for xn in self.exchange_names:
            xni = self.container.ex_manager.create_xn_queue(xn)
            xni.delete()
        for xp in self.exchange_points:
            xpi = self.container.ex_manager.create_xp(xp)
            xpi.delete()

    def get_datastore(self, dataset_id):
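        """Look up the science-data datastore that backs the given dataset."""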
        dataset = self.dataset_management.read_dataset(dataset_id)
        datastore_name = dataset.datastore_name
        datastore = self.container.datastore_manager.get_datastore(datastore_name, DataStore.DS_PROFILE.SCIDATA)
        return datastore


    @attr('EXT')
    @attr('PREP')
    def test_create_data_product(self):

        #------------------------------------------------------------------------------------------------
        # create a stream definition for the data from the ctd simulator
        #------------------------------------------------------------------------------------------------
        parameter_dictionary = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict')
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=parameter_dictionary._id)
        log.debug("Created stream def id %s" % ctd_stream_def_id)

        #------------------------------------------------------------------------------------------------
        # test creating a new data product using the stream definition
        #------------------------------------------------------------------------------------------------

        dp_obj = IonObject(RT.DataProduct,
            name='DP1',
            description='some new dp')

        dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 10.0
        dp_obj.geospatial_bounds.geospatial_latitude_limit_south = -10.0
        dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 10.0
        dp_obj.geospatial_bounds.geospatial_longitude_limit_west = -10.0
        dp_obj.ooi_product_name = "PRODNAME"

        #------------------------------------------------------------------------------------------------
        # Create the data product, passing the stream definition created above
        #------------------------------------------------------------------------------------------------

        dp_id = self.dpsc_cli.create_data_product(data_product=dp_obj,
                                                  stream_definition_id=ctd_stream_def_id)
        # Assert that the data product has an associated stream at this stage
        stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, RT.Stream, True)
        self.assertNotEquals(len(stream_ids), 0)

        # Assert that the data product has an associated stream definition at this stage
        stream_def_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStreamDefinition, RT.StreamDefinition, True)
        self.assertNotEquals(len(stream_def_ids), 0)

        self.dpsc_cli.activate_data_product_persistence(dp_id)
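        # Activating persistence routes the product's stream through the ingestion
        # workers started in setUp, so published granules land in the product's dataset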

        dp_obj = self.dpsc_cli.read_data_product(dp_id)
        self.assertIsNotNone(dp_obj)
        self.assertEquals(dp_obj.geospatial_point_center.lat, 0.0)
        log.debug('Created data product %s', dp_obj)
        #------------------------------------------------------------------------------------------------
        # test creating a second data product with the same stream definition
        #------------------------------------------------------------------------------------------------
        log.debug('Creating new data product with a stream definition')
        dp_obj = IonObject(RT.DataProduct,
            name='DP2',
            description='some new dp')

        dp_id2 = self.dpsc_cli.create_data_product(dp_obj, ctd_stream_def_id)
        self.dpsc_cli.activate_data_product_persistence(dp_id2)
        log.debug('new dp_id = %s' % dp_id2)

        #------------------------------------------------------------------------------------------------
        #make sure data product is associated with stream def
        #------------------------------------------------------------------------------------------------
        streamdefs = []
        streams, _ = self.rrclient.find_objects(dp_id2, PRED.hasStream, RT.Stream, True)
        for s in streams:
            log.debug("Checking stream %s" % s)
            sdefs, _ = self.rrclient.find_objects(s, PRED.hasStreamDefinition, RT.StreamDefinition, True)
            for sd in sdefs:
                log.debug("Checking streamdef %s" % sd)
                streamdefs.append(sd)
        self.assertIn(ctd_stream_def_id, streamdefs)

        group_names = self.dpsc_cli.get_data_product_group_list()
        self.assertIn("PRODNAME", group_names)


        #----------------------------------------------------------------------------------------
        # Create users then notifications to this data product for each user
        #----------------------------------------------------------------------------------------

        # user_1
        user_1 = UserInfo()
        user_1.name = 'user_1'
        user_1.contact.email = '*****@*****.**'

        # user_2
        user_2 = UserInfo()
        user_2.name = 'user_2'
        user_2.contact.email = '*****@*****.**'
        # user_1 is a complete user, with an actor identity and registered credentials
        self.subject = "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254"
        actor_identity_obj = IonObject("ActorIdentity", {"name": self.subject})
        actor_id = self.identcli.create_actor_identity(actor_identity_obj)

        user_credentials_obj = IonObject("UserCredentials", {"name": self.subject})
        self.identcli.register_user_credentials(actor_id, user_credentials_obj)
        user_id_1 = self.identcli.create_user_info(actor_id, user_1)
        user_id_2, _ = self.rrclient.create(user_2)
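        # user_2 is a bare UserInfo resource with no actor identity or credentials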

        delivery_config1a = IonObject(OT.DeliveryConfiguration, email='*****@*****.**', mode=DeliveryModeEnum.EMAIL, frequency=NotificationFrequencyEnum.BATCH)
        delivery_config1b = IonObject(OT.DeliveryConfiguration, email='*****@*****.**', mode=DeliveryModeEnum.EMAIL, frequency=NotificationFrequencyEnum.BATCH)
        notification_request_1 = NotificationRequest(name="notification_1",
            origin=dp_id,
            origin_type="type_1",
            event_type=OT.ResourceLifecycleEvent,
            disabled_by_system=False,
            delivery_configurations=[delivery_config1a, delivery_config1b])

        delivery_config2a = IonObject(OT.DeliveryConfiguration, email='*****@*****.**', mode=DeliveryModeEnum.EMAIL, frequency=NotificationFrequencyEnum.BATCH)
        delivery_config2b = IonObject(OT.DeliveryConfiguration, email='*****@*****.**', mode=DeliveryModeEnum.EMAIL, frequency=NotificationFrequencyEnum.BATCH)
        notification_request_2 = NotificationRequest(name="notification_2",
            origin=dp_id,
            origin_type="type_2",
            event_type=OT.DetectionEvent,
            disabled_by_system=False,
            delivery_configurations=[delivery_config2a, delivery_config2b])

        notification_request_1_id = self.unsc.create_notification(notification=notification_request_1, user_id=user_id_1)
        notification_request_2_id = self.unsc.create_notification(notification=notification_request_2, user_id=user_id_2)
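        # Deleting notification_1 retires it rather than removing it; the extension
        # assertions below expect one active and one past subscription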
        self.unsc.delete_notification(notification_request_1_id)

        # test reading a non-existent data product
        log.debug('reading non-existent data product')

        with self.assertRaises(NotFound):
            dp_obj = self.dpsc_cli.read_data_product('some_fake_id')

        # update a data product (tests read also)
        log.debug('Updating data product')
        # first get the existing dp object
        dp_obj = self.dpsc_cli.read_data_product(dp_id)

        # now tweak the object
        dp_obj.description = 'the very first dp'
        dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 20.0
        dp_obj.geospatial_bounds.geospatial_latitude_limit_south = -20.0
        dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 20.0
        dp_obj.geospatial_bounds.geospatial_longitude_limit_west = -20.0
        # now write the dp back to the registry
        update_result = self.dpsc_cli.update_data_product(dp_obj)


        # now get the dp back to see if it was updated
        dp_obj = self.dpsc_cli.read_data_product(dp_id)
        self.assertEquals(dp_obj.description,'the very first dp')
        self.assertEquals(dp_obj.geospatial_point_center.lat, 0.0)
        log.debug('Updated data product %s', dp_obj)

        #test extension
        extended_product = self.dpsc_cli.get_data_product_extension(dp_id)
        #validate that there is one active and one retired user notification for this data product
        self.assertEqual(1, len(extended_product.computed.active_user_subscriptions.value))
        self.assertEqual(1, len(extended_product.computed.past_user_subscriptions.value))

        self.assertEqual(dp_id, extended_product._id)
        self.assertEqual(ComputedValueAvailability.PROVIDED,
                         extended_product.computed.product_download_size_estimated.status)
        self.assertEqual(0, extended_product.computed.product_download_size_estimated.value)

        self.assertEqual(ComputedValueAvailability.PROVIDED,
                         extended_product.computed.parameters.status)
        #log.debug("test_create_data_product: parameters %s" % extended_product.computed.parameters.value)


        def ion_object_encoder(obj):
            return obj.__dict__
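        # (helper for the commented-out simplejson debug dumps in this test)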


        #test prepare for create
        data_product_data = self.dpsc_cli.prepare_data_product_support()

        #print simplejson.dumps(data_product_data, default=ion_object_encoder, indent= 2)

        self.assertEqual(data_product_data._id, "")
        self.assertEqual(data_product_data.type_, OT.DataProductPrepareSupport)
        self.assertEqual(len(data_product_data.associations['StreamDefinition'].resources), 2)
        self.assertEqual(len(data_product_data.associations['Dataset'].resources), 0)
        self.assertEqual(len(data_product_data.associations['StreamDefinition'].associated_resources), 0)
        self.assertEqual(len(data_product_data.associations['Dataset'].associated_resources), 0)

        #test prepare for update
        data_product_data = self.dpsc_cli.prepare_data_product_support(dp_id)

        #print simplejson.dumps(data_product_data, default=ion_object_encoder, indent= 2)

        self.assertEqual(data_product_data._id, dp_id)
        self.assertEqual(data_product_data.type_, OT.DataProductPrepareSupport)
        self.assertEqual(len(data_product_data.associations['StreamDefinition'].resources), 2)

        self.assertEqual(len(data_product_data.associations['Dataset'].resources), 1)

        self.assertEqual(len(data_product_data.associations['StreamDefinition'].associated_resources), 1)
        self.assertEqual(data_product_data.associations['StreamDefinition'].associated_resources[0].s, dp_id)

        self.assertEqual(len(data_product_data.associations['Dataset'].associated_resources), 1)
        self.assertEqual(data_product_data.associations['Dataset'].associated_resources[0].s, dp_id)

        # now 'delete' the data product
        log.debug("deleting data product: %s" % dp_id)
        self.dpsc_cli.delete_data_product(dp_id)

        # Assert that there are no associated streams leftover after deleting the data product
        stream_ids, assoc_ids = self.rrclient.find_objects(dp_id, PRED.hasStream, RT.Stream, True)
        self.assertEquals(len(stream_ids), 0)
        self.assertEquals(len(assoc_ids), 0)

        self.dpsc_cli.force_delete_data_product(dp_id)
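        # force_delete removes the resource itself from the registry, so the read below raises NotFound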

        # now try to get the deleted dp object
        with self.assertRaises(NotFound):
            dp_obj = self.dpsc_cli.read_data_product(dp_id)

        # Get the events corresponding to the data product
        ret = self.unsc.get_recent_events(resource_id=dp_id)
        events = ret.value

        for event in events:
            log.debug("event time: %s" % event.ts_created)

        self.assertTrue(len(events) > 0)

    def test_data_product_stream_def(self):
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=pdict_id)


        dp_obj = IonObject(RT.DataProduct,
            name='DP1',
            description='some new dp')
        dp_id = self.dpsc_cli.create_data_product(data_product=dp_obj,
            stream_definition_id=ctd_stream_def_id)

        stream_def_id = self.dpsc_cli.get_data_product_stream_definition(dp_id)
        self.assertEquals(ctd_stream_def_id, stream_def_id)


    def test_derived_data_product(self):
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='ctd parsed', parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsubcli.delete_stream_definition, ctd_stream_def_id)


        dp = DataProduct(name='Instrument DP')
        dp_id = self.dpsc_cli.create_data_product(dp, stream_definition_id=ctd_stream_def_id)
        self.addCleanup(self.dpsc_cli.force_delete_data_product, dp_id)

        self.dpsc_cli.activate_data_product_persistence(dp_id)
        self.addCleanup(self.dpsc_cli.suspend_data_product_persistence, dp_id)


        dataset_ids, _ = self.rrclient.find_objects(subject=dp_id, predicate=PRED.hasDataset, id_only=True)
        if not dataset_ids:
            raise NotFound("Data Product %s dataset does not exist" % str(dp_id))
        dataset_id = dataset_ids[0]
        
        # Make the derived data product
        simple_stream_def_id = self.pubsubcli.create_stream_definition(name='TEMPWAT stream def', parameter_dictionary_id=pdict_id, available_fields=['time','temp'])
        tempwat_dp = DataProduct(name='TEMPWAT', category=DataProductTypeEnum.DERIVED)
        tempwat_dp_id = self.dpsc_cli.create_data_product(tempwat_dp, stream_definition_id=simple_stream_def_id, parent_data_product_id=dp_id)
        self.addCleanup(self.dpsc_cli.delete_data_product, tempwat_dp_id)
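        # The derived product exposes only the 'time' and 'temp' fields of its
        # narrower stream definition; the retrieval at the end of this test verifies that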
        # Check that the streams associated with the data product are being persisted
        stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, RT.Stream, True)
        for stream_id in stream_ids:
            self.assertTrue(self.ingestclient.is_persisted(stream_id))

        stream_id = stream_ids[0]
        route = self.pubsubcli.read_stream_route(stream_id=stream_id)

        rdt = RecordDictionaryTool(stream_definition_id=ctd_stream_def_id)
        rdt['time'] = np.arange(20)
        rdt['temp'] = np.arange(20)
        rdt['pressure'] = np.arange(20)

        publisher = StandaloneStreamPublisher(stream_id,route)
        
        dataset_modified = Event()
        def cb(*args, **kwargs):
            dataset_modified.set()
        es = EventSubscriber(event_type=OT.DatasetModified, callback=cb, origin=dataset_id, auto_delete=True)
        es.start()
        self.addCleanup(es.stop)
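        # Subscribe to DatasetModified before publishing so the test can block
        # until the ingestion worker has written the granule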

        publisher.publish(rdt.to_granule())

        self.assertTrue(dataset_modified.wait(30))

        tempwat_dataset_ids, _ = self.rrclient.find_objects(tempwat_dp_id, PRED.hasDataset, id_only=True)
        tempwat_dataset_id = tempwat_dataset_ids[0]
        granule = self.data_retriever.retrieve(tempwat_dataset_id, delivery_format=simple_stream_def_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        np.testing.assert_array_equal(rdt['time'], np.arange(20))
        self.assertEquals(set(rdt.fields), set(['time','temp']))


    def test_activate_suspend_data_product(self):

        #------------------------------------------------------------------------------------------------
        # create a stream definition for the data from the ctd simulator
        #------------------------------------------------------------------------------------------------
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=pdict_id)
        log.debug("Created stream def id %s" % ctd_stream_def_id)

        #------------------------------------------------------------------------------------------------
        # test creating a new data product using the stream definition
        #------------------------------------------------------------------------------------------------

        dp_obj = IonObject(RT.DataProduct,
            name='DP1',
            description='some new dp')

        log.debug("Created an IonObject for a data product: %s" % dp_obj)

        #------------------------------------------------------------------------------------------------
        # Create the data product, passing the stream definition created above
        #------------------------------------------------------------------------------------------------

        dp_id = self.dpsc_cli.create_data_product(data_product=dp_obj,
            stream_definition_id=ctd_stream_def_id)

        #------------------------------------------------------------------------------------------------
        # Subscribe to persist events
        #------------------------------------------------------------------------------------------------
        queue = gevent.queue.Queue()

        def info_event_received(message, headers):
            queue.put(message)

        es = EventSubscriber(event_type=OT.InformationContentStatusEvent, callback=info_event_received, origin=dp_id, auto_delete=True)
        es.start()
        self.addCleanup(es.stop)
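        # The activate/suspend persistence calls below each publish an
        # InformationContentStatusEvent; they are collected in the queue and
        # counted at the end of the test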


        #------------------------------------------------------------------------------------------------
        # test activate and suspend data product persistence
        #------------------------------------------------------------------------------------------------
        self.dpsc_cli.activate_data_product_persistence(dp_id)
        
        dp_obj = self.dpsc_cli.read_data_product(dp_id)
        self.assertIsNotNone(dp_obj)

        dataset_ids, _ = self.rrclient.find_objects(subject=dp_id, predicate=PRED.hasDataset, id_only=True)
        if not dataset_ids:
            raise NotFound("Data Product %s dataset does not exist" % str(dp_id))
        dataset_id = dataset_ids[0]


        # Check that the streams associated with the data product are being persisted
        stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, RT.Stream, True)
        for stream_id in stream_ids:
            self.assertTrue(self.ingestclient.is_persisted(stream_id))

        stream_id = stream_ids[0]
        route = self.pubsubcli.read_stream_route(stream_id=stream_id)

        rdt = RecordDictionaryTool(stream_definition_id=ctd_stream_def_id)
        rdt['time'] = np.arange(20)
        rdt['temp'] = np.arange(20)

        publisher = StandaloneStreamPublisher(stream_id,route)
        
        dataset_modified = Event()
        def cb(*args, **kwargs):
            dataset_modified.set()
        es = EventSubscriber(event_type=OT.DatasetModified, callback=cb, origin=dataset_id, auto_delete=True)
        es.start()
        self.addCleanup(es.stop)

        publisher.publish(rdt.to_granule())

        self.assertTrue(dataset_modified.wait(30))

        #--------------------------------------------------------------------------------
        # Now get the data in one chunk using an RPC call to the data retriever
        #--------------------------------------------------------------------------------

        replay_data = self.data_retriever.retrieve(dataset_ids[0])
        self.assertIsInstance(replay_data, Granule)

        log.debug("The data retriever was able to replay the dataset that was attached to the data product "
                  "we wanted to be persisted. Therefore the data product was indeed persisted with "
                  "otherwise we could not have retrieved its dataset using the data retriever. Therefore "
                  "this demonstration shows that L4-CI-SA-RQ-267 is satisfied: 'Data product management shall persist data products'")

        data_product_object = self.rrclient.read(dp_id)
        self.assertEquals(data_product_object.name,'DP1')
        self.assertEquals(data_product_object.description,'some new dp')

        log.debug("Towards L4-CI-SA-RQ-308: 'Data product management shall persist data product metadata'. "
                  " Attributes in create for the data product obj, name= '%s', description='%s', match those of object from the "
                  "resource registry, name='%s', desc='%s'" % (dp_obj.name, dp_obj.description,data_product_object.name,
                                                           data_product_object.description))

        #------------------------------------------------------------------------------------------------
        # test suspend data product persistence
        #------------------------------------------------------------------------------------------------
        self.dpsc_cli.suspend_data_product_persistence(dp_id)


        dataset_modified.clear()

        rdt['time'] = np.arange(20,40)

        publisher.publish(rdt.to_granule())
        self.assertFalse(dataset_modified.wait(2))

        self.dpsc_cli.activate_data_product_persistence(dp_id)
        dataset_modified.clear()

        publisher.publish(rdt.to_granule())
        self.assertTrue(dataset_modified.wait(30))
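        # the granule published while persistence was suspended was dropped; republishing
        # the same 20..39 records after re-activation restores full 0..39 time coverage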

        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        np.testing.assert_array_almost_equal(rdt['time'], np.arange(40))


        dataset_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasDataset, id_only=True)
        self.assertEquals(len(dataset_ids), 1)

        self.dpsc_cli.suspend_data_product_persistence(dp_id)
        self.dpsc_cli.force_delete_data_product(dp_id)
        # now try to get the deleted dp object

        with self.assertRaises(NotFound):
            dp_obj = self.rrclient.read(dp_id)


        info_event_counter = 0
        runtime = 0
        starttime = time.time()
        caught_events = []

        # check that the four InformationContentStatusEvents were received
        while info_event_counter < 4 and runtime < 60:
            a = queue.get(timeout=60)
            caught_events.append(a)
            info_event_counter += 1
            runtime = time.time() - starttime

        self.assertEquals(info_event_counter, 4)


class UserNotificationIntTest(IonIntegrationTestCase):
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2dm.yml')

        self.unsc = UserNotificationServiceClient(node=self.container.node)
        self.rrc = ResourceRegistryServiceClient(node=self.container.node)
        self.imc = IdentityManagementServiceClient(node=self.container.node)

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_email(self):

        proc1 = self.container.proc_manager.procs_by_name['user_notification']

        # Create a user and get the user_id
        user = UserInfo(name = 'new_user')
        user_id, _ = self.rrc.create(user)

        # set up....
        notification_id = self.unsc.create_email(event_type='ResourceLifecycleEvent',
            event_subtype=None,
            origin='Some_Resource_Agent_ID1',
            origin_type=None,
            user_id=user_id,
            email='*****@*****.**',
            mode = DeliveryMode.DIGEST,
            message_header='message_header',
            parser='parser',
            period=1)

        #------------------------------------------------------------------------------------------------------
        # Set up so that we can capture the message and headers passed into the
        # subscription callback method of the EmailEventProcessor
        #------------------------------------------------------------------------------------------------------

        # publish an event to trigger the notification email
        rle_publisher = EventPublisher("ResourceLifecycleEvent")
        rle_publisher.publish_event(origin='Some_Resource_Agent_ID1', description="RLE test event")


        msg_tuple = proc1.event_processors[notification_id].smtp_client.sentmail.get(timeout=4)
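        # msg_tuple is (from_addr, to_addr, message) as captured by the smtp
        # client's sentmail queue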

        self.assertTrue(proc1.event_processors[notification_id].smtp_client.sentmail.empty())

        message = msg_tuple[2]
        list_lines = message.split("\n")

        #-------------------------------------------------------
        # parse the message body
        #-------------------------------------------------------

        message_dict = {}
        for line in list_lines:
            key_item = line.split(": ")
            if key_item[0] == 'Subject':
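                # the Subject value itself contains ": ", so the split yields three
                # pieces; rejoin them (the dropped separator does not matter here)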
                message_dict['Subject'] = key_item[1] + key_item[2]
            else:
                try:
                    message_dict[key_item[0]] = key_item[1]
                except IndexError:
                    # IndexError happens when the message contains consecutive
                    # "\r\n" sequences (blank lines): the split then yields a
                    # single item. Such lines carry no header data, so they
                    # are safely ignored.
                    pass

        #-------------------------------------------------------
        # make assertions
        #-------------------------------------------------------

        self.assertEquals(msg_tuple[1], '*****@*****.**' )
        #self.assertEquals(msg_tuple[0], ION_NOTIFICATION_EMAIL_ADDRESS)

        #self.assertEquals(message_dict['From'], ION_NOTIFICATION_EMAIL_ADDRESS)
        self.assertEquals(message_dict['To'], '*****@*****.**')
        self.assertEquals(message_dict['Event'].rstrip('\r'), 'ResourceLifecycleEvent')
        self.assertEquals(message_dict['Originator'].rstrip('\r'), 'Some_Resource_Agent_ID1')
        self.assertEquals(message_dict['Description'].rstrip('\r'), 'RLE test event')

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_sms(self):

        proc1 = self.container.proc_manager.procs_by_name['user_notification']

        # Create a user and get the user_id
        user = UserInfo(name = 'new_user')
        user_id, _ = self.rrc.create(user)

        # set up....
        notification_id = self.unsc.create_sms(event_type='ResourceLifecycleEvent',
            event_subtype=None,
            origin='Some_Resource_Agent_ID1',
            origin_type=None,
            user_id=user_id,
            phone = '401-XXX-XXXX',
            provider='T-Mobile',
            message_header='message_header',
            parser='parser',
            )

        #------------------------------------------------------------------------------------------------------
        # Set up so that we can capture the message and headers passed into the
        # subscription callback method of the EmailEventProcessor
        #------------------------------------------------------------------------------------------------------

        # publish an event to trigger the notification
        rle_publisher = EventPublisher("ResourceLifecycleEvent")
        rle_publisher.publish_event(origin='Some_Resource_Agent_ID1', description="RLE test event")


        msg_tuple = proc1.event_processors[notification_id].smtp_client.sentmail.get(timeout=4)

        self.assertTrue(proc1.event_processors[notification_id].smtp_client.sentmail.empty())

        message = msg_tuple[2]
        list_lines = message.split("\n")

        #-------------------------------------------------------
        # parse the message body
        #-------------------------------------------------------

        message_dict = {}
        for line in list_lines:
            key_item = line.split(": ")
            if key_item[0] == 'Subject':
                message_dict['Subject'] = key_item[1] + key_item[2]
            else:
                try:
                    message_dict[key_item[0]] = key_item[1]
                except IndexError:
                    # IndexError happens when the message contains consecutive
                    # "\r\n" sequences (blank lines): the split then yields a
                    # single item. Such lines carry no header data, so they
                    # are safely ignored.
                    pass

        #-------------------------------------------------------
        # make assertions
        #-------------------------------------------------------

        self.assertEquals(msg_tuple[1], '*****@*****.**' )
        #self.assertEquals(msg_tuple[0], ION_NOTIFICATION_EMAIL_ADDRESS)
        self.assertEquals(message_dict['Description'].rstrip('\r'), 'RLE test event')

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_event_detection(self):

        proc1 = self.container.proc_manager.procs_by_name['user_notification']

        # Create a user and get the user_id
        user = UserInfo(name = 'new_user')
        user_id, _ = self.rrc.create(user)

        # Create detection notification
        dfilt = DetectionFilterConfig()

        dfilt.processing['condition'] = 5
        dfilt.processing['comparator'] = '>'
        dfilt.processing['filter_field'] = 'voltage'

        dfilt.delivery['message'] = 'I got my detection event!'
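        # i.e. notify when an ExampleDetectableEvent arrives whose voltage field is > 5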

        notification_id = self.unsc.create_detection_filter(event_type='ExampleDetectableEvent',
            event_subtype=None,
            origin='Some_Resource_Agent_ID1',
            origin_type=None,
            user_id=user_id,
            filter_config=dfilt
            )

        #---------------------------------------------------------------------------------
        # Create event subscription for resulting detection event
        #---------------------------------------------------------------------------------

        # Create an email notification so that when the DetectionEventProcessor
        # detects an event and fires its own output event, it will be caught by an
        # EmailEventProcessor and an email will be sent to the user

        notification_id_2 = self.unsc.create_email(event_type='DetectionEvent',
            event_subtype=None,
            origin='DetectionEventProcessor',
            origin_type=None,
            user_id=user_id,
            email='*****@*****.**',
            mode = DeliveryMode.UNFILTERED,
            message_header='Detection event',
            parser='parser',
            period=1)


        # Send an event that should NOT be detected
        rle_publisher = EventPublisher("ExampleDetectableEvent")

        # since the voltage field in this event is less than 5, it will not be detected
        rle_publisher.publish_event(origin='Some_Resource_Agent_ID1',
                                    description="RLE test event",
                                    voltage = 3)

        # Check at the end of the test to make sure this event never triggered a Detectable!

        # Send an event that SHOULD be detected

        # since the voltage field in this event is greater than 5, it WILL be detected
        rle_publisher = EventPublisher("ExampleDetectableEvent")
        rle_publisher.publish_event(origin='Some_Resource_Agent_ID1',
                                    description="RLE test event",
                                    voltage = 10)

        #-------------------------------------------------------
        # make assertions
        #-------------------------------------------------------

        msg_tuple = proc1.event_processors[notification_id_2].smtp_client.sentmail.get(timeout=4)

        # The first event never triggered an email because its voltage was less than 5, so the queue is now empty
        self.assertTrue(proc1.event_processors[notification_id_2].smtp_client.sentmail.empty())

        self.assertEquals(msg_tuple[1], '*****@*****.**' )
        #self.assertEquals(msg_tuple[0], ION_NOTIFICATION_EMAIL_ADDRESS)

        # parse the message body
        message = msg_tuple[2]
        list_lines = message.split("\n")

        message_dict = {}
        for line in list_lines:
            key_item = line.split(": ")
            if key_item[0] == 'Subject':
                message_dict['Subject'] = key_item[1] + key_item[2]
            else:
                try:
                    message_dict[key_item[0]] = key_item[1]
                except IndexError:
                    # IndexError happens when the message contains consecutive
                    # "\r\n" sequences (blank lines): the split then yields a
                    # single item. Such lines carry no header data, so they
                    # are safely ignored.
                    pass

        #self.assertEquals(message_dict['From'], ION_NOTIFICATION_EMAIL_ADDRESS)
        self.assertEquals(message_dict['To'], '*****@*****.**')
        self.assertEquals(message_dict['Event'].rstrip('\r'), 'DetectionEvent')
        self.assertEquals(message_dict['Originator'].rstrip('\r'), 'DetectionEventProcessor')
        self.assertEquals(message_dict['Description'].rstrip('\r'), 'Event was detected by DetectionEventProcessor')

    @unittest.skip('interface has changed!')
    def test_find_event_types_for_resource(self):
        # create a dataset object in the RR to pass into the UNS method
        dataset_object = IonObject(RT.DataSet, name="dataset1")
        dataset_id, version = self.rrc.create(dataset_object)
        
        # get the list of event types for the dataset
        events = self.unsc.find_event_types_for_resource(dataset_id)
        log.debug("dataset events = " + str(events))
        if not events == ['dataset_supplement_added', 'dataset_change']:
            self.fail("failed to return correct list of event types")
            
        # try to pass in an id of a resource that doesn't exist (should fail)
        try:
            events = self.unsc.find_event_types_for_resource("bogus_id")
            self.fail("failed to detect non-existant resource")
        except:
            pass

    @unittest.skip('interface has changed!')
    def test_create_two_user_notifications(self):
        # create user with email address in RR
        user_identity_object = IonObject(RT.ActorIdentity, name="user1")
        user_id = self.imc.create_actor_identity(user_identity_object)
        user_info_object = IonObject(RT.UserInfo, {"name":"user1_info", "contact":{"email":'*****@*****.**'}})
        self.imc.create_user_info(user_id, user_info_object)
        
        # create first notification
        notification_object1 = IonObject(RT.NotificationRequest, {"name":"notification1",
                                                                 "origin_list":['Some_Resource_Agent_ID1'],
                                                                 "events_list":['ResourceLifecycleEvent']})
        notification_id1 = self.unsc.create_notification(notification_object1, user_id)
        # create second notification
        notification_object2 = IonObject(RT.NotificationRequest, {"name":"notification2",
                                                                 "origin_list":['Some_Resource_Agent_ID2'],
                                                                 "events_list":['DataEvent']})
        notification_id2 = self.unsc.create_notification(notification_object2, user_id)
        
        # read the notifications back and check that they are correct
        n1 = self.unsc.read_notification(notification_id1)
        if n1.name != notification_object1.name or \
           n1.origin_list != notification_object1.origin_list or \
           n1.events_list != notification_object1.events_list:
            self.fail("notification was not correct")
        n2 = self.unsc.read_notification(notification_id2)
        if n2.name != notification_object2.name or \
           n2.origin_list != notification_object2.origin_list or \
           n2.events_list != notification_object2.events_list:
            self.fail("notification was not correct")

    @unittest.skip('interface has changed!')
    def test_delete_user_notifications(self):
        # create user with email address in RR
        user_identity_object = IonObject(RT.ActorIdentity, name="user1")
        user_id = self.imc.create_actor_identity(user_identity_object)
        user_info_object = IonObject(RT.UserInfo, {"name":"user1_info", "contact":{"email":'*****@*****.**'}})
        self.imc.create_user_info(user_id, user_info_object)
        
        # create first notification
        notification_object1 = IonObject(RT.NotificationRequest, {"name":"notification1",
                                                                 "origin_list":['Some_Resource_Agent_ID1'],
                                                                 "events_list":['ResourceLifecycleEvent']})
        notification1_id = self.unsc.create_notification(notification_object1, user_id)
        # create second notification
        notification_object2 = IonObject(RT.NotificationRequest, {"name":"notification2",
                                                                 "origin_list":['Some_Resource_Agent_ID2'],
                                                                 "events_list":['DataEvent']})
        notification2_id = self.unsc.create_notification(notification_object2, user_id)
        
        # delete both notifications
        self.unsc.delete_notification(notification1_id)
        self.unsc.delete_notification(notification2_id)
        
        # check that the notifications are not there
        try:
            n1 = self.unsc.read_notification(notification1_id)
        except:
            try:
                n2 = self.unsc.read_notification(notification2_id)
            except:
                return
        self.fail("failed to delete notifications")

    @unittest.skip('interface has changed!')
    def test_find_user_notifications(self):
        # create user with email address in RR
        user_identity_object = IonObject(RT.ActorIdentity, name="user1")
        user_id = self.imc.create_actor_identity(user_identity_object)
        user_info_object = IonObject(RT.UserInfo, {"name":"user1_info", "contact":{"email":'*****@*****.**'}})
        self.imc.create_user_info(user_id, user_info_object)

        # create first notification
        notification_object = IonObject(RT.NotificationRequest, {"name":"notification1",
                                                                 "origin_list":['Some_Resource_Agent_ID1'],
                                                                 "events_list":['ResourceLifecycleEvent']})

        self.unsc.create_notification(notification_object, user_id)
        # create second notification
        notification_object = IonObject(RT.NotificationRequest, {"name":"notification2",
                                                                 "origin_list":['Some_Resource_Agent_ID2'],
                                                                 "events_list":['DataEvent']})
        self.unsc.create_notification(notification_object, user_id)
        
        # try to find all notifications for user
        notifications = self.unsc.find_notifications_by_user(user_id)
        if len(notifications) != 2:
            self.fail("failed to find all notifications")

    @unittest.skip('interface has changed!')
    def test_update_user_notification(self):
        # create user with email address in RR
        user_identity_object = IonObject(RT.ActorIdentity, name="user1")
        user_id = self.imc.create_actor_identity(user_identity_object)
        user_info_object = IonObject(RT.UserInfo, {"name":"user1_info", "contact":{"email":'*****@*****.**'}})
        self.imc.create_user_info(user_id, user_info_object)

        # create a notification
        notification_object = IonObject(RT.NotificationRequest, {"name":"notification1",
                                                                 "origin_list":['Some_Resource_Agent_ID1'],
                                                                 "events_list":['ResourceLifecycleEvent']})
        notification_id = self.unsc.create_notification(notification_object, user_id)
        
        # read back the notification and change it
        notification = self.unsc.read_notification(notification_id)
        notification.origin_list = ['Some_Resource_Agent_ID5']
        self.unsc.update_notification(notification)
        
        # read back the notification and check that it got changed
        notification = self.unsc.read_notification(notification_id)
        if notification.origin_list != ['Some_Resource_Agent_ID5']:
            self.fail("failed to change notification")

    @unittest.skip('interface has changed!')
    def test_send_notification_emails(self):
        # create user with email address in RR
        user_identity_object = IonObject(RT.ActorIdentity, name="user1")
        user_id = self.imc.create_actor_identity(user_identity_object)
        user_info_object = IonObject(RT.UserInfo, {"name":"user1_info", "contact":{"email":'*****@*****.**'}})
        self.imc.create_user_info(user_id, user_info_object)

        # create first notification
        notification_object = IonObject(RT.NotificationRequest, {"name":"notification1",
                                                                 "origin_list":['Some_Resource_Agent_ID1'],
                                                                 "events_list":['ResourceLifecycleEvent']})
        self.unsc.create_notification(notification_object, user_id)
        # create second notification
        notification_object = IonObject(RT.NotificationRequest, {"name":"notification2",
                                                                 "origin_list":['Some_Resource_Agent_ID2'],
                                                                 "events_list":['DataEvent']})
        self.unsc.create_notification(notification_object, user_id)
        
        # publish an event for each notification to generate the emails;
        # these can't easily be checked in software, so verify them manually at the [email protected] account
        rle_publisher = EventPublisher("ResourceLifecycleEvent")
        rle_publisher.publish_event(origin='Some_Resource_Agent_ID1', description="RLE test event")
        de_publisher = EventPublisher("DataEvent")
        de_publisher.publish_event(origin='Some_Resource_Agent_ID2', description="DE test event")
        gevent.sleep(1)

    @unittest.skip('interface has changed!')
    def test_find_events(self):
        # publish some events for the event repository
        rle_publisher = EventPublisher("ResourceLifecycleEvent")
        de_publisher = EventPublisher("DataEvent")
        rle_publisher.publish_event(origin='Some_Resource_Agent_ID1', description="RLE test event1")
        rle_publisher.publish_event(origin='Some_Resource_Agent_ID1', description="RLE test event2")
        rle_publisher.publish_event(origin='Some_Resource_Agent_ID1', description="RLE test event3")
        de_publisher.publish_event(origin='Some_Resource_Agent_ID2', description="DE test event1")
        de_publisher.publish_event(origin='Some_Resource_Agent_ID2', description="DE test event2")
        de_publisher.publish_event(origin='Some_Resource_Agent_ID2', description="DE test event3")
        
        # find all events for the originator 'Some_Resource_Agent_ID1'
        events = self.unsc.find_events(origin='Some_Resource_Agent_ID1')
        if len(events) != 3:
            self.fail("failed to find all events")  
        for event in events:
            log.debug("event=" + str(event))
            if event[1][0] != 'Some_Resource_Agent_ID1':
                self.fail("failed to find correct events")
                  
        # find all events of type 'DataEvent'
        events = self.unsc.find_events(type='DataEvent')
        if len(events) != 3:
            self.fail("failed to find all events")  
        for event in events:
            log.debug("event=" + str(event))
            if event[1][0] != 'DataEvent':
                self.fail("failed to find correct events") 
                 
        # find 2 events for the originator 'Some_Resource_Agent_ID2'
        events = self.unsc.find_events(origin='Some_Resource_Agent_ID2', limit=2)
        if len(events) != 2:
            self.fail("failed to find all events")  
        for event in events:
            log.debug("event=" + str(event))
            if event[1][0] != 'Some_Resource_Agent_ID2':
                self.fail("failed to find correct events")
            
        # find all events for the originator 'Some_Resource_Agent_ID1' in reverse time order
        events = self.unsc.find_events(origin='Some_Resource_Agent_ID1', descending=True)
        if len(events) != 3:
            self.fail("failed to find all events")  
        for event in events:
            log.debug("event=" + str(event))
            if event[1][0] != 'Some_Resource_Agent_ID1':
                self.fail("failed to find correct events")
Beispiel #3
0
class UserNotificationIntTest(IonIntegrationTestCase):
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2dm.yml')

        self.unsc = UserNotificationServiceClient(node=self.container.node)
        self.rrc = ResourceRegistryServiceClient(node=self.container.node)
        self.imc = IdentityManagementServiceClient(node=self.container.node)

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Skip test while in CEI LAUNCH mode')
    def test_email(self):

        proc1 = self.container.proc_manager.procs_by_name['user_notification']

        # Create a user and get the user_id
        user = UserInfo(name='new_user')
        user_id, _ = self.rrc.create(user)

        # set up....
        notification_id = self.unsc.create_email(
            event_type='ResourceLifecycleEvent',
            event_subtype=None,
            origin='Some_Resource_Agent_ID1',
            origin_type=None,
            user_id=user_id,
            email='*****@*****.**',
            mode=DeliveryMode.DIGEST,
            message_header='message_header',
            parser='parser',
            period=1)

        #------------------------------------------------------------------------------------------------------
        # Setup so as to be able to get the message and headers going into the
        # subscription callback method of the EmailEventProcessor
        #------------------------------------------------------------------------------------------------------

        # publish an event for each notification to generate the emails
        rle_publisher = EventPublisher("ResourceLifecycleEvent")
        rle_publisher.publish_event(origin='Some_Resource_Agent_ID1',
                                    description="RLE test event")

        msg_tuple = proc1.event_processors[
            notification_id].smtp_client.sentmail.get(timeout=4)

        self.assertTrue(proc1.event_processors[notification_id].smtp_client.
                        sentmail.empty())

        message = msg_tuple[2]
        list_lines = message.split("\n")

        #-------------------------------------------------------
        # parse the message body
        #-------------------------------------------------------

        message_dict = {}
        for line in list_lines:
            key_item = line.split(": ")
            if key_item[0] == 'Subject':
                message_dict['Subject'] = key_item[1] + key_item[2]
            else:
                try:
                    message_dict[key_item[0]] = key_item[1]
                except IndexError as exc:
                    # these IndexError exceptions happen only because the message sometimes
                    # has successive /r/n (i.e. new lines) and therefore,
                    # the indexing goes out of range. These new lines
                    # can just be ignored. So we ignore the exceptions here.
                    pass

        #-------------------------------------------------------
        # make assertions
        #-------------------------------------------------------

        self.assertEquals(msg_tuple[1], '*****@*****.**')
        #self.assertEquals(msg_tuple[0], ION_NOTIFICATION_EMAIL_ADDRESS)

        #self.assertEquals(message_dict['From'], ION_NOTIFICATION_EMAIL_ADDRESS)
        self.assertEquals(message_dict['To'], '*****@*****.**')
        self.assertEquals(message_dict['Event'].rstrip('\r'),
                          'ResourceLifecycleEvent')
        self.assertEquals(message_dict['Originator'].rstrip('\r'),
                          'Some_Resource_Agent_ID1')
        self.assertEquals(message_dict['Description'].rstrip('\r'),
                          'RLE test event')

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Skip test while in CEI LAUNCH mode')
    def test_sms(self):

        proc1 = self.container.proc_manager.procs_by_name['user_notification']

        # Create a user and get the user_id
        user = UserInfo(name='new_user')
        user_id, _ = self.rrc.create(user)

        # set up....
        notification_id = self.unsc.create_sms(
            event_type='ResourceLifecycleEvent',
            event_subtype=None,
            origin='Some_Resource_Agent_ID1',
            origin_type=None,
            user_id=user_id,
            phone='401-XXX-XXXX',
            provider='T-Mobile',
            message_header='message_header',
            parser='parser',
        )

        #------------------------------------------------------------------------------------------------------
        # Setup so as to be able to get the message and headers going into the
        # subscription callback method of the EmailEventProcessor
        #------------------------------------------------------------------------------------------------------

        # publish an event for each notification to generate the emails
        rle_publisher = EventPublisher("ResourceLifecycleEvent")
        rle_publisher.publish_event(origin='Some_Resource_Agent_ID1',
                                    description="RLE test event")

        msg_tuple = proc1.event_processors[
            notification_id].smtp_client.sentmail.get(timeout=4)

        self.assertTrue(proc1.event_processors[notification_id].smtp_client.
                        sentmail.empty())

        message = msg_tuple[2]
        list_lines = message.split("\n")

        #-------------------------------------------------------
        # parse the message body
        #-------------------------------------------------------

        message_dict = {}
        for line in list_lines:
            key_item = line.split(": ")
            if key_item[0] == 'Subject':
                message_dict['Subject'] = key_item[1] + key_item[2]
            else:
                try:
                    message_dict[key_item[0]] = key_item[1]
                except IndexError as exc:
                    # these IndexError exceptions happen only because the message sometimes
                    # has successive /r/n (i.e. new lines) and therefore,
                    # the indexing goes out of range. These new lines
                    # can just be ignored. So we ignore the exceptions here.
                    pass

        #-------------------------------------------------------
        # make assertions
        #-------------------------------------------------------

        self.assertEquals(msg_tuple[1], '*****@*****.**')
        #self.assertEquals(msg_tuple[0], ION_NOTIFICATION_EMAIL_ADDRESS)
        self.assertEquals(message_dict['Description'].rstrip('\r'),
                          'RLE test event')

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Skip test while in CEI LAUNCH mode')
    def test_event_detection(self):

        proc1 = self.container.proc_manager.procs_by_name['user_notification']

        # Create a user and get the user_id
        user = UserInfo(name='new_user')
        user_id, _ = self.rrc.create(user)

        # Create detection notification
        dfilt = DetectionFilterConfig()

        dfilt.processing['condition'] = 5
        dfilt.processing['comparator'] = '>'
        dfilt.processing['filter_field'] = 'voltage'

        dfilt.delivery['message'] = 'I got my detection event!'

        notification_id = self.unsc.create_detection_filter(
            event_type='ExampleDetectableEvent',
            event_subtype=None,
            origin='Some_Resource_Agent_ID1',
            origin_type=None,
            user_id=user_id,
            filter_config=dfilt)

        #---------------------------------------------------------------------------------
        # Create event subscription for resulting detection event
        #---------------------------------------------------------------------------------

        # Create an email notification so that when the DetectionEventProcessor
        # detects an event and fires its own output event, that output event will
        # be caught by an EmailEventProcessor and an email will be sent to the user

        notification_id_2 = self.unsc.create_email(
            event_type='DetectionEvent',
            event_subtype=None,
            origin='DetectionEventProcessor',
            origin_type=None,
            user_id=user_id,
            email='*****@*****.**',
            mode=DeliveryMode.UNFILTERED,
            message_header='Detection event',
            parser='parser',
            period=1)

        # Send event that is not detected
        # publish an event for each notification to generate the emails
        rle_publisher = EventPublisher("ExampleDetectableEvent")

        # since the voltage field in this event is less than 5, it will not be detected
        rle_publisher.publish_event(origin='Some_Resource_Agent_ID1',
                                    description="RLE test event",
                                    voltage=3)

        # The assertions at the end of the test verify that this event never triggered a detection

        # Send Event that is detected
        # publish an event for each notification to generate the emails

        # since the voltage field in this event is greater than 5, it WILL be detected
        rle_publisher = EventPublisher("ExampleDetectableEvent")
        rle_publisher.publish_event(origin='Some_Resource_Agent_ID1',
                                    description="RLE test event",
                                    voltage=10)

        #-------------------------------------------------------
        # make assertions
        #-------------------------------------------------------

        msg_tuple = proc1.event_processors[
            notification_id_2].smtp_client.sentmail.get(timeout=4)

        # The first event never triggered an email because its voltage was less than 5, so the queue is now empty
        self.assertTrue(proc1.event_processors[notification_id_2].smtp_client.
                        sentmail.empty())

        self.assertEquals(msg_tuple[1], '*****@*****.**')
        #self.assertEquals(msg_tuple[0], ION_NOTIFICATION_EMAIL_ADDRESS)

        # parse the message body
        message = msg_tuple[2]
        list_lines = message.split("\n")

        message_dict = {}
        for line in list_lines:
            key_item = line.split(": ")
            if key_item[0] == 'Subject':
                message_dict['Subject'] = key_item[1] + key_item[2]
            else:
                try:
                    message_dict[key_item[0]] = key_item[1]
                except IndexError as exc:
                    # these IndexError exceptions happen only because the message sometimes
                    # has successive /r/n (i.e. new lines) and therefore,
                    # the indexing goes out of range. These new lines
                    # can just be ignored. So we ignore the exceptions here.
                    pass

        #self.assertEquals(message_dict['From'], ION_NOTIFICATION_EMAIL_ADDRESS)
        self.assertEquals(message_dict['To'], '*****@*****.**')
        self.assertEquals(message_dict['Event'].rstrip('\r'), 'DetectionEvent')
        self.assertEquals(message_dict['Originator'].rstrip('\r'),
                          'DetectionEventProcessor')
        self.assertEquals(message_dict['Description'].rstrip('\r'),
                          'Event was detected by DetectionEventProcessor')
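
    # The detection exercised above reduces to a numeric comparison between
    # the event's filter_field value and the configured condition. A
    # standalone, hedged sketch of the '>' comparator used in dfilt (the real
    # DetectionEventProcessor implementation may differ):
    @staticmethod
    def _would_detect(value, condition=5, comparator='>'):
        import operator
        ops = {'>': operator.gt, '<': operator.lt, '=': operator.eq}
        return ops[comparator](value, condition)  # e.g. value=10 -> True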

    @unittest.skip('interface has changed!')
    def test_find_event_types_for_resource(self):
        # create a dataset object in the RR to pass into the UNS method
        dataset_object = IonObject(RT.DataSet, name="dataset1")
        dataset_id, version = self.rrc.create(dataset_object)

        # get the list of event types for the dataset
        events = self.unsc.find_event_types_for_resource(dataset_id)
        log.debug("dataset events = " + str(events))
        if events != ['dataset_supplement_added', 'dataset_change']:
            self.fail("failed to return correct list of event types")

        # try to pass in an id of a resource that doesn't exist (should fail)
        try:
            events = self.unsc.find_event_types_for_resource("bogus_id")
            self.fail("failed to detect non-existant resource")
        except:
            pass

    @unittest.skip('interface has changed!')
    def test_create_two_user_notifications(self):
        # create user with email address in RR
        user_identity_object = IonObject(RT.ActorIdentity, name="user1")
        user_id = self.imc.create_actor_identity(user_identity_object)
        user_info_object = IonObject(
            RT.UserInfo, {
                "name": "user1_info",
                "contact": {
                    "email": '*****@*****.**'
                }
            })
        self.imc.create_user_info(user_id, user_info_object)

        # create first notification
        notification_object1 = IonObject(
            RT.NotificationRequest, {
                "name": "notification1",
                "origin_list": ['Some_Resource_Agent_ID1'],
                "events_list": ['ResourceLifecycleEvent']
            })
        notification_id1 = self.unsc.create_notification(
            notification_object1, user_id)
        # create second notification
        notification_object2 = IonObject(
            RT.NotificationRequest, {
                "name": "notification2",
                "origin_list": ['Some_Resource_Agent_ID2'],
                "events_list": ['DataEvent']
            })
        notification_id2 = self.unsc.create_notification(
            notification_object2, user_id)

        # read the notifications back and check that they are correct
        n1 = self.unsc.read_notification(notification_id1)
        if n1.name != notification_object1.name or \
           n1.origin_list != notification_object1.origin_list or \
           n1.events_list != notification_object1.events_list:
            self.fail("notification was not correct")
        n2 = self.unsc.read_notification(notification_id2)
        if n2.name != notification_object2.name or \
           n2.origin_list != notification_object2.origin_list or \
           n2.events_list != notification_object2.events_list:
            self.fail("notification was not correct")

    @unittest.skip('interface has changed!')
    def test_delete_user_notifications(self):
        # create user with email address in RR
        user_identity_object = IonObject(RT.ActorIdentity, name="user1")
        user_id = self.imc.create_actor_identity(user_identity_object)
        user_info_object = IonObject(
            RT.UserInfo, {
                "name": "user1_info",
                "contact": {
                    "email": '*****@*****.**'
                }
            })
        self.imc.create_user_info(user_id, user_info_object)

        # create first notification
        notification_object1 = IonObject(
            RT.NotificationRequest, {
                "name": "notification1",
                "origin_list": ['Some_Resource_Agent_ID1'],
                "events_list": ['ResourceLifecycleEvent']
            })
        notification1_id = self.unsc.create_notification(
            notification_object1, user_id)
        # create second notification
        notification_object2 = IonObject(
            RT.NotificationRequest, {
                "name": "notification2",
                "origin_list": ['Some_Resource_Agent_ID2'],
                "events_list": ['DataEvent']
            })
        notification2_id = self.unsc.create_notification(
            notification_object2, user_id)

        # delete both notifications
        self.unsc.delete_notification(notification1_id)
        self.unsc.delete_notification(notification2_id)

        # check that the notifications are not there
        try:
            n1 = self.unsc.read_notification(notification1_id)
        except:
            try:
                n2 = self.unsc.read_notification(notification2_id)
            except:
                return
        self.fail("failed to delete notifications")

    @unittest.skip('interface has changed!')
    def test_find_user_notifications(self):
        # create user with email address in RR
        user_identity_object = IonObject(RT.ActorIdentity, name="user1")
        user_id = self.imc.create_actor_identity(user_identity_object)
        user_info_object = IonObject(
            RT.UserInfo, {
                "name": "user1_info",
                "contact": {
                    "email": '*****@*****.**'
                }
            })
        self.imc.create_user_info(user_id, user_info_object)

        # create first notification
        notification_object = IonObject(
            RT.NotificationRequest, {
                "name": "notification1",
                "origin_list": ['Some_Resource_Agent_ID1'],
                "events_list": ['ResourceLifecycleEvent']
            })

        self.unsc.create_notification(notification_object, user_id)
        # create second notification
        notification_object = IonObject(
            RT.NotificationRequest, {
                "name": "notification2",
                "origin_list": ['Some_Resource_Agent_ID2'],
                "events_list": ['DataEvent']
            })
        self.unsc.create_notification(notification_object, user_id)

        # try to find all notifications for user
        notifications = self.unsc.find_notifications_by_user(user_id)
        if len(notifications) != 2:
            self.fail("failed to find all notifications")

    @unittest.skip('interface has changed!')
    def test_update_user_notification(self):
        # create user with email address in RR
        user_identity_object = IonObject(RT.ActorIdentity, name="user1")
        user_id = self.imc.create_actor_identity(user_identity_object)
        user_info_object = IonObject(
            RT.UserInfo, {
                "name": "user1_info",
                "contact": {
                    "email": '*****@*****.**'
                }
            })
        self.imc.create_user_info(user_id, user_info_object)

        # create a notification
        notification_object = IonObject(
            RT.NotificationRequest, {
                "name": "notification1",
                "origin_list": ['Some_Resource_Agent_ID1'],
                "events_list": ['ResourceLifecycleEvent']
            })
        notification_id = self.unsc.create_notification(
            notification_object, user_id)

        # read back the notification and change it
        notification = self.unsc.read_notification(notification_id)
        notification.origin_list = ['Some_Resource_Agent_ID5']
        self.unsc.update_notification(notification)

        # read back the notification and check that it got changed
        notification = self.unsc.read_notification(notification_id)
        if notification.origin_list != ['Some_Resource_Agent_ID5']:
            self.fail("failed to change notification")

    @unittest.skip('interface has changed!')
    def test_send_notification_emails(self):
        # create user with email address in RR
        user_identity_object = IonObject(RT.ActorIdentity, name="user1")
        user_id = self.imc.create_actor_identity(user_identity_object)
        user_info_object = IonObject(RT.UserInfo, {
            "name": "user1_info",
            "contact": {
                "email": '*****@*****.**'
            }
        })
        self.imc.create_user_info(user_id, user_info_object)

        # create first notification
        notification_object = IonObject(
            RT.NotificationRequest, {
                "name": "notification1",
                "origin_list": ['Some_Resource_Agent_ID1'],
                "events_list": ['ResourceLifecycleEvent']
            })
        self.unsc.create_notification(notification_object, user_id)
        # create second notification
        notification_object = IonObject(
            RT.NotificationRequest, {
                "name": "notification2",
                "origin_list": ['Some_Resource_Agent_ID2'],
                "events_list": ['DataEvent']
            })
        self.unsc.create_notification(notification_object, user_id)

        # publish an event for each notification to generate the emails
        # the emails can't easily be checked in software, so verify them manually at the [email protected] account
        rle_publisher = EventPublisher("ResourceLifecycleEvent")
        rle_publisher.publish_event(origin='Some_Resource_Agent_ID1',
                                    description="RLE test event")
        de_publisher = EventPublisher("DataEvent")
        de_publisher.publish_event(origin='Some_Resource_Agent_ID2',
                                   description="DE test event")
        gevent.sleep(1)

    @unittest.skip('interface has changed!')
    def test_find_events(self):
        # publish some events for the event repository
        rle_publisher = EventPublisher("ResourceLifecycleEvent")
        de_publisher = EventPublisher("DataEvent")
        rle_publisher.publish_event(origin='Some_Resource_Agent_ID1',
                                    description="RLE test event1")
        rle_publisher.publish_event(origin='Some_Resource_Agent_ID1',
                                    description="RLE test event2")
        rle_publisher.publish_event(origin='Some_Resource_Agent_ID1',
                                    description="RLE test event3")
        de_publisher.publish_event(origin='Some_Resource_Agent_ID2',
                                   description="DE test event1")
        de_publisher.publish_event(origin='Some_Resource_Agent_ID2',
                                   description="DE test event2")
        de_publisher.publish_event(origin='Some_Resource_Agent_ID2',
                                   description="DE test event3")

        # find all events for the originator 'Some_Resource_Agent_ID1'
        events = self.unsc.find_events(origin='Some_Resource_Agent_ID1')
        if len(events) != 3:
            self.fail("failed to find all events")
        for event in events:
            log.debug("event=" + str(event))
            if event[1][0] != 'Some_Resource_Agent_ID1':
                self.fail("failed to find correct events")

        # find all events of type 'DataEvent'
        events = self.unsc.find_events(type='DataEvent')
        if len(events) != 3:
            self.fail("failed to find all events")
        for event in events:
            log.debug("event=" + str(event))
            if event[1][0] != 'DataEvent':
                self.fail("failed to find correct events")

        # find 2 events for the originator 'Some_Resource_Agent_ID2'
        events = self.unsc.find_events(origin='Some_Resource_Agent_ID2',
                                       limit=2)
        if len(events) != 2:
            self.fail("failed to find all events")
        for event in events:
            log.debug("event=" + str(event))
            if event[1][0] != 'Some_Resource_Agent_ID2':
                self.fail("failed to find correct events")

        # find all events for the originator 'Some_Resource_Agent_ID1' in reverse time order
        events = self.unsc.find_events(origin='Some_Resource_Agent_ID1',
                                       descending=True)
        if len(events) != 3:
            self.fail("failed to find all events")
        for event in events:
            log.debug("event=" + str(event))
            if event[1][0] != 'Some_Resource_Agent_ID1':
                self.fail("failed to find correct events")

class UserNotificationIntTest(IonIntegrationTestCase):
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2dm.yml')

        self.unsc = UserNotificationServiceClient(node=self.container.node)
        self.rrc = ResourceRegistryServiceClient(node=self.container.node)
        self.imc = IdentityManagementServiceClient(node=self.container.node)
        
    def xtest_find_event_types_for_resource(self):
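        # the 'xtest_' prefix keeps this test from being collected by the test runner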
        dataset_object = IonObject(RT.DataSet, name="dataset1")
        dataset_id, version = self.rrc.create(dataset_object)
        events = self.unsc.find_event_types_for_resource(dataset_id)
        log.debug("dataset events = " + str(events))
        try:
            events = self.unsc.find_event_types_for_resource("bogus_id")
            self.fail("failed to detect non-existant resource")
        except:
            pass
        
    def test_create_two_user_notifications(self):
        user_identity_object = IonObject(RT.UserIdentity, name="user1")
        user_id = self.imc.create_user_identity(user_identity_object)
        user_info_object = IonObject(RT.UserInfo, {"name":"user1_info", "contact":{"email":'*****@*****.**'}})
        self.imc.create_user_info(user_id, user_info_object)
        notification_object = IonObject(RT.NotificationRequest, {"name":"notification1",
                                                                 "origin_list":['Some_Resource_Agent_ID1'],
                                                                 "events_list":['resource_lifecycle']})
        self.unsc.create_notification(notification_object, user_id)
        notification_object = IonObject(RT.NotificationRequest, {"name":"notification2",
                                                                 "origin_list":['Some_Resource_Agent_ID2'],
                                                                 "events_list":['data']})
        self.unsc.create_notification(notification_object, user_id)

    def test_delete_user_notifications(self):
        user_identity_object = IonObject(RT.UserIdentity, name="user1")
        user_id = self.imc.create_user_identity(user_identity_object)
        user_info_object = IonObject(RT.UserInfo, {"name":"user1_info", "contact":{"email":'*****@*****.**'}})
        self.imc.create_user_info(user_id, user_info_object)
        notification_object1 = IonObject(RT.NotificationRequest, {"name":"notification1",
                                                                 "origin_list":['Some_Resource_Agent_ID1'],
                                                                 "events_list":['resource_lifecycle']})
        notification1_id = self.unsc.create_notification(notification_object1, user_id)
        notification_object2 = IonObject(RT.NotificationRequest, {"name":"notification2",
                                                                 "origin_list":['Some_Resource_Agent_ID2'],
                                                                 "events_list":['data']})
        notification2_id = self.unsc.create_notification(notification_object2, user_id)
        self.unsc.delete_notification(notification1_id)
        self.unsc.delete_notification(notification2_id)

    def test_find_user_notifications(self):
        user_identity_object = IonObject(RT.UserIdentity, name="user1")
        user_id = self.imc.create_user_identity(user_identity_object)
        user_info_object = IonObject(RT.UserInfo, {"name":"user1_info", "contact":{"email":'*****@*****.**'}})
        self.imc.create_user_info(user_id, user_info_object)
        notification_object = IonObject(RT.NotificationRequest, {"name":"notification1",
                                                                 "origin_list":['Some_Resource_Agent_ID1'],
                                                                 "events_list":['resource_lifecycle']})
        self.unsc.create_notification(notification_object, user_id)
        notification_object = IonObject(RT.NotificationRequest, {"name":"notification2",
                                                                 "origin_list":['Some_Resource_Agent_ID2'],
                                                                 "events_list":['data']})
        self.unsc.create_notification(notification_object, user_id)
        notifications = self.unsc.find_notifications_by_user(user_id)
        for n in notifications:
            log.debug("n = " +str(n))

    def test_update_user_notification(self):
        user_identity_object = IonObject(RT.UserIdentity, name="user1")
        user_id = self.imc.create_user_identity(user_identity_object)
        user_info_object = IonObject(RT.UserInfo, {"name":"user1_info", "contact":{"email":'*****@*****.**'}})
        self.imc.create_user_info(user_id, user_info_object)
        notification_object = IonObject(RT.NotificationRequest, {"name":"notification1",
                                                                 "origin_list":['Some_Resource_Agent_ID1'],
                                                                 "events_list":['resource_lifecycle']})
        notification_id = self.unsc.create_notification(notification_object, user_id)
        notification = self.rrc.read(notification_id)
        notification.origin_list = ['Some_Resource_Agent_ID5']
        self.unsc.update_notification(notification)

    def test_send_notification_emails(self):
        user_identity_object = IonObject(RT.UserIdentity, name="user1")
        user_id = self.imc.create_user_identity(user_identity_object)
        user_info_object = IonObject(RT.UserInfo, {"name":"user1_info", "contact":{"email":'*****@*****.**'}})
        self.imc.create_user_info(user_id, user_info_object)
        notification_object = IonObject(RT.NotificationRequest, {"name":"notification1",
                                                                 "origin_list":['Some_Resource_Agent_ID1'],
                                                                 "events_list":['resource_lifecycle']})
        self.unsc.create_notification(notification_object, user_id)
        notification_object = IonObject(RT.NotificationRequest, {"name":"notification2",
                                                                 "origin_list":['Some_Resource_Agent_ID2'],
                                                                 "events_list":['data']})
        self.unsc.create_notification(notification_object, user_id)
        rle_publisher = ResourceLifecycleEventPublisher()
        rle_publisher.create_and_publish_event(origin='Some_Resource_Agent_ID1', description="RLE test event")
        de_publisher = DataEventPublisher()
        de_publisher.create_and_publish_event(origin='Some_Resource_Agent_ID2', description="DE test event")
        gevent.sleep(1)

    def test_find_events(self):
        rle_publisher = ResourceLifecycleEventPublisher(event_repo=self.container.event_repository)
        rle_publisher.create_and_publish_event(origin='Some_Resource_Agent_ID1', description="RLE test event1")
        rle_publisher.create_and_publish_event(origin='Some_Resource_Agent_ID1', description="RLE test event2")
        de_publisher = DataEventPublisher(event_repo=self.container.event_repository)
        de_publisher.create_and_publish_event(origin='Some_Resource_Agent_ID2', description="DE test event1")
        de_publisher.create_and_publish_event(origin='Some_Resource_Agent_ID2', description="DE test event2")
        events = self.unsc.find_events(origin='Some_Resource_Agent_ID1')
        for event in events:
            log.debug("event=" + str(event))
        events = self.unsc.find_events(type='DataEvent')
        for event in events:
            log.debug("event=" + str(event))

class UserNotificationIntTest(IonIntegrationTestCase):
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2dm.yml')

        self.unsc = UserNotificationServiceClient(node=self.container.node)
        self.rrc = ResourceRegistryServiceClient(node=self.container.node)
        self.imc = IdentityManagementServiceClient(node=self.container.node)
        
    def test_find_event_types_for_resource(self):
        # create a dataset object in the RR to pass into the UNS method
        dataset_object = IonObject(RT.DataSet, name="dataset1")
        dataset_id, version = self.rrc.create(dataset_object)
        
        # get the list of event types for the dataset
        events = self.unsc.find_event_types_for_resource(dataset_id)
        log.debug("dataset events = " + str(events))
        if events != ['dataset_supplement_added', 'dataset_change']:
            self.fail("failed to return correct list of event types")
            
        # try to pass in an id of a resource that doesn't exist (should fail)
        try:
            events = self.unsc.find_event_types_for_resource("bogus_id")
            self.fail("failed to detect non-existant resource")
        except:
            pass
        
    def test_create_two_user_notifications(self):
        # create user with email address in RR
        user_identity_object = IonObject(RT.ActorIdentity, name="user1")
        user_id = self.imc.create_actor_identity(user_identity_object)
        user_info_object = IonObject(RT.UserInfo, {"name":"user1_info", "contact":{"email":'*****@*****.**'}})
        self.imc.create_user_info(user_id, user_info_object)
        
        # create first notification
        notification_object1 = IonObject(RT.NotificationRequest, {"name":"notification1",
                                                                 "origin_list":['Some_Resource_Agent_ID1'],
                                                                 "events_list":['ResourceLifecycleEvent']})
        notification_id1 = self.unsc.create_notification(notification_object1, user_id)
        # create second notification
        notification_object2 = IonObject(RT.NotificationRequest, {"name":"notification2",
                                                                 "origin_list":['Some_Resource_Agent_ID2'],
                                                                 "events_list":['DataEvent']})
        notification_id2 = self.unsc.create_notification(notification_object2, user_id)
        
        # read the notifications back and check that they are correct
        n1 = self.unsc.read_notification(notification_id1)
        if n1.name != notification_object1.name or \
           n1.origin_list != notification_object1.origin_list or \
           n1.events_list != notification_object1.events_list:
            self.fail("notification was not correct")
        n2 = self.unsc.read_notification(notification_id2)
        if n2.name != notification_object2.name or \
           n2.origin_list != notification_object2.origin_list or \
           n2.events_list != notification_object2.events_list:
            self.fail("notification was not correct")

    def test_delete_user_notifications(self):
        # create user with email address in RR
        user_identity_object = IonObject(RT.ActorIdentity, name="user1")
        user_id = self.imc.create_actor_identity(user_identity_object)
        user_info_object = IonObject(RT.UserInfo, {"name":"user1_info", "contact":{"email":'*****@*****.**'}})
        self.imc.create_user_info(user_id, user_info_object)
        
        # create first notification
        notification_object1 = IonObject(RT.NotificationRequest, {"name":"notification1",
                                                                 "origin_list":['Some_Resource_Agent_ID1'],
                                                                 "events_list":['ResourceLifecycleEvent']})
        notification1_id = self.unsc.create_notification(notification_object1, user_id)
        # create second notification
        notification_object2 = IonObject(RT.NotificationRequest, {"name":"notification2",
                                                                 "origin_list":['Some_Resource_Agent_ID2'],
                                                                 "events_list":['DataEvent']})
        notification2_id = self.unsc.create_notification(notification_object2, user_id)
        
        # delete both notifications
        self.unsc.delete_notification(notification1_id)
        self.unsc.delete_notification(notification2_id)
        
        # check that the notifications are not there
        try:
            n1 = self.unsc.read_notification(notification1_id)
        except:
            try:
                n2 = self.unsc.read_notification(notification2_id)
            except:
                return
        self.fail("failed to delete notifications")      
        
    def test_find_user_notifications(self):
        # create user with email address in RR
        user_identity_object = IonObject(RT.ActorIdentity, name="user1")
        user_id = self.imc.create_actor_identity(user_identity_object)
        user_info_object = IonObject(RT.UserInfo, {"name":"user1_info", "contact":{"email":'*****@*****.**'}})
        self.imc.create_user_info(user_id, user_info_object)

        # create first notification
        notification_object = IonObject(RT.NotificationRequest, {"name":"notification1",
                                                                 "origin_list":['Some_Resource_Agent_ID1'],
                                                                 "events_list":['ResourceLifecycleEvent']})

        self.unsc.create_notification(notification_object, user_id)
        # create second notification
        notification_object = IonObject(RT.NotificationRequest, {"name":"notification2",
                                                                 "origin_list":['Some_Resource_Agent_ID2'],
                                                                 "events_list":['DataEvent']})
        self.unsc.create_notification(notification_object, user_id)
        
        # try to find all notifications for user
        notifications = self.unsc.find_notifications_by_user(user_id)
        if len(notifications) != 2:
            self.fail("failed to find all notifications")  

    def test_update_user_notification(self):
        # create user with email address in RR
        user_identity_object = IonObject(RT.ActorIdentity, name="user1")
        user_id = self.imc.create_actor_identity(user_identity_object)
        user_info_object = IonObject(RT.UserInfo, {"name":"user1_info", "contact":{"email":'*****@*****.**'}})
        self.imc.create_user_info(user_id, user_info_object)

        # create a notification
        notification_object = IonObject(RT.NotificationRequest, {"name":"notification1",
                                                                 "origin_list":['Some_Resource_Agent_ID1'],
                                                                 "events_list":['ResourceLifecycleEvent']})
        notification_id = self.unsc.create_notification(notification_object, user_id)
        
        # read back the notification and change it
        notification = self.unsc.read_notification(notification_id)
        notification.origin_list = ['Some_Resource_Agent_ID5']
        self.unsc.update_notification(notification)
        
        # read back the notification and check that it got changed
        notification = self.unsc.read_notification(notification_id)
        if notification.origin_list != ['Some_Resource_Agent_ID5']:
            self.fail("failed to change notification")          

    def test_send_notification_emails(self):
        # create user with email address in RR
        user_identity_object = IonObject(RT.ActorIdentity, name="user1")
        user_id = self.imc.create_actor_identity(user_identity_object)
        user_info_object = IonObject(RT.UserInfo, {"name":"user1_info", "contact":{"email":'*****@*****.**'}})
        self.imc.create_user_info(user_id, user_info_object)

        # create first notification
        notification_object = IonObject(RT.NotificationRequest, {"name":"notification1",
                                                                 "origin_list":['Some_Resource_Agent_ID1'],
                                                                 "events_list":['ResourceLifecycleEvent']})
        self.unsc.create_notification(notification_object, user_id)
        # create second notification
        notification_object = IonObject(RT.NotificationRequest, {"name":"notification2",
                                                                 "origin_list":['Some_Resource_Agent_ID2'],
                                                                 "events_list":['DataEvent']})
        self.unsc.create_notification(notification_object, user_id)
        
        # publish an event for each notification to generate the emails
        # the emails can't easily be checked in software, so verify them manually at the [email protected] account
        rle_publisher = EventPublisher("ResourceLifecycleEvent")
        rle_publisher.publish_event(origin='Some_Resource_Agent_ID1', description="RLE test event")
        de_publisher = EventPublisher("DataEvent")
        de_publisher.publish_event(origin='Some_Resource_Agent_ID2', description="DE test event")
        gevent.sleep(1)

    def test_find_events(self):
        # publish some events for the event repository
        rle_publisher = EventPublisher("ResourceLifecycleEvent")
        de_publisher = EventPublisher("DataEvent")
        rle_publisher.publish_event(origin='Some_Resource_Agent_ID1', description="RLE test event1")
        rle_publisher.publish_event(origin='Some_Resource_Agent_ID1', description="RLE test event2")
        rle_publisher.publish_event(origin='Some_Resource_Agent_ID1', description="RLE test event3")
        de_publisher.publish_event(origin='Some_Resource_Agent_ID2', description="DE test event1")
        de_publisher.publish_event(origin='Some_Resource_Agent_ID2', description="DE test event2")
        de_publisher.publish_event(origin='Some_Resource_Agent_ID2', description="DE test event3")
        
        # find all events for the originator 'Some_Resource_Agent_ID1'
        events = self.unsc.find_events(origin='Some_Resource_Agent_ID1')
        if len(events) != 3:
            self.fail("failed to find all events")  
        for event in events:
            log.debug("event=" + str(event))
            if event[1][0] != 'Some_Resource_Agent_ID1':
                self.fail("failed to find correct events")
                  
        # find all events of type 'DataEvent'
        events = self.unsc.find_events(type='DataEvent')
        if len(events) != 3:
            self.fail("failed to find all events")  
        for event in events:
            log.debug("event=" + str(event))
            if event[1][0] != 'DataEvent':
                self.fail("failed to find correct events") 
                 
        # find 2 events for the originator 'Some_Resource_Agent_ID2'
        events = self.unsc.find_events(origin='Some_Resource_Agent_ID2', limit=2)
        if len(events) != 2:
            self.fail("failed to find all events")  
        for event in events:
            log.debug("event=" + str(event))
            if event[1][0] != 'Some_Resource_Agent_ID2':
                self.fail("failed to find correct events")
            
        # find all events for the originator 'Some_Resource_Agent_ID1' in reverse time order
        events = self.unsc.find_events(origin='Some_Resource_Agent_ID1', descending=True)
        if len(events) != 3:
            self.fail("failed to find all events")  
        for event in events:
            log.debug("event=" + str(event))
            if event[1][0] != 'Some_Resource_Agent_ID1':
                self.fail("failed to find correct events")
class TestDataProductManagementServiceIntegration(IonIntegrationTestCase):
    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.dpsc_cli = DataProductManagementServiceClient()
        self.rrclient = ResourceRegistryServiceClient()
        self.damsclient = DataAcquisitionManagementServiceClient()
        self.pubsubcli = PubsubManagementServiceClient()
        self.ingestclient = IngestionManagementServiceClient()
        self.process_dispatcher = ProcessDispatcherServiceClient()
        self.dataset_management = DatasetManagementServiceClient()
        self.unsc = UserNotificationServiceClient()
        self.data_retriever = DataRetrieverServiceClient()
        self.identcli = IdentityManagementServiceClient()

        #------------------------------------------
        # Create the environment
        #------------------------------------------

        self.stream_def_id = self.pubsubcli.create_stream_definition(
            name='SBE37_CDM')

        self.process_definitions = {}
        ingestion_worker_definition = ProcessDefinition(
            name='ingestion worker')
        ingestion_worker_definition.executable = {
            'module':
            'ion.processes.data.ingestion.science_granule_ingestion_worker',
            'class': 'ScienceGranuleIngestionWorker'
        }
        process_definition_id = self.process_dispatcher.create_process_definition(
            process_definition=ingestion_worker_definition)
        self.process_definitions['ingestion_worker'] = process_definition_id

        self.pids = []
        self.exchange_points = []
        self.exchange_names = []

        #------------------------------------------------------------------------------------------------
        # First launch the ingestors
        #------------------------------------------------------------------------------------------------
        self.exchange_space = 'science_granule_ingestion'
        self.exchange_point = 'science_data'
        config = DotDict()
        config.process.datastore_name = 'datasets'
        config.process.queue_name = self.exchange_space

        self.exchange_names.append(self.exchange_space)
        self.exchange_points.append(self.exchange_point)

        pid = self.process_dispatcher.schedule_process(
            self.process_definitions['ingestion_worker'], configuration=config)
        log.debug("the ingestion worker process id: %s", pid)
        self.pids.append(pid)

        self.addCleanup(self.cleaning_up)

    def cleaning_up(self):
        for pid in self.pids:
            log.debug("number of pids to be terminated: %s", len(self.pids))
            try:
                self.process_dispatcher.cancel_process(pid)
                log.debug("Terminated the process: %s", pid)
            except:
                log.debug("could not terminate the process id: %s" % pid)
        IngestionManagementIntTest.clean_subscriptions()

        for xn in self.exchange_names:
            xni = self.container.ex_manager.create_xn_queue(xn)
            xni.delete()
        for xp in self.exchange_points:
            xpi = self.container.ex_manager.create_xp(xp)
            xpi.delete()

    def get_datastore(self, dataset_id):
        dataset = self.dataset_management.read_dataset(dataset_id)
        datastore_name = dataset.datastore_name
        datastore = self.container.datastore_manager.get_datastore(
            datastore_name, DataStore.DS_PROFILE.SCIDATA)
        return datastore

    @attr('EXT')
    @attr('PREP')
    def test_create_data_product(self):

        #------------------------------------------------------------------------------------------------
        # create a stream definition for the data from the ctd simulator
        #------------------------------------------------------------------------------------------------
        parameter_dictionary = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict')
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(
            name='Simulated CTD data',
            parameter_dictionary_id=parameter_dictionary._id)
        log.debug("Created stream def id %s" % ctd_stream_def_id)

        #------------------------------------------------------------------------------------------------
        # test creating a new data product and setting supplemental attributes (geospatial bounds, OOI product name)
        #------------------------------------------------------------------------------------------------

        dp_obj = IonObject(RT.DataProduct,
                           name='DP1',
                           description='some new dp')

        dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 10.0
        dp_obj.geospatial_bounds.geospatial_latitude_limit_south = -10.0
        dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 10.0
        dp_obj.geospatial_bounds.geospatial_longitude_limit_west = -10.0
        dp_obj.ooi_product_name = "PRODNAME"

        #------------------------------------------------------------------------------------------------
        # create the data product, supplying the stream definition created above
        #------------------------------------------------------------------------------------------------

        dp_id = self.dpsc_cli.create_data_product(
            data_product=dp_obj, stream_definition_id=ctd_stream_def_id)
        # Assert that the data product has an associated stream at this stage
        stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream,
                                                   RT.Stream, True)
        self.assertNotEquals(len(stream_ids), 0)

        # Assert that the data product has an associated stream def at this stage
        stream_ids, _ = self.rrclient.find_objects(dp_id,
                                                   PRED.hasStreamDefinition,
                                                   RT.StreamDefinition, True)
        self.assertNotEquals(len(stream_ids), 0)

        self.dpsc_cli.activate_data_product_persistence(dp_id)

        dp_obj = self.dpsc_cli.read_data_product(dp_id)
        self.assertIsNotNone(dp_obj)
        self.assertEquals(dp_obj.geospatial_point_center.lat, 0.0)
        log.debug('Created data product %s', dp_obj)
        #------------------------------------------------------------------------------------------------
        # test creating a new data product with a stream definition
        #------------------------------------------------------------------------------------------------
        log.debug('Creating new data product with a stream definition')
        dp_obj = IonObject(RT.DataProduct,
                           name='DP2',
                           description='some new dp')

        dp_id2 = self.dpsc_cli.create_data_product(dp_obj, ctd_stream_def_id)
        self.dpsc_cli.activate_data_product_persistence(dp_id2)
        log.debug('new dp_id = %s' % dp_id2)

        #------------------------------------------------------------------------------------------------
        #make sure data product is associated with stream def
        #------------------------------------------------------------------------------------------------
        streamdefs = []
        streams, _ = self.rrclient.find_objects(dp_id2, PRED.hasStream,
                                                RT.Stream, True)
        for s in streams:
            log.debug("Checking stream %s" % s)
            sdefs, _ = self.rrclient.find_objects(s, PRED.hasStreamDefinition,
                                                  RT.StreamDefinition, True)
            for sd in sdefs:
                log.debug("Checking streamdef %s" % sd)
                streamdefs.append(sd)
        self.assertIn(ctd_stream_def_id, streamdefs)

        group_names = self.dpsc_cli.get_data_product_group_list()
        self.assertIn("PRODNAME", group_names)

        #----------------------------------------------------------------------------------------
        # Create users then notifications to this data product for each user
        #----------------------------------------------------------------------------------------

        # user_1
        user_1 = UserInfo()
        user_1.name = 'user_1'
        user_1.contact.email = '*****@*****.**'

        # user_2
        user_2 = UserInfo()
        user_2.name = 'user_2'
        user_2.contact.email = '*****@*****.**'
        # user_1 is a complete user: an actor identity with registered
        # credentials; user_2 is only a bare UserInfo resource
        self.subject = "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254"
        actor_identity_obj = IonObject("ActorIdentity", {"name": self.subject})
        actor_id = self.identcli.create_actor_identity(actor_identity_obj)

        user_credentials_obj = IonObject("UserCredentials",
                                         {"name": self.subject})
        self.identcli.register_user_credentials(actor_id, user_credentials_obj)
        user_id_1 = self.identcli.create_user_info(actor_id, user_1)
        user_id_2, _ = self.rrclient.create(user_2)

        delivery_config1a = IonObject(
            OT.DeliveryConfiguration,
            email='*****@*****.**',
            mode=DeliveryModeEnum.EMAIL,
            frequency=NotificationFrequencyEnum.BATCH)
        delivery_config1b = IonObject(
            OT.DeliveryConfiguration,
            email='*****@*****.**',
            mode=DeliveryModeEnum.EMAIL,
            frequency=NotificationFrequencyEnum.BATCH)
        notification_request_1 = NotificationRequest(
            name="notification_1",
            origin=dp_id,
            origin_type="type_1",
            event_type=OT.ResourceLifecycleEvent,
            disabled_by_system=False,
            delivery_configurations=[delivery_config1a, delivery_config1b])

        delivery_config2a = IonObject(
            OT.DeliveryConfiguration,
            email='*****@*****.**',
            mode=DeliveryModeEnum.EMAIL,
            frequency=NotificationFrequencyEnum.BATCH)
        delivery_config2b = IonObject(
            OT.DeliveryConfiguration,
            email='*****@*****.**',
            mode=DeliveryModeEnum.EMAIL,
            frequency=NotificationFrequencyEnum.BATCH)
        notification_request_2 = NotificationRequest(
            name="notification_2",
            origin=dp_id,
            origin_type="type_2",
            disabled_by_system=False,
            event_type=OT.DetectionEvent,
            delivery_configurations=[delivery_config2a, delivery_config2b])

        notification_request_1_id = self.unsc.create_notification(
            notification=notification_request_1, user_id=user_id_1)
        notification_request_2_id = self.unsc.create_notification(
            notification=notification_request_2, user_id=user_id_2)
        self.unsc.delete_notification(notification_request_1_id)
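        # deleting notification_request_1 retires it, so the extension checks
        # further below should see one active (user_2) and one past (user_1) subscription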

        # test reading a non-existent data product
        log.debug('reading non-existent data product')

        with self.assertRaises(NotFound):
            dp_obj = self.dpsc_cli.read_data_product('some_fake_id')

        # update a data product (tests read also)
        log.debug('Updating data product')
        # first get the existing dp object
        dp_obj = self.dpsc_cli.read_data_product(dp_id)

        # now tweak the object
        dp_obj.description = 'the very first dp'
        dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 20.0
        dp_obj.geospatial_bounds.geospatial_latitude_limit_south = -20.0
        dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 20.0
        dp_obj.geospatial_bounds.geospatial_longitude_limit_west = -20.0
        # now write the dp back to the registry
        update_result = self.dpsc_cli.update_data_product(dp_obj)

        # now get the dp back to see if it was updated
        dp_obj = self.dpsc_cli.read_data_product(dp_id)
        self.assertEquals(dp_obj.description, 'the very first dp')
        self.assertEquals(dp_obj.geospatial_point_center.lat, 0.0)
        log.debug('Updated data product %s', dp_obj)

        #test extension
        extended_product = self.dpsc_cli.get_data_product_extension(dp_id)
        #validate that there is one active and one retired user notification for this data product
        self.assertEqual(
            1, len(extended_product.computed.active_user_subscriptions.value))
        self.assertEqual(
            1, len(extended_product.computed.past_user_subscriptions.value))

        self.assertEqual(dp_id, extended_product._id)
        self.assertEqual(
            ComputedValueAvailability.PROVIDED,
            extended_product.computed.product_download_size_estimated.status)
        self.assertEqual(
            0, extended_product.computed.product_download_size_estimated.value)

        self.assertEqual(ComputedValueAvailability.PROVIDED,
                         extended_product.computed.parameters.status)

        #log.debug("test_create_data_product: parameters %s" % extended_product.computed.parameters.value)

        def ion_object_encoder(obj):
            return obj.__dict__
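
        # ion_object_encoder lets simplejson serialize IonObjects by dumping
        # their __dict__; a hedged usage sketch (simplejson assumed importable,
        # as the commented-out dumps calls below suggest):
        #   import simplejson
        #   as_json = simplejson.dumps(obj, default=ion_object_encoder, indent=2)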

        #test prepare for create
        data_product_data = self.dpsc_cli.prepare_data_product_support()

        #print simplejson.dumps(data_product_data, default=ion_object_encoder, indent= 2)

        self.assertEqual(data_product_data._id, "")
        self.assertEqual(data_product_data.type_, OT.DataProductPrepareSupport)
        self.assertEqual(
            len(data_product_data.associations['StreamDefinition'].resources),
            2)
        self.assertEqual(
            len(data_product_data.associations['Dataset'].resources), 0)
        self.assertEqual(
            len(data_product_data.associations['StreamDefinition'].
                associated_resources), 0)
        self.assertEqual(
            len(data_product_data.associations['Dataset'].associated_resources
                ), 0)

        #test prepare for update
        data_product_data = self.dpsc_cli.prepare_data_product_support(dp_id)

        #print simplejson.dumps(data_product_data, default=ion_object_encoder, indent= 2)

        self.assertEqual(data_product_data._id, dp_id)
        self.assertEqual(data_product_data.type_, OT.DataProductPrepareSupport)
        self.assertEqual(
            len(data_product_data.associations['StreamDefinition'].resources),
            2)

        self.assertEqual(
            len(data_product_data.associations['Dataset'].resources), 1)

        self.assertEqual(
            len(data_product_data.associations['StreamDefinition'].
                associated_resources), 1)
        self.assertEqual(
            data_product_data.associations['StreamDefinition'].
            associated_resources[0].s, dp_id)

        self.assertEqual(
            len(data_product_data.associations['Dataset'].associated_resources
                ), 1)
        self.assertEqual(
            data_product_data.associations['Dataset'].associated_resources[0].
            s, dp_id)

        # now 'delete' the data product
        log.debug("deleting data product: %s" % dp_id)
        self.dpsc_cli.delete_data_product(dp_id)

        # Assert that there are no associated streams leftover after deleting the data product
        stream_ids, assoc_ids = self.rrclient.find_objects(
            dp_id, PRED.hasStream, RT.Stream, True)
        self.assertEquals(len(stream_ids), 0)
        self.assertEquals(len(assoc_ids), 0)

        self.dpsc_cli.force_delete_data_product(dp_id)

        # now try to get the deleted dp object
        with self.assertRaises(NotFound):
            dp_obj = self.dpsc_cli.read_data_product(dp_id)

        # Get the events corresponding to the data product
        ret = self.unsc.get_recent_events(resource_id=dp_id)
        events = ret.value

        for event in events:
            log.debug("event time: %s" % event.ts_created)

        self.assertTrue(len(events) > 0)

    def test_data_product_stream_def(self):
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(
            name='Simulated CTD data', parameter_dictionary_id=pdict_id)

        dp_obj = IonObject(RT.DataProduct,
                           name='DP1',
                           description='some new dp')
        dp_id = self.dpsc_cli.create_data_product(
            data_product=dp_obj, stream_definition_id=ctd_stream_def_id)

        stream_def_id = self.dpsc_cli.get_data_product_stream_definition(dp_id)
        self.assertEquals(ctd_stream_def_id, stream_def_id)

    def test_derived_data_product(self):
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(
            name='ctd parsed', parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsubcli.delete_stream_definition,
                        ctd_stream_def_id)

        dp = DataProduct(name='Instrument DP')
        dp_id = self.dpsc_cli.create_data_product(
            dp, stream_definition_id=ctd_stream_def_id)
        self.addCleanup(self.dpsc_cli.force_delete_data_product, dp_id)

        self.dpsc_cli.activate_data_product_persistence(dp_id)
        self.addCleanup(self.dpsc_cli.suspend_data_product_persistence, dp_id)

        dataset_ids, _ = self.rrclient.find_objects(subject=dp_id,
                                                    predicate=PRED.hasDataset,
                                                    id_only=True)
        if not dataset_ids:
            raise NotFound("Data Product %s dataset  does not exist" %
                           str(dp_id))
        dataset_id = dataset_ids[0]

        # Make the derived data product
        simple_stream_def_id = self.pubsubcli.create_stream_definition(
            name='TEMPWAT stream def',
            parameter_dictionary_id=pdict_id,
            available_fields=['time', 'temp'])
        tempwat_dp = DataProduct(name='TEMPWAT',
                                 category=DataProductTypeEnum.DERIVED)
        tempwat_dp_id = self.dpsc_cli.create_data_product(
            tempwat_dp,
            stream_definition_id=simple_stream_def_id,
            parent_data_product_id=dp_id)
        self.addCleanup(self.dpsc_cli.delete_data_product, tempwat_dp_id)
        # Check that the streams associated with the data product are persisted
        stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream,
                                                   RT.Stream, True)
        for stream_id in stream_ids:
            self.assertTrue(self.ingestclient.is_persisted(stream_id))

        stream_id = stream_ids[0]
        route = self.pubsubcli.read_stream_route(stream_id=stream_id)

        rdt = RecordDictionaryTool(stream_definition_id=ctd_stream_def_id)
        rdt['time'] = np.arange(20)
        rdt['temp'] = np.arange(20)
        rdt['pressure'] = np.arange(20)

        publisher = StandaloneStreamPublisher(stream_id, route)

        dataset_modified = Event()

        def cb(*args, **kwargs):
            dataset_modified.set()

        es = EventSubscriber(event_type=OT.DatasetModified,
                             callback=cb,
                             origin=dataset_id,
                             auto_delete=True)
        es.start()
        self.addCleanup(es.stop)

        publisher.publish(rdt.to_granule())

        self.assertTrue(dataset_modified.wait(30))
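        # dataset_modified is set by the DatasetModified subscriber callback;
        # wait(30) only returns True if ingestion persisted the published
        # granule within the timeout, so this asserts end-to-end persistence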

        tempwat_dataset_ids, _ = self.rrclient.find_objects(tempwat_dp_id,
                                                            PRED.hasDataset,
                                                            id_only=True)
        tempwat_dataset_id = tempwat_dataset_ids[0]
        granule = self.data_retriever.retrieve(
            tempwat_dataset_id, delivery_format=simple_stream_def_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        np.testing.assert_array_equal(rdt['time'], np.arange(20))
        self.assertEquals(set(rdt.fields), set(['time', 'temp']))
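        # The derived product shares the parent's dataset; retrieving it with
        # the narrower TEMPWAT stream definition returns only the requested
        # fields, which the set-equality check above verifies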

    def test_activate_suspend_data_product(self):

        #------------------------------------------------------------------------------------------------
        # create a stream definition for the data from the ctd simulator
        #------------------------------------------------------------------------------------------------
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(
            name='Simulated CTD data', parameter_dictionary_id=pdict_id)
        log.debug("Created stream def id %s" % ctd_stream_def_id)

        #------------------------------------------------------------------------------------------------
        # test creating a new data product
        #------------------------------------------------------------------------------------------------

        dp_obj = IonObject(RT.DataProduct,
                           name='DP1',
                           description='some new dp')

        log.debug("Created an IonObject for a data product: %s" % dp_obj)

        #------------------------------------------------------------------------------------------------
        # Create the data product, backed by the CTD stream definition
        #------------------------------------------------------------------------------------------------

        dp_id = self.dpsc_cli.create_data_product(
            data_product=dp_obj, stream_definition_id=ctd_stream_def_id)

        #------------------------------------------------------------------------------------------------
        # Subscribe to persist events
        #------------------------------------------------------------------------------------------------
        queue = gevent.queue.Queue()

        def info_event_received(message, headers):
            queue.put(message)

        es = EventSubscriber(event_type=OT.InformationContentStatusEvent,
                             callback=info_event_received,
                             origin=dp_id,
                             auto_delete=True)
        es.start()
        self.addCleanup(es.stop)
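        # InformationContentStatusEvents for this product are collected in the
        # queue as they arrive and are counted at the end of the test.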

        #------------------------------------------------------------------------------------------------
        # test activate and suspend data product persistence
        #------------------------------------------------------------------------------------------------
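        # Activating persistence attaches the product's stream to ingestion so
        # that published granules are written to its dataset.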
        self.dpsc_cli.activate_data_product_persistence(dp_id)

        dp_obj = self.dpsc_cli.read_data_product(dp_id)
        self.assertIsNotNone(dp_obj)

        dataset_ids, _ = self.rrclient.find_objects(subject=dp_id,
                                                    predicate=PRED.hasDataset,
                                                    id_only=True)
        if not dataset_ids:
            raise NotFound("Dataset for data product %s does not exist" %
                           str(dp_id))
        dataset_id = dataset_ids[0]

        # Check that the streams associated with the data product are persisted
        # by ingestion
        stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream,
                                                   RT.Stream, True)
        for stream_id in stream_ids:
            self.assertTrue(self.ingestclient.is_persisted(stream_id))

        stream_id = stream_ids[0]
        route = self.pubsubcli.read_stream_route(stream_id=stream_id)

        rdt = RecordDictionaryTool(stream_definition_id=ctd_stream_def_id)
        rdt['time'] = np.arange(20)
        rdt['temp'] = np.arange(20)

        publisher = StandaloneStreamPublisher(stream_id, route)

        dataset_modified = Event()

        def cb(*args, **kwargs):
            dataset_modified.set()

        es = EventSubscriber(event_type=OT.DatasetModified,
                             callback=cb,
                             origin=dataset_id,
                             auto_delete=True)
        es.start()
        self.addCleanup(es.stop)

        publisher.publish(rdt.to_granule())

        self.assertTrue(dataset_modified.wait(30))

        #--------------------------------------------------------------------------------
        # Now get the data in one chunk using an RPC call to the data retriever
        #--------------------------------------------------------------------------------

        replay_data = self.data_retriever.retrieve(dataset_ids[0])
        self.assertIsInstance(replay_data, Granule)

        log.debug(
            "The data retriever was able to replay the dataset attached to the data product "
            "we asked to be persisted, so the data product was indeed persisted; otherwise "
            "its dataset could not have been retrieved. This demonstrates that "
            "L4-CI-SA-RQ-267 is satisfied: 'Data product management shall persist data products'"
        )

        data_product_object = self.rrclient.read(dp_id)
        self.assertEquals(data_product_object.name, 'DP1')
        self.assertEquals(data_product_object.description, 'some new dp')

        log.debug(
            "Towards L4-CI-SA-RQ-308: 'Data product management shall persist data product "
            "metadata'. Attributes given at creation, name='%s', description='%s', match "
            "those of the object read from the resource registry, name='%s', description='%s'" %
            (dp_obj.name, dp_obj.description, data_product_object.name,
             data_product_object.description))

        #------------------------------------------------------------------------------------------------
        # test suspend data product persistence
        #------------------------------------------------------------------------------------------------
        self.dpsc_cli.suspend_data_product_persistence(dp_id)
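        # While persistence is suspended, published granules are not ingested,
        # so the DatasetModified event below must not fire.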

        dataset_modified.clear()

        rdt['time'] = np.arange(20, 40)

        publisher.publish(rdt.to_granule())
        self.assertFalse(dataset_modified.wait(2))

        self.dpsc_cli.activate_data_product_persistence(dp_id)
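        # With persistence reactivated, the republished granule (time 20..39)
        # is ingested, completing the 0..39 time axis checked below.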
        dataset_modified.clear()

        publisher.publish(rdt.to_granule())
        self.assertTrue(dataset_modified.wait(30))

        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        np.testing.assert_array_almost_equal(rdt['time'], np.arange(40))

        dataset_ids, _ = self.rrclient.find_objects(dp_id,
                                                    PRED.hasDataset,
                                                    id_only=True)
        self.assertEquals(len(dataset_ids), 1)

        self.dpsc_cli.suspend_data_product_persistence(dp_id)
        self.dpsc_cli.force_delete_data_product(dp_id)
        # Reading the force-deleted data product should now raise NotFound

        with self.assertRaises(NotFound):
            dp_obj = self.rrclient.read(dp_id)

        info_event_counter = 0
        runtime = 0
        starttime = time.time()
        caught_events = []

        # Check that the four InformationContentStatusEvents were received, one
        # per persistence transition (activate, suspend, activate, suspend)
        while info_event_counter < 4 and runtime < 60:
            a = queue.get(timeout=60)
            caught_events.append(a)
            info_event_counter += 1
            runtime = time.time() - starttime

        self.assertEquals(info_event_counter, 4)
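
        # A minimal extra sanity check, assuming the standard pyon Event
        # 'origin' attribute: every caught event should originate from this
        # data product, since the subscriber above filtered on origin=dp_id.
        for event in caught_events:
            self.assertEquals(event.origin, dp_id)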