class TestDataProductManagementServiceIntegration(IonIntegrationTestCase):
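    """Integration tests for DataProductManagementService: creation, persistence,
    notifications, derived products, and activate/suspend behavior."""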

    def setUp(self):
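        """Start the container, deploy the r2deploy services, build the service clients,
        and launch a science-granule ingestion worker."""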
        # Start container
        #print 'instantiating container'
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.dpsc_cli           = DataProductManagementServiceClient()
        self.rrclient           = ResourceRegistryServiceClient()
        self.damsclient         = DataAcquisitionManagementServiceClient()
        self.pubsubcli          = PubsubManagementServiceClient()
        self.ingestclient       = IngestionManagementServiceClient()
        self.process_dispatcher = ProcessDispatcherServiceClient()
        self.dataset_management = DatasetManagementServiceClient()
        self.unsc               = UserNotificationServiceClient()
        self.data_retriever     = DataRetrieverServiceClient()
        self.identcli           = IdentityManagementServiceClient()

        #------------------------------------------
        # Create the environment
        #------------------------------------------

        self.stream_def_id = self.pubsubcli.create_stream_definition(name='SBE37_CDM')

        self.process_definitions  = {}
        ingestion_worker_definition = ProcessDefinition(name='ingestion worker')
        ingestion_worker_definition.executable = {
            'module':'ion.processes.data.ingestion.science_granule_ingestion_worker',
            'class' :'ScienceGranuleIngestionWorker'
        }
        process_definition_id = self.process_dispatcher.create_process_definition(process_definition=ingestion_worker_definition)
        self.process_definitions['ingestion_worker'] = process_definition_id

        self.pids = []
        self.exchange_points = []
        self.exchange_names = []

        #------------------------------------------------------------------------------------------------
        # First launch the ingestors
        #------------------------------------------------------------------------------------------------
        self.exchange_space       = 'science_granule_ingestion'
        self.exchange_point       = 'science_data'
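        # Configuration for the ingestion worker: the datastore to write to and the queue to consume from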
        config = DotDict()
        config.process.datastore_name = 'datasets'
        config.process.queue_name = self.exchange_space

        self.exchange_names.append(self.exchange_space)
        self.exchange_points.append(self.exchange_point)

        pid = self.process_dispatcher.schedule_process(self.process_definitions['ingestion_worker'],configuration=config)
        log.debug("the ingestion worker process id: %s", pid)
        self.pids.append(pid)

        self.addCleanup(self.cleaning_up)

    def cleaning_up(self):
        log.debug("number of pids to be terminated: %s", len(self.pids))
        for pid in self.pids:
            try:
                self.process_dispatcher.cancel_process(pid)
                log.debug("Terminated the process: %s", pid)
            except Exception:  # the process may already have exited
                log.debug("could not terminate the process id: %s", pid)
        IngestionManagementIntTest.clean_subscriptions()

        for xn in self.exchange_names:
            xni = self.container.ex_manager.create_xn_queue(xn)
            xni.delete()
        for xp in self.exchange_points:
            xpi = self.container.ex_manager.create_xp(xp)
            xpi.delete()

    def get_datastore(self, dataset_id):
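        """Look up the dataset and return the science-data datastore that backs it."""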
        dataset = self.dataset_management.read_dataset(dataset_id)
        datastore_name = dataset.datastore_name
        datastore = self.container.datastore_manager.get_datastore(datastore_name, DataStore.DS_PROFILE.SCIDATA)
        return datastore


    @attr('EXT')
    @attr('PREP')
    def test_create_data_product(self):
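        """End-to-end exercise of the data product lifecycle: create, persist, attach notifications,
        read/update, inspect the extension and prepare-support views, then delete and check events."""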

        #------------------------------------------------------------------------------------------------
        # create a stream definition for the data from the ctd simulator
        #------------------------------------------------------------------------------------------------
        parameter_dictionary = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict')
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=parameter_dictionary._id)
        log.debug("Created stream def id %s" % ctd_stream_def_id)

        #------------------------------------------------------------------------------------------------
        # test creating a new data product, supplying the stream definition created above
        #------------------------------------------------------------------------------------------------

        dp_obj = IonObject(RT.DataProduct,
            name='DP1',
            description='some new dp')

        dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 10.0
        dp_obj.geospatial_bounds.geospatial_latitude_limit_south = -10.0
        dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 10.0
        dp_obj.geospatial_bounds.geospatial_longitude_limit_west = -10.0
        dp_obj.ooi_product_name = "PRODNAME"

        #------------------------------------------------------------------------------------------------
        # Create the data product and check its stream and stream-definition associations
        #------------------------------------------------------------------------------------------------

        dp_id = self.dpsc_cli.create_data_product(data_product=dp_obj,
                                                  stream_definition_id=ctd_stream_def_id)
        # Assert that the data product has an associated stream at this stage
        stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, RT.Stream, True)
        self.assertNotEquals(len(stream_ids), 0)

        # Assert that the data product has an associated stream def at this stage
        stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStreamDefinition, RT.StreamDefinition, True)
        self.assertNotEquals(len(stream_ids), 0)

        self.dpsc_cli.activate_data_product_persistence(dp_id)

        dp_obj = self.dpsc_cli.read_data_product(dp_id)
        self.assertIsNotNone(dp_obj)
        self.assertEquals(dp_obj.geospatial_point_center.lat, 0.0)
        log.debug('Created data product %s', dp_obj)
        #------------------------------------------------------------------------------------------------
        # test creating a new data product with a stream definition
        #------------------------------------------------------------------------------------------------
        log.debug('Creating new data product with a stream definition')
        dp_obj = IonObject(RT.DataProduct,
            name='DP2',
            description='some new dp')

        dp_id2 = self.dpsc_cli.create_data_product(dp_obj, ctd_stream_def_id)
        self.dpsc_cli.activate_data_product_persistence(dp_id2)
        log.debug('new dp_id = %s' % dp_id2)

        #------------------------------------------------------------------------------------------------
        #make sure data product is associated with stream def
        #------------------------------------------------------------------------------------------------
        streamdefs = []
        streams, _ = self.rrclient.find_objects(dp_id2, PRED.hasStream, RT.Stream, True)
        for s in streams:
            log.debug("Checking stream %s" % s)
            sdefs, _ = self.rrclient.find_objects(s, PRED.hasStreamDefinition, RT.StreamDefinition, True)
            for sd in sdefs:
                log.debug("Checking streamdef %s" % sd)
                streamdefs.append(sd)
        self.assertIn(ctd_stream_def_id, streamdefs)

        group_names = self.dpsc_cli.get_data_product_group_list()
        self.assertIn("PRODNAME", group_names)


        #----------------------------------------------------------------------------------------
        # Create users then notifications to this data product for each user
        #----------------------------------------------------------------------------------------

        # user_1
        user_1 = UserInfo()
        user_1.name = 'user_1'
        user_1.contact.email = '*****@*****.**'

        # user_2
        user_2 = UserInfo()
        user_2.name = 'user_2'
        user_2.contact.email = '*****@*****.**'
        # user_1 is a complete user: an actor identity with registered credentials and a UserInfo
        self.subject = "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254"
        actor_identity_obj = IonObject("ActorIdentity", {"name": self.subject})
        actor_id = self.identcli.create_actor_identity(actor_identity_obj)

        user_credentials_obj = IonObject("UserCredentials", {"name": self.subject})
        self.identcli.register_user_credentials(actor_id, user_credentials_obj)
        user_id_1 = self.identcli.create_user_info(actor_id, user_1)
        user_id_2, _ = self.rrclient.create(user_2)

        delivery_config1a = IonObject(OT.DeliveryConfiguration, email='*****@*****.**', mode=DeliveryModeEnum.EMAIL, frequency=NotificationFrequencyEnum.BATCH)
        delivery_config1b = IonObject(OT.DeliveryConfiguration, email='*****@*****.**', mode=DeliveryModeEnum.EMAIL, frequency=NotificationFrequencyEnum.BATCH)
        notification_request_1 = NotificationRequest(name="notification_1",
            origin=dp_id,
            origin_type="type_1",
            event_type=OT.ResourceLifecycleEvent,
            disabled_by_system=False,
            delivery_configurations=[delivery_config1a, delivery_config1b])

        delivery_config2a = IonObject(OT.DeliveryConfiguration, email='*****@*****.**', mode=DeliveryModeEnum.EMAIL, frequency=NotificationFrequencyEnum.BATCH)
        delivery_config2b = IonObject(OT.DeliveryConfiguration, email='*****@*****.**', mode=DeliveryModeEnum.EMAIL, frequency=NotificationFrequencyEnum.BATCH)
        notification_request_2 = NotificationRequest(name="notification_2",
            origin=dp_id,
            origin_type="type_2",
            event_type=OT.DetectionEvent,
            disabled_by_system=False,
            delivery_configurations=[delivery_config2a, delivery_config2b])

        notification_request_1_id = self.unsc.create_notification(notification=notification_request_1, user_id=user_id_1)
        notification_request_2_id = self.unsc.create_notification(notification=notification_request_2, user_id=user_id_2)
        self.unsc.delete_notification(notification_request_1_id)

        # test reading a non-existent data product
        log.debug('reading non-existent data product')

        with self.assertRaises(NotFound):
            dp_obj = self.dpsc_cli.read_data_product('some_fake_id')

        # update a data product (tests read also)
        log.debug('Updating data product')
        # first get the existing dp object
        dp_obj = self.dpsc_cli.read_data_product(dp_id)

        # now tweak the object
        dp_obj.description = 'the very first dp'
        dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 20.0
        dp_obj.geospatial_bounds.geospatial_latitude_limit_south = -20.0
        dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 20.0
        dp_obj.geospatial_bounds.geospatial_longitude_limit_west = -20.0
        # now write the dp back to the registry
        update_result = self.dpsc_cli.update_data_product(dp_obj)


        # now get the dp back to see if it was updated
        dp_obj = self.dpsc_cli.read_data_product(dp_id)
        self.assertEquals(dp_obj.description,'the very first dp')
        self.assertEquals(dp_obj.geospatial_point_center.lat, 0.0)
        log.debug('Updated data product %s', dp_obj)

        #test extension
        extended_product = self.dpsc_cli.get_data_product_extension(dp_id)
        #validate that there is one active and one retired user notification for this data product
        self.assertEqual(1, len(extended_product.computed.active_user_subscriptions.value))
        self.assertEqual(1, len(extended_product.computed.past_user_subscriptions.value))

        self.assertEqual(dp_id, extended_product._id)
        self.assertEqual(ComputedValueAvailability.PROVIDED,
                         extended_product.computed.product_download_size_estimated.status)
        self.assertEqual(0, extended_product.computed.product_download_size_estimated.value)

        self.assertEqual(ComputedValueAvailability.PROVIDED,
                         extended_product.computed.parameters.status)
        #log.debug("test_create_data_product: parameters %s" % extended_product.computed.parameters.value)


        def ion_object_encoder(obj):
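            """Fallback serializer so simplejson can dump Ion objects (see the commented-out dumps below)."""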
            return obj.__dict__


        #test prepare for create
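        # Called without a data product id, prepare_data_product_support returns an empty template:
        # no id and no existing associations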
        data_product_data = self.dpsc_cli.prepare_data_product_support()

        #print simplejson.dumps(data_product_data, default=ion_object_encoder, indent= 2)

        self.assertEqual(data_product_data._id, "")
        self.assertEqual(data_product_data.type_, OT.DataProductPrepareSupport)
        self.assertEqual(len(data_product_data.associations['StreamDefinition'].resources), 2)
        self.assertEqual(len(data_product_data.associations['Dataset'].resources), 0)
        self.assertEqual(len(data_product_data.associations['StreamDefinition'].associated_resources), 0)
        self.assertEqual(len(data_product_data.associations['Dataset'].associated_resources), 0)

        #test prepare for update
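        # Called with a data product id, the support object reflects the product's existing
        # stream definition and dataset associations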
        data_product_data = self.dpsc_cli.prepare_data_product_support(dp_id)

        #print simplejson.dumps(data_product_data, default=ion_object_encoder, indent= 2)

        self.assertEqual(data_product_data._id, dp_id)
        self.assertEqual(data_product_data.type_, OT.DataProductPrepareSupport)
        self.assertEqual(len(data_product_data.associations['StreamDefinition'].resources), 2)

        self.assertEqual(len(data_product_data.associations['Dataset'].resources), 1)

        self.assertEqual(len(data_product_data.associations['StreamDefinition'].associated_resources), 1)
        self.assertEqual(data_product_data.associations['StreamDefinition'].associated_resources[0].s, dp_id)

        self.assertEqual(len(data_product_data.associations['Dataset'].associated_resources), 1)
        self.assertEqual(data_product_data.associations['Dataset'].associated_resources[0].s, dp_id)

        # now 'delete' the data product
        log.debug("deleting data product: %s" % dp_id)
        self.dpsc_cli.delete_data_product(dp_id)

        # Assert that there are no associated streams leftover after deleting the data product
        stream_ids, assoc_ids = self.rrclient.find_objects(dp_id, PRED.hasStream, RT.Stream, True)
        self.assertEquals(len(stream_ids), 0)
        self.assertEquals(len(assoc_ids), 0)

        self.dpsc_cli.force_delete_data_product(dp_id)

        # now try to get the deleted dp object
        with self.assertRaises(NotFound):
            dp_obj = self.dpsc_cli.read_data_product(dp_id)

        # Get the events corresponding to the data product
        ret = self.unsc.get_recent_events(resource_id=dp_id)
        events = ret.value

        for event in events:
            log.debug("event time: %s" % event.ts_created)

        self.assertTrue(len(events) > 0)

    def test_data_product_stream_def(self):
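        """The stream definition supplied at creation is retrievable via get_data_product_stream_definition."""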
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=pdict_id)


        dp_obj = IonObject(RT.DataProduct,
            name='DP1',
            description='some new dp')
        dp_id = self.dpsc_cli.create_data_product(data_product= dp_obj,
            stream_definition_id=ctd_stream_def_id)

        stream_def_id = self.dpsc_cli.get_data_product_stream_definition(dp_id)
        self.assertEquals(ctd_stream_def_id, stream_def_id)


    def test_derived_data_product(self):
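        """Create a parent data product and a DERIVED product with a reduced field set, publish to the
        parent stream, and verify the derived product's dataset exposes only the available fields
        (time, temp)."""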
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='ctd parsed', parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsubcli.delete_stream_definition, ctd_stream_def_id)


        dp = DataProduct(name='Instrument DP')
        dp_id = self.dpsc_cli.create_data_product(dp, stream_definition_id=ctd_stream_def_id)
        self.addCleanup(self.dpsc_cli.force_delete_data_product, dp_id)

        self.dpsc_cli.activate_data_product_persistence(dp_id)
        self.addCleanup(self.dpsc_cli.suspend_data_product_persistence, dp_id)


        dataset_ids, _ = self.rrclient.find_objects(subject=dp_id, predicate=PRED.hasDataset, id_only=True)
        if not dataset_ids:
            raise NotFound("Data Product %s dataset does not exist" % str(dp_id))
        dataset_id = dataset_ids[0]
        
        # Make the derived data product
        simple_stream_def_id = self.pubsubcli.create_stream_definition(name='TEMPWAT stream def', parameter_dictionary_id=pdict_id, available_fields=['time','temp'])
        tempwat_dp = DataProduct(name='TEMPWAT', category=DataProductTypeEnum.DERIVED)
        tempwat_dp_id = self.dpsc_cli.create_data_product(tempwat_dp, stream_definition_id=simple_stream_def_id, parent_data_product_id=dp_id)
        self.addCleanup(self.dpsc_cli.delete_data_product, tempwat_dp_id)
        # Check that the streams associated with the data product are being persisted
        stream_ids, _ =  self.rrclient.find_objects(dp_id,PRED.hasStream,RT.Stream,True)
        for stream_id in stream_ids:
            self.assertTrue(self.ingestclient.is_persisted(stream_id))

        stream_id = stream_ids[0]
        route = self.pubsubcli.read_stream_route(stream_id=stream_id)

        rdt = RecordDictionaryTool(stream_definition_id=ctd_stream_def_id)
        rdt['time'] = np.arange(20)
        rdt['temp'] = np.arange(20)
        rdt['pressure'] = np.arange(20)

        publisher = StandaloneStreamPublisher(stream_id,route)
        
        dataset_modified = Event()
        def cb(*args, **kwargs):
            dataset_modified.set()
        es = EventSubscriber(event_type=OT.DatasetModified, callback=cb, origin=dataset_id, auto_delete=True)
        es.start()
        self.addCleanup(es.stop)

        publisher.publish(rdt.to_granule())
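        # The DatasetModified subscriber above signals when ingestion has written the granule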

        self.assertTrue(dataset_modified.wait(30))

        tempwat_dataset_ids, _ = self.rrclient.find_objects(tempwat_dp_id, PRED.hasDataset, id_only=True)
        tempwat_dataset_id = tempwat_dataset_ids[0]
        granule = self.data_retriever.retrieve(tempwat_dataset_id, delivery_format=simple_stream_def_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        np.testing.assert_array_equal(rdt['time'], np.arange(20))
        self.assertEquals(set(rdt.fields), set(['time','temp']))


    def test_activate_suspend_data_product(self):
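        """Activate persistence on a data product, publish data and verify it can be replayed, suspend
        persistence and verify no further data is recorded, re-activate, then check the
        InformationContentStatusEvents raised along the way."""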

        #------------------------------------------------------------------------------------------------
        # create a stream definition for the data from the ctd simulator
        #------------------------------------------------------------------------------------------------
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=pdict_id)
        log.debug("Created stream def id %s" % ctd_stream_def_id)

        #------------------------------------------------------------------------------------------------
        # test creating a new data product, supplying the stream definition created above
        #------------------------------------------------------------------------------------------------

        dp_obj = IonObject(RT.DataProduct,
            name='DP1',
            description='some new dp')

        log.debug("Created an IonObject for a data product: %s" % dp_obj)

        #------------------------------------------------------------------------------------------------
        # Create the data product with the stream definition
        #------------------------------------------------------------------------------------------------

        dp_id = self.dpsc_cli.create_data_product(data_product=dp_obj,
            stream_definition_id=ctd_stream_def_id)

        #------------------------------------------------------------------------------------------------
        # Subscribe to persist events
        #------------------------------------------------------------------------------------------------
        queue = gevent.queue.Queue()

        def info_event_received(message, headers):
            queue.put(message)

        es = EventSubscriber(event_type=OT.InformationContentStatusEvent, callback=info_event_received, origin=dp_id, auto_delete=True)
        es.start()
        self.addCleanup(es.stop)


        #------------------------------------------------------------------------------------------------
        # test activate and suspend data product persistence
        #------------------------------------------------------------------------------------------------
        self.dpsc_cli.activate_data_product_persistence(dp_id)
        
        dp_obj = self.dpsc_cli.read_data_product(dp_id)
        self.assertIsNotNone(dp_obj)

        dataset_ids, _ = self.rrclient.find_objects(subject=dp_id, predicate=PRED.hasDataset, id_only=True)
        if not dataset_ids:
            raise NotFound("Data Product %s dataset does not exist" % str(dp_id))
        dataset_id = dataset_ids[0]


        # Check that the streams associated with the data product are being persisted
        stream_ids, _ =  self.rrclient.find_objects(dp_id,PRED.hasStream,RT.Stream,True)
        for stream_id in stream_ids:
            self.assertTrue(self.ingestclient.is_persisted(stream_id))

        stream_id = stream_ids[0]
        route = self.pubsubcli.read_stream_route(stream_id=stream_id)

        rdt = RecordDictionaryTool(stream_definition_id=ctd_stream_def_id)
        rdt['time'] = np.arange(20)
        rdt['temp'] = np.arange(20)

        publisher = StandaloneStreamPublisher(stream_id,route)
        
        dataset_modified = Event()
        def cb(*args, **kwargs):
            dataset_modified.set()
        es = EventSubscriber(event_type=OT.DatasetModified, callback=cb, origin=dataset_id, auto_delete=True)
        es.start()
        self.addCleanup(es.stop)

        publisher.publish(rdt.to_granule())

        self.assertTrue(dataset_modified.wait(30))

        #--------------------------------------------------------------------------------
        # Now get the data in one chunk using an RPC call to the data retriever
        #--------------------------------------------------------------------------------

        replay_data = self.data_retriever.retrieve(dataset_ids[0])
        self.assertIsInstance(replay_data, Granule)

        log.debug("The data retriever was able to replay the dataset attached to the data product "
                  "we asked to be persisted; otherwise we could not have retrieved its dataset. "
                  "This demonstrates that L4-CI-SA-RQ-267 is satisfied: "
                  "'Data product management shall persist data products'")

        data_product_object = self.rrclient.read(dp_id)
        self.assertEquals(data_product_object.name,'DP1')
        self.assertEquals(data_product_object.description,'some new dp')

        log.debug("Towards L4-CI-SA-RQ-308: 'Data product management shall persist data product metadata'. "
                  "Attributes of the data product object passed to create, name='%s', description='%s', "
                  "match those of the object read from the resource registry, name='%s', description='%s'"
                  % (dp_obj.name, dp_obj.description, data_product_object.name, data_product_object.description))

        #------------------------------------------------------------------------------------------------
        # test suspend data product persistence
        #------------------------------------------------------------------------------------------------
        self.dpsc_cli.suspend_data_product_persistence(dp_id)


        dataset_modified.clear()
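        # With persistence suspended, the granule published below must not trigger a DatasetModified event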

        rdt['time'] = np.arange(20,40)

        publisher.publish(rdt.to_granule())
        self.assertFalse(dataset_modified.wait(2))

        self.dpsc_cli.activate_data_product_persistence(dp_id)
        dataset_modified.clear()

        publisher.publish(rdt.to_granule())
        self.assertTrue(dataset_modified.wait(30))

        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        np.testing.assert_array_almost_equal(rdt['time'], np.arange(40))


        dataset_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasDataset, id_only=True)
        self.assertEquals(len(dataset_ids), 1)

        self.dpsc_cli.suspend_data_product_persistence(dp_id)
        self.dpsc_cli.force_delete_data_product(dp_id)
        # now try to get the deleted dp object

        with self.assertRaises(NotFound):
            dp_obj = self.rrclient.read(dp_id)


        info_event_counter = 0
        runtime = 0
        starttime = time.time()
        caught_events = []

        # check that the four InformationContentStatusEvents were received
        while info_event_counter < 4 and runtime < 60 :
            a = queue.get(timeout=60)
            caught_events.append(a)
            info_event_counter += 1
            runtime = time.time() - starttime

        self.assertEquals(info_event_counter, 4)
Code example #2
class TestIdentityManagementServiceInt(IonIntegrationTestCase):
    
    def setUp(self):
        self.subject = "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254"

        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.resource_registry = ResourceRegistryServiceClient(node=self.container.node)
        self.identity_management_service = IdentityManagementServiceClient(node=self.container.node)
        self.org_client = OrgManagementServiceClient(node=self.container.node)

    def test_actor_identity(self):
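        """Create, read, update, and find an ActorIdentity by name; reads and deletes of a
        removed identity raise NotFound."""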
        actor_identity_obj = IonObject("ActorIdentity", {"name": self.subject})        
        user_id = self.identity_management_service.create_actor_identity(actor_identity_obj)

        actor_identity = self.identity_management_service.read_actor_identity(user_id)

        actor_identity.name = 'Updated subject'
        self.identity_management_service.update_actor_identity(actor_identity)

        ai = self.identity_management_service.find_actor_identity_by_name(actor_identity.name)
        self._baseAssertEqual(ai.name, actor_identity.name)
        with self.assertRaises(NotFound):
            ai = self.identity_management_service.find_actor_identity_by_name("Yeah, well, you know, that's just, like, your opinion, man.")

        self._baseAssertEqual(ai.name, actor_identity.name)

        self.identity_management_service.delete_actor_identity(user_id)
 
        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.read_actor_identity(user_id)
        self.assertTrue("does not exist" in cm.exception.message)
 
        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.delete_actor_identity(user_id)
        self.assertTrue("does not exist" in cm.exception.message)

    def test_user_credentials(self):
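        """Register and unregister UserCredentials for an actor; unregistering with a bad actor id
        or bad subject raises NotFound."""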
        actor_identity_obj = IonObject("ActorIdentity", {"name": self.subject})        
        user_id = self.identity_management_service.create_actor_identity(actor_identity_obj)

        user_credentials_obj = IonObject("UserCredentials", {"name": self.subject})        
        self.identity_management_service.register_user_credentials(user_id, user_credentials_obj)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.unregister_user_credentials("bad", self.subject)
        self.assertTrue("does not exist" in cm.exception.message)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.unregister_user_credentials(user_id, "bad")
        self.assertTrue("does not exist" in cm.exception.message)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.unregister_user_credentials('bad', 'bad')
        self.assertTrue("does not exist" in cm.exception.message)

        self.identity_management_service.unregister_user_credentials(user_id, self.subject)

        self.identity_management_service.delete_actor_identity(user_id)




    def test_user_info(self):
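        """Create, find (by id, name, and subject), update, and delete UserInfo; duplicate creation
        raises Conflict and lookups of missing records raise NotFound."""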
        actor_identity_obj = IonObject("ActorIdentity", {"name": self.subject})
        user_id = self.identity_management_service.create_actor_identity(actor_identity_obj)

        user_credentials_obj = IonObject("UserCredentials", {"name": self.subject})
        self.identity_management_service.register_user_credentials(user_id, user_credentials_obj)

        user_info_obj = IonObject("UserInfo", {"name": "Foo"})
        user_info = self.identity_management_service.create_user_info(user_id, user_info_obj)

        with self.assertRaises(Conflict) as cm:
            self.identity_management_service.create_user_info(user_id, user_info_obj)
        self.assertTrue("UserInfo already exists for user id" in cm.exception.message)

        user_info_obj = self.identity_management_service.find_user_info_by_id(user_id)

        user_info_obj = self.identity_management_service.find_user_info_by_name("Foo")

        user_info_obj = self.identity_management_service.find_user_info_by_subject(self.subject)

        user_info_obj = self.identity_management_service.read_user_info(user_info)

        user_info_obj.name = 'Jane Doe'

        self.identity_management_service.update_user_info(user_info_obj)

        self.identity_management_service.delete_user_info(user_info)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.read_user_info(user_info)
        self.assertTrue('does not exist' in cm.exception.message)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.delete_user_info(user_info)
        self.assertTrue('does not exist' in cm.exception.message)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.find_user_info_by_name("John Doe")
        self.assertEqual(cm.exception.message, 'UserInfo with name John Doe does not exist')

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.find_user_info_by_subject("Bogus subject")
        self.assertEqual(cm.exception.message, "UserCredentials with subject Bogus subject does not exist")

        self.identity_management_service.unregister_user_credentials(user_id, self.subject)

        self.identity_management_service.delete_actor_identity(user_id)


    def test_signon(self):
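        """Sign on twice with the same certificate: the identity is reused, and the 'registered'
        flag only becomes True once a UserInfo exists for the actor."""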
        certificate =  """-----BEGIN CERTIFICATE-----
MIIEMzCCAxugAwIBAgICBQAwDQYJKoZIhvcNAQEFBQAwajETMBEGCgmSJomT8ixkARkWA29yZzEX
MBUGCgmSJomT8ixkARkWB2NpbG9nb24xCzAJBgNVBAYTAlVTMRAwDgYDVQQKEwdDSUxvZ29uMRsw
GQYDVQQDExJDSUxvZ29uIEJhc2ljIENBIDEwHhcNMTAxMTE4MjIyNTA2WhcNMTAxMTE5MTAzMDA2
WjBvMRMwEQYKCZImiZPyLGQBGRMDb3JnMRcwFQYKCZImiZPyLGQBGRMHY2lsb2dvbjELMAkGA1UE
BhMCVVMxFzAVBgNVBAoTDlByb3RlY3ROZXR3b3JrMRkwFwYDVQQDExBSb2dlciBVbndpbiBBMjU0
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA6QhsWxhUXbIxg+1ZyEc7d+hIGvchVmtb
g0kKLmivgoVsA4U7swNDRH6svW242THta0oTf6crkRx7kOKg6jma2lcAC1sjOSddqX7/92ChoUPq
7LWt2T6GVVA10ex5WAeB/o7br/Z4U8/75uCBis+ru7xEDl09PToK20mrkcz9M4HqIv1eSoPkrs3b
2lUtQc6cjuHRDU4NknXaVMXTBHKPM40UxEDHJueFyCiZJFg3lvQuSsAl4JL5Z8pC02T8/bODBuf4
dszsqn2SC8YDw1xrujvW2Bd7Q7BwMQ/gO+dZKM1mLJFpfEsR9WrjMeg6vkD2TMWLMr0/WIkGC8u+
6M6SMQIDAQABo4HdMIHaMAwGA1UdEwEB/wQCMAAwDgYDVR0PAQH/BAQDAgSwMBMGA1UdJQQMMAoG
CCsGAQUFBwMCMBgGA1UdIAQRMA8wDQYLKwYBBAGCkTYBAgEwagYDVR0fBGMwYTAuoCygKoYoaHR0
cDovL2NybC5jaWxvZ29uLm9yZy9jaWxvZ29uLWJhc2ljLmNybDAvoC2gK4YpaHR0cDovL2NybC5k
b2Vncmlkcy5vcmcvY2lsb2dvbi1iYXNpYy5jcmwwHwYDVR0RBBgwFoEUaXRzYWdyZWVuMUB5YWhv
by5jb20wDQYJKoZIhvcNAQEFBQADggEBAEYHQPMY9Grs19MHxUzMwXp1GzCKhGpgyVKJKW86PJlr
HGruoWvx+DLNX75Oj5FC4t8bOUQVQusZGeGSEGegzzfIeOI/jWP1UtIjzvTFDq3tQMNvsgROSCx5
CkpK4nS0kbwLux+zI7BWON97UpMIzEeE05pd7SmNAETuWRsHMP+x6i7hoUp/uad4DwbzNUGIotdK
f8b270icOVgkOKRdLP/Q4r/x8skKSCRz1ZsRdR+7+B/EgksAJj7Ut3yiWoUekEMxCaTdAHPTMD/g
Mh9xL90hfMJyoGemjJswG5g3fAdTP/Lv0I6/nWeH/cLjwwpQgIEjEAVXl7KHuzX5vPD/wqQ=
-----END CERTIFICATE-----"""
        id, valid_until, registered = self.identity_management_service.signon(certificate, True)

        self.assertFalse(registered)

        id2, valid_until2, registered2 = self.identity_management_service.signon(certificate, True)

        self.assertFalse(registered2)
        self.assertTrue(id == id2)
        self.assertTrue(valid_until == valid_until2)

        user_info_obj = IonObject("UserInfo", {"name": "Foo"})        
        self.identity_management_service.create_user_info(id, user_info_obj)

        id3, valid_until3, registered3 = self.identity_management_service.signon(certificate, True)

        self.assertTrue(registered3)
        self.assertTrue(id == id3)
        self.assertTrue(valid_until == valid_until3)

    @attr('EXT')
    def test_get_extended_user_identity(self):
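        """Check the extended user view (roles, open and closed negotiation requests) before and
        after a role-request negotiation is accepted."""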

        actor_identity_obj = IonObject("ActorIdentity", {"name": self.subject})
        actor_id = self.identity_management_service.create_actor_identity(actor_identity_obj)

        user_credentials_obj = IonObject("UserCredentials", {"name": self.subject})
        self.identity_management_service.register_user_credentials(actor_id, user_credentials_obj)

        user_info_obj = IonObject("UserInfo", {"name": "Foo"})
        user_info_id = self.identity_management_service.create_user_info(actor_id, user_info_obj)

        ion_org = self.org_client.find_org()

        #Build the Service Agreement Proposal to request a role but never close it
        sap = IonObject(OT.RequestRoleProposal,consumer=actor_id, provider=ion_org._id, role_name=ORG_MANAGER_ROLE )
        sap_response = self.org_client.negotiate(sap)

        #Just grant the role anyway
        #self.org_client.grant_role(ion_org._id, actor_id, ORG_MANAGER_ROLE)

        with self.assertRaises(NotFound):
            self.identity_management_service.get_user_info_extension('That rug really tied the room together.')
        with self.assertRaises(BadRequest):
            self.identity_management_service.get_user_info_extension()

        #Check the user while the role-request negotiation is still open
        extended_user = self.identity_management_service.get_user_info_extension(user_info_id, org_id=ion_org._id)
        self.assertEqual(user_info_obj.type_,extended_user.resource.type_)
        self.assertEqual(len(extended_user.roles),1)
        self.assertEqual(len(extended_user.open_requests),1)
        self.assertEqual(extended_user.open_requests[0].org_id, ion_org._id)
        self.assertEqual(extended_user.open_requests[0].user_id, user_info_id)
        self.assertEqual(extended_user.open_requests[0].request_type, OT.RequestRoleProposal)
        self.assertEqual(len(extended_user.closed_requests),0)
        self.assertEqual(extended_user.open_requests[0]._id, extended_user.open_requests[0].negotiation_id)

        neg = self.resource_registry.read(object_id=extended_user.open_requests[0].negotiation_id)
        sap_response = Negotiation.create_counter_proposal(neg, ProposalStatusEnum.ACCEPTED, ProposalOriginatorEnum.PROVIDER)
        sap_response2 = self.org_client.negotiate(sap_response)

        #Now check the user after the negotiation has been accepted and the role granted
        extended_user = self.identity_management_service.get_user_info_extension(user_info_id, org_id=ion_org._id)
        self.assertEqual(user_info_obj.type_,extended_user.resource.type_)
        self.assertEqual(len(extended_user.roles),2)
        self.assertEqual(len(extended_user.open_requests),0)
        self.assertEqual(len(extended_user.closed_requests),1)
        self.assertEqual(extended_user.closed_requests[0].org_id, ion_org._id)
        self.assertEqual(extended_user.closed_requests[0].user_id, user_info_id)
        self.assertEqual(extended_user.closed_requests[0].request_type, OT.RequestRoleProposal)

        self.identity_management_service.delete_user_info(user_info_id)

        self.org_client.revoke_role(org_id=ion_org._id, actor_id=actor_id, role_name=ORG_MANAGER_ROLE)

        self.identity_management_service.unregister_user_credentials(actor_id, self.subject)

        self.identity_management_service.delete_actor_identity(actor_id)



    def test_account_merge(self):
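        """Merge two separately registered accounts: initiate with one actor, complete with the token,
        and verify both certificates then sign on to the same identity; invalid tokens, wrong users,
        repeated merges, and self-merges are rejected."""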
        certificate =  """-----BEGIN CERTIFICATE-----
MIIEMzCCAxugAwIBAgICBQAwDQYJKoZIhvcNAQEFBQAwajETMBEGCgmSJomT8ixkARkWA29yZzEX
MBUGCgmSJomT8ixkARkWB2NpbG9nb24xCzAJBgNVBAYTAlVTMRAwDgYDVQQKEwdDSUxvZ29uMRsw
GQYDVQQDExJDSUxvZ29uIEJhc2ljIENBIDEwHhcNMTAxMTE4MjIyNTA2WhcNMTAxMTE5MTAzMDA2
WjBvMRMwEQYKCZImiZPyLGQBGRMDb3JnMRcwFQYKCZImiZPyLGQBGRMHY2lsb2dvbjELMAkGA1UE
BhMCVVMxFzAVBgNVBAoTDlByb3RlY3ROZXR3b3JrMRkwFwYDVQQDExBSb2dlciBVbndpbiBBMjU0
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA6QhsWxhUXbIxg+1ZyEc7d+hIGvchVmtb
g0kKLmivgoVsA4U7swNDRH6svW242THta0oTf6crkRx7kOKg6jma2lcAC1sjOSddqX7/92ChoUPq
7LWt2T6GVVA10ex5WAeB/o7br/Z4U8/75uCBis+ru7xEDl09PToK20mrkcz9M4HqIv1eSoPkrs3b
2lUtQc6cjuHRDU4NknXaVMXTBHKPM40UxEDHJueFyCiZJFg3lvQuSsAl4JL5Z8pC02T8/bODBuf4
dszsqn2SC8YDw1xrujvW2Bd7Q7BwMQ/gO+dZKM1mLJFpfEsR9WrjMeg6vkD2TMWLMr0/WIkGC8u+
6M6SMQIDAQABo4HdMIHaMAwGA1UdEwEB/wQCMAAwDgYDVR0PAQH/BAQDAgSwMBMGA1UdJQQMMAoG
CCsGAQUFBwMCMBgGA1UdIAQRMA8wDQYLKwYBBAGCkTYBAgEwagYDVR0fBGMwYTAuoCygKoYoaHR0
cDovL2NybC5jaWxvZ29uLm9yZy9jaWxvZ29uLWJhc2ljLmNybDAvoC2gK4YpaHR0cDovL2NybC5k
b2Vncmlkcy5vcmcvY2lsb2dvbi1iYXNpYy5jcmwwHwYDVR0RBBgwFoEUaXRzYWdyZWVuMUB5YWhv
by5jb20wDQYJKoZIhvcNAQEFBQADggEBAEYHQPMY9Grs19MHxUzMwXp1GzCKhGpgyVKJKW86PJlr
HGruoWvx+DLNX75Oj5FC4t8bOUQVQusZGeGSEGegzzfIeOI/jWP1UtIjzvTFDq3tQMNvsgROSCx5
CkpK4nS0kbwLux+zI7BWON97UpMIzEeE05pd7SmNAETuWRsHMP+x6i7hoUp/uad4DwbzNUGIotdK
f8b270icOVgkOKRdLP/Q4r/x8skKSCRz1ZsRdR+7+B/EgksAJj7Ut3yiWoUekEMxCaTdAHPTMD/g
Mh9xL90hfMJyoGemjJswG5g3fAdTP/Lv0I6/nWeH/cLjwwpQgIEjEAVXl7KHuzX5vPD/wqQ=
-----END CERTIFICATE-----"""
        subject = "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254"
        certificate_2 = """-----BEGIN CERTIFICATE-----
MIIEMzCCAxugAwIBAgIDAJ/lMA0GCSqGSIb3DQEBCwUAMGsxEzARBgoJkiaJk/IsZAEZFgNvcmcx
FzAVBgoJkiaJk/IsZAEZFgdjaWxvZ29uMQswCQYDVQQGEwJVUzEQMA4GA1UEChMHQ0lMb2dvbjEc
MBoGA1UEAxMTQ0lMb2dvbiBPcGVuSUQgQ0EgMTAeFw0xMjEwMTcwMDE2NDlaFw0xMjEwMTcxMjIx
NDlaMGkxEzARBgoJkiaJk/IsZAEZEwNvcmcxFzAVBgoJkiaJk/IsZAEZEwdjaWxvZ29uMQswCQYD
VQQGEwJVUzEPMA0GA1UEChMGR29vZ2xlMRswGQYDVQQDExJPd2VuIE93bmVycmVwIEE4OTMwggEi
MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDYLdpgg88sntivH+af4oamlp7blsUQcCQ5Yc/b
VDP/dwEKfxTcW36tMV3asLO7GcL7z4FESG761LAe86siT9rcwg2ttLkRjI9KeA3sFjC28N8XjKZ1
estCqG3odqw2pjo3VEFaU57219vIYMJhjmHKEgSnlMQeChMYun/sYIO5uNFba9BfiB6/PRS+bgee
cXRsIAm1vkB89AHdEjqdvH0uSN+jGjF6aAPXsESh70DUAHzs14lbFAomig7AZafT+weh0G5pnayC
lutVnhb9SyS3s1+A6kx8z9mkDUwY/NKXisuDeXa+WbRVq51D+Lc7ffOI+Ph+ynyfFGMcCBzbMADX
AgMBAAGjgeEwgd4wDAYDVR0TAQH/BAIwADAOBgNVHQ8BAf8EBAMCBLAwEwYDVR0lBAwwCgYIKwYB
BQUHAwIwGAYDVR0gBBEwDzANBgsrBgEEAYKRNgEDAzBsBgNVHR8EZTBjMC+gLaArhilodHRwOi8v
Y3JsLmNpbG9nb24ub3JnL2NpbG9nb24tb3BlbmlkLmNybDAwoC6gLIYqaHR0cDovL2NybC5kb2Vn
cmlkcy5vcmcvY2lsb2dvbi1vcGVuaWQuY3JsMCEGA1UdEQQaMBiBFm93ZW5vd25lcnJlcEBnbWFp
bC5jb20wDQYJKoZIhvcNAQELBQADggEBAHWd6ZOjSmJyOUyyLgZAPJpkSuk7DT5mFRhszJhfTGnu
gANHRIJZMs5e/LCMypE+ftxb8mnhAE+kURA2DmeucazHUDP5oYofU+8KMYqcNKnPpLnuiw+bCJPa
3BDxrYoi+vVislHb0U+QDjVYtUtQ2b1/Xhv8ShH89O9i65bbOq+sqez6z2AD9RWOEwRwpQLc9D65
9lkrsKGmJtuG8q3NTpZ1DSuaLOtn0QqttdmCg3pu5edRtgdpGadaSGR4s222JasV439bSTL8Z0Ug
HtjSclGqi8IBmvRkTZI61zTVbGdOKMP90LV1p8noJVLRkZpWRjLxI5xy9El8daAWMdjfrSc=
-----END CERTIFICATE-----"""
        subject_2 = "/DC=org/DC=cilogon/C=US/O=Google/CN=Owen Ownerrep A893"

        # Try to merge with nonexistent email account
        with self.assertRaises(NotFound):
            self.identity_management_service.initiate_account_merge("*****@*****.**")
        with self.assertRaises(BadRequest):
            self.identity_management_service.initiate_account_merge()

        # Create two users
        id, valid_until, registered = self.identity_management_service.signon(certificate, True)
        self.assertFalse(registered)
        id_2, valid_until_2, registered_2 = self.identity_management_service.signon(certificate_2, True)
        self.assertFalse(registered_2)

        # Validate the two accounts are different
        self.assertNotEqual(id, id_2, "The two accounts should have different user ids")

        # Create UserInfo
        contact_info_obj = IonObject("ContactInformation",{"email": "*****@*****.**"})
        user_info_obj = IonObject("UserInfo", {"name": "Dude", "contact": contact_info_obj})
        user_info_id = self.identity_management_service.create_user_info(id, user_info_obj)

        contact_info_obj_2 = IonObject("ContactInformation",{"email": "*****@*****.**"})
        user_info_obj_2 = IonObject("UserInfo", {"name": "theDude", "contact": contact_info_obj_2})
        user_info_id_2 = self.identity_management_service.create_user_info(id_2, user_info_obj_2)

        # Make sure the two users are registered
        id, valid_until, registered = self.identity_management_service.signon(certificate, True)
        self.assertTrue(registered)
        id_2, valid_until_2, registered_2 = self.identity_management_service.signon(certificate_2, True)
        self.assertTrue(registered_2)

        token = self.identity_management_service.initiate_account_merge("*****@*****.**",  headers={'ion-actor-id':id})

        # Try merging accounts with invalid token string
        with self.assertRaises(NotFound):
            self.identity_management_service.complete_account_merge(token_string="0xBeeF", headers={'ion-actor-id':id})
        with self.assertRaises(BadRequest):
            self.identity_management_service.complete_account_merge()

        # Try merging accounts with a different user
        # Since this user hasn't initiated account merge, the token doesn't exist in his/her UserInfo
        with self.assertRaises(NotFound):
            self.identity_management_service.complete_account_merge(token, headers={'ion-actor-id':id_2})

        self.identity_management_service.complete_account_merge(token, headers={'ion-actor-id':id})

        # Try merging the account again
        with self.assertRaises(BadRequest):
            self.identity_management_service.complete_account_merge(token, headers={'ion-actor-id':id})

        # Signon again and verify the two accounts have been merged
        id, valid_until, registered = self.identity_management_service.signon(certificate, True)
        self.assertTrue(registered)
        id_2, valid_until_2, registered_2 = self.identity_management_service.signon(certificate_2, True)
        self.assertTrue(registered_2)

        # Validate the two accounts are the same
        self.assertEqual(id, id_2, "The two accounts should have the same id")

        # Try to merge to your own account
        with self.assertRaises(BadRequest):
            token = self.identity_management_service.initiate_account_merge("*****@*****.**",  headers={'ion-actor-id':id})

        #  Done testing. Delete user
        self.identity_management_service.delete_user_info(user_info_id)
        self.identity_management_service.unregister_user_credentials(id, subject)
        self.identity_management_service.delete_actor_identity(id)
Code example #3
class TestIdentityManagementServiceInt(IonIntegrationTestCase):
    
    def setUp(self):
        self.subject = "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254"

        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2coi.yml')

        self.identity_management_service = IdentityManagementServiceClient(node=self.container.node)

    def test_actor_identity(self):
        actor_identity_obj = IonObject("ActorIdentity", {"name": self.subject})        
        user_id = self.identity_management_service.create_actor_identity(actor_identity_obj)

        actor_identity = self.identity_management_service.read_actor_identity(user_id)

        actor_identity.name = 'Updated subject'
        self.identity_management_service.update_actor_identity(actor_identity)

        self.identity_management_service.delete_actor_identity(user_id)
 
        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.read_actor_identity(user_id)
        self.assertTrue("does not exist" in cm.exception.message)
 
        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.delete_actor_identity(user_id)
        self.assertTrue("does not exist" in cm.exception.message)

    def test_user_credentials(self):
        actor_identity_obj = IonObject("ActorIdentity", {"name": self.subject})        
        user_id = self.identity_management_service.create_actor_identity(actor_identity_obj)

        user_credentials_obj = IonObject("UserCredentials", {"name": self.subject})        
        self.identity_management_service.register_user_credentials(user_id, user_credentials_obj)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.unregister_user_credentials("bad", self.subject)
        self.assertTrue("does not exist" in cm.exception.message)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.unregister_user_credentials(user_id, "bad")
        self.assertTrue("does not exist" in cm.exception.message)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.unregister_user_credentials('bad', 'bad')
        self.assertTrue("does not exist" in cm.exception.message)

        self.identity_management_service.unregister_user_credentials(user_id, self.subject)

        self.identity_management_service.delete_actor_identity(user_id)

    def test_user_info(self):
        actor_identity_obj = IonObject("ActorIdentity", {"name": self.subject})        
        user_id = self.identity_management_service.create_actor_identity(actor_identity_obj)

        user_credentials_obj = IonObject("UserCredentials", {"name": self.subject})        
        self.identity_management_service.register_user_credentials(user_id, user_credentials_obj)

        user_info_obj = IonObject("UserInfo", {"name": "Foo"})        
        user_info = self.identity_management_service.create_user_info(user_id, user_info_obj)

        with self.assertRaises(Conflict) as cm:
            self.identity_management_service.create_user_info(user_id, user_info_obj)
        self.assertTrue("UserInfo already exists for user id" in cm.exception.message)

        user_info_obj = self.identity_management_service.find_user_info_by_id(user_id)

        user_info_obj = self.identity_management_service.find_user_info_by_name("Foo")

        user_info_obj = self.identity_management_service.find_user_info_by_subject(self.subject)

        user_info_obj = self.identity_management_service.read_user_info(user_info)
        
        user_info_obj.name = 'Jane Doe'
        
        self.identity_management_service.update_user_info(user_info_obj)
        
        self.identity_management_service.delete_user_info(user_info)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.read_user_info(user_info)
        self.assertTrue('does not exist' in cm.exception.message)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.delete_user_info(user_info)
        self.assertTrue('does not exist' in cm.exception.message)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.find_user_info_by_name("John Doe")
        self.assertEqual(cm.exception.message, 'UserInfo with name John Doe does not exist')

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.find_user_info_by_subject("Bogus subject")
        self.assertEqual(cm.exception.message, "UserCredentials with subject Bogus subject does not exist")

        self.identity_management_service.unregister_user_credentials(user_id, self.subject)

        self.identity_management_service.delete_actor_identity(user_id)

    def test_signon(self):
        certificate =  """-----BEGIN CERTIFICATE-----
MIIEMzCCAxugAwIBAgICBQAwDQYJKoZIhvcNAQEFBQAwajETMBEGCgmSJomT8ixkARkWA29yZzEX
MBUGCgmSJomT8ixkARkWB2NpbG9nb24xCzAJBgNVBAYTAlVTMRAwDgYDVQQKEwdDSUxvZ29uMRsw
GQYDVQQDExJDSUxvZ29uIEJhc2ljIENBIDEwHhcNMTAxMTE4MjIyNTA2WhcNMTAxMTE5MTAzMDA2
WjBvMRMwEQYKCZImiZPyLGQBGRMDb3JnMRcwFQYKCZImiZPyLGQBGRMHY2lsb2dvbjELMAkGA1UE
BhMCVVMxFzAVBgNVBAoTDlByb3RlY3ROZXR3b3JrMRkwFwYDVQQDExBSb2dlciBVbndpbiBBMjU0
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA6QhsWxhUXbIxg+1ZyEc7d+hIGvchVmtb
g0kKLmivgoVsA4U7swNDRH6svW242THta0oTf6crkRx7kOKg6jma2lcAC1sjOSddqX7/92ChoUPq
7LWt2T6GVVA10ex5WAeB/o7br/Z4U8/75uCBis+ru7xEDl09PToK20mrkcz9M4HqIv1eSoPkrs3b
2lUtQc6cjuHRDU4NknXaVMXTBHKPM40UxEDHJueFyCiZJFg3lvQuSsAl4JL5Z8pC02T8/bODBuf4
dszsqn2SC8YDw1xrujvW2Bd7Q7BwMQ/gO+dZKM1mLJFpfEsR9WrjMeg6vkD2TMWLMr0/WIkGC8u+
6M6SMQIDAQABo4HdMIHaMAwGA1UdEwEB/wQCMAAwDgYDVR0PAQH/BAQDAgSwMBMGA1UdJQQMMAoG
CCsGAQUFBwMCMBgGA1UdIAQRMA8wDQYLKwYBBAGCkTYBAgEwagYDVR0fBGMwYTAuoCygKoYoaHR0
cDovL2NybC5jaWxvZ29uLm9yZy9jaWxvZ29uLWJhc2ljLmNybDAvoC2gK4YpaHR0cDovL2NybC5k
b2Vncmlkcy5vcmcvY2lsb2dvbi1iYXNpYy5jcmwwHwYDVR0RBBgwFoEUaXRzYWdyZWVuMUB5YWhv
by5jb20wDQYJKoZIhvcNAQEFBQADggEBAEYHQPMY9Grs19MHxUzMwXp1GzCKhGpgyVKJKW86PJlr
HGruoWvx+DLNX75Oj5FC4t8bOUQVQusZGeGSEGegzzfIeOI/jWP1UtIjzvTFDq3tQMNvsgROSCx5
CkpK4nS0kbwLux+zI7BWON97UpMIzEeE05pd7SmNAETuWRsHMP+x6i7hoUp/uad4DwbzNUGIotdK
f8b270icOVgkOKRdLP/Q4r/x8skKSCRz1ZsRdR+7+B/EgksAJj7Ut3yiWoUekEMxCaTdAHPTMD/g
Mh9xL90hfMJyoGemjJswG5g3fAdTP/Lv0I6/nWeH/cLjwwpQgIEjEAVXl7KHuzX5vPD/wqQ=
-----END CERTIFICATE-----"""
        id, valid_until, registered = self.identity_management_service.signon(certificate, True)

        self.assertFalse(registered)

        id2, valid_until2, registered2 = self.identity_management_service.signon(certificate, True)

        self.assertFalse(registered2)
        self.assertTrue(id == id2)
        self.assertTrue(valid_until == valid_until2)

        user_info_obj = IonObject("UserInfo", {"name": "Foo"})        
        self.identity_management_service.create_user_info(id, user_info_obj)

        id3, valid_until3, registered3 = self.identity_management_service.signon(certificate, True)

        self.assertTrue(registered3)
        self.assertTrue(id == id3)
        self.assertTrue(valid_until == valid_until3)
Code example #4
class TestIdentityManagementServiceInt(IonIntegrationTestCase):
    def setUp(self):
        self.subject = "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254"

        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.resource_registry = ResourceRegistryServiceClient(
            node=self.container.node)
        self.identity_management_service = IdentityManagementServiceClient(
            node=self.container.node)
        self.org_client = OrgManagementServiceClient(node=self.container.node)

    def test_actor_identity(self):
        actor_identity_obj = IonObject("ActorIdentity", {"name": self.subject})
        user_id = self.identity_management_service.create_actor_identity(
            actor_identity_obj)

        actor_identity = self.identity_management_service.read_actor_identity(
            user_id)

        actor_identity.name = 'Updated subject'
        self.identity_management_service.update_actor_identity(actor_identity)

        ai = self.identity_management_service.find_actor_identity_by_name(
            actor_identity.name)
        self._baseAssertEqual(ai.name, actor_identity.name)
        with self.assertRaises(NotFound):
            ai = self.identity_management_service.find_actor_identity_by_name(
                "Yeah, well, you know, that's just, like, your opinion, man.")

        self._baseAssertEqual(ai.name, actor_identity.name)

        self.identity_management_service.delete_actor_identity(user_id)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.read_actor_identity(user_id)
        self.assertTrue("does not exist" in cm.exception.message)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.delete_actor_identity(user_id)
        self.assertTrue("does not exist" in cm.exception.message)

    def test_user_credentials(self):
        actor_identity_obj = IonObject("ActorIdentity", {"name": self.subject})
        user_id = self.identity_management_service.create_actor_identity(
            actor_identity_obj)

        user_credentials_obj = IonObject("UserCredentials",
                                         {"name": self.subject})
        self.identity_management_service.register_user_credentials(
            user_id, user_credentials_obj)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.unregister_user_credentials(
                "bad", self.subject)
        self.assertTrue("does not exist" in cm.exception.message)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.unregister_user_credentials(
                user_id, "bad")
        self.assertTrue("does not exist" in cm.exception.message)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.unregister_user_credentials(
                'bad', 'bad')
        self.assertTrue("does not exist" in cm.exception.message)

        self.identity_management_service.unregister_user_credentials(
            user_id, self.subject)

        self.identity_management_service.delete_actor_identity(user_id)

    def test_user_info(self):
        actor_identity_obj = IonObject("ActorIdentity", {"name": self.subject})
        user_id = self.identity_management_service.create_actor_identity(
            actor_identity_obj)

        user_credentials_obj = IonObject("UserCredentials",
                                         {"name": self.subject})
        self.identity_management_service.register_user_credentials(
            user_id, user_credentials_obj)

        user_info_obj = IonObject("UserInfo", {"name": "Foo"})
        user_info = self.identity_management_service.create_user_info(
            user_id, user_info_obj)

        with self.assertRaises(Conflict) as cm:
            self.identity_management_service.create_user_info(
                user_id, user_info_obj)
        self.assertTrue(
            "UserInfo already exists for user id" in cm.exception.message)

        user_info_obj = self.identity_management_service.find_user_info_by_id(
            user_id)

        user_info_obj = self.identity_management_service.find_user_info_by_name(
            "Foo")

        user_info_obj = self.identity_management_service.find_user_info_by_subject(
            self.subject)

        user_info_obj = self.identity_management_service.read_user_info(
            user_info)

        user_info_obj.name = 'Jane Doe'

        self.identity_management_service.update_user_info(user_info_obj)

        self.identity_management_service.delete_user_info(user_info)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.read_user_info(user_info)
        self.assertTrue('does not exist' in cm.exception.message)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.delete_user_info(user_info)
        self.assertTrue('does not exist' in cm.exception.message)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.find_user_info_by_name("John Doe")
        self.assertEqual(cm.exception.message,
                         'UserInfo with name John Doe does not exist')

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.find_user_info_by_subject(
                "Bogus subject")
        self.assertEqual(
            cm.exception.message,
            "UserCredentials with subject Bogus subject does not exist")

        self.identity_management_service.unregister_user_credentials(
            user_id, self.subject)

        self.identity_management_service.delete_actor_identity(user_id)

    def test_signon(self):
        certificate = """-----BEGIN CERTIFICATE-----
MIIEMzCCAxugAwIBAgICBQAwDQYJKoZIhvcNAQEFBQAwajETMBEGCgmSJomT8ixkARkWA29yZzEX
MBUGCgmSJomT8ixkARkWB2NpbG9nb24xCzAJBgNVBAYTAlVTMRAwDgYDVQQKEwdDSUxvZ29uMRsw
GQYDVQQDExJDSUxvZ29uIEJhc2ljIENBIDEwHhcNMTAxMTE4MjIyNTA2WhcNMTAxMTE5MTAzMDA2
WjBvMRMwEQYKCZImiZPyLGQBGRMDb3JnMRcwFQYKCZImiZPyLGQBGRMHY2lsb2dvbjELMAkGA1UE
BhMCVVMxFzAVBgNVBAoTDlByb3RlY3ROZXR3b3JrMRkwFwYDVQQDExBSb2dlciBVbndpbiBBMjU0
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA6QhsWxhUXbIxg+1ZyEc7d+hIGvchVmtb
g0kKLmivgoVsA4U7swNDRH6svW242THta0oTf6crkRx7kOKg6jma2lcAC1sjOSddqX7/92ChoUPq
7LWt2T6GVVA10ex5WAeB/o7br/Z4U8/75uCBis+ru7xEDl09PToK20mrkcz9M4HqIv1eSoPkrs3b
2lUtQc6cjuHRDU4NknXaVMXTBHKPM40UxEDHJueFyCiZJFg3lvQuSsAl4JL5Z8pC02T8/bODBuf4
dszsqn2SC8YDw1xrujvW2Bd7Q7BwMQ/gO+dZKM1mLJFpfEsR9WrjMeg6vkD2TMWLMr0/WIkGC8u+
6M6SMQIDAQABo4HdMIHaMAwGA1UdEwEB/wQCMAAwDgYDVR0PAQH/BAQDAgSwMBMGA1UdJQQMMAoG
CCsGAQUFBwMCMBgGA1UdIAQRMA8wDQYLKwYBBAGCkTYBAgEwagYDVR0fBGMwYTAuoCygKoYoaHR0
cDovL2NybC5jaWxvZ29uLm9yZy9jaWxvZ29uLWJhc2ljLmNybDAvoC2gK4YpaHR0cDovL2NybC5k
b2Vncmlkcy5vcmcvY2lsb2dvbi1iYXNpYy5jcmwwHwYDVR0RBBgwFoEUaXRzYWdyZWVuMUB5YWhv
by5jb20wDQYJKoZIhvcNAQEFBQADggEBAEYHQPMY9Grs19MHxUzMwXp1GzCKhGpgyVKJKW86PJlr
HGruoWvx+DLNX75Oj5FC4t8bOUQVQusZGeGSEGegzzfIeOI/jWP1UtIjzvTFDq3tQMNvsgROSCx5
CkpK4nS0kbwLux+zI7BWON97UpMIzEeE05pd7SmNAETuWRsHMP+x6i7hoUp/uad4DwbzNUGIotdK
f8b270icOVgkOKRdLP/Q4r/x8skKSCRz1ZsRdR+7+B/EgksAJj7Ut3yiWoUekEMxCaTdAHPTMD/g
Mh9xL90hfMJyoGemjJswG5g3fAdTP/Lv0I6/nWeH/cLjwwpQgIEjEAVXl7KHuzX5vPD/wqQ=
-----END CERTIFICATE-----"""
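        # signon() creates or looks up the actor identity for this certificate;
        # 'registered' only becomes True once a UserInfo exists for that actor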
        id, valid_until, registered = self.identity_management_service.signon(
            certificate, True)

        self.assertFalse(registered)

        id2, valid_until2, registered2 = self.identity_management_service.signon(
            certificate, True)

        self.assertFalse(registered2)
        self.assertTrue(id == id2)
        self.assertTrue(valid_until == valid_until2)

        user_info_obj = IonObject("UserInfo", {"name": "Foo"})
        self.identity_management_service.create_user_info(id, user_info_obj)

        id3, valid_until3, registered3 = self.identity_management_service.signon(
            certificate, True)

        self.assertTrue(registered3)
        self.assertTrue(id == id3)
        self.assertTrue(valid_until == valid_until3)

    @attr('EXT')
    def test_get_extended_user_identity(self):

        actor_identity_obj = IonObject("ActorIdentity", {"name": self.subject})
        actor_id = self.identity_management_service.create_actor_identity(
            actor_identity_obj)

        user_credentials_obj = IonObject("UserCredentials",
                                         {"name": self.subject})
        self.identity_management_service.register_user_credentials(
            actor_id, user_credentials_obj)

        user_info_obj = IonObject("UserInfo", {"name": "Foo"})
        user_info_id = self.identity_management_service.create_user_info(
            actor_id, user_info_obj)

        ion_org = self.org_client.find_org()

        #Build the Service Agreement Proposal to request a role but never close it
        sap = IonObject(OT.RequestRoleProposal,
                        consumer=actor_id,
                        provider=ion_org._id,
                        role_name=ORG_MANAGER_ROLE)
        sap_response = self.org_client.negotiate(sap)

        #Just grant the role anyway
        #self.org_client.grant_role(ion_org._id, actor_id, ORG_MANAGER_ROLE)

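        # Error paths: a bogus user_info id raises NotFound, a missing argument raises BadRequest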
        with self.assertRaises(NotFound):
            self.identity_management_service.get_user_info_extension(
                'That rug really tied the room together.')
        with self.assertRaises(BadRequest):
            self.identity_management_service.get_user_info_extension()

        #Check the user while the role request negotiation is still open
        extended_user = self.identity_management_service.get_user_info_extension(
            user_info_id, org_id=ion_org._id)
        self.assertEqual(user_info_obj.type_, extended_user.resource.type_)
        self.assertEqual(len(extended_user.roles), 1)
        self.assertEqual(len(extended_user.open_requests), 1)
        self.assertEqual(extended_user.open_requests[0].org_id, ion_org._id)
        self.assertEqual(extended_user.open_requests[0].user_id, user_info_id)
        self.assertEqual(extended_user.open_requests[0].request_type,
                         OT.RequestRoleProposal)
        self.assertEqual(len(extended_user.closed_requests), 0)
        self.assertEqual(extended_user.open_requests[0]._id,
                         extended_user.open_requests[0].negotiation_id)

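        # Accept the open role request on behalf of the Org (provider side) to close the negotiation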
        neg = self.resource_registry.read(
            object_id=extended_user.open_requests[0].negotiation_id)
        sap_response = Negotiation.create_counter_proposal(
            neg, ProposalStatusEnum.ACCEPTED, ProposalOriginatorEnum.PROVIDER)
        sap_response2 = self.org_client.negotiate(sap_response)

        #Now check the user after the negotiation has been accepted and the role granted
        extended_user = self.identity_management_service.get_user_info_extension(
            user_info_id, org_id=ion_org._id)
        self.assertEqual(user_info_obj.type_, extended_user.resource.type_)
        self.assertEqual(len(extended_user.roles), 2)
        self.assertEqual(len(extended_user.open_requests), 0)
        self.assertEqual(len(extended_user.closed_requests), 1)
        self.assertEqual(extended_user.closed_requests[0].org_id, ion_org._id)
        self.assertEqual(extended_user.closed_requests[0].user_id,
                         user_info_id)
        self.assertEqual(extended_user.closed_requests[0].request_type,
                         OT.RequestRoleProposal)

        self.identity_management_service.delete_user_info(user_info_id)

        self.org_client.revoke_role(org_id=ion_org._id,
                                    actor_id=actor_id,
                                    role_name=ORG_MANAGER_ROLE)

        self.identity_management_service.unregister_user_credentials(
            actor_id, self.subject)

        self.identity_management_service.delete_actor_identity(actor_id)

    def test_account_merge(self):
        certificate = """-----BEGIN CERTIFICATE-----
MIIEMzCCAxugAwIBAgICBQAwDQYJKoZIhvcNAQEFBQAwajETMBEGCgmSJomT8ixkARkWA29yZzEX
MBUGCgmSJomT8ixkARkWB2NpbG9nb24xCzAJBgNVBAYTAlVTMRAwDgYDVQQKEwdDSUxvZ29uMRsw
GQYDVQQDExJDSUxvZ29uIEJhc2ljIENBIDEwHhcNMTAxMTE4MjIyNTA2WhcNMTAxMTE5MTAzMDA2
WjBvMRMwEQYKCZImiZPyLGQBGRMDb3JnMRcwFQYKCZImiZPyLGQBGRMHY2lsb2dvbjELMAkGA1UE
BhMCVVMxFzAVBgNVBAoTDlByb3RlY3ROZXR3b3JrMRkwFwYDVQQDExBSb2dlciBVbndpbiBBMjU0
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA6QhsWxhUXbIxg+1ZyEc7d+hIGvchVmtb
g0kKLmivgoVsA4U7swNDRH6svW242THta0oTf6crkRx7kOKg6jma2lcAC1sjOSddqX7/92ChoUPq
7LWt2T6GVVA10ex5WAeB/o7br/Z4U8/75uCBis+ru7xEDl09PToK20mrkcz9M4HqIv1eSoPkrs3b
2lUtQc6cjuHRDU4NknXaVMXTBHKPM40UxEDHJueFyCiZJFg3lvQuSsAl4JL5Z8pC02T8/bODBuf4
dszsqn2SC8YDw1xrujvW2Bd7Q7BwMQ/gO+dZKM1mLJFpfEsR9WrjMeg6vkD2TMWLMr0/WIkGC8u+
6M6SMQIDAQABo4HdMIHaMAwGA1UdEwEB/wQCMAAwDgYDVR0PAQH/BAQDAgSwMBMGA1UdJQQMMAoG
CCsGAQUFBwMCMBgGA1UdIAQRMA8wDQYLKwYBBAGCkTYBAgEwagYDVR0fBGMwYTAuoCygKoYoaHR0
cDovL2NybC5jaWxvZ29uLm9yZy9jaWxvZ29uLWJhc2ljLmNybDAvoC2gK4YpaHR0cDovL2NybC5k
b2Vncmlkcy5vcmcvY2lsb2dvbi1iYXNpYy5jcmwwHwYDVR0RBBgwFoEUaXRzYWdyZWVuMUB5YWhv
by5jb20wDQYJKoZIhvcNAQEFBQADggEBAEYHQPMY9Grs19MHxUzMwXp1GzCKhGpgyVKJKW86PJlr
HGruoWvx+DLNX75Oj5FC4t8bOUQVQusZGeGSEGegzzfIeOI/jWP1UtIjzvTFDq3tQMNvsgROSCx5
CkpK4nS0kbwLux+zI7BWON97UpMIzEeE05pd7SmNAETuWRsHMP+x6i7hoUp/uad4DwbzNUGIotdK
f8b270icOVgkOKRdLP/Q4r/x8skKSCRz1ZsRdR+7+B/EgksAJj7Ut3yiWoUekEMxCaTdAHPTMD/g
Mh9xL90hfMJyoGemjJswG5g3fAdTP/Lv0I6/nWeH/cLjwwpQgIEjEAVXl7KHuzX5vPD/wqQ=
-----END CERTIFICATE-----"""
        subject = "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254"
        certificate_2 = """-----BEGIN CERTIFICATE-----
MIIEMzCCAxugAwIBAgIDAJ/lMA0GCSqGSIb3DQEBCwUAMGsxEzARBgoJkiaJk/IsZAEZFgNvcmcx
FzAVBgoJkiaJk/IsZAEZFgdjaWxvZ29uMQswCQYDVQQGEwJVUzEQMA4GA1UEChMHQ0lMb2dvbjEc
MBoGA1UEAxMTQ0lMb2dvbiBPcGVuSUQgQ0EgMTAeFw0xMjEwMTcwMDE2NDlaFw0xMjEwMTcxMjIx
NDlaMGkxEzARBgoJkiaJk/IsZAEZEwNvcmcxFzAVBgoJkiaJk/IsZAEZEwdjaWxvZ29uMQswCQYD
VQQGEwJVUzEPMA0GA1UEChMGR29vZ2xlMRswGQYDVQQDExJPd2VuIE93bmVycmVwIEE4OTMwggEi
MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDYLdpgg88sntivH+af4oamlp7blsUQcCQ5Yc/b
VDP/dwEKfxTcW36tMV3asLO7GcL7z4FESG761LAe86siT9rcwg2ttLkRjI9KeA3sFjC28N8XjKZ1
estCqG3odqw2pjo3VEFaU57219vIYMJhjmHKEgSnlMQeChMYun/sYIO5uNFba9BfiB6/PRS+bgee
cXRsIAm1vkB89AHdEjqdvH0uSN+jGjF6aAPXsESh70DUAHzs14lbFAomig7AZafT+weh0G5pnayC
lutVnhb9SyS3s1+A6kx8z9mkDUwY/NKXisuDeXa+WbRVq51D+Lc7ffOI+Ph+ynyfFGMcCBzbMADX
AgMBAAGjgeEwgd4wDAYDVR0TAQH/BAIwADAOBgNVHQ8BAf8EBAMCBLAwEwYDVR0lBAwwCgYIKwYB
BQUHAwIwGAYDVR0gBBEwDzANBgsrBgEEAYKRNgEDAzBsBgNVHR8EZTBjMC+gLaArhilodHRwOi8v
Y3JsLmNpbG9nb24ub3JnL2NpbG9nb24tb3BlbmlkLmNybDAwoC6gLIYqaHR0cDovL2NybC5kb2Vn
cmlkcy5vcmcvY2lsb2dvbi1vcGVuaWQuY3JsMCEGA1UdEQQaMBiBFm93ZW5vd25lcnJlcEBnbWFp
bC5jb20wDQYJKoZIhvcNAQELBQADggEBAHWd6ZOjSmJyOUyyLgZAPJpkSuk7DT5mFRhszJhfTGnu
gANHRIJZMs5e/LCMypE+ftxb8mnhAE+kURA2DmeucazHUDP5oYofU+8KMYqcNKnPpLnuiw+bCJPa
3BDxrYoi+vVislHb0U+QDjVYtUtQ2b1/Xhv8ShH89O9i65bbOq+sqez6z2AD9RWOEwRwpQLc9D65
9lkrsKGmJtuG8q3NTpZ1DSuaLOtn0QqttdmCg3pu5edRtgdpGadaSGR4s222JasV439bSTL8Z0Ug
HtjSclGqi8IBmvRkTZI61zTVbGdOKMP90LV1p8noJVLRkZpWRjLxI5xy9El8daAWMdjfrSc=
-----END CERTIFICATE-----"""
        subject_2 = "/DC=org/DC=cilogon/C=US/O=Google/CN=Owen Ownerrep A893"

        # Try to merge with nonexistent email account
        with self.assertRaises(NotFound):
            self.identity_management_service.initiate_account_merge(
                "*****@*****.**")
        with self.assertRaises(BadRequest):
            self.identity_management_service.initiate_account_merge()

        # Create two users
        id, valid_until, registered = self.identity_management_service.signon(
            certificate, True)
        self.assertFalse(registered)
        id_2, valid_until_2, registered_2 = self.identity_management_service.signon(
            certificate_2, True)
        self.assertFalse(registered_2)

        # Validate the two accounts are different
        self.assertNotEqual(
            id, id_2, "The two accounts should have different user ids")

        # Create UserInfo
        contact_info_obj = IonObject("ContactInformation",
                                     {"email": "*****@*****.**"})
        user_info_obj = IonObject("UserInfo", {
            "name": "Dude",
            "contact": contact_info_obj
        })
        user_info_id = self.identity_management_service.create_user_info(
            id, user_info_obj)

        contact_info_obj_2 = IonObject("ContactInformation",
                                       {"email": "*****@*****.**"})
        user_info_obj_2 = IonObject("UserInfo", {
            "name": "theDude",
            "contact": contact_info_obj_2
        })
        user_info_id_2 = self.identity_management_service.create_user_info(
            id_2, user_info_obj_2)

        # Make sure the two users are registered
        id, valid_until, registered = self.identity_management_service.signon(
            certificate, True)
        self.assertTrue(registered)
        id_2, valid_until_2, registered_2 = self.identity_management_service.signon(
            certificate_2, True)
        self.assertTrue(registered_2)

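        # Initiate the merge from the first account; the returned token is required to complete it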
        token = self.identity_management_service.initiate_account_merge(
            "*****@*****.**", headers={'ion-actor-id': id})

        # Try merging accounts with invalid token string
        with self.assertRaises(NotFound):
            self.identity_management_service.complete_account_merge(
                token_string="0xBeeF", headers={'ion-actor-id': id})
        with self.assertRaises(BadRequest):
            self.identity_management_service.complete_account_merge()

        # Try merging accounts with a different user
        # Since this user hasn't initiated account merge, the token doesn't exist in his/her UserInfo
        with self.assertRaises(NotFound):
            self.identity_management_service.complete_account_merge(
                token, headers={'ion-actor-id': id_2})

        self.identity_management_service.complete_account_merge(
            token, headers={'ion-actor-id': id})

        # Try merging the account again
        with self.assertRaises(BadRequest):
            self.identity_management_service.complete_account_merge(
                token, headers={'ion-actor-id': id})

        # Signon again and verify the two accounts have been merged
        id, valid_until, registered = self.identity_management_service.signon(
            certificate, True)
        self.assertTrue(registered)
        id_2, valid_until_2, registered_2 = self.identity_management_service.signon(
            certificate_2, True)
        self.assertTrue(registered_2)

        # Validate the two accounts are the same
        self.assertEqual(id, id_2, "The two accounts should have the same id")

        # Try to merge to your own account
        with self.assertRaises(BadRequest):
            token = self.identity_management_service.initiate_account_merge(
                "*****@*****.**", headers={'ion-actor-id': id})

        #  Done testing. Delete user
        self.identity_management_service.delete_user_info(user_info_id)
        self.identity_management_service.unregister_user_credentials(
            id, subject)
        self.identity_management_service.delete_actor_identity(id)
# Code example #5
class TestIdentityManagementServiceInt(IonIntegrationTestCase):
    def setUp(self):
        self.subject = "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254"

        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2coi.yml')

        self.identity_management_service = IdentityManagementServiceClient(
            node=self.container.node)

    def test_actor_identity(self):
        actor_identity_obj = IonObject("ActorIdentity", {"name": self.subject})
        user_id = self.identity_management_service.create_actor_identity(
            actor_identity_obj)

        actor_identity = self.identity_management_service.read_actor_identity(
            user_id)

        actor_identity.name = 'Updated subject'
        self.identity_management_service.update_actor_identity(actor_identity)

        self.identity_management_service.delete_actor_identity(user_id)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.read_actor_identity(user_id)
        self.assertTrue("does not exist" in cm.exception.message)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.delete_actor_identity(user_id)
        self.assertTrue("does not exist" in cm.exception.message)

    def test_user_credentials(self):
        actor_identity_obj = IonObject("ActorIdentity", {"name": self.subject})
        user_id = self.identity_management_service.create_actor_identity(
            actor_identity_obj)

        user_credentials_obj = IonObject("UserCredentials",
                                         {"name": self.subject})
        self.identity_management_service.register_user_credentials(
            user_id, user_credentials_obj)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.unregister_user_credentials(
                "bad", self.subject)
        self.assertTrue("does not exist" in cm.exception.message)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.unregister_user_credentials(
                user_id, "bad")
        self.assertTrue("does not exist" in cm.exception.message)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.unregister_user_credentials(
                'bad', 'bad')
        self.assertTrue("does not exist" in cm.exception.message)

        self.identity_management_service.unregister_user_credentials(
            user_id, self.subject)

        self.identity_management_service.delete_actor_identity(user_id)

    def test_user_info(self):
        actor_identity_obj = IonObject("ActorIdentity", {"name": self.subject})
        user_id = self.identity_management_service.create_actor_identity(
            actor_identity_obj)

        user_credentials_obj = IonObject("UserCredentials",
                                         {"name": self.subject})
        self.identity_management_service.register_user_credentials(
            user_id, user_credentials_obj)

        user_info_obj = IonObject("UserInfo", {"name": "Foo"})
        user_info = self.identity_management_service.create_user_info(
            user_id, user_info_obj)

        with self.assertRaises(Conflict) as cm:
            self.identity_management_service.create_user_info(
                user_id, user_info_obj)
        self.assertTrue(
            "UserInfo already exists for user id" in cm.exception.message)

        user_info_obj = self.identity_management_service.find_user_info_by_id(
            user_id)

        user_info_obj = self.identity_management_service.find_user_info_by_name(
            "Foo")

        user_info_obj = self.identity_management_service.find_user_info_by_subject(
            self.subject)

        user_info_obj = self.identity_management_service.read_user_info(
            user_info)

        user_info_obj.name = 'Jane Doe'

        self.identity_management_service.update_user_info(user_info_obj)

        self.identity_management_service.delete_user_info(user_info)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.read_user_info(user_info)
        self.assertTrue('does not exist' in cm.exception.message)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.delete_user_info(user_info)
        self.assertTrue('does not exist' in cm.exception.message)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.find_user_info_by_name("John Doe")
        self.assertEqual(cm.exception.message,
                         'UserInfo with name John Doe does not exist')

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.find_user_info_by_subject(
                "Bogus subject")
        self.assertEqual(
            cm.exception.message,
            "UserCredentials with subject Bogus subject does not exist")

        self.identity_management_service.unregister_user_credentials(
            user_id, self.subject)

        self.identity_management_service.delete_actor_identity(user_id)

    def test_signon(self):
        certificate = """-----BEGIN CERTIFICATE-----
MIIEMzCCAxugAwIBAgICBQAwDQYJKoZIhvcNAQEFBQAwajETMBEGCgmSJomT8ixkARkWA29yZzEX
MBUGCgmSJomT8ixkARkWB2NpbG9nb24xCzAJBgNVBAYTAlVTMRAwDgYDVQQKEwdDSUxvZ29uMRsw
GQYDVQQDExJDSUxvZ29uIEJhc2ljIENBIDEwHhcNMTAxMTE4MjIyNTA2WhcNMTAxMTE5MTAzMDA2
WjBvMRMwEQYKCZImiZPyLGQBGRMDb3JnMRcwFQYKCZImiZPyLGQBGRMHY2lsb2dvbjELMAkGA1UE
BhMCVVMxFzAVBgNVBAoTDlByb3RlY3ROZXR3b3JrMRkwFwYDVQQDExBSb2dlciBVbndpbiBBMjU0
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA6QhsWxhUXbIxg+1ZyEc7d+hIGvchVmtb
g0kKLmivgoVsA4U7swNDRH6svW242THta0oTf6crkRx7kOKg6jma2lcAC1sjOSddqX7/92ChoUPq
7LWt2T6GVVA10ex5WAeB/o7br/Z4U8/75uCBis+ru7xEDl09PToK20mrkcz9M4HqIv1eSoPkrs3b
2lUtQc6cjuHRDU4NknXaVMXTBHKPM40UxEDHJueFyCiZJFg3lvQuSsAl4JL5Z8pC02T8/bODBuf4
dszsqn2SC8YDw1xrujvW2Bd7Q7BwMQ/gO+dZKM1mLJFpfEsR9WrjMeg6vkD2TMWLMr0/WIkGC8u+
6M6SMQIDAQABo4HdMIHaMAwGA1UdEwEB/wQCMAAwDgYDVR0PAQH/BAQDAgSwMBMGA1UdJQQMMAoG
CCsGAQUFBwMCMBgGA1UdIAQRMA8wDQYLKwYBBAGCkTYBAgEwagYDVR0fBGMwYTAuoCygKoYoaHR0
cDovL2NybC5jaWxvZ29uLm9yZy9jaWxvZ29uLWJhc2ljLmNybDAvoC2gK4YpaHR0cDovL2NybC5k
b2Vncmlkcy5vcmcvY2lsb2dvbi1iYXNpYy5jcmwwHwYDVR0RBBgwFoEUaXRzYWdyZWVuMUB5YWhv
by5jb20wDQYJKoZIhvcNAQEFBQADggEBAEYHQPMY9Grs19MHxUzMwXp1GzCKhGpgyVKJKW86PJlr
HGruoWvx+DLNX75Oj5FC4t8bOUQVQusZGeGSEGegzzfIeOI/jWP1UtIjzvTFDq3tQMNvsgROSCx5
CkpK4nS0kbwLux+zI7BWON97UpMIzEeE05pd7SmNAETuWRsHMP+x6i7hoUp/uad4DwbzNUGIotdK
f8b270icOVgkOKRdLP/Q4r/x8skKSCRz1ZsRdR+7+B/EgksAJj7Ut3yiWoUekEMxCaTdAHPTMD/g
Mh9xL90hfMJyoGemjJswG5g3fAdTP/Lv0I6/nWeH/cLjwwpQgIEjEAVXl7KHuzX5vPD/wqQ=
-----END CERTIFICATE-----"""
        id, valid_until, registered = self.identity_management_service.signon(
            certificate, True)

        self.assertFalse(registered)

        id2, valid_until2, registered2 = self.identity_management_service.signon(
            certificate, True)

        self.assertFalse(registered2)
        self.assertTrue(id == id2)
        self.assertTrue(valid_until == valid_until2)

        user_info_obj = IonObject("UserInfo", {"name": "Foo"})
        self.identity_management_service.create_user_info(id, user_info_obj)

        id3, valid_until3, registered3 = self.identity_management_service.signon(
            certificate, True)

        self.assertTrue(registered3)
        self.assertTrue(id == id3)
        self.assertTrue(valid_until == valid_until3)
# Code example #6
class TestDataProductManagementServiceIntegration(IonIntegrationTestCase):
    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.dpsc_cli = DataProductManagementServiceClient()
        self.rrclient = ResourceRegistryServiceClient()
        self.damsclient = DataAcquisitionManagementServiceClient()
        self.pubsubcli = PubsubManagementServiceClient()
        self.ingestclient = IngestionManagementServiceClient()
        self.process_dispatcher = ProcessDispatcherServiceClient()
        self.dataset_management = DatasetManagementServiceClient()
        self.unsc = UserNotificationServiceClient()
        self.data_retriever = DataRetrieverServiceClient()
        self.identcli = IdentityManagementServiceClient()

        #------------------------------------------
        # Create the environment
        #------------------------------------------

        self.stream_def_id = self.pubsubcli.create_stream_definition(
            name='SBE37_CDM')

        self.process_definitions = {}
        ingestion_worker_definition = ProcessDefinition(
            name='ingestion worker')
        ingestion_worker_definition.executable = {
            'module':
            'ion.processes.data.ingestion.science_granule_ingestion_worker',
            'class': 'ScienceGranuleIngestionWorker'
        }
        process_definition_id = self.process_dispatcher.create_process_definition(
            process_definition=ingestion_worker_definition)
        self.process_definitions['ingestion_worker'] = process_definition_id

        self.pids = []
        self.exchange_points = []
        self.exchange_names = []

        #------------------------------------------------------------------------------------------------
        # First launch the ingestors
        #------------------------------------------------------------------------------------------------
        self.exchange_space = 'science_granule_ingestion'
        self.exchange_point = 'science_data'
        config = DotDict()
        config.process.datastore_name = 'datasets'
        config.process.queue_name = self.exchange_space

        self.exchange_names.append(self.exchange_space)
        self.exchange_points.append(self.exchange_point)

        pid = self.process_dispatcher.schedule_process(
            self.process_definitions['ingestion_worker'], configuration=config)
        log.debug("the ingestion worker process id: %s", pid)
        self.pids.append(pid)

        self.addCleanup(self.cleaning_up)

    def cleaning_up(self):
        for pid in self.pids:
            log.debug("number of pids to be terminated: %s", len(self.pids))
            try:
                self.process_dispatcher.cancel_process(pid)
                log.debug("Terminated the process: %s", pid)
            except:
                log.debug("could not terminate the process id: %s" % pid)
        IngestionManagementIntTest.clean_subscriptions()

        for xn in self.exchange_names:
            xni = self.container.ex_manager.create_xn_queue(xn)
            xni.delete()
        for xp in self.exchange_points:
            xpi = self.container.ex_manager.create_xp(xp)
            xpi.delete()

    def get_datastore(self, dataset_id):
        dataset = self.dataset_management.read_dataset(dataset_id)
        datastore_name = dataset.datastore_name
        datastore = self.container.datastore_manager.get_datastore(
            datastore_name, DataStore.DS_PROFILE.SCIDATA)
        return datastore

    @attr('EXT')
    @attr('PREP')
    def test_create_data_product(self):

        #------------------------------------------------------------------------------------------------
        # create a stream definition for the data from the ctd simulator
        #------------------------------------------------------------------------------------------------
        parameter_dictionary = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict')
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(
            name='Simulated CTD data',
            parameter_dictionary_id=parameter_dictionary._id)
        log.debug("Created stream def id %s" % ctd_stream_def_id)

        #------------------------------------------------------------------------------------------------
        # test creating a new data product w/o a stream definition
        #------------------------------------------------------------------------------------------------

        dp_obj = IonObject(RT.DataProduct,
                           name='DP1',
                           description='some new dp')

        dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 10.0
        dp_obj.geospatial_bounds.geospatial_latitude_limit_south = -10.0
        dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 10.0
        dp_obj.geospatial_bounds.geospatial_longitude_limit_west = -10.0
        dp_obj.ooi_product_name = "PRODNAME"

        #------------------------------------------------------------------------------------------------
        # Create a set of ParameterContext objects to define the parameters in the coverage, add each to the ParameterDictionary
        #------------------------------------------------------------------------------------------------

        dp_id = self.dpsc_cli.create_data_product(
            data_product=dp_obj, stream_definition_id=ctd_stream_def_id)
        # Assert that the data product has an associated stream at this stage
        stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream,
                                                   RT.Stream, True)
        self.assertNotEquals(len(stream_ids), 0)

        # Assert that the data product has an associated stream def at this stage
        stream_ids, _ = self.rrclient.find_objects(dp_id,
                                                   PRED.hasStreamDefinition,
                                                   RT.StreamDefinition, True)
        self.assertNotEquals(len(stream_ids), 0)

        self.dpsc_cli.activate_data_product_persistence(dp_id)

        dp_obj = self.dpsc_cli.read_data_product(dp_id)
        self.assertIsNotNone(dp_obj)
        self.assertEquals(dp_obj.geospatial_point_center.lat, 0.0)
        log.debug('Created data product %s', dp_obj)
        #------------------------------------------------------------------------------------------------
        # test creating a new data product with  a stream definition
        #------------------------------------------------------------------------------------------------
        log.debug('Creating new data product with a stream definition')
        dp_obj = IonObject(RT.DataProduct,
                           name='DP2',
                           description='some new dp')

        dp_id2 = self.dpsc_cli.create_data_product(dp_obj, ctd_stream_def_id)
        self.dpsc_cli.activate_data_product_persistence(dp_id2)
        log.debug('new dp_id = %s' % dp_id2)

        #------------------------------------------------------------------------------------------------
        #make sure data product is associated with stream def
        #------------------------------------------------------------------------------------------------
        streamdefs = []
        streams, _ = self.rrclient.find_objects(dp_id2, PRED.hasStream,
                                                RT.Stream, True)
        for s in streams:
            log.debug("Checking stream %s" % s)
            sdefs, _ = self.rrclient.find_objects(s, PRED.hasStreamDefinition,
                                                  RT.StreamDefinition, True)
            for sd in sdefs:
                log.debug("Checking streamdef %s" % sd)
                streamdefs.append(sd)
        self.assertIn(ctd_stream_def_id, streamdefs)

        group_names = self.dpsc_cli.get_data_product_group_list()
        self.assertIn("PRODNAME", group_names)

        #----------------------------------------------------------------------------------------
        # Create users, then a notification on this data product for each user
        #----------------------------------------------------------------------------------------

        # user_1
        user_1 = UserInfo()
        user_1.name = 'user_1'
        user_1.contact.email = '*****@*****.**'

        # user_2
        user_2 = UserInfo()
        user_2.name = 'user_2'
        user_2.contact.email = '*****@*****.**'
        # user_1 is a complete user (actor identity, credentials, user info); user_2 is only a bare UserInfo resource
        self.subject = "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254"
        actor_identity_obj = IonObject("ActorIdentity", {"name": self.subject})
        actor_id = self.identcli.create_actor_identity(actor_identity_obj)

        user_credentials_obj = IonObject("UserCredentials",
                                         {"name": self.subject})
        self.identcli.register_user_credentials(actor_id, user_credentials_obj)
        user_id_1 = self.identcli.create_user_info(actor_id, user_1)
        user_id_2, _ = self.rrclient.create(user_2)

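        # Each notification request carries two delivery configurations (batched email delivery)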
        delivery_config1a = IonObject(
            OT.DeliveryConfiguration,
            email='*****@*****.**',
            mode=DeliveryModeEnum.EMAIL,
            frequency=NotificationFrequencyEnum.BATCH)
        delivery_config1b = IonObject(
            OT.DeliveryConfiguration,
            email='*****@*****.**',
            mode=DeliveryModeEnum.EMAIL,
            frequency=NotificationFrequencyEnum.BATCH)
        notification_request_1 = NotificationRequest(
            name="notification_1",
            origin=dp_id,
            origin_type="type_1",
            event_type=OT.ResourceLifecycleEvent,
            disabled_by_system=False,
            delivery_configurations=[delivery_config1a, delivery_config1b])

        delivery_config2a = IonObject(
            OT.DeliveryConfiguration,
            email='*****@*****.**',
            mode=DeliveryModeEnum.EMAIL,
            frequency=NotificationFrequencyEnum.BATCH)
        delivery_config2b = IonObject(
            OT.DeliveryConfiguration,
            email='*****@*****.**',
            mode=DeliveryModeEnum.EMAIL,
            frequency=NotificationFrequencyEnum.BATCH)
        notification_request_2 = NotificationRequest(
            name="notification_2",
            origin=dp_id,
            origin_type="type_2",
            disabled_by_system=False,
            event_type=OT.DetectionEvent,
            delivery_configurations=[delivery_config2a, delivery_config2b])

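        # Register one notification per user, then retire the first so the extension
        # later shows one active and one past subscription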
        notification_request_1_id = self.unsc.create_notification(
            notification=notification_request_1, user_id=user_id_1)
        notification_request_2_id = self.unsc.create_notification(
            notification=notification_request_2, user_id=user_id_2)
        self.unsc.delete_notification(notification_request_1_id)

        # test reading a non-existent data product
        log.debug('reading non-existent data product')

        with self.assertRaises(NotFound):
            dp_obj = self.dpsc_cli.read_data_product('some_fake_id')

        # update a data product (tests read also)
        log.debug('Updating data product')
        # first get the existing dp object
        dp_obj = self.dpsc_cli.read_data_product(dp_id)

        # now tweak the object
        dp_obj.description = 'the very first dp'
        dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 20.0
        dp_obj.geospatial_bounds.geospatial_latitude_limit_south = -20.0
        dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 20.0
        dp_obj.geospatial_bounds.geospatial_longitude_limit_west = -20.0
        # now write the dp back to the registry
        update_result = self.dpsc_cli.update_data_product(dp_obj)

        # now get the dp back to see if it was updated
        dp_obj = self.dpsc_cli.read_data_product(dp_id)
        self.assertEquals(dp_obj.description, 'the very first dp')
        self.assertEquals(dp_obj.geospatial_point_center.lat, 0.0)
        log.debug('Updated data product %s', dp_obj)

        #test extension
        extended_product = self.dpsc_cli.get_data_product_extension(dp_id)
        #validate that there is one active and one retired user notification for this data product
        self.assertEqual(
            1, len(extended_product.computed.active_user_subscriptions.value))
        self.assertEqual(
            1, len(extended_product.computed.past_user_subscriptions.value))

        self.assertEqual(dp_id, extended_product._id)
        self.assertEqual(
            ComputedValueAvailability.PROVIDED,
            extended_product.computed.product_download_size_estimated.status)
        self.assertEqual(
            0, extended_product.computed.product_download_size_estimated.value)

        self.assertEqual(ComputedValueAvailability.PROVIDED,
                         extended_product.computed.parameters.status)

        #log.debug("test_create_data_product: parameters %s" % extended_product.computed.parameters.value)

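        # Helper for the commented-out simplejson debug dumps below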
        def ion_object_encoder(obj):
            return obj.__dict__

        #test prepare for create
        data_product_data = self.dpsc_cli.prepare_data_product_support()

        #print simplejson.dumps(data_product_data, default=ion_object_encoder, indent= 2)

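        # With no data product id, the prepare object has an empty _id, lists the two available
        # stream definitions, and has no associated resources yet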
        self.assertEqual(data_product_data._id, "")
        self.assertEqual(data_product_data.type_, OT.DataProductPrepareSupport)
        self.assertEqual(
            len(data_product_data.associations['StreamDefinition'].resources),
            2)
        self.assertEqual(
            len(data_product_data.associations['Dataset'].resources), 0)
        self.assertEqual(
            len(data_product_data.associations['StreamDefinition'].
                associated_resources), 0)
        self.assertEqual(
            len(data_product_data.associations['Dataset'].associated_resources
                ), 0)

        #test prepare for update
        data_product_data = self.dpsc_cli.prepare_data_product_support(dp_id)

        #print simplejson.dumps(data_product_data, default=ion_object_encoder, indent= 2)

        self.assertEqual(data_product_data._id, dp_id)
        self.assertEqual(data_product_data.type_, OT.DataProductPrepareSupport)
        self.assertEqual(
            len(data_product_data.associations['StreamDefinition'].resources),
            2)

        self.assertEqual(
            len(data_product_data.associations['Dataset'].resources), 1)

        self.assertEqual(
            len(data_product_data.associations['StreamDefinition'].
                associated_resources), 1)
        self.assertEqual(
            data_product_data.associations['StreamDefinition'].
            associated_resources[0].s, dp_id)

        self.assertEqual(
            len(data_product_data.associations['Dataset'].associated_resources
                ), 1)
        self.assertEqual(
            data_product_data.associations['Dataset'].associated_resources[0].
            s, dp_id)

        # now 'delete' the data product
        log.debug("deleting data product: %s" % dp_id)
        self.dpsc_cli.delete_data_product(dp_id)

        # Assert that there are no associated streams leftover after deleting the data product
        stream_ids, assoc_ids = self.rrclient.find_objects(
            dp_id, PRED.hasStream, RT.Stream, True)
        self.assertEquals(len(stream_ids), 0)
        self.assertEquals(len(assoc_ids), 0)

        self.dpsc_cli.force_delete_data_product(dp_id)

        # now try to get the deleted dp object
        with self.assertRaises(NotFound):
            dp_obj = self.dpsc_cli.read_data_product(dp_id)

        # Get the events corresponding to the data product
        ret = self.unsc.get_recent_events(resource_id=dp_id)
        events = ret.value

        for event in events:
            log.debug("event time: %s" % event.ts_created)

        self.assertTrue(len(events) > 0)

    def test_data_product_stream_def(self):
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(
            name='Simulated CTD data', parameter_dictionary_id=pdict_id)

        dp_obj = IonObject(RT.DataProduct,
                           name='DP1',
                           description='some new dp')
        dp_id = self.dpsc_cli.create_data_product(
            data_product=dp_obj, stream_definition_id=ctd_stream_def_id)

        stream_def_id = self.dpsc_cli.get_data_product_stream_definition(dp_id)
        self.assertEquals(ctd_stream_def_id, stream_def_id)

    def test_derived_data_product(self):
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(
            name='ctd parsed', parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsubcli.delete_stream_definition,
                        ctd_stream_def_id)

        dp = DataProduct(name='Instrument DP')
        dp_id = self.dpsc_cli.create_data_product(
            dp, stream_definition_id=ctd_stream_def_id)
        self.addCleanup(self.dpsc_cli.force_delete_data_product, dp_id)

        self.dpsc_cli.activate_data_product_persistence(dp_id)
        self.addCleanup(self.dpsc_cli.suspend_data_product_persistence, dp_id)

        dataset_ids, _ = self.rrclient.find_objects(subject=dp_id,
                                                    predicate=PRED.hasDataset,
                                                    id_only=True)
        if not dataset_ids:
            raise NotFound("Data Product %s dataset does not exist" %
                           str(dp_id))
        dataset_id = dataset_ids[0]

        # Make the derived data product
        simple_stream_def_id = self.pubsubcli.create_stream_definition(
            name='TEMPWAT stream def',
            parameter_dictionary_id=pdict_id,
            available_fields=['time', 'temp'])
        tempwat_dp = DataProduct(name='TEMPWAT',
                                 category=DataProductTypeEnum.DERIVED)
        tempwat_dp_id = self.dpsc_cli.create_data_product(
            tempwat_dp,
            stream_definition_id=simple_stream_def_id,
            parent_data_product_id=dp_id)
        self.addCleanup(self.dpsc_cli.delete_data_product, tempwat_dp_id)
        # Check that the streams associated with the data product are persisted
        stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream,
                                                   RT.Stream, True)
        for stream_id in stream_ids:
            self.assertTrue(self.ingestclient.is_persisted(stream_id))

        stream_id = stream_ids[0]
        route = self.pubsubcli.read_stream_route(stream_id=stream_id)

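        # Publish one granule with time, temp and pressure; the derived TEMPWAT product
        # should only expose the fields in its reduced stream definition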
        rdt = RecordDictionaryTool(stream_definition_id=ctd_stream_def_id)
        rdt['time'] = np.arange(20)
        rdt['temp'] = np.arange(20)
        rdt['pressure'] = np.arange(20)

        publisher = StandaloneStreamPublisher(stream_id, route)

        dataset_modified = Event()

        def cb(*args, **kwargs):
            dataset_modified.set()

        es = EventSubscriber(event_type=OT.DatasetModified,
                             callback=cb,
                             origin=dataset_id,
                             auto_delete=True)
        es.start()
        self.addCleanup(es.stop)

        publisher.publish(rdt.to_granule())

        self.assertTrue(dataset_modified.wait(30))

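        # Retrieve the derived dataset using the reduced stream definition as the delivery format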
        tempwat_dataset_ids, _ = self.rrclient.find_objects(tempwat_dp_id,
                                                            PRED.hasDataset,
                                                            id_only=True)
        tempwat_dataset_id = tempwat_dataset_ids[0]
        granule = self.data_retriever.retrieve(
            tempwat_dataset_id, delivery_format=simple_stream_def_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        np.testing.assert_array_equal(rdt['time'], np.arange(20))
        self.assertEquals(set(rdt.fields), set(['time', 'temp']))

    def test_activate_suspend_data_product(self):

        #------------------------------------------------------------------------------------------------
        # create a stream definition for the data from the ctd simulator
        #------------------------------------------------------------------------------------------------
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(
            name='Simulated CTD data', parameter_dictionary_id=pdict_id)
        log.debug("Created stream def id %s" % ctd_stream_def_id)

        #------------------------------------------------------------------------------------------------
        # test creating a new data product w/o a stream definition
        #------------------------------------------------------------------------------------------------
        # Construct temporal and spatial Coordinate Reference System objects

        dp_obj = IonObject(RT.DataProduct,
                           name='DP1',
                           description='some new dp')

        log.debug("Created an IonObject for a data product: %s" % dp_obj)

        #------------------------------------------------------------------------------------------------
        # Create a set of ParameterContext objects to define the parameters in the coverage, add each to the ParameterDictionary
        #------------------------------------------------------------------------------------------------

        dp_id = self.dpsc_cli.create_data_product(
            data_product=dp_obj, stream_definition_id=ctd_stream_def_id)

        #------------------------------------------------------------------------------------------------
        # Subscribe to persist events
        #------------------------------------------------------------------------------------------------
        queue = gevent.queue.Queue()

        def info_event_received(message, headers):
            queue.put(message)

        es = EventSubscriber(event_type=OT.InformationContentStatusEvent,
                             callback=info_event_received,
                             origin=dp_id,
                             auto_delete=True)
        es.start()
        self.addCleanup(es.stop)

        #------------------------------------------------------------------------------------------------
        # test activate and suspend data product persistence
        #------------------------------------------------------------------------------------------------
        self.dpsc_cli.activate_data_product_persistence(dp_id)

        dp_obj = self.dpsc_cli.read_data_product(dp_id)
        self.assertIsNotNone(dp_obj)

        dataset_ids, _ = self.rrclient.find_objects(subject=dp_id,
                                                    predicate=PRED.hasDataset,
                                                    id_only=True)
        if not dataset_ids:
            raise NotFound("Data Product %s dataset does not exist" %
                           str(dp_id))
        dataset_id = dataset_ids[0]

        # Check that the streams associated with the data product are persisted
        stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream,
                                                   RT.Stream, True)
        for stream_id in stream_ids:
            self.assertTrue(self.ingestclient.is_persisted(stream_id))

        stream_id = stream_ids[0]
        route = self.pubsubcli.read_stream_route(stream_id=stream_id)

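        # Publish 20 samples while persistence is active; the DatasetModified event confirms ingestion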
        rdt = RecordDictionaryTool(stream_definition_id=ctd_stream_def_id)
        rdt['time'] = np.arange(20)
        rdt['temp'] = np.arange(20)

        publisher = StandaloneStreamPublisher(stream_id, route)

        dataset_modified = Event()

        def cb(*args, **kwargs):
            dataset_modified.set()

        es = EventSubscriber(event_type=OT.DatasetModified,
                             callback=cb,
                             origin=dataset_id,
                             auto_delete=True)
        es.start()
        self.addCleanup(es.stop)

        publisher.publish(rdt.to_granule())

        self.assertTrue(dataset_modified.wait(30))

        #--------------------------------------------------------------------------------
        # Now get the data in one chunk using an RPC call to the data retriever's retrieve operation
        #--------------------------------------------------------------------------------

        replay_data = self.data_retriever.retrieve(dataset_ids[0])
        self.assertIsInstance(replay_data, Granule)

        log.debug(
            "The data retriever was able to replay the dataset attached to the data product "
            "we wanted to be persisted; otherwise we could not have retrieved its dataset. "
            "This demonstrates that L4-CI-SA-RQ-267 is satisfied: "
            "'Data product management shall persist data products'"
        )

        data_product_object = self.rrclient.read(dp_id)
        self.assertEquals(data_product_object.name, 'DP1')
        self.assertEquals(data_product_object.description, 'some new dp')

        log.debug(
            "Towards L4-CI-SA-RQ-308: 'Data product management shall persist data product metadata'. "
            "Attributes passed in create for the data product obj, name='%s', description='%s', match those of the "
            "object read from the resource registry, name='%s', desc='%s'" %
            (dp_obj.name, dp_obj.description, data_product_object.name,
             data_product_object.description))

        #------------------------------------------------------------------------------------------------
        # test suspend data product persistence
        #------------------------------------------------------------------------------------------------
        self.dpsc_cli.suspend_data_product_persistence(dp_id)

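        # While persistence is suspended, published granules must not modify the dataset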
        dataset_modified.clear()

        rdt['time'] = np.arange(20, 40)

        publisher.publish(rdt.to_granule())
        self.assertFalse(dataset_modified.wait(2))

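        # Re-activate persistence and verify the new granule is ingested and retrievable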
        self.dpsc_cli.activate_data_product_persistence(dp_id)
        dataset_modified.clear()

        publisher.publish(rdt.to_granule())
        self.assertTrue(dataset_modified.wait(30))

        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        np.testing.assert_array_almost_equal(rdt['time'], np.arange(40))

        dataset_ids, _ = self.rrclient.find_objects(dp_id,
                                                    PRED.hasDataset,
                                                    id_only=True)
        self.assertEquals(len(dataset_ids), 1)

        self.dpsc_cli.suspend_data_product_persistence(dp_id)
        self.dpsc_cli.force_delete_data_product(dp_id)
        # now try to get the deleted dp object

        with self.assertRaises(NotFound):
            dp_obj = self.rrclient.read(dp_id)

        info_event_counter = 0
        runtime = 0
        starttime = time.time()
        caught_events = []

        # Check that the four InformationContentStatusEvents were received
        while info_event_counter < 4 and runtime < 60:
            a = queue.get(timeout=60)
            caught_events.append(a)
            info_event_counter += 1
            runtime = time.time() - starttime

        self.assertEquals(info_event_counter, 4)