def clean_subscriptions():
    ingestion_management = IngestionManagementServiceClient()
    pubsub = PubsubManagementServiceClient()
    rr = ResourceRegistryServiceClient()
    ingestion_config_ids = ingestion_management.list_ingestion_configurations(id_only=True)
    for ic in ingestion_config_ids:
        assocs = rr.find_associations(subject=ic, predicate=PRED.hasSubscription, id_only=False)
        for assoc in assocs:
            rr.delete_association(assoc)
            try:
                pubsub.deactivate_subscription(assoc.o)
            except Exception:
                # the subscription may never have been activated
                pass
            pubsub.delete_subscription(assoc.o)
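
# A minimal usage sketch, assuming a running container with the r2deploy
# services loaded (as in the setUp below): call clean_subscriptions() from an
# integration-test tearDown to drop every subscription attached to an
# ingestion configuration. The deactivation failure is tolerated because a
# subscription that was never activated presumably raises on deactivate.
#
#     def tearDown(self):
#         clean_subscriptions()
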
class TestObservatoryManagementServiceIntegration(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.RR = ResourceRegistryServiceClient(node=self.container.node)
        self.RR2 = EnhancedResourceRegistryClient(self.RR)
        self.OMS = ObservatoryManagementServiceClient(node=self.container.node)
        self.org_management_service = OrgManagementServiceClient(node=self.container.node)
        self.IMS = InstrumentManagementServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(node=self.container.node)
        self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()

        self.event_publisher = EventPublisher()

    def destroy(self, resource_ids):
        self.OMS.force_delete_observatory(resource_ids.observatory_id)
        self.OMS.force_delete_subsite(resource_ids.subsite_id)
        self.OMS.force_delete_subsite(resource_ids.subsite2_id)
        self.OMS.force_delete_subsite(resource_ids.subsiteb_id)
        self.OMS.force_delete_subsite(resource_ids.subsitez_id)
        self.OMS.force_delete_platform_site(resource_ids.platform_site_id)
        self.OMS.force_delete_platform_site(resource_ids.platform_siteb_id)
        self.OMS.force_delete_platform_site(resource_ids.platform_siteb2_id)
        self.OMS.force_delete_platform_site(resource_ids.platform_site3_id)
        self.OMS.force_delete_instrument_site(resource_ids.instrument_site_id)
        self.OMS.force_delete_instrument_site(resource_ids.instrument_site2_id)
        self.OMS.force_delete_instrument_site(resource_ids.instrument_siteb3_id)
        self.OMS.force_delete_instrument_site(resource_ids.instrument_site4_id)

    def test_observatory_management(self):
        resources = self._make_associations()

        self._do_test_find_related_sites(resources)

        self._do_test_get_sites_devices_status(resources)

        self._do_test_find_site_data_products(resources)

        self._do_test_find_related_frames_of_reference(resources)

        self._do_test_create_geospatial_point_center(resources)

        self._do_test_find_observatory_org(resources)

        self.destroy(resources)

    def _do_test_find_related_sites(self, resources):

        site_resources, site_children = self.OMS.find_related_sites(resources.org_id)


        # note: the org itself is not expected among the site resources
        self.assertIn(resources.observatory_id, site_resources)
        self.assertIn(resources.subsite_id, site_resources)
        self.assertIn(resources.subsite2_id, site_resources)
        self.assertIn(resources.platform_site_id, site_resources)
        self.assertIn(resources.instrument_site_id, site_resources)
        self.assertEquals(len(site_resources), 13)

        self.assertEquals(site_resources[resources.observatory_id].type_, RT.Observatory)

        self.assertIn(resources.org_id, site_children)
        self.assertIn(resources.observatory_id, site_children)
        self.assertIn(resources.subsite_id, site_children)
        self.assertIn(resources.subsite2_id, site_children)
        self.assertIn(resources.platform_site_id, site_children)
        self.assertNotIn(resources.instrument_site_id, site_children)
        self.assertEquals(len(site_children), 9)

        self.assertIsInstance(site_children[resources.subsite_id], list)
        self.assertEquals(len(site_children[resources.subsite_id]), 2)

    def _do_test_get_sites_devices_status(self, resources):

        result_dict = self.OMS.get_sites_devices_status(resources.org_id)

        site_resources = result_dict.get("site_resources", None)
        site_children = result_dict.get("site_children", None)

        self.assertEquals(len(site_resources), 14)
        self.assertEquals(len(site_children), 9)

        result_dict = self.OMS.get_sites_devices_status(resources.org_id, include_devices=True, include_status=True)

        log.debug("RESULT DICT: %s", result_dict.keys())
        site_resources = result_dict.get("site_resources", None)
        site_children = result_dict.get("site_children", None)
        site_status = result_dict.get("site_status", None)

        self.assertEquals(len(site_resources), 14)
        self.assertEquals(len(site_children), 9)


        result_dict = self.OMS.get_sites_devices_status(resources.observatory_id, include_devices=True, include_status=True)

        site_resources = result_dict.get("site_resources")
        site_children = result_dict.get("site_children")
        site_status = result_dict.get("site_status")

        self.assertEquals(len(site_resources), 13)
        self.assertEquals(len(site_children), 8)


    def _do_test_find_site_data_products(self, resources):
        res_dict = self.OMS.find_site_data_products(resources.org_id)


        self.assertIsNone(res_dict['data_product_resources'])
        self.assertIn(resources.platform_device_id, res_dict['device_data_products'])
        self.assertIn(resources.instrument_device_id, res_dict['device_data_products'])

    def _do_test_find_related_frames_of_reference(self, stuff):
        # finding subordinates gives a dict of obj lists, convert objs to ids
        def idify(adict):
            ids = {}
            for k, v in adict.iteritems():
                ids[k] = []
                for obj in v:
                    ids[k].append(obj._id)

            return ids

        # a short version of the function we're testing, with id-ify
        def short(resource_id, output_types):
            ret = self.OMS.find_related_frames_of_reference(resource_id,
                                                            output_types)
            return idify(ret)
            
            
        #use the associations already created by the caller via _make_associations()
        #basic traversal of tree from instrument to platform
        ids = short(stuff.instrument_site_id, [RT.PlatformSite])
        self.assertIn(RT.PlatformSite, ids)
        self.assertIn(stuff.platform_site_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_siteb_id, ids[RT.PlatformSite])
        self.assertNotIn(stuff.platform_siteb2_id, ids[RT.PlatformSite])

        #since this is the first search, just make sure the input inst_id got stripped
        if RT.InstrumentSite in ids:
            self.assertNotIn(stuff.instrument_site_id, ids[RT.InstrumentSite])

        #basic traversal of tree from platform to instrument
        ids = short(stuff.platform_siteb_id, [RT.InstrumentSite])
        self.assertIn(RT.InstrumentSite, ids)
        self.assertIn(stuff.instrument_site_id, ids[RT.InstrumentSite])
        self.assertNotIn(stuff.instrument_site2_id, ids[RT.InstrumentSite])


        #full traversal of tree from observatory down to instrument
        ids = short(stuff.observatory_id, [RT.InstrumentSite])
        self.assertIn(RT.InstrumentSite, ids)
        self.assertIn(stuff.instrument_site_id, ids[RT.InstrumentSite])


        #full traversal of tree from instrument to observatory
        ids = short(stuff.instrument_site_id, [RT.Observatory])
        self.assertIn(RT.Observatory, ids)
        self.assertIn(stuff.observatory_id, ids[RT.Observatory])


        #partial traversal, only down to platform
        ids = short(stuff.observatory_id, [RT.Subsite, RT.PlatformSite])
        self.assertIn(RT.PlatformSite, ids)
        self.assertIn(RT.Subsite, ids)
        self.assertIn(stuff.platform_site_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_siteb_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_siteb2_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_site3_id, ids[RT.PlatformSite])
        self.assertIn(stuff.subsite_id, ids[RT.Subsite])
        self.assertIn(stuff.subsite2_id, ids[RT.Subsite])
        self.assertIn(stuff.subsitez_id, ids[RT.Subsite])
        self.assertIn(stuff.subsiteb_id, ids[RT.Subsite])
        self.assertNotIn(RT.InstrumentSite, ids)


        #partial traversal, up from instrument but stopping short of the observatory
        ids = short(stuff.instrument_site_id, [RT.Subsite, RT.PlatformSite])
        self.assertIn(RT.PlatformSite, ids)
        self.assertIn(RT.Subsite, ids)
        self.assertIn(stuff.platform_siteb_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_site_id, ids[RT.PlatformSite])
        self.assertIn(stuff.subsite_id, ids[RT.Subsite])
        self.assertIn(stuff.subsiteb_id, ids[RT.Subsite])
        self.assertNotIn(stuff.subsite2_id, ids[RT.Subsite])
        self.assertNotIn(stuff.subsitez_id, ids[RT.Subsite])
        self.assertNotIn(stuff.platform_siteb2_id, ids[RT.PlatformSite])
        self.assertNotIn(RT.Observatory, ids)

        #teardown of these resources is handled by the caller (self.destroy(resources))

    def _make_associations(self):
        """
        create one of each resource and association used by OMS
        to guard against problems in ion-definitions
        """


        """
        the tree we're creating (observatory, sites, platforms, instruments)

        rows are lettered, columns numbered.
         - first row is implied a
         - first column is implied 1
         - site Z, just because

        O--Sz
        |
        S--S2--P3--I4
        |
        Sb-Pb2-Ib3
        |
        P--I2 <- PlatformDevice, InstrumentDevice2
        |
        Pb <- PlatformDevice b
        |
        I <- InstrumentDevice

        """

        org_id = self.OMS.create_marine_facility(any_old(RT.Org))

        def create_under_org(resource_type, extra_fields=None):
            obj = any_old(resource_type, extra_fields)

            if RT.InstrumentDevice == resource_type:
                resource_id = self.IMS.create_instrument_device(obj)
            else:
                resource_id, _ = self.RR.create(obj)

            self.OMS.assign_resource_to_observatory_org(resource_id=resource_id, org_id=org_id)
            return resource_id

        #stuff we control
        observatory_id          = create_under_org(RT.Observatory)
        subsite_id              = create_under_org(RT.Subsite)
        subsite2_id             = create_under_org(RT.Subsite)
        subsiteb_id             = create_under_org(RT.Subsite)
        subsitez_id             = create_under_org(RT.Subsite)
        platform_site_id        = create_under_org(RT.PlatformSite)
        platform_siteb_id       = create_under_org(RT.PlatformSite)
        platform_siteb2_id      = create_under_org(RT.PlatformSite)
        platform_site3_id       = create_under_org(RT.PlatformSite)
        instrument_site_id      = create_under_org(RT.InstrumentSite)
        instrument_site2_id     = create_under_org(RT.InstrumentSite)
        instrument_siteb3_id    = create_under_org(RT.InstrumentSite)
        instrument_site4_id     = create_under_org(RT.InstrumentSite)

        #stuff we associate to
        instrument_device_id    = create_under_org(RT.InstrumentDevice)
        instrument_device2_id   = create_under_org(RT.InstrumentDevice)
        platform_device_id      = create_under_org(RT.PlatformDevice)
        platform_deviceb_id     = create_under_org(RT.PlatformDevice)
        instrument_model_id, _  = self.RR.create(any_old(RT.InstrumentModel))
        platform_model_id, _    = self.RR.create(any_old(RT.PlatformModel))
        deployment_id, _        = self.RR.create(any_old(RT.Deployment))

        #observatory
        self.RR.create_association(observatory_id, PRED.hasSite, subsite_id)
        self.RR.create_association(observatory_id, PRED.hasSite, subsitez_id)

        #site
        self.RR.create_association(subsite_id, PRED.hasSite, subsite2_id)
        self.RR.create_association(subsite_id, PRED.hasSite, subsiteb_id)
        self.RR.create_association(subsite2_id, PRED.hasSite, platform_site3_id)
        self.RR.create_association(subsiteb_id, PRED.hasSite, platform_siteb2_id)
        self.RR.create_association(subsiteb_id, PRED.hasSite, platform_site_id)
        
        #platform_site(s)
        self.RR.create_association(platform_site3_id, PRED.hasSite, instrument_site4_id)
        self.RR.create_association(platform_siteb2_id, PRED.hasSite, instrument_siteb3_id)
        self.RR.create_association(platform_site_id, PRED.hasSite, instrument_site2_id)
        self.RR.create_association(platform_site_id, PRED.hasSite, platform_siteb_id)
        self.RR.create_association(platform_siteb_id, PRED.hasSite, instrument_site_id)

        self.RR.create_association(platform_siteb_id, PRED.hasDevice, platform_deviceb_id)
        #test network parent link
        self.OMS.assign_device_to_network_parent(platform_device_id, platform_deviceb_id)
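        #the resulting hasNetworkParent association is verified in test_observatory_org_extended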

        self.RR.create_association(platform_site_id, PRED.hasModel, platform_model_id)
        self.RR.create_association(platform_site_id, PRED.hasDevice, platform_device_id)
        self.RR.create_association(platform_site_id, PRED.hasDeployment, deployment_id)

        #instrument_site(s)
        self.RR.create_association(instrument_site_id, PRED.hasModel, instrument_model_id)
        self.RR.create_association(instrument_site_id, PRED.hasDevice, instrument_device_id)
        self.RR.create_association(instrument_site_id, PRED.hasDeployment, deployment_id)

        self.RR.create_association(instrument_site2_id, PRED.hasDevice, instrument_device2_id)

        #platform_device
        self.RR.create_association(platform_device_id, PRED.hasModel, platform_model_id)

        #instrument_device
        self.RR.create_association(instrument_device_id, PRED.hasModel, instrument_model_id)
        self.RR.create_association(instrument_device2_id, PRED.hasModel, instrument_model_id)

        ret = DotDict()
        ret.org_id                = org_id
        ret.observatory_id        = observatory_id
        ret.subsite_id            = subsite_id
        ret.subsite2_id           = subsite2_id
        ret.subsiteb_id           = subsiteb_id
        ret.subsitez_id           = subsitez_id
        ret.platform_site_id      = platform_site_id
        ret.platform_siteb_id     = platform_siteb_id
        ret.platform_siteb2_id    = platform_siteb2_id
        ret.platform_site3_id     = platform_site3_id
        ret.instrument_site_id    = instrument_site_id
        ret.instrument_site2_id   = instrument_site2_id
        ret.instrument_siteb3_id  = instrument_siteb3_id
        ret.instrument_site4_id   = instrument_site4_id

        ret.instrument_device_id  = instrument_device_id
        ret.instrument_device2_id = instrument_device2_id
        ret.platform_device_id    = platform_device_id
        ret.platform_deviceb_id   = platform_deviceb_id
        ret.instrument_model_id   = instrument_model_id
        ret.platform_model_id     = platform_model_id
        ret.deployment_id         = deployment_id

        return ret

    #@unittest.skip("targeting")
    def test_create_observatory(self):
        observatory_obj = IonObject(RT.Observatory,
                                        name='TestFacility',
                                        description='some new mf')
        observatory_id = self.OMS.create_observatory(observatory_obj)
        self.OMS.force_delete_observatory(observatory_id)

    #@unittest.skip("targeting")
    def _do_test_create_geospatial_point_center(self, resources):
        platformsite_obj = IonObject(RT.PlatformSite,
                                        name='TestPlatformSite',
                                        description='some new TestPlatformSite')
        geo_index_obj = IonObject(OT.GeospatialBounds)
        geo_index_obj.geospatial_latitude_limit_north = 20.0
        geo_index_obj.geospatial_latitude_limit_south = 10.0
        geo_index_obj.geospatial_longitude_limit_east = 15.0
        geo_index_obj.geospatial_longitude_limit_west = 20.0
        platformsite_obj.constraint_list = [geo_index_obj]

        platformsite_id = self.OMS.create_platform_site(platformsite_obj)

        # now read the platform site back to see if the center was computed
        platformsite_obj = self.OMS.read_platform_site(platformsite_id)
        self.assertEquals('some new TestPlatformSite', platformsite_obj.description)
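        # the computed center latitude is the midpoint of the N/S limits:
        # (20.0 + 10.0) / 2 = 15.0 here, and (30.0 + 20.0) / 2 = 25.0 after the update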
        self.assertAlmostEqual(15.0, platformsite_obj.geospatial_point_center.lat, places=1)


        #now adjust a few params
        platformsite_obj.description = 'some old TestPlatformSite'
        geo_index_obj = IonObject(OT.GeospatialBounds)
        geo_index_obj.geospatial_latitude_limit_north = 30.0
        geo_index_obj.geospatial_latitude_limit_south = 20.0
        platformsite_obj.constraint_list = [geo_index_obj]
        update_result = self.OMS.update_platform_site(platformsite_obj)

        # now read the platform site back to see if it was updated
        platformsite_obj = self.OMS.read_platform_site(platformsite_id)
        self.assertEquals('some old TestPlatformSite', platformsite_obj.description)
        self.assertAlmostEqual(25.0, platformsite_obj.geospatial_point_center.lat, places=1)

        self.OMS.force_delete_platform_site(platformsite_id)


    #@unittest.skip("targeting")
    def _do_test_find_observatory_org(self, resources):
        log.debug("Make TestOrg")
        org_obj = IonObject(RT.Org,
                            name='TestOrg',
                            description='some new mf org')

        org_id = self.OMS.create_marine_facility(org_obj)

        log.debug("Make Observatory")
        observatory_obj = IonObject(RT.Observatory,
                                        name='TestObservatory',
                                        description='some new obs')
        observatory_id = self.OMS.create_observatory(observatory_obj)

        log.debug("assign observatory to org")
        self.OMS.assign_resource_to_observatory_org(observatory_id, org_id)


        log.debug("verify assigment")
        org_objs = self.OMS.find_org_by_observatory(observatory_id)
        self.assertEqual(1, len(org_objs))
        self.assertEqual(org_id, org_objs[0]._id)
        log.debug("org_id=<" + org_id + ">")

        log.debug("create a subsite with parent Observatory")
        subsite_obj = IonObject(RT.Subsite,
                                name='TestSubsite',
                                description='sample subsite')
        subsite_id = self.OMS.create_subsite(subsite_obj, observatory_id)
        self.assertIsNotNone(subsite_id, "Subsite not created.")

        log.debug("verify that Subsite is linked to Observatory")
        mf_subsite_assoc = self.RR.get_association(observatory_id, PRED.hasSite, subsite_id)
        self.assertIsNotNone(mf_subsite_assoc, "Subsite not connected to Observatory.")


        log.debug("add the Subsite as a resource of this Observatory")
        self.OMS.assign_resource_to_observatory_org(resource_id=subsite_id, org_id=org_id)
        log.debug("verify that Subsite is linked to Org")
        org_subsite_assoc = self.RR.get_association(org_id, PRED.hasResource, subsite_id)
        self.assertIsNotNone(org_subsite_assoc, "Subsite not connected as resource to Org.")


        log.debug("create a logical platform with parent Subsite")
        platform_site_obj = IonObject(RT.PlatformSite,
                                      name='TestPlatformSite',
                                      description='sample logical platform')
        platform_site_id = self.OMS.create_platform_site(platform_site_obj, subsite_id)
        self.assertIsNotNone(platform_site_id, "PlatformSite not created.")

        log.debug("verify that PlatformSite is linked to Site")
        site_lp_assoc = self.RR.get_association(subsite_id, PRED.hasSite, platform_site_id)
        self.assertIsNotNone(site_lp_assoc, "PlatformSite not connected to Site.")


        log.debug("add the PlatformSite as a resource of this Observatory")
        self.OMS.assign_resource_to_observatory_org(resource_id=platform_site_id, org_id=org_id)
        log.debug("verify that PlatformSite is linked to Org")
        org_lp_assoc = self.RR.get_association(org_id, PRED.hasResource, platform_site_id)
        self.assertIsNotNone(org_lp_assoc, "PlatformSite not connected as resource to Org.")



        log.debug("create a logical instrument with parent logical platform")
        instrument_site_obj = IonObject(RT.InstrumentSite,
                                        name='TestInstrumentSite',
                                        description='sample logical instrument')
        instrument_site_id = self.OMS.create_instrument_site(instrument_site_obj, platform_site_id)
        self.assertIsNotNone(instrument_site_id, "InstrumentSite not created.")


        log.debug("verify that InstrumentSite is linked to PlatformSite")
        li_lp_assoc = self.RR.get_association(platform_site_id, PRED.hasSite, instrument_site_id)
        self.assertIsNotNone(li_lp_assoc, "InstrumentSite not connected to PlatformSite.")


        log.debug("add the InstrumentSite as a resource of this Observatory")
        self.OMS.assign_resource_to_observatory_org(resource_id=instrument_site_id, org_id=org_id)
        log.debug("verify that InstrumentSite is linked to Org")
        org_li_assoc = self.RR.get_association(org_id, PRED.hasResource, instrument_site_id)
        self.assertIsNotNone(org_li_assoc, "InstrumentSite not connected as resource to Org.")


        log.debug("remove the InstrumentSite as a resource of this Observatory")
        self.OMS.unassign_resource_from_observatory_org(instrument_site_id, org_id)
        log.debug("verify that InstrumentSite is linked to Org")
        assocs,_ = self.RR.find_objects(org_id, PRED.hasResource, RT.InstrumentSite, id_only=True )
        self.assertEqual(0, len(assocs))

        log.debug("remove the InstrumentSite, association should drop automatically")
        self.OMS.delete_instrument_site(instrument_site_id)
        assocs, _ = self.RR.find_objects(platform_site_id, PRED.hasSite, RT.InstrumentSite, id_only=True )
        self.assertEqual(0, len(assocs))


        log.debug("remove the PlatformSite as a resource of this Observatory")
        self.OMS.unassign_resource_from_observatory_org(platform_site_id, org_id)
        log.debug("verify that PlatformSite is linked to Org")
        assocs,_ = self.RR.find_objects(org_id, PRED.hasResource, RT.PlatformSite, id_only=True )
        self.assertEqual(0, len(assocs))


        log.debug("remove the Site as a resource of this Observatory")
        self.OMS.unassign_resource_from_observatory_org(subsite_id, org_id)
        log.debug("verify that Site is linked to Org")
        assocs,_ = self.RR.find_objects(org_id, PRED.hasResource, RT.Subsite, id_only=True )
        self.assertEqual(0, len(assocs))

        self.RR.delete(org_id)
        self.OMS.force_delete_observatory(observatory_id)
        self.OMS.force_delete_subsite(subsite_id)
        self.OMS.force_delete_platform_site(platform_site_id)
        self.OMS.force_delete_instrument_site(instrument_site_id)


    @attr('EXT')
    def test_observatory_extensions(self):
        obs_id = self.RR2.create(any_old(RT.Observatory))
        pss_id = self.RR2.create(any_old(RT.PlatformSite, dict(alt_resource_type="StationSite")))
        pas_id = self.RR2.create(any_old(RT.PlatformSite, dict(alt_resource_type="PlatformAssemblySite")))
        pcs_id = self.RR2.create(any_old(RT.PlatformSite, dict(alt_resource_type="PlatformComponentSite")))
        ins_id = self.RR2.create(any_old(RT.InstrumentSite))

        obs_obj = self.RR2.read(obs_id)
        pss_obj = self.RR2.read(pss_id)
        pas_obj = self.RR2.read(pas_id)
        pcs_obj = self.RR2.read(pcs_id)
        ins_obj = self.RR2.read(ins_id)

        self.RR2.create_association(obs_id, PRED.hasSite, pss_id)
        self.RR2.create_association(pss_id, PRED.hasSite, pas_id)
        self.RR2.create_association(pas_id, PRED.hasSite, pcs_id)
        self.RR2.create_association(pcs_id, PRED.hasSite, ins_id)

        extended_obs = self.OMS.get_observatory_site_extension(obs_id, user_id=12345)
        self.assertEqual([pss_obj], extended_obs.computed.platform_station_sites.value)
        self.assertEqual(ComputedValueAvailability.PROVIDED, extended_obs.computed.platform_station_sites.status)
        self.assertEqual([pas_obj], extended_obs.computed.platform_assembly_sites.value)
        self.assertEqual(ComputedValueAvailability.PROVIDED, extended_obs.computed.platform_assembly_sites.status)
        self.assertEqual([pcs_obj], extended_obs.computed.platform_component_sites.value)
        self.assertEqual(ComputedValueAvailability.PROVIDED, extended_obs.computed.platform_component_sites.status)
        self.assertEqual([ins_obj], extended_obs.computed.instrument_sites.value)

        extended_pss = self.OMS.get_observatory_site_extension(pss_id, user_id=12345)
        self.assertEqual([pas_obj], extended_pss.computed.platform_assembly_sites.value)
        self.assertEqual([pcs_obj], extended_pss.computed.platform_component_sites.value)
        self.assertEqual([ins_obj], extended_pss.computed.instrument_sites.value)

        extended_pas = self.OMS.get_observatory_site_extension(pas_id, user_id=12345)
        self.assertEqual([pcs_obj], extended_pas.computed.platform_component_sites.value)
        self.assertEqual([ins_obj], extended_pas.computed.instrument_sites.value)

        extended_pcs = self.OMS.get_platform_component_site_extension(pcs_id, user_id=12345)
        self.assertEqual([ins_obj], extended_pcs.computed.instrument_sites.value)


    #@unittest.skip("in development...")
    @attr('EXT')
    @attr('EXT1')
    def test_observatory_org_extended(self):

        stuff = self._make_associations()

        parsed_pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict',
                                                                                    id_only=True)

        parsed_stream_def_id = self.pubsubcli.create_stream_definition(name='parsed',
                                                                       parameter_dictionary_id=parsed_pdict_id)
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()
        dp_obj = IonObject(RT.DataProduct,
            name='the parsed data',
            description='ctd stream test',
            temporal_domain = tdom,
            spatial_domain = sdom)


        data_product_id1 = self.dpclient.create_data_product(data_product=dp_obj,
                                                             stream_definition_id=parsed_stream_def_id)
        self.damsclient.assign_data_product(input_resource_id=stuff.instrument_device_id,
                                            data_product_id=data_product_id1)


        #Create a user to be used as a regular member
        member_actor_obj = IonObject(RT.ActorIdentity, name='org member actor')
        member_actor_id,_ = self.RR.create(member_actor_obj)
        assert(member_actor_id)
        member_actor_header = get_actor_header(member_actor_id)


        member_user_obj = IonObject(RT.UserInfo, name='org member user')
        member_user_id,_ = self.RR.create(member_user_obj)
        assert(member_user_id)

        self.RR.create_association(subject=member_actor_id, predicate=PRED.hasInfo, object=member_user_id)


        #Build the Service Agreement Proposal to enroll a user actor
        sap = IonObject(OT.EnrollmentProposal, consumer=member_actor_id, provider=stuff.org_id)

        sap_response = self.org_management_service.negotiate(sap, headers=member_actor_header)
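        #the negotiation leaves one open request on the org, asserted below via extended_org.open_requests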

        #enroll the member without using negotiation
        self.org_management_service.enroll_member(org_id=stuff.org_id, actor_id=member_actor_id)

        #--------------------------------------------------------------------------------
        # Get the extended Site (PlatformSite)
        #--------------------------------------------------------------------------------


        try:
            extended_site = self.OMS.get_site_extension(stuff.platform_site_id)
        except Exception:
            log.error('failed to get extended site', exc_info=True)
            raise
        log.debug("extended_site:  %r ", extended_site)
        self.assertEqual(1, len(extended_site.platform_devices))
        self.assertEqual(1, len(extended_site.platform_models))
        self.assertEqual(stuff.platform_device_id, extended_site.platform_devices[0]._id)
        self.assertEqual(stuff.platform_model_id, extended_site.platform_models[0]._id)

        log.debug("verify that PlatformDeviceb is linked to PlatformDevice with hasNetworkParent link")
        associations = self.RR.find_associations(subject=stuff.platform_deviceb_id, predicate=PRED.hasNetworkParent, object=stuff.platform_device_id, id_only=True)
        self.assertIsNotNone(associations, "PlatformDevice child not connected to PlatformDevice parent.")


        #--------------------------------------------------------------------------------
        # Get the extended Org
        #--------------------------------------------------------------------------------
        #test the extended resource
        extended_org = self.OMS.get_marine_facility_extension(stuff.org_id)
        log.debug("test_observatory_org_extended: extended_org:  %s ", str(extended_org))
        #self.assertEqual(2, len(extended_org.instruments_deployed) )
        #self.assertEqual(1, len(extended_org.platforms_not_deployed) )
        self.assertEqual(2, extended_org.number_of_platforms)
        self.assertEqual(2, len(extended_org.platform_models) )

        self.assertEqual(2, extended_org.number_of_instruments)
        self.assertEqual(2, len(extended_org.instrument_models) )

        self.assertEqual(1, len(extended_org.members))
        self.assertNotEqual(extended_org.members[0]._id, member_actor_id)
        self.assertEqual(extended_org.members[0]._id, member_user_id)

        self.assertEqual(1, len(extended_org.open_requests))

        self.assertTrue(len(extended_site.deployments)>0)
        self.assertEqual(len(extended_site.deployments), len(extended_site.deployment_info))

        #test the extended resource of the ION org
        ion_org = self.org_management_service.find_org()
        extended_org = self.OMS.get_marine_facility_extension(ion_org._id, user_id=12345)
        log.debug("test_observatory_org_extended: extended_ION_org:  %s ", str(extended_org))
        self.assertEqual(1, len(extended_org.members))
        self.assertEqual(0, extended_org.number_of_platforms)
        #self.assertEqual(1, len(extended_org.sites))


        #--------------------------------------------------------------------------------
        # Get the extended Site
        #--------------------------------------------------------------------------------

        #create device state events to use for op/non-op filtering in the extended resource
        t = get_ion_ts()
        self.event_publisher.publish_event(ts_created=t, event_type='ResourceAgentStateEvent',
            origin=stuff.instrument_device_id, state=ResourceAgentState.STREAMING)

        self.event_publisher.publish_event(ts_created=t, event_type='ResourceAgentStateEvent',
            origin=stuff.instrument_device2_id, state=ResourceAgentState.INACTIVE)
        extended_site = self.OMS.get_site_extension(stuff.instrument_site2_id)


        log.debug("test_observatory_org_extended: extended_site:  %s ", str(extended_site))

        self.dpclient.delete_data_product(data_product_id1)


class PubsubManagementIntTest(IonIntegrationTestCase):

    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.pubsub_management       = PubsubManagementServiceClient()
        self.resource_registry       = ResourceRegistryServiceClient()
        self.dataset_management      = DatasetManagementServiceClient()
        self.data_product_management = DataProductManagementServiceClient()

        self.pdicts = {}
        self.queue_cleanup = list()
        self.exchange_cleanup = list()
        self.context_ids = set()

    def tearDown(self):
        for queue in self.queue_cleanup:
            xn = self.container.ex_manager.create_xn_queue(queue)
            xn.delete()
        for exchange in self.exchange_cleanup:
            xp = self.container.ex_manager.create_xp(exchange)
            xp.delete()

        self.cleanup_contexts()
    
    def test_stream_def_crud(self):

        # Test Creation
        pdict = DatasetManagementService.get_parameter_dictionary_by_name('ctd_parsed_param_dict')
        stream_definition_id = self.pubsub_management.create_stream_definition('ctd parsed', parameter_dictionary_id=pdict.identifier)
        self.addCleanup(self.pubsub_management.delete_stream_definition, stream_definition_id)

        # Make sure there is an assoc
        self.assertTrue(self.resource_registry.find_associations(subject=stream_definition_id, predicate=PRED.hasParameterDictionary, object=pdict.identifier, id_only=True))

        # Test Reading
        stream_definition = self.pubsub_management.read_stream_definition(stream_definition_id)
        self.assertTrue(PubsubManagementService._compare_pdicts(pdict.dump(), stream_definition.parameter_dictionary))


        # Test comparisons
        in_stream_definition_id = self.pubsub_management.create_stream_definition('L0 products', parameter_dictionary_id=pdict.identifier, available_fields=['time','temp','conductivity','pressure'])
        self.addCleanup(self.pubsub_management.delete_stream_definition, in_stream_definition_id)

        out_stream_definition_id = in_stream_definition_id
        self.assertTrue(self.pubsub_management.compare_stream_definition(in_stream_definition_id, out_stream_definition_id))
        self.assertTrue(self.pubsub_management.compatible_stream_definitions(in_stream_definition_id, out_stream_definition_id))

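        # compare_stream_definition evidently requires identical available_fields,
        # while compatible_stream_definitions only needs a shared parameter
        # dictionary: the L2 definition below compares unequal yet stays compatible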
        out_stream_definition_id = self.pubsub_management.create_stream_definition('L2 Products', parameter_dictionary_id=pdict.identifier, available_fields=['time','salinity','density'])
        self.addCleanup(self.pubsub_management.delete_stream_definition, out_stream_definition_id)
        self.assertFalse(self.pubsub_management.compare_stream_definition(in_stream_definition_id, out_stream_definition_id))

        self.assertTrue(self.pubsub_management.compatible_stream_definitions(in_stream_definition_id, out_stream_definition_id))

    @unittest.skip('Needs to be refactored for cleanup')
    def test_validate_stream_defs(self):
        self.addCleanup(self.cleanup_contexts)
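        # Each case below pairs an incoming and an outgoing stream definition;
        # validate_stream_defs appears to pass only when every requested output
        # field is derivable from the available input fields.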
        #test no input 
        incoming_pdict_id = self._get_pdict(['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'])
        outgoing_pdict_id = self._get_pdict(['DENSITY', 'PRACSAL', 'TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        available_fields_in = []
        available_fields_out = []
        incoming_stream_def_id = self.pubsub_management.create_stream_definition('in_sd_0', parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in)
        self.addCleanup(self.pubsub_management.delete_stream_definition, incoming_stream_def_id)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition('out_sd_0', parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out)
        self.addCleanup(self.pubsub_management.delete_stream_definition, outgoing_stream_def_id)
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertFalse(result)
    
        #test input with no output
        incoming_pdict_id = self._get_pdict(['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'])
        outgoing_pdict_id = self._get_pdict(['DENSITY', 'PRACSAL', 'TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        available_fields_in = ['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0']
        available_fields_out = []
        incoming_stream_def_id = self.pubsub_management.create_stream_definition('in_sd_1', parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in)
        self.addCleanup(self.pubsub_management.delete_stream_definition, incoming_stream_def_id)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition('out_sd_1', parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out)
        self.addCleanup(self.pubsub_management.delete_stream_definition, outgoing_stream_def_id)
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertTrue(result)
        
        #test available field missing parameter context definition -- missing PRESWAT_L0
        incoming_pdict_id = self._get_pdict(['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0'])
        outgoing_pdict_id = self._get_pdict(['DENSITY', 'PRACSAL', 'TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        available_fields_in = ['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0']
        available_fields_out = ['DENSITY']
        incoming_stream_def_id = self.pubsub_management.create_stream_definition('in_sd_2', parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in)
        self.addCleanup(self.pubsub_management.delete_stream_definition, incoming_stream_def_id)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition('out_sd_2', parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out)
        self.addCleanup(self.pubsub_management.delete_stream_definition, outgoing_stream_def_id)
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertFalse(result)

        #test l1 from l0
        incoming_pdict_id = self._get_pdict(['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'])
        outgoing_pdict_id = self._get_pdict(['TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        available_fields_in = ['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0']
        available_fields_out = ['TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1']
        incoming_stream_def_id = self.pubsub_management.create_stream_definition('in_sd_3', parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in)
        self.addCleanup(self.pubsub_management.delete_stream_definition, incoming_stream_def_id)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition('out_sd_3', parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out)
        self.addCleanup(self.pubsub_management.delete_stream_definition, outgoing_stream_def_id)
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertTrue(result)

        #test l2 from l0
        incoming_pdict_id = self._get_pdict(['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'])
        outgoing_pdict_id = self._get_pdict(['TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1', 'DENSITY', 'PRACSAL'])
        available_fields_in = ['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0']
        available_fields_out = ['DENSITY', 'PRACSAL']
        incoming_stream_def_id = self.pubsub_management.create_stream_definition('in_sd_4', parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in)
        self.addCleanup(self.pubsub_management.delete_stream_definition, incoming_stream_def_id)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition('out_sd_4', parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out)
        self.addCleanup(self.pubsub_management.delete_stream_definition, outgoing_stream_def_id)
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertTrue(result)
        
        #test Ln from L0
        incoming_pdict_id = self._get_pdict(['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'])
        outgoing_pdict_id = self._get_pdict(['DENSITY','PRACSAL','TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        available_fields_in = ['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0']
        available_fields_out = ['DENSITY', 'PRACSAL', 'TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1']
        incoming_stream_def_id = self.pubsub_management.create_stream_definition('in_sd_5', parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in)
        self.addCleanup(self.pubsub_management.delete_stream_definition, incoming_stream_def_id)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition('out_sd_5', parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out)
        self.addCleanup(self.pubsub_management.delete_stream_definition, outgoing_stream_def_id)
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertTrue(result)
        
        #test L2 from L1
        incoming_pdict_id = self._get_pdict(['TIME', 'LAT', 'LON', 'TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        outgoing_pdict_id = self._get_pdict(['DENSITY','PRACSAL','TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        available_fields_in = ['TIME', 'LAT', 'LON', 'TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1']
        available_fields_out = ['DENSITY', 'PRACSAL']
        incoming_stream_def_id = self.pubsub_management.create_stream_definition('in_sd_6', parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in)
        self.addCleanup(self.pubsub_management.delete_stream_definition, incoming_stream_def_id)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition('out_sd_6', parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out)
        self.addCleanup(self.pubsub_management.delete_stream_definition, outgoing_stream_def_id)
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertTrue(result)
        
        #test L1 from L0 missing L0
        incoming_pdict_id = self._get_pdict(['TIME', 'LAT', 'LON'])
        outgoing_pdict_id = self._get_pdict(['TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        available_fields_in = ['TIME', 'LAT', 'LON']
        available_fields_out = ['DENSITY', 'PRACSAL']
        incoming_stream_def_id = self.pubsub_management.create_stream_definition('in_sd_7', parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in)
        self.addCleanup(self.pubsub_management.delete_stream_definition, incoming_stream_def_id)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition('out_sd_7', parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out)
        self.addCleanup(self.pubsub_management.delete_stream_definition, outgoing_stream_def_id)
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertFalse(result)
        
        #test L2 from L0 missing L0
        incoming_pdict_id = self._get_pdict(['TIME', 'LAT', 'LON'])
        outgoing_pdict_id = self._get_pdict(['DENSITY', 'PRACSAL', 'TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        available_fields_in = ['TIME', 'LAT', 'LON']
        available_fields_out = ['DENSITY', 'PRACSAL']
        incoming_stream_def_id = self.pubsub_management.create_stream_definition('in_sd_8', parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in)
        self.addCleanup(self.pubsub_management.delete_stream_definition, incoming_stream_def_id)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition('out_sd_8', parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out)
        self.addCleanup(self.pubsub_management.delete_stream_definition, outgoing_stream_def_id)
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertFalse(result)
        
        #test L2 from L0 missing L1
        incoming_pdict_id = self._get_pdict(['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'])
        outgoing_pdict_id = self._get_pdict(['DENSITY', 'PRACSAL'])
        available_fields_in = ['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0']
        available_fields_out = ['DENSITY', 'PRACSAL']
        incoming_stream_def_id = self.pubsub_management.create_stream_definition('in_sd_9', parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in)
        self.addCleanup(self.pubsub_management.delete_stream_definition, incoming_stream_def_id)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition('out_sd_9', parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out)
        self.addCleanup(self.pubsub_management.delete_stream_definition, outgoing_stream_def_id)
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertFalse(result)
    
    def publish_on_stream(self, stream_id, msg):
        stream = self.pubsub_management.read_stream(stream_id)
        stream_route = stream.stream_route
        publisher = StandaloneStreamPublisher(stream_id=stream_id, stream_route=stream_route)
        publisher.publish(msg)

    def test_stream_crud(self):
        stream_def_id = self.pubsub_management.create_stream_definition('test_definition', stream_type='stream')
        self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)
        topic_id = self.pubsub_management.create_topic(name='test_topic', exchange_point='test_exchange')
        self.addCleanup(self.pubsub_management.delete_topic, topic_id)
        self.exchange_cleanup.append('test_exchange')
        topic2_id = self.pubsub_management.create_topic(name='another_topic', exchange_point='outside')
        self.addCleanup(self.pubsub_management.delete_topic, topic2_id)
        stream_id, route = self.pubsub_management.create_stream(name='test_stream', topic_ids=[topic_id, topic2_id], exchange_point='test_exchange', stream_definition_id=stream_def_id)

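        # only the topic on the stream's own exchange point ('test_exchange')
        # should be associated; 'another_topic' lives on 'outside'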
        topics, assocs = self.resource_registry.find_objects(subject=stream_id, predicate=PRED.hasTopic, id_only=True)
        self.assertEquals(topics,[topic_id])

        defs, assocs = self.resource_registry.find_objects(subject=stream_id, predicate=PRED.hasStreamDefinition, id_only=True)
        self.assertTrue(len(defs))

        stream = self.pubsub_management.read_stream(stream_id)
        self.assertEquals(stream.name,'test_stream')
        self.pubsub_management.delete_stream(stream_id)
        
        with self.assertRaises(NotFound):
            self.pubsub_management.read_stream(stream_id)

        defs, assocs = self.resource_registry.find_objects(subject=stream_id, predicate=PRED.hasStreamDefinition, id_only=True)
        self.assertFalse(len(defs))

        topics, assocs = self.resource_registry.find_objects(subject=stream_id, predicate=PRED.hasTopic, id_only=True)
        self.assertFalse(len(topics))



    def test_data_product_subscription(self):
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        stream_def_id = self.pubsub_management.create_stream_definition('ctd parsed', parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)

        tdom, sdom = time_series_domain()
        dp = DataProduct(name='ctd parsed')
        dp.spatial_domain = sdom.dump()
        dp.temporal_domain = tdom.dump()

        data_product_id = self.data_product_management.create_data_product(data_product=dp, stream_definition_id=stream_def_id)
        self.addCleanup(self.data_product_management.delete_data_product, data_product_id)

        subscription_id = self.pubsub_management.create_subscription('validator', data_product_ids=[data_product_id])
        self.addCleanup(self.pubsub_management.delete_subscription, subscription_id)

        validated = Event()
        def validation(msg, route, stream_id):
            validated.set()

        stream_ids, _ = self.resource_registry.find_objects(subject=data_product_id, predicate=PRED.hasStream, id_only=True)
        dp_stream_id = stream_ids.pop()

        validator = StandaloneStreamSubscriber('validator', callback=validation)
        validator.start()
        self.addCleanup(validator.stop)

        self.pubsub_management.activate_subscription(subscription_id)
        self.addCleanup(self.pubsub_management.deactivate_subscription, subscription_id)

        route = self.pubsub_management.read_stream_route(dp_stream_id)

        publisher = StandaloneStreamPublisher(dp_stream_id, route)
        publisher.publish('hi')
        self.assertTrue(validated.wait(10))
            

    def test_subscription_crud(self):
        stream_def_id = self.pubsub_management.create_stream_definition('test_definition', stream_type='stream')
        stream_id, route = self.pubsub_management.create_stream(name='test_stream', exchange_point='test_exchange', stream_definition_id=stream_def_id)
        subscription_id = self.pubsub_management.create_subscription(name='test subscription', stream_ids=[stream_id], exchange_name='test_queue')
        self.exchange_cleanup.append('test_exchange')

        subs, assocs = self.resource_registry.find_objects(subject=subscription_id,predicate=PRED.hasStream,id_only=True)
        self.assertEquals(subs,[stream_id])

        res, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name='test_queue', id_only=True)
        self.assertEquals(len(res),1)

        subs, assocs = self.resource_registry.find_subjects(object=subscription_id, predicate=PRED.hasSubscription, id_only=True)
        self.assertEquals(subs[0], res[0])

        subscription = self.pubsub_management.read_subscription(subscription_id)
        self.assertEquals(subscription.exchange_name, 'test_queue')

        self.pubsub_management.delete_subscription(subscription_id)
        
        subs, assocs = self.resource_registry.find_objects(subject=subscription_id,predicate=PRED.hasStream,id_only=True)
        self.assertFalse(len(subs))

        subs, assocs = self.resource_registry.find_subjects(object=subscription_id, predicate=PRED.hasSubscription, id_only=True)
        self.assertFalse(len(subs))


        self.pubsub_management.delete_stream(stream_id)
        self.pubsub_management.delete_stream_definition(stream_def_id)

    def test_move_before_activate(self):
        stream_id, route = self.pubsub_management.create_stream(name='test_stream', exchange_point='test_xp')

        #--------------------------------------------------------------------------------
        # Test moving before activate
        #--------------------------------------------------------------------------------

        subscription_id = self.pubsub_management.create_subscription('first_queue', stream_ids=[stream_id])

        xn_ids, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name='first_queue', id_only=True)
        subjects, _ = self.resource_registry.find_subjects(object=subscription_id, predicate=PRED.hasSubscription, id_only=True)
        self.assertEquals(xn_ids[0], subjects[0])

        self.pubsub_management.move_subscription(subscription_id, exchange_name='second_queue')
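        # after the move, the hasSubscription association should originate from
        # the 'second_queue' ExchangeName rather than 'first_queue'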

        xn_ids, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name='second_queue', id_only=True)
        subjects, _ = self.resource_registry.find_subjects(object=subscription_id, predicate=PRED.hasSubscription, id_only=True)

        self.assertEquals(len(subjects),1)
        self.assertEquals(subjects[0], xn_ids[0])

        self.pubsub_management.delete_subscription(subscription_id)
        self.pubsub_management.delete_stream(stream_id)

    def test_move_activated_subscription(self):

        stream_id, route = self.pubsub_management.create_stream(name='test_stream', exchange_point='test_xp')
        #--------------------------------------------------------------------------------
        # Test moving after activate
        #--------------------------------------------------------------------------------

        subscription_id = self.pubsub_management.create_subscription('first_queue', stream_ids=[stream_id])
        self.pubsub_management.activate_subscription(subscription_id)

        xn_ids, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name='first_queue', id_only=True)
        subjects, _ = self.resource_registry.find_subjects(object=subscription_id, predicate=PRED.hasSubscription, id_only=True)
        self.assertEquals(xn_ids[0], subjects[0])

        self.verified = Event()

        def verify(m,r,s):
            self.assertEquals(m,'verified')
            self.verified.set()

        subscriber = StandaloneStreamSubscriber('second_queue', verify)
        subscriber.start()

        self.pubsub_management.move_subscription(subscription_id, exchange_name='second_queue')

        xn_ids, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name='second_queue', id_only=True)
        subjects, _ = self.resource_registry.find_subjects(object=subscription_id, predicate=PRED.hasSubscription, id_only=True)

        self.assertEquals(len(subjects),1)
        self.assertEquals(subjects[0], xn_ids[0])

        publisher = StandaloneStreamPublisher(stream_id, route)
        publisher.publish('verified')

        self.assertTrue(self.verified.wait(2))

        self.pubsub_management.deactivate_subscription(subscription_id)

        self.pubsub_management.delete_subscription(subscription_id)
        self.pubsub_management.delete_stream(stream_id)

    def test_queue_cleanup(self):
        stream_id, route = self.pubsub_management.create_stream('test_stream','xp1')
        xn_objs, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name='queue1')
        for xn_obj in xn_objs:
            xn = self.container.ex_manager.create_xn_queue(xn_obj.name)
            xn.delete()
        subscription_id = self.pubsub_management.create_subscription('queue1',stream_ids=[stream_id])
        xn_ids, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name='queue1')
        self.assertEquals(len(xn_ids),1)

        self.pubsub_management.delete_subscription(subscription_id)

        xn_ids, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name='queue1')
        self.assertEquals(len(xn_ids),0)

    def test_activation_and_deactivation(self):
        stream_id, route = self.pubsub_management.create_stream('stream1','xp1')
        subscription_id = self.pubsub_management.create_subscription('sub1', stream_ids=[stream_id])

        self.check1 = Event()

        def verifier(m,r,s):
            self.check1.set()


        subscriber = StandaloneStreamSubscriber('sub1',verifier)
        subscriber.start()

        publisher = StandaloneStreamPublisher(stream_id, route)
        publisher.publish('should not receive')

        self.assertFalse(self.check1.wait(0.25))

        self.pubsub_management.activate_subscription(subscription_id)

        publisher.publish('should receive')
        self.assertTrue(self.check1.wait(2))

        self.check1.clear()
        self.assertFalse(self.check1.is_set())

        self.pubsub_management.deactivate_subscription(subscription_id)

        publisher.publish('should not receive')
        self.assertFalse(self.check1.wait(0.5))

        self.pubsub_management.activate_subscription(subscription_id)

        publisher.publish('should receive')
        self.assertTrue(self.check1.wait(2))

        subscriber.stop()

        self.pubsub_management.deactivate_subscription(subscription_id)
        self.pubsub_management.delete_subscription(subscription_id)
        self.pubsub_management.delete_stream(stream_id)

        

    def test_topic_crud(self):
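        # Basic create/read/delete round-trip for a topic.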

        topic_id = self.pubsub_management.create_topic(name='test_topic', exchange_point='test_xp')
        self.exchange_cleanup.append('test_xp')

        topic = self.pubsub_management.read_topic(topic_id)

        self.assertEquals(topic.name,'test_topic')
        self.assertEquals(topic.exchange_point, 'test_xp')

        self.pubsub_management.delete_topic(topic_id)
        with self.assertRaises(NotFound):
            self.pubsub_management.read_topic(topic_id)

    def test_full_pubsub(self):
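        # End-to-end check of topics, streams, and subscriptions across multiple
        # exchange points, including a subscription bound to a whole exchange point.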

        self.sub1_sat = Event()
        self.sub2_sat = Event()

        def subscriber1(m,r,s):
            self.sub1_sat.set()

        def subscriber2(m,r,s):
            self.sub2_sat.set()

        sub1 = StandaloneStreamSubscriber('sub1', subscriber1)
        sub1.start()
        self.addCleanup(sub1.stop)

        sub2 = StandaloneStreamSubscriber('sub2', subscriber2)
        sub2.start()
        self.addCleanup(sub2.stop)

        log_topic = self.pubsub_management.create_topic('instrument_logs', exchange_point='instruments')
        self.addCleanup(self.pubsub_management.delete_topic, log_topic)
        science_topic = self.pubsub_management.create_topic('science_data', exchange_point='instruments')
        self.addCleanup(self.pubsub_management.delete_topic, science_topic)
        events_topic = self.pubsub_management.create_topic('notifications', exchange_point='events')
        self.addCleanup(self.pubsub_management.delete_topic, events_topic)


        log_stream, route = self.pubsub_management.create_stream('instrument1-logs', topic_ids=[log_topic], exchange_point='instruments')
        self.addCleanup(self.pubsub_management.delete_stream, log_stream)
        ctd_stream, route = self.pubsub_management.create_stream('instrument1-ctd', topic_ids=[science_topic], exchange_point='instruments')
        self.addCleanup(self.pubsub_management.delete_stream, ctd_stream)
        event_stream, route = self.pubsub_management.create_stream('notifications', topic_ids=[events_topic], exchange_point='events')
        self.addCleanup(self.pubsub_management.delete_stream, event_stream)
        raw_stream, route = self.pubsub_management.create_stream('temp', exchange_point='global.data')
        self.addCleanup(self.pubsub_management.delete_stream, raw_stream)


        subscription1 = self.pubsub_management.create_subscription('subscription1', stream_ids=[log_stream,event_stream], exchange_name='sub1')
        self.addCleanup(self.pubsub_management.delete_subscription, subscription1)
        subscription2 = self.pubsub_management.create_subscription('subscription2', exchange_points=['global.data'], stream_ids=[ctd_stream], exchange_name='sub2')
        self.addCleanup(self.pubsub_management.delete_subscription, subscription2)

        self.pubsub_management.activate_subscription(subscription1)
        self.addCleanup(self.pubsub_management.deactivate_subscription, subscription1)
        self.pubsub_management.activate_subscription(subscription2)
        self.addCleanup(self.pubsub_management.deactivate_subscription, subscription2)

        self.publish_on_stream(log_stream, 1)
        self.assertTrue(self.sub1_sat.wait(4))
        self.assertFalse(self.sub2_sat.is_set())

        self.publish_on_stream(raw_stream, 1)
        self.assertTrue(self.sub2_sat.wait(4))
    
    def test_topic_craziness(self):
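        # Exercise subscriptions against two topic trees:
        #   Tree 1 (xp1): topic1 -> (topic2 -> (topic4, topic5), topic3 -> (topic6, topic7))
        #   Tree 2 (xp2): topic8 -> topic9 -> (topic10, topic11 -> (topic12, topic13))
        # A subscription to a topic should receive messages from streams attached
        # to that topic or any of its descendants, but not from its ancestors.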

        self.msg_queue = Queue()

        def subscriber1(m,r,s):
            self.msg_queue.put(m)

        sub1 = StandaloneStreamSubscriber('sub1', subscriber1)
        sub1.start()
        self.addCleanup(sub1.stop)

        topic1 = self.pubsub_management.create_topic('topic1', exchange_point='xp1')
        self.addCleanup(self.pubsub_management.delete_topic, topic1)
        topic2 = self.pubsub_management.create_topic('topic2', exchange_point='xp1', parent_topic_id=topic1)
        self.addCleanup(self.pubsub_management.delete_topic, topic2)
        topic3 = self.pubsub_management.create_topic('topic3', exchange_point='xp1', parent_topic_id=topic1)
        self.addCleanup(self.pubsub_management.delete_topic, topic3)
        topic4 = self.pubsub_management.create_topic('topic4', exchange_point='xp1', parent_topic_id=topic2)
        self.addCleanup(self.pubsub_management.delete_topic, topic4)
        topic5 = self.pubsub_management.create_topic('topic5', exchange_point='xp1', parent_topic_id=topic2)
        self.addCleanup(self.pubsub_management.delete_topic, topic5)
        topic6 = self.pubsub_management.create_topic('topic6', exchange_point='xp1', parent_topic_id=topic3)
        self.addCleanup(self.pubsub_management.delete_topic, topic6)
        topic7 = self.pubsub_management.create_topic('topic7', exchange_point='xp1', parent_topic_id=topic3)
        self.addCleanup(self.pubsub_management.delete_topic, topic7)

        # Tree 2
        topic8 = self.pubsub_management.create_topic('topic8', exchange_point='xp2')
        self.addCleanup(self.pubsub_management.delete_topic, topic8)
        topic9 = self.pubsub_management.create_topic('topic9', exchange_point='xp2', parent_topic_id=topic8)
        self.addCleanup(self.pubsub_management.delete_topic, topic9)
        topic10 = self.pubsub_management.create_topic('topic10', exchange_point='xp2', parent_topic_id=topic9)
        self.addCleanup(self.pubsub_management.delete_topic, topic10)
        topic11 = self.pubsub_management.create_topic('topic11', exchange_point='xp2', parent_topic_id=topic9)
        self.addCleanup(self.pubsub_management.delete_topic, topic11)
        topic12 = self.pubsub_management.create_topic('topic12', exchange_point='xp2', parent_topic_id=topic11)
        self.addCleanup(self.pubsub_management.delete_topic, topic12)
        topic13 = self.pubsub_management.create_topic('topic13', exchange_point='xp2', parent_topic_id=topic11)
        self.addCleanup(self.pubsub_management.delete_topic, topic13)
        self.exchange_cleanup.extend(['xp1','xp2'])
        
        stream1_id, route = self.pubsub_management.create_stream('stream1', topic_ids=[topic7, topic4, topic5], exchange_point='xp1')
        self.addCleanup(self.pubsub_management.delete_stream, stream1_id)
        stream2_id, route = self.pubsub_management.create_stream('stream2', topic_ids=[topic8], exchange_point='xp2')
        self.addCleanup(self.pubsub_management.delete_stream, stream2_id)
        stream3_id, route = self.pubsub_management.create_stream('stream3', topic_ids=[topic10,topic13], exchange_point='xp2')
        self.addCleanup(self.pubsub_management.delete_stream, stream3_id)
        stream4_id, route = self.pubsub_management.create_stream('stream4', topic_ids=[topic9], exchange_point='xp2')
        self.addCleanup(self.pubsub_management.delete_stream, stream4_id)
        stream5_id, route = self.pubsub_management.create_stream('stream5', topic_ids=[topic11], exchange_point='xp2')
        self.addCleanup(self.pubsub_management.delete_stream, stream5_id)

        subscription1 = self.pubsub_management.create_subscription('sub1', topic_ids=[topic1])
        self.addCleanup(self.pubsub_management.delete_subscription, subscription1)
        subscription2 = self.pubsub_management.create_subscription('sub2', topic_ids=[topic8], exchange_name='sub1')
        self.addCleanup(self.pubsub_management.delete_subscription, subscription2)
        subscription3 = self.pubsub_management.create_subscription('sub3', topic_ids=[topic9], exchange_name='sub1')
        self.addCleanup(self.pubsub_management.delete_subscription, subscription3)
        subscription4 = self.pubsub_management.create_subscription('sub4', topic_ids=[topic10,topic13, topic11], exchange_name='sub1')
        self.addCleanup(self.pubsub_management.delete_subscription, subscription4)
        #--------------------------------------------------------------------------------
        self.pubsub_management.activate_subscription(subscription1)

        self.publish_on_stream(stream1_id,1)

        self.assertEquals(self.msg_queue.get(timeout=10), 1)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.1)


        self.pubsub_management.deactivate_subscription(subscription1)
        #--------------------------------------------------------------------------------
        self.pubsub_management.activate_subscription(subscription2)
        
        self.publish_on_stream(stream2_id,2)
        self.assertEquals(self.msg_queue.get(timeout=10), 2)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.1)

        self.pubsub_management.deactivate_subscription(subscription2)

        #--------------------------------------------------------------------------------
        self.pubsub_management.activate_subscription(subscription3)

        self.publish_on_stream(stream2_id, 3)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.3)

        self.publish_on_stream(stream3_id, 4)
        self.assertEquals(self.msg_queue.get(timeout=10),4)


        self.pubsub_management.deactivate_subscription(subscription3)

        #--------------------------------------------------------------------------------
        self.pubsub_management.activate_subscription(subscription4)

        self.publish_on_stream(stream4_id, 5)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.3)

        self.publish_on_stream(stream5_id, 6)
        self.assertEquals(self.msg_queue.get(timeout=10),6)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.3)

        self.pubsub_management.deactivate_subscription(subscription4)
        
        #--------------------------------------------------------------------------------
    
    def cleanup_contexts(self):
        for context_id in self.context_ids:
            self.dataset_management.delete_parameter_context(context_id)

    def add_context_to_cleanup(self, context_id):
        self.context_ids.add(context_id)

    def _get_pdict(self, filter_values):
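        # Build (or fetch from the self.pdicts cache) a parameter dictionary
        # containing only the contexts named in filter_values; each context is
        # registered once and scheduled for cleanup.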
        t_ctxt = ParameterContext('TIME', param_type=QuantityType(value_encoding=np.dtype('int64')))
        t_ctxt.uom = 'seconds since 01-01-1900'
        t_ctxt_id = self.dataset_management.create_parameter_context(name='TIME', parameter_context=t_ctxt.dump(), parameter_type='quantity<int64>', units=t_ctxt.uom)
        self.add_context_to_cleanup(t_ctxt_id)

        lat_ctxt = ParameterContext('LAT', param_type=ConstantType(QuantityType(value_encoding=np.dtype('float32'))), fill_value=-9999)
        lat_ctxt.axis = AxisTypeEnum.LAT
        lat_ctxt.uom = 'degree_north'
        lat_ctxt_id = self.dataset_management.create_parameter_context(name='LAT', parameter_context=lat_ctxt.dump(), parameter_type='quantity<float32>', units=lat_ctxt.uom)
        self.add_context_to_cleanup(lat_ctxt_id)


        lon_ctxt = ParameterContext('LON', param_type=ConstantType(QuantityType(value_encoding=np.dtype('float32'))), fill_value=-9999)
        lon_ctxt.axis = AxisTypeEnum.LON
        lon_ctxt.uom = 'degree_east'
        lon_ctxt_id = self.dataset_management.create_parameter_context(name='LON', parameter_context=lon_ctxt.dump(), parameter_type='quantity<float32>', units=lon_ctxt.uom)
        self.add_context_to_cleanup(lon_ctxt_id)


        # Independent Parameters
        # Temperature - values expected to be the decimal results of conversion from hex
        temp_ctxt = ParameterContext('TEMPWAT_L0', param_type=QuantityType(value_encoding=np.dtype('float32')), fill_value=-9999)
        temp_ctxt.uom = 'deg_C'
        temp_ctxt_id = self.dataset_management.create_parameter_context(name='TEMPWAT_L0', parameter_context=temp_ctxt.dump(), parameter_type='quantity<float32>', units=temp_ctxt.uom)
        self.add_context_to_cleanup(temp_ctxt_id)


        # Conductivity - values expected to be the decimal results of conversion from hex
        cond_ctxt = ParameterContext('CONDWAT_L0', param_type=QuantityType(value_encoding=np.dtype('float32')), fill_value=-9999)
        cond_ctxt.uom = 'S m-1'
        cond_ctxt_id = self.dataset_management.create_parameter_context(name='CONDWAT_L0', parameter_context=cond_ctxt.dump(), parameter_type='quantity<float32>', units=cond_ctxt.uom)
        self.add_context_to_cleanup(cond_ctxt_id)


        # Pressure - values expected to be the decimal results of conversion from hex
        press_ctxt = ParameterContext('PRESWAT_L0', param_type=QuantityType(value_encoding=np.dtype('float32')), fill_value=-9999)
        press_ctxt.uom = 'dbar'
        press_ctxt_id = self.dataset_management.create_parameter_context(name='PRESWAT_L0', parameter_context=press_ctxt.dump(), parameter_type='quantity<float32>', units=press_ctxt.uom)
        self.add_context_to_cleanup(press_ctxt_id)


        # Dependent Parameters

        # TEMPWAT_L1 = (TEMPWAT_L0 / 10000) - 10
        tl1_func = '(T / 10000) - 10'
        tl1_pmap = {'T': 'TEMPWAT_L0'}
        expr = NumexprFunction('TEMPWAT_L1', tl1_func, ['T'], param_map=tl1_pmap)
        tempL1_ctxt = ParameterContext('TEMPWAT_L1', param_type=ParameterFunctionType(function=expr), variability=VariabilityEnum.TEMPORAL)
        tempL1_ctxt.uom = 'deg_C'
        tempL1_ctxt_id = self.dataset_management.create_parameter_context(name=tempL1_ctxt.name, parameter_context=tempL1_ctxt.dump(), parameter_type='pfunc', units=tempL1_ctxt.uom)
        self.add_context_to_cleanup(tempL1_ctxt_id)


        # CONDWAT_L1 = (CONDWAT_L0 / 100000) - 0.5
        cl1_func = '(C / 100000) - 0.5'
        cl1_pmap = {'C': 'CONDWAT_L0'}
        expr = NumexprFunction('CONDWAT_L1', cl1_func, ['C'], param_map=cl1_pmap)
        condL1_ctxt = ParameterContext('CONDWAT_L1', param_type=ParameterFunctionType(function=expr), variability=VariabilityEnum.TEMPORAL)
        condL1_ctxt.uom = 'S m-1'
        condL1_ctxt_id = self.dataset_management.create_parameter_context(name=condL1_ctxt.name, parameter_context=condL1_ctxt.dump(), parameter_type='pfunc', units=condL1_ctxt.uom)
        self.add_context_to_cleanup(condL1_ctxt_id)


        # Equation uses p_range, which is a calibration coefficient - Fixing to 679.34040721
        #   PRESWAT_L1 = (PRESWAT_L0 * p_range / (0.85 * 65536)) - (0.05 * p_range)
        pl1_func = '(P * p_range / (0.85 * 65536)) - (0.05 * p_range)'
        pl1_pmap = {'P': 'PRESWAT_L0', 'p_range': 679.34040721}
        expr = NumexprFunction('PRESWAT_L1', pl1_func, ['P', 'p_range'], param_map=pl1_pmap)
        presL1_ctxt = ParameterContext('PRESWAT_L1', param_type=ParameterFunctionType(function=expr), variability=VariabilityEnum.TEMPORAL)
        presL1_ctxt.uom = 'dbar'
        presL1_ctxt_id = self.dataset_management.create_parameter_context(name=presL1_ctxt.name, parameter_context=presL1_ctxt.dump(), parameter_type='pfunc', units=presL1_ctxt.uom)
        self.add_context_to_cleanup(presL1_ctxt_id)


        # Density & practical salinity calculated using the Gibbs Seawater library - available via python-gsw project:
        #       https://code.google.com/p/python-gsw/ & http://pypi.python.org/pypi/gsw/3.0.1

        # PRACSAL = gsw.SP_from_C((CONDWAT_L1 * 10), TEMPWAT_L1, PRESWAT_L1)
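        # A minimal direct-call sketch (assuming the python-gsw package is
        # importable; C is conductivity in mS/cm, t in deg_C, p in dbar):
        #   import gsw
        #   practical_salinity = gsw.SP_from_C(C, t, p)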
        owner = 'gsw'
        sal_func = 'SP_from_C'
        sal_arglist = ['C', 't', 'p']
        sal_pmap = {'C': NumexprFunction('CONDWAT_L1*10', 'C*10', ['C'], param_map={'C': 'CONDWAT_L1'}), 't': 'TEMPWAT_L1', 'p': 'PRESWAT_L1'}
        sal_kwargmap = None
        expr = PythonFunction('PRACSAL', owner, sal_func, sal_arglist, sal_kwargmap, sal_pmap)
        sal_ctxt = ParameterContext('PRACSAL', param_type=ParameterFunctionType(expr), variability=VariabilityEnum.TEMPORAL)
        sal_ctxt.uom = 'g kg-1'
        sal_ctxt_id = self.dataset_management.create_parameter_context(name=sal_ctxt.name, parameter_context=sal_ctxt.dump(), parameter_type='pfunc', units=sal_ctxt.uom)
        self.add_context_to_cleanup(sal_ctxt_id)


        # absolute_salinity = gsw.SA_from_SP(PRACSAL, PRESWAT_L1, longitude, latitude)
        # conservative_temperature = gsw.CT_from_t(absolute_salinity, TEMPWAT_L1, PRESWAT_L1)
        # DENSITY = gsw.rho(absolute_salinity, conservative_temperature, PRESWAT_L1)
        owner = 'gsw'
        abs_sal_expr = PythonFunction('abs_sal', owner, 'SA_from_SP', ['PRACSAL', 'PRESWAT_L1', 'LON','LAT'])
        cons_temp_expr = PythonFunction('cons_temp', owner, 'CT_from_t', [abs_sal_expr, 'TEMPWAT_L1', 'PRESWAT_L1'])
        dens_expr = PythonFunction('DENSITY', owner, 'rho', [abs_sal_expr, cons_temp_expr, 'PRESWAT_L1'])
        dens_ctxt = ParameterContext('DENSITY', param_type=ParameterFunctionType(dens_expr), variability=VariabilityEnum.TEMPORAL)
        dens_ctxt.uom = 'kg m-3'
        dens_ctxt_id = self.dataset_management.create_parameter_context(name=dens_ctxt.name, parameter_context=dens_ctxt.dump(), parameter_type='pfunc', units=dens_ctxt.uom)
        self.add_context_to_cleanup(dens_ctxt_id)

        
        ids = [t_ctxt_id, lat_ctxt_id, lon_ctxt_id, temp_ctxt_id, cond_ctxt_id, press_ctxt_id, tempL1_ctxt_id, condL1_ctxt_id, presL1_ctxt_id, sal_ctxt_id, dens_ctxt_id]
        contexts = [t_ctxt, lat_ctxt, lon_ctxt, temp_ctxt, cond_ctxt, press_ctxt, tempL1_ctxt, condL1_ctxt, presL1_ctxt, sal_ctxt, dens_ctxt]
        context_ids = [ids[i] for i,ctxt in enumerate(contexts) if ctxt.name in filter_values]
        pdict_name = '_'.join([ctxt.name for ctxt in contexts if ctxt.name in filter_values])

        if pdict_name in self.pdicts:
            return self.pdicts[pdict_name]

        pdict_id = self.dataset_management.create_parameter_dictionary(pdict_name, parameter_context_ids=context_ids, temporal_context='time')
        self.addCleanup(self.dataset_management.delete_parameter_dictionary, pdict_id)
        self.pdicts[pdict_name] = pdict_id
        return pdict_id
class TestResourceRegistryAttachments(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.RR  = ResourceRegistryServiceClient(node=self.container.node)


    def test_resource_registry_blob_sanity(self):
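        # Round-trip a plain-text BLOB attachment, then exercise basic
        # association create/find/delete against the resource registry.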
        resource_id, _ = self.RR.create(IonObject(RT.Resource, name="foo"))

        MY_CONTENT = "the quick brown fox etc etc etc"

        #save
        att_id = self.RR.create_attachment(resource_id,  IonObject(RT.Attachment,
                                                          name="test.txt",
                                                          content=MY_CONTENT,
                                                          content_type="text/plain",
                                                          keywords=["test1", "test2"],
                                                          attachment_type=AttachmentType.BLOB))
        
        #load
        attachment = self.RR.read_attachment(att_id, include_content=True)
        self.assertEqual("test.txt", attachment.name)
        self.assertEqual("text/plain", attachment.content_type)
        self.assertIn("test1", attachment.keywords)
        self.assertIn("test2", attachment.keywords)

        # content round-trips unchanged; any base64 encoding is handled internally
        self.assertEqual(MY_CONTENT, attachment.content)

        obj = self.RR.read(resource_id)
        self.assertEqual(obj.name, "foo")
        obj.name = "TheDudeAbides"
        obj = self.RR.update(obj)
        obj = self.RR.read(resource_id)
        self.assertEqual(obj.name, "TheDudeAbides")

        att = self.RR.find_attachments(resource_id)
        self.assertNotEqual(att, None)


        actor_identity_obj = IonObject("ActorIdentity", name="name")
        actor_identity_obj_id, actor_identity_obj_rev = self.RR.create(actor_identity_obj)
        user_info_obj = IonObject("UserInfo", name="name")
        user_info_obj_id, user_info_obj_rev = self.RR.create(user_info_obj)
        assoc_id, assoc_rev = self.RR.create_association(actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertNotEqual(assoc_id, None)

        find_assoc = self.RR.find_associations(actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(find_assoc[0]._id == assoc_id)
        subj = self.RR.find_subjects(RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, True)

        res_obj1 = self.RR.read_object(actor_identity_obj_id, PRED.hasInfo, RT.UserInfo)
        self.assertEquals(res_obj1._id, user_info_obj_id)

        self.RR.delete_association(assoc_id)
        self.RR.delete_attachment(att_id)
        self.RR.delete(resource_id)
class TestResourceRegistry(IonIntegrationTestCase):
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to bank service
        self.resource_registry_service = ResourceRegistryServiceClient()

    @unittest.skip('Represents a bug in storage/retrieval')
    def test_tuple_in_dict(self):
        # create a resource with a tuple saved in a dict
        transform_obj = IonObject(RT.Transform)
        transform_obj.configuration = {}
        transform_obj.configuration["tuple"] = ('STRING', )
        transform_id, _ = self.resource_registry_service.create(transform_obj)

        # read the resource back
        returned_transform_obj = self.resource_registry_service.read(
            transform_id)

        self.assertEqual(transform_obj.configuration["tuple"],
                         returned_transform_obj.configuration["tuple"])

    def test_basics(self):
        # Sequence all the tests so we avoid repeated system starts and stops
        self._do_test_crud()
        self._do_test_read_mult()
        self._do_test_lifecycle()
        self._do_test_attach()
        self._do_test_association()
        self._do_test_find_resources()
        self._do_test_find_objects_mult()

    def _do_test_crud(self):
        # Some quick registry tests
        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", name="name", foo="bar")
        self.assertTrue(cm.exception.message ==
                        "__init__() got an unexpected keyword argument 'foo'")

        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name", "foo": "bar"})
        self.assertTrue(cm.exception.message ==
                        "__init__() got an unexpected keyword argument 'foo'")

        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name"}, foo="bar")
        self.assertTrue(cm.exception.message ==
                        "__init__() got an unexpected keyword argument 'foo'")

        # Instantiate an object
        obj = IonObject("UserInfo", name="name")

        # Can't set attributes that aren't in the object's schema
        with self.assertRaises(AttributeError) as cm:
            setattr(obj, "foo", "bar")
        self.assertTrue(
            cm.exception.message == "'UserInfo' object has no attribute 'foo'")

        # Can't call update on an object that hasn't been persisted
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.update(obj)
        self.assertTrue(
            cm.exception.message.startswith(
                "Object does not have required '_id' or '_rev' attribute"))

        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        # Cannot create object with _id and _rev fields pre-set
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create(read_obj)

        # Update object
        read_obj.name = "John Doe"
        self.resource_registry_service.update(read_obj)

        # Update should fail with revision mismatch
        with self.assertRaises(Conflict) as cm:
            self.resource_registry_service.update(read_obj)

        # Re-read and update object
        read_obj = self.resource_registry_service.read(obj_id)
        self.resource_registry_service.update(read_obj)

        # Delete object
        self.resource_registry_service.delete(obj_id)

        # Make sure read, update and delete now raise NotFound
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.read(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.update(read_obj)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        # Owner creation tests
        user = IonObject("ActorIdentity", name='user')
        uid, _ = self.resource_registry_service.create(user)

        inst = IonObject("InstrumentDevice", name='instrument')
        iid, _ = self.resource_registry_service.create(
            inst, headers={'ion-actor-id': str(uid)})

        ids, _ = self.resource_registry_service.find_objects(iid,
                                                             PRED.hasOwner,
                                                             RT.ActorIdentity,
                                                             id_only=True)
        self.assertEquals(len(ids), 1)

        assoc = self.resource_registry_service.read(ids[0])
        self.resource_registry_service.delete(iid)

        with self.assertRaises(NotFound) as ex:
            assoc = self.resource_registry_service.read(iid)

    def _do_test_read_mult(self):
        test_resource1_id, _ = self.resource_registry_service.create(
            Resource(name='test1'))
        test_resource2_id, _ = self.resource_registry_service.create(
            Resource(name='test2'))

        res_list = [test_resource1_id, test_resource2_id]

        objects = self.resource_registry_service.read_mult(res_list)

        for o in objects:
            self.assertIsInstance(o, Resource)
            self.assertTrue(o._id in res_list)

    def _do_test_lifecycle(self):
        # Lifecycle tests
        att = IonObject("InstrumentDevice", name='mine', description='desc')

        rid, rev = self.resource_registry_service.create(att)

        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.name, att.name)
        self.assertEquals(att1.lcstate, LCS.DRAFT)
        self.assertEquals(att1.availability, AS.PRIVATE)

        new_state = self.resource_registry_service.execute_lifecycle_transition(
            rid, LCE.PLAN)
        self.assertEquals(new_state, lcstate(LCS.PLANNED, AS.PRIVATE))

        att2 = self.resource_registry_service.read(rid)
        self.assertEquals(att2.lcstate, LCS.PLANNED)
        self.assertEquals(att2.availability, AS.PRIVATE)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.execute_lifecycle_transition(
                rid, LCE.UNANNOUNCE)
        self.assertTrue(
            "type=InstrumentDevice, lcstate=PLANNED_PRIVATE has no transition for event unannounce"
            in cm.exception.message)

        new_state = self.resource_registry_service.execute_lifecycle_transition(
            rid, LCE.DEVELOP)
        self.assertEquals(new_state, lcstate(LCS.DEVELOPED, AS.PRIVATE))

        with self.assertRaises(BadRequest):
            self.resource_registry_service.execute_lifecycle_transition(
                resource_id=rid, transition_event='NONE##')

        self.resource_registry_service.set_lifecycle_state(
            rid, lcstate(LCS.INTEGRATED, AS.PRIVATE))
        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.lcstate, LCS.INTEGRATED)
        self.assertEquals(att1.availability, AS.PRIVATE)

    def _do_test_attach(self):
        binary = "\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x03\x00\x00\x00(-\x0fS\x00\x00\x00\x03sBIT\x08\x08\x08\xdb\xe1O\xe0\x00\x00\x00~PLTEf3\x00\xfc\xf7\xe0\xee\xcc\x00\xd3\xa0\x00\xcc\x99\x00\xec\xcdc\x9fl\x00\xdd\xb2\x00\xff\xff\xff|I\x00\xf9\xdb\x00\xdd\xb5\x19\xd9\xad\x10\xb6\x83\x00\xf8\xd6\x00\xf2\xc5\x00\xd8\xab\x00n;\x00\xff\xcc\x00\xd6\xa4\t\xeb\xb8\x00\x83Q\x00\xadz\x00\xff\xde\x00\xff\xd6\x00\xd6\xa3\x00\xdf\xaf\x00\xde\xad\x10\xbc\x8e\x00\xec\xbe\x00\xec\xd4d\xff\xe3\x00tA\x00\xf6\xc4\x00\xf6\xce\x00\xa5u\x00\xde\xa5\x00\xf7\xbd\x00\xd6\xad\x08\xdd\xaf\x19\x8cR\x00\xea\xb7\x00\xee\xe9\xdf\xc5\x00\x00\x00\tpHYs\x00\x00\n\xf0\x00\x00\n\xf0\x01B\xac4\x98\x00\x00\x00\x1ctEXtSoftware\x00Adobe Fireworks CS4\x06\xb2\xd3\xa0\x00\x00\x00\x15tEXtCreation Time\x0029/4/09Oq\xfdE\x00\x00\x00\xadIDAT\x18\x95M\x8f\x8d\x0e\x820\x0c\x84;ZdC~f\x07\xb2\x11D\x86\x89\xe8\xfb\xbf\xa0+h\xe2\x97\\\xd2^\x93\xb6\x07:1\x9f)q\x9e\xa5\x06\xad\xd5\x13\x8b\xac,\xb3\x02\x9d\x12C\xa1-\xef;M\x08*\x19\xce\x0e?\x1a\xeb4\xcc\xd4\x0c\x831\x87V\xca\xa1\x1a\xd3\x08@\xe4\xbd\xb7\x15P;\xc8\xd4{\x91\xbf\x11\x90\xffg\xdd\x8di\xfa\xb6\x0bs2Z\xff\xe8yg2\xdc\x11T\x96\xc7\x05\xa5\xef\x96+\xa7\xa59E\xae\xe1\x84cm^1\xa6\xb3\xda\x85\xc8\xd8/\x17se\x0eN^'\x8c\xc7\x8e\x88\xa8\xf6p\x8e\xc2;\xc6.\xd0\x11.\x91o\x12\x7f\xcb\xa5\xfe\x00\x89]\x10:\xf5\x00\x0e\xbf\x00\x00\x00\x00IEND\xaeB`\x82"

        # Create an instrument resource to attach content to
        instrument = IonObject("InstrumentDevice", name='instrument')
        iid, _ = self.resource_registry_service.create(instrument)

        att = Attachment(content=binary, attachment_type=AttachmentType.BLOB)
        aid1 = self.resource_registry_service.create_attachment(iid, att)

        att1 = self.resource_registry_service.read_attachment(
            aid1, include_content=True)
        self.assertEquals(binary, att1.content)

        import base64
        att = Attachment(content=base64.encodestring(binary),
                         attachment_type=AttachmentType.ASCII)
        aid2 = self.resource_registry_service.create_attachment(iid, att)

        att1 = self.resource_registry_service.read_attachment(
            aid2, include_content=True)
        self.assertEquals(binary, base64.decodestring(att1.content))

        att_ids = self.resource_registry_service.find_attachments(iid,
                                                                  id_only=True)
        self.assertEquals(att_ids, [aid1, aid2])

        att_ids = self.resource_registry_service.find_attachments(
            iid, id_only=True, descending=True)
        self.assertEquals(att_ids, [aid2, aid1])

        att_ids = self.resource_registry_service.find_attachments(
            iid, id_only=True, descending=True, limit=1)
        self.assertEquals(att_ids, [aid2])

        atts = self.resource_registry_service.find_attachments(
            iid, id_only=False, include_content=True, limit=1)
        self.assertEquals(atts[0].content, binary)

        self.resource_registry_service.delete_attachment(aid1)

        att_ids = self.resource_registry_service.find_attachments(iid,
                                                                  id_only=True)
        self.assertEquals(att_ids, [aid2])

        self.resource_registry_service.delete_attachment(aid2)

        att_ids = self.resource_registry_service.find_attachments(iid,
                                                                  id_only=True)
        self.assertEquals(att_ids, [])

    def _do_test_association(self):
        # Instantiate ActorIdentity object
        actor_identity_obj = IonObject("ActorIdentity", name="name")
        actor_identity_obj_id, actor_identity_obj_rev = self.resource_registry_service.create(
            actor_identity_obj)
        read_actor_identity_obj = self.resource_registry_service.read(
            actor_identity_obj_id)

        # Instantiate UserInfo object
        user_info_obj = IonObject("UserInfo", name="name")
        user_info_obj_id, user_info_obj_rev = self.resource_registry_service.create(
            user_info_obj)
        read_user_info_obj = self.resource_registry_service.read(
            user_info_obj_id)

        # Test create failures
        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.bogus, user_info_obj_id)
        self.assertTrue(cm.exception.message == "bogus")

        # Predicate not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, None, user_info_obj_id)
        self.assertTrue(
            cm.exception.message == "Association must have all elements set")

        # Subject id or object not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                None, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(
            cm.exception.message == "Association must have all elements set")

        # Object id or object not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, None)
        self.assertTrue(
            cm.exception.message == "Association must have all elements set")

        # Bad subject id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association(
                "bogus", PRED.hasInfo, user_info_obj_id)
        self.assertTrue(
            cm.exception.message == "Object with id bogus does not exist.")

        # Bad object id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, "bogus")
        self.assertTrue(
            cm.exception.message == "Object with id bogus does not exist.")

        # _id missing from subject
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message.startswith("Subject id"))

        # _id missing from object
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, user_info_obj)
        self.assertTrue(cm.exception.message.startswith("Object id"))

        # Wrong subject type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                user_info_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message ==
                        "Illegal subject type UserInfo for predicate hasInfo")

        # Wrong object type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, actor_identity_obj_id)
        self.assertTrue(
            cm.exception.message ==
            "Illegal object type ActorIdentity for predicate hasInfo")

        # Create an association between the subject and object
        assoc_id1, assoc_rev1 = self.resource_registry_service.create_association(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)

        # Read object, subject
        res_obj1 = self.resource_registry_service.read_object(
            actor_identity_obj_id, PRED.hasInfo, RT.UserInfo)
        self.assertEquals(res_obj1._id, user_info_obj_id)
        res_obj1 = self.resource_registry_service.read_object(
            actor_identity_obj_id, PRED.hasInfo, RT.UserInfo, id_only=True)
        self.assertEquals(res_obj1, user_info_obj_id)
        res_obj2 = self.resource_registry_service.read_subject(
            RT.ActorIdentity, PRED.hasInfo, user_info_obj_id)
        self.assertEquals(res_obj2._id, actor_identity_obj_id)
        res_obj2 = self.resource_registry_service.read_subject(
            RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, id_only=True)
        self.assertEquals(res_obj2, actor_identity_obj_id)

        # Search for associations (good cases)
        ret1 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        ret2 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(
            None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id, None, False)
        ret2 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, id_only=False)
        ret3 = self.resource_registry_service.find_associations(
            predicate=PRED.hasInfo, id_only=False)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations (good cases)
        ret1 = self.resource_registry_service.find_associations(
            read_actor_identity_obj, PRED.hasInfo, read_user_info_obj)
        ret2 = self.resource_registry_service.find_associations(
            read_actor_identity_obj, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(
            None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, read_user_info_obj, None,
            True)
        ret2 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, id_only=True)
        ret3 = self.resource_registry_service.find_associations(
            predicate=PRED.hasInfo, id_only=True)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(None, None, None)
        self.assertIn("Illegal parameters", cm.exception.message)

        # Find subjects (good cases)
        subj_ret1 = self.resource_registry_service.find_subjects(
            RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, True)
        subj_ret2 = self.resource_registry_service.find_subjects(
            RT.ActorIdentity, PRED.hasInfo, read_user_info_obj, True)
        self.assertTrue(len(subj_ret1) == len(subj_ret2))
        self.assertTrue(subj_ret1[0] == subj_ret2[0])
        self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id)

        subj_ret3 = self.resource_registry_service.find_subjects(
            None, PRED.hasInfo, user_info_obj_id, True)
        subj_ret4 = self.resource_registry_service.find_subjects(
            None, None, read_user_info_obj, True)
        self.assertTrue(len(subj_ret3) == len(subj_ret4))
        self.assertTrue(subj_ret3[0] == subj_ret4[0])
        self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id)

        subj_ret5 = self.resource_registry_service.find_subjects(
            None, PRED.hasInfo, user_info_obj_id, False)
        subj_ret6 = self.resource_registry_service.find_subjects(
            None, None, read_user_info_obj, False)
        self.assertTrue(len(subj_ret5) == len(subj_ret6))
        self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id)
        self.assertTrue(subj_ret5[1][0]._id == subj_ret6[1][0]._id)

        # Find subjects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide object")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_subjects(
                RT.UserCredentials, PRED.bogus, user_info_obj_id, True)
        self.assertTrue(cm.exception.message == "bogus")

        ret = self.resource_registry_service.find_subjects(
            RT.UserInfo, PRED.hasCredentials, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_subjects(
            RT.UserCredentials, PRED.hasInfo, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(
                RT.UserCredentials, PRED.hasInfo, user_info_obj, True)
        self.assertTrue(
            cm.exception.message == "Object id not available in object")

        # Find objects (good cases)
        subj_ret1 = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasInfo, RT.UserInfo, True)
        subj_ret2 = self.resource_registry_service.find_objects(
            read_actor_identity_obj, PRED.hasInfo, RT.UserInfo, True)
        self.assertTrue(len(subj_ret1) == len(subj_ret2))
        self.assertTrue(subj_ret1[0] == subj_ret2[0])
        self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id)

        subj_ret3 = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasInfo, None, True)
        subj_ret4 = self.resource_registry_service.find_objects(
            actor_identity_obj_id, None, None, True)
        self.assertTrue(len(subj_ret3) == len(subj_ret4))
        self.assertTrue(subj_ret3[0] == subj_ret4[0])
        self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id)

        subj_ret5 = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasInfo, None, False)
        subj_ret6 = self.resource_registry_service.find_objects(
            read_actor_identity_obj, None, None, False)
        self.assertTrue(len(subj_ret5) == len(subj_ret6))
        self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id)
        self.assertTrue(subj_ret5[1][0]._id == subj_ret6[1][0]._id)

        # Find objects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide subject")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_objects(
                actor_identity_obj_id, PRED.bogus, RT.UserCredentials, True)
        self.assertTrue(cm.exception.message == "bogus")

        ret = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasCredentials, RT.ActorIdentity, True)
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasInfo, RT.UserCredentials, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(
                actor_identity_obj, PRED.hasInfo, RT.UserInfo, True)
        self.assertTrue(
            cm.exception.message == "Object id not available in subject")

        # Get association (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(None, None, None)
        self.assertIn("Illegal parameters", cm.exception.message)

        assoc = self.resource_registry_service.get_association(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(assoc._id == assoc_id1)

        # Delete (bad cases)
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete_association("bogus")
        self.assertTrue(
            cm.exception.message == "Object with id bogus does not exist.")

        # Delete other association
        self.resource_registry_service.delete_association(assoc_id1)

        # Delete resources
        self.resource_registry_service.delete(actor_identity_obj_id)
        self.resource_registry_service.delete(user_info_obj_id)

    def _do_test_find_resources(self):
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_resources(
                RT.UserInfo, LCS.DRAFT, "name", False)
        self.assertTrue(
            cm.exception.message == "find by name does not support lcstate")

        ret = self.resource_registry_service.find_resources(
            RT.UserInfo, None, "name", False)
        self.assertEquals(len(ret[0]), 0)

        # Instantiate an object
        obj = IonObject("InstrumentAgentInstance", name="name")

        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        ret = self.resource_registry_service.find_resources(
            RT.InstrumentAgentInstance, None, "name", False)
        self.assertEquals(len(ret[0]), 1)
        self.assertEquals(ret[0][0]._id, read_obj._id)

        ret = self.resource_registry_service.find_resources(
            RT.InstrumentAgentInstance, LCS.DEPLOYED, None, False)
        self.assertEquals(len(ret[0]), 1)
        self.assertEquals(ret[0][0]._id, read_obj._id)

    def _do_test_find_objects_mult(self):
        dp = DataProcess()
        transform = Transform()
        pd = ProcessDefinition()

        dp_id, _ = self.resource_registry_service.create(dp)
        transform_id, _ = self.resource_registry_service.create(transform)
        pd_id, _ = self.resource_registry_service.create(pd)

        self.resource_registry_service.create_association(
            subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.resource_registry_service.create_association(
            subject=transform_id,
            object=pd_id,
            predicate=PRED.hasProcessDefinition)

        results, _ = self.resource_registry_service.find_objects_mult(
            subjects=[dp_id], id_only=True)
        self.assertTrue(results == [transform_id])

        results, _ = self.resource_registry_service.find_objects_mult(
            subjects=[dp_id, transform_id], id_only=True)
        results.sort()
        correct = [transform_id, pd_id]
        correct.sort()
        self.assertTrue(results == correct)

    @attr('EXT')
    def test_get_resource_extension(self):

        # Testing multiple instrument owners
        subject1 = "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254"

        actor_identity_obj1 = IonObject(RT.ActorIdentity, {"name": subject1})
        actor_id1, _ = self.resource_registry_service.create(
            actor_identity_obj1)

        user_info_obj1 = IonObject(RT.UserInfo, {"name": "Foo"})
        user_info_id1, _ = self.resource_registry_service.create(
            user_info_obj1)
        self.resource_registry_service.create_association(
            actor_id1, PRED.hasInfo, user_info_id1)

        subject2 = "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Bob Cumbers A256"

        actor_identity_obj2 = IonObject(RT.ActorIdentity, {"name": subject2})
        actor_id2, _ = self.resource_registry_service.create(
            actor_identity_obj2)

        user_info_obj2 = IonObject(RT.UserInfo, {"name": "Foo2"})
        user_info_id2, _ = self.resource_registry_service.create(
            user_info_obj2)
        self.resource_registry_service.create_association(
            actor_id2, PRED.hasInfo, user_info_id2)

        test_obj = IonObject(RT.InformationResource, {"name": "TestResource"})
        test_obj_id, _ = self.resource_registry_service.create(test_obj)
        self.resource_registry_service.create_association(
            test_obj_id, PRED.hasOwner, actor_id1)
        self.resource_registry_service.create_association(
            test_obj_id, PRED.hasOwner, actor_id2)

        extended_resource = self.resource_registry_service.get_resource_extension(
            test_obj_id, OT.ExtendedInformationResource)

        self.assertEqual(test_obj_id, extended_resource._id)
        self.assertEqual(len(extended_resource.owners), 2)

        extended_resource_list = self.resource_registry_service.get_resource_extension(
            str([user_info_id1, user_info_id2]),
            OT.ExtendedInformationResource)
        self.assertEqual(len(extended_resource_list), 2)

        optional_args = {'user_id': user_info_id1}
        extended_resource = self.resource_registry_service.get_resource_extension(
            test_obj_id,
            OT.TestExtendedInformationResource,
            optional_args=optional_args)

        self.assertEqual(test_obj_id, extended_resource._id)
        self.assertEqual(len(extended_resource.owners), 2)
        self.assertEqual(extended_resource.user_id, user_info_id1)

    @attr('PREP')
    def test_prepare_resource_support(self):
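        # Verify prepare_resource_support returns create/update request templates
        # and a resource object that can be persisted and then updated.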

        prepare_data = self.resource_registry_service.prepare_resource_support(
            resource_type=RT.StreamDefinition)

        self.assertEqual(prepare_data.create_request.service_name,
                         "resource_registry")
        self.assertEqual(prepare_data.create_request.service_operation,
                         "create")
        self.assertEqual(prepare_data.create_request.request_parameters,
                         {"object": "$(object)"})

        self.assertEqual(prepare_data.update_request.service_name,
                         "resource_registry")
        self.assertEqual(prepare_data.update_request.service_operation,
                         "update")
        self.assertEqual(prepare_data.update_request.request_parameters,
                         {"object": "$(object)"})

        res_id, _ = self.resource_registry_service.create(
            prepare_data.resource)

        prepare_data = self.resource_registry_service.prepare_resource_support(
            resource_type=RT.StreamDefinition, resource_id=res_id)

        prepare_data.resource.name = "test_stream_def"
        prepare_data.resource.stream_type = "test_type"

        stream_def_id, _ = self.resource_registry_service.update(
            prepare_data.resource)

        stream_def = self.resource_registry_service.read(stream_def_id)

        self.assertEqual(stream_def.name, prepare_data.resource.name)
        self.assertEqual(stream_def.stream_type,
                         prepare_data.resource.stream_type)
class TestResourceRegistry(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to bank service
        self.resource_registry_service = ResourceRegistryServiceClient()

    @unittest.skip('Represents a bug in storage/retrieval')
    def test_tuple_in_dict(self):
        # create a resource with a tuple saved in a dict
        transform_obj = IonObject(RT.Transform)
        transform_obj.configuration = {}
        transform_obj.configuration["tuple"] = ('STRING',)
        transform_id, _ = self.resource_registry_service.create(transform_obj)

        # read the resource back
        returned_transform_obj = self.resource_registry_service.read(transform_id)

        self.assertEqual(transform_obj.configuration["tuple"], returned_transform_obj.configuration["tuple"])

    def test_basics(self):
        # Sequence all the tests so that we can save numerous system start and stops
        self._do_test_crud()
        self._do_test_read_mult()
        self._do_test_lifecycle()
        self._do_test_attach()
        self._do_test_association()
        self._do_test_find_resources()
        self._do_test_find_objects_mult()

    def _do_test_crud(self):
        # Some quick registry tests
        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", name="name", foo="bar")
        self.assertTrue(cm.exception.message == "__init__() got an unexpected keyword argument 'foo'")

        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name", "foo": "bar"})
        self.assertTrue(cm.exception.message == "__init__() got an unexpected keyword argument 'foo'")

        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name"}, foo="bar")
        self.assertTrue(cm.exception.message == "__init__() got an unexpected keyword argument 'foo'")

        # Instantiate an object
        obj = IonObject("UserInfo", name="name")
        
        # Can set attributes that aren't in the object's schema
        with self.assertRaises(AttributeError) as cm:
            setattr(obj, "foo", "bar")
        self.assertTrue(cm.exception.message == "'UserInfo' object has no attribute 'foo'")

        # Cam't call update with object that hasn't been persisted
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.update(obj)
        self.assertTrue(cm.exception.message.startswith("Object does not have required '_id' or '_rev' attribute"))

        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        # Cannot create object with _id and _rev fields pre-set
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create(read_obj)

        # Update object
        read_obj.name = "John Doe"
        self.resource_registry_service.update(read_obj)

        # Update should fail with revision mismatch
        with self.assertRaises(Conflict) as cm:
            self.resource_registry_service.update(read_obj)

        # Re-read and update object
        read_obj = self.resource_registry_service.read(obj_id)
        self.resource_registry_service.update(read_obj)
        
        # Delete object
        self.resource_registry_service.delete(obj_id)

        # Make sure read, update and delete report error
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.read(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.update(read_obj)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        # Owner creation tests
        user = IonObject("ActorIdentity", name='user')
        uid,_ = self.resource_registry_service.create(user)

        inst = IonObject("InstrumentDevice", name='instrument')
        iid,_ = self.resource_registry_service.create(inst, headers={'ion-actor-id':str(uid)})

        ids,_ = self.resource_registry_service.find_objects(iid, PRED.hasOwner, RT.ActorIdentity, id_only=True)
        self.assertEquals(len(ids), 1)

        assoc = self.resource_registry_service.read(ids[0])
        self.resource_registry_service.delete(iid)

        with self.assertRaises(NotFound) as ex:
            assoc = self.resource_registry_service.read(iid)

    def _do_test_read_mult(self):
        test_resource1_id,_  = self.resource_registry_service.create(Resource(name='test1'))
        test_resource2_id,_  = self.resource_registry_service.create(Resource(name='test2'))

        res_list = [test_resource1_id, test_resource2_id]

        objects = self.resource_registry_service.read_mult(res_list)

        for o in objects:
            self.assertIsInstance(o,Resource)
            self.assertTrue(o._id in res_list)

    def _do_test_lifecycle(self):
        # Lifecycle tests
        att = IonObject("InstrumentDevice", name='mine', description='desc')

        rid,rev = self.resource_registry_service.create(att)

        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.name, att.name)
        self.assertEquals(att1.lcstate, LCS.DRAFT)
        self.assertEquals(att1.availability, AS.PRIVATE)

        new_state = self.resource_registry_service.execute_lifecycle_transition(rid, LCE.PLAN)
        self.assertEquals(new_state, lcstate(LCS.PLANNED, AS.PRIVATE))

        att2 = self.resource_registry_service.read(rid)
        self.assertEquals(att2.lcstate, LCS.PLANNED)
        self.assertEquals(att2.availability, AS.PRIVATE)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.execute_lifecycle_transition(rid, LCE.UNANNOUNCE)
        self.assertTrue("type=InstrumentDevice, lcstate=PLANNED_PRIVATE has no transition for event unannounce" in cm.exception.message)

        new_state = self.resource_registry_service.execute_lifecycle_transition(rid, LCE.DEVELOP)
        self.assertEquals(new_state, lcstate(LCS.DEVELOPED, AS.PRIVATE))

        with self.assertRaises(BadRequest):
            self.resource_registry_service.execute_lifecycle_transition(
                    resource_id=rid, transition_event='NONE##')

        self.resource_registry_service.set_lifecycle_state(rid, lcstate(LCS.INTEGRATED, AS.PRIVATE))
        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.lcstate, LCS.INTEGRATED)
        self.assertEquals(att1.availability, AS.PRIVATE)

    def _do_test_attach(self):
        binary = "\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x03\x00\x00\x00(-\x0fS\x00\x00\x00\x03sBIT\x08\x08\x08\xdb\xe1O\xe0\x00\x00\x00~PLTEf3\x00\xfc\xf7\xe0\xee\xcc\x00\xd3\xa0\x00\xcc\x99\x00\xec\xcdc\x9fl\x00\xdd\xb2\x00\xff\xff\xff|I\x00\xf9\xdb\x00\xdd\xb5\x19\xd9\xad\x10\xb6\x83\x00\xf8\xd6\x00\xf2\xc5\x00\xd8\xab\x00n;\x00\xff\xcc\x00\xd6\xa4\t\xeb\xb8\x00\x83Q\x00\xadz\x00\xff\xde\x00\xff\xd6\x00\xd6\xa3\x00\xdf\xaf\x00\xde\xad\x10\xbc\x8e\x00\xec\xbe\x00\xec\xd4d\xff\xe3\x00tA\x00\xf6\xc4\x00\xf6\xce\x00\xa5u\x00\xde\xa5\x00\xf7\xbd\x00\xd6\xad\x08\xdd\xaf\x19\x8cR\x00\xea\xb7\x00\xee\xe9\xdf\xc5\x00\x00\x00\tpHYs\x00\x00\n\xf0\x00\x00\n\xf0\x01B\xac4\x98\x00\x00\x00\x1ctEXtSoftware\x00Adobe Fireworks CS4\x06\xb2\xd3\xa0\x00\x00\x00\x15tEXtCreation Time\x0029/4/09Oq\xfdE\x00\x00\x00\xadIDAT\x18\x95M\x8f\x8d\x0e\x820\x0c\x84;ZdC~f\x07\xb2\x11D\x86\x89\xe8\xfb\xbf\xa0+h\xe2\x97\\\xd2^\x93\xb6\x07:1\x9f)q\x9e\xa5\x06\xad\xd5\x13\x8b\xac,\xb3\x02\x9d\x12C\xa1-\xef;M\x08*\x19\xce\x0e?\x1a\xeb4\xcc\xd4\x0c\x831\x87V\xca\xa1\x1a\xd3\x08@\xe4\xbd\xb7\x15P;\xc8\xd4{\x91\xbf\x11\x90\xffg\xdd\x8di\xfa\xb6\x0bs2Z\xff\xe8yg2\xdc\x11T\x96\xc7\x05\xa5\xef\x96+\xa7\xa59E\xae\xe1\x84cm^1\xa6\xb3\xda\x85\xc8\xd8/\x17se\x0eN^'\x8c\xc7\x8e\x88\xa8\xf6p\x8e\xc2;\xc6.\xd0\x11.\x91o\x12\x7f\xcb\xa5\xfe\x00\x89]\x10:\xf5\x00\x0e\xbf\x00\x00\x00\x00IEND\xaeB`\x82"

        # Create a resource to hold the attachments
        instrument = IonObject("InstrumentDevice", name='instrument')
        iid,_ = self.resource_registry_service.create(instrument)

        att = Attachment(content=binary, attachment_type=AttachmentType.BLOB)
        aid1 = self.resource_registry_service.create_attachment(iid, att)

        att1 = self.resource_registry_service.read_attachment(aid1, include_content=True)
        self.assertEquals(binary, att1.content)

        import base64
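        # ASCII attachments hold base64-encoded text, so decoding the stored content recovers the original bytes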
        att = Attachment(content=base64.encodestring(binary), attachment_type=AttachmentType.ASCII)
        aid2 = self.resource_registry_service.create_attachment(iid, att)

        att1 = self.resource_registry_service.read_attachment(aid2, include_content=True)
        self.assertEquals(binary, base64.decodestring(att1.content))

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True)
        self.assertEquals(att_ids, [aid1, aid2])
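        # find_attachments lists ids in creation order by default; descending=True reverses it and limit caps the count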

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True, descending=True)
        self.assertEquals(att_ids, [aid2, aid1])

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True, descending=True, limit=1)
        self.assertEquals(att_ids, [aid2])

        atts = self.resource_registry_service.find_attachments(iid, id_only=False, include_content=True, limit=1)
        self.assertEquals(atts[0].content, binary)

        self.resource_registry_service.delete_attachment(aid1)

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True)
        self.assertEquals(att_ids, [aid2])

        self.resource_registry_service.delete_attachment(aid2)

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True)
        self.assertEquals(att_ids, [])

    def _do_test_association(self):
        # Instantiate ActorIdentity object
        actor_identity_obj = IonObject("ActorIdentity", name="name")
        actor_identity_obj_id, actor_identity_obj_rev = self.resource_registry_service.create(actor_identity_obj)
        read_actor_identity_obj = self.resource_registry_service.read(actor_identity_obj_id)

        # Instantiate UserInfo object
        user_info_obj = IonObject("UserInfo", name="name")
        user_info_obj_id, user_info_obj_rev = self.resource_registry_service.create(user_info_obj)
        read_user_info_obj = self.resource_registry_service.read(user_info_obj_id)

        # Test create failures
        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.bogus, user_info_obj_id)
        self.assertTrue(cm.exception.message == "bogus")

        # Predicate not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Association must have all elements set")

        # Subject id or object not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(None, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Association must have all elements set")

        # Object id or object not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, None)
        self.assertTrue(cm.exception.message == "Association must have all elements set")

        # Bad subject id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association("bogus", PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Object with id bogus does not exist.")

        # Bad object id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, "bogus")
        self.assertTrue(cm.exception.message == "Object with id bogus does not exist.")

        # _id missing from subject
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message.startswith("Subject id"))

        # _id missing from object
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, user_info_obj)
        self.assertTrue(cm.exception.message.startswith("Object id"))

        # Wrong subject type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(user_info_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Illegal subject type UserInfo for predicate hasInfo")

        # Wrong object type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, actor_identity_obj_id)
        self.assertTrue(cm.exception.message == "Illegal object type ActorIdentity for predicate hasInfo")

        # Create an association between the subject and object
        assoc_id1, assoc_rev1 = self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)

        # Read object, subject
        res_obj1 = self.resource_registry_service.read_object(actor_identity_obj_id, PRED.hasInfo, RT.UserInfo)
        self.assertEquals(res_obj1._id, user_info_obj_id)
        res_obj1 = self.resource_registry_service.read_object(actor_identity_obj_id, PRED.hasInfo, RT.UserInfo, id_only=True)
        self.assertEquals(res_obj1, user_info_obj_id)
        res_obj2 = self.resource_registry_service.read_subject(RT.ActorIdentity, PRED.hasInfo, user_info_obj_id)
        self.assertEquals(res_obj2._id, actor_identity_obj_id)
        res_obj2 = self.resource_registry_service.read_subject(RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, id_only=True)
        self.assertEquals(res_obj2, actor_identity_obj_id)

        # Search for associations (good cases)
        ret1 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        ret2 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, user_info_obj_id, None, False)
        ret2 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, id_only=False)
        ret3 = self.resource_registry_service.find_associations(predicate=PRED.hasInfo, id_only=False)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations (good cases)
        ret1 = self.resource_registry_service.find_associations(read_actor_identity_obj, PRED.hasInfo, read_user_info_obj)
        ret2 = self.resource_registry_service.find_associations(read_actor_identity_obj, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, read_user_info_obj, None, True)
        ret2 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, id_only=True)
        ret3 = self.resource_registry_service.find_associations(predicate=PRED.hasInfo, id_only=True)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(None, None, None)
        self.assertIn("Illegal parameters", cm.exception.message)

        # Find subjects (good cases)
        subj_ret1 = self.resource_registry_service.find_subjects(RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, True)
        subj_ret2 = self.resource_registry_service.find_subjects(RT.ActorIdentity, PRED.hasInfo, read_user_info_obj, True)
        self.assertTrue(len(subj_ret1) == len(subj_ret2))
        self.assertTrue(subj_ret1[0] == subj_ret2[0])
        self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id)

        subj_ret3 = self.resource_registry_service.find_subjects(None, PRED.hasInfo, user_info_obj_id, True)
        subj_ret4 = self.resource_registry_service.find_subjects(None, None, read_user_info_obj, True)
        self.assertTrue(len(subj_ret3) == len(subj_ret4))
        self.assertTrue(subj_ret3[0] == subj_ret4[0])
        self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id)

        subj_ret5 = self.resource_registry_service.find_subjects(None, PRED.hasInfo, user_info_obj_id, False)
        subj_ret6 = self.resource_registry_service.find_subjects(None, None, read_user_info_obj, False)
        self.assertTrue(len(subj_ret5) == len(subj_ret6))
        self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id)
        self.assertTrue(subj_ret5[1][0]._id == subj_ret6[1][0]._id)

        # Find subjects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide object")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_subjects(RT.UserCredentials, PRED.bogus, user_info_obj_id, True)
        self.assertTrue(cm.exception.message == "bogus")

        ret = self.resource_registry_service.find_subjects(RT.UserInfo, PRED.hasCredentials, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_subjects(RT.UserCredentials, PRED.hasInfo, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(RT.UserCredentials, PRED.hasInfo, user_info_obj, True)
        self.assertTrue(cm.exception.message == "Object id not available in object")

        # Find objects (good cases)
        subj_ret1 = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, RT.UserInfo, True)
        subj_ret2 = self.resource_registry_service.find_objects(read_actor_identity_obj, PRED.hasInfo, RT.UserInfo, True)
        self.assertTrue(len(subj_ret1) == len(subj_ret2))
        self.assertTrue(subj_ret1[0] == subj_ret2[0])
        self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id)

        subj_ret3 = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, None, True)
        subj_ret4 = self.resource_registry_service.find_objects(actor_identity_obj_id, None, None, True)
        self.assertTrue(len(subj_ret3) == len(subj_ret4))
        self.assertTrue(subj_ret3[0] == subj_ret4[0])
        self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id)

        subj_ret5 = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, None, False)
        subj_ret6 = self.resource_registry_service.find_objects(read_actor_identity_obj, None, None, False)
        self.assertTrue(len(subj_ret5) == len(subj_ret6))
        self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id)
        self.assertTrue(subj_ret5[1][0]._id == subj_ret6[1][0]._id)

        # Find objects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide subject")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.bogus, RT.UserCredentials, True)
        self.assertTrue(cm.exception.message == "bogus")

        ret = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasCredentials, RT.ActorIdentity, True)
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, RT.UserCredentials, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(actor_identity_obj, PRED.hasInfo, RT.UserInfo, True)
        self.assertTrue(cm.exception.message == "Object id not available in subject")

        # Get association (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(None, None, None)
        self.assertIn("Illegal parameters", cm.exception.message)


        assoc = self.resource_registry_service.get_association(actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(assoc._id == assoc_id1)

        # Delete (bad cases)
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete_association("bogus")
        self.assertTrue(cm.exception.message == "Object with id bogus does not exist.")

        # Delete other association
        self.resource_registry_service.delete_association(assoc_id1)

        # Delete resources
        self.resource_registry_service.delete(actor_identity_obj_id)
        self.resource_registry_service.delete(user_info_obj_id)

    def _do_test_find_resources(self):
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_resources(RT.UserInfo, LCS.DRAFT, "name", False)
        self.assertTrue(cm.exception.message == "find by name does not support lcstate")
        
        ret = self.resource_registry_service.find_resources(RT.UserInfo, None, "name", False)
        self.assertEquals(len(ret[0]), 0)

        # Instantiate an object
        obj = IonObject("InstrumentAgentInstance", name="name")
        
        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        ret = self.resource_registry_service.find_resources(RT.InstrumentAgentInstance, None, "name", False)
        self.assertEquals(len(ret[0]), 1)
        self.assertEquals(ret[0][0]._id, read_obj._id)

        ret = self.resource_registry_service.find_resources(RT.InstrumentAgentInstance, LCS.DEPLOYED, None, False)
        self.assertEquals(len(ret[0]), 1)
        self.assertEquals(ret[0][0]._id, read_obj._id)

    def _do_test_find_objects_mult(self):
        dp = DataProcess()
        transform = Transform()
        pd = ProcessDefinition()

        dp_id, _ = self.resource_registry_service.create(dp)
        transform_id, _ = self.resource_registry_service.create(transform)
        pd_id, _ = self.resource_registry_service.create(pd)

        self.resource_registry_service.create_association(subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.resource_registry_service.create_association(subject=transform_id, object=pd_id, predicate=PRED.hasProcessDefinition)
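        # Association chain: dp --hasTransform--> transform --hasProcessDefinition--> pd.
        # find_objects_mult walks a single hop, returning each subject's direct objects only.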

        results, _  = self.resource_registry_service.find_objects_mult(subjects=[dp_id],id_only=True)
        self.assertTrue(results == [transform_id])

        results, _  = self.resource_registry_service.find_objects_mult(subjects=[dp_id, transform_id], id_only=True)
        results.sort()
        correct = [transform_id, pd_id]
        correct.sort()
        self.assertTrue(results == correct)

    @attr('EXT')
    def test_get_resource_extension(self):
        #Testing multiple resource owners
        subject1 = "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254"

        actor_identity_obj1 = IonObject(RT.ActorIdentity, {"name": subject1})
        actor_id1,_ = self.resource_registry_service.create(actor_identity_obj1)

        user_info_obj1 = IonObject(RT.UserInfo, {"name": "Foo"})
        user_info_id1,_ = self.resource_registry_service.create(user_info_obj1)
        self.resource_registry_service.create_association(actor_id1, PRED.hasInfo, user_info_id1)

        subject2 = "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Bob Cumbers A256"

        actor_identity_obj2 = IonObject(RT.ActorIdentity, {"name": subject2})
        actor_id2,_ = self.resource_registry_service.create(actor_identity_obj2)

        user_info_obj2 = IonObject(RT.UserInfo, {"name": "Foo2"})
        user_info_id2,_ = self.resource_registry_service.create(user_info_obj2)
        self.resource_registry_service.create_association(actor_id2, PRED.hasInfo, user_info_id2)

        test_obj = IonObject(RT.InformationResource,  {"name": "TestResource"})
        test_obj_id,_ = self.resource_registry_service.create(test_obj)
        self.resource_registry_service.create_association(test_obj_id, PRED.hasOwner, actor_id1)
        self.resource_registry_service.create_association(test_obj_id, PRED.hasOwner, actor_id2)
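        # Both hasOwner associations should be rolled up into the extended resource's owners list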

        extended_resource = self.resource_registry_service.get_resource_extension(test_obj_id, OT.ExtendedInformationResource )

        self.assertEqual(test_obj_id,extended_resource._id)
        self.assertEqual(len(extended_resource.owners),2)
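        # A stringified list of resource ids is also accepted and yields one extended resource per id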

        extended_resource_list = self.resource_registry_service.get_resource_extension(str([user_info_id1,user_info_id2]), OT.ExtendedInformationResource)
        self.assertEqual(len(extended_resource_list), 2)

        optional_args = {'user_id': user_info_id1}
        extended_resource = self.resource_registry_service.get_resource_extension(test_obj_id, OT.TestExtendedInformationResource, optional_args=optional_args )

        self.assertEqual(test_obj_id,extended_resource._id)
        self.assertEqual(len(extended_resource.owners),2)
        self.assertEqual(extended_resource.user_id, user_info_id1)
class TestResourceRegistry(IonIntegrationTestCase):
    
#    service_dependencies = [('resource_registry', {'resource_registry': {'persistent': True, 'force_clean': True}})]

    def setUp(self):
        # Start container
        self._start_container()

        # Establish endpoint with container
        container_client = ContainerAgentClient(node=self.container.node, name=self.container.name)
        container_client.start_rel_from_url('res/deploy/r2coi.yml')

        # Now create client to bank service
        self.resource_registry_service = ResourceRegistryServiceClient(node=self.container.node)

    def test_crud(self):
        # Some quick registry tests
        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", name="name", foo="bar")
        self.assertTrue(cm.exception.message == "__init__() got an unexpected keyword argument 'foo'")

        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name", "foo": "bar"})
        self.assertTrue(cm.exception.message == "__init__() got an unexpected keyword argument 'foo'")

        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name"}, foo="bar")
        self.assertTrue(cm.exception.message == "__init__() got an unexpected keyword argument 'foo'")

        # Instantiate an object
        obj = IonObject("UserInfo", name="name")
        
        # Can't set attributes that aren't in the object's schema
        with self.assertRaises(AttributeError) as cm:
            setattr(obj, "foo", "bar")
        self.assertTrue(cm.exception.message == "'UserInfo' object has no attribute 'foo'")

        # Can't call update with an object that hasn't been persisted
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.update(obj)
        self.assertTrue(cm.exception.message.startswith("Object does not have required '_id' or '_rev' attribute"))

        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        # Cannot create object with _id and _rev fields pre-set        
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create(read_obj)
        self.assertTrue(cm.exception.message.startswith("Doc must not have '_id'"))

        # Update object
        read_obj.name = "John Doe"
        self.resource_registry_service.update(read_obj)

        # Update should fail with revision mismatch
        with self.assertRaises(Conflict) as cm:
            self.resource_registry_service.update(read_obj)
        self.assertTrue(cm.exception.message.startswith("Object not based on most current version"))

        # Re-read and update object
        read_obj = self.resource_registry_service.read(obj_id)
        self.resource_registry_service.update(read_obj)
        
        # Delete object
        self.resource_registry_service.delete(obj_id)

        # Make sure read, update and delete report an error
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.read(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.update(read_obj)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))
           
    def test_lifecycle(self):
        att = IonObject("Attachment", name='mine', description='desc')

        rid,rev = self.resource_registry_service.create(att)

        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.name, att.name)
        self.assertEquals(att1.lcstate, LCS.DRAFT)

        new_state = self.resource_registry_service.execute_lifecycle_transition(rid, LCE.register)
        self.assertEquals(new_state, LCS.PLANNED)

        att2 = self.resource_registry_service.read(rid)
        self.assertEquals(att2.lcstate, LCS.PLANNED)
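        # Re-issuing the same event is an error: PLANNED has no 'register' transition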

        with self.assertRaises(Inconsistent) as cm:
            self.resource_registry_service.execute_lifecycle_transition(rid, LCE.register)
        self.assertTrue("type=Attachment, lcstate=PLANNED has no transition for event register" in cm.exception.message)

        new_state = self.resource_registry_service.execute_lifecycle_transition(rid, LCE.develop, LCS.PLANNED)
        self.assertEquals(new_state, LCS.DEVELOPED)

        self.assertRaises(iex.Inconsistent, self.resource_registry_service.execute_lifecycle_transition,
                                            resource_id=rid, transition_event=LCE.develop, current_lcstate=LCS.PLANNED)
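        # The current_lcstate argument acts as a guard: the repeated develop call fails
        # because the resource has already moved past PLANNED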

    def test_association(self):
        # Instantiate UserIdentity object
        user_identity_obj = IonObject("UserIdentity", name="name")
        user_identity_obj_id, user_identity_obj_rev = self.resource_registry_service.create(user_identity_obj)
        read_user_identity_obj = self.resource_registry_service.read(user_identity_obj_id)

        # Instantiate UserInfo object
        user_info_obj = IonObject("UserInfo", name="name")
        user_info_obj_id, user_info_obj_rev = self.resource_registry_service.create(user_info_obj)
        read_user_info_obj = self.resource_registry_service.read(user_info_obj_id)

        # Test create failures
        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.create_association(user_identity_obj_id, PRED.bogus, user_info_obj_id)
        self.assertTrue(cm.exception.message == "bogus")

        # Predicate not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(user_identity_obj_id, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Association must have all elements set")

        # Bad association type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(user_identity_obj_id, PRED.hasInfo, user_info_obj_id, 'bogustype')
        self.assertTrue(cm.exception.message == "Unsupported assoc_type: bogustype")

        # Subject id or object not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(None, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Association must have all elements set")

        # Object id or object not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(user_identity_obj_id, PRED.hasInfo, None)
        self.assertTrue(cm.exception.message == "Association must have all elements set")

        # Bad subject id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association("bogus", PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Object with id bogus does not exist.")

        # Bad object id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association(user_identity_obj_id, PRED.hasInfo, "bogus")
        self.assertTrue(cm.exception.message == "Object with id bogus does not exist.")

        # _id missing from subject
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(user_identity_obj, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Subject id or rev not available")

        # _id missing from object
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(user_identity_obj_id, PRED.hasInfo, user_info_obj)
        self.assertTrue(cm.exception.message == "Object id or rev not available")

        # Wrong subject type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(user_info_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Illegal subject type UserInfo for predicate hasInfo")

        # Wrong object type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(user_identity_obj_id, PRED.hasInfo, user_identity_obj_id)
        self.assertTrue(cm.exception.message == "Illegal object type UserIdentity for predicate hasInfo")

        # Create duplicate associations
        assoc_id1, assoc_rev1 = self.resource_registry_service.create_association(user_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        assoc_id2, assoc_rev2 = self.resource_registry_service.create_association(user_identity_obj_id, PRED.hasInfo, user_info_obj_id, "H2R")
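        # "H2R" is a different association type, so the same subject/predicate/object triple can exist twice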

        # Search for associations (good cases)
        ret1 = self.resource_registry_service.find_associations(user_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        ret2 = self.resource_registry_service.find_associations(user_identity_obj_id, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(user_identity_obj_id, PRED.hasInfo, user_info_obj_id, True)
        ret2 = self.resource_registry_service.find_associations(user_identity_obj_id, PRED.hasInfo, id_only=True)
        ret3 = self.resource_registry_service.find_associations(predicate=PRED.hasInfo, id_only=True)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations (good cases)
        ret1 = self.resource_registry_service.find_associations(read_user_identity_obj, PRED.hasInfo, read_user_info_obj)
        ret2 = self.resource_registry_service.find_associations(read_user_identity_obj, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(user_identity_obj_id, PRED.hasInfo, read_user_info_obj, True)
        ret2 = self.resource_registry_service.find_associations(user_identity_obj_id, PRED.hasInfo, id_only=True)
        ret3 = self.resource_registry_service.find_associations(predicate=PRED.hasInfo, id_only=True)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(None, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(user_identity_obj_id, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(None, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(user_identity_obj, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Object id not available in subject")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(user_identity_obj_id, None, user_info_obj)
        self.assertTrue(cm.exception.message == "Object id not available in object")

        # Find subjects (good cases)
        subj_ret1 = self.resource_registry_service.find_subjects(RT.UserIdentity, PRED.hasInfo, user_info_obj_id, True)
        subj_ret2 = self.resource_registry_service.find_subjects(RT.UserIdentity, PRED.hasInfo, read_user_info_obj, True)
        self.assertTrue(len(subj_ret1) == len(subj_ret2))
        self.assertTrue(subj_ret1[0] == subj_ret2[0])
        self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id)

        subj_ret3 = self.resource_registry_service.find_subjects(None, PRED.hasInfo, user_info_obj_id, True)
        subj_ret4 = self.resource_registry_service.find_subjects(None, None, read_user_info_obj, True)
        self.assertTrue(len(subj_ret3) == len(subj_ret4))
        self.assertTrue(subj_ret3[0] == subj_ret4[0])
        self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id)

        subj_ret5 = self.resource_registry_service.find_subjects(None, PRED.hasInfo, user_info_obj_id, False)
        subj_ret6 = self.resource_registry_service.find_subjects(None, None, read_user_info_obj, False)
        self.assertTrue(len(subj_ret5) == len(subj_ret6))
        self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id)
        self.assertTrue(subj_ret5[1][0]._id == subj_ret6[1][0]._id)

        # Find subjects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide object")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_subjects(RT.UserCredentials, PRED.bogus, user_info_obj_id, True)
        self.assertTrue(cm.exception.message == "bogus")

        ret = self.resource_registry_service.find_subjects(RT.UserInfo, PRED.hasCredentials, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_subjects(RT.UserCredentials, PRED.hasInfo, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(RT.UserCredentials, PRED.hasInfo, user_info_obj, True)
        self.assertTrue(cm.exception.message == "Object id not available in object")

        # Find objects (good cases)
        subj_ret1 = self.resource_registry_service.find_objects(user_identity_obj_id, PRED.hasInfo, RT.UserInfo, True)
        subj_ret2 = self.resource_registry_service.find_objects(read_user_identity_obj, PRED.hasInfo, RT.UserInfo, True)
        self.assertTrue(len(subj_ret1) == len(subj_ret2))
        self.assertTrue(subj_ret1[0] == subj_ret2[0])
        self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id)

        subj_ret3 = self.resource_registry_service.find_objects(user_identity_obj_id, PRED.hasInfo, None, True)
        subj_ret4 = self.resource_registry_service.find_objects(user_identity_obj_id, None, None, True)
        self.assertTrue(len(subj_ret3) == len(subj_ret4))
        self.assertTrue(subj_ret3[0] == subj_ret4[0])
        self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id)

        subj_ret5 = self.resource_registry_service.find_objects(user_identity_obj_id, PRED.hasInfo, None, False)
        subj_ret6 = self.resource_registry_service.find_objects(read_user_identity_obj, None, None, False)
        self.assertTrue(len(subj_ret5) == len(subj_ret6))
        self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id)
        self.assertTrue(subj_ret5[1][0]._id == subj_ret6[1][0]._id)

        # Find objects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide subject")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_objects(user_identity_obj_id, PRED.bogus, RT.UserCredentials, True)
        self.assertTrue(cm.exception.message == "bogus")

        ret = self.resource_registry_service.find_objects(user_identity_obj_id, PRED.hasCredentials, RT.UserIdentity, True)
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_objects(user_identity_obj_id, PRED.hasInfo, RT.UserCredentials, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(user_identity_obj, PRED.hasInfo, RT.UserInfo, True)
        self.assertTrue(cm.exception.message == "Object id not available in subject")

        # Get association (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(None, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(user_identity_obj_id, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(None, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(user_identity_obj, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Object id not available in subject")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(user_identity_obj_id, None, user_info_obj)
        self.assertTrue(cm.exception.message == "Object id not available in object")

        with self.assertRaises(Inconsistent) as cm:
            self.resource_registry_service.get_association(user_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message.startswith("Duplicate associations found for subject/predicate/object"))

        # Delete one of the associations
        self.resource_registry_service.delete_association(assoc_id1)

        assoc = self.resource_registry_service.get_association(user_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(assoc._id == assoc_id2)

        # Delete (bad cases)
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete_association("bogus")
        self.assertTrue(cm.exception.message == "Object with id bogus does not exist.")

        # Delete other association
        self.resource_registry_service.delete_association(assoc_id2)

        # Delete resources
        self.resource_registry_service.delete(user_identity_obj_id)
        self.resource_registry_service.delete(user_info_obj_id)

    def test_find_resources(self):
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_resources(RT.UserInfo, LCS.DRAFT, "name", False)
        self.assertTrue(cm.exception.message == "find by name does not support lcstate")
        
        ret = self.resource_registry_service.find_resources(RT.UserInfo, None, "name", False)
        self.assertTrue(len(ret[0]) == 0)

        # Instantiate an object
        obj = IonObject("UserInfo", name="name")
        
        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        ret = self.resource_registry_service.find_resources(RT.UserInfo, None, "name", False)
        self.assertTrue(len(ret[0]) == 1)
        self.assertTrue(ret[0][0]._id == read_obj._id)

        ret = self.resource_registry_service.find_resources(RT.UserInfo, LCS.DRAFT, None, False)
        self.assertTrue(len(ret[0]) == 1)
        self.assertTrue(ret[0][0]._id == read_obj._id)
class TestWorkflowManagementIntegration(VisualizationIntegrationTestHelper):

    def setUp(self):
        # Start container

        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubclient =  PubsubManagementServiceClient(node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.datasetclient =  DatasetManagementServiceClient(node=self.container.node)
        self.workflowclient = WorkflowManagementServiceClient(node=self.container.node)
        self.process_dispatcher = ProcessDispatcherServiceClient(node=self.container.node)
        self.data_retriever = DataRetrieverServiceClient(node=self.container.node)

        self.ctd_stream_def = SBE37_CDM_stream_definition()

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),'Not integrated for CEI')
    #@unittest.skip("Skipping for debugging ")
    def test_SA_transform_components(self):

        assertions = self.assertTrue

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the input data product
        ctd_stream_id, ctd_parsed_data_product_id = self.create_ctd_input_stream_and_data_product()
        data_product_stream_ids.append(ctd_stream_id)


        ###
        ###  Setup the first transformation
        ###

        # Salinity: Data Process Definition
        ctd_L2_salinity_dprocdef_id = self.create_salinity_data_process_definition()

        l2_salinity_all_data_process_id, ctd_l2_salinity_output_dp_id = self.create_transform_process(ctd_L2_salinity_dprocdef_id,ctd_parsed_data_product_id )

        ## get the stream id for the transform outputs
        stream_ids, _ = self.rrclient.find_objects(ctd_l2_salinity_output_dp_id, PRED.hasStream, None, True)
        assertions(len(stream_ids) > 0 )
        sal_stream_id = stream_ids[0]
        data_product_stream_ids.append(sal_stream_id)


        ###
        ###  Setup the second transformation
        ###

        # Salinity Doubler: Data Process Definition
        salinity_doubler_dprocdef_id = self.create_salinity_doubler_data_process_definition()

        salinity_double_data_process_id, salinity_doubler_output_dp_id = self.create_transform_process(salinity_doubler_dprocdef_id, ctd_l2_salinity_output_dp_id )

        stream_ids, _ = self.rrclient.find_objects(salinity_doubler_output_dp_id, PRED.hasStream, None, True)
        assertions(len(stream_ids) > 0 )
        sal_dbl_stream_id = stream_ids[0]
        data_product_stream_ids.append(sal_dbl_stream_id)


        #Start the output stream listener to monitor and collect messages
        results = self.start_output_stream_and_listen(ctd_stream_id, data_product_stream_ids)

        #Stop the transform processes
        self.dataprocessclient.deactivate_data_process(salinity_double_data_process_id)
        self.dataprocessclient.deactivate_data_process(l2_salinity_all_data_process_id)

        #Validate the data from each of the messages along the way
        self.validate_messages(results)

    @attr('LOCOINT')
    @attr('SMOKE')
    @unittest.skip("not working")
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),'Not integrated for CEI')
    #@unittest.skip("Skipping for debugging ")
    def test_transform_workflow(self):

        assertions = self.assertTrue

        # Build the workflow definition
        workflow_def_obj = IonObject(RT.WorkflowDefinition, name='Salinity_Test_Workflow',description='tests a workflow of multiple transform data processes')

        workflow_data_product_name = 'TEST-Workflow_Output_Product' #Set a specific output product name

        #Add a transformation process definition
        ctd_L2_salinity_dprocdef_id = self.create_salinity_data_process_definition()
        workflow_step_obj = IonObject('DataProcessWorkflowStep', data_process_definition_id=ctd_L2_salinity_dprocdef_id, persist_process_output_data=False)  #Don't persist the intermediate data product
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #Add a transformation process definition
        salinity_doubler_dprocdef_id = self.create_salinity_doubler_data_process_definition()
        workflow_step_obj = IonObject('DataProcessWorkflowStep', data_process_definition_id=salinity_doubler_dprocdef_id, output_data_product_name=workflow_data_product_name)
        workflow_def_obj.workflow_steps.append(workflow_step_obj)
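        #Workflow steps run in list order: the salinity transform output feeds the doubler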

        #Create it in the resource registry
        workflow_def_id = self.workflowclient.create_workflow_definition(workflow_def_obj)

        aids = self.rrclient.find_associations(workflow_def_id, PRED.hasDataProcessDefinition)
        assertions(len(aids) == 2 )

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the input data product
        ctd_stream_id, ctd_parsed_data_product_id = self.create_ctd_input_stream_and_data_product()
        data_product_stream_ids.append(ctd_stream_id)

        #Create and start the workflow
        workflow_id, workflow_product_id = self.workflowclient.create_data_process_workflow(workflow_def_id, ctd_parsed_data_product_id, timeout=30)
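        #create_data_process_workflow returns the workflow id and the id of the final output data product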

        workflow_output_ids,_ = self.rrclient.find_subjects(RT.Workflow, PRED.hasOutputProduct, workflow_product_id, True)
        assertions(len(workflow_output_ids) == 1 )

        #persist the output product
        #self.dataproductclient.activate_data_product_persistence(workflow_product_id)
        dataset_ids,_ = self.rrclient.find_objects(workflow_product_id, PRED.hasDataset, RT.DataSet, True)
        assertions(len(dataset_ids) == 1 )
        dataset_id = dataset_ids[0]

        #Verify the output data product name matches what was specified in the workflow definition
        workflow_product = self.rrclient.read(workflow_product_id)
        assertions(workflow_product.name == workflow_data_product_name)

        #Walk the associations to find the appropriate output data streams to validate the messages
        workflow_dp_ids,_ = self.rrclient.find_objects(workflow_id, PRED.hasDataProduct, RT.DataProduct, True)
        assertions(len(workflow_dp_ids) == 2 )

        for dp_id in workflow_dp_ids:
            stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, None, True)
            assertions(len(stream_ids) == 1 )
            data_product_stream_ids.append(stream_ids[0])

        #Start the output stream listener to monitor and collect messages
        results = self.start_output_stream_and_listen(ctd_stream_id, data_product_stream_ids)

        #Stop the workflow processes
        self.workflowclient.terminate_data_process_workflow(workflow_id, False, timeout=15)  # Should test true at some point

        #Make sure the Workflow object was removed
        objs, _ = self.rrclient.find_resources(restype=RT.Workflow)
        assertions(len(objs) == 0)

        #Validate the data from each of the messages along the way
        self.validate_messages(results)

        #validate that the data was persisted and can be retrieved
        self.validate_data_ingest_retrieve(dataset_id)

        #Cleanup to make sure delete is correct.
        self.workflowclient.delete_workflow_definition(workflow_def_id)

        workflow_def_ids,_ = self.rrclient.find_resources(restype=RT.WorkflowDefinition)
        assertions(len(workflow_def_ids) == 0 )

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),'Not integrated for CEI')
    #@unittest.skip("Skipping for debugging ")
    def test_google_dt_transform_workflow(self):

        assertions = self.assertTrue

        # Build the workflow definition
        workflow_def_obj = IonObject(RT.WorkflowDefinition, name='GoogleDT_Test_Workflow',description='Tests the workflow of converting stream data to Google DT')

        #Add a transformation process definition
        google_dt_procdef_id = self.create_google_dt_data_process_definition()
        workflow_step_obj = IonObject('DataProcessWorkflowStep', data_process_definition_id=google_dt_procdef_id, persist_process_output_data=True)
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #Create it in the resource registry
        workflow_def_id = self.workflowclient.create_workflow_definition(workflow_def_obj)

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the input data product
        ctd_stream_id, ctd_parsed_data_product_id = self.create_ctd_input_stream_and_data_product()
        data_product_stream_ids.append(ctd_stream_id)

        #Create and start the workflow
        workflow_id, workflow_product_id = self.workflowclient.create_data_process_workflow(workflow_def_id, ctd_parsed_data_product_id, timeout=20)

        workflow_output_ids,_ = self.rrclient.find_subjects(RT.Workflow, PRED.hasOutputProduct, workflow_product_id, True)
        assertions(len(workflow_output_ids) == 1 )

        #Walk the associations to find the appropriate output data streams to validate the messages
        workflow_dp_ids,_ = self.rrclient.find_objects(workflow_id, PRED.hasDataProduct, RT.DataProduct, True)
        assertions(len(workflow_dp_ids) == 1 )

        for dp_id in workflow_dp_ids:
            stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, None, True)
            assertions(len(stream_ids) == 1 )
            data_product_stream_ids.append(stream_ids[0])

        #Start the output stream listener to monitor and collect messages
        results = self.start_output_stream_and_listen(ctd_stream_id, data_product_stream_ids)

        #Stop the workflow processes
        self.workflowclient.terminate_data_process_workflow(workflow_id, False)  # Should test true at some point

        #Validate the data from each of the messages along the way
        self.validate_google_dt_transform_results(results)

        # Check to see if ingestion worked. Extract the granules from data_retrieval.
        # First find the dataset associated with the output dp product
        ds_ids,_ = self.rrclient.find_objects(workflow_dp_ids[-1], PRED.hasDataset, RT.DataSet, True)
        retrieve_granule = self.data_retriever.retrieve(ds_ids[0])

        #Validate the data from each of the messages along the way
        self.validate_google_dt_transform_results(retrieve_granule)

        #Cleanup to make sure delete is correct.
        self.workflowclient.delete_workflow_definition(workflow_def_id)

        workflow_def_ids,_ = self.rrclient.find_resources(restype=RT.WorkflowDefinition)
        assertions(len(workflow_def_ids) == 0 )

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),'Not integrated for CEI')
    #@unittest.skip("Skipping for debugging ")
    def test_mpl_graphs_transform_workflow(self):

        assertions = self.assertTrue

        # Build the workflow definition
        workflow_def_obj = IonObject(RT.WorkflowDefinition, name='Mpl_Graphs_Test_Workflow',description='Tests the workflow of converting stream data to Matplotlib graphs')

        #Add a transformation process definition
        mpl_graphs_procdef_id = self.create_mpl_graphs_data_process_definition()
        workflow_step_obj = IonObject('DataProcessWorkflowStep', data_process_definition_id=mpl_graphs_procdef_id, persist_process_output_data=True)
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #Create it in the resource registry
        workflow_def_id = self.workflowclient.create_workflow_definition(workflow_def_obj)

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the input data product
        ctd_stream_id, ctd_parsed_data_product_id = self.create_ctd_input_stream_and_data_product()
        data_product_stream_ids.append(ctd_stream_id)

        #Create and start the workflow
        workflow_id, workflow_product_id = self.workflowclient.create_data_process_workflow(workflow_def_id, ctd_parsed_data_product_id, timeout=20)

        workflow_output_ids,_ = self.rrclient.find_subjects(RT.Workflow, PRED.hasOutputProduct, workflow_product_id, True)
        assertions(len(workflow_output_ids) == 1 )

        #Walk the associations to find the appropriate output data streams to validate the messages
        workflow_dp_ids,_ = self.rrclient.find_objects(workflow_id, PRED.hasDataProduct, RT.DataProduct, True)
        assertions(len(workflow_dp_ids) == 1 )

        for dp_id in workflow_dp_ids:
            stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, None, True)
            assertions(len(stream_ids) == 1 )
            data_product_stream_ids.append(stream_ids[0])

        #Start the output stream listener to monitor and collect messages
        results = self.start_output_stream_and_listen(ctd_stream_id, data_product_stream_ids)

        #Stop the workflow processes
        self.workflowclient.terminate_data_process_workflow(workflow_id, False)  # Should test true at some point

        #Validate the data from each of the messages along the way
        self.validate_mpl_graphs_transform_results(results)

        # Check to see if ingestion worked. Extract the granules from data_retrieval.
        # First find the dataset associated with the output dp product
        ds_ids,_ = self.rrclient.find_objects(workflow_dp_ids[-1], PRED.hasDataset, RT.DataSet, True)

        retrieve_granule = self.data_retriever.retrieve(ds_ids[0])

        #Validate the data from each of the messages along the way
        self.validate_mpl_graphs_transform_results(retrieve_granule)

        #Cleanup to make sure delete is correct.
        self.workflowclient.delete_workflow_definition(workflow_def_id)

        workflow_def_ids,_ = self.rrclient.find_resources(restype=RT.WorkflowDefinition)
        assertions(len(workflow_def_ids) == 0 )

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),'Not integrated for CEI')
    #@unittest.skip("Skipping for debugging ")
    def test_multiple_workflow_instances(self):

        assertions = self.assertTrue

        # Build the workflow definition
        workflow_def_obj = IonObject(RT.WorkflowDefinition, name='Multiple_Test_Workflow',description='Tests the workflow of converting stream data')

        #Add a transformation process definition
        google_dt_procdef_id = self.create_google_dt_data_process_definition()
        workflow_step_obj = IonObject('DataProcessWorkflowStep', data_process_definition_id=google_dt_procdef_id)
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #Create it in the resource registry
        workflow_def_id = self.workflowclient.create_workflow_definition(workflow_def_obj)

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the first input data product
        ctd_stream_id1, ctd_parsed_data_product_id1 = self.create_ctd_input_stream_and_data_product('ctd_parsed1')
        data_product_stream_ids.append(ctd_stream_id1)

        #Create and start the first workflow
        workflow_id1, workflow_product_id1 = self.workflowclient.create_data_process_workflow(workflow_def_id, ctd_parsed_data_product_id1, timeout=20)

        #Create the second input data product
        ctd_stream_id2, ctd_parsed_data_product_id2 = self.create_ctd_input_stream_and_data_product('ctd_parsed2')
        data_product_stream_ids.append(ctd_stream_id2)

        #Create and start the second workflow
        workflow_id2, workflow_product_id2 = self.workflowclient.create_data_process_workflow(workflow_def_id, ctd_parsed_data_product_id2, timeout=20)

        #Walk the associations to find the appropriate output data streams to validate the messages
        workflow_ids,_ = self.rrclient.find_resources(restype=RT.Workflow)
        assertions(len(workflow_ids) == 2 )


        #Start the first input stream process
        ctd_sim_pid1 = self.start_sinusoidal_input_stream_process(ctd_stream_id1)

        #Start the second input stream process
        ctd_sim_pid2 = self.start_simple_input_stream_process(ctd_stream_id2)

        #Start the output stream listener to monitor a set number of messages being sent through the workflows
        results = self.start_output_stream_and_listen(None, data_product_stream_ids, message_count_per_stream=5)

        # stop the flow of messages...
        self.process_dispatcher.cancel_process(ctd_sim_pid1) # kill the ctd simulator process - that is enough data
        self.process_dispatcher.cancel_process(ctd_sim_pid2)

        #Stop the first workflow processes
        self.workflowclient.terminate_data_process_workflow(workflow_id1, False)  # Should test true at some point

        #Stop the second workflow processes
        self.workflowclient.terminate_data_process_workflow(workflow_id2, False)  # Should test true at some point

        workflow_ids,_ = self.rrclient.find_resources(restype=RT.Workflow)
        assertions(len(workflow_ids) == 0 )

        #Cleanup to make sure delete is correct.
        self.workflowclient.delete_workflow_definition(workflow_def_id)

        workflow_def_ids,_ = self.rrclient.find_resources(restype=RT.WorkflowDefinition)
        assertions(len(workflow_def_ids) == 0 )

        aid_list = self.rrclient.find_associations(workflow_def_id, PRED.hasDataProcessDefinition)
        assertions(len(aid_list) == 0 )
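
# The cleanup assertions above follow a recurring pattern: after terminate and
# delete calls, query the resource registry by type and expect zero hits. A
# minimal sketch of that pattern as a reusable module-level helper (assuming
# any registry client exposing find_resources, as used throughout these tests):
def assert_registry_empty_of(rrclient, restype):
    """Sketch: fail if any resources of the given type remain registered."""
    res_list, _ = rrclient.find_resources(restype=restype)
    assert len(res_list) == 0, "expected no %s resources, found %d" % (restype, len(res_list))
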
class TestIntDataProcessManagementServiceMultiOut(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubclient =  PubsubManagementServiceClient(node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.datasetclient =  DatasetManagementServiceClient(node=self.container.node)
        self.dataset_management = self.datasetclient
        self.process_dispatcher = ProcessDispatcherServiceClient(node=self.container.node)

    def test_createDataProcess(self):

        #---------------------------------------------------------------------------
        # Data Process Definition
        #---------------------------------------------------------------------------
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L0_all',
                            description='transform ctd package into three separate L0 streams',
                            module='ion.processes.data.transforms.ctd.ctd_L0_all',
                            class_name='ctd_L0_all')
        dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)

        # Make assertion on the newly registered data process definition
        data_process_definition = self.rrclient.read(dprocdef_id)
        self.assertEquals(data_process_definition.name, 'ctd_L0_all')
        self.assertEquals(data_process_definition.description, 'transform ctd package into three separate L0 streams')
        self.assertEquals(data_process_definition.module, 'ion.processes.data.transforms.ctd.ctd_L0_all')
        self.assertEquals(data_process_definition.class_name, 'ctd_L0_all')

        # Read the data process definition using data process management and make assertions
        dprocdef_obj = self.dataprocessclient.read_data_process_definition(dprocdef_id)
        self.assertEquals(dprocdef_obj.class_name,'ctd_L0_all')
        self.assertEquals(dprocdef_obj.module,'ion.processes.data.transforms.ctd.ctd_L0_all')

        #---------------------------------------------------------------------------
        # Create an input instrument
        #---------------------------------------------------------------------------

        instrument_obj = IonObject(RT.InstrumentDevice, name='Inst1',description='an instrument that is creating the data product')
        instrument_id, rev = self.rrclient.create(instrument_obj)

        # Register the instrument so that the data producer and stream object are created
        data_producer_id = self.damsclient.register_instrument(instrument_id)

        # create a stream definition for the data from the ctd simulator
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubclient.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=pdict_id)

        self.dataprocessclient.assign_input_stream_definition_to_data_process_definition(ctd_stream_def_id, dprocdef_id )

        # Assert that the link between the stream definition and the data process definition was made
        assocs = self.rrclient.find_associations(subject=dprocdef_id, predicate=PRED.hasInputStreamDefinition, object=ctd_stream_def_id, id_only=True)

        self.assertEquals(len(assocs), 1)

        #---------------------------------------------------------------------------
        # Input Data Product
        #---------------------------------------------------------------------------
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()


        input_dp_obj = IonObject(   RT.DataProduct,
                                    name='InputDataProduct',
                                    description='some new dp',
                                    temporal_domain = tdom,
                                    spatial_domain = sdom)

        input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj, stream_definition_id=ctd_stream_def_id, exchange_point='test')

        #Make assertions on the input data product created
        input_dp_obj = self.rrclient.read(input_dp_id)
        self.assertEquals(input_dp_obj.name, 'InputDataProduct')
        self.assertEquals(input_dp_obj.description, 'some new dp')

        self.damsclient.assign_data_product(instrument_id, input_dp_id)

        # Retrieve the stream via the DataProduct->Stream associations
        stream_ids, _ = self.rrclient.find_objects(input_dp_id, PRED.hasStream, None, True)

        self.in_stream_id = stream_ids[0]

        #---------------------------------------------------------------------------
        # Output Data Product
        #---------------------------------------------------------------------------

        outgoing_stream_conductivity_id = self.pubsubclient.create_stream_definition(name='conductivity', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_conductivity_id, dprocdef_id,binding='conductivity' )

        outgoing_stream_pressure_id = self.pubsubclient.create_stream_definition(name='pressure', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_pressure_id, dprocdef_id, binding='pressure' )

        outgoing_stream_temperature_id = self.pubsubclient.create_stream_definition(name='temperature', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_temperature_id, dprocdef_id, binding='temperature' )


        self.output_products={}

        output_dp_obj = IonObject(RT.DataProduct,
            name='conductivity',
            description='transform output conductivity',
            temporal_domain = tdom,
            spatial_domain = sdom)

        output_dp_id_1 = self.dataproductclient.create_data_product(output_dp_obj, outgoing_stream_conductivity_id)
        self.output_products['conductivity'] = output_dp_id_1

        output_dp_obj = IonObject(RT.DataProduct,
            name='pressure',
            description='transform output pressure',
            temporal_domain = tdom,
            spatial_domain = sdom)

        output_dp_id_2 = self.dataproductclient.create_data_product(output_dp_obj, outgoing_stream_pressure_id)
        self.output_products['pressure'] = output_dp_id_2

        output_dp_obj = IonObject(RT.DataProduct,
            name='temperature',
            description='transform output temperature',
            temporal_domain = tdom,
            spatial_domain = sdom)

        output_dp_id_3 = self.dataproductclient.create_data_product(output_dp_obj, outgoing_stream_temperature_id)
        self.output_products['temperature'] = output_dp_id_3


        #---------------------------------------------------------------------------
        # Create the data process
        #---------------------------------------------------------------------------
        dproc_id = self.dataprocessclient.create_data_process(dprocdef_id, [input_dp_id], self.output_products)

        # Make assertions on the data process created
        data_process = self.dataprocessclient.read_data_process(dproc_id)

        # Assert that the data process has a process id attached
        self.assertIsNotNone(data_process.process_id)

        # Assert that the data process got the input data product's subscription id attached as its own input_subscription_id attribute
        self.assertIsNotNone(data_process.input_subscription_id)

        output_data_product_ids, _ = self.rrclient.find_objects(subject=dproc_id, predicate=PRED.hasOutputProduct, object_type=RT.DataProduct, id_only=True)

        self.assertEquals(set(output_data_product_ids), set([output_dp_id_1, output_dp_id_2, output_dp_id_3]))
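
    # The three binding/stream-definition/output-product triples above are wired
    # by hand. The same wiring as a loop, shown as a sketch (a hypothetical
    # helper that is not called by the tests; every call it makes appears
    # verbatim in test_createDataProcess above):
    def _sketch_wire_outputs(self, dprocdef_id, pdict_id, tdom, sdom, bindings):
        """Sketch: create one stream definition and output data product per binding."""
        output_products = {}
        for binding in bindings:
            stream_def_id = self.pubsubclient.create_stream_definition(
                name=binding, parameter_dictionary_id=pdict_id)
            self.dataprocessclient.assign_stream_definition_to_data_process_definition(
                stream_def_id, dprocdef_id, binding=binding)
            dp_obj = IonObject(RT.DataProduct,
                               name=binding,
                               description='transform output ' + binding,
                               temporal_domain=tdom,
                               spatial_domain=sdom)
            output_products[binding] = self.dataproductclient.create_data_product(
                dp_obj, stream_def_id)
        return output_products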


    @patch.dict(CFG, {'endpoint':{'receive':{'timeout': 60}}})
    def test_createDataProcessUsingSim(self):
        #-------------------------------
        # Create InstrumentModel
        #-------------------------------
        instModel_obj = IonObject(RT.InstrumentModel, name='SBE37IMModel', description="SBE37IMModel" )
        instModel_id = self.imsclient.create_instrument_model(instModel_obj)

        #-------------------------------
        # Create InstrumentAgent
        #-------------------------------
        instAgent_obj = IonObject(RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", driver_uri="http://sddevrepo.oceanobservatories.org/releases/seabird_sbe37smb_ooicore-0.0.1-py2.7.egg")
        instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)

        self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

        #-------------------------------
        # Create InstrumentDevice
        #-------------------------------
        instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345" )
        instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
        self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)

        #-------------------------------
        # Create InstrumentAgentInstance to hold configuration information
        #-------------------------------


        port_agent_config = {
            'device_addr':  CFG.device.sbe37.host,
            'device_port':  CFG.device.sbe37.port,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': CFG.device.sbe37.port_agent_cmd_port,
            'data_port': CFG.device.sbe37.port_agent_data_port,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance', description="SBE37IMAgentInstance",
                                          port_agent_config = port_agent_config)
        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(instAgentInstance_obj, instAgent_id, instDevice_id)


        #-------------------------------
        # Create CTD Parsed as the first data product
        #-------------------------------
        # create a stream definition for the data from the ctd simulator
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubclient.create_stream_definition(name='SBE32_CDM', parameter_dictionary_id=pdict_id)

        # Construct temporal and spatial Coordinate Reference System objects
        tdom, sdom = time_series_domain()

        sdom = sdom.dump()
        tdom = tdom.dump()



        dp_obj = IonObject(RT.DataProduct,
            name='ctd_parsed',
            description='ctd stream test',
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_parsed_data_product = self.dataproductclient.create_data_product(dp_obj, ctd_stream_def_id)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_parsed_data_product)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_parsed_data_product, PRED.hasStream, None, True)

        #-------------------------------
        # Create CTD Raw as the second data product
        #-------------------------------
        raw_stream_def_id = self.pubsubclient.create_stream_definition(name='SBE37_RAW', parameter_dictionary_id=pdict_id)

        dp_obj.name = 'ctd_raw'
        ctd_raw_data_product = self.dataproductclient.create_data_product(dp_obj, raw_stream_def_id)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_raw_data_product)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_raw_data_product, PRED.hasStream, None, True)

        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Data Process Definition
        #-------------------------------
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L0_all',
                            description='transform ctd package into three separate L0 streams',
                            module='ion.processes.data.transforms.ctd.ctd_L0_all',
                            class_name='ctd_L0_all')
        ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
            
        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Output Data Products
        #-------------------------------

        outgoing_stream_l0_conductivity_id = self.pubsubclient.create_stream_definition(name='L0_Conductivity', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_conductivity_id, ctd_L0_all_dprocdef_id, binding='conductivity' )

        outgoing_stream_l0_pressure_id = self.pubsubclient.create_stream_definition(name='L0_Pressure', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_pressure_id, ctd_L0_all_dprocdef_id, binding='pressure' )

        outgoing_stream_l0_temperature_id = self.pubsubclient.create_stream_definition(name='L0_Temperature', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_temperature_id, ctd_L0_all_dprocdef_id, binding='temperature' )


        self.output_products={}

        ctd_l0_conductivity_output_dp_obj = IonObject(  RT.DataProduct,
                                                        name='L0_Conductivity',
                                                        description='transform output conductivity',
                                                        temporal_domain = tdom,
                                                        spatial_domain = sdom)


        ctd_l0_conductivity_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_conductivity_output_dp_obj,
                                                                                outgoing_stream_l0_conductivity_id)
        self.output_products['conductivity'] = ctd_l0_conductivity_output_dp_id

        ctd_l0_pressure_output_dp_obj = IonObject(RT.DataProduct,
            name='L0_Pressure',
            description='transform output pressure',
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_l0_pressure_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_pressure_output_dp_obj,
                                                                                    outgoing_stream_l0_pressure_id)
        self.output_products['pressure'] = ctd_l0_pressure_output_dp_id

        ctd_l0_temperature_output_dp_obj = IonObject(RT.DataProduct,
            name='L0_Temperature',
            description='transform output temperature',
            temporal_domain = tdom,
            spatial_domain = sdom)


        ctd_l0_temperature_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_temperature_output_dp_obj,
                                                                                    outgoing_stream_l0_temperature_id)
        self.output_products['temperature'] = ctd_l0_temperature_output_dp_id


        #-------------------------------
        # Create listener for data process events and verify that events are received.
        #-------------------------------

        # todo: add validation for Req: L4-CI-SA-RQ-367  Data processing shall notify registered data product consumers about data processing workflow life cycle events
        # todo (contd): the capability does not appear to exist yet; ANS and SA are not yet publishing any workflow life cycle events (Swarbhanu)
        
        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Create the data process
        #-------------------------------

        ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process(ctd_L0_all_dprocdef_id, [ctd_parsed_data_product], self.output_products)
        data_process = self.rrclient.read(ctd_l0_all_data_process_id)
        process_id = data_process.process_id
        self.addCleanup(self.process_dispatcher.cancel_process, process_id)

        #-------------------------------
        # Wait until the process launched in the create_data_process() method is actually running, before proceeding further in this test
        #-------------------------------

        gate = ProcessStateGate(self.process_dispatcher.read_process, process_id, ProcessStateEnum.RUNNING)
        self.assertTrue(gate.await(30), "The data process (%s) did not spawn in 30 seconds" % process_id)

        #-------------------------------
        # Retrieve a list of all data process definitions in RR and validate that the DPD is listed
        #-------------------------------

        # todo: Req: L4-CI-SA-RQ-366  Data processing shall manage data topic definitions
        # todo: data topics are being handled by pub sub at the level of streams
        self.dataprocessclient.activate_data_process(ctd_l0_all_data_process_id)
        

        #todo: check that activate event is received L4-CI-SA-RQ-367
        #todo: (no event appears to be published when the data process is activated, so for now
        #todo:  we just check that the subscription is indeed activated) (Swarbhanu)


        # todo: monitor process to see if it is active (sa-rq-182)
        ctd_l0_all_data_process = self.rrclient.read(ctd_l0_all_data_process_id)
        input_subscription_id = ctd_l0_all_data_process.input_subscription_id
        subs = self.rrclient.read(input_subscription_id)
        self.assertTrue(subs.activated)

        # todo: This has not yet been completed by CEI, will probably surface through a DPMS call
        self.dataprocessclient.deactivate_data_process(ctd_l0_all_data_process_id)


        #-------------------------------
        # Retrieve the extended resources for data process definition and for data process
        #-------------------------------
        extended_process_definition = self.dataprocessclient.get_data_process_definition_extension(ctd_L0_all_dprocdef_id)
        self.assertEqual(1, len(extended_process_definition.data_processes))
        log.debug("test_createDataProcess: extended_process_definition  %s", str(extended_process_definition))

        extended_process = self.dataprocessclient.get_data_process_extension(ctd_l0_all_data_process_id)
        self.assertEqual(1, len(extended_process.input_data_products))
        log.debug("test_createDataProcess: extended_process  %s", str(extended_process))

        ################################ Test the removal of data processes ##################################

        #-------------------------------------------------------------------
        # Test the deleting of the data process
        #-------------------------------------------------------------------

        # Before deleting, get the input streams, output streams and the subscriptions so that they can be checked after deleting
#        dp_obj_1 = self.rrclient.read(ctd_l0_all_data_process_id)
#        input_subscription_id = dp_obj_1.input_subscription_id
#        out_prods, _ = self.rrclient.find_objects(subject=ctd_l0_all_data_process_id, predicate=PRED.hasOutputProduct, id_only=True)
#        in_prods, _ = self.rrclient.find_objects(ctd_l0_all_data_process_id, PRED.hasInputProduct, id_only=True)
#        in_streams = []
#        for in_prod in in_prods:
#            streams, _ = self.rrclient.find_objects(in_prod, PRED.hasStream, id_only=True)
#            in_streams.extend(streams)
#        out_streams = []
#        for out_prod in out_prods:
#            streams, _ = self.rrclient.find_objects(out_prod, PRED.hasStream, id_only=True)
#            out_streams.extend(streams)

        # Deleting the data process
        self.dataprocessclient.delete_data_process(ctd_l0_all_data_process_id)

        # Check that the data process got removed. Check the lcs state. It should be retired
        dp_obj = self.rrclient.read(ctd_l0_all_data_process_id)
        self.assertEquals(dp_obj.lcstate, LCS.RETIRED)

        # Check for process defs still attached to the data process
        dpd_assn_ids = self.rrclient.find_associations(subject=ctd_l0_all_data_process_id,  predicate=PRED.hasProcessDefinition, id_only=True)
        self.assertEquals(len(dpd_assn_ids), 0)

        # Check for output data product still attached to the data process
        out_products, assocs = self.rrclient.find_objects(subject=ctd_l0_all_data_process_id, predicate=PRED.hasOutputProduct, id_only=True)
        self.assertEquals(len(out_products), 0)
        self.assertEquals(len(assocs), 0)

        # Check for input data products still attached to the data process
        inprod_associations = self.rrclient.find_associations(ctd_l0_all_data_process_id, PRED.hasInputProduct)
        self.assertEquals(len(inprod_associations), 0)

        # Check that the data process has been deactivated
        self.assertIsNone(dp_obj.input_subscription_id)

        # Read the original subscription id of the data process and check that it has been deactivated
        with self.assertRaises(NotFound):
            self.pubsubclient.read_subscription(input_subscription_id)

        #-------------------------------------------------------------------
        # Delete the data process definition
        #-------------------------------------------------------------------

        # Before deleting, fetch the associated process definition so we can later check that it is deleted as expected
        proc_def_ids, proc_def_asocs = self.rrclient.find_objects(ctd_l0_all_data_process_id, PRED.hasProcessDefinition)
        self.dataprocessclient.delete_data_process_definition(ctd_L0_all_dprocdef_id)

        # check that the data process definition has been retired
        dp_proc_def = self.rrclient.read(ctd_L0_all_dprocdef_id)
        self.assertEquals(dp_proc_def.lcstate, LCS.RETIRED)

        # Check for old associations of this data process definition
        proc_defs, proc_def_asocs = self.rrclient.find_objects(ctd_L0_all_dprocdef_id, PRED.hasProcessDefinition)
        self.assertEquals(len(proc_defs), 0)

        # find all associations where this is the subject
        _, obj_assns = self.rrclient.find_objects(subject= ctd_L0_all_dprocdef_id, id_only=True)
        self.assertEquals(len(obj_assns), 0)

        ################################ Test the force removal of data processes ##################################
        # Try force delete... This should simply delete the associations and the data process object
        # from the resource registry

        #---------------------------------------------------------------------------------------------------------------
        # Force deleting a data process
        #---------------------------------------------------------------------------------------------------------------
        self.dataprocessclient.force_delete_data_process(ctd_l0_all_data_process_id)

        # find all associations where this is the subject
        _, obj_assns = self.rrclient.find_objects(subject=ctd_l0_all_data_process_id, id_only=True)

        # find all associations where this is the object
        _, sbj_assns = self.rrclient.find_subjects(object=ctd_l0_all_data_process_id, id_only=True)

        self.assertEquals(len(obj_assns), 0)
        self.assertEquals(len(sbj_assns), 0)
        
        with self.assertRaises(NotFound):
            self.rrclient.read(ctd_l0_all_data_process_id)

        #---------------------------------------------------------------------------------------------------------------
        # Force deleting a data process definition
        #---------------------------------------------------------------------------------------------------------------
        self.dataprocessclient.force_delete_data_process_definition(ctd_L0_all_dprocdef_id)

        # find all associations where this is the subject
        _, obj_assns = self.rrclient.find_objects(subject=ctd_l0_all_data_process_id, id_only=True)

        # find all associations where this is the object
        _, sbj_assns = self.rrclient.find_subjects(object=ctd_l0_all_data_process_id, id_only=True)

        self.assertEquals(len(obj_assns), 0)
        self.assertEquals(len(sbj_assns), 0)

        with self.assertRaises(NotFound):
            self.rrclient.read(ctd_l0_all_data_process_id)
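
    # ProcessStateGate (used above) blocks until the process dispatcher reports
    # the target state for the process. A sketch of that wait bundled with its
    # cleanup hook (a hypothetical helper; the calls mirror those in
    # test_createDataProcessUsingSim above):
    def _sketch_wait_for_process(self, process_id, timeout=30):
        """Sketch: wait for a dispatched process to reach RUNNING, cancelling it on teardown."""
        self.addCleanup(self.process_dispatcher.cancel_process, process_id)
        gate = ProcessStateGate(self.process_dispatcher.read_process, process_id, ProcessStateEnum.RUNNING)
        self.assertTrue(gate.await(timeout),
                        "The process (%s) did not spawn in %d seconds" % (process_id, timeout))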


    def test_transform_function_crd(self):
        tf = TransformFunction(name='simple', module='pyon.ion.process', cls='SimpleProcess')

        tf_id = self.dataprocessclient.create_transform_function(tf)
        self.assertTrue(tf_id)

        tf2_id = self.dataprocessclient.create_transform_function(tf)
        self.assertEquals(tf_id, tf2_id)

        tf_obj = self.dataprocessclient.read_transform_function(tf_id)
        self.assertEquals([tf.name, tf.module, tf.cls, tf.function_type], [tf_obj.name, tf_obj.module, tf_obj.cls, tf_obj.function_type])

        tf.module = 'dev.null'
        self.assertRaises(BadRequest, self.dataprocessclient.create_transform_function, tf)

        self.dataprocessclient.delete_transform_function(tf_id)
        self.assertRaises(NotFound, self.dataprocessclient.read_transform_function, tf_id)


class TestIntDataAcquisitionManagementService(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataAcquisitionManagementService
        self.client = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)

    def tearDown(self):
        pass


    #@unittest.skip('Not done yet.')
    def test_data_source_ops(self):
        # test creating a new data source
        print 'Creating new data source'
        datasource_obj = IonObject(RT.DataSource,
                           name='DataSource1',
                           description='instrument based new source',
                           type='sbe37')
        try:
            ds_id = self.client.create_data_source(datasource_obj)
        except BadRequest as ex:
            self.fail("failed to create new data source: %s" %ex)
        print 'new data source id = ', ds_id


        # test reading a non-existent data source
        print 'reading non-existent data source'
        try:
            dp_obj = self.client.read_data_source('some_fake_id')
        except NotFound as ex:
            pass
        else:
            self.fail("non-existing data source was found during read: %s" %dp_obj)

        # update a data source (tests read also)
        print 'Updating data source'
        # first get the existing data source object
        try:
            datasource_obj = self.client.read_data_source(ds_id)
        except NotFound as ex:
            self.fail("existing data source was not found during read")
        else:
            pass

        # now tweak the object
        datasource_obj.description = 'the very first data source'
        # now write the dp back to the registry
        try:
            update_result = self.client.update_data_source(datasource_obj)
        except NotFound as ex:
            self.fail("existing data source was not found during update")
        except Conflict as ex:
            self.fail("revision conflict exception during data source update")
        #else:
        #    self.assertTrue(update_result == True)
        # now get the data source back to see if it was updated
        try:
            datasource_obj = self.client.read_data_source(ds_id)
        except NotFound as ex:
            self.fail("existing data source was not found during read")
        else:
            pass
        self.assertTrue(datasource_obj.description == 'the very first data source')


        # now 'delete' the data source
        print "deleting data source"
        try:
            delete_result = self.client.force_delete_data_source(ds_id)
        except NotFound as ex:
            self.fail("existing data source was not found during delete")
        #self.assertTrue(delete_result == True)
        # now try to get the deleted dp object
        try:
            dp_obj = self.client.read_data_source(ds_id)
        except NotFound as ex:
            pass
        else:
            self.fail("deleted data source was found during read")

        # now try to delete the already deleted data source object
        print "deleting non-existing data source"
        try:
            delete_result = self.client.delete_data_source(ds_id)
        except NotFound as ex:
            pass
        else:
            self.fail("non-existing data source was found during delete")


    #@unittest.skip('Not done yet.')
    def test_register_instrument(self):
        # Register an instrument as a data producer in coordination with DM PubSub: create stream, register and create producer object


        # set up initial instrument to register
        instrument_obj = IonObject(RT.InstrumentDevice, name='Inst1',description='an instrument that is creating the data product')
        instrument_id, rev = self.rrclient.create(instrument_obj)

        dataproduct_obj = IonObject(RT.DataProduct, name='DataProduct1',description='sample data product')
        dataproduct_id, rev = self.rrclient.create(dataproduct_obj)

        # test registering a new data producer
        try:
            ds_id = self.client.register_instrument(instrument_id)
        except BadRequest as ex:
            self.fail("failed to create new data producer: %s" %ex)
        print 'new data producer id = ', ds_id


        # test assigning a data product to an instrument, creating the stream for the product
        try:
            self.client.assign_data_product(instrument_id, dataproduct_id)
            self.client.assign_data_product_source(dataproduct_id, instrument_id)
        except BadRequest as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)

        assocs = self.rrclient.find_associations(dataproduct_id, PRED.hasSource, instrument_id)
        if not assocs or len(assocs) == 0:
            self.fail("failed to assign data product to data producer")

        # test UNassigning a data product from instrument, deleting the stream for the product
        try:
            self.client.unassign_data_product(instrument_id, dataproduct_id)
            self.client.unassign_data_product_source(dataproduct_id, instrument_id)
        except BadRequest as ex:
            self.fail("failed to unassign data product from data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to unassign data product from data producer: %s" %ex)

        assocs = self.rrclient.find_associations(dataproduct_id, PRED.hasSource, instrument_id)
        if assocs:
            self.fail("failed to unassign data product from data producer")

        # test UNregistering a new data producer
        try:
            ds_id = self.client.unregister_instrument(instrument_id)
        except NotFound as ex:
            self.fail("failed to unregister instrument producer: %s" %ex)


    def test_register_external_data_set(self):
        # Register an external data set as a data producer in coordination with DM PubSub: create stream, register and create producer object


        # set up initial instrument to register
        ext_dataset_obj = IonObject(RT.ExternalDataset, name='DataSet1',description='an external data feed')
        ext_dataset_id, rev = self.rrclient.create(ext_dataset_obj)

        dataproduct_obj = IonObject(RT.DataProduct, name='DataProduct1',description='sample data product')
        dataproduct_id, rev = self.rrclient.create(dataproduct_obj)


        # test registering a new external data set
        try:
            ds_id = self.client.register_external_data_set(ext_dataset_id)
        except BadRequest as ex:
            self.fail("failed to create new data producer: %s" %ex)
        print 'new data producer id = ', ds_id

        # test assigning a data product to an ext_dataset_id, creating the stream for the product
        try:
            self.client.assign_data_product(ext_dataset_id, dataproduct_id)
        except BadRequest as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)

        # test UNassigning a data product from ext_dataset_id, deleting the stream for the product
        try:
            self.client.unassign_data_product(ext_dataset_id, dataproduct_id)
        except BadRequest as ex:
            self.fail("failed to unassign data product from data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to unassign data product from data producer: %s" %ex)

        # test UNregistering a external data set
        try:
            ds_id = self.client.unregister_external_data_set(ext_dataset_id)
        except NotFound as ex:
            self.fail("failed to unregister instrument producer: %s" %ex)





    #@unittest.skip('not ready')
    def test_eoi_resources(self):

            #
            # test creating a new data provider
            #
            print 'Creating new external_data_provider'
            dataprovider_obj = IonObject(RT.ExternalDataProvider,
                               name='ExtDataProvider1',
                               description='external data provider ')
            try:
                dataprovider_id = self.client.create_external_data_provider(dataprovider_obj)
            except BadRequest as ex:
                self.fail("failed to create new data provider: %s" %ex)
            print 'new data provider id = ', dataprovider_id

            #
            # test creating a new data source
            #
            print 'Creating new data source'
            datasource_obj = IonObject(RT.DataSource,
                               name='DataSource1',
                               description='data source ',
                               type='DAP')
            try:
                datasource_id = self.client.create_data_source(datasource_obj)
            except BadRequest as ex:
                self.fail("failed to create new data source: %s" %ex)
            print 'new data source id = ', datasource_id

            #
            # test creating a new data source model
            #
            print 'Creating new data source model'
            datamodel_obj = IonObject(RT.DataSourceModel,
                               name='DataSourceModel1',
                               description='data source model')
            try:
                datamodel_id = self.client.create_data_source_model(datamodel_obj)
            except BadRequest as ex:
                self.fail("failed to create new data source model: %s" %ex)
            print 'new data source model id = ', datamodel_id


            #
            # test creating a new external data set
            #
            print 'Creating new external data set'
            dataset_obj = IonObject(RT.ExternalDataset,
                               name='ExternalDataSet1',
                               description='external data set ')
            try:
                extdataset_id = self.client.create_external_dataset(dataset_obj)
            except BadRequest as ex:
                self.fail("failed to create new external data set: %s" %ex)
            print 'new external data set id = ', extdataset_id


            #
            # test creating a new external dataset agent
            #
            print 'Creating new external dataset agent'
            datasetagent_obj = IonObject(RT.ExternalDatasetAgent,
                               name='ExternalDatasetAgent1',
                               description='external data agent ',
                                handler_module = 'module_name',
                                handler_class = 'class_name')
            try:
                datasetagent_id = self.client.create_external_dataset_agent(datasetagent_obj)
            except BadRequest as ex:
                self.fail("failed to create new external dataset agent: %s" %ex)
            print 'new external dataset agent id = ', datasetagent_id


            #
            # test creating a new data source agent
            #
            print 'Creating new data source agent'
            datasourceagent_obj = IonObject(RT.DataSourceAgent,
                               name='DataSourceAgent1',
                               description=' DataSource agent ')
            try:
                datasource_agent_id = self.client.create_data_source_agent(datasourceagent_obj)
            except BadRequest as ex:
                self.fail("failed to create new external datasource agent: %s" %ex)
            print 'new data source agent id = ', datasource_agent_id





            #
            # test creating a new dataset agent instance
            #
            print 'Creating new external dataset agent instance'
            datasetagentinstance_obj = IonObject(RT.ExternalDatasetAgentInstance,
                               name='ExternalDatasetAgentInstance1',
                               description='external dataset agent instance ')
            try:
                datasetagentinstance_id = self.client.create_external_dataset_agent_instance(datasetagentinstance_obj, datasetagent_id)
            except BadRequest as ex:
                self.fail("failed to create new external dataset agent instance: %s" %ex)
            print 'new external dataset agent instance id = ', datasetagentinstance_id

            #
            # test creating a new datasource agent instance
            #
            print 'Creating new data source agent instance'
            datasourceagentinstance_obj = IonObject(RT.DataSourceAgentInstance,
                               name='ExternalDataSourceAgentInstance1',
                               description='external DataSource agent instance ')
            try:
                datasource_agent_instance_id = self.client.create_data_source_agent_instance(datasourceagentinstance_obj)
            except BadRequest as ex:
                self.fail("failed to create new external datasource agent instance: %s" %ex)
            print 'new data source agent instance id = ', datasource_agent_instance_id

            #
            # test unassign
            #

            self.client.unassign_data_source_from_external_data_provider(datasource_id, dataprovider_id)

            self.client.unassign_data_source_from_data_model(datasource_id, datamodel_id)

            self.client.unassign_external_dataset_from_data_source(extdataset_id, datasource_id)

            #
            # test read
            #

            try:
                dp_obj = self.client.read_external_data_provider(dataprovider_id)
            except NotFound as ex:
                self.fail("existing data provicer was not found during read")
            else:
                pass

            try:
                dp_obj = self.client.read_data_source(datasource_id)
            except NotFound as ex:
                self.fail("existing data source was not found during read")
            else:
                pass

            #
            # test delete
            #
            try:
                self.client.delete_external_data_provider(dataprovider_id)
                self.client.delete_data_source(datasource_id)
                self.client.delete_external_dataset(extdataset_id)
                self.client.delete_data_source_model(datamodel_id)
                self.client.delete_external_dataset_agent(datasetagent_id)
                self.client.delete_data_source_agent_instance(datasource_agent_instance_id)

                self.client.force_delete_external_data_provider(dataprovider_id)
                self.client.force_delete_data_source(datasource_id)
                self.client.force_delete_external_dataset(extdataset_id)
                self.client.force_delete_data_source_model(datamodel_id)
                self.client.force_delete_external_dataset_agent(datasetagent_id)
                self.client.force_delete_data_source_agent_instance(datasource_agent_instance_id)
            except NotFound as ex:
                self.fail("existing data product was not found during delete")


            # test reading a non-existent data provider
            print 'reading non-existent data provider'
            try:
                bad_obj = self.client.read_external_data_provider('some_fake_id')
            except NotFound as ex:
                pass
            else:
                self.fail("non-existing data product was found during read: %s" %bad_obj)
class TestResourceRegistry(IonIntegrationTestCase):
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2coi.yml')

        # Now create client to the resource registry service
        self.resource_registry_service = ResourceRegistryServiceClient(
            node=self.container.node)

    def test_crud(self):
        # Some quick registry tests
        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", name="name", foo="bar")
        self.assertTrue(cm.exception.message ==
                        "__init__() got an unexpected keyword argument 'foo'")

        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name", "foo": "bar"})
        self.assertTrue(cm.exception.message ==
                        "__init__() got an unexpected keyword argument 'foo'")

        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name"}, foo="bar")
        self.assertTrue(cm.exception.message ==
                        "__init__() got an unexpected keyword argument 'foo'")

        # Instantiate an object
        obj = IonObject("UserInfo", name="name")

        # Can't set attributes that aren't in the object's schema
        with self.assertRaises(AttributeError) as cm:
            setattr(obj, "foo", "bar")
        self.assertTrue(
            cm.exception.message == "'UserInfo' object has no attribute 'foo'")

        # Can't call update with an object that hasn't been persisted
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.update(obj)
        self.assertTrue(
            cm.exception.message.startswith(
                "Object does not have required '_id' or '_rev' attribute"))

        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        # Cannot create object with _id and _rev fields pre-set
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create(read_obj)
        self.assertTrue(
            cm.exception.message.startswith("Doc must not have '_id'"))

        # Update object
        read_obj.name = "John Doe"
        self.resource_registry_service.update(read_obj)

        # Update should fail with revision mismatch
        with self.assertRaises(Conflict) as cm:
            self.resource_registry_service.update(read_obj)
        self.assertTrue(
            cm.exception.message.startswith(
                "Object not based on most current version"))

        # Re-read and update object
        read_obj = self.resource_registry_service.read(obj_id)
        self.resource_registry_service.update(read_obj)

        # Delete object
        self.resource_registry_service.delete(obj_id)

        # Make sure read, update and delete report error
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.read(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.update(read_obj)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        # Owner creation tests
        user = IonObject("ActorIdentity", name='user')
        uid, _ = self.resource_registry_service.create(user)

        inst = IonObject("InstrumentDevice", name='instrument')
        iid, _ = self.resource_registry_service.create(
            inst, headers={'ion-actor-id': str(uid)})

        ids, _ = self.resource_registry_service.find_objects(iid,
                                                             PRED.hasOwner,
                                                             RT.ActorIdentity,
                                                             id_only=True)
        self.assertEquals(len(ids), 1)

        assoc = self.resource_registry_service.read(ids[0])
        self.resource_registry_service.delete(iid)

        with self.assertRaises(NotFound) as ex:
            assoc = self.resource_registry_service.read(iid)
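
    # Passing an 'ion-actor-id' header on create (above) makes the registry
    # attach a hasOwner association to the new resource automatically. A sketch
    # of reading that owner back (same find_objects call as in the test;
    # hypothetical helper, not invoked by the tests):
    def _sketch_read_owner(self, resource_id):
        """Sketch: return the ActorIdentity id recorded as the resource's owner."""
        owner_ids, _ = self.resource_registry_service.find_objects(
            resource_id, PRED.hasOwner, RT.ActorIdentity, id_only=True)
        self.assertEquals(len(owner_ids), 1)
        return owner_ids[0]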

    def test_lifecycle(self):
        att = IonObject("InstrumentDevice", name='mine', description='desc')

        rid, rev = self.resource_registry_service.create(att)

        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.name, att.name)
        self.assertEquals(att1.lcstate, LCS.DRAFT_PRIVATE)

        new_state = self.resource_registry_service.execute_lifecycle_transition(
            rid, LCE.PLAN)
        self.assertEquals(new_state, LCS.PLANNED_PRIVATE)

        att2 = self.resource_registry_service.read(rid)
        self.assertEquals(att2.lcstate, LCS.PLANNED_PRIVATE)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.execute_lifecycle_transition(
                rid, LCE.UNANNOUNCE)
        self.assertTrue(
            "type=InstrumentDevice, lcstate=PLANNED_PRIVATE has no transition for event unannounce"
            in cm.exception.message)

        new_state = self.resource_registry_service.execute_lifecycle_transition(
            rid, LCE.DEVELOP)
        self.assertEquals(new_state, LCS.DEVELOPED_PRIVATE)

        self.assertRaises(
            iex.BadRequest,
            self.resource_registry_service.execute_lifecycle_transition,
            resource_id=rid,
            transition_event='NONE##')

        self.resource_registry_service.set_lifecycle_state(
            rid, LCS.INTEGRATED_PRIVATE)
        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.lcstate, LCS.INTEGRATED_PRIVATE)
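
    # The walk above applies lifecycle events one at a time via
    # execute_lifecycle_transition, while set_lifecycle_state jumps straight to
    # a target state. A sketch driving a resource through a sequence of events
    # and collecting the states visited (hypothetical helper; e.g. the events
    # [LCE.PLAN, LCE.DEVELOP] yield [LCS.PLANNED_PRIVATE, LCS.DEVELOPED_PRIVATE]
    # per the assertions above):
    def _sketch_run_lifecycle(self, resource_id, events):
        """Sketch: apply lifecycle events in order, returning the resulting states."""
        states = []
        for event in events:
            states.append(self.resource_registry_service.execute_lifecycle_transition(resource_id, event))
        return states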

    def test_association(self):
        # Instantiate ActorIdentity object
        actor_identity_obj = IonObject("ActorIdentity", name="name")
        actor_identity_obj_id, actor_identity_obj_rev = self.resource_registry_service.create(
            actor_identity_obj)
        read_actor_identity_obj = self.resource_registry_service.read(
            actor_identity_obj_id)

        # Instantiate UserInfo object
        user_info_obj = IonObject("UserInfo", name="name")
        user_info_obj_id, user_info_obj_rev = self.resource_registry_service.create(
            user_info_obj)
        read_user_info_obj = self.resource_registry_service.read(
            user_info_obj_id)

        # Test create failures
        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.bogus, user_info_obj_id)
        self.assertTrue(cm.exception.message == "bogus")

        # Predicate not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, None, user_info_obj_id)
        self.assertTrue(
            cm.exception.message == "Association must have all elements set")

        # Bad association type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, user_info_obj_id,
                'bogustype')
        self.assertTrue(
            cm.exception.message == "Unsupported assoc_type: bogustype")

        # Subject id or object not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                None, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(
            cm.exception.message == "Association must have all elements set")

        # Object id or object not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, None)
        self.assertTrue(
            cm.exception.message == "Association must have all elements set")

        # Bad subject id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association(
                "bogus", PRED.hasInfo, user_info_obj_id)
        self.assertTrue(
            cm.exception.message == "Object with id bogus does not exist.")

        # Bad object id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, "bogus")
        self.assertTrue(
            cm.exception.message == "Object with id bogus does not exist.")

        # _id missing from subject
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(
            cm.exception.message == "Subject id or rev not available")

        # _id missing from object
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, user_info_obj)
        self.assertTrue(
            cm.exception.message == "Object id or rev not available")

        # Wrong subject type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                user_info_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message ==
                        "Illegal subject type UserInfo for predicate hasInfo")

        # Wrong object type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, actor_identity_obj_id)
        self.assertTrue(
            cm.exception.message ==
            "Illegal object type ActorIdentity for predicate hasInfo")

        # Create two different association types between the same subject and predicate
        assoc_id1, assoc_rev1 = self.resource_registry_service.create_association(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)

        # Read object, subject
        res_obj1 = self.resource_registry_service.read_object(
            actor_identity_obj_id, PRED.hasInfo, RT.UserInfo)
        self.assertEquals(res_obj1._id, user_info_obj_id)
        res_obj1 = self.resource_registry_service.read_object(
            actor_identity_obj_id, PRED.hasInfo, RT.UserInfo, id_only=True)
        self.assertEquals(res_obj1, user_info_obj_id)
        res_obj2 = self.resource_registry_service.read_subject(
            RT.ActorIdentity, PRED.hasInfo, user_info_obj_id)
        self.assertEquals(res_obj2._id, actor_identity_obj_id)
        res_obj2 = self.resource_registry_service.read_subject(
            RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, id_only=True)
        self.assertEquals(res_obj2, actor_identity_obj_id)

        # Create a similar association to a specific revision
        # TODO: This is not a supported case so far
        assoc_id2, assoc_rev2 = self.resource_registry_service.create_association(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id, "H2R")

        # Search for associations (good cases)
        ret1 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        ret2 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(
            None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id, None, False)
        ret2 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, id_only=False)
        ret3 = self.resource_registry_service.find_associations(
            predicate=PRED.hasInfo, id_only=False)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations (good cases)
        ret1 = self.resource_registry_service.find_associations(
            read_actor_identity_obj, PRED.hasInfo, read_user_info_obj)
        ret2 = self.resource_registry_service.find_associations(
            read_actor_identity_obj, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(
            None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, read_user_info_obj, None,
            True)
        ret2 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, id_only=True)
        ret3 = self.resource_registry_service.find_associations(
            predicate=PRED.hasInfo, id_only=True)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(None, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(
                actor_identity_obj_id, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(
                None, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(
                actor_identity_obj, None, user_info_obj_id)
        self.assertTrue(
            cm.exception.message == "Object id not available in subject")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(
                actor_identity_obj_id, None, user_info_obj)
        self.assertTrue(
            cm.exception.message == "Object id not available in object")

        # Find subjects (good cases)
        subj_ret1 = self.resource_registry_service.find_subjects(
            RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, True)
        subj_ret2 = self.resource_registry_service.find_subjects(
            RT.ActorIdentity, PRED.hasInfo, read_user_info_obj, True)
        self.assertTrue(len(subj_ret1) == len(subj_ret2))
        self.assertTrue(subj_ret1[0] == subj_ret2[0])
        self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id)

        subj_ret3 = self.resource_registry_service.find_subjects(
            None, PRED.hasInfo, user_info_obj_id, True)
        subj_ret4 = self.resource_registry_service.find_subjects(
            None, None, read_user_info_obj, True)
        self.assertTrue(len(subj_ret3) == len(subj_ret4))
        self.assertTrue(subj_ret3[0] == subj_ret4[0])
        self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id)

        subj_ret5 = self.resource_registry_service.find_subjects(
            None, PRED.hasInfo, user_info_obj_id, False)
        subj_ret6 = self.resource_registry_service.find_subjects(
            None, None, read_user_info_obj, False)
        self.assertTrue(len(subj_ret5) == len(subj_ret6))
        self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id)
        self.assertTrue(subj_ret5[1][0]._id == subj_ret6[1][0]._id)

        # Find subjects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide object")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_subjects(
                RT.UserCredentials, PRED.bogus, user_info_obj_id, True)
        self.assertTrue(cm.exception.message == "bogus")

        ret = self.resource_registry_service.find_subjects(
            RT.UserInfo, PRED.hasCredentials, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_subjects(
            RT.UserCredentials, PRED.hasInfo, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(
                RT.UserCredentials, PRED.hasInfo, user_info_obj, True)
        self.assertTrue(
            cm.exception.message == "Object id not available in object")

        # Find objects (good cases)
        subj_ret1 = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasInfo, RT.UserInfo, True)
        subj_ret2 = self.resource_registry_service.find_objects(
            read_actor_identity_obj, PRED.hasInfo, RT.UserInfo, True)
        self.assertTrue(len(subj_ret1) == len(subj_ret2))
        self.assertTrue(subj_ret1[0] == subj_ret2[0])
        self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id)

        subj_ret3 = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasInfo, None, True)
        subj_ret4 = self.resource_registry_service.find_objects(
            actor_identity_obj_id, None, None, True)
        self.assertTrue(len(subj_ret3) == len(subj_ret4))
        self.assertTrue(subj_ret3[0] == subj_ret4[0])
        self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id)

        subj_ret5 = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasInfo, None, False)
        subj_ret6 = self.resource_registry_service.find_objects(
            read_actor_identity_obj, None, None, False)
        self.assertTrue(len(subj_ret5) == len(subj_ret6))
        self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id)
        self.assertTrue(subj_ret5[1][0]._id == subj_ret6[1][0]._id)

        # Find objects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide subject")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_objects(
                actor_identity_obj_id, PRED.bogus, RT.UserCredentials, True)
        self.assertTrue(cm.exception.message == "bogus")

        ret = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasCredentials, RT.ActorIdentity, True)
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasInfo, RT.UserCredentials, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(
                actor_identity_obj, PRED.hasInfo, RT.UserInfo, True)
        self.assertTrue(
            cm.exception.message == "Object id not available in subject")

        # Get association (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(None, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(
                actor_identity_obj_id, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(
                None, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(
                actor_identity_obj, None, user_info_obj_id)
        self.assertTrue(
            cm.exception.message == "Object id not available in subject")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(
                actor_identity_obj_id, None, user_info_obj)
        self.assertTrue(
            cm.exception.message == "Object id not available in object")

        # Delete one of the associations
        self.resource_registry_service.delete_association(assoc_id2)

        assoc = self.resource_registry_service.get_association(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(assoc._id == assoc_id1)

        # Delete (bad cases)
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete_association("bogus")
        self.assertTrue(
            cm.exception.message == "Object with id bogus does not exist.")

        # Delete other association
        self.resource_registry_service.delete_association(assoc_id1)

        # Delete resources
        self.resource_registry_service.delete(actor_identity_obj_id)
        self.resource_registry_service.delete(user_info_obj_id)

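    # A minimal sketch (hypothetical helper, not part of the original suite) of
    # the exact-triple lookup that get_association performs above; a missing
    # triple is assumed to raise NotFound, like the registry's other lookups.
    def _has_association(self, subject_id, predicate, object_id):
        try:
            self.resource_registry_service.get_association(
                subject_id, predicate, object_id)
            return True
        except NotFound:
            return False
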
    def test_find_resources(self):
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_resources(
                RT.UserInfo, LCS.DRAFT, "name", False)
        self.assertTrue(
            cm.exception.message == "find by name does not support lcstate")

        ret = self.resource_registry_service.find_resources(
            RT.UserInfo, None, "name", False)
        self.assertTrue(len(ret[0]) == 0)

        # Instantiate an object
        obj = IonObject("InstrumentDevice", name="name")

        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        ret = self.resource_registry_service.find_resources(
            RT.InstrumentDevice, None, "name", False)
        self.assertTrue(len(ret[0]) == 1)
        self.assertTrue(ret[0][0]._id == read_obj._id)

        ret = self.resource_registry_service.find_resources(
            RT.InstrumentDevice, LCS.DRAFT, None, False)
        self.assertTrue(len(ret[0]) == 1)
        self.assertTrue(ret[0][0]._id == read_obj._id)

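    # A minimal sketch (hypothetical helper) of the find_resources call pattern
    # used above: fetch full resource objects of one type by exact name.
    def _find_by_name(self, restype, name):
        objs, _ = self.resource_registry_service.find_resources(
            restype, None, name, False)
        return objs
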
    def test_attach(self):

        binary = "\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x03\x00\x00\x00(-\x0fS\x00\x00\x00\x03sBIT\x08\x08\x08\xdb\xe1O\xe0\x00\x00\x00~PLTEf3\x00\xfc\xf7\xe0\xee\xcc\x00\xd3\xa0\x00\xcc\x99\x00\xec\xcdc\x9fl\x00\xdd\xb2\x00\xff\xff\xff|I\x00\xf9\xdb\x00\xdd\xb5\x19\xd9\xad\x10\xb6\x83\x00\xf8\xd6\x00\xf2\xc5\x00\xd8\xab\x00n;\x00\xff\xcc\x00\xd6\xa4\t\xeb\xb8\x00\x83Q\x00\xadz\x00\xff\xde\x00\xff\xd6\x00\xd6\xa3\x00\xdf\xaf\x00\xde\xad\x10\xbc\x8e\x00\xec\xbe\x00\xec\xd4d\xff\xe3\x00tA\x00\xf6\xc4\x00\xf6\xce\x00\xa5u\x00\xde\xa5\x00\xf7\xbd\x00\xd6\xad\x08\xdd\xaf\x19\x8cR\x00\xea\xb7\x00\xee\xe9\xdf\xc5\x00\x00\x00\tpHYs\x00\x00\n\xf0\x00\x00\n\xf0\x01B\xac4\x98\x00\x00\x00\x1ctEXtSoftware\x00Adobe Fireworks CS4\x06\xb2\xd3\xa0\x00\x00\x00\x15tEXtCreation Time\x0029/4/09Oq\xfdE\x00\x00\x00\xadIDAT\x18\x95M\x8f\x8d\x0e\x820\x0c\x84;ZdC~f\x07\xb2\x11D\x86\x89\xe8\xfb\xbf\xa0+h\xe2\x97\\\xd2^\x93\xb6\x07:1\x9f)q\x9e\xa5\x06\xad\xd5\x13\x8b\xac,\xb3\x02\x9d\x12C\xa1-\xef;M\x08*\x19\xce\x0e?\x1a\xeb4\xcc\xd4\x0c\x831\x87V\xca\xa1\x1a\xd3\x08@\xe4\xbd\xb7\x15P;\xc8\xd4{\x91\xbf\x11\x90\xffg\xdd\x8di\xfa\xb6\x0bs2Z\xff\xe8yg2\xdc\x11T\x96\xc7\x05\xa5\xef\x96+\xa7\xa59E\xae\xe1\x84cm^1\xa6\xb3\xda\x85\xc8\xd8/\x17se\x0eN^'\x8c\xc7\x8e\x88\xa8\xf6p\x8e\xc2;\xc6.\xd0\x11.\x91o\x12\x7f\xcb\xa5\xfe\x00\x89]\x10:\xf5\x00\x0e\xbf\x00\x00\x00\x00IEND\xaeB`\x82"

        # Owner creation tests
        instrument = IonObject("InstrumentDevice", name='instrument')
        iid, _ = self.resource_registry_service.create(instrument)

        att = Attachment(content=binary, attachment_type=AttachmentType.BLOB)
        aid1 = self.resource_registry_service.create_attachment(iid, att)

        att1 = self.resource_registry_service.read_attachment(aid1)
        self.assertEquals(binary, att1.content)

        import base64
        att = Attachment(content=base64.encodestring(binary),
                         attachment_type=AttachmentType.ASCII)
        aid2 = self.resource_registry_service.create_attachment(iid, att)

        att1 = self.resource_registry_service.read_attachment(aid2)
        self.assertEquals(binary, base64.decodestring(att1.content))

        att_ids = self.resource_registry_service.find_attachments(iid,
                                                                  id_only=True)
        self.assertEquals(att_ids, [aid1, aid2])

        att_ids = self.resource_registry_service.find_attachments(
            iid, id_only=True, descending=True)
        self.assertEquals(att_ids, [aid2, aid1])

        att_ids = self.resource_registry_service.find_attachments(
            iid, id_only=True, descending=True, limit=1)
        self.assertEquals(att_ids, [aid2])

        atts = self.resource_registry_service.find_attachments(iid,
                                                               id_only=False,
                                                               limit=1)
        self.assertEquals(atts[0].content, att1.content)

        self.resource_registry_service.delete_attachment(aid1)

        att_ids = self.resource_registry_service.find_attachments(iid,
                                                                  id_only=True)
        self.assertEquals(att_ids, [aid2])

        self.resource_registry_service.delete_attachment(aid2)

        att_ids = self.resource_registry_service.find_attachments(iid,
                                                                  id_only=True)
        self.assertEquals(att_ids, [])

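    # A minimal sketch (hypothetical helper) of the newest-first attachment
    # lookup exercised above via descending order and a limit of one.
    def _latest_attachment_id(self, resource_id):
        att_ids = self.resource_registry_service.find_attachments(
            resource_id, id_only=True, descending=True, limit=1)
        return att_ids[0] if att_ids else None
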
    def test_read_mult(self):
        test_resource1_id, _ = self.resource_registry_service.create(
            Resource(name='test1'))
        test_resource2_id, _ = self.resource_registry_service.create(
            Resource(name='test2'))

        res_list = [test_resource1_id, test_resource2_id]

        objects = self.resource_registry_service.read_mult(res_list)

        for o in objects:
            self.assertIsInstance(o, Resource)
            self.assertTrue(o._id in res_list)

    def test_find_associations_mult(self):
        dp = DataProcess()
        transform = Transform()
        pd = ProcessDefinition()

        dp_id, _ = self.resource_registry_service.create(dp)
        transform_id, _ = self.resource_registry_service.create(transform)
        pd_id, _ = self.resource_registry_service.create(pd)

        self.resource_registry_service.create_association(
            subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.resource_registry_service.create_association(
            subject=transform_id,
            object=pd_id,
            predicate=PRED.hasProcessDefinition)

        results, _ = self.resource_registry_service.find_associations_mult(
            subjects=[dp_id], id_only=True)
        self.assertTrue(results == [transform_id])

        results, _ = self.resource_registry_service.find_associations_mult(
            subjects=[dp_id, transform_id], id_only=True)
        results.sort()
        correct = [transform_id, pd_id]
        correct.sort()
        self.assertTrue(results == correct)
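
    # A minimal sketch (hypothetical helper, not part of the original suite) of
    # the batched association walk exercised above: find_associations_mult
    # resolves the objects of several subjects in one registry round trip.
    def _objects_of(self, subject_ids):
        obj_ids, _ = self.resource_registry_service.find_associations_mult(
            subjects=subject_ids, id_only=True)
        return obj_ids
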
class TransformManagementServiceIntTest(IonIntegrationTestCase):
    def setUp(self):
        # set up the container
        self._start_container()
        self.container.start_rel_from_url("res/deploy/r2dm.yml")

        self.pubsub_cli = PubsubManagementServiceClient(node=self.container.node)
        self.tms_cli = TransformManagementServiceClient(node=self.container.node)
        self.rr_cli = ResourceRegistryServiceClient(node=self.container.node)
        self.procd_cli = ProcessDispatcherServiceClient(node=self.container.node)

        self.input_stream_id = self.pubsub_cli.create_stream(name="input_stream", original=True)

        self.input_subscription_id = self.pubsub_cli.create_subscription(
            query=StreamQuery(stream_ids=[self.input_stream_id]),
            exchange_name="transform_input",
            name="input_subscription",
        )

        self.output_stream_id = self.pubsub_cli.create_stream(name="output_stream", original=True)

        self.process_definition = ProcessDefinition(name="basic_transform_definition")
        self.process_definition.executable = {
            "module": "ion.processes.data.transforms.transform_example",
            "class": "TransformExample",
        }
        self.process_definition_id = self.procd_cli.create_process_definition(
            process_definition=self.process_definition
        )

    @attr("LOCOINT")
    @unittest.skipIf(os.getenv("CEI_LAUNCH_TEST", False), "Not integrated for CEI")
    def test_create_transform(self):
        configuration = {"program_args": {"arg1": "value"}}

        transform_id = self.tms_cli.create_transform(
            name="test_transform",
            in_subscription_id=self.input_subscription_id,
            out_streams={"output": self.output_stream_id},
            process_definition_id=self.process_definition_id,
        )

        # test transform creation in rr
        transform = self.rr_cli.read(transform_id)
        self.assertEquals(transform.name, "test_transform")

        # test associations
        predicates = [PRED.hasSubscription, PRED.hasOutStream, PRED.hasProcessDefinition]
        assocs = []
        for p in predicates:
            assocs += self.rr_cli.find_associations(transform_id, p, id_only=True)
        self.assertEquals(len(assocs), 3)

        # test process creation
        transform = self.tms_cli.read_transform(transform_id)
        pid = transform.process_id
        proc = self.container.proc_manager.procs.get(pid)
        self.assertIsInstance(proc, TransformExample)

        # clean up
        self.tms_cli.delete_transform(transform_id)

    def test_create_transform_no_procdef(self):
        with self.assertRaises(NotFound):
            self.tms_cli.create_transform(name="test", in_subscription_id=self.input_subscription_id)

    def test_create_transform_bad_procdef(self):
        with self.assertRaises(NotFound):
            self.tms_cli.create_transform(
                name="test", in_subscription_id=self.input_subscription_id, process_definition_id="bad"
            )

    def test_create_transform_no_config(self):
        transform_id = self.tms_cli.create_transform(
            name="test_transform",
            in_subscription_id=self.input_subscription_id,
            out_streams={"output": self.output_stream_id},
            process_definition_id=self.process_definition_id,
        )
        self.tms_cli.delete_transform(transform_id)

    def test_create_transform_name_failure(self):
        transform_id = self.tms_cli.create_transform(
            name="test_transform",
            in_subscription_id=self.input_subscription_id,
            out_streams={"output": self.output_stream_id},
            process_definition_id=self.process_definition_id,
        )
        with self.assertRaises(BadRequest):
            transform_id = self.tms_cli.create_transform(
                name="test_transform",
                in_subscription_id=self.input_subscription_id,
                out_streams={"output": self.output_stream_id},
                process_definition_id=self.process_definition_id,
            )
        self.tms_cli.delete_transform(transform_id)

    @attr("LOCOINT")
    @unittest.skipIf(os.getenv("CEI_LAUNCH_TEST", False), "Not integrated for CEI")
    def test_create_no_output(self):
        transform_id = self.tms_cli.create_transform(
            name="test_transform",
            in_subscription_id=self.input_subscription_id,
            process_definition_id=self.process_definition_id,
        )

        predicates = [PRED.hasSubscription, PRED.hasOutStream, PRED.hasProcessDefinition]
        assocs = []
        for p in predicates:
            assocs += self.rr_cli.find_associations(transform_id, p, id_only=True)
        self.assertEquals(len(assocs), 2)

        # test process creation
        transform = self.tms_cli.read_transform(transform_id)
        pid = transform.process_id
        proc = self.container.proc_manager.procs.get(pid)
        self.assertIsInstance(proc, TransformExample)

        self.tms_cli.delete_transform(transform_id)

    def test_read_transform_exists(self):
        trans_obj = IonObject(RT.Transform, name="trans_obj")
        trans_id, _ = self.rr_cli.create(trans_obj)

        res = self.tms_cli.read_transform(trans_id)
        actual = self.rr_cli.read(trans_id)

        self.assertEquals(res._id, actual._id)

    def test_read_transform_nonexist(self):
        with self.assertRaises(NotFound) as e:
            self.tms_cli.read_transform("123")

    def test_activate_transform(self):

        transform_id = self.tms_cli.create_transform(
            name="test_transform",
            in_subscription_id=self.input_subscription_id,
            out_streams={"output": self.output_stream_id},
            process_definition_id=self.process_definition_id,
        )

        self.tms_cli.activate_transform(transform_id)

        # pubsub check if activated?
        self.tms_cli.delete_transform(transform_id)

    def test_activate_transform_nonexist(self):
        with self.assertRaises(NotFound):
            self.tms_cli.activate_transform("1234")

    def test_delete_transform(self):

        transform_id = self.tms_cli.create_transform(
            name="test_transform",
            in_subscription_id=self.input_subscription_id,
            process_definition_id=self.process_definition_id,
        )
        self.tms_cli.delete_transform(transform_id)

        # assertions
        with self.assertRaises(NotFound):
            self.rr_cli.read(transform_id)

    def test_delete_transform_nonexist(self):
        with self.assertRaises(NotFound):
            self.tms_cli.delete_transform("123")

    def test_execute_transform(self):
        # set up
        process_definition = ProcessDefinition(name="procdef_execute")
        process_definition.executable["module"] = "ion.processes.data.transforms.transform_example"
        process_definition.executable["class"] = "ReverseTransform"
        data = [1, 2, 3]

        process_definition_id, _ = self.rr_cli.create(process_definition)

        retval = self.tms_cli.execute_transform(process_definition_id, data)

        self.assertEquals(retval, [3, 2, 1])

    def test_integrated_transform(self):
        """
        This example script runs a chained three-way transform:
            B
        A <
            C
        Where A is the even_odd transform (generates a stream of even and odd numbers from input)
        and B and C are the basic transforms that receive even and odd input
        """
        cc = self.container
        assertions = self.assertTrue

        pubsub_cli = PubsubManagementServiceClient(node=cc.node)
        rr_cli = ResourceRegistryServiceClient(node=cc.node)
        tms_cli = TransformManagementServiceClient(node=cc.node)
        # -------------------------------
        # Process Definition
        # -------------------------------
        # Create the process definition for the basic transform
        process_definition = IonObject(RT.ProcessDefinition, name="basic_transform_definition")
        process_definition.executable = {
            "module": "ion.processes.data.transforms.transform_example",
            "class": "TransformExample",
        }
        basic_transform_definition_id, _ = rr_cli.create(process_definition)

        # Create The process definition for the TransformEvenOdd
        process_definition = IonObject(RT.ProcessDefinition, name="evenodd_transform_definition")
        process_definition.executable = {
            "module": "ion.processes.data.transforms.transform_example",
            "class": "TransformEvenOdd",
        }
        evenodd_transform_definition_id, _ = rr_cli.create(process_definition)

        # -------------------------------
        # Streams
        # -------------------------------
        streams = [pubsub_cli.create_stream() for i in xrange(5)]

        # -------------------------------
        # Subscriptions
        # -------------------------------

        query = StreamQuery(stream_ids=[streams[0]])
        input_subscription_id = pubsub_cli.create_subscription(query=query, exchange_name="input_queue")

        query = StreamQuery(stream_ids=[streams[1]])  # even output
        even_subscription_id = pubsub_cli.create_subscription(query=query, exchange_name="even_queue")

        query = StreamQuery(stream_ids=[streams[2]])  # odd output
        odd_subscription_id = pubsub_cli.create_subscription(query=query, exchange_name="odd_queue")

        # -------------------------------
        # Launch the EvenOdd Transform
        # -------------------------------

        evenodd_id = tms_cli.create_transform(
            name="even_odd",
            in_subscription_id=input_subscription_id,
            out_streams={"even": streams[1], "odd": streams[2]},
            process_definition_id=evenodd_transform_definition_id,
            configuration={},
        )
        tms_cli.activate_transform(evenodd_id)

        # -------------------------------
        # Launch the Even Processing Transform
        # -------------------------------

        even_transform_id = tms_cli.create_transform(
            name="even_transform",
            in_subscription_id=even_subscription_id,
            out_streams={"even_plus1": streams[3]},
            process_definition_id=basic_transform_definition_id,
            configuration={},
        )
        tms_cli.activate_transform(even_transform_id)

        # -------------------------------
        # Launch the Odd Processing Transform
        # -------------------------------

        odd_transform_id = tms_cli.create_transform(
            name="odd_transform",
            in_subscription_id=odd_subscription_id,
            out_streams={"odd_plus1": streams[4]},
            process_definition_id=basic_transform_definition_id,
            configuration={},
        )
        tms_cli.activate_transform(odd_transform_id)

        # -------------------------------
        # Set up final subscribers
        # -------------------------------

        evenplus1_subscription_id = pubsub_cli.create_subscription(
            query=StreamQuery([streams[3]]),
            exchange_name="evenplus1_queue",
            name="EvenPlus1Subscription",
            description="EvenPlus1 SubscriptionDescription",
        )
        oddplus1_subscription_id = pubsub_cli.create_subscription(
            query=StreamQuery([streams[4]]),
            exchange_name="oddplus1_queue",
            name="OddPlus1Subscription",
            description="OddPlus1 SubscriptionDescription",
        )

        total_msg_count = 2

        msgs = gevent.queue.Queue()

        def even1_message_received(message, headers):
            input = int(message.get("num"))
            assertions((input % 2))  # Assert it is odd (transform adds 1)
            msgs.put(True)

        def odd1_message_received(message, headers):
            input = int(message.get("num"))
            assertions(not (input % 2))  # Assert it is even
            msgs.put(True)

        subscriber_registrar = StreamSubscriberRegistrar(process=cc, node=cc.node)
        even_subscriber = subscriber_registrar.create_subscriber(
            exchange_name="evenplus1_queue", callback=even1_message_received
        )
        odd_subscriber = subscriber_registrar.create_subscriber(
            exchange_name="oddplus1_queue", callback=odd1_message_received
        )

        # Start subscribers
        even_subscriber.start()
        odd_subscriber.start()

        # Activate subscriptions
        pubsub_cli.activate_subscription(evenplus1_subscription_id)
        pubsub_cli.activate_subscription(oddplus1_subscription_id)

        # -------------------------------
        # Set up fake stream producer
        # -------------------------------

        pid = cc.spawn_process(name="dummy_process_for_test", module="pyon.ion.process", cls="SimpleProcess", config={})
        dummy_process = cc.proc_manager.procs[pid]

        # Normally the user does not see or create the publisher; this is part
        # of the container's business. For the test we need to set it up explicitly.
        publisher_registrar = StreamPublisherRegistrar(process=dummy_process, node=cc.node)
        stream_publisher = publisher_registrar.create_publisher(stream_id=streams[0])

        # -------------------------------
        # Start test
        # -------------------------------

        # Publish a stream
        for i in xrange(total_msg_count):
            stream_publisher.publish({"num": str(i)})

        time.sleep(0.5)

        for i in xrange(total_msg_count * 2):
            try:
                # A timeout is needed here; a bare get() would block forever
                # and the Empty branch below could never fire.
                msgs.get(timeout=4)
            except Empty:
                assertions(False, "Failed to process all messages correctly.")

    @attr("LOCOINT")
    @unittest.skipIf(os.getenv("CEI_LAUNCH_TEST", False), "CEI incompatible")
    def test_transform_restart(self):
        tms_cli = self.tms_cli
        rr_cli = self.rr_cli
        pubsub_cli = self.pubsub_cli
        cc = self.container
        assertions = self.assertTrue

        proc_def = ProcessDefinition(
            executable={
                "module": "ion.services.dm.transformation.test.test_transform_service",
                "class": "TestTransform",
            }
        )
        proc_def_id, _ = rr_cli.create(proc_def)

        stream_id = pubsub_cli.create_stream()
        subscription_id = pubsub_cli.create_subscription(
            query=StreamQuery(stream_ids=[stream_id]), exchange_name="test_transform"
        )

        transform_id = tms_cli.create_transform(
            name="test_transform", in_subscription_id=subscription_id, process_definition_id=proc_def_id
        )
        tms_cli.activate_transform(transform_id)

        transform = rr_cli.read(transform_id)
        pid = transform.process_id

        assertions(cc.proc_manager.procs.has_key(pid), "Transform process was never started.")
        cc.proc_manager.terminate_process(pid)

        assertions(not cc.proc_manager.procs.has_key(pid), "Transform process was never killed")

        handle = cc.proc_manager.procs_by_name["transform_management"]
        handle._restart_transform(transform_id)

        transform = rr_cli.read(transform_id)
        pid = transform.process_id

        assertions(cc.proc_manager.procs.has_key(pid), "Transform process was never restarted.")
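
# A minimal sketch (hypothetical module-level helper, names taken from the
# tests above) of the create -> activate -> delete lifecycle these tests
# exercise; tms_cli is a TransformManagementServiceClient as in setUp.
def run_transform_lifecycle(tms_cli, subscription_id, out_streams, procdef_id):
    transform_id = tms_cli.create_transform(
        name="sketch_transform",
        in_subscription_id=subscription_id,
        out_streams=out_streams,
        process_definition_id=procdef_id,
    )
    tms_cli.activate_transform(transform_id)
    # ... publish to the input stream and consume the outputs here ...
    tms_cli.delete_transform(transform_id)
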
class TestInstrumentDataIngestion(IonIntegrationTestCase):
    """
    Tests for data ingestion from an instrument. It is basically the
    test_poll from test_instrument_agent (as of 7/23/12) with
    the additional preparation of the ingestion management service and the
    verification that the published granules are persisted.
    """

    def setUp(self):

        self.resource_registry = ResourceRegistryServiceClient()
        self.ingestion_management = IngestionManagementServiceClient()


        self._support = DriverIntegrationTestSupport(DRV_MOD,
                                                     DRV_CLS,
                                                     DEV_ADDR,
                                                     DEV_PORT,
                                                     DELIM,
                                                     WORK_DIR)
        # Start port agent, add stop to cleanup.
        self._pagent = None        
        self._start_pagent()
        self.addCleanup(self._support.stop_pagent)    
        
        # Start container.
        self._start_container()
        
        # Bring up services in a deploy file (no need to message)
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Start data subscribers, add stop to cleanup.
        # Define stream_config.
        self._no_samples = None
        self._async_data_result = AsyncResult()
        self._data_greenlets = []
        self._stream_config = {}
        self._samples_received = []
        self._data_subscribers = []
        self._start_data_subscribers()
        self.addCleanup(self._stop_data_subscribers)

        self.prepare_ingestion()

        # Start event subscribers, add stop to cleanup.
        self._no_events = None
        self._async_event_result = AsyncResult()
        self._events_received = []

        # Create agent config.
        agent_config = {
            'driver_config' : DVR_CONFIG,
            'stream_config' : self._stream_config,
            'agent'         : {'resource_id': IA_RESOURCE_ID},
            'test_mode' : True
        }
        
        # Start instrument agent.
        self._ia_pid = None
        log.debug("TestInstrumentDataIngestion.setup(): starting IA.")
        container_client = ContainerAgentClient(node=self.container.node,
                                                name=self.container.name)
        self._ia_pid = container_client.spawn_process(name=IA_NAME,
                                                      module=IA_MOD, 
                                                      cls=IA_CLS, 
                                                      config=agent_config)      
        log.info('Agent pid=%s.', str(self._ia_pid))
        
        # Start a resource agent client to talk with the instrument agent.
        self._ia_client = None
        self._ia_client = ResourceAgentClient(IA_RESOURCE_ID, process=FakeProcess())
        log.info('Got ia client %s.', str(self._ia_client))        
        
    def _start_pagent(self):
        """
        Construct and start the port agent.
        """
        port = self._support.start_pagent()
        
        # Configure driver to use port agent port number.
        DVR_CONFIG['comms_config'] = {
            'addr' : 'localhost',
            'port' : port
        }
        
    def _start_data_subscribers(self):
        """
        """
        # Create a pubsub client to create streams.
        pubsub_client = PubsubManagementServiceClient(node=self.container.node)

        # A callback for processing subscribed-to data.
        def consume_data(message, headers):
            log.info('Subscriber received data message: type(message)=%s.', str(type(message)))
            log.info('Subscriber received data message: %s.', str(message))
            self._samples_received.append(message)
            if self._no_samples and self._no_samples == len(self._samples_received):
                self._async_data_result.set()
                
        # Create a stream subscriber registrar to create subscribers.
        subscriber_registrar = StreamSubscriberRegistrar(process=self.container,
                                                container=self.container)

        # Create streams and subscriptions for each stream named in driver.
        self._stream_config = {}
        self._data_subscribers = []
        for stream_name in PACKET_CONFIG:
            stream_def = ctd_stream_definition(stream_id=None)
            stream_def_id = pubsub_client.create_stream_definition(
                                                    container=stream_def)        
            stream_id = pubsub_client.create_stream(
                        name=stream_name,
                        stream_definition_id=stream_def_id,
                        original=True,
                        encoding='ION R2')

            taxy = get_taxonomy(stream_name)
            stream_config = dict(
                id=stream_id,
                taxonomy=taxy.dump()
            )
            self._stream_config[stream_name] = stream_config

            # Create subscriptions for each stream.
            exchange_name = '%s_queue' % stream_name
            sub = subscriber_registrar.create_subscriber(exchange_name=exchange_name,
                                                         callback=consume_data)
            self._listen(sub)
            self._data_subscribers.append(sub)
            query = StreamQuery(stream_ids=[stream_id])
            sub_id = pubsub_client.create_subscription(
                                query=query, exchange_name=exchange_name, exchange_point='science_data')
            pubsub_client.activate_subscription(sub_id)
            
    def _listen(self, sub):
        """
        Pass in a subscriber here, this will make it listen in a background greenlet.
        """
        gl = spawn(sub.listen)
        self._data_greenlets.append(gl)
        sub._ready_event.wait(timeout=5)
        return gl
                                 
    def _stop_data_subscribers(self):
        """
        Stop the data subscribers on cleanup.
        """
        for sub in self._data_subscribers:
            sub.stop()
        for gl in self._data_greenlets:
            gl.kill()
            
    def assertRawSampleDict(self, val):
        """
        Verify the value is a raw sample dictionary for the sbe37.
        """
        #{'stream_name': 'raw', 'blob': ['20.8074,80.04590, 761.394,   33.5658, 1506.076, 01 Feb 2001, 01:01:00'], 'time': [1342392781.61211]}
        self.assertTrue(isinstance(val, dict))
        self.assertTrue(val.has_key('blob'))
        self.assertTrue(val.has_key('time'))
        blob = val['blob'][0]
        time = val['time'][0]
    
        self.assertTrue(isinstance(blob, str))
        self.assertTrue(isinstance(time, float))

    def assertParsedSampleDict(self, val):
        """
        Verify the value is a parsed sample dictionary for the sbe37.
        """
        #{'p': [-6.945], 'c': [0.08707], 't': [20.002], 'time': [1333752198.450622]}
        self.assertTrue(isinstance(val, dict))
        self.assertTrue(val.has_key('c'))
        self.assertTrue(val.has_key('t'))
        self.assertTrue(val.has_key('p'))
        self.assertTrue(val.has_key('time'))
        c = val['c'][0]
        t = val['t'][0]
        p = val['p'][0]
        time = val['time'][0]

        self.assertTrue(isinstance(c, float))
        self.assertTrue(isinstance(t, float))
        self.assertTrue(isinstance(p, float))
        self.assertTrue(isinstance(time, float))

    def assertSample(self, sample):
        """
        Verify a sample retrieved from the sbe37, which is expected to be a
        dict of dicts {'raw':raw_sample, 'parsed':parsed_sample}.
        """
        self.assertTrue(isinstance(sample, dict))
        raw_sample = sample['raw']
        parsed_sample = sample['parsed']
        self.assertParsedSampleDict(parsed_sample)
        self.assertRawSampleDict(raw_sample)

    def get_ingestion_config(self):
        #
        # From test_dm_end_2_end.py as of 7/23/12
        #

        #--------------------------------------------------------------------------------
        # Grab the ingestion configuration from the resource registry
        #--------------------------------------------------------------------------------
        # The ingestion configuration should have been created by the bootstrap service
        # which is configured through r2deploy.yml

        ingest_configs, _ = self.resource_registry.find_resources(
            restype=RT.IngestionConfiguration,id_only=True)
        return ingest_configs[0]

    def prepare_ingestion(self):
        #
        # Takes pieces from test_dm_end_2_end.py as of 7/23/12
        #

        # Get the ingestion configuration from the resource registry
        self.ingest_config_id = ingest_config_id = self.get_ingestion_config()

        # to keep the (stream_id, dataset_id) associated with each stream_name
        self.dataset_ids = {}

        for stream_name, stream_config in self._stream_config.iteritems():
            stream_id = stream_config['id']

            dataset_id = self.ingestion_management.persist_data_stream(
                stream_id=stream_id,
                ingestion_configuration_id=ingest_config_id)

            log.info("persisting stream_name=%s (stream_id=%s): dataset_id=%s" % (
                stream_name, stream_id, dataset_id))

            self.assertTrue(self.ingestion_management.is_persisted(stream_id))

            self.dataset_ids[stream_name] = (stream_id, dataset_id)

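    # A minimal sketch (hypothetical helper) of the persist-and-verify step
    # performed per stream in prepare_ingestion above.
    def _persist_stream(self, stream_id):
        dataset_id = self.ingestion_management.persist_data_stream(
            stream_id=stream_id,
            ingestion_configuration_id=self.ingest_config_id)
        self.assertTrue(self.ingestion_management.is_persisted(stream_id))
        return dataset_id
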
    def verify_granules_persisted(self):
        #
        # takes elements from ingestion_management_test.py as of 7/23/12
        #
        ingest_config_id = self.ingest_config_id
        for stream_name, (stream_id, dataset_id) in self.dataset_ids.iteritems():

            assoc = self.resource_registry.find_associations(
                subject=ingest_config_id, predicate=PRED.hasSubscription)

            sub = self.resource_registry.read(assoc[0].o)

            self.assertTrue(sub.is_active)

            dataset = self.resource_registry.read(dataset_id)
            self.assertIsInstance(dataset, DataSet)

            log.info("Data persisted for stream_name=%s (stream_id=%s, "
                     "dataset_id=%s) dataset=%s" % (stream_name, stream_id, dataset_id, dataset))


    def test_poll_and_verify_granules_persisted(self):
        #
        # As in test_instrument_agent.py:TestInstrumentAgent.test_poll, with
        # the added verification that the data are persisted.
        #
        self._test_poll()
        self.verify_granules_persisted()

    def _test_poll(self):
        #
        # Same as test_instrument_agent.py:TestInstrumentAgent.test_poll as of 7/23/12
        #

        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.UNINITIALIZED)
    
        cmd = AgentCommand(command='initialize')
        retval = self._ia_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.INACTIVE)

        cmd = AgentCommand(command='go_active')
        retval = self._ia_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.IDLE)
        
        cmd = AgentCommand(command='run')
        retval = self._ia_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.OBSERVATORY)
        
        # Let's call acquire_sample 3 times, so we should get 6 samples =
        # 3 raw samples + 3 parsed samples
        self._no_samples = 6
        
        # Poll for a few samples.
        cmd = AgentCommand(command='acquire_sample')
        reply = self._ia_client.execute(cmd)
        log.debug("acquire_sample reply.result=%s" % reply.result)
        self.assertSample(reply.result)

        cmd = AgentCommand(command='acquire_sample')
        reply = self._ia_client.execute(cmd)
        self.assertSample(reply.result)

        cmd = AgentCommand(command='acquire_sample')
        reply = self._ia_client.execute(cmd)
        self.assertSample(reply.result)

        # Assert we got 6 samples.
        self._async_data_result.get(timeout=10)
        self.assertEquals(len(self._samples_received), 6)

        cmd = AgentCommand(command='reset')
        retval = self._ia_client.execute_agent(cmd)
        cmd = AgentCommand(command='get_current_state')
        retval = self._ia_client.execute_agent(cmd)
        state = retval.result
        self.assertEqual(state, InstrumentAgentState.UNINITIALIZED)
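
# A minimal sketch (hypothetical module-level helper, assuming AgentCommand
# and a ResourceAgentClient as in the test above) of the execute-then-verify
# pattern that _test_poll repeats for each agent state change.
def execute_and_check_state(ia_client, command, expected_state):
    ia_client.execute_agent(AgentCommand(command=command))
    retval = ia_client.execute_agent(AgentCommand(command='get_current_state'))
    assert retval.result == expected_state
    return retval.result
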
class TestResourceRegistry(IonIntegrationTestCase):
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url("res/deploy/r2coi.yml")

        # Now create client to bank service
        self.resource_registry_service = ResourceRegistryServiceClient(node=self.container.node)

    def test_crud(self):
        # Some quick registry tests
        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", name="name", foo="bar")
        self.assertTrue(cm.exception.message == "__init__() got an unexpected keyword argument 'foo'")

        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name", "foo": "bar"})
        self.assertTrue(cm.exception.message == "__init__() got an unexpected keyword argument 'foo'")

        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name"}, foo="bar")
        self.assertTrue(cm.exception.message == "__init__() got an unexpected keyword argument 'foo'")

        # Instantiate an object
        obj = IonObject("UserInfo", name="name")

        # Can't set attributes that aren't in the object's schema
        with self.assertRaises(AttributeError) as cm:
            setattr(obj, "foo", "bar")
        self.assertTrue(cm.exception.message == "'UserInfo' object has no attribute 'foo'")

        # Can't call update with an object that hasn't been persisted
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.update(obj)
        self.assertTrue(cm.exception.message.startswith("Object does not have required '_id' or '_rev' attribute"))

        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        # Cannot create object with _id and _rev fields pre-set
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create(read_obj)
        self.assertTrue(cm.exception.message.startswith("Doc must not have '_id'"))

        # Update object
        read_obj.name = "John Doe"
        self.resource_registry_service.update(read_obj)

        # Update should fail with revision mismatch
        with self.assertRaises(Conflict) as cm:
            self.resource_registry_service.update(read_obj)
        self.assertTrue(cm.exception.message.startswith("Object not based on most current version"))

        # Re-read and update object
        read_obj = self.resource_registry_service.read(obj_id)
        self.resource_registry_service.update(read_obj)

        # Delete object
        self.resource_registry_service.delete(obj_id)

        # Make sure read, update and delete report error
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.read(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.update(read_obj)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        # Owner creation tests
        user = IonObject("ActorIdentity", name="user")
        uid, _ = self.resource_registry_service.create(user)

        inst = IonObject("InstrumentDevice", name="instrument")
        iid, _ = self.resource_registry_service.create(inst, headers={"ion-actor-id": str(uid)})

        ids, _ = self.resource_registry_service.find_objects(iid, PRED.hasOwner, RT.ActorIdentity, id_only=True)
        self.assertEquals(len(ids), 1)

        assoc = self.resource_registry_service.read(ids[0])
        self.resource_registry_service.delete(iid)

        with self.assertRaises(NotFound) as ex:
            assoc = self.resource_registry_service.read(iid)

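    # A minimal sketch (hypothetical helper) of the owner lookup used in
    # test_crud above: resolve the ActorIdentity ids owning a resource.
    def _owner_ids(self, resource_id):
        ids, _ = self.resource_registry_service.find_objects(
            resource_id, PRED.hasOwner, RT.ActorIdentity, id_only=True)
        return ids
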
    def test_lifecycle(self):
        att = IonObject("InstrumentDevice", name="mine", description="desc")

        rid, rev = self.resource_registry_service.create(att)

        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.name, att.name)
        self.assertEquals(att1.lcstate, LCS.DRAFT_PRIVATE)

        new_state = self.resource_registry_service.execute_lifecycle_transition(rid, LCE.PLAN)
        self.assertEquals(new_state, LCS.PLANNED_PRIVATE)

        att2 = self.resource_registry_service.read(rid)
        self.assertEquals(att2.lcstate, LCS.PLANNED_PRIVATE)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.execute_lifecycle_transition(rid, LCE.UNANNOUNCE)
        self.assertTrue(
            "type=InstrumentDevice, lcstate=PLANNED_PRIVATE has no transition for event unannounce"
            in cm.exception.message
        )

        new_state = self.resource_registry_service.execute_lifecycle_transition(rid, LCE.DEVELOP)
        self.assertEquals(new_state, LCS.DEVELOPED_PRIVATE)

        self.assertRaises(
            iex.BadRequest,
            self.resource_registry_service.execute_lifecycle_transition,
            resource_id=rid,
            transition_event="NONE##",
        )

        self.resource_registry_service.set_lifecycle_state(rid, LCS.INTEGRATED_PRIVATE)
        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.lcstate, LCS.INTEGRATED_PRIVATE)

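    # A minimal sketch (hypothetical helper) of the transition-and-verify
    # pattern used in test_lifecycle above.
    def _transition_and_check(self, rid, event, expected_lcstate):
        new_state = self.resource_registry_service.execute_lifecycle_transition(
            rid, event)
        self.assertEquals(new_state, expected_lcstate)
        self.assertEquals(
            self.resource_registry_service.read(rid).lcstate, expected_lcstate)
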
    def test_association(self):
        # Instantiate ActorIdentity object
        actor_identity_obj = IonObject("ActorIdentity", name="name")
        actor_identity_obj_id, actor_identity_obj_rev = self.resource_registry_service.create(actor_identity_obj)
        read_actor_identity_obj = self.resource_registry_service.read(actor_identity_obj_id)

        # Instantiate UserInfo object
        user_info_obj = IonObject("UserInfo", name="name")
        user_info_obj_id, user_info_obj_rev = self.resource_registry_service.create(user_info_obj)
        read_user_info_obj = self.resource_registry_service.read(user_info_obj_id)

        # Test create failures
        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.bogus, user_info_obj_id)
        self.assertTrue(cm.exception.message == "bogus")

        # Predicate not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Association must have all elements set")

        # Bad association type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, user_info_obj_id, "bogustype"
            )
        self.assertTrue(cm.exception.message == "Unsupported assoc_type: bogustype")

        # Subject id or object not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(None, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Association must have all elements set")

        # Object id or object not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, None)
        self.assertTrue(cm.exception.message == "Association must have all elements set")

        # Bad subject id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association("bogus", PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Object with id bogus does not exist.")

        # Bad object id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, "bogus")
        self.assertTrue(cm.exception.message == "Object with id bogus does not exist.")

        # _id missing from subject
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Subject id or rev not available")

        # _id missing from object
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, user_info_obj)
        self.assertTrue(cm.exception.message == "Object id or rev not available")

        # Wrong subject type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(user_info_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Illegal subject type UserInfo for predicate hasInfo")

        # Wrong object type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, actor_identity_obj_id
            )
        self.assertTrue(cm.exception.message == "Illegal object type ActorIdentity for predicate hasInfo")

        # Create two different association types between the same subject and predicate
        assoc_id1, assoc_rev1 = self.resource_registry_service.create_association(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id
        )

        # Read object, subject
        res_obj1 = self.resource_registry_service.read_object(actor_identity_obj_id, PRED.hasInfo, RT.UserInfo)
        self.assertEquals(res_obj1._id, user_info_obj_id)
        res_obj1 = self.resource_registry_service.read_object(
            actor_identity_obj_id, PRED.hasInfo, RT.UserInfo, id_only=True
        )
        self.assertEquals(res_obj1, user_info_obj_id)
        res_obj2 = self.resource_registry_service.read_subject(RT.ActorIdentity, PRED.hasInfo, user_info_obj_id)
        self.assertEquals(res_obj2._id, actor_identity_obj_id)
        res_obj2 = self.resource_registry_service.read_subject(
            RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, id_only=True
        )
        self.assertEquals(res_obj2, actor_identity_obj_id)

        # Create a similar association to a specific revision
        # TODO: This is not a supported case so far
        assoc_id2, assoc_rev2 = self.resource_registry_service.create_association(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id, "H2R"
        )

        # Search for associations (good cases)
        ret1 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        ret2 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id, None, False
        )
        ret2 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, id_only=False)
        ret3 = self.resource_registry_service.find_associations(predicate=PRED.hasInfo, id_only=False)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations (good cases)
        ret1 = self.resource_registry_service.find_associations(
            read_actor_identity_obj, PRED.hasInfo, read_user_info_obj
        )
        ret2 = self.resource_registry_service.find_associations(read_actor_identity_obj, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, read_user_info_obj, None, True
        )
        ret2 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, id_only=True)
        ret3 = self.resource_registry_service.find_associations(predicate=PRED.hasInfo, id_only=True)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(None, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(actor_identity_obj_id, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(None, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(actor_identity_obj, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Object id not available in subject")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(actor_identity_obj_id, None, user_info_obj)
        self.assertTrue(cm.exception.message == "Object id not available in object")

        # Find subjects (good cases)
        subj_ret1 = self.resource_registry_service.find_subjects(RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, True)
        subj_ret2 = self.resource_registry_service.find_subjects(
            RT.ActorIdentity, PRED.hasInfo, read_user_info_obj, True
        )
        self.assertTrue(len(subj_ret1) == len(subj_ret2))
        self.assertTrue(subj_ret1[0] == subj_ret2[0])
        self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id)

        subj_ret3 = self.resource_registry_service.find_subjects(None, PRED.hasInfo, user_info_obj_id, True)
        subj_ret4 = self.resource_registry_service.find_subjects(None, None, read_user_info_obj, True)
        self.assertTrue(len(subj_ret3) == len(subj_ret4))
        self.assertTrue(subj_ret3[0] == subj_ret4[0])
        self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id)

        subj_ret5 = self.resource_registry_service.find_subjects(None, PRED.hasInfo, user_info_obj_id, False)
        subj_ret6 = self.resource_registry_service.find_subjects(None, None, read_user_info_obj, False)
        self.assertTrue(len(subj_ret5) == len(subj_ret6))
        self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id)
        self.assertTrue(subj_ret5[1][0]._id == subj_ret6[1][0]._id)

        # Find subjects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide object")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_subjects(RT.UserCredentials, PRED.bogus, user_info_obj_id, True)
        self.assertTrue(cm.exception.message == "bogus")

        ret = self.resource_registry_service.find_subjects(RT.UserInfo, PRED.hasCredentials, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_subjects(RT.UserCredentials, PRED.hasInfo, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(RT.UserCredentials, PRED.hasInfo, user_info_obj, True)
        self.assertTrue(cm.exception.message == "Object id not available in object")

        # Find objects (good cases)
        subj_ret1 = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, RT.UserInfo, True)
        subj_ret2 = self.resource_registry_service.find_objects(
            read_actor_identity_obj, PRED.hasInfo, RT.UserInfo, True
        )
        self.assertTrue(len(subj_ret1) == len(subj_ret2))
        self.assertTrue(subj_ret1[0] == subj_ret2[0])
        self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id)

        subj_ret3 = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, None, True)
        subj_ret4 = self.resource_registry_service.find_objects(actor_identity_obj_id, None, None, True)
        self.assertTrue(len(subj_ret3) == len(subj_ret4))
        self.assertTrue(subj_ret3[0] == subj_ret4[0])
        self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id)

        subj_ret5 = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, None, False)
        subj_ret6 = self.resource_registry_service.find_objects(read_actor_identity_obj, None, None, False)
        self.assertTrue(len(subj_ret5) == len(subj_ret6))
        self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id)
        self.assertTrue(subj_ret5[1][0]._id == subj_ret6[1][0]._id)

        # Find objects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide subject")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.bogus, RT.UserCredentials, True)
        self.assertTrue(cm.exception.message == "bogus")

        ret = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasCredentials, RT.ActorIdentity, True
        )
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, RT.UserCredentials, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(actor_identity_obj, PRED.hasInfo, RT.UserInfo, True)
        self.assertTrue(cm.exception.message == "Object id not available in subject")

        # Get association (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(None, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(actor_identity_obj_id, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(None, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(actor_identity_obj, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Object id not available in subject")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(actor_identity_obj_id, None, user_info_obj)
        self.assertTrue(cm.exception.message == "Object id not available in object")

        # Delete one of the associations
        self.resource_registry_service.delete_association(assoc_id2)

        assoc = self.resource_registry_service.get_association(actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(assoc._id == assoc_id1)

        # Delete (bad cases)
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete_association("bogus")
        self.assertTrue(cm.exception.message == "Object with id bogus does not exist.")

        # Delete other association
        self.resource_registry_service.delete_association(assoc_id1)

        # Delete resources
        self.resource_registry_service.delete(actor_identity_obj_id)
        self.resource_registry_service.delete(user_info_obj_id)

    def test_find_resources(self):
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_resources(RT.UserInfo, LCS.DRAFT, "name", False)
        self.assertTrue(cm.exception.message == "find by name does not support lcstate")

        ret = self.resource_registry_service.find_resources(RT.UserInfo, None, "name", False)
        self.assertTrue(len(ret[0]) == 0)

        # Instantiate an object
        obj = IonObject("InstrumentDevice", name="name")

        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        ret = self.resource_registry_service.find_resources(RT.InstrumentDevice, None, "name", False)
        self.assertTrue(len(ret[0]) == 1)
        self.assertTrue(ret[0][0]._id == read_obj._id)

        ret = self.resource_registry_service.find_resources(RT.InstrumentDevice, LCS.DRAFT, None, False)
        self.assertTrue(len(ret[0]) == 1)
        self.assertTrue(ret[0][0]._id == read_obj._id)

    # @attr('INT', group='coirr1')
    # class TestResourceRegistry1(IonIntegrationTestCase):
    #
    #    def setUp(self):
    #        # Start container
    #        self._start_container()
    #        self.container.start_rel_from_url('res/deploy/r2coi.yml')
    #
    #        # Now create client to bank service
    #        self.resource_registry_service = ResourceRegistryServiceClient(node=self.container.node)

    def test_attach(self):

        binary = "\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x03\x00\x00\x00(-\x0fS\x00\x00\x00\x03sBIT\x08\x08\x08\xdb\xe1O\xe0\x00\x00\x00~PLTEf3\x00\xfc\xf7\xe0\xee\xcc\x00\xd3\xa0\x00\xcc\x99\x00\xec\xcdc\x9fl\x00\xdd\xb2\x00\xff\xff\xff|I\x00\xf9\xdb\x00\xdd\xb5\x19\xd9\xad\x10\xb6\x83\x00\xf8\xd6\x00\xf2\xc5\x00\xd8\xab\x00n;\x00\xff\xcc\x00\xd6\xa4\t\xeb\xb8\x00\x83Q\x00\xadz\x00\xff\xde\x00\xff\xd6\x00\xd6\xa3\x00\xdf\xaf\x00\xde\xad\x10\xbc\x8e\x00\xec\xbe\x00\xec\xd4d\xff\xe3\x00tA\x00\xf6\xc4\x00\xf6\xce\x00\xa5u\x00\xde\xa5\x00\xf7\xbd\x00\xd6\xad\x08\xdd\xaf\x19\x8cR\x00\xea\xb7\x00\xee\xe9\xdf\xc5\x00\x00\x00\tpHYs\x00\x00\n\xf0\x00\x00\n\xf0\x01B\xac4\x98\x00\x00\x00\x1ctEXtSoftware\x00Adobe Fireworks CS4\x06\xb2\xd3\xa0\x00\x00\x00\x15tEXtCreation Time\x0029/4/09Oq\xfdE\x00\x00\x00\xadIDAT\x18\x95M\x8f\x8d\x0e\x820\x0c\x84;ZdC~f\x07\xb2\x11D\x86\x89\xe8\xfb\xbf\xa0+h\xe2\x97\\\xd2^\x93\xb6\x07:1\x9f)q\x9e\xa5\x06\xad\xd5\x13\x8b\xac,\xb3\x02\x9d\x12C\xa1-\xef;M\x08*\x19\xce\x0e?\x1a\xeb4\xcc\xd4\x0c\x831\x87V\xca\xa1\x1a\xd3\x08@\xe4\xbd\xb7\x15P;\xc8\xd4{\x91\xbf\x11\x90\xffg\xdd\x8di\xfa\xb6\x0bs2Z\xff\xe8yg2\xdc\x11T\x96\xc7\x05\xa5\xef\x96+\xa7\xa59E\xae\xe1\x84cm^1\xa6\xb3\xda\x85\xc8\xd8/\x17se\x0eN^'\x8c\xc7\x8e\x88\xa8\xf6p\x8e\xc2;\xc6.\xd0\x11.\x91o\x12\x7f\xcb\xa5\xfe\x00\x89]\x10:\xf5\x00\x0e\xbf\x00\x00\x00\x00IEND\xaeB`\x82"

        # Owner creation tests
        instrument = IonObject("InstrumentDevice", name="instrument")
        iid, _ = self.resource_registry_service.create(instrument)

        att = Attachment(content=binary, attachment_type=AttachmentType.BLOB)
        aid1 = self.resource_registry_service.create_attachment(iid, att)

        att1 = self.resource_registry_service.read_attachment(aid1)
        self.assertEquals(binary, att1.content)

        import base64

        att = Attachment(content=base64.encodestring(binary), attachment_type=AttachmentType.ASCII)
        aid2 = self.resource_registry_service.create_attachment(iid, att)

        att1 = self.resource_registry_service.read_attachment(aid2)
        self.assertEquals(binary, base64.decodestring(att1.content))

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True)
        self.assertEquals(att_ids, [aid1, aid2])

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True, descending=True)
        self.assertEquals(att_ids, [aid2, aid1])

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True, descending=True, limit=1)
        self.assertEquals(att_ids, [aid2])

        atts = self.resource_registry_service.find_attachments(iid, id_only=False, limit=1)
        self.assertEquals(atts[0].content, att1.content)

        self.resource_registry_service.delete_attachment(aid1)

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True)
        self.assertEquals(att_ids, [aid2])

        self.resource_registry_service.delete_attachment(aid2)

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True)
        self.assertEquals(att_ids, [])
class TestFindRelatedResources(IonIntegrationTestCase, ResourceHelper):
    """
    assembly integration tests at the service level
    """
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.IMS = InstrumentManagementServiceClient(node=self.container.node)
        self.OMS = ObservatoryManagementServiceClient(node=self.container.node)

        self.RR = ResourceRegistryServiceClient(node=self.container.node)

        self.care = {}
        self.dontcare = {}
        self.realtype = {}

#    @unittest.skip('this test just for debugging setup')
#    def test_just_the_setup(self):
#        return

    def create_dummy_structure(self):
        """
        Create two observatories.
         - each observatory has 2 subsites
         - each subsite has 2 more subsites
         - each of those subsites has 2 platform sites
         - each of those platform sites has a model and 2 sub-platform sites
         - each of those sub-platform sites has a model, a matching platform device, and 2 instrument sites
         - each of those instrument sites has a model and matching instrument device

         One of each resource type (observatory all the way down to instrument device/model) is what we "care" about
          - it goes in the self.care dict
         All the rest go in the self.dontcare dict

         To manage subsite/platform multiplicity, we alias them in the dict... the proper hierarchy is:
         Observatory-Site-Subsite-PlatformSite-SubPlatformSite-InstrumentSite

         self.realtype[alias] gives the real resource type of an alias
        """
        self.create_observatory(True)
        self.create_observatory(False)

        for rt in [
                RT.Observatory, RT_SITE, RT.Subsite, RT.PlatformSite,
                RT_SUBPLATFORMSITE, RT.PlatformDevice, RT.PlatformModel,
                RT.InstrumentSite, RT.InstrumentDevice, RT.InstrumentModel
        ]:
            self.assertIn(rt, self.care)

        self.expected_associations = [
            (RT.Observatory, PRED.hasSite, RT_SITE),
            (RT_SITE, PRED.hasSite, RT.Subsite),
            (RT.Subsite, PRED.hasSite, RT.PlatformSite),
            (RT.PlatformSite, PRED.hasSite, RT_SUBPLATFORMSITE),
            (RT_SUBPLATFORMSITE, PRED.hasSite, RT.InstrumentSite),
            (RT_SUBPLATFORMSITE, PRED.hasModel, RT.PlatformModel),
            (RT_SUBPLATFORMSITE, PRED.hasDevice, RT.PlatformDevice),
            (RT.PlatformDevice, PRED.hasModel, RT.PlatformModel),
            (RT.InstrumentSite, PRED.hasModel, RT.InstrumentModel),
            (RT.InstrumentSite, PRED.hasDevice, RT.InstrumentDevice),
            (RT.InstrumentDevice, PRED.hasModel, RT.InstrumentModel)
        ]

        log.info("Verifying created structure")
        for (st, p, ot) in self.expected_associations:
            rst = self.realtype[st]
            rot = self.realtype[ot]
            s = self.care[st]
            o = self.care[ot]
            log.debug("searching %s->%s->%s as %s->%s->%s" %
                      (st, p, ot, rst, p, rot))
            log.debug(" - expecting %s %s" % (rot, o))
            a = self.RR.find_associations(subject=s, predicate=p, object=o)
            if not (0 < len(a) < 3):
                a2 = self.RR.find_associations(subject=s, predicate=p)
                a2content = [("(%s %s)" % (alt.ot, alt.o)) for alt in a2]
                self.fail(
                    "Expected 1-2 associations for %s->%s->%s, got %s: %s" %
                    (st, p, ot, len(a2), a2content))
            self.assertIn(o, [aa.o for aa in a])
        log.info(
            "CREATED STRUCTURE APPEARS CORRECT ==============================="
        )

    def simplify_assn_resource_ids(self, assn_list):
        count = 0

        lookup = {}

        retval = []

        for a in assn_list:
            if a.s not in lookup:
                lookup[a.s] = count
                count += 1
            if a.o not in lookup:
                lookup[a.o] = count
                count += 1
            retval.append(
                DotDict({
                    "s": lookup[a.s],
                    "st": a.st,
                    "p": a.p,
                    "o": lookup[a.o],
                    "ot": a.ot
                }))

        return retval

    def describe_assn_graph(self, assn_list):
        return [("%s %s -> %s -> %s %s" % (a.st, a.s, a.p, a.ot, a.o))
                for a in assn_list]
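
    # Illustration (hypothetical values): applied to a simplified association
    # list, describe_assn_graph yields strings such as
    #   "PlatformSite 0 -> hasDevice -> PlatformDevice 1"
    # where the integers are the compact ids from simplify_assn_resource_ids.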

    #@unittest.skip('refactoring')
    def test_related_resource_crawler(self):
        """

        """
        self.create_dummy_structure()

        r = RelatedResourcesCrawler()

        # test the basic forward-backward searches
        for (st, p, ot) in self.expected_associations:
            rst = self.realtype[st]
            rot = self.realtype[ot]
            s = self.care[st]
            o = self.care[ot]

            test_sto_fn = r.generate_get_related_resources_fn(
                self.RR, [rot], {p: (True, False)})
            sto_crawl = test_sto_fn(s, 1)  # depth of 1
            if 2 < len(sto_crawl):  # we get 2 because of care/dontcare
                self.fail("got %s" % self.describe_assn_graph(
                    self.simplify_assn_resource_ids(sto_crawl)))

            self.assertIn(o, [t.o for t in sto_crawl])

            test_ots_fn = r.generate_get_related_resources_fn(
                self.RR, [rst], {p: (False, True)})
            ots_crawl = test_ots_fn(o, 1)  # depth of 1
            if 1 != len(ots_crawl):
                self.fail("got %s" % self.describe_assn_graph(
                    self.simplify_assn_resource_ids(ots_crawl)))

        # test a nontrivial lookup, in which we extract resources related to an instrument device
        rw = []
        pd = {}

        # we want things related to an instrument device
        rw.append(RT.PlatformModel)
        rw.append(RT.InstrumentModel)
        rw.append(RT.PlatformDevice)
        rw.append(RT.InstrumentSite)
        rw.append(RT.PlatformSite)
        rw.append(RT.Subsite)
        rw.append(RT.Observatory)
        rw.append(RT.InstrumentDevice)
        pd[PRED.hasModel] = (True, True)
        pd[PRED.hasDevice] = (False, True)
        pd[PRED.hasSite] = (False, True)
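        # Each predicate maps to a (walk subject->object, walk object->subject)
        # pair of booleans -- the same convention used by the forward
        # ({p: (True, False)}) and backward ({p: (False, True)}) searches above.
        # Here hasModel is crawled in both directions, while hasDevice and
        # hasSite are only crawled from object to subject (device -> site,
        # child site -> parent site).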

        test_real_fn = r.generate_get_related_resources_fn(
            self.RR, resource_whitelist=rw, predicate_dictionary=pd)
        related = test_real_fn(self.care[RT.InstrumentDevice])

        log.debug("========= Result is:")
        for l in self.describe_assn_graph(
                self.simplify_assn_resource_ids(related)):
            log.debug("    %s", l)

        # check that we only got things we care about
        for a in related:
            # special case for platform model, because we don't care about the top-level platform's model
            #  so it will blow up if we don't ignore it.  if we got an extra platform model, we'd have an
            #  extra platform anyway... so this special case is safe.
            if a.st != RT.PlatformModel:
                self.assertIn(a.s, self.care.values(),
                              "%s %s not cared about" % (a.st, a.s))

            if a.ot != RT.PlatformModel:
                self.assertIn(a.o, self.care.values(),
                              "%s %s not cared about" % (a.ot, a.o))


class ReplayIntegrationIntTest(IonIntegrationTestCase):
    # NOTE: hypothetical host class -- the original enclosing test case for the
    # replay test below is not shown in this excerpt.
    def test_replay_integration(self):
        '''
        Test full DM Services Integration
        '''

        cc = self.container

        ### Everything below here can be run as a script:


        pubsub_management_service = PubsubManagementServiceClient(node=cc.node)
        ingestion_management_service = IngestionManagementServiceClient(node=cc.node)
        dataset_management_service = DatasetManagementServiceClient(node=cc.node)
        data_retriever_service = DataRetrieverServiceClient(node=cc.node)
        resource_registry_service = ResourceRegistryServiceClient(node=cc.node)

        #------------------------------------------------------------------------------------------------------
        # Datastore name
        #------------------------------------------------------------------------------------------------------

        datastore_name = 'test_replay_integration'

        #------------------------------------------------------------------------------------------------------
        # Spawn process
        #------------------------------------------------------------------------------------------------------

        pid = cc.spawn_process(name='dummy_process_for_test',
            module='pyon.ion.process',
            cls='SimpleProcess',
            config={})

        dummy_process = cc.proc_manager.procs[pid]

        #------------------------------------------------------------------------------------------------------
        # Set up the publisher and subscriber registrars
        #------------------------------------------------------------------------------------------------------

        # Normally the user does not see or create the publisher; that is part of the container's business.
        # For the test we need to set it up explicitly.
        publisher_registrar = StreamPublisherRegistrar(process=dummy_process, node=cc.node)
        subscriber_registrar = StreamSubscriberRegistrar(process=cc, node=cc.node)


        #------------------------------------------------------------------------------------------------------
        # Set up ingestion
        #------------------------------------------------------------------------------------------------------

        # Configure ingestion with a single worker, ingesting to the test_replay_integration datastore with the SCIDATA profile
        ingestion_configuration_id = ingestion_management_service.create_ingestion_configuration(
            exchange_point_id='science_data',
            couch_storage=CouchStorage(datastore_name=datastore_name, datastore_profile='SCIDATA'),
            hdf_storage=HdfStorage(),
            number_of_workers=1,
        )

        ingestion_management_service.activate_ingestion_configuration(
            ingestion_configuration_id=ingestion_configuration_id)

        #------------------------------------------------------------------------------------------------------
        # Grab the transforms acting as ingestion workers
        #------------------------------------------------------------------------------------------------------

        transforms = [resource_registry_service.read(assoc.o)
                      for assoc in resource_registry_service.find_associations(ingestion_configuration_id, PRED.hasTransform)]

        proc_1 = cc.proc_manager.procs[transforms[0].process_id]
        log.info("PROCESS 1: %s" % str(proc_1))

        #------------------------------------------------------------------------------------------------------
        # Set up the test hooks for the gevent AsyncResult object
        #------------------------------------------------------------------------------------------------------

        from gevent.event import AsyncResult

        ar = AsyncResult()  # fulfilled once the ingestion worker receives the published packet

        def ingestion_worker_received(message, headers):
            ar.set(message)

        proc_1.ingest_process_test_hook = ingestion_worker_received

        #------------------------------------------------------------------------------------------------------
        # Set up the producers (CTD Simulators)
        #------------------------------------------------------------------------------------------------------

        ctd_stream_def = ctd_stream_definition()

        stream_def_id = pubsub_management_service.create_stream_definition(container=ctd_stream_def, name='Junk definition')


        stream_id = pubsub_management_service.create_stream(stream_definition_id=stream_def_id)

        #------------------------------------------------------------------------------------------------------
        # Set up the dataset config
        #------------------------------------------------------------------------------------------------------


        dataset_id = dataset_management_service.create_dataset(
            stream_id=stream_id,
            datastore_name=datastore_name,
            view_name='datasets/stream_join_granule'
        )

        dataset_config_id = ingestion_management_service.create_dataset_configuration(
            dataset_id = dataset_id,
            archive_data = True,
            archive_metadata = True,
            ingestion_configuration_id = ingestion_configuration_id
        )

        #------------------------------------------------------------------------------------------------------
        # Launch a ctd_publisher
        #------------------------------------------------------------------------------------------------------

        publisher = publisher_registrar.create_publisher(stream_id=stream_id)

        #------------------------------------------------------------------------
        # Create a packet and publish it
        #------------------------------------------------------------------------

        ctd_packet = _create_packet(stream_id)
        published_hdfstring = ctd_packet.identifiables['ctd_data'].values

        publisher.publish(ctd_packet)

        #------------------------------------------------------------------------------------------------------
        # Catch what the ingestion worker gets; it is compared against the published packet at the end of the test
        #------------------------------------------------------------------------------------------------------

        packet = ar.get(timeout=2)

        #------------------------------------------------------------------------------------------------------
        # Create subscriber to listen to the replays
        #------------------------------------------------------------------------------------------------------

        replay_id, replay_stream_id = data_retriever_service.define_replay(dataset_id)

        query = StreamQuery(stream_ids=[replay_stream_id])

        subscription_id = pubsub_management_service.create_subscription(query=query, exchange_name='replay_capture_point', name='replay_capture_point')

        # It is not required, or even generally a good idea, to use the subscription resource name as the queue name, but it keeps things simple here.
        # Normally the container creates and starts subscribers for you when a transform process is spawned.
        ar2 = AsyncResult()  # fulfilled once the replay subscriber receives the replayed packet

        def _subscriber_call_back(message, headers):
            # hand the hdf string carried by the replayed packet back to the test
            ar2.set(message.identifiables['ctd_data'].values)

        subscriber = subscriber_registrar.create_subscriber(exchange_name='replay_capture_point', callback=_subscriber_call_back)
        subscriber.start()

        pubsub_management_service.activate_subscription(subscription_id)

        #------------------------------------------------------------------------------------------------------
        # Start the replay
        #------------------------------------------------------------------------------------------------------

        data_retriever_service.start_replay(replay_id)

        #------------------------------------------------------------------------------------------------------
        # Get the hdf string from the captured stream in the replay
        #------------------------------------------------------------------------------------------------------

        retrieved_hdf_string = ar2.get(timeout=2)


        ### Non scriptable portion of the test

        #------------------------------------------------------------------------------------------------------
        # Assert that it matches the message we sent
        #------------------------------------------------------------------------------------------------------

        self.assertEquals(packet.identifiables['stream_encoding'].sha1, ctd_packet.identifiables['stream_encoding'].sha1)


        self.assertEquals(retrieved_hdf_string, published_hdfstring)
class PubsubManagementIntTest(IonIntegrationTestCase):

    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.pubsub_management  = PubsubManagementServiceClient()
        self.resource_registry  = ResourceRegistryServiceClient()
        self.dataset_management = DatasetManagementServiceClient()


        self.queue_cleanup = list()
        self.exchange_cleanup = list()

    def tearDown(self):
        for queue in self.queue_cleanup:
            xn = self.container.ex_manager.create_xn_queue(queue)
            xn.delete()
        for exchange in self.exchange_cleanup:
            xp = self.container.ex_manager.create_xp(exchange)
            xp.delete()

    def test_stream_def_crud(self):

        # Test Creation
        pdict = DatasetManagementService.get_parameter_dictionary_by_name('ctd_parsed_param_dict')
        stream_definition_id = self.pubsub_management.create_stream_definition('ctd parsed', parameter_dictionary_id=pdict.identifier)

        # Make sure there is an assoc
        self.assertTrue(self.resource_registry.find_associations(subject=stream_definition_id, predicate=PRED.hasParameterDictionary, object=pdict.identifier, id_only=True))

        # Test Reading
        stream_definition = self.pubsub_management.read_stream_definition(stream_definition_id)
        self.assertTrue(PubsubManagementService._compare_pdicts(pdict.dump(), stream_definition.parameter_dictionary))

        # Test Deleting
        self.pubsub_management.delete_stream_definition(stream_definition_id)
        self.assertFalse(self.resource_registry.find_associations(subject=stream_definition_id, predicate=PRED.hasParameterDictionary, object=pdict.identifier, id_only=True))


        # Test comparisons
        in_stream_definition_id = self.pubsub_management.create_stream_definition('L0 products', parameter_dictionary=pdict.identifier, available_fields=['time','temp','conductivity','pressure'])
        self.addCleanup(self.pubsub_management.delete_stream_definition, in_stream_definition_id)

        out_stream_definition_id = in_stream_definition_id
        self.assertTrue(self.pubsub_management.compare_stream_definition(in_stream_definition_id, out_stream_definition_id))
        self.assertTrue(self.pubsub_management.compatible_stream_definitions(in_stream_definition_id, out_stream_definition_id))

        out_stream_definition_id = self.pubsub_management.create_stream_definition('L2 Products', parameter_dictionary=pdict.identifier, available_fields=['time','salinity','density'])
        self.addCleanup(self.pubsub_management.delete_stream_definition, out_stream_definition_id)
        self.assertFalse(self.pubsub_management.compare_stream_definition(in_stream_definition_id, out_stream_definition_id))

        self.assertTrue(self.pubsub_management.compatible_stream_definitions(in_stream_definition_id, out_stream_definition_id))
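
        # Semantics exercised above: compare_stream_definition is a strict
        # comparison (the L0 and L2 definitions differ, hence False), while
        # compatible_stream_definitions tolerates differing available_fields
        # drawn from the same parameter dictionary, hence still True.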




    def publish_on_stream(self, stream_id, msg):
        """Helper: publish msg on the given stream using its registered route."""
        stream = self.pubsub_management.read_stream(stream_id)
        stream_route = stream.stream_route
        publisher = StandaloneStreamPublisher(stream_id=stream_id, stream_route=stream_route)
        publisher.publish(msg)

    def test_stream_crud(self):
        stream_def_id = self.pubsub_management.create_stream_definition('test_definition', stream_type='stream')
        topic_id = self.pubsub_management.create_topic(name='test_topic', exchange_point='test_exchange')
        self.exchange_cleanup.append('test_exchange')
        topic2_id = self.pubsub_management.create_topic(name='another_topic', exchange_point='outside')
        stream_id, route = self.pubsub_management.create_stream(name='test_stream', topic_ids=[topic_id, topic2_id], exchange_point='test_exchange', stream_definition_id=stream_def_id)

        topics, assocs = self.resource_registry.find_objects(subject=stream_id, predicate=PRED.hasTopic, id_only=True)
        self.assertEquals(topics,[topic_id])

        defs, assocs = self.resource_registry.find_objects(subject=stream_id, predicate=PRED.hasStreamDefinition, id_only=True)
        self.assertTrue(len(defs))

        stream = self.pubsub_management.read_stream(stream_id)
        self.assertEquals(stream.name,'test_stream')
        self.pubsub_management.delete_stream(stream_id)
        
        with self.assertRaises(NotFound):
            self.pubsub_management.read_stream(stream_id)

        defs, assocs = self.resource_registry.find_objects(subject=stream_id, predicate=PRED.hasStreamDefinition, id_only=True)
        self.assertFalse(len(defs))

        topics, assocs = self.resource_registry.find_objects(subject=stream_id, predicate=PRED.hasTopic, id_only=True)
        self.assertFalse(len(topics))

        self.pubsub_management.delete_topic(topic_id)
        self.pubsub_management.delete_topic(topic2_id)
        self.pubsub_management.delete_stream_definition(stream_def_id)


    def test_subscription_crud(self):
        stream_def_id = self.pubsub_management.create_stream_definition('test_definition', stream_type='stream')
        stream_id, route = self.pubsub_management.create_stream(name='test_stream', exchange_point='test_exchange', stream_definition_id=stream_def_id)
        subscription_id = self.pubsub_management.create_subscription(name='test subscription', stream_ids=[stream_id], exchange_name='test_queue')
        self.exchange_cleanup.append('test_exchange')

        subs, assocs = self.resource_registry.find_objects(subject=subscription_id,predicate=PRED.hasStream,id_only=True)
        self.assertEquals(subs,[stream_id])

        res, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name='test_queue', id_only=True)
        self.assertEquals(len(res),1)

        subs, assocs = self.resource_registry.find_subjects(object=subscription_id, predicate=PRED.hasSubscription, id_only=True)
        self.assertEquals(subs[0], res[0])

        subscription = self.pubsub_management.read_subscription(subscription_id)
        self.assertEquals(subscription.exchange_name, 'test_queue')

        self.pubsub_management.delete_subscription(subscription_id)
        
        subs, assocs = self.resource_registry.find_objects(subject=subscription_id,predicate=PRED.hasStream,id_only=True)
        self.assertFalse(len(subs))

        subs, assocs = self.resource_registry.find_subjects(object=subscription_id, predicate=PRED.hasSubscription, id_only=True)
        self.assertFalse(len(subs))


        self.pubsub_management.delete_stream(stream_id)
        self.pubsub_management.delete_stream_definition(stream_def_id)

    def test_move_before_activate(self):
        stream_id, route = self.pubsub_management.create_stream(name='test_stream', exchange_point='test_xp')

        #--------------------------------------------------------------------------------
        # Test moving before activate
        #--------------------------------------------------------------------------------

        subscription_id = self.pubsub_management.create_subscription('first_queue', stream_ids=[stream_id])

        xn_ids, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name='first_queue', id_only=True)
        subjects, _ = self.resource_registry.find_subjects(object=subscription_id, predicate=PRED.hasSubscription, id_only=True)
        self.assertEquals(xn_ids[0], subjects[0])

        self.pubsub_management.move_subscription(subscription_id, exchange_name='second_queue')

        xn_ids, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name='second_queue', id_only=True)
        subjects, _ = self.resource_registry.find_subjects(object=subscription_id, predicate=PRED.hasSubscription, id_only=True)

        self.assertEquals(len(subjects),1)
        self.assertEquals(subjects[0], xn_ids[0])

        self.pubsub_management.delete_subscription(subscription_id)
        self.pubsub_management.delete_stream(stream_id)

    def test_move_activated_subscription(self):

        stream_id, route = self.pubsub_management.create_stream(name='test_stream', exchange_point='test_xp')
        #--------------------------------------------------------------------------------
        # Test moving after activate
        #--------------------------------------------------------------------------------

        subscription_id = self.pubsub_management.create_subscription('first_queue', stream_ids=[stream_id])
        self.pubsub_management.activate_subscription(subscription_id)

        xn_ids, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name='first_queue', id_only=True)
        subjects, _ = self.resource_registry.find_subjects(object=subscription_id, predicate=PRED.hasSubscription, id_only=True)
        self.assertEquals(xn_ids[0], subjects[0])

        self.verified = Event()

        def verify(m,r,s):
            self.assertEquals(m,'verified')
            self.verified.set()

        subscriber = StandaloneStreamSubscriber('second_queue', verify)
        subscriber.start()

        self.pubsub_management.move_subscription(subscription_id, exchange_name='second_queue')

        xn_ids, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name='second_queue', id_only=True)
        subjects, _ = self.resource_registry.find_subjects(object=subscription_id, predicate=PRED.hasSubscription, id_only=True)

        self.assertEquals(len(subjects),1)
        self.assertEquals(subjects[0], xn_ids[0])

        publisher = StandaloneStreamPublisher(stream_id, route)
        publisher.publish('verified')

        self.assertTrue(self.verified.wait(2))

        self.pubsub_management.deactivate_subscription(subscription_id)

        self.pubsub_management.delete_subscription(subscription_id)
        self.pubsub_management.delete_stream(stream_id)

    def test_queue_cleanup(self):
        stream_id, route = self.pubsub_management.create_stream('test_stream','xp1')
        xn_objs, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name='queue1')
        for xn_obj in xn_objs:
            xn = self.container.ex_manager.create_xn_queue(xn_obj.name)
            xn.delete()
        subscription_id = self.pubsub_management.create_subscription('queue1',stream_ids=[stream_id])
        xn_ids, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name='queue1')
        self.assertEquals(len(xn_ids),1)

        self.pubsub_management.delete_subscription(subscription_id)

        xn_ids, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name='queue1')
        self.assertEquals(len(xn_ids),0)

    def test_activation_and_deactivation(self):
        stream_id, route = self.pubsub_management.create_stream('stream1','xp1')
        subscription_id = self.pubsub_management.create_subscription('sub1', stream_ids=[stream_id])

        self.check1 = Event()

        def verifier(m,r,s):
            self.check1.set()


        subscriber = StandaloneStreamSubscriber('sub1',verifier)
        subscriber.start()

        publisher = StandaloneStreamPublisher(stream_id, route)
        publisher.publish('should not receive')

        self.assertFalse(self.check1.wait(0.25))

        self.pubsub_management.activate_subscription(subscription_id)

        publisher.publish('should receive')
        self.assertTrue(self.check1.wait(2))

        self.check1.clear()
        self.assertFalse(self.check1.is_set())

        self.pubsub_management.deactivate_subscription(subscription_id)

        publisher.publish('should not receive')
        self.assertFalse(self.check1.wait(0.5))

        self.pubsub_management.activate_subscription(subscription_id)

        publisher.publish('should receive')
        self.assertTrue(self.check1.wait(2))

        subscriber.stop()

        self.pubsub_management.deactivate_subscription(subscription_id)
        self.pubsub_management.delete_subscription(subscription_id)
        self.pubsub_management.delete_stream(stream_id)

        

    def test_topic_crud(self):

        topic_id = self.pubsub_management.create_topic(name='test_topic', exchange_point='test_xp')
        self.exchange_cleanup.append('test_xp')

        topic = self.pubsub_management.read_topic(topic_id)

        self.assertEquals(topic.name,'test_topic')
        self.assertEquals(topic.exchange_point, 'test_xp')

        self.pubsub_management.delete_topic(topic_id)
        with self.assertRaises(NotFound):
            self.pubsub_management.read_topic(topic_id)

    def test_full_pubsub(self):

        self.sub1_sat = Event()
        self.sub2_sat = Event()

        def subscriber1(m,r,s):
            self.sub1_sat.set()

        def subscriber2(m,r,s):
            self.sub2_sat.set()

        sub1 = StandaloneStreamSubscriber('sub1', subscriber1)
        self.queue_cleanup.append(sub1.xn.queue)
        sub1.start()

        sub2 = StandaloneStreamSubscriber('sub2', subscriber2)
        self.queue_cleanup.append(sub2.xn.queue)
        sub2.start()

        log_topic = self.pubsub_management.create_topic('instrument_logs', exchange_point='instruments')
        science_topic = self.pubsub_management.create_topic('science_data', exchange_point='instruments')
        events_topic = self.pubsub_management.create_topic('notifications', exchange_point='events')


        log_stream, route = self.pubsub_management.create_stream('instrument1-logs', topic_ids=[log_topic], exchange_point='instruments')
        ctd_stream, route = self.pubsub_management.create_stream('instrument1-ctd', topic_ids=[science_topic], exchange_point='instruments')
        event_stream, route = self.pubsub_management.create_stream('notifications', topic_ids=[events_topic], exchange_point='events')
        raw_stream, route = self.pubsub_management.create_stream('temp', exchange_point='global.data')
        self.exchange_cleanup.extend(['instruments','events','global.data'])


        subscription1 = self.pubsub_management.create_subscription('subscription1', stream_ids=[log_stream,event_stream], exchange_name='sub1')
        subscription2 = self.pubsub_management.create_subscription('subscription2', exchange_points=['global.data'], stream_ids=[ctd_stream], exchange_name='sub2')

        self.pubsub_management.activate_subscription(subscription1)
        self.pubsub_management.activate_subscription(subscription2)

        self.publish_on_stream(log_stream, 1)
        self.assertTrue(self.sub1_sat.wait(4))
        self.assertFalse(self.sub2_sat.is_set())

        # raw_stream has no stream-id subscription, but subscription2 listens on
        # the whole 'global.data' exchange point, so sub2 is the one that fires
        self.publish_on_stream(raw_stream, 1)
        self.assertTrue(self.sub2_sat.wait(4))

        sub1.stop()
        sub2.stop()


    def test_topic_craziness(self):

        self.msg_queue = Queue()

        def subscriber1(m,r,s):
            self.msg_queue.put(m)

        sub1 = StandaloneStreamSubscriber('sub1', subscriber1)
        self.queue_cleanup.append(sub1.xn.queue)
        sub1.start()

        topic1 = self.pubsub_management.create_topic('topic1', exchange_point='xp1')
        topic2 = self.pubsub_management.create_topic('topic2', exchange_point='xp1', parent_topic_id=topic1)
        topic3 = self.pubsub_management.create_topic('topic3', exchange_point='xp1', parent_topic_id=topic1)
        topic4 = self.pubsub_management.create_topic('topic4', exchange_point='xp1', parent_topic_id=topic2)
        topic5 = self.pubsub_management.create_topic('topic5', exchange_point='xp1', parent_topic_id=topic2)
        topic6 = self.pubsub_management.create_topic('topic6', exchange_point='xp1', parent_topic_id=topic3)
        topic7 = self.pubsub_management.create_topic('topic7', exchange_point='xp1', parent_topic_id=topic3)

        # Tree 2
        topic8 = self.pubsub_management.create_topic('topic8', exchange_point='xp2')
        topic9 = self.pubsub_management.create_topic('topic9', exchange_point='xp2', parent_topic_id=topic8)
        topic10 = self.pubsub_management.create_topic('topic10', exchange_point='xp2', parent_topic_id=topic9)
        topic11 = self.pubsub_management.create_topic('topic11', exchange_point='xp2', parent_topic_id=topic9)
        topic12 = self.pubsub_management.create_topic('topic12', exchange_point='xp2', parent_topic_id=topic11)
        topic13 = self.pubsub_management.create_topic('topic13', exchange_point='xp2', parent_topic_id=topic11)
        self.exchange_cleanup.extend(['xp1','xp2'])
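        # Topic trees just built (from the parent_topic_id arguments above):
        #
        #   xp1: topic1                  xp2: topic8
        #        |- topic2                    `- topic9
        #        |   |- topic4                    |- topic10
        #        |   `- topic5                    `- topic11
        #        `- topic3                            |- topic12
        #            |- topic6                        `- topic13
        #            `- topic7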
        
        stream1_id, route = self.pubsub_management.create_stream('stream1', topic_ids=[topic7, topic4, topic5], exchange_point='xp1')
        stream2_id, route = self.pubsub_management.create_stream('stream2', topic_ids=[topic8], exchange_point='xp2')
        stream3_id, route = self.pubsub_management.create_stream('stream3', topic_ids=[topic10,topic13], exchange_point='xp2')
        stream4_id, route = self.pubsub_management.create_stream('stream4', topic_ids=[topic9], exchange_point='xp2')
        stream5_id, route = self.pubsub_management.create_stream('stream5', topic_ids=[topic11], exchange_point='xp2')

        subscription1 = self.pubsub_management.create_subscription('sub1', topic_ids=[topic1])
        subscription2 = self.pubsub_management.create_subscription('sub2', topic_ids=[topic8], exchange_name='sub1')
        subscription3 = self.pubsub_management.create_subscription('sub3', topic_ids=[topic9], exchange_name='sub1')
        subscription4 = self.pubsub_management.create_subscription('sub4', topic_ids=[topic10,topic13, topic11], exchange_name='sub1')
        #--------------------------------------------------------------------------------
        self.pubsub_management.activate_subscription(subscription1)

        self.publish_on_stream(stream1_id,1)

        self.assertEquals(self.msg_queue.get(timeout=10), 1)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.1)


        self.pubsub_management.deactivate_subscription(subscription1)
        self.pubsub_management.delete_subscription(subscription1)
        #--------------------------------------------------------------------------------
        self.pubsub_management.activate_subscription(subscription2)
        
        self.publish_on_stream(stream2_id,2)
        self.assertEquals(self.msg_queue.get(timeout=10), 2)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.1)

        self.pubsub_management.deactivate_subscription(subscription2)
        self.pubsub_management.delete_subscription(subscription2)

        #--------------------------------------------------------------------------------
        self.pubsub_management.activate_subscription(subscription3)

        self.publish_on_stream(stream2_id, 3)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.3)

        self.publish_on_stream(stream3_id, 4)
        self.assertEquals(self.msg_queue.get(timeout=10),4)


        self.pubsub_management.deactivate_subscription(subscription3)
        self.pubsub_management.delete_subscription(subscription3)

        #--------------------------------------------------------------------------------
        self.pubsub_management.activate_subscription(subscription4)

        self.publish_on_stream(stream4_id, 5)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.3)

        self.publish_on_stream(stream5_id, 6)
        self.assertEquals(self.msg_queue.get(timeout=10),6)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.3)

        self.pubsub_management.deactivate_subscription(subscription4)
        self.pubsub_management.delete_subscription(subscription4)
        
        #--------------------------------------------------------------------------------
        sub1.stop()

        self.pubsub_management.delete_topic(topic13)
        self.pubsub_management.delete_topic(topic12)
        self.pubsub_management.delete_topic(topic11)
        self.pubsub_management.delete_topic(topic10)
        self.pubsub_management.delete_topic(topic9)
        self.pubsub_management.delete_topic(topic8)
        self.pubsub_management.delete_topic(topic7)
        self.pubsub_management.delete_topic(topic6)
        self.pubsub_management.delete_topic(topic5)
        self.pubsub_management.delete_topic(topic4)
        self.pubsub_management.delete_topic(topic3)
        self.pubsub_management.delete_topic(topic2)
        self.pubsub_management.delete_topic(topic1)

        self.pubsub_management.delete_stream(stream1_id)
        self.pubsub_management.delete_stream(stream2_id)
        self.pubsub_management.delete_stream(stream3_id)
        self.pubsub_management.delete_stream(stream4_id)
        self.pubsub_management.delete_stream(stream5_id)

    def test_validate_stream_defs(self):

        # test no input
        incoming_pdict_id = self._get_pdict(["time", "lat", "lon", "TEMPWAT_L0", "CONDWAT_L0", "PRESWAT_L0"])
        outgoing_pdict_id = self._get_pdict(["DENSITY", "PRACSAL", "TEMPWAT_L1", "CONDWAT_L1", "PRESWAT_L1"])
        available_fields_in = []
        available_fields_out = []
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            "in_sd_0", parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in
        )
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            "out_sd_0", parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out
        )
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertFalse(result)
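        # Reading of the case above: validate_stream_defs asks whether the
        # outgoing available fields can be derived from the incoming ones; with
        # no incoming fields declared, nothing can be derived, hence False.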

        # test input with no output
        incoming_pdict_id = self._get_pdict(["time", "lat", "lon", "TEMPWAT_L0", "CONDWAT_L0", "PRESWAT_L0"])
        outgoing_pdict_id = self._get_pdict(["DENSITY", "PRACSAL", "TEMPWAT_L1", "CONDWAT_L1", "PRESWAT_L1"])
        available_fields_in = ["time", "lat", "lon", "TEMPWAT_L0", "CONDWAT_L0", "PRESWAT_L0"]
        available_fields_out = []
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            "in_sd_1", parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in
        )
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            "out_sd_1", parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out
        )
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertTrue(result)

        # test available field missing parameter context definition -- missing PRESWAT_L0
        incoming_pdict_id = self._get_pdict(["time", "lat", "lon", "TEMPWAT_L0", "CONDWAT_L0"])
        outgoing_pdict_id = self._get_pdict(["DENSITY", "PRACSAL", "TEMPWAT_L1", "CONDWAT_L1", "PRESWAT_L1"])
        available_fields_in = ["time", "lat", "lon", "TEMPWAT_L0", "CONDWAT_L0", "PRESWAT_L0"]
        available_fields_out = ["DENSITY"]
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            "in_sd_2", parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in
        )
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            "out_sd_2", parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out
        )
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertFalse(result)

        # test l1 from l0
        incoming_pdict_id = self._get_pdict(["time", "lat", "lon", "TEMPWAT_L0", "CONDWAT_L0", "PRESWAT_L0"])
        outgoing_pdict_id = self._get_pdict(["TEMPWAT_L1", "CONDWAT_L1", "PRESWAT_L1"])
        available_fields_in = ["time", "lat", "lon", "TEMPWAT_L0", "CONDWAT_L0", "PRESWAT_L0"]
        available_fields_out = ["TEMPWAT_L1", "CONDWAT_L1", "PRESWAT_L1"]
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            "in_sd_3", parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in
        )
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            "out_sd_3", parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out
        )
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertTrue(result)

        # test l2 from l0
        incoming_pdict_id = self._get_pdict(["time", "lat", "lon", "TEMPWAT_L0", "CONDWAT_L0", "PRESWAT_L0"])
        outgoing_pdict_id = self._get_pdict(["TEMPWAT_L1", "CONDWAT_L1", "PRESWAT_L1", "DENSITY", "PRACSAL"])
        available_fields_in = ["time", "lat", "lon", "TEMPWAT_L0", "CONDWAT_L0", "PRESWAT_L0"]
        available_fields_out = ["DENSITY", "PRACSAL"]
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            "in_sd_4", parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in
        )
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            "out_sd_4", parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out
        )
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertTrue(result)

        # test Ln from L0
        incoming_pdict_id = self._get_pdict(["time", "lat", "lon", "TEMPWAT_L0", "CONDWAT_L0", "PRESWAT_L0"])
        outgoing_pdict_id = self._get_pdict(["DENSITY", "PRACSAL", "TEMPWAT_L1", "CONDWAT_L1", "PRESWAT_L1"])
        available_fields_in = ["time", "lat", "lon", "TEMPWAT_L0", "CONDWAT_L0", "PRESWAT_L0"]
        available_fields_out = ["DENSITY", "PRACSAL", "TEMPWAT_L1", "CONDWAT_L1", "PRESWAT_L1"]
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            "in_sd_5", parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in
        )
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            "out_sd_5", parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out
        )
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertTrue(result)

        # test L2 from L1
        incoming_pdict_id = self._get_pdict(["time", "lat", "lon", "TEMPWAT_L1", "CONDWAT_L1", "PRESWAT_L1"])
        outgoing_pdict_id = self._get_pdict(["DENSITY", "PRACSAL", "TEMPWAT_L1", "CONDWAT_L1", "PRESWAT_L1"])
        available_fields_in = ["time", "lat", "lon", "TEMPWAT_L1", "CONDWAT_L1", "PRESWAT_L1"]
        available_fields_out = ["DENSITY", "PRACSAL"]
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            "in_sd_6", parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in
        )
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            "out_sd_6", parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out
        )
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertTrue(result)

        # test L1 from L0 missing L0
        incoming_pdict_id = self._get_pdict(["time", "lat", "lon"])
        outgoing_pdict_id = self._get_pdict(["TEMPWAT_L1", "CONDWAT_L1", "PRESWAT_L1"])
        available_fields_in = ["time", "lat", "lon"]
        available_fields_out = ["DENSITY", "PRACSAL"]
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            "in_sd_7", parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in
        )
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            "out_sd_7", parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out
        )
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertFalse(result)

        # test L2 from L0 missing L0
        incoming_pdict_id = self._get_pdict(["time", "lat", "lon"])
        outgoing_pdict_id = self._get_pdict(["DENSITY", "PRACSAL", "TEMPWAT_L1", "CONDWAT_L1", "PRESWAT_L1"])
        available_fields_in = ["time", "lat", "lon"]
        available_fields_out = ["DENSITY", "PRACSAL"]
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            "in_sd_8", parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in
        )
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            "out_sd_8", parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out
        )
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertFalse(result)

        # test L2 from L0 missing L1
        incoming_pdict_id = self._get_pdict(["time", "lat", "lon", "TEMPWAT_L0", "CONDWAT_L0", "PRESWAT_L0"])
        outgoing_pdict_id = self._get_pdict(["DENSITY", "PRACSAL"])
        available_fields_in = ["time", "lat", "lon", "TEMPWAT_L0", "CONDWAT_L0", "PRESWAT_L0"]
        available_fields_out = ["DENSITY", "PRACSAL"]
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            "in_sd_9", parameter_dictionary_id=incoming_pdict_id, available_fields=available_fields_in
        )
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            "out_sd_9", parameter_dictionary_id=outgoing_pdict_id, available_fields=available_fields_out
        )
        result = self.pubsub_management.validate_stream_defs(incoming_stream_def_id, outgoing_stream_def_id)
        self.assertFalse(result)

    def publish_on_stream(self, stream_id, msg):
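        # Look up the stream's route in the resource registry and publish a raw
        # message on it with a standalone publisher.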
        stream = self.pubsub_management.read_stream(stream_id)
        stream_route = stream.stream_route
        publisher = StandaloneStreamPublisher(stream_id=stream_id, stream_route=stream_route)
        publisher.publish(msg)

    def test_stream_crud(self):
        stream_def_id = self.pubsub_management.create_stream_definition("test_definition", stream_type="stream")
        topic_id = self.pubsub_management.create_topic(name="test_topic", exchange_point="test_exchange")
        self.exchange_cleanup.append("test_exchange")
        topic2_id = self.pubsub_management.create_topic(name="another_topic", exchange_point="outside")
        stream_id, route = self.pubsub_management.create_stream(
            name="test_stream",
            topic_ids=[topic_id, topic2_id],
            exchange_point="test_exchange",
            stream_definition_id=stream_def_id,
        )

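        # topic2 lives on a different exchange point ('outside'), so only
        # topic_id should end up associated with the stream.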
        topics, assocs = self.resource_registry.find_objects(subject=stream_id, predicate=PRED.hasTopic, id_only=True)
        self.assertEquals(topics, [topic_id])

        defs, assocs = self.resource_registry.find_objects(
            subject=stream_id, predicate=PRED.hasStreamDefinition, id_only=True
        )
        self.assertTrue(len(defs))

        stream = self.pubsub_management.read_stream(stream_id)
        self.assertEquals(stream.name, "test_stream")
        self.pubsub_management.delete_stream(stream_id)

        with self.assertRaises(NotFound):
            self.pubsub_management.read_stream(stream_id)

        defs, assocs = self.resource_registry.find_objects(
            subject=stream_id, predicate=PRED.hasStreamDefinition, id_only=True
        )
        self.assertFalse(len(defs))

        topics, assocs = self.resource_registry.find_objects(subject=stream_id, predicate=PRED.hasTopic, id_only=True)
        self.assertFalse(len(topics))

        self.pubsub_management.delete_topic(topic_id)
        self.pubsub_management.delete_topic(topic2_id)
        self.pubsub_management.delete_stream_definition(stream_def_id)

    def test_subscription_crud(self):
        stream_def_id = self.pubsub_management.create_stream_definition("test_definition", stream_type="stream")
        stream_id, route = self.pubsub_management.create_stream(
            name="test_stream", exchange_point="test_exchange", stream_definition_id=stream_def_id
        )
        subscription_id = self.pubsub_management.create_subscription(
            name="test subscription", stream_ids=[stream_id], exchange_name="test_queue"
        )
        self.exchange_cleanup.append("test_exchange")

        subs, assocs = self.resource_registry.find_objects(
            subject=subscription_id, predicate=PRED.hasStream, id_only=True
        )
        self.assertEquals(subs, [stream_id])

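        # Creating the subscription also registers an ExchangeName resource for
        # 'test_queue' and links it to the subscription via hasSubscription.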
        res, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name="test_queue", id_only=True)
        self.assertEquals(len(res), 1)

        subs, assocs = self.resource_registry.find_subjects(
            object=subscription_id, predicate=PRED.hasSubscription, id_only=True
        )
        self.assertEquals(subs[0], res[0])

        subscription = self.pubsub_management.read_subscription(subscription_id)
        self.assertEquals(subscription.exchange_name, "test_queue")

        self.pubsub_management.delete_subscription(subscription_id)

        subs, assocs = self.resource_registry.find_objects(
            subject=subscription_id, predicate=PRED.hasStream, id_only=True
        )
        self.assertFalse(len(subs))

        subs, assocs = self.resource_registry.find_subjects(
            object=subscription_id, predicate=PRED.hasSubscription, id_only=True
        )
        self.assertFalse(len(subs))

        self.pubsub_management.delete_stream(stream_id)
        self.pubsub_management.delete_stream_definition(stream_def_id)

    def test_move_before_activate(self):
        stream_id, route = self.pubsub_management.create_stream(name="test_stream", exchange_point="test_xp")

        # --------------------------------------------------------------------------------
        # Test moving before activate
        # --------------------------------------------------------------------------------

        subscription_id = self.pubsub_management.create_subscription("first_queue", stream_ids=[stream_id])

        xn_ids, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name="first_queue", id_only=True)
        subjects, _ = self.resource_registry.find_subjects(
            object=subscription_id, predicate=PRED.hasSubscription, id_only=True
        )
        self.assertEquals(xn_ids[0], subjects[0])

        self.pubsub_management.move_subscription(subscription_id, exchange_name="second_queue")

        xn_ids, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name="second_queue", id_only=True)
        subjects, _ = self.resource_registry.find_subjects(
            object=subscription_id, predicate=PRED.hasSubscription, id_only=True
        )

        self.assertEquals(len(subjects), 1)
        self.assertEquals(subjects[0], xn_ids[0])

        self.pubsub_management.delete_subscription(subscription_id)
        self.pubsub_management.delete_stream(stream_id)

    def test_move_activated_subscription(self):

        stream_id, route = self.pubsub_management.create_stream(name="test_stream", exchange_point="test_xp")
        # --------------------------------------------------------------------------------
        # Test moving after activate
        # --------------------------------------------------------------------------------

        subscription_id = self.pubsub_management.create_subscription("first_queue", stream_ids=[stream_id])
        self.pubsub_management.activate_subscription(subscription_id)

        xn_ids, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name="first_queue", id_only=True)
        subjects, _ = self.resource_registry.find_subjects(
            object=subscription_id, predicate=PRED.hasSubscription, id_only=True
        )
        self.assertEquals(xn_ids[0], subjects[0])

        self.verified = Event()

        def verify(m, r, s):
            self.assertEquals(m, "verified")
            self.verified.set()

        subscriber = StandaloneStreamSubscriber("second_queue", verify)
        subscriber.start()

        self.pubsub_management.move_subscription(subscription_id, exchange_name="second_queue")

        xn_ids, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name="second_queue", id_only=True)
        subjects, _ = self.resource_registry.find_subjects(
            object=subscription_id, predicate=PRED.hasSubscription, id_only=True
        )

        self.assertEquals(len(subjects), 1)
        self.assertEquals(subjects[0], xn_ids[0])

        publisher = StandaloneStreamPublisher(stream_id, route)
        publisher.publish("verified")

        self.assertTrue(self.verified.wait(2))

        self.pubsub_management.deactivate_subscription(subscription_id)

        self.pubsub_management.delete_subscription(subscription_id)
        self.pubsub_management.delete_stream(stream_id)

    def test_queue_cleanup(self):
        stream_id, route = self.pubsub_management.create_stream("test_stream", "xp1")
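        # Delete any 'queue1' broker queues left over from previous runs so the
        # ExchangeName counts below reflect only this test.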
        xn_objs, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name="queue1")
        for xn_obj in xn_objs:
            xn = self.container.ex_manager.create_xn_queue(xn_obj.name)
            xn.delete()
        subscription_id = self.pubsub_management.create_subscription("queue1", stream_ids=[stream_id])
        xn_ids, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name="queue1")
        self.assertEquals(len(xn_ids), 1)

        self.pubsub_management.delete_subscription(subscription_id)

        xn_ids, _ = self.resource_registry.find_resources(restype=RT.ExchangeName, name="queue1")
        self.assertEquals(len(xn_ids), 0)

    def test_activation_and_deactivation(self):
        stream_id, route = self.pubsub_management.create_stream("stream1", "xp1")
        subscription_id = self.pubsub_management.create_subscription("sub1", stream_ids=[stream_id])

        self.check1 = Event()

        def verifier(m, r, s):
            self.check1.set()

        subscriber = StandaloneStreamSubscriber("sub1", verifier)
        subscriber.start()

        publisher = StandaloneStreamPublisher(stream_id, route)
        publisher.publish("should not receive")

        self.assertFalse(self.check1.wait(0.25))

        self.pubsub_management.activate_subscription(subscription_id)

        publisher.publish("should receive")
        self.assertTrue(self.check1.wait(2))

        self.check1.clear()
        self.assertFalse(self.check1.is_set())

        self.pubsub_management.deactivate_subscription(subscription_id)

        publisher.publish("should not receive")
        self.assertFalse(self.check1.wait(0.5))

        self.pubsub_management.activate_subscription(subscription_id)

        publisher.publish("should receive")
        self.assertTrue(self.check1.wait(2))

        subscriber.stop()

        self.pubsub_management.deactivate_subscription(subscription_id)
        self.pubsub_management.delete_subscription(subscription_id)
        self.pubsub_management.delete_stream(stream_id)

    def test_topic_crud(self):

        topic_id = self.pubsub_management.create_topic(name="test_topic", exchange_point="test_xp")
        self.exchange_cleanup.append("test_xp")

        topic = self.pubsub_management.read_topic(topic_id)

        self.assertEquals(topic.name, "test_topic")
        self.assertEquals(topic.exchange_point, "test_xp")

        self.pubsub_management.delete_topic(topic_id)
        with self.assertRaises(NotFound):
            self.pubsub_management.read_topic(topic_id)

    def test_full_pubsub(self):

        self.sub1_sat = Event()
        self.sub2_sat = Event()

        def subscriber1(m, r, s):
            self.sub1_sat.set()

        def subscriber2(m, r, s):
            self.sub2_sat.set()

        sub1 = StandaloneStreamSubscriber("sub1", subscriber1)
        self.queue_cleanup.append(sub1.xn.queue)
        sub1.start()

        sub2 = StandaloneStreamSubscriber("sub2", subscriber2)
        self.queue_cleanup.append(sub2.xn.queue)
        sub2.start()

        log_topic = self.pubsub_management.create_topic("instrument_logs", exchange_point="instruments")
        science_topic = self.pubsub_management.create_topic("science_data", exchange_point="instruments")
        events_topic = self.pubsub_management.create_topic("notifications", exchange_point="events")

        log_stream, route = self.pubsub_management.create_stream(
            "instrument1-logs", topic_ids=[log_topic], exchange_point="instruments"
        )
        ctd_stream, route = self.pubsub_management.create_stream(
            "instrument1-ctd", topic_ids=[science_topic], exchange_point="instruments"
        )
        event_stream, route = self.pubsub_management.create_stream(
            "notifications", topic_ids=[events_topic], exchange_point="events"
        )
        raw_stream, route = self.pubsub_management.create_stream("temp", exchange_point="global.data")
        self.exchange_cleanup.extend(["instruments", "events", "global.data"])

        subscription1 = self.pubsub_management.create_subscription(
            "subscription1", stream_ids=[log_stream, event_stream], exchange_name="sub1"
        )
        subscription2 = self.pubsub_management.create_subscription(
            "subscription2", exchange_points=["global.data"], stream_ids=[ctd_stream], exchange_name="sub2"
        )

        self.pubsub_management.activate_subscription(subscription1)
        self.pubsub_management.activate_subscription(subscription2)

        self.publish_on_stream(log_stream, 1)
        self.assertTrue(self.sub1_sat.wait(4))
        self.assertFalse(self.sub2_sat.is_set())

        self.publish_on_stream(raw_stream, 1)
        self.assertTrue(self.sub2_sat.wait(4))  # raw_stream arrives via the 'global.data' exchange point

        sub1.stop()
        sub2.stop()

    def test_topic_craziness(self):

        self.msg_queue = Queue()

        def subscriber1(m, r, s):
            self.msg_queue.put(m)

        sub1 = StandaloneStreamSubscriber("sub1", subscriber1)
        self.queue_cleanup.append(sub1.xn.queue)
        sub1.start()

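        # Tree 1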
        topic1 = self.pubsub_management.create_topic("topic1", exchange_point="xp1")
        topic2 = self.pubsub_management.create_topic("topic2", exchange_point="xp1", parent_topic_id=topic1)
        topic3 = self.pubsub_management.create_topic("topic3", exchange_point="xp1", parent_topic_id=topic1)
        topic4 = self.pubsub_management.create_topic("topic4", exchange_point="xp1", parent_topic_id=topic2)
        topic5 = self.pubsub_management.create_topic("topic5", exchange_point="xp1", parent_topic_id=topic2)
        topic6 = self.pubsub_management.create_topic("topic6", exchange_point="xp1", parent_topic_id=topic3)
        topic7 = self.pubsub_management.create_topic("topic7", exchange_point="xp1", parent_topic_id=topic3)

        # Tree 2
        topic8 = self.pubsub_management.create_topic("topic8", exchange_point="xp2")
        topic9 = self.pubsub_management.create_topic("topic9", exchange_point="xp2", parent_topic_id=topic8)
        topic10 = self.pubsub_management.create_topic("topic10", exchange_point="xp2", parent_topic_id=topic9)
        topic11 = self.pubsub_management.create_topic("topic11", exchange_point="xp2", parent_topic_id=topic9)
        topic12 = self.pubsub_management.create_topic("topic12", exchange_point="xp2", parent_topic_id=topic11)
        topic13 = self.pubsub_management.create_topic("topic13", exchange_point="xp2", parent_topic_id=topic11)
        self.exchange_cleanup.extend(["xp1", "xp2"])

        stream1_id, route = self.pubsub_management.create_stream(
            "stream1", topic_ids=[topic7, topic4, topic5], exchange_point="xp1"
        )
        stream2_id, route = self.pubsub_management.create_stream("stream2", topic_ids=[topic8], exchange_point="xp2")
        stream3_id, route = self.pubsub_management.create_stream(
            "stream3", topic_ids=[topic10, topic13], exchange_point="xp2"
        )
        stream4_id, route = self.pubsub_management.create_stream("stream4", topic_ids=[topic9], exchange_point="xp2")
        stream5_id, route = self.pubsub_management.create_stream("stream5", topic_ids=[topic11], exchange_point="xp2")

        subscription1 = self.pubsub_management.create_subscription("sub1", topic_ids=[topic1])
        subscription2 = self.pubsub_management.create_subscription("sub2", topic_ids=[topic8], exchange_name="sub1")
        subscription3 = self.pubsub_management.create_subscription("sub3", topic_ids=[topic9], exchange_name="sub1")
        subscription4 = self.pubsub_management.create_subscription(
            "sub4", topic_ids=[topic10, topic13, topic11], exchange_name="sub1"
        )
        # --------------------------------------------------------------------------------
        self.pubsub_management.activate_subscription(subscription1)

        self.publish_on_stream(stream1_id, 1)

        self.assertEquals(self.msg_queue.get(timeout=10), 1)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.1)

        self.pubsub_management.deactivate_subscription(subscription1)
        self.pubsub_management.delete_subscription(subscription1)
        # --------------------------------------------------------------------------------
        self.pubsub_management.activate_subscription(subscription2)

        self.publish_on_stream(stream2_id, 2)
        self.assertEquals(self.msg_queue.get(timeout=10), 2)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.1)

        self.pubsub_management.deactivate_subscription(subscription2)
        self.pubsub_management.delete_subscription(subscription2)

        # --------------------------------------------------------------------------------
        self.pubsub_management.activate_subscription(subscription3)

        self.publish_on_stream(stream2_id, 3)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.3)

        self.publish_on_stream(stream3_id, 4)
        self.assertEquals(self.msg_queue.get(timeout=10), 4)

        self.pubsub_management.deactivate_subscription(subscription3)
        self.pubsub_management.delete_subscription(subscription3)

        # --------------------------------------------------------------------------------
        self.pubsub_management.activate_subscription(subscription4)

        self.publish_on_stream(stream4_id, 5)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.3)

        self.publish_on_stream(stream5_id, 6)
        self.assertEquals(self.msg_queue.get(timeout=10), 6)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.3)

        self.pubsub_management.deactivate_subscription(subscription4)
        self.pubsub_management.delete_subscription(subscription4)

        # --------------------------------------------------------------------------------
        sub1.stop()

        self.pubsub_management.delete_topic(topic13)
        self.pubsub_management.delete_topic(topic12)
        self.pubsub_management.delete_topic(topic11)
        self.pubsub_management.delete_topic(topic10)
        self.pubsub_management.delete_topic(topic9)
        self.pubsub_management.delete_topic(topic8)
        self.pubsub_management.delete_topic(topic7)
        self.pubsub_management.delete_topic(topic6)
        self.pubsub_management.delete_topic(topic5)
        self.pubsub_management.delete_topic(topic4)
        self.pubsub_management.delete_topic(topic3)
        self.pubsub_management.delete_topic(topic2)
        self.pubsub_management.delete_topic(topic1)

        self.pubsub_management.delete_stream(stream1_id)
        self.pubsub_management.delete_stream(stream2_id)
        self.pubsub_management.delete_stream(stream3_id)
        self.pubsub_management.delete_stream(stream4_id)
        self.pubsub_management.delete_stream(stream5_id)

    def _get_pdict(self, filter_values):
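        # Build the parameter contexts used by these tests (L0 measurements plus
        # derived L1/L2 parameter functions) and return a parameter dictionary
        # restricted to the names in filter_values.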
        t_ctxt = ParameterContext("time", param_type=QuantityType(value_encoding=np.dtype("int64")))
        t_ctxt.uom = "seconds since 01-01-1900"
        t_ctxt.fill_value = -9999
        t_ctxt_id = self.dataset_management.create_parameter_context(
            name="time", parameter_context=t_ctxt.dump(), parameter_type="quantity<int64>", unit_of_measure=t_ctxt.uom
        )

        lat_ctxt = ParameterContext("lat", param_type=ConstantType(QuantityType(value_encoding=np.dtype("float32"))))
        lat_ctxt.axis = AxisTypeEnum.LAT
        lat_ctxt.uom = "degree_north"
        lat_ctxt.fill_value = -9999
        lat_ctxt_id = self.dataset_management.create_parameter_context(
            name="lat",
            parameter_context=lat_ctxt.dump(),
            parameter_type="quantity<float32>",
            unit_of_measure=lat_ctxt.uom,
        )

        lon_ctxt = ParameterContext("lon", param_type=ConstantType(QuantityType(value_encoding=np.dtype("float32"))))
        lon_ctxt.axis = AxisTypeEnum.LON
        lon_ctxt.uom = "degree_east"
        lon_ctxt.fill_value = -9999
        lon_ctxt_id = self.dataset_management.create_parameter_context(
            name="lon",
            parameter_context=lon_ctxt.dump(),
            parameter_type="quantity<float32>",
            unit_of_measure=lon_ctxt.uom,
        )

        temp_ctxt = ParameterContext("TEMPWAT_L0", param_type=QuantityType(value_encoding=np.dtype("float32")))
        temp_ctxt.uom = "deg_C"
        temp_ctxt.fill_value = -9999
        temp_ctxt_id = self.dataset_management.create_parameter_context(
            name="TEMPWAT_L0",
            parameter_context=temp_ctxt.dump(),
            parameter_type="quantity<float32>",
            unit_of_measure=temp_ctxt.uom,
        )

        # Conductivity - values expected to be the decimal results of conversion from hex
        cond_ctxt = ParameterContext("CONDWAT_L0", param_type=QuantityType(value_encoding=np.dtype("float32")))
        cond_ctxt.uom = "S m-1"
        cond_ctxt.fill_value = -9999
        cond_ctxt_id = self.dataset_management.create_parameter_context(
            name="CONDWAT_L0",
            parameter_context=cond_ctxt.dump(),
            parameter_type="quantity<float32>",
            unit_of_measure=cond_ctxt.uom,
        )

        # Pressure - values expected to be the decimal results of conversion from hex
        press_ctxt = ParameterContext("PRESWAT_L0", param_type=QuantityType(value_encoding=np.dtype("float32")))
        press_ctxt.uom = "dbar"
        press_ctxt.fill_value = -9999
        press_ctxt_id = self.dataset_management.create_parameter_context(
            name="PRESWAT_L0",
            parameter_context=press_ctxt.dump(),
            parameter_type="quantity<float32>",
            unit_of_measure=press_ctxt.uom,
        )

        # TEMPWAT_L1 = (TEMPWAT_L0 / 10000) - 10
        tl1_func = "(TEMPWAT_L0 / 10000) - 10"
        tl1_pmap = {"TEMPWAT_L0": "TEMPWAT_L0"}
        func = NumexprFunction("TEMPWAT_L1", tl1_func, tl1_pmap)
        tempL1_ctxt = ParameterContext(
            "TEMPWAT_L1", param_type=ParameterFunctionType(function=func), variability=VariabilityEnum.TEMPORAL
        )
        tempL1_ctxt.uom = "deg_C"

        tempL1_ctxt_id = self.dataset_management.create_parameter_context(
            name=tempL1_ctxt.name,
            parameter_context=tempL1_ctxt.dump(),
            parameter_type="pfunc",
            unit_of_measure=tempL1_ctxt.uom,
        )

        # CONDWAT_L1 = (CONDWAT_L0 / 100000) - 0.5
        cl1_func = "(CONDWAT_L0 / 100000) - 0.5"
        cl1_pmap = {"CONDWAT_L0": "CONDWAT_L0"}
        func = NumexprFunction("CONDWAT_L1", cl1_func, cl1_pmap)
        condL1_ctxt = ParameterContext(
            "CONDWAT_L1", param_type=ParameterFunctionType(function=func), variability=VariabilityEnum.TEMPORAL
        )
        condL1_ctxt.uom = "S m-1"
        condL1_ctxt_id = self.dataset_management.create_parameter_context(
            name=condL1_ctxt.name,
            parameter_context=condL1_ctxt.dump(),
            parameter_type="pfunc",
            unit_of_measure=condL1_ctxt.uom,
        )

        # Equation uses p_range, which is a calibration coefficient - Fixing to 679.34040721
        #   PRESWAT_L1 = (PRESWAT_L0 * p_range / (0.85 * 65536)) - (0.05 * p_range)
        pl1_func = "(PRESWAT_L0 * 679.34040721 / (0.85 * 65536)) - (0.05 * 679.34040721)"
        pl1_pmap = {"PRESWAT_L0": "PRESWAT_L0"}
        func = NumexprFunction("PRESWAT_L1", pl1_func, pl1_pmap)
        presL1_ctxt = ParameterContext(
            "PRESWAT_L1", param_type=ParameterFunctionType(function=func), variability=VariabilityEnum.TEMPORAL
        )
        presL1_ctxt.uom = "dbar"  # pressure unit, matching PRESWAT_L0 above (was mistakenly "S m-1")
        presL1_ctxt_id = self.dataset_management.create_parameter_context(
            name=presL1_ctxt.name,
            parameter_context=presL1_ctxt.dump(),
            parameter_type="pfunc",
            unit_of_measure=presL1_ctxt.uom,
        )

        # Density & practical salinity calculated using the Gibbs SeaWater library - available via the python-gsw project:
        #       https://code.google.com/p/python-gsw/ & http://pypi.python.org/pypi/gsw/3.0.1

        # PRACSAL = gsw.SP_from_C((CONDWAT_L1 * 10), TEMPWAT_L1, PRESWAT_L1)
        owner = "gsw"
        sal_func = "SP_from_C"
        sal_arglist = [NumexprFunction("CONDWAT_L1*10", "C*10", {"C": "CONDWAT_L1"}), "TEMPWAT_L1", "PRESWAT_L1"]
        sal_kwargmap = None
        func = PythonFunction("PRACSAL", owner, sal_func, sal_arglist, sal_kwargmap)
        sal_ctxt = ParameterContext(
            "PRACSAL", param_type=ParameterFunctionType(func), variability=VariabilityEnum.TEMPORAL
        )
        sal_ctxt.uom = "g kg-1"

        sal_ctxt_id = self.dataset_management.create_parameter_context(
            name=sal_ctxt.name, parameter_context=sal_ctxt.dump(), parameter_type="pfunc", unit_of_measure=sal_ctxt.uom
        )

        # absolute_salinity = gsw.SA_from_SP(PRACSAL, PRESWAT_L1, longitude, latitude)
        # conservative_temperature = gsw.CT_from_t(absolute_salinity, TEMPWAT_L1, PRESWAT_L1)
        # DENSITY = gsw.rho(absolute_salinity, conservative_temperature, PRESWAT_L1)
        owner = "gsw"
        abs_sal_func = PythonFunction("abs_sal", owner, "SA_from_SP", ["PRACSAL", "PRESWAT_L1", "lon", "lat"], None)
        # abs_sal_func = PythonFunction('abs_sal', owner, 'SA_from_SP', ['lon','lat'], None)
        cons_temp_func = PythonFunction(
            "cons_temp", owner, "CT_from_t", [abs_sal_func, "TEMPWAT_L1", "PRESWAT_L1"], None
        )
        dens_func = PythonFunction("DENSITY", owner, "rho", [abs_sal_func, cons_temp_func, "PRESWAT_L1"], None)
        dens_ctxt = ParameterContext(
            "DENSITY", param_type=ParameterFunctionType(dens_func), variability=VariabilityEnum.TEMPORAL
        )
        dens_ctxt.uom = "kg m-3"

        dens_ctxt_id = self.dataset_management.create_parameter_context(
            name=dens_ctxt.name,
            parameter_context=dens_ctxt.dump(),
            parameter_type="pfunc",
            unit_of_measure=dens_ctxt.uom,
        )

        ids = [
            t_ctxt_id,
            lat_ctxt_id,
            lon_ctxt_id,
            temp_ctxt_id,
            cond_ctxt_id,
            press_ctxt_id,
            tempL1_ctxt_id,
            condL1_ctxt_id,
            presL1_ctxt_id,
            sal_ctxt_id,
            dens_ctxt_id,
        ]
        contexts = [
            t_ctxt,
            lat_ctxt,
            lon_ctxt,
            temp_ctxt,
            cond_ctxt,
            press_ctxt,
            tempL1_ctxt,
            condL1_ctxt,
            presL1_ctxt,
            sal_ctxt,
            dens_ctxt,
        ]
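        # Keep only the requested contexts and derive the dictionary name from
        # them, so each distinct filter yields a distinct parameter dictionary.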
        context_ids = [ids[i] for i, ctxt in enumerate(contexts) if ctxt.name in filter_values]
        pdict_name = "_".join([ctxt.name for ctxt in contexts if ctxt.name in filter_values])

        pdict_id = self.dataset_management.create_parameter_dictionary(
            pdict_name, parameter_context_ids=context_ids, temporal_context="time"
        )
        return pdict_id
class TestObservatoryManagementFullIntegration(IonIntegrationTestCase):

    def setUp(self):
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.RR = ResourceRegistryServiceClient(node=self.container.node)
        self.RR2 = EnhancedResourceRegistryClient(self.RR)
        self.OMS = ObservatoryManagementServiceClient(node=self.container.node)
        self.org_management_service = OrgManagementServiceClient(node=self.container.node)
        self.IMS =  InstrumentManagementServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(node=self.container.node)
        self.pubsubcli =  PubsubManagementServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()

    def assert_can_load(self, scenarios, loadui=False, loadooi=False, path=TESTED_DOC, ui_path='default'):
        """ perform preload for given scenarios and raise exception if there is a problem with the data """
        config = dict(op="load",
                      scenario=scenarios,
                      attachments="res/preload/r2_ioc/attachments",
                      loadui=loadui,
                      loadooi=loadooi,
                      path=path, ui_path=ui_path,
                      assets='res/preload/r2_ioc/ooi_assets',
                      bulk=loadooi,
                      debug=True,
                      ooiexclude='DataProduct,DataProductLink',
                      )
        self.container.spawn_process("Loader", "ion.processes.bootstrap.ion_loader", "IONLoader", config=config)

        # Fuller ooiexclude list, kept for reference:
        # 'DataProduct,DataProductLink,WorkflowDefinition,ExternalDataProvider,ExternalDatasetModel,ExternalDataset,ExternalDatasetAgent,ExternalDatasetAgentInstance'

    def load_summer_deploy_assets(self):
        """ make sure UI assets are valid using DEFAULT_UI_ASSETS = 'https://userexperience.oceanobservatories.org/database-exports/' """
        ui_path = 'https://userexperience.oceanobservatories.org/database-exports/Candidates'
        self.assert_can_load(scenarios="X", loadui=True, loadooi=True, ui_path=ui_path)

    @unittest.skip('under construction.')
    def test_observatory(self):
        # Perform OOI preload for summer deployments (production mode, no debug, no bulk)
        self.load_summer_deploy_assets()

        #test an asset
        res_list, _  = self.RR.find_resources_ext(alt_id_ns="OOI", alt_id="CE04OSBP-LJ01C-06-CTDBPO108")
        log.debug('test_observatory  retrieve test:  %s', res_list)

        # Check OOI preloaded resources to see if they match needs for this test and for correctness
        observatory_list, _ = self.RR.find_resources_ext(restype=RT.Observatory)
        self.assertEquals(42, len(observatory_list))

        platform_site_list, _ = self.RR.find_resources(RT.PlatformSite, None, None, False)
        for ps in platform_site_list:
            log.debug('platform site: %s', ps.name)
        self.assertEquals(38, len(platform_site_list))

        platform_device_list, _ = self.RR.find_resources(RT.PlatformDevice, None, None, False)
        for pd in platform_device_list:
            log.debug('platform device: %s', pd.name)
        self.assertEquals(38, len(platform_device_list))

        platform_agent_list, _ = self.RR.find_resources(RT.PlatformAgent, None, None, False)
        self.assertEquals(2, len(platform_agent_list))
        for pa in platform_agent_list:
            log.debug('platform agent: %s', pa.name)

        deployment_list, _ = self.RR.find_resources(RT.Deployment, None, None, False)
        self.assertEquals(62, len(deployment_list))
        for d in deployment_list:
            log.debug('deployment: %s', d.name)

        # Check lcstates for select OOI resources: Some PLANNED, some INTEGRATED, some DEPLOYED
        for obs in observatory_list:
            self.assertEquals(obs.lcstate, 'DRAFT')
        for pdev in platform_device_list:
            self.assertEquals(pdev.lcstate, 'PLANNED')
        for pagent in platform_agent_list:
            self.assertEquals(pagent.lcstate, 'DEPLOYED')

        # See if Deployment for primary nodes is already active and in DEPLOYED lcstate, in particular CE04OSHY-PN01C
        for deploy in deployment_list:
            self.assertEquals(deploy.lcstate, 'PLANNED')

        dp_list, _  = self.RR.find_resources_ext(alt_id_ns="PRE", alt_id="CE04OSHY-PN01C_DEP")
        self.assertEquals(len(dp_list), 1)
        self.assertEquals(dp_list[0].availability, 'AVAILABLE')
        log.debug('test_observatory  retrieve CE04OSHY-PN01C_DEP deployment:  %s', dp_list[0])

        # Check existing RSN node CE04OSHY-LV01C Deployment (PLANNED lcstate)
        CE04OSHY_LV01C_deployment = self.retrieve_ooi_asset(namespace='PRE', alt_id='CE04OSHY-LV01C_DEP')

        #self.dump_deployment(CE04OSHY_LV01C_deployment._id)
        self.assertEquals(CE04OSHY_LV01C_deployment.lcstate, 'PLANNED')
        log.debug('test_observatory  retrieve RSN node CE04OSHY-LV01C Deployment:  %s', CE04OSHY_LV01C_deployment)

        # Set CE04OSHY-LV01C device to DEVELOPED state
        # MATURITY = ['DRAFT', 'PLANNED', 'DEVELOPED', 'INTEGRATED', 'DEPLOYED', 'RETIRED']
        CE04OSHY_LV01C_device = self.retrieve_ooi_asset(namespace='PRE', alt_id='CE04OSHY-LV01C_PD')
        #ret = self.RR.execute_lifecycle_transition(resource_id=CE04OSHY_LV01C_device._id, transition_event=LCE.PLANNED)
        self.transition_lcs_then_verify(resource_id=CE04OSHY_LV01C_device._id, new_lcs_state=LCE.DEVELOP, verify='DEVELOPED')


        # Set CE04OSHY-LV01C device to INTEGRATED state
        self.transition_lcs_then_verify(resource_id=CE04OSHY_LV01C_device._id, new_lcs_state=LCE.INTEGRATE, verify='INTEGRATED')

        # Set CE04OSHY-LV01C device to DEPLOYED state
        self.transition_lcs_then_verify(resource_id=CE04OSHY_LV01C_device._id, new_lcs_state=LCE.DEPLOY, verify='DEPLOYED')

        # Set CE04OSHY-LV01C Deployment to DEPLOYED state
        self.transition_lcs_then_verify(resource_id=CE04OSHY_LV01C_deployment._id, new_lcs_state=LCE.DEPLOY, verify='DEPLOYED')

        # Activate Deployment for CE04OSHY-LV01C
        self.OMS.activate_deployment(CE04OSHY_LV01C_deployment._id)
        log.debug('---------    activate_deployment CE04OSHY_LV01C_deployment -------------- ')
        self.dump_deployment(CE04OSHY_LV01C_deployment._id)
        self.validate_deployment_activated(CE04OSHY_LV01C_deployment._id)

        # (optional) Start CE04OSHY-LV01C platform agent with simulator

        # Set DataProduct for CE04OSHY-LV01C platform to DEPLOYED state
        output_data_product_ids, assns = self.RR.find_objects(subject=CE04OSHY_LV01C_device._id, predicate=PRED.hasOutputProduct, id_only=True)
        if output_data_product_ids:
            #self.assertEquals(len(child_devs), 3)
            for output_data_product_id in output_data_product_ids:
                log.debug('DataProduct for CE04OSHY-LV01C platform:  %s', output_data_product_id)
                self.transition_lcs_then_verify(resource_id=output_data_product_id, new_lcs_state=LCE.DEPLOY, verify='DEPLOYED')


        # Check events for CE04OSHY-LV01C platform


        # Check existing CE04OSBP-LJ01C Deployment (PLANNED lcstate)
#        dp_list, _  = self.RR.find_resources_ext(alt_id_ns="PRE", alt_id="CE04OSBP-LJ01C_DEP")
#        self.assertEquals(len(dp_list), 1)
#        CE04OSHY_LV01C_deployment = dp_list[0]
#        self.assertEquals(CE04OSHY_LV01C_deployment.lcstate, 'PLANNED')
#        log.debug('test_observatory  retrieve RSN node CE04OSBP-LJ01C Deployment:  %s', CE04OSHY_LV01C_deployment)


        # Set CE04OSBP-LJ01C Deployment to DEPLOYED state

        # Update description and other attributes for CE04OSBP-LJ01C device resource

        # Create attachment (JPG image) for CE04OSBP-LJ01C device resource

        # Activate Deployment for CE04OSBP-LJ01C

        # (optional) Add/register CE04OSBP-LJ01C platform agent to parent agent

        # (optional) Start CE04OSBP-LJ01C platform agent


        log.debug('--------- ------------------------------------------------------------------------------------------------------------ -------------- ')
        # Check existing RSN instrument CE04OSBP-LJ01C-06-CTDBPO108 Deployment (PLANNED lcstate)
        CE04OSBP_LJ01C_06_CTDBPO108_deploy = self.retrieve_ooi_asset(namespace='PRE', alt_id='CE04OSBP-LJ01C-06-CTDBPO108_DEP')
        self.dump_deployment(CE04OSBP_LJ01C_06_CTDBPO108_deploy._id)
        self.assertEquals(CE04OSBP_LJ01C_06_CTDBPO108_deploy.lcstate, 'PLANNED')

        # Set CE04OSBP-LJ01C-06-CTDBPO108 device to DEVELOPED state
        CE04OSBP_LJ01C_06_CTDBPO108_device = self.retrieve_ooi_asset(namespace='PRE', alt_id='CE04OSBP-LJ01C-06-CTDBPO108_ID')
        self.transition_lcs_then_verify(resource_id=CE04OSBP_LJ01C_06_CTDBPO108_device._id, new_lcs_state=LCE.DEVELOP, verify='DEVELOPED')

        # Set CE04OSBP-LJ01C-06-CTDBPO108 device to INTEGRATED state
        self.transition_lcs_then_verify(resource_id=CE04OSBP_LJ01C_06_CTDBPO108_device._id, new_lcs_state=LCE.INTEGRATE, verify='INTEGRATED')

        # Set CE04OSBP-LJ01C-06-CTDBPO108 device to DEPLOYED state
        self.transition_lcs_then_verify(resource_id=CE04OSBP_LJ01C_06_CTDBPO108_device._id, new_lcs_state=LCE.DEPLOY, verify='DEPLOYED')

        # Set CE04OSBP-LJ01C-06-CTDBPO108 Deployment to DEPLOYED state
        self.transition_lcs_then_verify(resource_id=CE04OSBP_LJ01C_06_CTDBPO108_deploy._id, new_lcs_state=LCE.DEPLOY, verify='DEPLOYED')

        # Activate Deployment for CE04OSBP-LJ01C-06-CTDBPO108 instrument
        log.debug('---------    activate_deployment CE04OSBP-LJ01C-06-CTDBPO108 deployment -------------- ')
        self.OMS.activate_deployment(CE04OSBP_LJ01C_06_CTDBPO108_deploy._id)
        self.validate_deployment_activated(CE04OSBP_LJ01C_06_CTDBPO108_deploy._id)

        # (optional) Add/register CE04OSBP-LJ01C-06-CTDBPO108 instrument agent to parent agent

        # (optional) Start CE04OSBP-LJ01C-06-CTDBPO108 instrument agent with simulator

        # Set all DataProducts for CE04OSBP-LJ01C-06-CTDBPO108 to DEPLOYED state


        # (optional) Create a substitute Deployment for site CE04OSBP-LJ01C-06-CTDBPO108 with a comparable device
        CE04OSBP_LJ01C_06_CTDBPO108_isite = self.retrieve_ooi_asset(namespace='PRE', alt_id='CE04OSBP-LJ01C-06-CTDBPO108')

        ## create device here: retrieve CTD Mooring on Mooring Riser 001 - similar?
        GP03FLMB_RI001_10_CTDMOG999_ID_idevice = self.retrieve_ooi_asset(namespace='PRE', alt_id='GP03FLMB-RI001-10-CTDMOG999_ID')

        deploy_id_2 = self.create_basic_deployment(name='CE04OSBP-LJ01C-06-CTDBPO108_DEP2', description='substitute Deployment for site CE04OSBP-LJ01C-06-CTDBPO108 with a comparable device')
        self.IMS.deploy_instrument_device(instrument_device_id=GP03FLMB_RI001_10_CTDMOG999_ID_idevice._id, deployment_id=deploy_id_2)
        self.OMS.deploy_instrument_site(instrument_site_id=CE04OSBP_LJ01C_06_CTDBPO108_isite._id, deployment_id=deploy_id_2)
        self.dump_deployment(deploy_id_2)

        # (optional) Activate this second deployment - check first deployment is deactivated
        self.OMS.deactivate_deployment(CE04OSBP_LJ01C_06_CTDBPO108_deploy._id)
        self.validate_deployment_deactivated(CE04OSBP_LJ01C_06_CTDBPO108_deploy._id)


        log.debug('Activate deployment deploy_id_2')
        self.get_deployment_ids(deploy_id_2)
        self.dump_deployment(deploy_id_2, "deploy_id_2")
        self.OMS.activate_deployment(deploy_id_2)
        self.validate_deployment_deactivated(CE04OSBP_LJ01C_06_CTDBPO108_deploy._id)

        # (optional) Set first CE04OSBP-LJ01C-06-CTDBPO108 Deployment to INTEGRATED state
        self.transition_lcs_then_verify(resource_id=CE04OSBP_LJ01C_06_CTDBPO108_deploy._id, new_lcs_state=LCE.INTEGRATE, verify='INTEGRATED')

        # Set first CE04OSBP-LJ01C-06-CTDBPO108 device to INTEGRATED state
        self.transition_lcs_then_verify(resource_id=CE04OSBP_LJ01C_06_CTDBPO108_device._id, new_lcs_state=LCE.INTEGRATE, verify='INTEGRATED')


        # (optional) Create a third Deployment for site CE04OSBP-LJ01C-06-CTDBPO108 with a same device from first deployment
        deploy_id_3 = self.create_basic_deployment(name='CE04OSBP-LJ01C-06-CTDBPO108_DEP3', description='substitute Deployment for site CE04OSBP-LJ01C-06-CTDBPO108 with same device as first')
        self.IMS.deploy_instrument_device(instrument_device_id=GP03FLMB_RI001_10_CTDMOG999_ID_idevice._id, deployment_id=deploy_id_3)
        self.OMS.deploy_instrument_site(instrument_site_id=CE04OSBP_LJ01C_06_CTDBPO108_isite._id, deployment_id=deploy_id_3)
        self.dump_deployment(deploy_id_3)


        # Set first CE04OSBP-LJ01C-06-CTDBPO108 device to DEPLOYED state
        self.transition_lcs_then_verify(resource_id=CE04OSBP_LJ01C_06_CTDBPO108_device._id, new_lcs_state=LCE.DEPLOY, verify='DEPLOYED')

        # (optional) Activate this third deployment - check second deployment is deactivated
        log.debug('Activate deployment deploy_id_3')
        self.dump_deployment(deploy_id_3)
        self.OMS.activate_deployment(deploy_id_3)
        # TODO: check second deployment is deactivated

        # Check that glider GP05MOAS-GL001 assembly is defined by OOI preload (3 instruments)
        GP05MOAS_GL001_device = self.retrieve_ooi_asset(namespace='PRE', alt_id='GP05MOAS-GL001_PD')
        child_devs, assns = self.RR.find_objects(subject=GP05MOAS_GL001_device._id, predicate=PRED.hasDevice, id_only=True)
        self.assertEquals(len(child_devs), 3)

        # Set GP05MOAS-GL001 Deployment to DEPLOYED
        GP05MOAS_GL001_deploy = self.retrieve_ooi_asset(namespace='PRE', alt_id='GP05MOAS-GL001_DEP')
        self.transition_lcs_then_verify(resource_id=GP05MOAS_GL001_deploy._id, new_lcs_state=LCE.DEPLOY, verify='DEPLOYED')

        # Activate Deployment for GP05MOAS-GL001
        #self.OMS.activate_deployment(GP05MOAS_GL001_deploy._id)

        # Deactivate Deployment for GP05MOAS-GL001
        #self.OMS.deactivate_deployment(GP05MOAS_GL001_deploy._id)


        # Create a new Deployment resource X without any assignment
        x_deploy_id = self.create_basic_deployment(name='X_Deployment', description='new Deployment resource X without any assignment')

        # Assign Deployment X to site GP05MOAS-GL001
        GP05MOAS_GL001_psite = self.retrieve_ooi_asset(namespace='PRE', alt_id='GP05MOAS-GL001')
        self.OMS.deploy_platform_site(GP05MOAS_GL001_psite._id, x_deploy_id)

        # Assign Deployment X to first device for GP05MOAS-GL001
        GP05MOAS_GL001_device = self.retrieve_ooi_asset(namespace='PRE', alt_id='GP05MOAS-GL001_PD')
        self.IMS.deploy_platform_device(GP05MOAS_GL001_device._id, x_deploy_id)

        # Set GP05MOAS-GL001 Deployment to PLANNED state
        #self.transition_lcs_then_verify(resource_id=x_deploy_id, new_lcs_state=LCE.PLAN, verify='PLANNED')
        # ??? already in planned

        # Set second GP05MOAS-GL001 Deployment to DEPLOYED
        self.transition_lcs_then_verify(resource_id=x_deploy_id, new_lcs_state=LCE.DEPLOY, verify='DEPLOYED')
        self.dump_deployment(x_deploy_id)

        # Activate second Deployment for GP05MOAS-GL001
        #self.OMS.activate_deployment(x_deploy_id)

        # Deactivate second Deployment for GP05MOAS-GL001
        #self.OMS.deactivate_deployment(x_deploy_id)


        # Set several CE01ISSM-RI002-* instrument devices to DEVELOPED state (see the _develop_devices sketch below)

        # Assemble several CE01ISSM-RI002-* instruments to a CG CE01ISSM-RI002 component platform

        # Set several CE01ISSM-RI002-* instrument devices to INTEGRATED state

        # Assemble CE01ISSM-RI002 platform to CG CE01ISSM-LM001 station platform

        # Set CE01ISSM-RI002 component device to INTEGRATED state

        # Set CE01ISSM-LM001 station device to INTEGRATED state

        # Set CE01ISSM-LM001 station device to DEPLOYED state (children maybe too?)

        # Set CE01ISSM-LM001 Deployment to DEPLOYED

        # Activate CE01ISSM-LM001 platform assembly deployment


        # Deactivate CE01ISSM-LM001 platform assembly deployment

        # Set CE01ISSM-LM001 Deployment to INTEGRATED state

        # Set CE01ISSM-LM001 station device to INTEGRATED state

        # Set CE01ISSM-RI002 component device to INTEGRATED state

        # Disassemble CE01ISSM-RI002 platform from CG CE01ISSM-LM001 station platform

        # Disassemble all CE01ISSM-RI002-* instruments from a CG CE01ISSM-RI002 component platform


        # Retire instrument one for CE01ISSM-RI002-*

        # Retire device one for CE01ISSM-RI002

        # Retire device one for CE01ISSM-LM001


        # Add a new instrument agent

        # Add a new instrument agent instance

        # Check DataProducts

        # Check provenance

        pass
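
    # Hedged sketch, not part of the original test: one plausible implementation
    # of the "set several CE01ISSM-RI002-* instrument devices to DEVELOPED state"
    # step outlined above, reusing this class's helpers. The alt_ids argument
    # (a list of preload alternate ids) is hypothetical.
    def _develop_devices(self, alt_ids):
        for alt_id in alt_ids:
            # Look up the preloaded device resource by its 'PRE' alternate id
            device = self.retrieve_ooi_asset(namespace='PRE', alt_id=alt_id)
            # Advance its lifecycle and verify the resulting state
            self.transition_lcs_then_verify(resource_id=device._id,
                                            new_lcs_state=LCE.DEVELOP,
                                            verify='DEVELOPED')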

    def retrieve_ooi_asset(self, namespace='', alt_id=''):
        dp_list, _  = self.RR.find_resources_ext(alt_id_ns=namespace, alt_id=alt_id)
        self.assertEquals(len(dp_list), 1)
        return dp_list[0]

    def transition_lcs_then_verify(self, resource_id, new_lcs_state, verify):
        ret = self.RR2.advance_lcs(resource_id, new_lcs_state)
        resource_obj = self.RR.read(resource_id)
        self.assertEquals(resource_obj.lcstate, verify)

    def create_basic_deployment(self, name='', description=''):
        start = IonTime(datetime.datetime(2013,1,1))
        end = IonTime(datetime.datetime(2014,1,1))
        temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=start.to_string(), end_datetime=end.to_string())
        deployment_obj = IonObject(RT.Deployment,
            name=name,
            description=description,
            context=IonObject(OT.CabledNodeDeploymentContext),
            constraint_list=[temporal_bounds])
        return self.OMS.create_deployment(deployment_obj)

    def validate_deployment_activated(self, deployment_id=''):
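        # Activation should have created exactly one hasDevice association
        # between the deployment's site and device.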
        site_id, device_id = self.get_deployment_ids(deployment_id)
        assocs = self.RR.find_associations(subject=site_id, predicate=PRED.hasDevice, object=device_id)
        self.assertEquals(len(assocs), 1)

    def validate_deployment_deactivated(self, deployment_id=''):
        site_id, device_id = self.get_deployment_ids(deployment_id)
        assocs = self.RR.find_associations(subject=site_id, predicate=PRED.hasDevice, object=device_id)
        self.assertEquals(len(assocs), 0)

    def dump_deployment(self, deployment_id='', name=""):
        #site_id, device_id = self.get_deployment_ids(deployment_id)
        resource_list,_ = self.RR.find_subjects(predicate=PRED.hasDeployment, object=deployment_id, id_only=True)
        resource_list.append(deployment_id)
        resources = self.RR.read_mult(resource_list)
        log.debug('---------   dump_deployment %s summary---------------', name)
        for resource in resources:
            log.debug('%s: %s (%s)', resource._get_type(), resource.name, resource._id)

        log.debug('---------   dump_deployment %s full dump ---------------', name)

        for resource in resources:
            log.debug('resource: %s ', resource)
        log.debug('---------   dump_deployment %s end  ---------------', name)


        #assocs = self.container.resource_registry.find_associations(anyside=deployment_id)
#        assocs = Container.instance.resource_registry.find_associations(anyside=deployment_id)
#        log.debug('---------   dump_deployment  ---------------')
#        for assoc in assocs:
#            log.debug('SUBJECT: %s      PREDICATE: %s OBJECT: %s', assoc.s, assoc.p, assoc.o)
#        log.debug('---------   dump_deployment  end  ---------------')


    def get_deployment_ids(self, deployment_id=''):
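        # Each basic deployment in these tests links exactly one device
        # (instrument or platform) and exactly one site; return that pair.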
        devices = []
        sites = []
        idevice_list,_ = self.RR.find_subjects(RT.InstrumentDevice, PRED.hasDeployment, deployment_id, id_only=True)
        pdevice_list,_ = self.RR.find_subjects(RT.PlatformDevice, PRED.hasDeployment, deployment_id, id_only=True)
        devices = idevice_list + pdevice_list
        self.assertEquals(1, len(devices))
        isite_list,_ = self.RR.find_subjects(RT.InstrumentSite, PRED.hasDeployment, deployment_id, id_only=True)
        psite_list,_ = self.RR.find_subjects(RT.PlatformSite, PRED.hasDeployment, deployment_id, id_only=True)
        sites = isite_list + psite_list
        self.assertEquals(1, len(sites))
        return sites[0], devices[0]
class PubsubManagementIntTest(IonIntegrationTestCase):
    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.pubsub_management = PubsubManagementServiceClient()
        self.resource_registry = ResourceRegistryServiceClient()
        self.dataset_management = DatasetManagementServiceClient()

        self.pdicts = {}
        self.queue_cleanup = list()
        self.exchange_cleanup = list()

    def tearDown(self):
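        # Drop any queues and exchange points the tests registered for cleanup
        # so repeated runs start from a clean broker state.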
        for queue in self.queue_cleanup:
            xn = self.container.ex_manager.create_xn_queue(queue)
            xn.delete()
        for exchange in self.exchange_cleanup:
            xp = self.container.ex_manager.create_xp(exchange)
            xp.delete()

    def test_stream_def_crud(self):
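        # Full CRUD pass over a stream definition: create from a known parameter
        # dictionary, verify the association, read it back, delete it, then
        # exercise the comparison and compatibility checks.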

        # Test Creation
        pdict = DatasetManagementService.get_parameter_dictionary_by_name(
            'ctd_parsed_param_dict')
        stream_definition_id = self.pubsub_management.create_stream_definition(
            'ctd parsed', parameter_dictionary_id=pdict.identifier)

        # Make sure there is an assoc
        self.assertTrue(
            self.resource_registry.find_associations(
                subject=stream_definition_id,
                predicate=PRED.hasParameterDictionary,
                object=pdict.identifier,
                id_only=True))

        # Test Reading
        stream_definition = self.pubsub_management.read_stream_definition(
            stream_definition_id)
        self.assertTrue(
            PubsubManagementService._compare_pdicts(
                pdict.dump(), stream_definition.parameter_dictionary))

        # Test Deleting
        self.pubsub_management.delete_stream_definition(stream_definition_id)
        self.assertFalse(
            self.resource_registry.find_associations(
                subject=stream_definition_id,
                predicate=PRED.hasParameterDictionary,
                object=pdict.identifier,
                id_only=True))

        # Test comparisons
        in_stream_definition_id = self.pubsub_management.create_stream_definition(
            'L0 products',
            parameter_dictionary_id=pdict.identifier,
            available_fields=['time', 'temp', 'conductivity', 'pressure'])
        self.addCleanup(self.pubsub_management.delete_stream_definition,
                        in_stream_definition_id)

        out_stream_definition_id = in_stream_definition_id
        self.assertTrue(
            self.pubsub_management.compare_stream_definition(
                in_stream_definition_id, out_stream_definition_id))
        self.assertTrue(
            self.pubsub_management.compatible_stream_definitions(
                in_stream_definition_id, out_stream_definition_id))

        out_stream_definition_id = self.pubsub_management.create_stream_definition(
            'L2 Products',
            parameter_dictionary_id=pdict.identifier,
            available_fields=['time', 'salinity', 'density'])
        self.addCleanup(self.pubsub_management.delete_stream_definition,
                        out_stream_definition_id)
        self.assertFalse(
            self.pubsub_management.compare_stream_definition(
                in_stream_definition_id, out_stream_definition_id))

        self.assertTrue(
            self.pubsub_management.compatible_stream_definitions(
                in_stream_definition_id, out_stream_definition_id))

    def test_validate_stream_defs(self):
        #test no input
        incoming_pdict_id = self._get_pdict(
            ['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'])
        outgoing_pdict_id = self._get_pdict(
            ['DENSITY', 'PRACSAL', 'TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        available_fields_in = []
        available_fields_out = []
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            'in_sd_0',
            parameter_dictionary_id=incoming_pdict_id,
            available_fields=available_fields_in)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            'out_sd_0',
            parameter_dictionary_id=outgoing_pdict_id,
            available_fields=available_fields_out)
        result = self.pubsub_management.validate_stream_defs(
            incoming_stream_def_id, outgoing_stream_def_id)
        self.assertFalse(result)

        #test input with no output
        incoming_pdict_id = self._get_pdict(
            ['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'])
        outgoing_pdict_id = self._get_pdict(
            ['DENSITY', 'PRACSAL', 'TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        available_fields_in = [
            'TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'
        ]
        available_fields_out = []
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            'in_sd_1',
            parameter_dictionary_id=incoming_pdict_id,
            available_fields=available_fields_in)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            'out_sd_1',
            parameter_dictionary_id=outgoing_pdict_id,
            available_fields=available_fields_out)
        result = self.pubsub_management.validate_stream_defs(
            incoming_stream_def_id, outgoing_stream_def_id)
        self.assertTrue(result)

        #test available field missing parameter context definition -- missing PRESWAT_L0
        incoming_pdict_id = self._get_pdict(
            ['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0'])
        outgoing_pdict_id = self._get_pdict(
            ['DENSITY', 'PRACSAL', 'TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        available_fields_in = [
            'TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'
        ]
        available_fields_out = ['DENSITY']
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            'in_sd_2',
            parameter_dictionary_id=incoming_pdict_id,
            available_fields=available_fields_in)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            'out_sd_2',
            parameter_dictionary_id=outgoing_pdict_id,
            available_fields=available_fields_out)
        result = self.pubsub_management.validate_stream_defs(
            incoming_stream_def_id, outgoing_stream_def_id)
        self.assertFalse(result)

        #test l1 from l0
        incoming_pdict_id = self._get_pdict(
            ['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'])
        outgoing_pdict_id = self._get_pdict(
            ['TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        available_fields_in = [
            'TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'
        ]
        available_fields_out = ['TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1']
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            'in_sd_3',
            parameter_dictionary_id=incoming_pdict_id,
            available_fields=available_fields_in)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            'out_sd_3',
            parameter_dictionary_id=outgoing_pdict_id,
            available_fields=available_fields_out)
        result = self.pubsub_management.validate_stream_defs(
            incoming_stream_def_id, outgoing_stream_def_id)
        self.assertTrue(result)

        #test l2 from l0
        incoming_pdict_id = self._get_pdict(
            ['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'])
        outgoing_pdict_id = self._get_pdict(
            ['TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1', 'DENSITY', 'PRACSAL'])
        available_fields_in = [
            'TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'
        ]
        available_fields_out = ['DENSITY', 'PRACSAL']
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            'in_sd_4',
            parameter_dictionary_id=incoming_pdict_id,
            available_fields=available_fields_in)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            'out_sd_4',
            parameter_dictionary_id=outgoing_pdict_id,
            available_fields=available_fields_out)
        result = self.pubsub_management.validate_stream_defs(
            incoming_stream_def_id, outgoing_stream_def_id)
        self.assertTrue(result)

        #test Ln from L0
        incoming_pdict_id = self._get_pdict(
            ['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'])
        outgoing_pdict_id = self._get_pdict(
            ['DENSITY', 'PRACSAL', 'TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        available_fields_in = [
            'TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'
        ]
        available_fields_out = [
            'DENSITY', 'PRACSAL', 'TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'
        ]
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            'in_sd_5',
            parameter_dictionary_id=incoming_pdict_id,
            available_fields=available_fields_in)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            'out_sd_5',
            parameter_dictionary_id=outgoing_pdict_id,
            available_fields=available_fields_out)
        result = self.pubsub_management.validate_stream_defs(
            incoming_stream_def_id, outgoing_stream_def_id)
        self.assertTrue(result)

        #test L2 from L1
        incoming_pdict_id = self._get_pdict(
            ['TIME', 'LAT', 'LON', 'TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        outgoing_pdict_id = self._get_pdict(
            ['DENSITY', 'PRACSAL', 'TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        available_fields_in = [
            'TIME', 'LAT', 'LON', 'TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'
        ]
        available_fields_out = ['DENSITY', 'PRACSAL']
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            'in_sd_6',
            parameter_dictionary_id=incoming_pdict_id,
            available_fields=available_fields_in)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            'out_sd_6',
            parameter_dictionary_id=outgoing_pdict_id,
            available_fields=available_fields_out)
        result = self.pubsub_management.validate_stream_defs(
            incoming_stream_def_id, outgoing_stream_def_id)
        self.assertTrue(result)

        #test L1 from L0 missing L0
        incoming_pdict_id = self._get_pdict(['TIME', 'LAT', 'LON'])
        outgoing_pdict_id = self._get_pdict(
            ['TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        available_fields_in = ['TIME', 'LAT', 'LON']
        available_fields_out = ['DENSITY', 'PRACSAL']
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            'in_sd_7',
            parameter_dictionary_id=incoming_pdict_id,
            available_fields=available_fields_in)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            'out_sd_7',
            parameter_dictionary_id=outgoing_pdict_id,
            available_fields=available_fields_out)
        result = self.pubsub_management.validate_stream_defs(
            incoming_stream_def_id, outgoing_stream_def_id)
        self.assertFalse(result)

        #test L2 from L0 missing L0
        incoming_pdict_id = self._get_pdict(['TIME', 'LAT', 'LON'])
        outgoing_pdict_id = self._get_pdict(
            ['DENSITY', 'PRACSAL', 'TEMPWAT_L1', 'CONDWAT_L1', 'PRESWAT_L1'])
        available_fields_in = ['TIME', 'LAT', 'LON']
        available_fields_out = ['DENSITY', 'PRACSAL']
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            'in_sd_8',
            parameter_dictionary_id=incoming_pdict_id,
            available_fields=available_fields_in)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            'out_sd_8',
            parameter_dictionary_id=outgoing_pdict_id,
            available_fields=available_fields_out)
        result = self.pubsub_management.validate_stream_defs(
            incoming_stream_def_id, outgoing_stream_def_id)
        self.assertFalse(result)

        #test L2 from L0 missing L1
        incoming_pdict_id = self._get_pdict(
            ['TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'])
        outgoing_pdict_id = self._get_pdict(['DENSITY', 'PRACSAL'])
        available_fields_in = [
            'TIME', 'LAT', 'LON', 'TEMPWAT_L0', 'CONDWAT_L0', 'PRESWAT_L0'
        ]
        available_fields_out = ['DENSITY', 'PRACSAL']
        incoming_stream_def_id = self.pubsub_management.create_stream_definition(
            'in_sd_9',
            parameter_dictionary_id=incoming_pdict_id,
            available_fields=available_fields_in)
        outgoing_stream_def_id = self.pubsub_management.create_stream_definition(
            'out_sd_9',
            parameter_dictionary_id=outgoing_pdict_id,
            available_fields=available_fields_out)
        result = self.pubsub_management.validate_stream_defs(
            incoming_stream_def_id, outgoing_stream_def_id)
        self.assertFalse(result)

    def publish_on_stream(self, stream_id, msg):
        stream = self.pubsub_management.read_stream(stream_id)
        stream_route = stream.stream_route
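        # the stream's stored route bundles the exchange point and routing key
        # that a publisher needs to address this stream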
        publisher = StandaloneStreamPublisher(stream_id=stream_id,
                                              stream_route=stream_route)
        publisher.publish(msg)

    def test_stream_crud(self):
        stream_def_id = self.pubsub_management.create_stream_definition(
            'test_definition', stream_type='stream')
        topic_id = self.pubsub_management.create_topic(
            name='test_topic', exchange_point='test_exchange')
        self.exchange_cleanup.append('test_exchange')
        topic2_id = self.pubsub_management.create_topic(
            name='another_topic', exchange_point='outside')
        stream_id, route = self.pubsub_management.create_stream(
            name='test_stream',
            topic_ids=[topic_id, topic2_id],
            exchange_point='test_exchange',
            stream_definition_id=stream_def_id)
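
        # Only the topic on the stream's own exchange point should end up
        # associated with it; 'another_topic' lives on the 'outside' exchange
        # point and is expected to be skipped.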

        topics, assocs = self.resource_registry.find_objects(
            subject=stream_id, predicate=PRED.hasTopic, id_only=True)
        self.assertEquals(topics, [topic_id])

        defs, assocs = self.resource_registry.find_objects(
            subject=stream_id,
            predicate=PRED.hasStreamDefinition,
            id_only=True)
        self.assertTrue(len(defs))

        stream = self.pubsub_management.read_stream(stream_id)
        self.assertEquals(stream.name, 'test_stream')
        self.pubsub_management.delete_stream(stream_id)

        with self.assertRaises(NotFound):
            self.pubsub_management.read_stream(stream_id)

        defs, assocs = self.resource_registry.find_objects(
            subject=stream_id,
            predicate=PRED.hasStreamDefinition,
            id_only=True)
        self.assertFalse(len(defs))

        topics, assocs = self.resource_registry.find_objects(
            subject=stream_id, predicate=PRED.hasTopic, id_only=True)
        self.assertFalse(len(topics))

        self.pubsub_management.delete_topic(topic_id)
        self.pubsub_management.delete_topic(topic2_id)
        self.pubsub_management.delete_stream_definition(stream_def_id)

    def test_subscription_crud(self):
        stream_def_id = self.pubsub_management.create_stream_definition(
            'test_definition', stream_type='stream')
        stream_id, route = self.pubsub_management.create_stream(
            name='test_stream',
            exchange_point='test_exchange',
            stream_definition_id=stream_def_id)
        subscription_id = self.pubsub_management.create_subscription(
            name='test subscription',
            stream_ids=[stream_id],
            exchange_name='test_queue')
        self.exchange_cleanup.append('test_exchange')

        subs, assocs = self.resource_registry.find_objects(
            subject=subscription_id, predicate=PRED.hasStream, id_only=True)
        self.assertEquals(subs, [stream_id])

        res, _ = self.resource_registry.find_resources(restype=RT.ExchangeName,
                                                       name='test_queue',
                                                       id_only=True)
        self.assertEquals(len(res), 1)

        subs, assocs = self.resource_registry.find_subjects(
            object=subscription_id,
            predicate=PRED.hasSubscription,
            id_only=True)
        self.assertEquals(subs[0], res[0])

        subscription = self.pubsub_management.read_subscription(
            subscription_id)
        self.assertEquals(subscription.exchange_name, 'test_queue')

        self.pubsub_management.delete_subscription(subscription_id)

        subs, assocs = self.resource_registry.find_objects(
            subject=subscription_id, predicate=PRED.hasStream, id_only=True)
        self.assertFalse(len(subs))

        subs, assocs = self.resource_registry.find_subjects(
            object=subscription_id,
            predicate=PRED.hasSubscription,
            id_only=True)
        self.assertFalse(len(subs))

        self.pubsub_management.delete_stream(stream_id)
        self.pubsub_management.delete_stream_definition(stream_def_id)

    def test_move_before_activate(self):
        stream_id, route = self.pubsub_management.create_stream(
            name='test_stream', exchange_point='test_xp')

        #--------------------------------------------------------------------------------
        # Test moving before activate
        #--------------------------------------------------------------------------------

        subscription_id = self.pubsub_management.create_subscription(
            'first_queue', stream_ids=[stream_id])

        xn_ids, _ = self.resource_registry.find_resources(
            restype=RT.ExchangeName, name='first_queue', id_only=True)
        subjects, _ = self.resource_registry.find_subjects(
            object=subscription_id,
            predicate=PRED.hasSubscription,
            id_only=True)
        self.assertEquals(xn_ids[0], subjects[0])

        self.pubsub_management.move_subscription(subscription_id,
                                                 exchange_name='second_queue')

        xn_ids, _ = self.resource_registry.find_resources(
            restype=RT.ExchangeName, name='second_queue', id_only=True)
        subjects, _ = self.resource_registry.find_subjects(
            object=subscription_id,
            predicate=PRED.hasSubscription,
            id_only=True)

        self.assertEquals(len(subjects), 1)
        self.assertEquals(subjects[0], xn_ids[0])

        self.pubsub_management.delete_subscription(subscription_id)
        self.pubsub_management.delete_stream(stream_id)

    def test_move_activated_subscription(self):

        stream_id, route = self.pubsub_management.create_stream(
            name='test_stream', exchange_point='test_xp')
        #--------------------------------------------------------------------------------
        # Test moving after activate
        #--------------------------------------------------------------------------------

        subscription_id = self.pubsub_management.create_subscription(
            'first_queue', stream_ids=[stream_id])
        self.pubsub_management.activate_subscription(subscription_id)

        xn_ids, _ = self.resource_registry.find_resources(
            restype=RT.ExchangeName, name='first_queue', id_only=True)
        subjects, _ = self.resource_registry.find_subjects(
            object=subscription_id,
            predicate=PRED.hasSubscription,
            id_only=True)
        self.assertEquals(xn_ids[0], subjects[0])

        self.verified = Event()

        def verify(m, r, s):
            self.assertEquals(m, 'verified')
            self.verified.set()

        subscriber = StandaloneStreamSubscriber('second_queue', verify)
        subscriber.start()

        self.pubsub_management.move_subscription(subscription_id,
                                                 exchange_name='second_queue')
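        # after the move, the ExchangeName association should point at
        # 'second_queue' and live traffic should be delivered to the new queue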

        xn_ids, _ = self.resource_registry.find_resources(
            restype=RT.ExchangeName, name='second_queue', id_only=True)
        subjects, _ = self.resource_registry.find_subjects(
            object=subscription_id,
            predicate=PRED.hasSubscription,
            id_only=True)

        self.assertEquals(len(subjects), 1)
        self.assertEquals(subjects[0], xn_ids[0])

        publisher = StandaloneStreamPublisher(stream_id, route)
        publisher.publish('verified')

        self.assertTrue(self.verified.wait(2))

        self.pubsub_management.deactivate_subscription(subscription_id)

        self.pubsub_management.delete_subscription(subscription_id)
        self.pubsub_management.delete_stream(stream_id)

    def test_queue_cleanup(self):
        stream_id, route = self.pubsub_management.create_stream(
            'test_stream', 'xp1')
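        # pre-clean: delete any leftover 'queue1' queues from earlier runs so
        # the resource counts asserted below start from a known state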
        xn_objs, _ = self.resource_registry.find_resources(
            restype=RT.ExchangeName, name='queue1')
        for xn_obj in xn_objs:
            xn = self.container.ex_manager.create_xn_queue(xn_obj.name)
            xn.delete()
        subscription_id = self.pubsub_management.create_subscription(
            'queue1', stream_ids=[stream_id])
        xn_ids, _ = self.resource_registry.find_resources(
            restype=RT.ExchangeName, name='queue1')
        self.assertEquals(len(xn_ids), 1)

        self.pubsub_management.delete_subscription(subscription_id)

        xn_ids, _ = self.resource_registry.find_resources(
            restype=RT.ExchangeName, name='queue1')
        self.assertEquals(len(xn_ids), 0)

    def test_activation_and_deactivation(self):
        stream_id, route = self.pubsub_management.create_stream(
            'stream1', 'xp1')
        subscription_id = self.pubsub_management.create_subscription(
            'sub1', stream_ids=[stream_id])

        self.check1 = Event()

        def verifier(m, r, s):
            self.check1.set()

        subscriber = StandaloneStreamSubscriber('sub1', verifier)
        subscriber.start()

        publisher = StandaloneStreamPublisher(stream_id, route)
        publisher.publish('should not receive')

        self.assertFalse(self.check1.wait(0.25))

        self.pubsub_management.activate_subscription(subscription_id)

        publisher.publish('should receive')
        self.assertTrue(self.check1.wait(2))

        self.check1.clear()
        self.assertFalse(self.check1.is_set())

        self.pubsub_management.deactivate_subscription(subscription_id)

        publisher.publish('should not receive')
        self.assertFalse(self.check1.wait(0.5))

        self.pubsub_management.activate_subscription(subscription_id)

        publisher.publish('should receive')
        self.assertTrue(self.check1.wait(2))

        subscriber.stop()

        self.pubsub_management.deactivate_subscription(subscription_id)
        self.pubsub_management.delete_subscription(subscription_id)
        self.pubsub_management.delete_stream(stream_id)

    def test_topic_crud(self):

        topic_id = self.pubsub_management.create_topic(
            name='test_topic', exchange_point='test_xp')
        self.exchange_cleanup.append('test_xp')

        topic = self.pubsub_management.read_topic(topic_id)

        self.assertEquals(topic.name, 'test_topic')
        self.assertEquals(topic.exchange_point, 'test_xp')

        self.pubsub_management.delete_topic(topic_id)
        with self.assertRaises(NotFound):
            self.pubsub_management.read_topic(topic_id)

    def test_full_pubsub(self):

        self.sub1_sat = Event()
        self.sub2_sat = Event()

        def subscriber1(m, r, s):
            self.sub1_sat.set()

        def subscriber2(m, r, s):
            self.sub2_sat.set()

        sub1 = StandaloneStreamSubscriber('sub1', subscriber1)
        self.queue_cleanup.append(sub1.xn.queue)
        sub1.start()

        sub2 = StandaloneStreamSubscriber('sub2', subscriber2)
        self.queue_cleanup.append(sub2.xn.queue)
        sub2.start()

        log_topic = self.pubsub_management.create_topic(
            'instrument_logs', exchange_point='instruments')
        science_topic = self.pubsub_management.create_topic(
            'science_data', exchange_point='instruments')
        events_topic = self.pubsub_management.create_topic(
            'notifications', exchange_point='events')

        log_stream, route = self.pubsub_management.create_stream(
            'instrument1-logs',
            topic_ids=[log_topic],
            exchange_point='instruments')
        ctd_stream, route = self.pubsub_management.create_stream(
            'instrument1-ctd',
            topic_ids=[science_topic],
            exchange_point='instruments')
        event_stream, route = self.pubsub_management.create_stream(
            'notifications', topic_ids=[events_topic], exchange_point='events')
        raw_stream, route = self.pubsub_management.create_stream(
            'temp', exchange_point='global.data')
        self.exchange_cleanup.extend(['instruments', 'events', 'global.data'])

        subscription1 = self.pubsub_management.create_subscription(
            'subscription1',
            stream_ids=[log_stream, event_stream],
            exchange_name='sub1')
        subscription2 = self.pubsub_management.create_subscription(
            'subscription2',
            exchange_points=['global.data'],
            stream_ids=[ctd_stream],
            exchange_name='sub2')

        self.pubsub_management.activate_subscription(subscription1)
        self.pubsub_management.activate_subscription(subscription2)

        self.publish_on_stream(log_stream, 1)
        self.assertTrue(self.sub1_sat.wait(4))
        self.assertFalse(self.sub2_sat.is_set())

        self.publish_on_stream(raw_stream, 1)
        self.assertTrue(self.sub2_sat.wait(4))

        sub1.stop()
        sub2.stop()

    def test_topic_craziness(self):

        self.msg_queue = Queue()

        def subscriber1(m, r, s):
            self.msg_queue.put(m)

        sub1 = StandaloneStreamSubscriber('sub1', subscriber1)
        self.queue_cleanup.append(sub1.xn.queue)
        sub1.start()

        topic1 = self.pubsub_management.create_topic('topic1',
                                                     exchange_point='xp1')
        topic2 = self.pubsub_management.create_topic('topic2',
                                                     exchange_point='xp1',
                                                     parent_topic_id=topic1)
        topic3 = self.pubsub_management.create_topic('topic3',
                                                     exchange_point='xp1',
                                                     parent_topic_id=topic1)
        topic4 = self.pubsub_management.create_topic('topic4',
                                                     exchange_point='xp1',
                                                     parent_topic_id=topic2)
        topic5 = self.pubsub_management.create_topic('topic5',
                                                     exchange_point='xp1',
                                                     parent_topic_id=topic2)
        topic6 = self.pubsub_management.create_topic('topic6',
                                                     exchange_point='xp1',
                                                     parent_topic_id=topic3)
        topic7 = self.pubsub_management.create_topic('topic7',
                                                     exchange_point='xp1',
                                                     parent_topic_id=topic3)
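        # Tree 1 (xp1):
        #   topic1
        #   ├── topic2
        #   │   ├── topic4
        #   │   └── topic5
        #   └── topic3
        #       ├── topic6
        #       └── topic7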

        # Tree 2
        topic8 = self.pubsub_management.create_topic('topic8',
                                                     exchange_point='xp2')
        topic9 = self.pubsub_management.create_topic('topic9',
                                                     exchange_point='xp2',
                                                     parent_topic_id=topic8)
        topic10 = self.pubsub_management.create_topic('topic10',
                                                      exchange_point='xp2',
                                                      parent_topic_id=topic9)
        topic11 = self.pubsub_management.create_topic('topic11',
                                                      exchange_point='xp2',
                                                      parent_topic_id=topic9)
        topic12 = self.pubsub_management.create_topic('topic12',
                                                      exchange_point='xp2',
                                                      parent_topic_id=topic11)
        topic13 = self.pubsub_management.create_topic('topic13',
                                                      exchange_point='xp2',
                                                      parent_topic_id=topic11)
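        # Tree 2 (xp2):
        #   topic8
        #   └── topic9
        #       ├── topic10
        #       └── topic11
        #           ├── topic12
        #           └── topic13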
        self.exchange_cleanup.extend(['xp1', 'xp2'])

        stream1_id, route = self.pubsub_management.create_stream(
            'stream1',
            topic_ids=[topic7, topic4, topic5],
            exchange_point='xp1')
        stream2_id, route = self.pubsub_management.create_stream(
            'stream2', topic_ids=[topic8], exchange_point='xp2')
        stream3_id, route = self.pubsub_management.create_stream(
            'stream3', topic_ids=[topic10, topic13], exchange_point='xp2')
        stream4_id, route = self.pubsub_management.create_stream(
            'stream4', topic_ids=[topic9], exchange_point='xp2')
        stream5_id, route = self.pubsub_management.create_stream(
            'stream5', topic_ids=[topic11], exchange_point='xp2')

        subscription1 = self.pubsub_management.create_subscription(
            'sub1', topic_ids=[topic1])
        subscription2 = self.pubsub_management.create_subscription(
            'sub2', topic_ids=[topic8], exchange_name='sub1')
        subscription3 = self.pubsub_management.create_subscription(
            'sub3', topic_ids=[topic9], exchange_name='sub1')
        subscription4 = self.pubsub_management.create_subscription(
            'sub4',
            topic_ids=[topic10, topic13, topic11],
            exchange_name='sub1')
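        # topic subscriptions appear to cover the subtree rooted at the topic:
        # streams on descendant topics are delivered, streams on ancestor
        # topics are not (see the publish/assert pairs below)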
        #--------------------------------------------------------------------------------
        self.pubsub_management.activate_subscription(subscription1)

        self.publish_on_stream(stream1_id, 1)

        self.assertEquals(self.msg_queue.get(timeout=10), 1)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.1)

        self.pubsub_management.deactivate_subscription(subscription1)
        self.pubsub_management.delete_subscription(subscription1)
        #--------------------------------------------------------------------------------
        self.pubsub_management.activate_subscription(subscription2)

        self.publish_on_stream(stream2_id, 2)
        self.assertEquals(self.msg_queue.get(timeout=10), 2)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.1)

        self.pubsub_management.deactivate_subscription(subscription2)
        self.pubsub_management.delete_subscription(subscription2)

        #--------------------------------------------------------------------------------
        self.pubsub_management.activate_subscription(subscription3)

        self.publish_on_stream(stream2_id, 3)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.3)

        self.publish_on_stream(stream3_id, 4)
        self.assertEquals(self.msg_queue.get(timeout=10), 4)

        self.pubsub_management.deactivate_subscription(subscription3)
        self.pubsub_management.delete_subscription(subscription3)

        #--------------------------------------------------------------------------------
        self.pubsub_management.activate_subscription(subscription4)

        self.publish_on_stream(stream4_id, 5)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.3)

        self.publish_on_stream(stream5_id, 6)
        self.assertEquals(self.msg_queue.get(timeout=10), 6)
        with self.assertRaises(Empty):
            self.msg_queue.get(timeout=0.3)

        self.pubsub_management.deactivate_subscription(subscription4)
        self.pubsub_management.delete_subscription(subscription4)

        #--------------------------------------------------------------------------------
        sub1.stop()

        self.pubsub_management.delete_topic(topic13)
        self.pubsub_management.delete_topic(topic12)
        self.pubsub_management.delete_topic(topic11)
        self.pubsub_management.delete_topic(topic10)
        self.pubsub_management.delete_topic(topic9)
        self.pubsub_management.delete_topic(topic8)
        self.pubsub_management.delete_topic(topic7)
        self.pubsub_management.delete_topic(topic6)
        self.pubsub_management.delete_topic(topic5)
        self.pubsub_management.delete_topic(topic4)
        self.pubsub_management.delete_topic(topic3)
        self.pubsub_management.delete_topic(topic2)
        self.pubsub_management.delete_topic(topic1)

        self.pubsub_management.delete_stream(stream1_id)
        self.pubsub_management.delete_stream(stream2_id)
        self.pubsub_management.delete_stream(stream3_id)
        self.pubsub_management.delete_stream(stream4_id)
        self.pubsub_management.delete_stream(stream5_id)

    def _get_pdict(self, filter_values):
        t_ctxt = ParameterContext(
            'TIME', param_type=QuantityType(value_encoding=np.dtype('int64')))
        t_ctxt.uom = 'seconds since 01-01-1900'
        t_ctxt_id = self.dataset_management.create_parameter_context(
            name='TIME',
            parameter_context=t_ctxt.dump(),
            parameter_type='quantity<int64>',
            unit_of_measure=t_ctxt.uom)

        lat_ctxt = ParameterContext(
            'LAT',
            param_type=ConstantType(
                QuantityType(value_encoding=np.dtype('float32'))),
            fill_value=-9999)
        lat_ctxt.axis = AxisTypeEnum.LAT
        lat_ctxt.uom = 'degree_north'
        lat_ctxt_id = self.dataset_management.create_parameter_context(
            name='LAT',
            parameter_context=lat_ctxt.dump(),
            parameter_type='quantity<float32>',
            unit_of_measure=lat_ctxt.uom)

        lon_ctxt = ParameterContext(
            'LON',
            param_type=ConstantType(
                QuantityType(value_encoding=np.dtype('float32'))),
            fill_value=-9999)
        lon_ctxt.axis = AxisTypeEnum.LON
        lon_ctxt.uom = 'degree_east'
        lon_ctxt_id = self.dataset_management.create_parameter_context(
            name='LON',
            parameter_context=lon_ctxt.dump(),
            parameter_type='quantity<float32>',
            unit_of_measure=lon_ctxt.uom)

        # Independent Parameters
        # Temperature - values expected to be the decimal results of conversion from hex
        temp_ctxt = ParameterContext(
            'TEMPWAT_L0',
            param_type=QuantityType(value_encoding=np.dtype('float32')),
            fill_value=-9999)
        temp_ctxt.uom = 'deg_C'
        temp_ctxt_id = self.dataset_management.create_parameter_context(
            name='TEMPWAT_L0',
            parameter_context=temp_ctxt.dump(),
            parameter_type='quantity<float32>',
            unit_of_measure=temp_ctxt.uom)

        # Conductivity - values expected to be the decimal results of conversion from hex
        cond_ctxt = ParameterContext(
            'CONDWAT_L0',
            param_type=QuantityType(value_encoding=np.dtype('float32')),
            fill_value=-9999)
        cond_ctxt.uom = 'S m-1'
        cond_ctxt_id = self.dataset_management.create_parameter_context(
            name='CONDWAT_L0',
            parameter_context=cond_ctxt.dump(),
            parameter_type='quantity<float32>',
            unit_of_measure=cond_ctxt.uom)

        # Pressure - values expected to be the decimal results of conversion from hex
        press_ctxt = ParameterContext(
            'PRESWAT_L0',
            param_type=QuantityType(value_encoding=np.dtype('float32')),
            fill_value=-9999)
        press_ctxt.uom = 'dbar'
        press_ctxt_id = self.dataset_management.create_parameter_context(
            name='PRESWAT_L0',
            parameter_context=press_ctxt.dump(),
            parameter_type='quantity<float32>',
            unit_of_measure=press_ctxt.uom)

        # Dependent Parameters

        # TEMPWAT_L1 = (TEMPWAT_L0 / 10000) - 10
        tl1_func = '(T / 10000) - 10'
        tl1_pmap = {'T': 'TEMPWAT_L0'}
        expr = NumexprFunction('TEMPWAT_L1',
                               tl1_func, ['T'],
                               param_map=tl1_pmap)
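        # worked example: a raw L0 count of 250000 maps to
        # (250000 / 10000) - 10 = 15.0 deg_C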
        tempL1_ctxt = ParameterContext(
            'TEMPWAT_L1',
            param_type=ParameterFunctionType(function=expr),
            variability=VariabilityEnum.TEMPORAL)
        tempL1_ctxt.uom = 'deg_C'
        tempL1_ctxt_id = self.dataset_management.create_parameter_context(
            name=tempL1_ctxt.name,
            parameter_context=tempL1_ctxt.dump(),
            parameter_type='pfunc',
            unit_of_measure=tempL1_ctxt.uom)

        # CONDWAT_L1 = (CONDWAT_L0 / 100000) - 0.5
        cl1_func = '(C / 100000) - 0.5'
        cl1_pmap = {'C': 'CONDWAT_L0'}
        expr = NumexprFunction('CONDWAT_L1',
                               cl1_func, ['C'],
                               param_map=cl1_pmap)
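        # worked example: a raw L0 count of 350000 maps to
        # (350000 / 100000) - 0.5 = 3.0 S m-1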
        condL1_ctxt = ParameterContext(
            'CONDWAT_L1',
            param_type=ParameterFunctionType(function=expr),
            variability=VariabilityEnum.TEMPORAL)
        condL1_ctxt.uom = 'S m-1'
        condL1_ctxt_id = self.dataset_management.create_parameter_context(
            name=condL1_ctxt.name,
            parameter_context=condL1_ctxt.dump(),
            parameter_type='pfunc',
            unit_of_measure=condL1_ctxt.uom)

        # Equation uses p_range, which is a calibration coefficient - Fixing to 679.34040721
        #   PRESWAT_L1 = (PRESWAT_L0 * p_range / (0.85 * 65536)) - (0.05 * p_range)
        pl1_func = '(P * p_range / (0.85 * 65536)) - (0.05 * p_range)'
        pl1_pmap = {'P': 'PRESWAT_L0', 'p_range': 679.34040721}
        expr = NumexprFunction('PRESWAT_L1',
                               pl1_func, ['P', 'p_range'],
                               param_map=pl1_pmap)
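        # worked example with p_range fixed at 679.34040721: a raw L0 value of
        # 55705.6 (= 0.85 * 65536) yields 679.3404 - 33.9670 ≈ 645.37 dbar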
        presL1_ctxt = ParameterContext(
            'PRESWAT_L1',
            param_type=ParameterFunctionType(function=expr),
            variability=VariabilityEnum.TEMPORAL)
        presL1_ctxt.uom = 'dbar'
        presL1_ctxt_id = self.dataset_management.create_parameter_context(
            name=presL1_ctxt.name,
            parameter_context=presL1_ctxt.dump(),
            parameter_type='pfunc',
            unit_of_measure=presL1_ctxt.uom)

        # Density & practical salinity calucluated using the Gibbs Seawater library - available via python-gsw project:
        #       https://code.google.com/p/python-gsw/ & http://pypi.python.org/pypi/gsw/3.0.1

        # PRACSAL = gsw.SP_from_C((CONDWAT_L1 * 10), TEMPWAT_L1, PRESWAT_L1)
        owner = 'gsw'
        sal_func = 'SP_from_C'
        sal_arglist = ['C', 't', 'p']
        sal_pmap = {
            'C':
            NumexprFunction('CONDWAT_L1*10',
                            'C*10', ['C'],
                            param_map={'C': 'CONDWAT_L1'}),
            't':
            'TEMPWAT_L1',
            'p':
            'PRESWAT_L1'
        }
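        # the 'C*10' wrapper converts CONDWAT_L1 from S m-1 to the mS cm-1
        # that gsw.SP_from_C expects (1 S m-1 = 10 mS cm-1)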
        sal_kwargmap = None
        expr = PythonFunction('PRACSAL', owner, sal_func, sal_arglist,
                              sal_kwargmap, sal_pmap)
        sal_ctxt = ParameterContext('PRACSAL',
                                    param_type=ParameterFunctionType(expr),
                                    variability=VariabilityEnum.TEMPORAL)
        sal_ctxt.uom = 'g kg-1'
        sal_ctxt_id = self.dataset_management.create_parameter_context(
            name=sal_ctxt.name,
            parameter_context=sal_ctxt.dump(),
            parameter_type='pfunc',
            unit_of_measure=sal_ctxt.uom)

        # absolute_salinity = gsw.SA_from_SP(PRACSAL, PRESWAT_L1, longitude, latitude)
        # conservative_temperature = gsw.CT_from_t(absolute_salinity, TEMPWAT_L1, PRESWAT_L1)
        # DENSITY = gsw.rho(absolute_salinity, conservative_temperature, PRESWAT_L1)
        owner = 'gsw'
        abs_sal_expr = PythonFunction('abs_sal', owner, 'SA_from_SP',
                                      ['PRACSAL', 'PRESWAT_L1', 'LON', 'LAT'])
        cons_temp_expr = PythonFunction(
            'cons_temp', owner, 'CT_from_t',
            [abs_sal_expr, 'TEMPWAT_L1', 'PRESWAT_L1'])
        dens_expr = PythonFunction(
            'DENSITY', owner, 'rho',
            [abs_sal_expr, cons_temp_expr, 'PRESWAT_L1'])
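        # note how the nested PythonFunctions chain: abs_sal feeds cons_temp,
        # and both feed rho, mirroring the gsw recipe in the comments above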
        dens_ctxt = ParameterContext(
            'DENSITY',
            param_type=ParameterFunctionType(dens_expr),
            variability=VariabilityEnum.TEMPORAL)
        dens_ctxt.uom = 'kg m-3'
        dens_ctxt_id = self.dataset_management.create_parameter_context(
            name=dens_ctxt.name,
            parameter_context=dens_ctxt.dump(),
            parameter_type='pfunc',
            unit_of_measure=dens_ctxt.uom)

        ids = [
            t_ctxt_id, lat_ctxt_id, lon_ctxt_id, temp_ctxt_id, cond_ctxt_id,
            press_ctxt_id, tempL1_ctxt_id, condL1_ctxt_id, presL1_ctxt_id,
            sal_ctxt_id, dens_ctxt_id
        ]
        contexts = [
            t_ctxt, lat_ctxt, lon_ctxt, temp_ctxt, cond_ctxt, press_ctxt,
            tempL1_ctxt, condL1_ctxt, presL1_ctxt, sal_ctxt, dens_ctxt
        ]
        context_ids = [
            ids[i] for i, ctxt in enumerate(contexts)
            if ctxt.name in filter_values
        ]
        pdict_name = '_'.join(
            [ctxt.name for ctxt in contexts if ctxt.name in filter_values])

        if pdict_name in self.pdicts:
            return self.pdicts[pdict_name]
        pdict_id = self.dataset_management.create_parameter_dictionary(
            pdict_name,
            parameter_context_ids=context_ids,
            temporal_context='time')
        self.pdicts[pdict_name] = pdict_id
        return pdict_id
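        # usage note: repeated calls with the same filter list, e.g.
        # self._get_pdict(['TIME', 'TEMPWAT_L0']), return the cached
        # dictionary id instead of creating a duplicate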
Example #21
class TestObservatoryManagementServiceIntegration(IonIntegrationTestCase):
    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        #container = Container()
        #print 'starting container'
        #container.start()
        #print 'started container'

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.RR = ResourceRegistryServiceClient(node=self.container.node)
        self.RR2 = EnhancedResourceRegistryClient(self.RR)
        self.OMS = ObservatoryManagementServiceClient(node=self.container.node)
        self.org_management_service = OrgManagementServiceClient(
            node=self.container.node)
        self.IMS = InstrumentManagementServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.pubsubcli = PubsubManagementServiceClient(
            node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()
        #print 'TestObservatoryManagementServiceIntegration: started services'

        self.event_publisher = EventPublisher()

#    @unittest.skip('this exists only for debugging the launch process')
#    def test_just_the_setup(self):
#        return

    def destroy(self, resource_ids):
        self.OMS.force_delete_observatory(resource_ids.observatory_id)
        self.OMS.force_delete_subsite(resource_ids.subsite_id)
        self.OMS.force_delete_subsite(resource_ids.subsite2_id)
        self.OMS.force_delete_subsite(resource_ids.subsiteb_id)
        self.OMS.force_delete_subsite(resource_ids.subsitez_id)
        self.OMS.force_delete_platform_site(resource_ids.platform_site_id)
        self.OMS.force_delete_platform_site(resource_ids.platform_siteb_id)
        self.OMS.force_delete_platform_site(resource_ids.platform_siteb2_id)
        self.OMS.force_delete_platform_site(resource_ids.platform_site3_id)
        self.OMS.force_delete_instrument_site(resource_ids.instrument_site_id)
        self.OMS.force_delete_instrument_site(resource_ids.instrument_site2_id)
        self.OMS.force_delete_instrument_site(
            resource_ids.instrument_siteb3_id)
        self.OMS.force_delete_instrument_site(resource_ids.instrument_site4_id)

    #@unittest.skip('targeting')
    def test_observatory_management(self):
        resources = self._make_associations()

        self._do_test_find_related_sites(resources)

        self._do_test_get_sites_devices_status(resources)

        self._do_test_find_site_data_products(resources)

        self._do_test_find_related_frames_of_reference(resources)

        self._do_test_create_geospatial_point_center(resources)

        self._do_test_find_observatory_org(resources)

        self.destroy(resources)

    def _do_test_find_related_sites(self, resources):

        site_resources, site_children, _, _ = self.OMS.find_related_sites(
            resources.org_id)

        #import sys, pprint
        #print >> sys.stderr, pprint.pformat(site_resources)
        #print >> sys.stderr, pprint.pformat(site_children)

        #self.assertIn(resources.org_id, site_resources)
        self.assertIn(resources.observatory_id, site_resources)
        self.assertIn(resources.subsite_id, site_resources)
        self.assertIn(resources.subsite2_id, site_resources)
        self.assertIn(resources.platform_site_id, site_resources)
        self.assertIn(resources.instrument_site_id, site_resources)
        self.assertEquals(len(site_resources), 13)

        self.assertEquals(site_resources[resources.observatory_id].type_,
                          RT.Observatory)

        self.assertIn(resources.org_id, site_children)
        self.assertIn(resources.observatory_id, site_children)
        self.assertIn(resources.subsite_id, site_children)
        self.assertIn(resources.subsite2_id, site_children)
        self.assertIn(resources.platform_site_id, site_children)
        self.assertNotIn(resources.instrument_site_id, site_children)
        self.assertEquals(len(site_children), 9)

        self.assertIsInstance(site_children[resources.subsite_id], list)
        self.assertEquals(len(site_children[resources.subsite_id]), 2)

    def _do_test_get_sites_devices_status(self, resources):
        #bin/nosetests -s -v --nologcapture ion/services/sa/observatory/test/test_observatory_management_service_integration.py:TestObservatoryManagementServiceIntegration.test_observatory_management

        full_result_dict = self.OMS.get_sites_devices_status(
            parent_resource_ids=[resources.org_id], include_sites=True)

        result_dict = full_result_dict[resources.org_id]

        site_resources = result_dict.get("site_resources", None)
        site_children = result_dict.get("site_children", None)
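
        # 14 entries here vs. 13 from find_related_sites: querying from the
        # org level appears to add one rollup entry for the org itself
        # (an assumption inferred from the counts asserted in this test)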

        self.assertEquals(len(site_resources), 14)
        self.assertEquals(len(site_children), 9)

        full_result_dict = self.OMS.get_sites_devices_status(
            parent_resource_ids=[resources.org_id],
            include_sites=True,
            include_devices=True,
            include_status=True)

        result_dict = full_result_dict[resources.org_id]

        log.debug("RESULT DICT: %s", result_dict.keys())
        site_resources = result_dict.get("site_resources", None)
        site_children = result_dict.get("site_children", None)
        site_status = result_dict.get("site_status", None)

        self.assertEquals(len(site_resources), 14)
        self.assertEquals(len(site_children), 9)

        full_result_dict = self.OMS.get_sites_devices_status(
            parent_resource_ids=[resources.observatory_id],
            include_sites=True,
            include_devices=True,
            include_status=True)

        result_dict = full_result_dict[resources.observatory_id]

        site_resources = result_dict.get("site_resources")
        site_children = result_dict.get("site_children")
        site_status = result_dict.get("site_status")

        self.assertEquals(len(site_resources), 13)
        self.assertEquals(len(site_children), 8)

    def _do_test_find_site_data_products(self, resources):
        res_dict = self.OMS.find_site_data_products(resources.org_id)

        #import sys, pprint
        #print >> sys.stderr, pprint.pformat(res_dict)

        self.assertIsNone(res_dict['data_product_resources'])
        self.assertIn(resources.platform_device_id,
                      res_dict['device_data_products'])
        self.assertIn(resources.instrument_device_id,
                      res_dict['device_data_products'])

    #@unittest.skip('targeting')
    def _do_test_find_related_frames_of_reference(self, stuff):
        # finding subordinates gives a dict of obj lists, convert objs to ids
        def idify(adict):
            ids = {}
            for k, v in adict.iteritems():
                ids[k] = []
                for obj in v:
                    ids[k].append(obj._id)

            return ids

        # a short version of the function we're testing, with id-ify
        def short(resource_id, output_types):
            ret = self.OMS.find_related_frames_of_reference(
                resource_id, output_types)
            return idify(ret)

        #set up associations first
        stuff = self._make_associations()
        #basic traversal of tree from instrument to platform
        ids = short(stuff.instrument_site_id, [RT.PlatformSite])
        self.assertIn(RT.PlatformSite, ids)
        self.assertIn(stuff.platform_site_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_siteb_id, ids[RT.PlatformSite])
        self.assertNotIn(stuff.platform_siteb2_id, ids[RT.PlatformSite])

        #since this is the first search, just make sure the input inst_id got stripped
        if RT.InstrumentSite in ids:
            self.assertNotIn(stuff.instrument_site_id, ids[RT.InstrumentSite])

        #basic traversal of tree from platform to instrument
        ids = short(stuff.platform_siteb_id, [RT.InstrumentSite])
        self.assertIn(RT.InstrumentSite, ids)
        self.assertIn(stuff.instrument_site_id, ids[RT.InstrumentSite])
        self.assertNotIn(stuff.instrument_site2_id, ids[RT.InstrumentSite])

        #full traversal of tree from observatory down to instrument
        ids = short(stuff.observatory_id, [RT.InstrumentSite])
        self.assertIn(RT.InstrumentSite, ids)
        self.assertIn(stuff.instrument_site_id, ids[RT.InstrumentSite])

        #full traversal of tree from instrument to observatory
        ids = short(stuff.instrument_site_id, [RT.Observatory])
        self.assertIn(RT.Observatory, ids)
        self.assertIn(stuff.observatory_id, ids[RT.Observatory])

        #partial traversal, only down to platform
        ids = short(stuff.observatory_id, [RT.Subsite, RT.PlatformSite])
        self.assertIn(RT.PlatformSite, ids)
        self.assertIn(RT.Subsite, ids)
        self.assertIn(stuff.platform_site_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_siteb_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_siteb2_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_site3_id, ids[RT.PlatformSite])
        self.assertIn(stuff.subsite_id, ids[RT.Subsite])
        self.assertIn(stuff.subsite2_id, ids[RT.Subsite])
        self.assertIn(stuff.subsitez_id, ids[RT.Subsite])
        self.assertIn(stuff.subsiteb_id, ids[RT.Subsite])
        self.assertNotIn(RT.InstrumentSite, ids)

        #partial traversal, only down to platform
        ids = short(stuff.instrument_site_id, [RT.Subsite, RT.PlatformSite])
        self.assertIn(RT.PlatformSite, ids)
        self.assertIn(RT.Subsite, ids)
        self.assertIn(stuff.platform_siteb_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_site_id, ids[RT.PlatformSite])
        self.assertIn(stuff.subsite_id, ids[RT.Subsite])
        self.assertIn(stuff.subsiteb_id, ids[RT.Subsite])
        self.assertNotIn(stuff.subsite2_id, ids[RT.Subsite])
        self.assertNotIn(stuff.subsitez_id, ids[RT.Subsite])
        self.assertNotIn(stuff.platform_siteb2_id, ids[RT.PlatformSite])
        self.assertNotIn(RT.Observatory, ids)

        self.destroy(stuff)

    def _make_associations(self):
        """
        create one of each resource and association used by OMS
        to guard against problems in ion-definitions
        """

        #raise unittest.SkipTest("https://jira.oceanobservatories.org/tasks/browse/CISWCORE-41")
        """
        the tree we're creating (observatory, sites, platforms, instruments)

        rows are lettered, columns numbered.
         - first row is implied a
         - first column is implied 1
         - site Z, just because

        O--Sz
        |
        S--S2--P3--I4
        |
        Sb-Pb2-Ib3
        |
        P--I2 <- PlatformDevice, InstrumentDevice2
        |
        Pb <- PlatformDevice b
        |
        I <- InstrumentDevice

        """

        org_id = self.OMS.create_marine_facility(any_old(RT.Org))

        def create_under_org(resource_type, extra_fields=None):
            obj = any_old(resource_type, extra_fields)

            if RT.InstrumentDevice == resource_type:
                resource_id = self.IMS.create_instrument_device(obj)
            else:
                resource_id, _ = self.RR.create(obj)

            self.OMS.assign_resource_to_observatory_org(
                resource_id=resource_id, org_id=org_id)
            return resource_id

        #stuff we control
        observatory_id = create_under_org(RT.Observatory)
        subsite_id = create_under_org(RT.Subsite)
        subsite2_id = create_under_org(RT.Subsite)
        subsiteb_id = create_under_org(RT.Subsite)
        subsitez_id = create_under_org(RT.Subsite)
        platform_site_id = create_under_org(RT.PlatformSite)
        platform_siteb_id = create_under_org(RT.PlatformSite)
        platform_siteb2_id = create_under_org(RT.PlatformSite)
        platform_site3_id = create_under_org(RT.PlatformSite)
        instrument_site_id = create_under_org(RT.InstrumentSite)
        instrument_site2_id = create_under_org(RT.InstrumentSite)
        instrument_siteb3_id = create_under_org(RT.InstrumentSite)
        instrument_site4_id = create_under_org(RT.InstrumentSite)

        #stuff we associate to
        instrument_device_id = create_under_org(RT.InstrumentDevice)
        instrument_device2_id = create_under_org(RT.InstrumentDevice)
        platform_device_id = create_under_org(RT.PlatformDevice)
        platform_deviceb_id = create_under_org(RT.PlatformDevice)
        instrument_model_id, _ = self.RR.create(any_old(RT.InstrumentModel))
        platform_model_id, _ = self.RR.create(any_old(RT.PlatformModel))
        deployment_id, _ = self.RR.create(any_old(RT.Deployment))

        # marine tracking resources
        asset_id = create_under_org(RT.Asset)
        asset_type_id = create_under_org(RT.AssetType)
        event_duration_id = create_under_org(RT.EventDuration)
        event_duration_type_id = create_under_org(RT.EventDurationType)

        #observatory
        self.RR.create_association(observatory_id, PRED.hasSite, subsite_id)
        self.RR.create_association(observatory_id, PRED.hasSite, subsitez_id)

        #site
        self.RR.create_association(subsite_id, PRED.hasSite, subsite2_id)
        self.RR.create_association(subsite_id, PRED.hasSite, subsiteb_id)
        self.RR.create_association(subsite2_id, PRED.hasSite,
                                   platform_site3_id)
        self.RR.create_association(subsiteb_id, PRED.hasSite,
                                   platform_siteb2_id)
        self.RR.create_association(subsiteb_id, PRED.hasSite, platform_site_id)

        #platform_site(s)
        self.RR.create_association(platform_site3_id, PRED.hasSite,
                                   instrument_site4_id)
        self.RR.create_association(platform_siteb2_id, PRED.hasSite,
                                   instrument_siteb3_id)
        self.RR.create_association(platform_site_id, PRED.hasSite,
                                   instrument_site2_id)
        self.RR.create_association(platform_site_id, PRED.hasSite,
                                   platform_siteb_id)
        self.RR.create_association(platform_siteb_id, PRED.hasSite,
                                   instrument_site_id)

        self.RR.create_association(platform_siteb_id, PRED.hasDevice,
                                   platform_deviceb_id)
        #test network parent link
        self.OMS.assign_device_to_network_parent(platform_device_id,
                                                 platform_deviceb_id)

        self.RR.create_association(platform_site_id, PRED.hasModel,
                                   platform_model_id)
        self.RR.create_association(platform_site_id, PRED.hasDevice,
                                   platform_device_id)
        self.RR.create_association(platform_site_id, PRED.hasDeployment,
                                   deployment_id)

        #instrument_site(s)
        self.RR.create_association(instrument_site_id, PRED.hasModel,
                                   instrument_model_id)
        self.RR.create_association(instrument_site_id, PRED.hasDevice,
                                   instrument_device_id)
        self.RR.create_association(instrument_site_id, PRED.hasDeployment,
                                   deployment_id)

        self.RR.create_association(instrument_site2_id, PRED.hasDevice,
                                   instrument_device2_id)

        #platform_device
        self.RR.create_association(platform_device_id, PRED.hasModel,
                                   platform_model_id)

        #instrument_device
        self.RR.create_association(instrument_device_id, PRED.hasModel,
                                   instrument_model_id)
        self.RR.create_association(instrument_device2_id, PRED.hasModel,
                                   instrument_model_id)

        ret = DotDict()
        ret.org_id = org_id
        ret.observatory_id = observatory_id
        ret.subsite_id = subsite_id
        ret.subsite2_id = subsite2_id
        ret.subsiteb_id = subsiteb_id
        ret.subsitez_id = subsitez_id
        ret.platform_site_id = platform_site_id
        ret.platform_siteb_id = platform_siteb_id
        ret.platform_siteb2_id = platform_siteb2_id
        ret.platform_site3_id = platform_site3_id
        ret.instrument_site_id = instrument_site_id
        ret.instrument_site2_id = instrument_site2_id
        ret.instrument_siteb3_id = instrument_siteb3_id
        ret.instrument_site4_id = instrument_site4_id

        ret.instrument_device_id = instrument_device_id
        ret.instrument_device2_id = instrument_device2_id
        ret.platform_device_id = platform_device_id
        ret.platform_deviceb_id = platform_deviceb_id
        ret.instrument_model_id = instrument_model_id
        ret.platform_model_id = platform_model_id
        ret.deployment_id = deployment_id

        ret.asset_id = asset_id
        ret.asset_type_id = asset_type_id
        ret.event_duration_id = event_duration_id
        ret.event_duration_type_id = event_duration_type_id

        return ret

    #@unittest.skip("targeting")
    def test_create_observatory(self):
        observatory_obj = IonObject(RT.Observatory,
                                    name='TestFacility',
                                    description='some new mf')
        observatory_id = self.OMS.create_observatory(observatory_obj)
        self.OMS.force_delete_observatory(observatory_id)

    #@unittest.skip("targeting")
    def _do_test_create_geospatial_point_center(self, resources):
        platformsite_obj = IonObject(RT.PlatformSite,
                                     name='TestPlatformSite',
                                     description='some new TestPlatformSite')
        geo_index_obj = IonObject(OT.GeospatialBounds)
        geo_index_obj.geospatial_latitude_limit_north = 20.0
        geo_index_obj.geospatial_latitude_limit_south = 10.0
        geo_index_obj.geospatial_longitude_limit_east = 15.0
        geo_index_obj.geospatial_longitude_limit_west = 20.0
        platformsite_obj.constraint_list = [geo_index_obj]

        platformsite_id = self.OMS.create_platform_site(platformsite_obj)

        # read the platform site back to check the description and computed center
        platformsite_obj = self.OMS.read_platform_site(platformsite_id)
        self.assertEquals('some new TestPlatformSite',
                          platformsite_obj.description)
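        # the service is expected to compute geospatial_point_center as the
        # midpoint of the bounds: lat = (20.0 + 10.0) / 2 = 15.0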
        self.assertAlmostEqual(15.0,
                               platformsite_obj.geospatial_point_center.lat,
                               places=1)

        #now adjust a few params
        platformsite_obj.description = 'some old TestPlatformSite'
        geo_index_obj = IonObject(OT.GeospatialBounds)
        geo_index_obj.geospatial_latitude_limit_north = 30.0
        geo_index_obj.geospatial_latitude_limit_south = 20.0
        platformsite_obj.constraint_list = [geo_index_obj]
        update_result = self.OMS.update_platform_site(platformsite_obj)

        # read the platform site back to see if it was updated
        platformsite_obj = self.OMS.read_platform_site(platformsite_id)
        self.assertEquals('some old TestPlatformSite',
                          platformsite_obj.description)
        self.assertAlmostEqual(25.0,
                               platformsite_obj.geospatial_point_center.lat,
                               places=1)

        self.OMS.force_delete_platform_site(platformsite_id)

    #@unittest.skip("targeting")
    def _do_test_find_observatory_org(self, resources):
        log.debug("Make TestOrg")
        org_obj = IonObject(RT.Org,
                            name='TestOrg',
                            description='some new mf org')

        org_id = self.OMS.create_marine_facility(org_obj)

        log.debug("Make Observatory")
        observatory_obj = IonObject(RT.Observatory,
                                    name='TestObservatory',
                                    description='some new obs')
        observatory_id = self.OMS.create_observatory(observatory_obj)

        log.debug("assign observatory to org")
        self.OMS.assign_resource_to_observatory_org(observatory_id, org_id)

        log.debug("verify assigment")
        org_objs = self.OMS.find_org_by_observatory(observatory_id)
        self.assertEqual(1, len(org_objs))
        self.assertEqual(org_id, org_objs[0]._id)
        log.debug("org_id=<" + org_id + ">")

        log.debug("create a subsite with parent Observatory")
        subsite_obj = IonObject(RT.Subsite,
                                name='TestSubsite',
                                description='sample subsite')
        subsite_id = self.OMS.create_subsite(subsite_obj, observatory_id)
        self.assertIsNotNone(subsite_id, "Subsite not created.")

        log.debug("verify that Subsite is linked to Observatory")
        mf_subsite_assoc = self.RR.get_association(observatory_id,
                                                   PRED.hasSite, subsite_id)
        self.assertIsNotNone(mf_subsite_assoc,
                             "Subsite not connected to Observatory.")

        log.debug("add the Subsite as a resource of this Observatory")
        self.OMS.assign_resource_to_observatory_org(resource_id=subsite_id,
                                                    org_id=org_id)
        log.debug("verify that Subsite is linked to Org")
        org_subsite_assoc = self.RR.get_association(org_id, PRED.hasResource,
                                                    subsite_id)
        self.assertIsNotNone(org_subsite_assoc,
                             "Subsite not connected as resource to Org.")

        log.debug("create a logical platform with parent Subsite")
        platform_site_obj = IonObject(RT.PlatformSite,
                                      name='TestPlatformSite',
                                      description='sample logical platform')
        platform_site_id = self.OMS.create_platform_site(
            platform_site_obj, subsite_id)
        self.assertIsNotNone(platform_site_id, "PlatformSite not created.")

        log.debug("verify that PlatformSite is linked to Site")
        site_lp_assoc = self.RR.get_association(subsite_id, PRED.hasSite,
                                                platform_site_id)
        self.assertIsNotNone(site_lp_assoc,
                             "PlatformSite not connected to Site.")

        log.debug("add the PlatformSite as a resource of this Observatory")
        self.OMS.assign_resource_to_observatory_org(
            resource_id=platform_site_id, org_id=org_id)
        log.debug("verify that PlatformSite is linked to Org")
        org_lp_assoc = self.RR.get_association(org_id, PRED.hasResource,
                                               platform_site_id)
        self.assertIsNotNone(org_lp_assoc,
                             "PlatformSite not connected as resource to Org.")

        log.debug("create a logical instrument with parent logical platform")
        instrument_site_obj = IonObject(
            RT.InstrumentSite,
            name='TestInstrumentSite',
            description='sample logical instrument')
        instrument_site_id = self.OMS.create_instrument_site(
            instrument_site_obj, platform_site_id)
        self.assertIsNotNone(instrument_site_id, "InstrumentSite not created.")

        log.debug("verify that InstrumentSite is linked to PlatformSite")
        li_lp_assoc = self.RR.get_association(platform_site_id, PRED.hasSite,
                                              instrument_site_id)
        self.assertIsNotNone(li_lp_assoc,
                             "InstrumentSite not connected to PlatformSite.")

        log.debug("add the InstrumentSite as a resource of this Observatory")
        self.OMS.assign_resource_to_observatory_org(
            resource_id=instrument_site_id, org_id=org_id)
        log.debug("verify that InstrumentSite is linked to Org")
        org_li_assoc = self.RR.get_association(org_id, PRED.hasResource,
                                               instrument_site_id)
        self.assertIsNotNone(
            org_li_assoc, "InstrumentSite not connected as resource to Org.")

        log.debug(
            "remove the InstrumentSite as a resource of this Observatory")
        self.OMS.unassign_resource_from_observatory_org(
            instrument_site_id, org_id)
        log.debug("verify that InstrumentSite is linked to Org")
        assocs, _ = self.RR.find_objects(org_id,
                                         PRED.hasResource,
                                         RT.InstrumentSite,
                                         id_only=True)
        self.assertEqual(0, len(assocs))

        log.debug(
            "remove the InstrumentSite, association should drop automatically")
        self.OMS.delete_instrument_site(instrument_site_id)
        assocs, _ = self.RR.find_objects(platform_site_id,
                                         PRED.hasSite,
                                         RT.InstrumentSite,
                                         id_only=True)
        self.assertEqual(0, len(assocs))

        log.debug("remove the PlatformSite as a resource of this Observatory")
        self.OMS.unassign_resource_from_observatory_org(
            platform_site_id, org_id)
        log.debug("verify that PlatformSite is linked to Org")
        assocs, _ = self.RR.find_objects(org_id,
                                         PRED.hasResource,
                                         RT.PlatformSite,
                                         id_only=True)
        self.assertEqual(0, len(assocs))

        log.debug("remove the Site as a resource of this Observatory")
        self.OMS.unassign_resource_from_observatory_org(subsite_id, org_id)
        log.debug("verify that Site is linked to Org")
        assocs, _ = self.RR.find_objects(org_id,
                                         PRED.hasResource,
                                         RT.Subsite,
                                         id_only=True)
        self.assertEqual(0, len(assocs))

        self.RR.delete(org_id)
        self.OMS.force_delete_observatory(observatory_id)
        self.OMS.force_delete_subsite(subsite_id)
        self.OMS.force_delete_platform_site(platform_site_id)
        self.OMS.force_delete_instrument_site(instrument_site_id)

    @attr('EXT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Skip test while in CEI LAUNCH mode as it depends on modifying CFG on service side')
    def test_observatory_extensions(self):
        self.patch_cfg(CFG["container"],
                       {"extended_resources": {
                           "strip_results": False
                       }})

        obs_id = self.RR2.create(any_old(RT.Observatory))
        pss_id = self.RR2.create(
            any_old(RT.PlatformSite, dict(alt_resource_type="StationSite")))
        pas_id = self.RR2.create(
            any_old(RT.PlatformSite,
                    dict(alt_resource_type="PlatformAssemblySite")))
        pcs_id = self.RR2.create(
            any_old(RT.PlatformSite,
                    dict(alt_resource_type="PlatformComponentSite")))
        ins_id = self.RR2.create(any_old(RT.InstrumentSite))

        obs_obj = self.RR2.read(obs_id)
        pss_obj = self.RR2.read(pss_id)
        pas_obj = self.RR2.read(pas_id)
        pcs_obj = self.RR2.read(pcs_id)
        ins_obj = self.RR2.read(ins_id)

        self.RR2.create_association(obs_id, PRED.hasSite, pss_id)
        self.RR2.create_association(pss_id, PRED.hasSite, pas_id)
        self.RR2.create_association(pas_id, PRED.hasSite, pcs_id)
        self.RR2.create_association(pcs_id, PRED.hasSite, ins_id)

        extended_obs = self.OMS.get_observatory_site_extension(obs_id,
                                                               user_id=12345)
        self.assertEqual([pss_obj], extended_obs.platform_station_sites)
        self.assertEqual([pas_obj], extended_obs.platform_assembly_sites)
        self.assertEqual([pcs_obj], extended_obs.platform_component_sites)
        self.assertEqual([ins_obj], extended_obs.instrument_sites)

        extended_pss = self.OMS.get_observatory_site_extension(pss_id,
                                                               user_id=12345)
        self.assertEqual([pas_obj], extended_pss.platform_assembly_sites)
        self.assertEqual([pcs_obj], extended_pss.platform_component_sites)
        self.assertEqual([ins_obj], extended_pss.instrument_sites)

        extended_pas = self.OMS.get_observatory_site_extension(pas_id,
                                                               user_id=12345)
        self.assertEqual([pcs_obj], extended_pas.platform_component_sites)
        self.assertEqual([ins_obj], extended_pas.instrument_sites)

        extended_pcs = self.OMS.get_platform_component_site_extension(
            pcs_id, user_id=12345)
        self.assertEqual([ins_obj], extended_pcs.instrument_sites)

    #@unittest.skip("in development...")
    @attr('EXT')
    @attr('EXT1')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Skip test while in CEI LAUNCH mode as it depends on modifying CFG on service side')
    def test_observatory_org_extended(self):
        self.patch_cfg(CFG["container"],
                       {"extended_resources": {
                           "strip_results": False
                       }})

        stuff = self._make_associations()

        parsed_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)

        parsed_stream_def_id = self.pubsubcli.create_stream_definition(
            name='parsed', parameter_dictionary_id=parsed_pdict_id)
        dp_obj = IonObject(RT.DataProduct,
                           name='the parsed data',
                           description='ctd stream test')

        data_product_id1 = self.dpclient.create_data_product(
            data_product=dp_obj, stream_definition_id=parsed_stream_def_id)
        self.damsclient.assign_data_product(
            input_resource_id=stuff.instrument_device_id,
            data_product_id=data_product_id1)

        #Create a user to be used as a regular member
        member_actor_obj = IonObject(RT.ActorIdentity, name='org member actor')
        member_actor_id, _ = self.RR.create(member_actor_obj)
        assert member_actor_id
        member_actor_header = get_actor_header(member_actor_id)

        member_user_obj = IonObject(RT.UserInfo, name='org member user')
        member_user_id, _ = self.RR.create(member_user_obj)
        assert member_user_id

        self.RR.create_association(subject=member_actor_id,
                                   predicate=PRED.hasInfo,
                                   object=member_user_id)

        #Build the Service Agreement Proposal to enroll a user actor
        sap = IonObject(OT.EnrollmentProposal,
                        consumer=member_actor_id,
                        provider=stuff.org_id)

        sap_response = self.org_management_service.negotiate(
            sap, headers=member_actor_header)

        #enroll the member without using negotiation
        self.org_management_service.enroll_member(org_id=stuff.org_id,
                                                  actor_id=member_actor_id)
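        # negotiate() leaves the EnrollmentProposal open, which is what the
        # open_requests assertion below counts; enroll_member() performs the
        # enrollment directly, bypassing the negotiation workflow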

        #--------------------------------------------------------------------------------
        # Get the extended Site (platformSite)
        #--------------------------------------------------------------------------------

        try:
            extended_site = self.OMS.get_site_extension(stuff.platform_site_id)
        except:
            log.error('failed to get extended site', exc_info=True)
            raise
        log.debug("extended_site:  %r ", extended_site)
        self.assertEquals(stuff.subsiteb_id, extended_site.parent_site._id)
        self.assertEqual(2, len(extended_site.sites))
        self.assertEqual(2, len(extended_site.platform_devices))
        self.assertEqual(2, len(extended_site.platform_models))
        self.assertIn(stuff.platform_device_id,
                      [o._id for o in extended_site.platform_devices])
        self.assertIn(
            stuff.platform_model_id,
            [o._id for o in extended_site.platform_models if o is not None])

        log.debug(
            "verify that PlatformDeviceb is linked to PlatformDevice with hasNetworkParent link"
        )
        # find_associations returns a (possibly empty) list, so assert
        # non-emptiness rather than non-None
        associations = self.RR.find_associations(
            subject=stuff.platform_deviceb_id,
            predicate=PRED.hasNetworkParent,
            object=stuff.platform_device_id,
            id_only=True)
        self.assertTrue(
            associations,
            "PlatformDevice child not connected to PlatformDevice parent.")

        #--------------------------------------------------------------------------------
        # Get the extended Org
        #--------------------------------------------------------------------------------
        #test the extended resource
        extended_org = self.OMS.get_marine_facility_extension(stuff.org_id)
        log.debug("test_observatory_org_extended: extended_org:  %s ",
                  str(extended_org))
        #self.assertEqual(2, len(extended_org.instruments_deployed) )
        #self.assertEqual(1, len(extended_org.platforms_not_deployed) )
        self.assertEqual(2, extended_org.number_of_platforms)
        self.assertEqual(2, len(extended_org.platform_models))

        self.assertEqual(2, extended_org.number_of_instruments)
        self.assertEqual(2, len(extended_org.instrument_models))

        self.assertEqual(1, len(extended_org.members))
        self.assertNotEqual(extended_org.members[0]._id, member_actor_id)
        self.assertEqual(extended_org.members[0]._id, member_user_id)

        self.assertEqual(1, len(extended_org.open_requests))

        self.assertTrue(len(extended_site.deployments) > 0)
        self.assertEqual(len(extended_site.deployments),
                         len(extended_site.deployment_info))

        self.assertEqual(1, extended_org.number_of_assets)
        self.assertEqual(1, extended_org.number_of_asset_types)
        self.assertEqual(1, extended_org.number_of_event_durations)
        self.assertEqual(1, extended_org.number_of_event_duration_types)

        #test the extended resource of the ION org
        ion_org = self.org_management_service.find_org()
        extended_org = self.OMS.get_marine_facility_extension(ion_org._id,
                                                              user_id=12345)
        log.debug("test_observatory_org_extended: extended_ION_org:  %s ",
                  str(extended_org))
        self.assertEqual(1, len(extended_org.members))
        self.assertEqual(0, extended_org.number_of_platforms)
        #self.assertEqual(1, len(extended_org.sites))

        #--------------------------------------------------------------------------------
        # Get the extended Site
        #--------------------------------------------------------------------------------

        #create device state events to use for op/non-op filtering in the extended resource
        t = get_ion_ts()
        self.event_publisher.publish_event(
            ts_created=t,
            event_type='ResourceAgentStateEvent',
            origin=stuff.instrument_device_id,
            state=ResourceAgentState.STREAMING)

        self.event_publisher.publish_event(
            ts_created=t,
            event_type='ResourceAgentStateEvent',
            origin=stuff.instrument_device2_id,
            state=ResourceAgentState.INACTIVE)
        extended_site = self.OMS.get_site_extension(stuff.instrument_site2_id)

        log.debug("test_observatory_org_extended: extended_site:  %s ",
                  str(extended_site))

        self.dpclient.delete_data_product(data_product_id1)


class TestObservatoryManagementFullIntegration(IonIntegrationTestCase):

    def setUp(self):
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.RR = ResourceRegistryServiceClient()
        self.RR2 = EnhancedResourceRegistryClient(self.RR)
        self.OMS = ObservatoryManagementServiceClient()
        self.org_management_service = OrgManagementServiceClient()
        self.IMS = InstrumentManagementServiceClient()
        self.dpclient = DataProductManagementServiceClient()
        self.pubsubcli = PubsubManagementServiceClient()
        self.damsclient = DataAcquisitionManagementServiceClient()
        self.dataset_management = DatasetManagementServiceClient()
        self.data_retriever = DataRetrieverServiceClient()
        self.data_product_management = DataProductManagementServiceClient()

        self._load_stage = 0
        self._resources = {}

    def preload_ooi(self, stage=STAGE_LOAD_ASSETS):
        # Preloads OOI up to a given stage
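        # The STAGE_LOAD_* constants are assumed to be monotonically increasing
        # integers (ORGS < PARAMS < AGENTS < ASSETS); comparing _load_stage to
        # the requested stage lets each loader run at most once per test case.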

        if self._load_stage >= stage:
            return

        if self._load_stage < STAGE_LOAD_ORGS:
            log.info("--------------------------------------------------------------------------------------------------------")
            log.info("Preloading stage: %s (OOIR2 Orgs, users, roles)", STAGE_LOAD_ORGS)
            # load_OOIR2_scenario
            self.container.spawn_process("Loader", "ion.processes.bootstrap.ion_loader", "IONLoader", config=dict(
                op="load",
                scenario="OOIR2",
                path="master",
                ))
            self._load_stage = STAGE_LOAD_ORGS

        if self._load_stage < STAGE_LOAD_PARAMS:
            log.info("--------------------------------------------------------------------------------------------------------")
            log.info("Preloading stage: %s (BASE params, streamdefs)", STAGE_LOAD_PARAMS)
            # load_parameter_scenarios
            self.container.spawn_process("Loader", "ion.processes.bootstrap.ion_loader", "IONLoader", config=dict(
                op="load",
                scenario="BETA",
                path="master",
                categories="ParameterFunctions,ParameterDefs,ParameterDictionary,StreamDefinition",
                clearcols="owner_id,org_ids",
                assets="res/preload/r2_ioc/ooi_assets",
                parseooi="True",
                ))
            self._load_stage = STAGE_LOAD_PARAMS

        if self._load_stage < STAGE_LOAD_AGENTS:
            log.info("--------------------------------------------------------------------------------------------------------")
            log.info("Preloading stage: %s (OOIR2_I agents, model links)", STAGE_LOAD_AGENTS)
            # load_OOIR2_agents
            self.container.spawn_process("Loader", "ion.processes.bootstrap.ion_loader", "IONLoader", config=dict(
                op="load",
                scenario="OOIR2_I",
                path="master",
                ))
            self._load_stage = STAGE_LOAD_AGENTS

        if self._load_stage < STAGE_LOAD_ASSETS:
            log.info("--------------------------------------------------------------------------------------------------------")
            log.info("Preloading stage: %s (OOI assets linked to params, agents)", STAGE_LOAD_ASSETS)
            # load_ooi_assets
            self.container.spawn_process("Loader", "ion.processes.bootstrap.ion_loader", "IONLoader", config=dict(
                op="load",
                loadooi="True",
                path="master",
                assets="res/preload/r2_ioc/ooi_assets",
                bulk="True",
                debug="True",
                ooiuntil="9/1/2013",
                ooiparams="True",
                #excludecategories: DataProduct,DataProductLink,Deployment,Workflow,WorkflowDefinition
                ))
            self._load_stage = STAGE_LOAD_ASSETS

        # 'DataProduct,DataProductLink,WorkflowDefinition,ExternalDataProvider,ExternalDatasetModel,ExternalDataset,ExternalDatasetAgent,ExternalDatasetAgentInstance',


    @unittest.skip('Work in progress')
    def test_observatory(self):
        self._load_stage = 0
        self._resources = {}
        passing = True

        self.assertTrue(True)

        # LOAD STEP 1
        self.preload_ooi(stage=STAGE_LOAD_ORGS)

        passing &= self.orguserrole_assertions()


        # LOAD STEP 2
        self.preload_ooi(stage=STAGE_LOAD_PARAMS)

        passing &= self.parameter_assertions()


        # LOAD STEP 3
        self.preload_ooi(stage=STAGE_LOAD_AGENTS)

        passing &= self.agent_assertions()


        # LOAD STEP 4
        self.preload_ooi(stage=STAGE_LOAD_ASSETS)

        # Check OOI preloaded resources to see if they match needs for this test and for correctness
        passing &= self.sites_assertions()
        passing &= self.device_assertions()
        passing &= self.deployment_assertions()

        # Extensive tests on select RSN nodes
        passing &= self.rsn_node_checks()

        # Extensive tests on select RSN instruments
        passing &= self.check_rsn_instrument()

        passing &= self.check_rsn_instrument_data_product()

        # Extensive tests on a glider
        #passing &= self.check_glider()

        # Extensive tests on a CG assembly
        #passing &= self.check_cg_assembly()



        # Add a new instrument agent
        # Add a new instrument agent instance
        # Check DataProducts
        # Check Provenance

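        # call the base-class assertTrue directly: self.assertTrue is wrapped
        # by @assertion_wrapper to return a bool, so the unwrapped version is
        # needed here to actually fail the test when passing is False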
        IonIntegrationTestCase.assertTrue(self, passing)


    # -------------------------------------------------------------------------

    def orguserrole_assertions(self):
        passing = True

        passing &= self._check_marine_facility("MF_CGSN")
        passing &= self._check_marine_facility("MF_RSN")
        passing &= self._check_marine_facility("MF_EA")

        return passing

    def _check_marine_facility(self, preload_id):
        passing = True
        log.debug("Checking marine facility %s and associations", preload_id)

        mf_obj = self.retrieve_ooi_asset(preload_id)
        mf_id = mf_obj._id
        self._resources[preload_id] = mf_id

        passing &= self.assertEquals(mf_obj.lcstate, LCS.DEPLOYED)

        res_list, _ = self.RR.find_objects(subject=mf_id, predicate=PRED.hasMembership, id_only=True)
        passing &= self.assertTrue(len(res_list) >= 3)

        res_list, _ = self.RR.find_objects(subject=mf_id, predicate=PRED.hasRole, id_only=False)
        passing &= self.assertTrue(len(res_list) >= 5)

        passing &= self._check_role_assignments(res_list, "ORG_MANAGER")
        passing &= self._check_role_assignments(res_list, "OBSERVATORY_OPERATOR")
        passing &= self._check_role_assignments(res_list, "INSTRUMENT_OPERATOR")

        return passing

    def _check_role_assignments(self, role_list, role_name):
        passing = True
        role_obj = self._find_resource_in_list(role_list, "governance_name", role_name)
        if role_obj:
            res_list = self.RR.find_subjects(predicate=PRED.hasRole, object=role_obj._id, id_only=True)
            passing &= self.assertTrue(len(res_list) >= 1)

        return passing


    def parameter_assertions(self):
        passing = True

        pctx_list, _ = self.RR.find_resources_ext(restype=RT.ParameterContext)
        passing &= self.assertTrue(len(pctx_list) >= 10)

        pdict_list, _ = self.RR.find_resources_ext(restype=RT.ParameterDictionary)
        passing &= self.assertTrue(len(pdict_list) >= 10)

        sdef_list, _ = self.RR.find_resources_ext(restype=RT.StreamDefinition)
        passing &= self.assertTrue(len(sdef_list) >= 10)

        # Verify that a PDict has the appropriate QC parameters defined
        pdicts, _ = self.RR.find_resources_ext(restype=RT.ParameterDictionary, alt_id_ns='PRE', alt_id='DICT110')
        passing &= self.assertTrue(len(pdicts)==1)
        if not pdicts:
            return passing
        pdict = pdicts[0]

        # According to the latest SAF, density should NOT have trend

        parameters, _ = self.RR.find_objects(pdict, PRED.hasParameterContext)
        names = [i.name for i in parameters if i.name.startswith('density')]
        passing &= self.assertTrue('density_trndtst_qc' not in names)

        return passing

    def agent_assertions(self):
        passing = True

        # TODO: More tests?

        return passing

    def sites_assertions(self):
        passing = True
        observatory_list, _ = self.RR.find_resources_ext(restype=RT.Observatory)
        passing &= self.assertTrue(len(observatory_list) >= 40)
        for obs in observatory_list:
            passing &= self.assertEquals(obs.lcstate, LCS.DEPLOYED)

        platform_site_list, _ = self.RR.find_resources(RT.PlatformSite, id_only=False)
        log.debug('platform sites: %s', [ps.name for ps in platform_site_list])
        passing &= self.assertTrue(len(platform_site_list) >= 30)

        return passing

    def device_assertions(self):
        passing = True
        platform_device_list, _ = self.RR.find_resources(RT.PlatformDevice, id_only=False)
        passing &= self.assertTrue(len(platform_device_list) >= 30)
        for pdev in platform_device_list:
            log.debug('platform device: %s', pdev.name)
            passing &= self.assertEquals(pdev.lcstate, LCS.PLANNED)

        platform_agent_list, _ = self.RR.find_resources(RT.PlatformAgent, id_only=False)
        passing &= self.assertTrue(len(platform_agent_list) >= 2)
        for pagent in platform_agent_list:
            log.debug('platform agent: %s', pagent.name)
            passing &= self.assertEquals(pagent.lcstate, LCS.DEPLOYED)

        instrument_agent_list, _ = self.RR.find_resources(RT.InstrumentAgent, id_only=False)
        passing &= self.assertTrue(len(instrument_agent_list) >= 3)
        for iagent in instrument_agent_list:
            log.debug('instrument agent: %s', iagent.name)
            passing &= self.assertEquals(iagent.lcstate, LCS.DEPLOYED)

            model_list, _ = self.RR.find_objects(subject=iagent._id, predicate=PRED.hasModel, id_only=True)
            passing &= self.assertTrue(len(model_list) >= 1, "IA %s" % iagent.name)

        return passing

    def deployment_assertions(self):
        passing = True
        deployment_list, _ = self.RR.find_resources(RT.Deployment, id_only=False)
        passing &= self.assertTrue(len(deployment_list) >= 30)
        for deploy in deployment_list:
            log.debug('deployment: %s', deploy.name)
            passing &= self.assertEquals(deploy.lcstate, LCS.DEPLOYED)
        return passing

    def rsn_node_checks(self):
        """
        Current preload creates:
        - PlatformDevice in PLANNED
        - PlatformSite in DEPLOYED
        - Deployment in DEPLOYED
        - Deployment is NOT activated
        """
        passing = True

        dp_obj = self.retrieve_ooi_asset("CE04OSHY-PN01C_DEP")

        passing &= self.assertEquals(dp_obj.lcstate, LCS.DEPLOYED)
        passing &= self.assertEquals(dp_obj.availability, AS.AVAILABLE)
        log.debug('test_observatory  retrieve CE04OSHY-PN01C_DEP deployment:  %s', dp_obj)

        # Check existing RSN node CE04OSHY-LV01C Deployment (DEPLOYED lcstate)
        CE04OSHY_LV01C_deployment = self.retrieve_ooi_asset('CE04OSHY-LV01C_DEP')
        passing &= self.assertEquals(CE04OSHY_LV01C_deployment.lcstate, LCS.DEPLOYED)
        passing &= self.assertEquals(CE04OSHY_LV01C_deployment.availability, AS.AVAILABLE)

        #self.dump_deployment(CE04OSHY_LV01C_deployment._id)
        log.debug('test_observatory  retrieve RSN node CE04OSHY-LV01C Deployment:  %s', CE04OSHY_LV01C_deployment)

        CE04OSHY_LV01C_device = self.retrieve_ooi_asset('CE04OSHY-LV01C_PD')

        # Set CE04OSHY-LV01C device to DEVELOPED state
        passing &= self.transition_lcs_then_verify(resource_id=CE04OSHY_LV01C_device._id, new_lcs_state=LCE.DEVELOP, verify=LCS.DEVELOPED)

        # Set CE04OSHY-LV01C device to INTEGRATED state
        passing &= self.transition_lcs_then_verify(resource_id=CE04OSHY_LV01C_device._id, new_lcs_state=LCE.INTEGRATE, verify=LCS.INTEGRATED)

        # Set CE04OSHY-LV01C device to DEPLOYED state
        passing &= self.transition_lcs_then_verify(resource_id=CE04OSHY_LV01C_device._id, new_lcs_state=LCE.DEPLOY, verify=LCS.DEPLOYED)

        # Set CE04OSHY-LV01C Deployment to DEPLOYED state
        # NOTE: Deployments are created in DEPLOYED state, currently
        #self.transition_lcs_then_verify(resource_id=CE04OSHY_LV01C_deployment._id, new_lcs_state=LCE.DEPLOY, verify='DEPLOYED')
        
        # Activate Deployment for CE04OSHY-LV01C
        self.OMS.activate_deployment(CE04OSHY_LV01C_deployment._id)
        log.debug('---------    activate_deployment CE04OSHY_LV01C_deployment -------------- ')
        self.dump_deployment(CE04OSHY_LV01C_deployment._id)
        passing &= self.validate_deployment_activated(CE04OSHY_LV01C_deployment._id)
        
        # (optional) Start CE04OSHY-LV01C platform agent with simulator

        # NOTE: DataProduct is generated in DEPLOYED state
        # # Set DataProduct for CE04OSHY-LV01C platform to DEPLOYED state
        # output_data_product_ids, assns =self.RR.find_objects(subject=CE04OSHY_LV01C_device._id, predicate=PRED.hasOutputProduct, id_only=True)
        # if output_data_product_ids:
        #     #self.assertEquals(len(child_devs), 3)
        #     for output_data_product_id in output_data_product_ids:
        #         log.debug('DataProduct for CE04OSHY-LV01C platform:  %s', output_data_product_id)
        #         self.transition_lcs_then_verify(resource_id=output_data_product_id, new_lcs_state=LCE.DEPLOY, verify='DEPLOYED')

        # Check events for CE04OSHY-LV01C platform

        # Check existing CE04OSBP-LJ01C Deployment (PLANNED lcstate)
#        dp_list, _  = self.RR.find_resources_ext(alt_id_ns="PRE", alt_id="CE04OSBP-LJ01C_DEP")
#        self.assertEquals(len(dp_list), 1)
#        CE04OSHY_LV01C_deployment = dp_list[0]
#        self.assertEquals(CE04OSHY_LV01C_deployment.lcstate, 'PLANNED')
#        log.debug('test_observatory  retrieve RSN node CE04OSBP-LJ01C Deployment:  %s', CE04OSHY_LV01C_deployment)


        # Set CE04OSBP-LJ01C Deployment to DEPLOYED state

        # Update description and other attributes for CE04OSBP-LJ01C device resource

        # Create attachment (JPG image) for CE04OSBP-LJ01C device resource
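        # A minimal sketch of what the attachment step might look like
        # (commented out; assumes pyon's resource-registry attachment API and
        # a locally available JPG file; names here are hypothetical):
        #     device_obj = self.retrieve_ooi_asset('CE04OSBP-LJ01C_PD')
        #     att = IonObject(RT.Attachment,
        #                     name='CE04OSBP-LJ01C.jpg',
        #                     description='CE04OSBP-LJ01C device photo',
        #                     content=open('CE04OSBP-LJ01C.jpg', 'rb').read(),
        #                     content_type='image/jpeg')
        #     self.RR.create_attachment(device_obj._id, att)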

        # Activate Deployment for CE04OSBP-LJ01C

        # (optional) Add/register CE04OSBP-LJ01C platform agent to parent agent

        # (optional) Start CE04OSBP-LJ01C platform agent

        return passing
    
    def check_rsn_instrument(self):
        """
        Check existing RSN instrument CE04OSBP-LJ01C-06-CTDBPO108 Deployment (PLANNED lcstate)
        Current preload creates:
        - InstrumentDevice in PLANNED
        - InstrumentSite in DEPLOYED
        - Deployment in DEPLOYED
        - Deployment is activated
        """

        passing = True
        CE04OSBP_LJ01C_06_CTDBPO108_deploy = self.retrieve_ooi_asset('CE04OSBP-LJ01C-06-CTDBPO108_DEP')
        self.dump_deployment(CE04OSBP_LJ01C_06_CTDBPO108_deploy._id)
        #passing &= self.assertEquals(CE04OSBP_LJ01C_06_CTDBPO108_deploy.lcstate, 'PLANNED')

        # Set CE04OSBP-LJ01C-06-CTDBPO108 device to DEVELOPED state
        CE04OSBP_LJ01C_06_CTDBPO108_device = self.retrieve_ooi_asset('CE04OSBP-LJ01C-06-CTDBPO108_ID')
        passing &= self.transition_lcs_then_verify(resource_id=CE04OSBP_LJ01C_06_CTDBPO108_device._id, new_lcs_state=LCE.DEVELOP, verify='DEVELOPED')

        # Set CE04OSBP-LJ01C-06-CTDBPO108 device to INTEGRATED state
        passing &= self.transition_lcs_then_verify(resource_id=CE04OSBP_LJ01C_06_CTDBPO108_device._id, new_lcs_state=LCE.INTEGRATE, verify='INTEGRATED')

        # Set CE04OSBP-LJ01C-06-CTDBPO108 device to DEPLOYED state
        passing &= self.transition_lcs_then_verify(resource_id=CE04OSBP_LJ01C_06_CTDBPO108_device._id, new_lcs_state=LCE.DEPLOY, verify='DEPLOYED')

        # Set CE04OSBP-LJ01C-06-CTDBPO108 Deployment to DEPLOYED state
        #self.transition_lcs_then_verify(resource_id=CE04OSBP_LJ01C_06_CTDBPO108_deploy._id, new_lcs_state=LCE.DEPLOY, verify='DEPLOYED')

        # Activate Deployment for CE04OSBP-LJ01C-06-CTDBPO108 instrument
        log.debug('---------    activate_deployment CE04OSBP-LJ01C-06-CTDBPO108 deployment -------------- ')
        self.OMS.activate_deployment(CE04OSBP_LJ01C_06_CTDBPO108_deploy._id)
        passing &= self.validate_deployment_activated(CE04OSBP_LJ01C_06_CTDBPO108_deploy._id)

        # (optional) Add/register CE04OSBP-LJ01C-06-CTDBPO108 instrument agent to parent agent

        # (optional) Start CE04OSBP-LJ01C-06-CTDBPO108 instrument agent with simulator

        # Set all DataProducts for CE04OSBP-LJ01C-06-CTDBPO108 to DEPLOYED state


        # (optional) Create a substitute Deployment for site CE04OSBP-LJ01C-06-CTDBPO108 with a comparable device
        CE04OSBP_LJ01C_06_CTDBPO108_isite = self.retrieve_ooi_asset('CE04OSBP-LJ01C-06-CTDBPO108')

        ## create device here: retrieve CTD Mooring on Mooring Riser 001 - similar?
        GP03FLMB_RI001_10_CTDMOG999_ID_idevice = self.retrieve_ooi_asset('GP03FLMB-RI001-10-CTDMOG999_ID')

        deploy_id_2 = self.create_basic_deployment(name='CE04OSBP-LJ01C-06-CTDBPO108_DEP2', description='substitute Deployment for site CE04OSBP-LJ01C-06-CTDBPO108 with a comparable device')
        self.IMS.deploy_instrument_device(instrument_device_id=GP03FLMB_RI001_10_CTDMOG999_ID_idevice._id, deployment_id=deploy_id_2)
        self.OMS.deploy_instrument_site(instrument_site_id=CE04OSBP_LJ01C_06_CTDBPO108_isite._id, deployment_id=deploy_id_2)
        self.dump_deployment(deploy_id_2)

        # (optional) Activate this second deployment - check first deployment is deactivated
        self.OMS.deactivate_deployment(CE04OSBP_LJ01C_06_CTDBPO108_deploy._id)
        passing &= self.validate_deployment_deactivated(CE04OSBP_LJ01C_06_CTDBPO108_deploy._id)


        # log.debug('Activate deployment deploy_id_2')
        # self.get_deployment_ids(deploy_id_2)
        # self.dump_deployment(deploy_id_2, "deploy_id_2")
        # self.OMS.activate_deployment(deploy_id_2)
        # passing &= self.validate_deployment_deactivated(CE04OSBP_LJ01C_06_CTDBPO108_deploy._id)
        #
        # # (optional) Set first CE04OSBP-LJ01C-06-CTDBPO108 Deployment to INTEGRATED state
        # passing &= self.transition_lcs_then_verify(resource_id=CE04OSBP_LJ01C_06_CTDBPO108_deploy._id, new_lcs_state=LCE.INTEGRATE, verify='INTEGRATED')
        #
        # # Set first CE04OSBP-LJ01C-06-CTDBPO108 device to INTEGRATED state
        # passing &= self.transition_lcs_then_verify(resource_id=CE04OSBP_LJ01C_06_CTDBPO108_device._id, new_lcs_state=LCE.INTEGRATE, verify='INTEGRATED')
        #
        #
        # # (optional) Create a third Deployment for site CE04OSBP-LJ01C-06-CTDBPO108 with a same device from first deployment
        # deploy_id_3 = self.create_basic_deployment(name='CE04OSBP-LJ01C-06-CTDBPO108_DEP3', description='substitute Deployment for site CE04OSBP-LJ01C-06-CTDBPO108 with same device as first')
        # self.IMS.deploy_instrument_device(instrument_device_id=GP03FLMB_RI001_10_CTDMOG999_ID_idevice._id, deployment_id=deploy_id_3)
        # self.OMS.deploy_instrument_site(instrument_site_id=CE04OSBP_LJ01C_06_CTDBPO108_isite._id, deployment_id=deploy_id_3)
        # self.dump_deployment(deploy_id_3)
        #
        #
        # # Set first CE04OSBP-LJ01C-06-CTDBPO108 device to DEPLOYED state
        # passing &= self.transition_lcs_then_verify(resource_id=CE04OSBP_LJ01C_06_CTDBPO108_device._id, new_lcs_state=LCE.DEPLOY, verify='DEPLOYED')
        #
        # # (optional) Activate this third deployment - check second deployment is deactivated
        # log.debug('Activate deployment deploy_id_3')
        # self.dump_deployment(deploy_id_3)
        # self.OMS.activate_deployment(deploy_id_3)
        # #todo: check second deployment is deactivated

        return passing




    def check_data_product_reference(self, reference_designator, output=None):
        # default to None to avoid the shared-mutable-default-argument pitfall
        if output is None:
            output = []
        passing = True

        data_product_ids, _ = self.RR.find_resources_ext(alt_id_ns='PRE', alt_id='%s_DPI1' % reference_designator, id_only=True) # Assuming DPI1 is parsed
        passing &= self.assertEquals(len(data_product_ids), 1)

        if not data_product_ids:
            return passing

        # Let's go ahead and activate it
        data_product_id = data_product_ids[0]
        self.dpclient.activate_data_product_persistence(data_product_id)
        self.addCleanup(self.dpclient.suspend_data_product_persistence, data_product_id)

        dataset_ids, _ = self.RR.find_objects(data_product_id, PRED.hasDataset, id_only=True)
        passing &= self.assertEquals(len(dataset_ids), 1)
        if not dataset_ids:
            return passing
        dataset_id = dataset_ids[0]

        stream_def_ids, _ = self.RR.find_objects(data_product_id, PRED.hasStreamDefinition, id_only=True)
        passing &= self.assertEquals(len(stream_def_ids), 1)
        if not stream_def_ids:
            return passing
        stream_def_id = stream_def_ids[0]
        output.append((data_product_id, stream_def_id, dataset_id))
        return passing

    def check_tempsf_instrument_data_product(self, reference_designator):
        passing = True
        info_list = []
        passing &= self.check_data_product_reference(reference_designator, info_list)
        if not passing: return passing
        data_product_id, stream_def_id, dataset_id = info_list.pop()

        now = time.time()
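        # NTP time = Unix time + 2208988800 s (the offset between the
        # 1900-01-01 NTP epoch and the 1970-01-01 Unix epoch)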
        ntp_now = now + 2208988800

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = [ntp_now]
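        # TMPSF reports 24 thermistor channels per record, hence the single
        # nested 24-element array value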
        rdt['temperature'] = [[ 25.3884, 26.9384, 24.3394, 23.3401, 22.9832,
            29.4434, 26.9873, 15.2883, 16.3374, 14.5883, 15.7253, 18.4383,
            15.3488, 17.2993, 10.2111, 11.5993, 10.9345, 9.4444, 9.9876,
            10.9834, 11.0098, 5.3456, 4.2994, 4.3009]]

        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)
        ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
        passing &= self.assertTrue(dataset_monitor.event.wait(20))
        if not passing: return passing

        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        passing &= self.assert_array_almost_equal(rdt['time'], [ntp_now])
        passing &= self.assert_array_almost_equal(rdt['temperature'], [[
            25.3884, 26.9384, 24.3394, 23.3401, 22.9832, 29.4434, 26.9873,
            15.2883, 16.3374, 14.5883, 15.7253, 18.4383, 15.3488, 17.2993,
            10.2111, 11.5993, 10.9345, 9.4444, 9.9876, 10.9834, 11.0098,
            5.3456, 4.2994, 4.3009]])
        return passing
    
    def check_trhph_instrument_data_products(self, reference_designator):
        passing = True
        info_list = []
        passing &= self.check_data_product_reference(reference_designator, info_list)
        if not passing:
            return passing

        data_product_id, stream_def_id, dataset_id = info_list.pop()

        pdict = self.RR2.find_parameter_dictionary_of_stream_definition_using_has_parameter_dictionary(stream_def_id)
        passing &= self.assertEquals(pdict.name, 'trhph_sample')

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)

        # calibration constants
        a = 1.98e-9
        b = -2.45e-6
        c = 9.28e-4
        d = -0.0888
        e = 0.731

        V_s = 1.506
        V_c = 0.
        T = 11.8

        r1 = 0.906
        r2 = 4.095
        r3 = 4.095

        ORP_V = 1.806
        Cl = np.nan

        offset = 2008
        gain = 4.0
        # Normally this would be 50 per the DPS but the precision is %4.0f which truncates the values to the nearest 1...
        ORP = ((ORP_V * 1000.) - offset) / gain

        ntp_now = time.time() + 2208988800

        rdt['cc_a'] = [a]
        rdt['cc_b'] = [b]
        rdt['cc_c'] = [c]
        rdt['cc_d'] = [d]
        rdt['cc_e'] = [e]
        rdt['ref_temp_volts'] = [V_s]
        rdt['resistivity_temp_volts'] = [V_c]
        rdt['eh_sensor'] = [ORP_V]
        rdt['resistivity_5'] = [r1]
        rdt['resistivity_x1'] = [r2]
        rdt['resistivity_x5'] = [r3]
        rdt['cc_offset'] = [offset]
        rdt['cc_gain'] = [gain]
        rdt['time'] = [ntp_now]

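        # note: 'vent_fluid_temperaure' (sic) is used consistently below and is
        # presumed to match the parameter name as defined in preload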
        passing &= self.assert_array_almost_equal(rdt['vent_fluid_temperaure'], [T], 2)
        passing &= self.assert_array_almost_equal(rdt['vent_fluid_chloride_conc'], [Cl], 4)
        passing &= self.assert_array_almost_equal(rdt['vent_fluid_orp'], [ORP], 4)

        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)
        ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
        passing &= self.assertTrue(dataset_monitor.event.wait(60))
        if not passing: return passing

        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        
        passing &= self.assert_array_almost_equal(rdt['vent_fluid_temperaure'], [T], 2)
        passing &= self.assert_array_almost_equal(rdt['vent_fluid_chloride_conc'], [Cl], 4)
        passing &= self.assert_array_almost_equal(rdt['vent_fluid_orp'], [ORP], 4)

        return passing

    def check_vel3d_instrument_data_products(self, reference_designator):
        passing = True
        info_list = []
        passing &= self.check_data_product_reference(reference_designator, info_list)
        if not passing:
            return passing
        data_product_id, stream_def_id, dataset_id = info_list.pop()

        pdict = self.RR2.find_parameter_dictionary_of_stream_definition_using_has_parameter_dictionary(stream_def_id)
        self.assertEquals(pdict.name, 'vel3d_b_sample')

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        lat = 14.6846
        lon = -51.044
        ts = np.array([3319563600, 3319567200, 3319570800, 3319574400,
            3319578000, 3319581600, 3319585200, 3319588800, 3319592400,
            3319596000], dtype=np.float64)

        ve = np.array([ -3.2,  0.1,  0. ,  2.3, -0.1,  5.6,  5.1,  5.8,
            8.8, 10.3])

        vn = np.array([ 18.2,  9.9, 12. ,  6.6, 7.4,  3.4, -2.6,  0.2,
            -1.5,  4.1])
        vu = np.array([-1.1, -0.6, -1.4, -2, -1.7, -2, 1.3, -1.6, -1.1, -4.5])
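        # expected outputs are the cm/s inputs scaled to m/s; east/north are
        # additionally rotated by the magnetic declination at (lat, lon), which
        # preserves the horizontal speed; up is scaled only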
        ve_expected = np.array([-0.085136, -0.028752, -0.036007, 0.002136,
            -0.023158, 0.043218, 0.056451, 0.054727, 0.088446, 0.085952])
        vn_expected = np.array([ 0.164012,  0.094738,  0.114471,  0.06986,  0.07029,
                    0.049237, -0.009499,  0.019311,  0.012096,  0.070017])
        vu_expected = np.array([-0.011, -0.006, -0.014, -0.02, -0.017, -0.02,
            0.013, -0.016, -0.011, -0.045])

        
        rdt['time'] = ts
        rdt['lat'] = [lat] * 10
        rdt['lon'] = [lon] * 10
        rdt['turbulent_velocity_east'] = ve
        rdt['turbulent_velocity_north'] = vn
        rdt['turbulent_velocity_up'] = vu

        passing &= self.assert_array_almost_equal(rdt['eastward_turbulent_velocity'],
                ve_expected)
        passing &= self.assert_array_almost_equal(rdt['northward_turbulent_velocity'],
                vn_expected)
        passing &= self.assert_array_almost_equal(rdt['upward_turbulent_velocity'],
                vu_expected)


        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)
        ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
        passing &= self.assertTrue(dataset_monitor.event.wait(20))
        if not passing: return passing

        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        passing &= self.assert_array_almost_equal(rdt['eastward_turbulent_velocity'],
                ve_expected)
        passing &= self.assert_array_almost_equal(rdt['northward_turbulent_velocity'],
                vn_expected)
        passing &= self.assert_array_almost_equal(rdt['upward_turbulent_velocity'],
                vu_expected)
        return passing

    
    def check_presta_instrument_data_products(self, reference_designator):
        # Check the parsed data product to make sure it has everything it
        # needs and can be published, persisted, etc.

        # Absolute Pressure (SFLPRES_L0) is what comes off the instrument;
        # SFLPRES_L1 is a parameter function (pfunc).
        # Let's go ahead and publish some fake data!!!
        # According to https://alfresco.oceanobservatories.org/alfresco/d/d/workspace/SpacesStore/63e16865-9d9e-4b11-b0b3-d5658faa5080/1341-00230_Data_Product_Spec_SFLPRES_OOI.pdf
        # Appendix A, Example 1: p_psia_tide = 14.8670 should yield a tide of 10.2504.
        passing = True
        

        info_list = []
        passing &= self.check_data_product_reference(reference_designator, info_list)
        if not passing:
            return passing
        data_product_id, stream_def_id, dataset_id = info_list.pop()

        now = time.time()
        ntp_now = now + 2208988800.

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = [ntp_now]
        rdt['absolute_pressure'] = [14.8670]
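        # 10.2504 dbar is consistent with a straight psia-to-dbar conversion:
        # 14.8670 psia * 0.689475 dbar/psia ~= 10.2504 dbar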
        passing &= self.assert_array_almost_equal(rdt['seafloor_pressure'], [10.2504], 4)
        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)

        ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
        passing &= self.assertTrue(dataset_monitor.event.wait(20)) # Bumped to 20 to keep buildbot happy
        if not passing: return passing

        granule = self.data_retriever.retrieve(dataset_id)

        rdt = RecordDictionaryTool.load_from_granule(granule)
        passing &= self.assert_array_almost_equal(rdt['time'], [ntp_now])
        passing &= self.assert_array_almost_equal(rdt['seafloor_pressure'], [10.2504], 4)
        passing &= self.assert_array_almost_equal(rdt['absolute_pressure'], [14.8670], 4)

        return passing

    def check_rsn_instrument_data_product(self):
        passing = True
        # for RS03AXBS-MJ03A-06-PRESTA301 (PREST-A) there are a few listed data products
        # Parsed, Engineering
        # SFLPRES-0 SFLPRES-1
        # Check for the two data products and make sure they have the proper parameters
        # SFLPRES-0 should 
        data_products, _ = self.RR.find_resources_ext(alt_id_ns='PRE', alt_id='RS03AXBS-MJ03A-06-PRESTA301_SFLPRES_L0_DPID', id_only=True)
        passing &= self.assertTrue(len(data_products) == 1)
        if not data_products:
            return passing

        data_product_id = data_products[0]
        
        stream_defs, _ = self.RR.find_objects(data_product_id, PRED.hasStreamDefinition, id_only=False)
        passing &= self.assertTrue(len(stream_defs) == 1)
        if not stream_defs:
            return passing

        # Assert that the stream definition has the correct reference designator
        stream_def = stream_defs[0]
        passing &= self.assertEquals(stream_def.stream_configuration['reference_designator'], 'RS03AXBS-MJ03A-06-PRESTA301')

        # Get the pdict and make sure that the parameters corresponding to the available fields 
        # begin with the appropriate data product identifier

        pdict_ids, _ = self.RR.find_objects(stream_def, PRED.hasParameterDictionary, id_only=True)
        passing &= self.assertEquals(len(pdict_ids), 1)
        if not pdict_ids:
            return passing

        pdict_id = pdict_ids[0]
        
        pdict = DatasetManagementService.get_parameter_dictionary(pdict_id)
        available_params = [pdict.get_context(i) for i in pdict.keys() if i in stream_def.available_fields]
        for p in available_params:
            if p.name == 'time':  # Ignore the domain parameter
                continue
            passing &= self.assertTrue(p.ooi_short_name.startswith('SFLPRES'))
        passing &= self.check_presta_instrument_data_products('RS01SLBS-MJ01A-06-PRESTA101')
        passing &= self.check_vel3d_instrument_data_products( 'RS01SLBS-MJ01A-12-VEL3DB101')
        passing &= self.check_presta_instrument_data_products('RS03AXBS-MJ03A-06-PRESTA301')
        passing &= self.check_vel3d_instrument_data_products( 'RS03AXBS-MJ03A-12-VEL3DB301')
        passing &= self.check_tempsf_instrument_data_product( 'RS03ASHS-MJ03B-07-TMPSFA301')
        passing &= self.check_vel3d_instrument_data_products( 'RS03INT2-MJ03D-12-VEL3DB304')
        passing &= self.check_trhph_instrument_data_products( 'RS03INT1-MJ03C-10-TRHPHA301')

        self.data_product_management.activate_data_product_persistence(data_product_id)
        dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(data_product_id)
        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        self.assert_array_almost_equal(rdt['seafloor_pressure'], [10.2504], 4)
        self.assert_array_almost_equal(rdt['absolute_pressure'], [14.8670], 4)
        self.data_product_management.suspend_data_product_persistence(data_product_id) # Should do nothing and not raise anything

        
        return passing


    def check_glider(self):
        """
        Check that glider GP05MOAS-GL001 assembly is defined by OOI preload (3 instruments).
        """
        passing = True
        GP05MOAS_GL001_device = self.retrieve_ooi_asset('GP05MOAS-GL001_PD')
        child_devs, assns = self.RR.find_objects(subject=GP05MOAS_GL001_device._id, predicate=PRED.hasDevice, id_only=True)
        passing &= self.assertEquals(len(child_devs), 3)

        # Set GP05MOAS-GL001 Deployment to DEPLOYED
        GP05MOAS_GL001_deploy = self.retrieve_ooi_asset('GP05MOAS-GL001_DEP')
        passing &= self.transition_lcs_then_verify(resource_id=GP05MOAS_GL001_deploy._id, new_lcs_state=LCE.DEPLOY, verify='DEPLOYED')

        # Activate Deployment for GP05MOAS-GL001
        #self.OMS.activate_deployment(GP05MOAS_GL001_deploy._id)

        # Deactivate Deployment for GP05MOAS-GL001
        #self.OMS.deactivate_deployment(GP05MOAS_GL001_deploy._id)


        # Create a new Deployment resource X without any assignment
        x_deploy_id = self.create_basic_deployment(name='X_Deployment', description='new Deployment resource X without any assignment')

        # Assign Deployment X to site GP05MOAS-GL001
        GP05MOAS_GL001_psite = self.retrieve_ooi_asset('GP05MOAS-GL001')
        self.OMS.deploy_platform_site(GP05MOAS_GL001_psite._id, x_deploy_id)

        # Assign Deployment X to first device for GP05MOAS-GL001
        GP05MOAS_GL001_device = self.retrieve_ooi_asset('GP05MOAS-GL001_PD')
        self.IMS.deploy_platform_device(GP05MOAS_GL001_device._id, x_deploy_id)

        # Set GP05MOAS-GL001 Deployment to PLANNED state
        #self.transition_lcs_then_verify(resource_id=x_deploy_id, new_lcs_state=LCE.PLAN, verify='PLANNED')
        # ??? already in planned

        # Set second GP05MOAS-GL001 Deployment to DEPLOYED
        passing &= self.transition_lcs_then_verify(resource_id=x_deploy_id, new_lcs_state=LCE.DEPLOY, verify='DEPLOYED')
        self.dump_deployment(x_deploy_id)

        # Activate second Deployment for GP05MOAS-GL001
        #self.OMS.activate_deployment(x_deploy_id)

        # Deactivate second Deployment for GP05MOAS-GL001
        #self.OMS.deactivate_deployment(x_deploy_id)
        return passing


    def check_cg_assembly(self):
        passing = True

        # Set several CE01ISSM-RI002-* instrument devices to DEVELOPED state

        # Assemble several CE01ISSM-RI002-* instruments to a CG CE01ISSM-RI002 component platform

        # Set several CE01ISSM-RI002-* instrument devices to INTEGRATED state

        # Assemble CE01ISSM-RI002 platform to CG CE01ISSM-LM001 station platform

        # Set CE01ISSM-RI002 component device to INTEGRATED state

        # Set CE01ISSM-LM001 station device to INTEGRATED state

        # Set CE01ISSM-LM001 station device to DEPLOYED state (children maybe too?)

        # Set CE01ISSM-LM001 Deployment to DEPLOYED

        # Activate CE01ISSM-LM001 platform assembly deployment


        # Deactivate CE01ISSM-LM001 platform assembly deployment

        # Set CE01ISSM-LM001 Deployment to INTEGRATED state

        # Set CE01ISSM-LM001 station device to INTEGRATED state

        # Set CE01ISSM-RI002 component device to INTEGRATED state

        # Disassemble CE01ISSM-RI002 platform from CG CE01ISSM-LM001 station platform

        # Disassemble all CE01ISSM-RI002-* instruments from a CG CE01ISSM-RI002 component platform


        # Retire instrument one for CE01ISSM-RI002-*

        # Retire device one for CE01ISSM-RI002

        # Retire device one for CE01ISSM-LM001

        return passing

    # -------------------------------------------------------------------------

    def retrieve_ooi_asset(self, alt_id='', namespace='PRE'):
        dp_list, _  = self.RR.find_resources_ext(alt_id_ns=namespace, alt_id=alt_id)
        self.assertEquals(len(dp_list), 1)
        return dp_list[0]

    def transition_lcs_then_verify(self, resource_id, new_lcs_state, verify):
        self.RR2.advance_lcs(resource_id, new_lcs_state)
        resource_obj = self.RR.read(resource_id)
        return self.assertEquals(resource_obj.lcstate, verify)

    def create_basic_deployment(self, name='', description=''):
        start = IonTime(datetime.datetime(2013,1,1))
        end = IonTime(datetime.datetime(2014,1,1))
        temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=start.to_string(), end_datetime=end.to_string())
        deployment_obj = IonObject(RT.Deployment,
            name=name,
            description=description,
            context=IonObject(OT.CabledNodeDeploymentContext),
            constraint_list=[temporal_bounds])
        return self.OMS.create_deployment(deployment_obj)

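    # Deployment activation is modeled as the presence of a hasDevice
    # association between the deployment's site and device; the two helpers
    # below assert its presence and absence respectively.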
    def validate_deployment_activated(self, deployment_id=''):
        site_id, device_id = self.get_deployment_ids(deployment_id)
        assocs = self.RR.find_associations(subject=site_id, predicate=PRED.hasDevice, object=device_id)
        return self.assertEquals(len(assocs), 1)

    def validate_deployment_deactivated(self, deployment_id=''):
        site_id, device_id = self.get_deployment_ids(deployment_id)
        assocs = self.RR.find_associations(subject=site_id, predicate=PRED.hasDevice, object=device_id)
        return self.assertEquals(len(assocs), 0)

    def dump_deployment(self, deployment_id='', name=""):
        #site_id, device_id = self.get_deployment_ids(deployment_id)
        resource_list,_ = self.RR.find_subjects(predicate=PRED.hasDeployment, object=deployment_id, id_only=True)
        resource_list.append(deployment_id)
        resources = self.RR.read_mult(resource_list )
        log.debug('---------   dump_deployment %s summary---------------', name)
        for resource in resources:
            log.debug('%s: %s (%s)', resource._get_type(), resource.name, resource._id)

        log.debug('---------   dump_deployment %s full dump ---------------', name)

        for resource in resources:
            log.debug('resource: %s ', resource)
        log.debug('---------   dump_deployment %s end  ---------------', name)


        # Optionally, dump all associations touching the deployment:
        #assocs = self.container.resource_registry.find_associations(anyside=deployment_id)
        #log.debug('---------   dump_deployment  ---------------')
        #for assoc in assocs:
        #    log.debug('SUBJECT: %s      PREDICATE: %s OBJECT: %s', assoc.s, assoc.p, assoc.o)
        #log.debug('---------   dump_deployment  end  ---------------')


    def get_deployment_ids(self, deployment_id=''):
        devices = []
        sites = []
        idevice_list,_ = self.RR.find_subjects(RT.InstrumentDevice, PRED.hasDeployment, deployment_id, id_only=True)
        pdevice_list,_ = self.RR.find_subjects(RT.PlatformDevice, PRED.hasDeployment, deployment_id, id_only=True)
        devices = idevice_list + pdevice_list
        self.assertEquals(1, len(devices))
        isite_list,_ = self.RR.find_subjects(RT.InstrumentSite, PRED.hasDeployment, deployment_id, id_only=True)
        psite_list,_ = self.RR.find_subjects(RT.PlatformSite, PRED.hasDeployment, deployment_id, id_only=True)
        sites = isite_list + psite_list
        self.assertEquals(1, len(sites))
        return sites[0], devices[0]

    def _find_resource_in_list(self, res_list, attr, attr_val, assert_found=True):
        for res in res_list:
            v = getattr(res, attr, None)
            if v == attr_val:
                return res
        if assert_found:
            self.assertTrue(False, "Attribute %s value %s not found in list" % (attr, attr_val))
        return None

    # -------------------------------------------------------------------------

    def _get_caller(self):
        s = inspect.stack()
        return "%s:%s" % (s[2][1], s[2][2])

    @assertion_wrapper
    def assert_array_almost_equal(self, *args, **kwargs):
        np.testing.assert_array_almost_equal(*args, **kwargs)

    @assertion_wrapper
    def assertEquals(self, *args, **kwargs):
        IonIntegrationTestCase.assertEquals(self, *args, **kwargs)

    @assertion_wrapper
    def assertTrue(self, *args, **kwargs):
        IonIntegrationTestCase.assertTrue(self, *args, **kwargs)
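
    # assertion_wrapper is defined elsewhere and its source is not shown here.
    # A plausible sketch, assuming it merely tags assertion failures with the
    # calling test location reported by _get_caller() above (an assumption,
    # not the actual implementation):
    #
    #     import functools
    #
    #     def assertion_wrapper(fn):
    #         @functools.wraps(fn)
    #         def wrapper(self, *args, **kwargs):
    #             try:
    #                 return fn(self, *args, **kwargs)
    #             except AssertionError as e:
    #                 raise AssertionError("%s (at %s)" % (e, self._get_caller()))
    #         return wrapper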
class TransformManagementServiceIntTest(IonIntegrationTestCase):

    def setUp(self):
        # set up the container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2dm.yml')

        self.pubsub_cli = PubsubManagementServiceClient(node=self.container.node)
        self.tms_cli = TransformManagementServiceClient(node=self.container.node)
        self.rr_cli = ResourceRegistryServiceClient(node=self.container.node)
        self.procd_cli = ProcessDispatcherServiceClient(node=self.container.node)

        self.input_stream_id = self.pubsub_cli.create_stream(name='input_stream',original=True)

        self.input_subscription_id = self.pubsub_cli.create_subscription(query=StreamQuery(stream_ids=[self.input_stream_id]),exchange_name='transform_input',name='input_subscription')

        self.output_stream_id = self.pubsub_cli.create_stream(name='output_stream',original=True)

        self.process_definition = ProcessDefinition(name='basic_transform_definition')
        self.process_definition.executable = {'module': 'ion.processes.data.transforms.transform_example',
                                              'class':'TransformExample'}
        self.process_definition_id = self.procd_cli.create_process_definition(process_definition=self.process_definition)

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),'Not integrated for CEI')
    def test_create_transform(self):
        configuration = {'program_args':{'arg1':'value'}}

        transform_id = self.tms_cli.create_transform(
              name='test_transform',
              in_subscription_id=self.input_subscription_id,
              out_streams={'output':self.output_stream_id},
              process_definition_id=self.process_definition_id,
              configuration=configuration)

        # test transform creation in rr
        transform = self.rr_cli.read(transform_id)
        self.assertEquals(transform.name,'test_transform')


        # test associations
        predicates = [PRED.hasSubscription, PRED.hasOutStream, PRED.hasProcessDefinition]
        assocs = []
        for p in predicates:
            assocs += self.rr_cli.find_associations(transform_id,p,id_only=True)
        self.assertEquals(len(assocs),3)

        # test process creation
        transform = self.tms_cli.read_transform(transform_id)
        pid = transform.process_id
        proc = self.container.proc_manager.procs.get(pid)
        self.assertIsInstance(proc,TransformExample)

        # clean up
        self.tms_cli.delete_transform(transform_id)

    def test_create_transform_no_procdef(self):
        with self.assertRaises(NotFound):
            self.tms_cli.create_transform(name='test',in_subscription_id=self.input_subscription_id)

    def test_create_transform_bad_procdef(self):
        with self.assertRaises(NotFound):
            self.tms_cli.create_transform(name='test',
                in_subscription_id=self.input_subscription_id,
                process_definition_id='bad')
    
    def test_create_transform_no_config(self):
        transform_id = self.tms_cli.create_transform(
            name='test_transform',
            in_subscription_id=self.input_subscription_id,
            out_streams={'output':self.output_stream_id},
            process_definition_id=self.process_definition_id,
        )
        self.tms_cli.delete_transform(transform_id)

    def test_create_transform_name_failure(self):
        transform_id = self.tms_cli.create_transform(
            name='test_transform',
            in_subscription_id=self.input_subscription_id,
            out_streams={'output':self.output_stream_id},
            process_definition_id=self.process_definition_id,
        )
        with self.assertRaises(BadRequest):
            transform_id = self.tms_cli.create_transform(
                name='test_transform',
                in_subscription_id=self.input_subscription_id,
                out_streams={'output':self.output_stream_id},
                process_definition_id=self.process_definition_id,
            )
        self.tms_cli.delete_transform(transform_id)

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),'Not integrated for CEI')
    def test_create_no_output(self):
        transform_id = self.tms_cli.create_transform(
            name='test_transform',
            in_subscription_id=self.input_subscription_id,
            process_definition_id=self.process_definition_id,
        )

        predicates = [PRED.hasSubscription, PRED.hasOutStream, PRED.hasProcessDefinition]
        assocs = []
        for p in predicates:
            assocs += self.rr_cli.find_associations(transform_id,p,id_only=True)
        self.assertEquals(len(assocs),2)

        # test process creation
        transform = self.tms_cli.read_transform(transform_id)
        pid = transform.process_id
        proc = self.container.proc_manager.procs.get(pid)
        self.assertIsInstance(proc,TransformExample)

        self.tms_cli.delete_transform(transform_id)

    def test_read_transform_exists(self):
        trans_obj = IonObject(RT.Transform,name='trans_obj')
        trans_id, _ = self.rr_cli.create(trans_obj)

        res = self.tms_cli.read_transform(trans_id)
        actual = self.rr_cli.read(trans_id)

        self.assertEquals(res._id,actual._id)


    def test_read_transform_nonexist(self):
        with self.assertRaises(NotFound) as e:
            self.tms_cli.read_transform('123')

    def test_activate_transform(self):

        transform_id = self.tms_cli.create_transform(
            name='test_transform',
            in_subscription_id=self.input_subscription_id,
            out_streams={'output':self.output_stream_id},
            process_definition_id=self.process_definition_id
        )

        self.tms_cli.activate_transform(transform_id)

        # pubsub check if activated?
        self.tms_cli.delete_transform(transform_id)

    def test_activate_transform_nonexist(self):
        with self.assertRaises(NotFound):
            self.tms_cli.activate_transform('1234')

    def test_delete_transform(self):

        transform_id = self.tms_cli.create_transform(
            name='test_transform',
            in_subscription_id=self.input_subscription_id,
            process_definition_id=self.process_definition_id
        )
        self.tms_cli.delete_transform(transform_id)

        # assertions
        with self.assertRaises(NotFound):
            self.rr_cli.read(transform_id)


    def test_delete_transform_nonexist(self):
        with self.assertRaises(NotFound):
            self.tms_cli.delete_transform('123')

    def test_execute_transform(self):
        # set up
        process_definition = ProcessDefinition(name='procdef_execute')
        process_definition.executable['module'] = 'ion.processes.data.transforms.transform_example'
        process_definition.executable['class'] = 'ReverseTransform'
        data = [1,2,3]

        process_definition_id, _ = self.rr_cli.create(process_definition)

        retval = self.tms_cli.execute_transform(process_definition_id,data)

        self.assertEquals(retval,[3,2,1])


    def test_integrated_transform(self):
        '''
        This example script runs a chained three-way transform:

                 / B
            A --<
                 \ C

        where A is the even_odd transform (generates streams of even and odd
        numbers from its input) and B and C are basic transforms that receive
        the even and odd streams respectively.
        '''
        cc = self.container
        assertions = self.assertTrue

        pubsub_cli = PubsubManagementServiceClient(node=cc.node)
        rr_cli = ResourceRegistryServiceClient(node=cc.node)
        tms_cli = TransformManagementServiceClient(node=cc.node)
        #-------------------------------
        # Process Definition
        #-------------------------------
        # Create the process definition for the basic transform
        process_definition = IonObject(RT.ProcessDefinition, name='basic_transform_definition')
        process_definition.executable = {
            'module': 'ion.processes.data.transforms.transform_example',
            'class':'TransformExample'
        }
        basic_transform_definition_id, _ = rr_cli.create(process_definition)

        # Create The process definition for the TransformEvenOdd
        process_definition = IonObject(RT.ProcessDefinition, name='evenodd_transform_definition')
        process_definition.executable = {
            'module': 'ion.processes.data.transforms.transform_example',
            'class':'TransformEvenOdd'
        }
        evenodd_transform_definition_id, _ = rr_cli.create(process_definition)

        #-------------------------------
        # Streams
        #-------------------------------
        streams = [pubsub_cli.create_stream() for i in xrange(5)]

        #-------------------------------
        # Subscriptions
        #-------------------------------

        query = StreamQuery(stream_ids=[streams[0]])
        input_subscription_id = pubsub_cli.create_subscription(query=query, exchange_name='input_queue')

        query = StreamQuery(stream_ids = [streams[1]]) # even output
        even_subscription_id = pubsub_cli.create_subscription(query=query, exchange_name='even_queue')

        query = StreamQuery(stream_ids = [streams[2]]) # odd output
        odd_subscription_id = pubsub_cli.create_subscription(query=query, exchange_name='odd_queue')


        #-------------------------------
        # Launch the EvenOdd Transform
        #-------------------------------

        evenodd_id = tms_cli.create_transform(name='even_odd',
            in_subscription_id=input_subscription_id,
            out_streams={'even':streams[1], 'odd':streams[2]},
            process_definition_id=evenodd_transform_definition_id,
            configuration={})
        tms_cli.activate_transform(evenodd_id)


        #-------------------------------
        # Launch the Even Processing Transform
        #-------------------------------

        even_transform_id = tms_cli.create_transform(name='even_transform',
            in_subscription_id = even_subscription_id,
            out_streams={'even_plus1':streams[3]},
            process_definition_id=basic_transform_definition_id,
            configuration={})
        tms_cli.activate_transform(even_transform_id)

        #-------------------------------
        # Launch the Odd Processing Transform
        #-------------------------------

        odd_transform_id = tms_cli.create_transform(name='odd_transform',
            in_subscription_id = odd_subscription_id,
            out_streams={'odd_plus1':streams[4]},
            process_definition_id=basic_transform_definition_id,
            configuration={})
        tms_cli.activate_transform(odd_transform_id)

        #-------------------------------
        # Set up final subscribers
        #-------------------------------

        evenplus1_subscription_id = pubsub_cli.create_subscription(
            query=StreamQuery([streams[3]]),
            exchange_name='evenplus1_queue',
            name='EvenPlus1Subscription',
            description='EvenPlus1 SubscriptionDescription'
        )
        oddplus1_subscription_id = pubsub_cli.create_subscription(
            query=StreamQuery([streams[4]]),
            exchange_name='oddplus1_queue',
            name='OddPlus1Subscription',
            description='OddPlus1 SubscriptionDescription'
        )

        total_msg_count = 2

        msgs = gevent.queue.Queue()


        def even1_message_received(message, headers):
            input = int(message.get('num'))
            assertions( (input % 2) ) # Assert it is odd (transform adds 1)
            msgs.put(True)


        def odd1_message_received(message, headers):
            input = int(message.get('num'))
            assertions(not (input % 2)) # Assert it is even
            msgs.put(True)

        subscriber_registrar = StreamSubscriberRegistrar(process=cc, node=cc.node)
        even_subscriber = subscriber_registrar.create_subscriber(exchange_name='evenplus1_queue', callback=even1_message_received)
        odd_subscriber = subscriber_registrar.create_subscriber(exchange_name='oddplus1_queue', callback=odd1_message_received)

        # Start subscribers
        even_subscriber.start()
        odd_subscriber.start()

        # Activate subscriptions
        pubsub_cli.activate_subscription(evenplus1_subscription_id)
        pubsub_cli.activate_subscription(oddplus1_subscription_id)

        #-------------------------------
        # Set up fake stream producer
        #-------------------------------

        pid = cc.spawn_process(name='dummy_process_for_test',
            module='pyon.ion.process',
            cls='SimpleProcess',
            config={})
        dummy_process = cc.proc_manager.procs[pid]

        # Normally the user does not see or create the publisher, this is part of the containers business.
        # For the test we need to set it up explicitly
        publisher_registrar = StreamPublisherRegistrar(process=dummy_process, node=cc.node)
        stream_publisher = publisher_registrar.create_publisher(stream_id=streams[0])

        #-------------------------------
        # Start test
        #-------------------------------

        # Publish a stream
        for i in xrange(total_msg_count):
            stream_publisher.publish({'num':str(i)})

        time.sleep(0.5)

        for i in xrange(total_msg_count * 2):
            try:
                msgs.get(timeout=5)  # a plain get() would block forever and Empty would never be raised
            except Empty:
                assertions(False, "Failed to process all messages correctly.")

    """
class TestIntDataAcquisitionManagementService(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataAcquisitionManagementService
        self.client = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)

    def tearDown(self):
        pass


    #@unittest.skip('Not done yet.')
    def test_data_source_ops(self):
        # test creating a new data source
        print 'Creating new data source'
        datasource_obj = IonObject(RT.DataSource,
                           name='DataSource1',
                           description='instrument based new source' ,
                            data_source_type='sbe37')
        try:
            ds_id = self.client.create_data_source(datasource_obj)
        except BadRequest as ex:
            self.fail("failed to create new data source: %s" %ex)
        print 'new data source id = ', ds_id


        # test reading a non-existent data source
        print 'reading non-existent data source'
        try:
            dp_obj = self.client.read_data_source('some_fake_id')
        except NotFound as ex:
            pass
        else:
            self.fail("non-existing data source was found during read: %s" %dp_obj)

        # update a data source (tests read also)
        print 'Updating data source'
        # first get the existing data source object
        try:
            datasource_obj = self.client.read_data_source(ds_id)
        except NotFound as ex:
            self.fail("existing data source was not found during read")
        else:
            pass

        # now tweak the object
        datasource_obj.description = 'the very first data source'
        # now write the dp back to the registry
        try:
            update_result = self.client.update_data_source(datasource_obj)
        except NotFound as ex:
            self.fail("existing data source was not found during update")
        except Conflict as ex:
            self.fail("revision conflict exception during data source update")
        #else:
        #    self.assertTrue(update_result == True)
        # now get the data source back to see if it was updated
        try:
            datasource_obj = self.client.read_data_source(ds_id)
        except NotFound as ex:
            self.fail("existing data source was not found during read")
        else:
            pass
        self.assertTrue(datasource_obj.description == 'the very first data source')


        # now 'delete' the data source
        print "deleting data source"
        try:
            delete_result = self.client.force_delete_data_source(ds_id)
        except NotFound as ex:
            self.fail("existing data source was not found during delete")
        #self.assertTrue(delete_result == True)
        # now try to get the deleted dp object
        try:
            dp_obj = self.client.read_data_source(ds_id)
        except NotFound as ex:
            pass
        else:
            self.fail("deleted data source was found during read")

        # now try to delete the already deleted data source object
        print "deleting non-existing data source"
        try:
            delete_result = self.client.delete_data_source(ds_id)
        except NotFound as ex:
            pass
        else:
            self.fail("non-existing data source was found during delete")


    def test_register_instrument(self):
        # set up initial instrument to register
        instrument_obj = IonObject(RT.InstrumentDevice, name='Inst1',description='an instrument that is creating the data product')
        instrument_id, rev = self.rrclient.create(instrument_obj)

        self.base_register_instrument(instrument_id)

    def test_register_platform(self):
        # set up initial platform to register
        platform_obj = IonObject(RT.PlatformDevice, name='Plat1',description='a platform that is creating the data product')
        platform_id, rev = self.rrclient.create(platform_obj)

    #@unittest.skip('Not done yet.')
    def base_register_instrument(self, instrument_id):
        # Register an instrument as a data producer in coordination with DM PubSub: create stream, register and create producer object

        dataproduct_obj = IonObject(RT.DataProduct, name='DataProduct1',description='sample data product')
        dataproduct_id, rev = self.rrclient.create(dataproduct_obj)

        # test registering a new data producer
        try:
            ds_id = self.client.register_instrument(instrument_id)
        except BadRequest as ex:
            self.fail("failed to create new data producer: %s" %ex)
        print 'new data producer id = ', ds_id


        # test assigning a data product to an instrument, creating the stream for the product
        try:
            self.client.assign_data_product(instrument_id, dataproduct_id)
            self.client.assign_data_product_source(dataproduct_id, instrument_id)
        except BadRequest as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)

        assocs = self.rrclient.find_associations(dataproduct_id, PRED.hasSource, instrument_id)
        if not assocs or len(assocs) == 0:
            self.fail("failed to assign data product to data producer")

        # test UNassigning a data product from instrument, deleting the stream for the product
        try:
            self.client.unassign_data_product(instrument_id, dataproduct_id)
            self.client.unassign_data_product_source(dataproduct_id, instrument_id)
        except BadRequest as ex:
            self.fail("failed to unassign data product from data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to unassign data product from data producer: %s" %ex)

        assocs = self.rrclient.find_associations(dataproduct_id, PRED.hasSource, instrument_id)
        if assocs:
            self.fail("failed to unassign data product from data producer")

        # test UNregistering a new data producer
        try:
            ds_id = self.client.unregister_instrument(instrument_id)
        except NotFound as ex:
            self.fail("failed to unregister instrument producer: %s" %ex)


    def test_register_external_data_set(self):
        # Register an external data set as a data producer in coordination with DM PubSub: create stream, register and create producer object


        # set up initial instrument to register
        ext_dataset_obj = IonObject(RT.ExternalDataset, name='DataSet1',description='an external data feed')
        ext_dataset_id, rev = self.rrclient.create(ext_dataset_obj)

        dataproduct_obj = IonObject(RT.DataProduct, name='DataProduct1',description='sample data product')
        dataproduct_id, rev = self.rrclient.create(dataproduct_obj)


        # test registering a new external data set
        try:
            ds_id = self.client.register_external_data_set(ext_dataset_id)
        except BadRequest as ex:
            self.fail("failed to create new data producer: %s" %ex)
        print 'new data producer id = ', ds_id

        # test assigning a data product to an ext_dataset_id, creating the stream for the product
        try:
            self.client.assign_data_product(ext_dataset_id, dataproduct_id)
        except BadRequest as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to assign data product to data producer: %s" %ex)

        # test UNassigning a data product from ext_dataset_id, deleting the stream for the product
        try:
            self.client.unassign_data_product(ext_dataset_id, dataproduct_id)
        except BadRequest as ex:
            self.fail("failed to unassign data product from data producer: %s" %ex)
        except NotFound as ex:
            self.fail("failed to unassign data product from data producer: %s" %ex)

        # test UNregistering an external data set
        try:
            ds_id = self.client.unregister_external_data_set(ext_dataset_id)
        except NotFound as ex:
            self.fail("failed to unregister external data set: %s" %ex)

    #@unittest.skip('not ready')
    def test_eoi_resources(self):

            #
            # test creating a new data provider
            #
            print 'Creating new external_data_provider'
            dataprovider_obj = IonObject(RT.ExternalDataProvider,
                               name='ExtDataProvider1',
                               description='external data provider ')
            try:
                dataprovider_id = self.client.create_external_data_provider(dataprovider_obj)
            except BadRequest as ex:
                self.fail("failed to create new data provider: %s" %ex)
            print 'new data provider id = ', dataprovider_id

            #
            # test creating a new data source
            #
            print 'Creating new data source'
            datasource_obj = IonObject(RT.DataSource,
                               name='DataSource1',
                               description='data source ',
                               data_source_type='DAP')
            try:
                datasource_id = self.client.create_data_source(datasource_obj)
            except BadRequest as ex:
                self.fail("failed to create new data source: %s" %ex)
            print 'new data source id = ', datasource_id

            #
            # test creating a new data source model
            #
            print 'Creating new data source model'
            datamodel_obj = IonObject(RT.DataSourceModel,
                               name='DataSourceModel1',
                               description='data source model')
            try:
                datamodel_id = self.client.create_data_source_model(datamodel_obj)
            except BadRequest as ex:
                self.fail("failed to create new data source model: %s" %ex)
            print 'new data source model id = ', datamodel_id


            #
            # test creating a new external data set
            #
            print 'Creating new external data set'
            dataset_obj = IonObject(RT.ExternalDataset,
                               name='ExternalDataSet1',
                               description='external data set ')
            try:
                extdataset_id = self.client.create_external_dataset(dataset_obj)
            except BadRequest as ex:
                self.fail("failed to create new external data set: %s" %ex)
            print 'new external data set id = ', extdataset_id


            #
            # test creating a new dataset agent instance
            #
            print 'Creating new external data agent '
            datasetagent_obj = IonObject(RT.ExternalDatasetAgent,
                               name='ExternalDatasetAgent1',
                               description='external data agent ')
            try:
                datasetagent_id = self.client.create_external_dataset_agent(datasetagent_obj)
            except BadRequest as ex:
                self.fail("failed to create new external dataset agent: %s" %ex)
            print 'new external data agent  id = ', datasetagent_id


            #
            # test creating a new datasource agent instance
            #
            print 'Creating new  data source agent '
            datasourceagent_obj = IonObject(RT.DataSourceAgent,
                               name='DataSourceAgent1',
                               description=' DataSource agent ')
            try:
                datasource_agent_id = self.client.create_data_source_agent(datasourceagent_obj)
            except BadRequest as ex:
                self.fail("failed to create new external datasource agent: %s" %ex)
            print 'new external data agent  id = ', datasource_agent_id
            #
            # test creating a new dataset agent instance
            #
            print 'Creating new external dataset agent instance'
            datasetagentinstance_obj = IonObject(RT.ExternalDatasetAgentInstance,
                               name='ExternalDatasetAgentInstance1',
                               description='external dataset agent instance ')
            try:
                datasetagentinstance_id = self.client.create_external_dataset_agent_instance(datasetagentinstance_obj, datasetagent_id)
            except BadRequest as ex:
                self.fail("failed to create new external dataset agent instance: %s" %ex)
            print 'new external data agent instance id = ', datasetagentinstance_id

            #
            # test creating a new datasource agent instance
            #
            print 'Creating new  data source agent '
            datasourceagentinstance_obj = IonObject(RT.DataSourceAgentInstance,
                               name='ExternalDataSourceAgentInstance1',
                               description='external DataSource agent instance ')
            try:
                datasource_agent_instance_id = self.client.create_data_source_agent_instance(datasourceagentinstance_obj)
            except BadRequest as ex:
                self.fail("failed to create new external datasource agent instance: %s" %ex)
            print 'new external data agent  id = ', datasource_agent_instance_id

            #
            # test assign / unassign
            #

            self.client.unassign_data_source_from_external_data_provider(datasource_id, dataprovider_id)

            self.client.unassign_data_source_from_data_model(datasource_id, datamodel_id)

            self.client.unassign_external_dataset_from_data_source(extdataset_id, datasource_id)

            #
            # test read
            #

            try:
                dp_obj = self.client.read_external_data_provider(dataprovider_id)
            except NotFound as ex:
                self.fail("existing data provicer was not found during read")
            else:
                pass

            try:
                dp_obj = self.client.read_data_source(datasource_id)
            except NotFound as ex:
                self.fail("existing data source was not found during read")
            else:
                pass

            #
            # test delete
            #
            try:
                self.client.delete_external_data_provider(dataprovider_id)
                self.client.delete_data_source(datasource_id)
                self.client.delete_external_dataset(extdataset_id)
                self.client.delete_data_source_model(datamodel_id)
                self.client.delete_external_dataset_agent(datasetagent_id)
                self.client.delete_data_source_agent_instance(datasource_agent_instance_id)

                self.client.force_delete_external_data_provider(dataprovider_id)
                self.client.force_delete_data_source(datasource_id)
                self.client.force_delete_external_dataset(extdataset_id)
                self.client.force_delete_data_source_model(datamodel_id)
                self.client.force_delete_external_dataset_agent(datasetagent_id)
                self.client.force_delete_data_source_agent_instance(datasource_agent_instance_id)
            except NotFound as ex:
                self.fail("an existing resource was not found during delete")


            # test reading a non-existent data provider
            print 'reading non-existent data provider'
            try:
                bad_obj = self.client.read_external_data_provider('some_fake_id')
            except NotFound as ex:
                pass
            else:
                self.fail("non-existing data provider was found during read: %s" %bad_obj)


    def make_grt_parser(self):
        return self.client.create_parser(Parser(name='grt', description='', module='ion.util.parsers.global_range_test', method='grt_parser', config=None))


    @unittest.skip("Deprecated")
    def test_qc_attachment(self):
        instrument_device = InstrumentDevice(name='whatever')
        instrument_device_id,_ = self.rrclient.create(instrument_device)
        self.addCleanup(self.rrclient.delete, instrument_device_id)
        self.client.register_instrument(instrument_device_id)
        self.addCleanup(self.client.unregister_instrument, instrument_device_id)
        dp = DataProduct(name='instrument output')

        dp_id,_ = self.rrclient.create(dp)
        self.addCleanup(self.rrclient.delete, dp_id)

        parser_id = self.make_grt_parser()
        attachment = Attachment(name='qc ref', attachment_type=AttachmentType.REFERENCE,content=global_range_test_document, context=ReferenceAttachmentContext(parser_id=parser_id))
        att_id = self.rrclient.create_attachment(dp_id, attachment)
        self.addCleanup(self.rrclient.delete_attachment, att_id)

        attachment2 = Attachment(name='qc ref2', attachment_type=AttachmentType.REFERENCE, content=global_range_test_document2, context=ReferenceAttachmentContext(parser_id=parser_id))
        att2_id = self.rrclient.create_attachment(dp_id, attachment2)
        self.addCleanup(self.rrclient.delete_attachment, att2_id)

        self.client.assign_data_product(instrument_device_id, dp_id)
        self.addCleanup(self.client.unassign_data_product, instrument_device_id, dp_id)
        svm = StoredValueManager(self.container)
        doc = svm.read_value('grt_CE01ISSM-MF005-01-CTDBPC999_TEMPWAT')
        np.testing.assert_array_almost_equal(doc['grt_min_value'], -2.)
class TestWorkflowManagementIntegration(IonIntegrationTestCase):

    def setUp(self):
        # Start container

        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubclient =  PubsubManagementServiceClient(node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.datasetclient =  DatasetManagementServiceClient(node=self.container.node)
        self.workflowclient = WorkflowManagementServiceClient(node=self.container.node)
        self.process_dispatcher = ProcessDispatcherServiceClient(node=self.container.node)

        self.ctd_stream_def = SBE37_CDM_stream_definition()

    def _create_ctd_input_stream_and_data_product(self, data_product_name='ctd_parsed'):

        cc = self.container
        assertions = self.assertTrue

        #-------------------------------
        # Create CTD Parsed as the initial data product
        #-------------------------------
        # create a stream definition for the data from the ctd simulator
        # (self.ctd_stream_def was already built in setUp; no new local copy needed)
        ctd_stream_def_id = self.pubsubclient.create_stream_definition(container=self.ctd_stream_def, name='Simulated CTD data')


        log.debug('Creating new CDM data product with a stream definition')
        dp_obj = IonObject(RT.DataProduct,name=data_product_name,description='ctd stream test')
        try:
            ctd_parsed_data_product_id = self.dataproductclient.create_data_product(dp_obj, ctd_stream_def_id)
        except Exception as ex:
            self.fail("failed to create new data product: %s" %ex)

        log.debug('new ctd_parsed_data_product_id = %s', ctd_parsed_data_product_id)

        #Only ever need one device for testing purposes.
        instDevice_obj,_ = self.rrclient.find_resources(restype=RT.InstrumentDevice, name='SBE37IMDevice')
        if instDevice_obj:
            instDevice_id = instDevice_obj[0]._id
        else:
            instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345" )
            instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_parsed_data_product_id)

        self.dataproductclient.activate_data_product_persistence(data_product_id=ctd_parsed_data_product_id, persist_data=False, persist_metadata=False)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_parsed_data_product_id, PRED.hasStream, None, True)
        assertions(len(stream_ids) > 0 )
        ctd_stream_id = stream_ids[0]

        return ctd_stream_id, ctd_parsed_data_product_id

    def _start_simple_input_stream_process(self, ctd_stream_id):
        return self._start_input_stream_process(ctd_stream_id)

    def _start_sinusoidal_input_stream_process(self, ctd_stream_id):
        return self._start_input_stream_process(ctd_stream_id, 'ion.processes.data.sinusoidal_stream_publisher', 'SinusoidalCtdPublisher')

    def _start_input_stream_process(self, ctd_stream_id, module = 'ion.processes.data.ctd_stream_publisher', class_name= 'SimpleCtdPublisher'):


        ###
        ### Start the process for producing the CTD data
        ###
        # process definition for the ctd simulator...
        producer_definition = ProcessDefinition()
        producer_definition.executable = {
            'module':module,
            'class':class_name
        }

        ctd_sim_procdef_id = self.process_dispatcher.create_process_definition(process_definition=producer_definition)

        # Start the ctd simulator to produce some data
        configuration = {
            'process':{
                'stream_id':ctd_stream_id,
                }
        }
        ctd_sim_pid = self.process_dispatcher.schedule_process(process_definition_id=ctd_sim_procdef_id, configuration=configuration)

        return ctd_sim_pid

    def _start_output_stream_listener(self, data_product_stream_ids, message_count_per_stream=10):

        cc = self.container
        assertions = self.assertTrue

        ###
        ### Make a subscriber in the test to listen for transformed data
        ###
        salinity_subscription_id = self.pubsubclient.create_subscription(
            query=StreamQuery(data_product_stream_ids),
            exchange_name = 'workflow_test',
            name = "test workflow transformations",
        )

        pid = cc.spawn_process(name='dummy_process_for_test',
            module='pyon.ion.process',
            cls='SimpleProcess',
            config={})
        dummy_process = cc.proc_manager.procs[pid]

        subscriber_registrar = StreamSubscriberRegistrar(process=dummy_process, node=cc.node)

        result = gevent.event.AsyncResult()
        results = []
        def message_received(message, headers):
            log.warn('data received!')
            results.append(message)
            if len(results) >= len(data_product_stream_ids) * message_count_per_stream:   #Only wait for so many messages - per stream
                result.set(True)

        subscriber = subscriber_registrar.create_subscriber(exchange_name='workflow_test', callback=message_received)
        subscriber.start()

        # after the queue has been created it is safe to activate the subscription
        self.pubsubclient.activate_subscription(subscription_id=salinity_subscription_id)


        # Assert that we have received data
        assertions(result.get(timeout=30))

        self.pubsubclient.deactivate_subscription(subscription_id=salinity_subscription_id)

        subscriber.stop()

        return results


    def _validate_messages(self, results):

        cc = self.container
        assertions = self.assertTrue

        first_salinity_values = None

        for message in results:

            try:
                psd = PointSupplementStreamParser(stream_definition=self.ctd_stream_def, stream_granule=message)
                temp = psd.get_values('temperature')
                log.info(psd.list_field_names())
            except KeyError as ke:
                temp = None

            if temp is not None:
                assertions(isinstance(temp, numpy.ndarray))

                log.info( 'temperature=' + str(numpy.nanmin(temp)))

                first_salinity_values = None

            else:
                psd = PointSupplementStreamParser(stream_definition=SalinityTransform.outgoing_stream_def, stream_granule=message)
                log.info( psd.list_field_names())

                # Test the handy info method for the names of fields in the stream def
                assertions('salinity' in psd.list_field_names())

                # you have to know the name of the coverage in stream def
                salinity = psd.get_values('salinity')
                log.info( 'salinity=' + str(numpy.nanmin(salinity)))

                assertions(isinstance(salinity, numpy.ndarray))

                assertions(numpy.nanmin(salinity) > 0.0) # salinity should always be greater than 0

                if first_salinity_values is None:
                    first_salinity_values = salinity.tolist()
                else:
                    second_salinity_values = salinity.tolist()
                    assertions(len(first_salinity_values) == len(second_salinity_values))
                    for idx in range(0,len(first_salinity_values)):
                        assertions(first_salinity_values[idx]*2.0 == second_salinity_values[idx])


    def _create_salinity_data_process_definition(self):

        # Salinity: Data Process Definition

        #First look to see if it exists and if not, then create it
        dpd,_ = self.rrclient.find_resources(restype=RT.DataProcessDefinition, name='ctd_salinity')
        if len(dpd) > 0:
            return dpd[0]

        log.debug("Create data process definition SalinityTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
            name='ctd_salinity',
            description='create a salinity data product',
            module='ion.processes.data.transforms.ctd.ctd_L2_salinity',
            class_name='SalinityTransform',
            process_source='SalinityTransform source code here...')
        try:
            ctd_L2_salinity_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except Exception as ex:
            self.fail("failed to create new SalinityTransform data process definition: %s" %ex)

        # create a stream definition for the data from the salinity Transform
        sal_stream_def_id = self.pubsubclient.create_stream_definition(container=SalinityTransform.outgoing_stream_def,  name='Salinity')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(sal_stream_def_id, ctd_L2_salinity_dprocdef_id )

        return ctd_L2_salinity_dprocdef_id

    def _create_salinity_doubler_data_process_definition(self):

        #First look to see if it exists and if not, then create it
        dpd,_ = self.rrclient.find_resources(restype=RT.DataProcessDefinition, name='salinity_doubler')
        if len(dpd) > 0:
            return dpd[0]

        # Salinity Doubler: Data Process Definition
        log.debug("Create data process definition SalinityDoublerTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
            name='salinity_doubler',
            description='create a salinity doubler data product',
            module='ion.processes.data.transforms.example_double_salinity',
            class_name='SalinityDoubler',
            process_source='SalinityDoubler source code here...')
        try:
            salinity_doubler_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except Exception as ex:
            self.fail("failed to create new SalinityDoubler data process definition: %s" %ex)


        # create a stream definition for the data from the salinity Transform
        salinity_double_stream_def_id = self.pubsubclient.create_stream_definition(container=SalinityDoubler.outgoing_stream_def,  name='SalinityDoubler')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(salinity_double_stream_def_id, salinity_doubler_dprocdef_id )

        return salinity_doubler_dprocdef_id


    def create_transform_process(self, data_process_definition_id, data_process_input_dp_id):

        data_process_definition = self.rrclient.read(data_process_definition_id)

        # Find the link between the output Stream Definition resource and the Data Process Definition resource
        stream_ids,_ = self.rrclient.find_objects(data_process_definition._id, PRED.hasStreamDefinition, RT.StreamDefinition,  id_only=True)
        if not stream_ids:
            raise Inconsistent("The data process definition %s is missing an association to an output stream definition" % data_process_definition._id )
        process_output_stream_def_id = stream_ids[0]

        #Use the data process definition name as the name of the output data product
        data_process_name = data_process_definition.name

        # Create the output data product of the transform
        transform_dp_obj = IonObject(RT.DataProduct, name=data_process_name,description=data_process_definition.description)
        transform_dp_id = self.dataproductclient.create_data_product(transform_dp_obj, process_output_stream_def_id)
        self.dataproductclient.activate_data_product_persistence(data_product_id=transform_dp_id, persist_data=True, persist_metadata=True)

        #the transform output data product is the overall output product
        output_data_product_id = transform_dp_id

        # Create the  transform data process
        log.debug("create data_process and start it")
        data_process_id = self.dataprocessclient.create_data_process(data_process_definition._id, [data_process_input_dp_id], {'output':transform_dp_id})
        self.dataprocessclient.activate_data_process(data_process_id)


        #Find the id of the output data stream
        stream_ids, _ = self.rrclient.find_objects(transform_dp_id, PRED.hasStream, None, True)
        if not stream_ids:
            raise Inconsistent("The data process %s is missing an association to an output stream" % data_process_id )

        return data_process_id, output_data_product_id


    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),'Not integrated for CEI')
    #@unittest.skip("Skipping for debugging ")
    def test_SA_transform_components(self):

        assertions = self.assertTrue

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the input data product
        ctd_stream_id, ctd_parsed_data_product_id = self._create_ctd_input_stream_and_data_product()
        data_product_stream_ids.append(ctd_stream_id)


        ###
        ###  Setup the first transformation
        ###

        # Salinity: Data Process Definition
        ctd_L2_salinity_dprocdef_id = self._create_salinity_data_process_definition()

        l2_salinity_all_data_process_id, ctd_l2_salinity_output_dp_id = self.create_transform_process(ctd_L2_salinity_dprocdef_id,ctd_parsed_data_product_id )

        ## get the stream id for the transform outputs
        stream_ids, _ = self.rrclient.find_objects(ctd_l2_salinity_output_dp_id, PRED.hasStream, None, True)
        assertions(len(stream_ids) > 0 )
        sal_stream_id = stream_ids[0]
        data_product_stream_ids.append(sal_stream_id)


        ###
        ###  Setup the second transformation
        ###

        # Salinity Doubler: Data Process Definition
        salinity_doubler_dprocdef_id = self._create_salinity_doubler_data_process_definition()

        salinity_double_data_process_id, salinity_doubler_output_dp_id = self.create_transform_process(salinity_doubler_dprocdef_id, ctd_l2_salinity_output_dp_id )

        stream_ids, _ = self.rrclient.find_objects(salinity_doubler_output_dp_id, PRED.hasStream, None, True)
        assertions(len(stream_ids) > 0 )
        sal_dbl_stream_id = stream_ids[0]
        data_product_stream_ids.append(sal_dbl_stream_id)



        #Start the input stream process
        ctd_sim_pid = self._start_simple_input_stream_process(ctd_stream_id)


        #Start the output stream listener to monitor and verify messages
        results = self._start_output_stream_listener(data_product_stream_ids)


        #Stop the transform processes
        self.dataprocessclient.deactivate_data_process(salinity_double_data_process_id)
        self.dataprocessclient.deactivate_data_process(l2_salinity_all_data_process_id)

        # stop the flow parse the messages...
        self.process_dispatcher.cancel_process(ctd_sim_pid) # kill the ctd simulator process - that is enough data

        #Validate the data from each of the messages along the way
        self._validate_messages(results)

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),'Not integrated for CEI')
    #@unittest.skip("Skipping for debugging ")
    def test_transform_workflow(self):

        assertions = self.assertTrue

        # Build the workflow definition
        workflow_def_obj = IonObject(RT.WorkflowDefinition, name='Salinity_Test_Workflow',description='tests a workflow of multiple transform data processes')

        workflow_data_product_name = 'TEST-Workflow_Output_Product' #Set a specific output product name

        #Add a transformation process definition
        ctd_L2_salinity_dprocdef_id = self._create_salinity_data_process_definition()
        workflow_step_obj = IonObject('DataProcessWorkflowStep', data_process_definition_id=ctd_L2_salinity_dprocdef_id, persist_process_output_data=False)  #Don't persist the intermediate data product
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #Add a transformation process definition
        salinity_doubler_dprocdef_id = self._create_salinity_doubler_data_process_definition()
        workflow_step_obj = IonObject('DataProcessWorkflowStep', data_process_definition_id=salinity_doubler_dprocdef_id, output_data_product_name=workflow_data_product_name)
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #Create it in the resource registry
        workflow_def_id = self.workflowclient.create_workflow_definition(workflow_def_obj)

        aids = self.rrclient.find_associations(workflow_def_id, PRED.hasDataProcessDefinition)
        assertions(len(aids) == 2 )

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the input data product
        ctd_stream_id, ctd_parsed_data_product_id = self._create_ctd_input_stream_and_data_product()
        data_product_stream_ids.append(ctd_stream_id)

        #Create and start the workflow
        workflow_id, workflow_product_id = self.workflowclient.create_data_process_workflow(workflow_def_id, ctd_parsed_data_product_id, timeout=30)

        workflow_output_ids,_ = self.rrclient.find_subjects(RT.Workflow, PRED.hasOutputProduct, workflow_product_id, True)
        assertions(len(workflow_output_ids) == 1 )

        #Verify the output data product name matches what was specified in the workflow definition
        workflow_product = self.rrclient.read(workflow_product_id)
        assertions(workflow_product.name == workflow_data_product_name)

        #Walk the associations to find the appropriate output data streams to validate the messages
        workflow_dp_ids,_ = self.rrclient.find_objects(workflow_id, PRED.hasDataProduct, RT.DataProduct, True)
        assertions(len(workflow_dp_ids) == 2 )

        for dp_id in workflow_dp_ids:
            stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, None, True)
            assertions(len(stream_ids) == 1 )
            data_product_stream_ids.append(stream_ids[0])

        #Start the input stream process
        ctd_sim_pid = self._start_simple_input_stream_process(ctd_stream_id)

        #Start the output stream listener to monitor and verify messages
        results = self._start_output_stream_listener(data_product_stream_ids)

        #Stop the workflow processes
        self.workflowclient.terminate_data_process_workflow(workflow_id, False, timeout=15)  # Should test true at some point

        #Make sure the Workflow object was removed
        objs, _ = self.rrclient.find_resources(restype=RT.Workflow)
        assertions(len(objs) == 0)

        # stop the flow parse the messages...
        self.process_dispatcher.cancel_process(ctd_sim_pid) # kill the ctd simulator process - that is enough data

        #Validate the data from each of the messages along the way
        self._validate_messages(results)

        #Cleanup to make sure delete is correct.
        self.workflowclient.delete_workflow_definition(workflow_def_id)

        workflow_def_ids,_ = self.rrclient.find_resources(restype=RT.WorkflowDefinition)
        assertions(len(workflow_def_ids) == 0 )


    def _create_google_dt_data_process_definition(self):

        #First look to see if it exists and if not, then create it
        dpd,_ = self.rrclient.find_resources(restype=RT.DataProcessDefinition, name='google_dt_transform')
        if len(dpd) > 0:
            return dpd[0]

        # Data Process Definition
        log.debug("Create data process definition GoogleDtTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
            name='google_dt_transform',
            description='Convert data streams to Google DataTables',
            module='ion.processes.data.transforms.viz.google_dt',
            class_name='VizTransformGoogleDT',
            process_source='VizTransformGoogleDT source code here...')
        try:
            procdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except Exception as ex:
            self.fail("failed to create new VizTransformGoogleDT data process definition: %s" %ex)


        # create a stream definition for the data from the
        stream_def_id = self.pubsubclient.create_stream_definition(container=VizTransformGoogleDT.outgoing_stream_def,  name='VizTransformGoogleDT')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(stream_def_id, procdef_id )

        return procdef_id


    def _validate_google_dt_results(self, results_stream_def, results):

        cc = self.container
        assertions = self.assertTrue

        for g in results:

            if isinstance(g,Granule):

                tx = TaxyTool.load_from_granule(g)
                rdt = RecordDictionaryTool.load_from_granule(g)
                #log.warn(tx.pretty_print())
                #log.warn(rdt.pretty_print())

                gdt_component = rdt['google_dt_components'][0]

                assertions(gdt_component['viz_product_type'] == 'google_realtime_dt' )
                gdt_description = gdt_component['data_table_description']
                gdt_content = gdt_component['data_table_content']

                assertions(gdt_description[0][0] == 'time')
                assertions(len(gdt_description) > 1)
                assertions(len(gdt_content) >= 0)

        return


    def _create_mpl_graphs_data_process_definition(self):

        #First look to see if it exists and if not, then create it
        dpd,_ = self.rrclient.find_resources(restype=RT.DataProcessDefinition, name='mpl_graphs_transform')
        if len(dpd) > 0:
            return dpd[0]

        #Data Process Definition
        log.debug("Create data process definition MatplotlibGraphsTransform")
        dpd_obj = IonObject(RT.DataProcessDefinition,
            name='mpl_graphs_transform',
            description='Convert data streams to Matplotlib graphs',
            module='ion.processes.data.transforms.viz.matplotlib_graphs',
            class_name='VizTransformMatplotlibGraphs',
            process_source='VizTransformMatplotlibGraphs source code here...')
        try:
            procdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
        except Exception as ex:
            self.fail("failed to create new VizTransformMatplotlibGraphs data process definition: %s" %ex)


        # create a stream definition for the data
        stream_def_id = self.pubsubclient.create_stream_definition(container=VizTransformMatplotlibGraphs.outgoing_stream_def,  name='VizTransformMatplotlibGraphs')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(stream_def_id, procdef_id )

        return procdef_id

    def _validate_mpl_graphs_results(self, results_stream_def, results):

        cc = self.container
        assertions = self.assertTrue


        for g in results:
            if isinstance(g,Granule):

                tx = TaxyTool.load_from_granule(g)
                rdt = RecordDictionaryTool.load_from_granule(g)
                #log.warn(tx.pretty_print())
                #log.warn(rdt.pretty_print())

                graphs = rdt['matplotlib_graphs']

                for graph in graphs:
                    assertions(graph['viz_product_type'] == 'matplotlib_graphs' )
                    # check to see if the list (numpy array) contains actual PNG images
                    assertions(imghdr.what(graph['image_name'], graph['image_obj']) == 'png')


        return

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),'Not integrated for CEI')
    #@unittest.skip("Skipping for debugging ")
    def test_google_dt_transform_workflow(self):

        assertions = self.assertTrue

        # Build the workflow definition
        workflow_def_obj = IonObject(RT.WorkflowDefinition, name='GoogleDT_Test_Workflow',description='Tests the workflow of converting stream data to Google DT')

        #Add a transformation process definition
        google_dt_procdef_id = self._create_google_dt_data_process_definition()
        workflow_step_obj = IonObject('DataProcessWorkflowStep', data_process_definition_id=google_dt_procdef_id)
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #Create it in the resource registry
        workflow_def_id = self.workflowclient.create_workflow_definition(workflow_def_obj)

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the input data product
        ctd_stream_id, ctd_parsed_data_product_id = self._create_ctd_input_stream_and_data_product()
        data_product_stream_ids.append(ctd_stream_id)

        #Create and start the workflow
        workflow_id, workflow_product_id = self.workflowclient.create_data_process_workflow(workflow_def_id, ctd_parsed_data_product_id)

        workflow_output_ids,_ = self.rrclient.find_subjects(RT.Workflow, PRED.hasOutputProduct, workflow_product_id, True)
        assertions(len(workflow_output_ids) == 1 )

        #Walk the associations to find the appropriate output data streams to validate the messages
        workflow_dp_ids,_ = self.rrclient.find_objects(workflow_id, PRED.hasDataProduct, RT.DataProduct, True)
        assertions(len(workflow_dp_ids) == 1 )

        for dp_id in workflow_dp_ids:
            stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, None, True)
            assertions(len(stream_ids) == 1 )
            data_product_stream_ids.append(stream_ids[0])

        #Start the input stream process
        ctd_sim_pid = self._start_simple_input_stream_process(ctd_stream_id)

        #Start the output stream listener to monitor and verify messages
        results = self._start_output_stream_listener(data_product_stream_ids)


        #Stop the workflow processes
        self.workflowclient.terminate_data_process_workflow(workflow_id, False)  # Should test true at some point

        # stop the flow and parse the messages...
        self.process_dispatcher.cancel_process(ctd_sim_pid) # kill the ctd simulator process - that is enough data

        #Validate the data from each of the messages along the way
        self._validate_google_dt_results(VizTransformGoogleDT.outgoing_stream_def, results)

        #Cleanup to make sure delete is correct.
        self.workflowclient.delete_workflow_definition(workflow_def_id)

        workflow_def_ids,_ = self.rrclient.find_resources(restype=RT.WorkflowDefinition)
        assertions(len(workflow_def_ids) == 0 )



    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),'Not integrated for CEI')
    #@unittest.skip("Skipping for debugging ")
    def test_mpl_graphs_transform_workflow(self):

        assertions = self.assertTrue

        # Build the workflow definition
        workflow_def_obj = IonObject(RT.WorkflowDefinition, name='Mpl_Graphs_Test_Workflow',description='Tests the workflow of converting stream data to Matplotlib graphs')

        #Add a transformation process definition
        mpl_graphs_procdef_id = self._create_mpl_graphs_data_process_definition()
        workflow_step_obj = IonObject('DataProcessWorkflowStep', data_process_definition_id=mpl_graphs_procdef_id)
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #Create it in the resource registry
        workflow_def_id = self.workflowclient.create_workflow_definition(workflow_def_obj)

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the input data product
        ctd_stream_id, ctd_parsed_data_product_id = self._create_ctd_input_stream_and_data_product()
        data_product_stream_ids.append(ctd_stream_id)

        #Create and start the workflow
        workflow_id, workflow_product_id = self.workflowclient.create_data_process_workflow(workflow_def_id, ctd_parsed_data_product_id)

        workflow_output_ids,_ = self.rrclient.find_subjects(RT.Workflow, PRED.hasOutputProduct, workflow_product_id, True)
        assertions(len(workflow_output_ids) == 1 )

        #Walk the associations to find the appropriate output data streams to validate the messages
        workflow_dp_ids,_ = self.rrclient.find_objects(workflow_id, PRED.hasDataProduct, RT.DataProduct, True)
        assertions(len(workflow_dp_ids) == 1 )

        for dp_id in workflow_dp_ids:
            stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, None, True)
            assertions(len(stream_ids) == 1 )
            data_product_stream_ids.append(stream_ids[0])

        #Start the input stream process
        ctd_sim_pid = self._start_sinusoidal_input_stream_process(ctd_stream_id)

        #Start the output stream listener to monitor and verify messages
        results = self._start_output_stream_listener(data_product_stream_ids)

        #Stop the workflow processes
        self.workflowclient.terminate_data_process_workflow(workflow_id, False)  # Should test true at some point

        # stop the flow and parse the messages...
        self.process_dispatcher.cancel_process(ctd_sim_pid) # kill the ctd simulator process - that is enough data

        #Validate the data from each of the messages along the way
        self._validate_mpl_graphs_results(VizTransformMatplotlibGraphs.outgoing_stream_def, results)

        #Cleanup to make sure delete is correct.
        self.workflowclient.delete_workflow_definition(workflow_def_id)

        workflow_def_ids,_ = self.rrclient.find_resources(restype=RT.WorkflowDefinition)
        assertions(len(workflow_def_ids) == 0 )


    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),'Not integrated for CEI')
    #@unittest.skip("Skipping for debugging ")
    def test_multiple_workflow_instances(self):

        assertions = self.assertTrue

        # Build the workflow definition
        workflow_def_obj = IonObject(RT.WorkflowDefinition, name='Multiple_Test_Workflow',description='Tests the workflow of converting stream data')

        #Add a transformation process definition
        google_dt_procdef_id = self._create_google_dt_data_process_definition()
        workflow_step_obj = IonObject('DataProcessWorkflowStep', data_process_definition_id=google_dt_procdef_id)
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #Create it in the resource registry
        workflow_def_id = self.workflowclient.create_workflow_definition(workflow_def_obj)

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the first input data product
        ctd_stream_id1, ctd_parsed_data_product_id1 = self._create_ctd_input_stream_and_data_product('ctd_parsed1')
        data_product_stream_ids.append(ctd_stream_id1)

        #Create and start the first workflow
        workflow_id1, workflow_product_id1 = self.workflowclient.create_data_process_workflow(workflow_def_id, ctd_parsed_data_product_id1)

        #Create the second input data product
        ctd_stream_id2, ctd_parsed_data_product_id2 = self._create_ctd_input_stream_and_data_product('ctd_parsed2')
        data_product_stream_ids.append(ctd_stream_id2)

        #Create and start the second workflow
        workflow_id2, workflow_product_id2 = self.workflowclient.create_data_process_workflow(workflow_def_id, ctd_parsed_data_product_id2)

        #Verify that both workflow instances were created
        workflow_ids,_ = self.rrclient.find_resources(restype=RT.Workflow)
        assertions(len(workflow_ids) == 2 )


        #Start the first input stream process
        ctd_sim_pid1 = self._start_sinusoidal_input_stream_process(ctd_stream_id1)

        #Start the second input stream process
        ctd_sim_pid2 = self._start_simple_input_stream_process(ctd_stream_id2)

        #Start the output stream listener to monitor a set number of messages being sent through the workflows
        results = self._start_output_stream_listener(data_product_stream_ids, message_count_per_stream=5)

        # stop the flow of messages...
        self.process_dispatcher.cancel_process(ctd_sim_pid1) # kill the ctd simulator process - that is enough data
        self.process_dispatcher.cancel_process(ctd_sim_pid2)

        #Stop the first workflow processes
        self.workflowclient.terminate_data_process_workflow(workflow_id1, False)  # Should test true at some point

        #Stop the second workflow processes
        self.workflowclient.terminate_data_process_workflow(workflow_id2, False)  # Should test true at some point

        workflow_ids,_ = self.rrclient.find_resources(restype=RT.Workflow)
        assertions(len(workflow_ids) == 0 )

        #Cleanup to make sure delete is correct.
        self.workflowclient.delete_workflow_definition(workflow_def_id)

        workflow_def_ids,_ = self.rrclient.find_resources(restype=RT.WorkflowDefinition)
        assertions(len(workflow_def_ids) == 0 )

        aid_list = self.rrclient.find_associations(workflow_def_id, PRED.hasDataProcessDefinition)
        assertions(len(aid_list) == 0 )
class TestObservatoryManagementFullIntegration(IonIntegrationTestCase):
    def setUp(self):
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.RR = ResourceRegistryServiceClient()
        self.RR2 = EnhancedResourceRegistryClient(self.RR)
        self.OMS = ObservatoryManagementServiceClient()
        self.org_management_service = OrgManagementServiceClient()
        self.IMS = InstrumentManagementServiceClient()
        self.dpclient = DataProductManagementServiceClient()
        self.pubsubcli = PubsubManagementServiceClient()
        self.damsclient = DataAcquisitionManagementServiceClient()
        self.dataset_management = DatasetManagementServiceClient()
        self.data_retriever = DataRetrieverServiceClient()
        self.data_product_management = DataProductManagementServiceClient()

        self._load_stage = 0
        self._resources = {}

    def preload_ooi(self, stage=STAGE_LOAD_ASSETS):
        # Preloads OOI up to a given stage
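        # Stages are ordered and cumulative (ORGS -> PARAMS -> AGENTS -> ASSETS);
        # each block below runs at most once, guarded by self._load_stage.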

        if self._load_stage >= stage:
            return

        if self._load_stage < STAGE_LOAD_ORGS:
            log.info(
                "--------------------------------------------------------------------------------------------------------"
            )
            log.info("Preloading stage: %s (OOIR2 Orgs, users, roles)",
                     STAGE_LOAD_ORGS)
            # load_OOIR2_scenario
            self.container.spawn_process("Loader",
                                         "ion.processes.bootstrap.ion_loader",
                                         "IONLoader",
                                         config=dict(
                                             op="load",
                                             scenario="OOIR2",
                                             path="master",
                                         ))
            self._load_stage = STAGE_LOAD_ORGS

        if self._load_stage < STAGE_LOAD_PARAMS:
            log.info(
                "--------------------------------------------------------------------------------------------------------"
            )
            log.info("Preloading stage: %s (BASE params, streamdefs)",
                     STAGE_LOAD_PARAMS)
            # load_parameter_scenarios
            self.container.spawn_process(
                "Loader",
                "ion.processes.bootstrap.ion_loader",
                "IONLoader",
                config=dict(
                    op="load",
                    scenario="BETA",
                    path="master",
                    categories=
                    "ParameterFunctions,ParameterDefs,ParameterDictionary,StreamDefinition",
                    clearcols="owner_id,org_ids",
                    assets="res/preload/r2_ioc/ooi_assets",
                    parseooi="True",
                ))
            self._load_stage = STAGE_LOAD_PARAMS

        if self._load_stage < STAGE_LOAD_AGENTS:
            log.info(
                "--------------------------------------------------------------------------------------------------------"
            )
            log.info("Preloading stage: %s (OOIR2_I agents, model links)",
                     STAGE_LOAD_AGENTS)
            # load_OOIR2_agents
            self.container.spawn_process("Loader",
                                         "ion.processes.bootstrap.ion_loader",
                                         "IONLoader",
                                         config=dict(
                                             op="load",
                                             scenario="OOIR2_I",
                                             path="master",
                                         ))
            self._load_stage = STAGE_LOAD_AGENTS

        if self._load_stage < STAGE_LOAD_ASSETS:
            log.info(
                "--------------------------------------------------------------------------------------------------------"
            )
            log.info(
                "Preloading stage: %s (OOI assets linked to params, agents)",
                STAGE_LOAD_ASSETS)
            # load_ooi_assets
            self.container.spawn_process(
                "Loader",
                "ion.processes.bootstrap.ion_loader",
                "IONLoader",
                config=dict(
                    op="load",
                    loadooi="True",
                    path="master",
                    assets="res/preload/r2_ioc/ooi_assets",
                    bulk="True",
                    debug="True",
                    ooiuntil="9/1/2013",
                    ooiparams="True",
                    #excludecategories: DataProduct,DataProductLink,Deployment,Workflow,WorkflowDefinition
                ))
            self._load_stage = STAGE_LOAD_ASSETS

        # 'DataProduct,DataProductLink,WorkflowDefinition,ExternalDataProvider,ExternalDatasetModel,ExternalDataset,ExternalDatasetAgent,ExternalDatasetAgentInstance',

    @unittest.skip('Work in progress')
    def test_observatory(self):
        self._load_stage = 0
        self._resources = {}
        passing = True

        self.assertTrue(True)

        # LOAD STEP 1
        self.preload_ooi(stage=STAGE_LOAD_ORGS)

        passing &= self.orguserrole_assertions()

        # LOAD STEP 2
        self.preload_ooi(stage=STAGE_LOAD_PARAMS)

        passing &= self.parameter_assertions()

        # LOAD STEP 3
        self.preload_ooi(stage=STAGE_LOAD_AGENTS)

        passing &= self.agent_assertions()

        # LOAD STEP 4
        self.preload_ooi(stage=STAGE_LOAD_ASSETS)

        # Check OOI preloaded resources to see if they match needs for this test and for correctness
        passing &= self.sites_assertions()
        passing &= self.device_assertions()
        passing &= self.deployment_assertions()

        # Extensive tests on select RSN nodes
        passing &= self.rsn_node_checks()

        # Extensive tests on select RSN instruments
        passing &= self.check_rsn_instrument()

        passing &= self.check_rsn_instrument_data_product()

        # Extensive tests on a glider
        #passing &= self.check_glider()

        # Extensive tests on a CG assembly
        #passing &= self.check_cg_assembly()

        # Add a new instrument agent
        # Add a new instrument agent instance
        # Check DataProducts
        # Check Provenance

        IonIntegrationTestCase.assertTrue(self, passing)

    # -------------------------------------------------------------------------

    def orguserrole_assertions(self):
        passing = True

        passing &= self._check_marine_facility("MF_CGSN")
        passing &= self._check_marine_facility("MF_RSN")
        passing &= self._check_marine_facility("MF_EA")

        return passing

    def _check_marine_facility(self, preload_id):
        passing = True
        log.debug("Checking marine facility %s and associations", preload_id)

        mf_obj = self.retrieve_ooi_asset(preload_id)
        mf_id = mf_obj._id
        self._resources[preload_id] = mf_id

        passing &= self.assertEquals(mf_obj.lcstate, LCS.DEPLOYED)

        res_list, _ = self.RR.find_objects(subject=mf_id,
                                           predicate=PRED.hasMembership,
                                           id_only=True)
        passing &= self.assertTrue(len(res_list) >= 3)

        res_list, _ = self.RR.find_objects(subject=mf_id,
                                           predicate=PRED.hasRole,
                                           id_only=False)
        passing &= self.assertTrue(len(res_list) >= 5)

        passing &= self._check_role_assignments(res_list, "ORG_MANAGER")
        passing &= self._check_role_assignments(res_list,
                                                "OBSERVATORY_OPERATOR")
        passing &= self._check_role_assignments(res_list,
                                                "INSTRUMENT_OPERATOR")

        return passing

    def _check_role_assignments(self, role_list, role_name):
        passing = True
        role_obj = self._find_resource_in_list(role_list, "governance_name",
                                               role_name)
        if role_obj:
            # find_subjects returns a (subjects, associations) tuple; unpack it so
            # the length check below counts matching subjects
            res_list, _ = self.RR.find_subjects(predicate=PRED.hasRole,
                                                object=role_obj._id,
                                                id_only=True)
            passing &= self.assertTrue(len(res_list) >= 1)

        return passing

    def parameter_assertions(self):
        passing = True

        pctx_list, _ = self.RR.find_resources_ext(restype=RT.ParameterContext)
        passing &= self.assertTrue(len(pctx_list) >= 10)

        pdict_list, _ = self.RR.find_resources_ext(
            restype=RT.ParameterDictionary)
        passing &= self.assertTrue(len(pdict_list) >= 10)

        sdef_list, _ = self.RR.find_resources_ext(restype=RT.StreamDefinition)
        passing &= self.assertTrue(len(sdef_list) >= 10)

        # Verify that a PDict has the appropriate QC parameters defined
        pdicts, _ = self.RR.find_resources_ext(restype=RT.ParameterDictionary,
                                               alt_id_ns='PRE',
                                               alt_id='DICT110')
        passing &= self.assertTrue(len(pdicts) == 1)
        if not pdicts:
            return passing
        pdict = pdicts[0]

        # According to the latest SAF, density should NOT have trend
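        # QC parameter names appear to follow <parameter>_<test>_qc, so the
        # trend-test parameter for density would be named 'density_trndtst_qc'.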

        parameters, _ = self.RR.find_objects(pdict, PRED.hasParameterContext)
        names = [i.name for i in parameters if i.name.startswith('density')]
        passing &= self.assertTrue('density_trndtst_qc' not in names)

        return passing

    def agent_assertions(self):
        passing = True

        # TODO: More tests?

        return passing

    def sites_assertions(self):
        passing = True
        observatory_list, _ = self.RR.find_resources_ext(
            restype=RT.Observatory)
        passing &= self.assertTrue(len(observatory_list) >= 40)
        for obs in observatory_list:
            passing &= self.assertEquals(obs.lcstate, LCS.DEPLOYED)

        platform_site_list, _ = self.RR.find_resources(RT.PlatformSite,
                                                       id_only=False)
        log.debug('platform sites: %s', [ps.name for ps in platform_site_list])
        passing &= self.assertTrue(len(platform_site_list) >= 30)

        return passing

    def device_assertions(self):
        passing = True
        platform_device_list, _ = self.RR.find_resources(RT.PlatformDevice,
                                                         id_only=False)
        passing &= self.assertTrue(len(platform_device_list) >= 30)
        for pdev in platform_device_list:
            log.debug('platform device: %s', pdev.name)
            passing &= self.assertEquals(pdev.lcstate, LCS.PLANNED)

        platform_agent_list, _ = self.RR.find_resources(RT.PlatformAgent,
                                                        id_only=False)
        passing &= self.assertTrue(len(platform_agent_list) >= 2)
        for pagent in platform_agent_list:
            log.debug('platform agent: %s', pagent.name)
            passing &= self.assertEquals(pagent.lcstate, LCS.DEPLOYED)

        instrument_agent_list, _ = self.RR.find_resources(RT.InstrumentAgent,
                                                          id_only=False)
        passing &= self.assertTrue(len(instrument_agent_list) >= 3)
        for iagent in instrument_agent_list:
            log.debug('instrument agent: %s', iagent.name)
            passing &= self.assertEquals(iagent.lcstate, LCS.DEPLOYED)

            model_list, _ = self.RR.find_objects(subject=iagent._id,
                                                 predicate=PRED.hasModel,
                                                 id_only=True)
            passing &= self.assertTrue(
                len(model_list) >= 1, "IA %s" % iagent.name)

        return passing

    def deployment_assertions(self):
        passing = True
        deployment_list, _ = self.RR.find_resources(RT.Deployment,
                                                    id_only=False)
        passing &= self.assertTrue(len(deployment_list) >= 30)
        for deploy in deployment_list:
            log.debug('deployment: %s', deploy.name)
            passing &= self.assertEquals(deploy.lcstate, LCS.DEPLOYED)
        return passing

    def rsn_node_checks(self):
        """
        Current preload creates:
        - PlatformDevice in PLANNED
        - PlatformSite in DEPLOYED
        - Deployment in DEPLOYED
        - Deployment is NOT activated
        """
        passing = True

        dp_obj = self.retrieve_ooi_asset("CE04OSHY-PN01C_DEP")

        passing &= self.assertEquals(dp_obj.lcstate, LCS.DEPLOYED)
        passing &= self.assertEquals(dp_obj.availability, AS.AVAILABLE)
        log.debug(
            'test_observatory  retrieve CE04OSHY-PN01C_DEP deployment:  %s',
            dp_obj)

        # Check existing RSN node CE04OSHY-LV01C Deployment (DEPLOYED lcstate)
        CE04OSHY_LV01C_deployment = self.retrieve_ooi_asset(
            'CE04OSHY-LV01C_DEP')
        passing &= self.assertEquals(CE04OSHY_LV01C_deployment.lcstate,
                                     LCS.DEPLOYED)
        passing &= self.assertEquals(CE04OSHY_LV01C_deployment.availability,
                                     AS.AVAILABLE)

        #self.dump_deployment(CE04OSHY_LV01C_deployment._id)
        log.debug(
            'test_observatory  retrieve RSN node CE04OSHY-LV01C Deployment:  %s',
            CE04OSHY_LV01C_deployment)

        CE04OSHY_LV01C_device = self.retrieve_ooi_asset('CE04OSHY-LV01C_PD')

        # Set CE04OSHY-LV01C device to DEVELOPED state
        passing &= self.transition_lcs_then_verify(
            resource_id=CE04OSHY_LV01C_device._id,
            new_lcs_state=LCE.DEVELOP,
            verify=LCS.DEVELOPED)

        # Set CE04OSHY-LV01C device to INTEGRATED state
        passing &= self.transition_lcs_then_verify(
            resource_id=CE04OSHY_LV01C_device._id,
            new_lcs_state=LCE.INTEGRATE,
            verify=LCS.INTEGRATED)

        # Set CE04OSHY-LV01C device to DEPLOYED state
        passing &= self.transition_lcs_then_verify(
            resource_id=CE04OSHY_LV01C_device._id,
            new_lcs_state=LCE.DEPLOY,
            verify=LCS.DEPLOYED)

        # Set CE04OSHY-LV01C Deployment to DEPLOYED state
        # NOTE: Deployments are created in DEPLOYED state, currently
        #self.transition_lcs_then_verify(resource_id=CE04OSHY_LV01C_deployment._id, new_lcs_state=LCE.DEPLOY, verify='DEPLOYED')

        # Activate Deployment for CE04OSHY-LV01C
        self.OMS.activate_deployment(CE04OSHY_LV01C_deployment._id)
        log.debug(
            '---------    activate_deployment CE04OSHY_LV01C_deployment -------------- '
        )
        self.dump_deployment(CE04OSHY_LV01C_deployment._id)
        passing &= self.validate_deployment_activated(
            CE04OSHY_LV01C_deployment._id)

        # (optional) Start CE04OSHY-LV01C platform agent with simulator

        # NOTE: DataProduct is generated in DEPLOYED state
        # # Set DataProduct for CE04OSHY-LV01C platform to DEPLOYED state
        # output_data_product_ids, assns =self.RR.find_objects(subject=CE04OSHY_LV01C_device._id, predicate=PRED.hasOutputProduct, id_only=True)
        # if output_data_product_ids:
        #     #self.assertEquals(len(child_devs), 3)
        #     for output_data_product_id in output_data_product_ids:
        #         log.debug('DataProduct for CE04OSHY-LV01C platform:  %s', output_data_product_id)
        #         self.transition_lcs_then_verify(resource_id=output_data_product_id, new_lcs_state=LCE.DEPLOY, verify='DEPLOYED')

        # Check events for CE04OSHY-LV01C platform

        # Check existing CE04OSBP-LJ01C Deployment (PLANNED lcstate)
        #        dp_list, _  = self.RR.find_resources_ext(alt_id_ns="PRE", alt_id="CE04OSBP-LJ01C_DEP")
        #        self.assertEquals(len(dp_list), 1)
        #        CE04OSHY_LV01C_deployment = dp_list[0]
        #        self.assertEquals(CE04OSHY_LV01C_deployment.lcstate, 'PLANNED')
        #        log.debug('test_observatory  retrieve RSN node CE04OSBP-LJ01C Deployment:  %s', CE04OSHY_LV01C_deployment)

        # Set CE04OSBP-LJ01C Deployment to DEPLOYED state

        # Update description and other attributes for CE04OSBP-LJ01C device resource

        # Create attachment (JPG image) for CE04OSBP-LJ01C device resource

        # Activate Deployment for CE04OSBP-LJ01C

        # (optional) Add/register CE04OSBP-LJ01C platform agent to parent agent

        # (optional) Start CE04OSBP-LJ01C platform agent

        return passing

    def check_rsn_instrument(self):
        """
        Check existing RSN instrument CE04OSBP-LJ01C-06-CTDBPO108 Deployment
        Current preload creates:
        - InstrumentDevice in PLANNED
        - InstrumentSite in DEPLOYED
        - Deployment in DEPLOYED
        - Deployment is activated
        """

        passing = True
        CE04OSBP_LJ01C_06_CTDBPO108_deploy = self.retrieve_ooi_asset(
            'CE04OSBP-LJ01C-06-CTDBPO108_DEP')
        self.dump_deployment(CE04OSBP_LJ01C_06_CTDBPO108_deploy._id)
        #passing &= self.assertEquals(CE04OSBP_LJ01C_06_CTDBPO108_deploy.lcstate, 'PLANNED')

        # Set CE04OSBP-LJ01C-06-CTDBPO108 device to DEVELOPED state
        CE04OSBP_LJ01C_06_CTDBPO108_device = self.retrieve_ooi_asset(
            'CE04OSBP-LJ01C-06-CTDBPO108_ID')
        passing &= self.transition_lcs_then_verify(
            resource_id=CE04OSBP_LJ01C_06_CTDBPO108_device._id,
            new_lcs_state=LCE.DEVELOP,
            verify='DEVELOPED')

        # Set CE04OSBP-LJ01C-06-CTDBPO108 device to INTEGRATED state
        passing &= self.transition_lcs_then_verify(
            resource_id=CE04OSBP_LJ01C_06_CTDBPO108_device._id,
            new_lcs_state=LCE.INTEGRATE,
            verify='INTEGRATED')

        # Set CE04OSBP-LJ01C-06-CTDBPO108 device to DEPLOYED state
        passing &= self.transition_lcs_then_verify(
            resource_id=CE04OSBP_LJ01C_06_CTDBPO108_device._id,
            new_lcs_state=LCE.DEPLOY,
            verify='DEPLOYED')

        # Set CE04OSBP-LJ01C-06-CTDBPO108 Deployment to DEPLOYED state
        #self.transition_lcs_then_verify(resource_id=CE04OSBP_LJ01C_06_CTDBPO108_deploy._id, new_lcs_state=LCE.DEPLOY, verify='DEPLOYED')

        # Activate Deployment for CE04OSBP-LJ01C-06-CTDBPO108 instrument
        log.debug(
            '---------    activate_deployment CE04OSBP-LJ01C-06-CTDBPO108 deployment -------------- '
        )
        self.OMS.activate_deployment(CE04OSBP_LJ01C_06_CTDBPO108_deploy._id)
        passing &= self.validate_deployment_activated(
            CE04OSBP_LJ01C_06_CTDBPO108_deploy._id)

        # (optional) Add/register CE04OSBP-LJ01C-06-CTDBPO108 instrument agent to parent agent

        # (optional) Start CE04OSBP-LJ01C-06-CTDBPO108 instrument agent with simulator

        # Set all DataProducts for CE04OSBP-LJ01C-06-CTDBPO108 to DEPLOYED state

        # (optional) Create a substitute Deployment for site CE04OSBP-LJ01C-06-CTDBPO108 with a comparable device
        CE04OSBP_LJ01C_06_CTDBPO108_isite = self.retrieve_ooi_asset(
            'CE04OSBP-LJ01C-06-CTDBPO108')

        ## create device here: retrieve CTD Mooring on Mooring Riser 001 - similar?
        GP03FLMB_RI001_10_CTDMOG999_ID_idevice = self.retrieve_ooi_asset(
            'GP03FLMB-RI001-10-CTDMOG999_ID')

        deploy_id_2 = self.create_basic_deployment(
            name='CE04OSBP-LJ01C-06-CTDBPO108_DEP2',
            description=
            'substitute Deployment for site CE04OSBP-LJ01C-06-CTDBPO108 with a comparable device'
        )
        self.OMS.assign_device_to_deployment(
            instrument_device_id=GP03FLMB_RI001_10_CTDMOG999_ID_idevice._id,
            deployment_id=deploy_id_2)
        self.OMS.assign_site_to_deployment(
            instrument_site_id=CE04OSBP_LJ01C_06_CTDBPO108_isite._id,
            deployment_id=deploy_id_2)
        self.dump_deployment(deploy_id_2)

        # (optional) Activate this second deployment - check first deployment is deactivated
        self.OMS.deactivate_deployment(CE04OSBP_LJ01C_06_CTDBPO108_deploy._id)
        passing &= self.validate_deployment_deactivated(
            CE04OSBP_LJ01C_06_CTDBPO108_deploy._id)

        # log.debug('Activate deployment deploy_id_2')
        # self.get_deployment_ids(deploy_id_2)
        # self.dump_deployment(deploy_id_2, "deploy_id_2")
        # self.OMS.activate_deployment(deploy_id_2)
        # passing &= self.validate_deployment_deactivated(CE04OSBP_LJ01C_06_CTDBPO108_deploy._id)
        #
        # # (optional) Set first CE04OSBP-LJ01C-06-CTDBPO108 Deployment to INTEGRATED state
        # passing &= self.transition_lcs_then_verify(resource_id=CE04OSBP_LJ01C_06_CTDBPO108_deploy._id, new_lcs_state=LCE.INTEGRATE, verify='INTEGRATED')
        #
        # # Set first CE04OSBP-LJ01C-06-CTDBPO108 device to INTEGRATED state
        # passing &= self.transition_lcs_then_verify(resource_id=CE04OSBP_LJ01C_06_CTDBPO108_device._id, new_lcs_state=LCE.INTEGRATE, verify='INTEGRATED')
        #
        #
        # # (optional) Create a third Deployment for site CE04OSBP-LJ01C-06-CTDBPO108 with a same device from first deployment
        # deploy_id_3 = self.create_basic_deployment(name='CE04OSBP-LJ01C-06-CTDBPO108_DEP3', description='substitute Deployment for site CE04OSBP-LJ01C-06-CTDBPO108 with same device as first')
        # self.IMS.deploy_instrument_device(instrument_device_id=GP03FLMB_RI001_10_CTDMOG999_ID_idevice._id, deployment_id=deploy_id_3)
        # self.OMS.deploy_instrument_site(instrument_site_id=CE04OSBP_LJ01C_06_CTDBPO108_isite._id, deployment_id=deploy_id_3)
        # self.dump_deployment(deploy_id_3)
        #
        #
        # # Set first CE04OSBP-LJ01C-06-CTDBPO108 device to DEPLOYED state
        # passing &= self.transition_lcs_then_verify(resource_id=CE04OSBP_LJ01C_06_CTDBPO108_device._id, new_lcs_state=LCE.DEPLOY, verify='DEPLOYED')
        #
        # # (optional) Activate this third deployment - check second deployment is deactivated
        # log.debug('Activate deployment deploy_id_3')
        # self.dump_deployment(deploy_id_3)
        # self.OMS.activate_deployment(deploy_id_3)
        # #todo: check second deployment is deactivated

        return passing

    def check_data_product_reference(self, reference_designator, output=None):
        # Avoid a mutable default argument; callers pass in a list that collects
        # (data_product_id, stream_def_id, dataset_id) tuples.
        if output is None:
            output = []
        passing = True

        data_product_ids, _ = self.RR.find_resources_ext(
            alt_id_ns='PRE',
            alt_id='%s_DPI1' % reference_designator,
            id_only=True)  # Assuming DPI1 is parsed
        passing &= self.assertEquals(len(data_product_ids), 1)

        if not data_product_ids:
            return passing

        # Let's go ahead and activate it
        data_product_id = data_product_ids[0]
        self.dpclient.activate_data_product_persistence(data_product_id)
        self.addCleanup(self.dpclient.suspend_data_product_persistence,
                        data_product_id)

        dataset_ids, _ = self.RR.find_objects(data_product_id,
                                              PRED.hasDataset,
                                              id_only=True)
        passing &= self.assertEquals(len(dataset_ids), 1)
        if not dataset_ids:
            return passing
        dataset_id = dataset_ids[0]

        stream_def_ids, _ = self.RR.find_objects(data_product_id,
                                                 PRED.hasStreamDefinition,
                                                 id_only=True)
        passing &= self.assertEquals(len(stream_def_ids), 1)
        if not stream_def_ids:
            return passing
        stream_def_id = stream_def_ids[0]
        output.append((data_product_id, stream_def_id, dataset_id))
        return passing

    def check_tempsf_instrument_data_product(self, reference_designator):
        passing = True
        info_list = []
        passing &= self.check_data_product_reference(reference_designator,
                                                     info_list)
        if not passing: return passing
        data_product_id, stream_def_id, dataset_id = info_list.pop()

        now = time.time()
        ntp_now = now + 2208988800
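        # 2208988800 s is the offset between the NTP epoch (1900-01-01) and the
        # Unix epoch (1970-01-01); stream timestamps are expressed in NTP time.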

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = [ntp_now]
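        # 'temperature' is an array-type parameter: each record carries all 24
        # channel readings at once, hence the nested list.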
        rdt['temperature'] = [[
            25.3884, 26.9384, 24.3394, 23.3401, 22.9832, 29.4434, 26.9873,
            15.2883, 16.3374, 14.5883, 15.7253, 18.4383, 15.3488, 17.2993,
            10.2111, 11.5993, 10.9345, 9.4444, 9.9876, 10.9834, 11.0098,
            5.3456, 4.2994, 4.3009
        ]]

        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)
        ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
        passing &= self.assertTrue(dataset_monitor.wait())
        if not passing: return passing

        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        passing &= self.assert_array_almost_equal(rdt['time'], [ntp_now])
        passing &= self.assert_array_almost_equal(rdt['temperature'], [[
            25.3884, 26.9384, 24.3394, 23.3401, 22.9832, 29.4434, 26.9873,
            15.2883, 16.3374, 14.5883, 15.7253, 18.4383, 15.3488, 17.2993,
            10.2111, 11.5993, 10.9345, 9.4444, 9.9876, 10.9834, 11.0098,
            5.3456, 4.2994, 4.3009
        ]])
        return passing

    def check_trhph_instrument_data_products(self, reference_designator):
        passing = True
        info_list = []
        passing &= self.check_data_product_reference(reference_designator,
                                                     info_list)
        if not passing:
            return passing

        data_product_id, stream_def_id, dataset_id = info_list.pop()

        pdict = self.RR2.find_parameter_dictionary_of_stream_definition_using_has_parameter_dictionary(
            stream_def_id)
        passing &= self.assertEquals(pdict.name, 'trhph_sample')

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)

        # calibration constants
        a = 1.98e-9
        b = -2.45e-6
        c = 9.28e-4
        d = -0.0888
        e = 0.731

        V_s = 1.506
        V_c = 0.
        T = 11.8

        r1 = 0.906
        r2 = 4.095
        r3 = 4.095

        ORP_V = 1.806
        Cl = np.nan

        offset = 2008
        gain = 4.0
        # Normally this would be 50 per the DPS but the precision is %4.0f which truncates the values to the nearest 1...
        ORP = ((ORP_V * 1000.) - offset) / gain
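        # e.g. with the values above: ((1.806 * 1000.) - 2008) / 4.0 = -50.5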

        ntp_now = time.time() + 2208988800

        rdt['cc_a'] = [a]
        rdt['cc_b'] = [b]
        rdt['cc_c'] = [c]
        rdt['cc_d'] = [d]
        rdt['cc_e'] = [e]
        rdt['ref_temp_volts'] = [V_s]
        rdt['resistivity_temp_volts'] = [V_c]
        rdt['eh_sensor'] = [ORP_V]
        rdt['resistivity_5'] = [r1]
        rdt['resistivity_x1'] = [r2]
        rdt['resistivity_x5'] = [r3]
        rdt['cc_offset'] = [offset]
        rdt['cc_gain'] = [gain]
        rdt['time'] = [ntp_now]

        passing &= self.assert_array_almost_equal(rdt['vent_fluid_temperaure'],
                                                  [T], 2)
        passing &= self.assert_array_almost_equal(
            rdt['vent_fluid_chloride_conc'], [Cl], 4)
        passing &= self.assert_array_almost_equal(rdt['vent_fluid_orp'], [ORP],
                                                  4)

        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)
        ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
        passing &= self.assertTrue(dataset_monitor.wait())
        if not passing: return passing

        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)

        passing &= self.assert_array_almost_equal(rdt['vent_fluid_temperaure'],
                                                  [T], 2)
        passing &= self.assert_array_almost_equal(
            rdt['vent_fluid_chloride_conc'], [Cl], 4)
        passing &= self.assert_array_almost_equal(rdt['vent_fluid_orp'], [ORP],
                                                  4)

        return passing

    def check_vel3d_instrument_data_products(self, reference_designator):
        passing = True
        info_list = []
        passing &= self.check_data_product_reference(reference_designator,
                                                     info_list)
        if not passing:
            return passing
        data_product_id, stream_def_id, dataset_id = info_list.pop()

        pdict = self.RR2.find_parameter_dictionary_of_stream_definition_using_has_parameter_dictionary(
            stream_def_id)
        self.assertEquals(pdict.name, 'vel3d_b_sample')

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        lat = 14.6846
        lon = -51.044
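        # ten hourly samples (3600 s apart), expressed as NTP timestamps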
        ts = np.array([
            3319563600, 3319567200, 3319570800, 3319574400, 3319578000,
            3319581600, 3319585200, 3319588800, 3319592400, 3319596000
        ],
                      dtype=np.float64)

        ve = np.array([-3.2, 0.1, 0., 2.3, -0.1, 5.6, 5.1, 5.8, 8.8, 10.3])

        vn = np.array([18.2, 9.9, 12., 6.6, 7.4, 3.4, -2.6, 0.2, -1.5, 4.1])
        vu = np.array([-1.1, -0.6, -1.4, -2, -1.7, -2, 1.3, -1.6, -1.1, -4.5])
        ve_expected = np.array([
            -0.085136, -0.028752, -0.036007, 0.002136, -0.023158, 0.043218,
            0.056451, 0.054727, 0.088446, 0.085952
        ])
        vn_expected = np.array([
            0.164012, 0.094738, 0.114471, 0.06986, 0.07029, 0.049237,
            -0.009499, 0.019311, 0.012096, 0.070017
        ])
        vu_expected = np.array([
            -0.011, -0.006, -0.014, -0.02, -0.017, -0.02, 0.013, -0.016,
            -0.011, -0.045
        ])

        rdt['time'] = ts
        rdt['lat'] = [lat] * 10
        rdt['lon'] = [lon] * 10
        rdt['turbulent_velocity_east'] = ve
        rdt['turbulent_velocity_north'] = vn
        rdt['turbulent_velocity_up'] = vu

        passing &= self.assert_array_almost_equal(
            rdt['eastward_turbulent_velocity'], ve_expected)
        passing &= self.assert_array_almost_equal(
            rdt['northward_turbulent_velocity'], vn_expected)
        passing &= self.assert_array_almost_equal(
            rdt['upward_turbulent_velocity'], vu_expected)

        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)
        ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
        passing &= self.assertTrue(dataset_monitor.wait())
        if not passing: return passing

        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        passing &= self.assert_array_almost_equal(
            rdt['eastward_turbulent_velocity'], ve_expected)
        passing &= self.assert_array_almost_equal(
            rdt['northward_turbulent_velocity'], vn_expected)
        passing &= self.assert_array_almost_equal(
            rdt['upward_turbulent_velocity'], vu_expected)
        return passing

    def check_presta_instrument_data_products(self, reference_designator):
        # Check the parsed data product: make sure it has everything it needs and can be published, persisted, etc.

        # Absolute Pressure (SFLPRES_L0) is what comes off the instrument; SFLPRES_L1 is a parameter function (pfunc)
        # Let's go ahead and publish some fake data!!!
        # According to https://alfresco.oceanobservatories.org/alfresco/d/d/workspace/SpacesStore/63e16865-9d9e-4b11-b0b3-d5658faa5080/1341-00230_Data_Product_Spec_SFLPRES_OOI.pdf
        # Appendix A. Example 1.
        # p_psia_tide = 14.8670
        # the tide should be 10.2504
        passing = True

        info_list = []
        passing &= self.check_data_product_reference(reference_designator,
                                                     info_list)
        if not passing:
            return passing
        data_product_id, stream_def_id, dataset_id = info_list.pop()

        now = time.time()
        ntp_now = now + 2208988800.

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = [ntp_now]
        rdt['absolute_pressure'] = [14.8670]
        passing &= self.assert_array_almost_equal(rdt['seafloor_pressure'],
                                                  [10.2504], 4)
        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)

        ParameterHelper.publish_rdt_to_data_product(data_product_id, rdt)
        self.assertTrue(
            dataset_monitor.wait())  # Bumped to 20 to keep buildbot happy
        if not passing: return passing

        granule = self.data_retriever.retrieve(dataset_id)

        rdt = RecordDictionaryTool.load_from_granule(granule)
        passing &= self.assert_array_almost_equal(rdt['time'], [ntp_now])
        passing &= self.assert_array_almost_equal(rdt['seafloor_pressure'],
                                                  [10.2504], 4)
        passing &= self.assert_array_almost_equal(rdt['absolute_pressure'],
                                                  [14.8670], 4)

        return passing

    def check_rsn_instrument_data_product(self):
        passing = True
        # For RS03AXBS-MJ03A-06-PRESTA301 (PREST-A) there are a few listed data
        # products: Parsed, Engineering, SFLPRES-L0 and SFLPRES-L1.
        # Check the data products and make sure they have the proper parameters:
        # every non-domain parameter on SFLPRES-L0 should carry the SFLPRES identifier.
        data_products, _ = self.RR.find_resources_ext(
            alt_id_ns='PRE',
            alt_id='RS03AXBS-MJ03A-06-PRESTA301_SFLPRES_L0_DPID',
            id_only=True)
        passing &= self.assertTrue(len(data_products) == 1)
        if not data_products:
            return passing

        data_product_id = data_products[0]

        stream_defs, _ = self.RR.find_objects(data_product_id,
                                              PRED.hasStreamDefinition,
                                              id_only=False)
        passing &= self.assertTrue(len(stream_defs) == 1)
        if not stream_defs:
            return passing

        # Assert that the stream definition has the correct reference designator
        stream_def = stream_defs[0]
        passing &= self.assertEquals(
            stream_def.stream_configuration['reference_designator'],
            'RS03AXBS-MJ03A-06-PRESTA301')

        # Get the pdict and make sure that the parameters corresponding to the available fields
        # begin with the appropriate data product identifier

        pdict_ids, _ = self.RR.find_objects(stream_def,
                                            PRED.hasParameterDictionary,
                                            id_only=True)
        passing &= self.assertEquals(len(pdict_ids), 1)
        if not pdict_ids:
            return passing

        pdict_id = pdict_ids[0]

        pdict = DatasetManagementService.get_parameter_dictionary(pdict_id)
        available_params = [
            pdict.get_context(i) for i in pdict.keys()
            if i in stream_def.available_fields
        ]
        for p in available_params:
            if p.name == 'time':  # Ignore the domain parameter
                continue
            passing &= self.assertTrue(p.ooi_short_name.startswith('SFLPRES'))
        passing &= self.check_presta_instrument_data_products(
            'RS01SLBS-MJ01A-06-PRESTA101')
        passing &= self.check_vel3d_instrument_data_products(
            'RS01SLBS-MJ01A-12-VEL3DB101')
        passing &= self.check_presta_instrument_data_products(
            'RS03AXBS-MJ03A-06-PRESTA301')
        passing &= self.check_vel3d_instrument_data_products(
            'RS03AXBS-MJ03A-12-VEL3DB301')
        passing &= self.check_tempsf_instrument_data_product(
            'RS03ASHS-MJ03B-07-TMPSFA301')
        passing &= self.check_vel3d_instrument_data_products(
            'RS03INT2-MJ03D-12-VEL3DB304')
        passing &= self.check_trhph_instrument_data_products(
            'RS03INT1-MJ03C-10-TRHPHA301')

        self.data_product_management.activate_data_product_persistence(
            data_product_id)
        dataset_id = self.RR2.find_dataset_id_of_data_product_using_has_dataset(
            data_product_id)
        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        self.assert_array_almost_equal(rdt['seafloor_pressure'], [10.2504], 4)
        self.assert_array_almost_equal(rdt['absolute_pressure'], [14.8670], 4)
        self.data_product_management.suspend_data_product_persistence(
            data_product_id)  # Should do nothing and not raise anything

        return passing

    def check_glider(self):
        '''
        # Check that glider GP05MOAS-GL001 assembly is defined by OOI preload (3 instruments)
        '''
        passing = True
        GP05MOAS_GL001_device = self.retrieve_ooi_asset('GP05MOAS-GL001_PD')
        child_devs, assns = self.RR.find_objects(
            subject=GP05MOAS_GL001_device._id,
            predicate=PRED.hasDevice,
            id_only=True)
        passing &= self.assertEquals(len(child_devs), 3)

        # Set GP05MOAS-GL001 Deployment to DEPLOYED
        GP05MOAS_GL001_deploy = self.retrieve_ooi_asset('GP05MOAS-GL001_DEP')
        passing &= self.transition_lcs_then_verify(
            resource_id=GP05MOAS_GL001_deploy._id,
            new_lcs_state=LCE.DEPLOY,
            verify='DEPLOYED')

        # Activate Deployment for GP05MOAS-GL001
        #self.OMS.activate_deployment(GP05MOAS_GL001_deploy._id)

        # Deactivate Deployment for GP05MOAS-GL001
        #self.OMS.deactivate_deployment(GP05MOAS_GL001_deploy._id)

        # Create a new Deployment resource X without any assignment
        x_deploy_id = self.create_basic_deployment(
            name='X_Deployment',
            description='new Deployment resource X without any assignment')

        # Assign Deployment X to site GP05MOAS-GL001
        GP05MOAS_GL001_psite = self.retrieve_ooi_asset('GP05MOAS-GL001')
        self.OMS.assign_site_to_deployment(GP05MOAS_GL001_psite._id,
                                           x_deploy_id)

        # Assign Deployment X to first device for GP05MOAS-GL001
        GP05MOAS_GL001_device = self.retrieve_ooi_asset('GP05MOAS-GL001_PD')
        self.OMS.assign_device_to_deployment(GP05MOAS_GL001_device._id,
                                             x_deploy_id)

        # Set GP05MOAS-GL001 Deployment to PLANNED state
        #self.transition_lcs_then_verify(resource_id=x_deploy_id, new_lcs_state=LCE.PLAN, verify='PLANNED')
        # ??? already in planned

        # Set second GP05MOAS-GL001 Deployment to DEPLOYED
        passing &= self.transition_lcs_then_verify(resource_id=x_deploy_id,
                                                   new_lcs_state=LCE.DEPLOY,
                                                   verify='DEPLOYED')
        self.dump_deployment(x_deploy_id)

        # Activate second Deployment for GP05MOAS-GL001
        #self.OMS.activate_deployment(x_deploy_id)

        # Deactivate second Deployment for GP05MOAS-GL001
        #self.OMS.deactivate_deployment(x_deploy_id)
        return passing

    def check_cg_assembly(self):
        passing = True

        # Set several CE01ISSM-RI002-* instrument devices to DEVELOPED state

        # Assemble several CE01ISSM-RI002-* instruments to a CG CE01ISSM-RI002 component platform

        # Set several CE01ISSM-RI002-* instrument devices to INTEGRATED state

        # Assemble CE01ISSM-RI002 platform to CG CE01ISSM-LM001 station platform

        # Set CE01ISSM-RI002 component device to INTEGRATED state

        # Set CE01ISSM-LM001 station device to INTEGRATED state

        # Set CE01ISSM-LM001 station device to DEPLOYED state (children maybe too?)

        # Set CE01ISSM-LM001 Deployment to DEPLOYED

        # Activate CE01ISSM-LM001 platform assembly deployment

        # Deactivate CE01ISSM-LM001 platform assembly deployment

        # Set CE01ISSM-LM001 Deployment to INTEGRATED state

        # Set CE01ISSM-LM001 station device to INTEGRATED state

        # Set CE01ISSM-RI002 component device to INTEGRATED state

        # Set CE01ISSM-RI002 component device to INTEGRATED state

        # Disassemble CE01ISSM-RI002 platform from CG CE01ISSM-LM001 station platform

        # Disassemble all CE01ISSM-RI002-* instruments from a CG CE01ISSM-RI002 component platform

        # Retire instrument one for CE01ISSM-RI002-*

        # Retire device one for CE01ISSM-RI002

        # Retire device one for CE01ISSM-LM001

        return passing

    # -------------------------------------------------------------------------

    def retrieve_ooi_asset(self, alt_id='', namespace='PRE'):
        dp_list, _ = self.RR.find_resources_ext(alt_id_ns=namespace,
                                                alt_id=alt_id)
        self.assertEquals(len(dp_list), 1)
        return dp_list[0]

    def transition_lcs_then_verify(self, resource_id, new_lcs_state, verify):
        self.RR2.advance_lcs(resource_id, new_lcs_state)
        resource_obj = self.RR.read(resource_id)
        return self.assertEquals(resource_obj.lcstate, verify)

    def create_basic_deployment(self, name='', description=''):
        start = IonTime(datetime.datetime(2013, 1, 1))
        end = IonTime(datetime.datetime(2014, 1, 1))
        temporal_bounds = IonObject(OT.TemporalBounds,
                                    name='planned',
                                    start_datetime=start.to_string(),
                                    end_datetime=end.to_string())
        deployment_obj = IonObject(RT.Deployment,
                                   name=name,
                                   description=description,
                                   context=IonObject(
                                       OT.CabledNodeDeploymentContext),
                                   constraint_list=[temporal_bounds])
        return self.OMS.create_deployment(deployment_obj)

    def validate_deployment_activated(self, deployment_id=''):
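        # An activated deployment is expected to have exactly one hasDevice
        # association between the deployment's site and its device.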
        site_id, device_id = self.get_deployment_ids(deployment_id)
        assocs = self.RR.find_associations(subject=site_id,
                                           predicate=PRED.hasDevice,
                                           object=device_id)
        return self.assertEquals(len(assocs), 1)

    def validate_deployment_deactivated(self, deployment_id=''):
        site_id, device_id = self.get_deployment_ids(deployment_id)
        assocs = self.RR.find_associations(subject=site_id,
                                           predicate=PRED.hasDevice,
                                           object=device_id)
        return self.assertEquals(len(assocs), 0)

    def dump_deployment(self, deployment_id='', name=""):
        #site_id, device_id = self.get_deployment_ids(deployment_id)
        resource_list, _ = self.RR.find_subjects(predicate=PRED.hasDeployment,
                                                 object=deployment_id,
                                                 id_only=True)
        resource_list.append(deployment_id)
        resources = self.RR.read_mult(resource_list)
        log.debug('---------   dump_deployment %s summary---------------',
                  name)
        for resource in resources:
            log.debug('%s: %s (%s)', resource._get_type(), resource.name,
                      resource._id)

        log.debug('---------   dump_deployment %s full dump ---------------',
                  name)

        for resource in resources:
            log.debug('resource: %s ', resource)
        log.debug('---------   dump_deployment %s end  ---------------', name)

        #assocs = self.container.resource_registry.find_assoctiations(anyside=deployment_id)
#        assocs = Container.instance.resource_registry.find_assoctiations(anyside=deployment_id)
#        log.debug('---------   dump_deployment  ---------------')
#        for assoc in assocs:
#            log.debug('SUBJECT: %s      PREDICATE: %s OBJET: %s', assoc.s, assoc.p, assoc.o)
#        log.debug('---------   dump_deployment  end  ---------------')

    def get_deployment_ids(self, deployment_id=''):
        devices = []
        sites = []
        idevice_list, _ = self.RR.find_subjects(RT.InstrumentDevice,
                                                PRED.hasDeployment,
                                                deployment_id,
                                                id_only=True)
        pdevice_list, _ = self.RR.find_subjects(RT.PlatformDevice,
                                                PRED.hasDeployment,
                                                deployment_id,
                                                id_only=True)
        devices = idevice_list + pdevice_list
        self.assertEquals(1, len(devices))
        isite_list, _ = self.RR.find_subjects(RT.InstrumentSite,
                                              PRED.hasDeployment,
                                              deployment_id,
                                              id_only=True)
        psite_list, _ = self.RR.find_subjects(RT.PlatformSite,
                                              PRED.hasDeployment,
                                              deployment_id,
                                              id_only=True)
        sites = isite_list + psite_list
        self.assertEquals(1, len(sites))
        return sites[0], devices[0]

    def _find_resource_in_list(self,
                               res_list,
                               attr,
                               attr_val,
                               assert_found=True):
        for res in res_list:
            v = getattr(res, attr, None)
            if v == attr_val:
                return res
        if assert_found:
            self.assertTrue(
                False,
                "Attribute %s value %s not found in list" % (attr, attr_val))
        return None

    # -------------------------------------------------------------------------

    def _get_caller(self):
        s = inspect.stack()
        return "%s:%s" % (s[2][1], s[2][2])

    @assertion_wrapper
    def assert_array_almost_equal(self, *args, **kwargs):
        np.testing.assert_array_almost_equal(*args, **kwargs)

    @assertion_wrapper
    def assertEquals(self, *args, **kwargs):
        IonIntegrationTestCase.assertEquals(self, *args, **kwargs)

    @assertion_wrapper
    def assertTrue(self, *args, **kwargs):
        IonIntegrationTestCase.assertTrue(self, *args, **kwargs)


class TestWorkflowManagementIntegration(IonIntegrationTestCase):
    def setUp(self):
        # Start container

        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.pubsubclient = PubsubManagementServiceClient(
            node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(
            node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(
            node=self.container.node)
        self.datasetclient = DatasetManagementServiceClient(
            node=self.container.node)
        self.workflowclient = WorkflowManagementServiceClient(
            node=self.container.node)
        self.process_dispatcher = ProcessDispatcherServiceClient(
            node=self.container.node)

        self.ctd_stream_def = SBE37_CDM_stream_definition()

    def _create_ctd_input_stream_and_data_product(
            self, data_product_name='ctd_parsed'):

        cc = self.container
        assertions = self.assertTrue

        #-------------------------------
        # Create CTD Parsed as the initial data product
        #-------------------------------
        # create a stream definition for the data from the ctd simulator
        ctd_stream_def_id = self.pubsubclient.create_stream_definition(
            container=self.ctd_stream_def, name='Simulated CTD data')

        log.debug('Creating new CDM data product with a stream definition')
        dp_obj = IonObject(RT.DataProduct,
                           name=data_product_name,
                           description='ctd stream test')
        try:
            ctd_parsed_data_product_id = self.dataproductclient.create_data_product(
                dp_obj, ctd_stream_def_id)
        except Exception as ex:
            self.fail("failed to create new data product: %s" % ex)

        log.debug('new ctd_parsed_data_product_id = %s',
                  ctd_parsed_data_product_id)

        #Only ever need one device for testing purposes.
        instDevice_obj, _ = self.rrclient.find_resources(
            restype=RT.InstrumentDevice, name='SBE37IMDevice')
        if instDevice_obj:
            instDevice_id = instDevice_obj[0]._id
        else:
            instDevice_obj = IonObject(RT.InstrumentDevice,
                                       name='SBE37IMDevice',
                                       description="SBE37IMDevice",
                                       serial_number="12345")
            instDevice_id = self.imsclient.create_instrument_device(
                instrument_device=instDevice_obj)

        self.damsclient.assign_data_product(
            input_resource_id=instDevice_id,
            data_product_id=ctd_parsed_data_product_id)

        self.dataproductclient.activate_data_product_persistence(
            data_product_id=ctd_parsed_data_product_id,
            persist_data=False,
            persist_metadata=False)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_parsed_data_product_id,
                                                   PRED.hasStream, None, True)
        assertions(len(stream_ids) > 0)
        ctd_stream_id = stream_ids[0]

        return ctd_stream_id, ctd_parsed_data_product_id

    def _start_simple_input_stream_process(self, ctd_stream_id):
        return self._start_input_stream_process(ctd_stream_id)

    def _start_sinusoidal_input_stream_process(self, ctd_stream_id):
        return self._start_input_stream_process(
            ctd_stream_id, 'ion.processes.data.sinusoidal_stream_publisher',
            'SinusoidalCtdPublisher')

    def _start_input_stream_process(
            self,
            ctd_stream_id,
            module='ion.processes.data.ctd_stream_publisher',
            class_name='SimpleCtdPublisher'):

        ###
        ### Start the process for producing the CTD data
        ###
        # process definition for the ctd simulator...
        producer_definition = ProcessDefinition()
        producer_definition.executable = {
            'module': module,
            'class': class_name
        }

        ctd_sim_procdef_id = self.process_dispatcher.create_process_definition(
            process_definition=producer_definition)

        # Start the ctd simulator to produce some data
        configuration = {
            'process': {
                'stream_id': ctd_stream_id,
            }
        }
        ctd_sim_pid = self.process_dispatcher.schedule_process(
            process_definition_id=ctd_sim_procdef_id,
            configuration=configuration)

        return ctd_sim_pid
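
    # Usage sketch: any publisher process that reads the 'stream_id' key from
    # its process configuration can be launched the same way (the module path
    # and class name below are hypothetical):
    #
    #   pid = self._start_input_stream_process(
    #       ctd_stream_id,
    #       module='ion.processes.data.my_stream_publisher',
    #       class_name='MyCtdPublisher')
    #   ...
    #   self.process_dispatcher.cancel_process(pid)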

    def _start_output_stream_listener(self,
                                      data_product_stream_ids,
                                      message_count_per_stream=10):

        cc = self.container
        assertions = self.assertTrue

        ###
        ### Make a subscriber in the test to listen for transformed data
        ###
        salinity_subscription_id = self.pubsubclient.create_subscription(
            query=StreamQuery(data_product_stream_ids),
            exchange_name='workflow_test',
            name="test workflow transformations",
        )

        pid = cc.spawn_process(name='dummy_process_for_test',
                               module='pyon.ion.process',
                               cls='SimpleProcess',
                               config={})
        dummy_process = cc.proc_manager.procs[pid]

        subscriber_registrar = StreamSubscriberRegistrar(process=dummy_process,
                                                         node=cc.node)

        result = gevent.event.AsyncResult()
        results = []

        def message_received(message, headers):
            log.warn('data received!')
            results.append(message)
            # Only wait for so many messages per stream
            if len(results) >= len(data_product_stream_ids) * message_count_per_stream:
                result.set(True)

        subscriber = subscriber_registrar.create_subscriber(
            exchange_name='workflow_test', callback=message_received)
        subscriber.start()

        # after the queue has been created it is safe to activate the subscription
        self.pubsubclient.activate_subscription(
            subscription_id=salinity_subscription_id)

        # Assert that we have received data
        assertions(result.get(timeout=30))

        self.pubsubclient.deactivate_subscription(
            subscription_id=salinity_subscription_id)

        subscriber.stop()

        return results
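
    # A minimal, self-contained sketch of the AsyncResult wait pattern used
    # above (hypothetical helper, not called by the tests): the callback
    # collects messages and trips the event once the expected count arrives,
    # while the caller blocks on result.get() with a timeout.
    def _await_n_messages_sketch(self, expected_count):
        result = gevent.event.AsyncResult()
        received = []

        def on_message(message, headers):
            received.append(message)
            if len(received) >= expected_count:
                result.set(True)

        # wire on_message up as a subscriber callback, then block with e.g.
        #   result.get(timeout=30)
        return on_message, result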

    def _validate_messages(self, results):

        cc = self.container
        assertions = self.assertTrue

        first_salinity_values = None

        for message in results:

            try:
                psd = PointSupplementStreamParser(
                    stream_definition=self.ctd_stream_def,
                    stream_granule=message)
                temp = psd.get_values('temperature')
                log.info(psd.list_field_names())
            except KeyError as ke:
                temp = None

            if temp is not None:
                assertions(isinstance(temp, numpy.ndarray))

                log.info('temperature=' + str(numpy.nanmin(temp)))

                first_salinity_values = None

            else:
                psd = PointSupplementStreamParser(
                    stream_definition=SalinityTransform.outgoing_stream_def,
                    stream_granule=message)
                log.info(psd.list_field_names())

                # Test the handy info method for the names of fields in the stream def
                assertions('salinity' in psd.list_field_names())

                # you have to know the name of the coverage in stream def
                salinity = psd.get_values('salinity')
                log.info('salinity=' + str(numpy.nanmin(salinity)))

                assertions(isinstance(salinity, numpy.ndarray))

                assertions(numpy.nanmin(salinity) >
                           0.0)  # salinity should always be greater than 0

                if first_salinity_values is None:
                    first_salinity_values = salinity.tolist()
                else:
                    second_salinity_values = salinity.tolist()
                    assertions(
                        len(first_salinity_values) == len(
                            second_salinity_values))
                    for idx in range(0, len(first_salinity_values)):
                        assertions(first_salinity_values[idx] *
                                   2.0 == second_salinity_values[idx])
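
    # A standalone sketch of the doubling check performed above: the
    # SalinityDoubler output should be exactly twice the upstream salinity,
    # element for element (toy values, not real CTD data).
    def _doubling_check_sketch(self):
        first = numpy.array([33.1, 34.2, 35.0])
        second = (first * 2.0).tolist()
        for idx, value in enumerate(first.tolist()):
            self.assertTrue(value * 2.0 == second[idx])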

    def _create_salinity_data_process_definition(self):

        # Salinity: Data Process Definition

        #First look to see if it exists and if not, then create it
        dpd, _ = self.rrclient.find_resources(restype=RT.DataProcessDefinition,
                                              name='ctd_salinity')
        if len(dpd) > 0:
            return dpd[0]

        log.debug("Create data process definition SalinityTransform")
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='ctd_salinity',
            description='create a salinity data product',
            module='ion.processes.data.transforms.ctd.ctd_L2_salinity',
            class_name='SalinityTransform',
            process_source='SalinityTransform source code here...')
        try:
            ctd_L2_salinity_dprocdef_id = self.dataprocessclient.create_data_process_definition(
                dpd_obj)
        except Exception as ex:
            self.fail(
                "failed to create new SalinityTransform data process definition: %s"
                % ex)

        # create a stream definition for the data from the salinity Transform
        sal_stream_def_id = self.pubsubclient.create_stream_definition(
            container=SalinityTransform.outgoing_stream_def, name='Salinity')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            sal_stream_def_id, ctd_L2_salinity_dprocdef_id)

        return ctd_L2_salinity_dprocdef_id

    def _create_salinity_doubler_data_process_definition(self):

        #First look to see if it exists and if not, then create it
        dpd, _ = self.rrclient.find_resources(restype=RT.DataProcessDefinition,
                                              name='salinity_doubler')
        if len(dpd) > 0:
            return dpd[0]

        # Salinity Doubler: Data Process Definition
        log.debug("Create data process definition SalinityDoublerTransform")
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='salinity_doubler',
            description='create a salinity doubler data product',
            module='ion.processes.data.transforms.example_double_salinity',
            class_name='SalinityDoubler',
            process_source='SalinityDoubler source code here...')
        try:
            salinity_doubler_dprocdef_id = self.dataprocessclient.create_data_process_definition(
                dpd_obj)
        except Exception as ex:
            self.fail(
                "failed to create new SalinityDoubler data process definition: %s"
                % ex)

        # create a stream definition for the data from the salinity Transform
        salinity_double_stream_def_id = self.pubsubclient.create_stream_definition(
            container=SalinityDoubler.outgoing_stream_def,
            name='SalinityDoubler')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            salinity_double_stream_def_id, salinity_doubler_dprocdef_id)

        return salinity_doubler_dprocdef_id
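
    # The two helpers above share a find-or-create shape. A generic version
    # (hypothetical, not used by the tests) might look like:
    #
    #   def _find_or_create_dpd(self, name, create_fn):
    #       dpd, _ = self.rrclient.find_resources(
    #           restype=RT.DataProcessDefinition, name=name)
    #       return dpd[0] if dpd else create_fn()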

    def create_transform_process(self, data_process_definition_id,
                                 data_process_input_dp_id):

        data_process_definition = self.rrclient.read(
            data_process_definition_id)

        # Find the link between the output Stream Definition resource and the Data Process Definition resource
        stream_ids, _ = self.rrclient.find_objects(data_process_definition._id,
                                                   PRED.hasStreamDefinition,
                                                   RT.StreamDefinition,
                                                   id_only=True)
        if not stream_ids:
            raise Inconsistent(
                "The data process definition %s is missing an association to an output stream definition"
                % data_process_definition._id)
        process_output_stream_def_id = stream_ids[0]

        #Concatenate the name of the workflow and data process definition for the name of the data product output
        data_process_name = data_process_definition.name

        # Create the output data product of the transform
        transform_dp_obj = IonObject(
            RT.DataProduct,
            name=data_process_name,
            description=data_process_definition.description)
        transform_dp_id = self.dataproductclient.create_data_product(
            transform_dp_obj, process_output_stream_def_id)
        self.dataproductclient.activate_data_product_persistence(
            data_product_id=transform_dp_id,
            persist_data=True,
            persist_metadata=True)

        #last one out of the for loop is the output product id
        output_data_product_id = transform_dp_id

        # Create the  transform data process
        log.debug("create data_process and start it")
        data_process_id = self.dataprocessclient.create_data_process(
            data_process_definition._id, [data_process_input_dp_id],
            {'output': transform_dp_id})
        self.dataprocessclient.activate_data_process(data_process_id)

        #Find the id of the output data stream
        stream_ids, _ = self.rrclient.find_objects(transform_dp_id,
                                                   PRED.hasStream, None, True)
        if not stream_ids:
            raise Inconsistent(
                "The data process %s is missing an association to an output stream"
                % data_process_id)

        return data_process_id, output_data_product_id

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Not integrated for CEI')
    #@unittest.skip("Skipping for debugging ")
    def test_SA_transform_components(self):

        assertions = self.assertTrue

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the input data product
        ctd_stream_id, ctd_parsed_data_product_id = self._create_ctd_input_stream_and_data_product()
        data_product_stream_ids.append(ctd_stream_id)

        ###
        ###  Setup the first transformation
        ###

        # Salinity: Data Process Definition
        ctd_L2_salinity_dprocdef_id = self._create_salinity_data_process_definition()

        l2_salinity_all_data_process_id, ctd_l2_salinity_output_dp_id = self.create_transform_process(
            ctd_L2_salinity_dprocdef_id, ctd_parsed_data_product_id)

        ## get the stream id for the transform outputs
        stream_ids, _ = self.rrclient.find_objects(
            ctd_l2_salinity_output_dp_id, PRED.hasStream, None, True)
        assertions(len(stream_ids) > 0)
        sal_stream_id = stream_ids[0]
        data_product_stream_ids.append(sal_stream_id)

        ###
        ###  Setup the second transformation
        ###

        # Salinity Doubler: Data Process Definition
        salinity_doubler_dprocdef_id = self._create_salinity_doubler_data_process_definition()

        salinity_double_data_process_id, salinity_doubler_output_dp_id = self.create_transform_process(
            salinity_doubler_dprocdef_id, ctd_l2_salinity_output_dp_id)

        stream_ids, _ = self.rrclient.find_objects(
            salinity_doubler_output_dp_id, PRED.hasStream, None, True)
        assertions(len(stream_ids) > 0)
        sal_dbl_stream_id = stream_ids[0]
        data_product_stream_ids.append(sal_dbl_stream_id)

        #Start the input stream process
        ctd_sim_pid = self._start_simple_input_stream_process(ctd_stream_id)

        #Start the output stream listener to monitor and verify messages
        results = self._start_output_stream_listener(data_product_stream_ids)

        #Stop the transform processes
        self.dataprocessclient.deactivate_data_process(
            salinity_double_data_process_id)
        self.dataprocessclient.deactivate_data_process(
            l2_salinity_all_data_process_id)

        # stop the flow and parse the messages...
        # kill the ctd simulator process - that is enough data
        self.process_dispatcher.cancel_process(ctd_sim_pid)

        #Validate the data from each of the messages along the way
        self._validate_messages(results)

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Not integrated for CEI')
    #@unittest.skip("Skipping for debugging ")
    def test_transform_workflow(self):

        assertions = self.assertTrue

        # Build the workflow definition
        workflow_def_obj = IonObject(
            RT.WorkflowDefinition,
            name='Salinity_Test_Workflow',
            description='tests a workflow of multiple transform data processes'
        )

        workflow_data_product_name = 'TEST-Workflow_Output_Product'  #Set a specific output product name

        #Add a transformation process definition
        ctd_L2_salinity_dprocdef_id = self._create_salinity_data_process_definition()
        workflow_step_obj = IonObject(
            'DataProcessWorkflowStep',
            data_process_definition_id=ctd_L2_salinity_dprocdef_id,
            persist_process_output_data=False
        )  #Don't persist the intermediate data product
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #Add a transformation process definition
        salinity_doubler_dprocdef_id = self._create_salinity_doubler_data_process_definition()
        workflow_step_obj = IonObject(
            'DataProcessWorkflowStep',
            data_process_definition_id=salinity_doubler_dprocdef_id,
            output_data_product_name=workflow_data_product_name)
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #Create it in the resource registry
        workflow_def_id = self.workflowclient.create_workflow_definition(
            workflow_def_obj)

        aids = self.rrclient.find_associations(workflow_def_id,
                                               PRED.hasDataProcessDefinition)
        assertions(len(aids) == 2)

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the input data product
        ctd_stream_id, ctd_parsed_data_product_id = self._create_ctd_input_stream_and_data_product()
        data_product_stream_ids.append(ctd_stream_id)

        #Create and start the workflow
        workflow_id, workflow_product_id = self.workflowclient.create_data_process_workflow(
            workflow_def_id, ctd_parsed_data_product_id, timeout=30)

        workflow_output_ids, _ = self.rrclient.find_subjects(
            RT.Workflow, PRED.hasOutputProduct, workflow_product_id, True)
        assertions(len(workflow_output_ids) == 1)

        #Verify the output data product name matches what was specified in the workflow definition
        workflow_product = self.rrclient.read(workflow_product_id)
        assertions(workflow_product.name == workflow_data_product_name)

        #Walk the associations to find the appropriate output data streams to validate the messages
        workflow_dp_ids, _ = self.rrclient.find_objects(
            workflow_id, PRED.hasDataProduct, RT.DataProduct, True)
        assertions(len(workflow_dp_ids) == 2)

        for dp_id in workflow_dp_ids:
            stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream,
                                                       None, True)
            assertions(len(stream_ids) == 1)
            data_product_stream_ids.append(stream_ids[0])

        #Start the input stream process
        ctd_sim_pid = self._start_simple_input_stream_process(ctd_stream_id)

        #Start the output stream listener to monitor and verify messages
        results = self._start_output_stream_listener(data_product_stream_ids)

        #Stop the workflow processes
        self.workflowclient.terminate_data_process_workflow(
            workflow_id, False, timeout=15)  # Should test true at some point

        #Make sure the Workflow object was removed
        objs, _ = self.rrclient.find_resources(restype=RT.Workflow)
        assertions(len(objs) == 0)

        # stop the flow and parse the messages...
        # kill the ctd simulator process - that is enough data
        self.process_dispatcher.cancel_process(ctd_sim_pid)

        #Validate the data from each of the messages along the way
        self._validate_messages(results)

        #Cleanup to make sure delete is correct.
        self.workflowclient.delete_workflow_definition(workflow_def_id)

        workflow_def_ids, _ = self.rrclient.find_resources(
            restype=RT.WorkflowDefinition)
        assertions(len(workflow_def_ids) == 0)

    def _create_google_dt_data_process_definition(self):

        #First look to see if it exists and if not, then create it
        dpd, _ = self.rrclient.find_resources(restype=RT.DataProcessDefinition,
                                              name='google_dt_transform')
        if len(dpd) > 0:
            return dpd[0]

        # Data Process Definition
        log.debug("Create data process definition GoogleDtTransform")
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='google_dt_transform',
            description='Convert data streams to Google DataTables',
            module='ion.processes.data.transforms.viz.google_dt',
            class_name='VizTransformGoogleDT',
            process_source='VizTransformGoogleDT source code here...')
        try:
            procdef_id = self.dataprocessclient.create_data_process_definition(
                dpd_obj)
        except Exception as ex:
            self.fail(
                "failed to create new VizTransformGoogleDT data process definition: %s"
                % ex)

        # create a stream definition for the data from the
        stream_def_id = self.pubsubclient.create_stream_definition(
            container=VizTransformGoogleDT.outgoing_stream_def,
            name='VizTransformGoogleDT')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            stream_def_id, procdef_id)

        return procdef_id

    def _validate_google_dt_results(self, results_stream_def, results):

        cc = self.container
        assertions = self.assertTrue

        for g in results:

            if isinstance(g, Granule):

                tx = TaxyTool.load_from_granule(g)
                rdt = RecordDictionaryTool.load_from_granule(g)
                #log.warn(tx.pretty_print())
                #log.warn(rdt.pretty_print())

                gdt_component = rdt['google_dt_components'][0]

                assertions(
                    gdt_component['viz_product_type'] == 'google_realtime_dt')
                gdt_description = gdt_component['data_table_description']
                gdt_content = gdt_component['data_table_content']

                assertions(gdt_description[0][0] == 'time')
                assertions(len(gdt_description) > 1)
                assertions(len(gdt_content) >= 0)

        return
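
    # Shape of the granule component validated above (toy values; the field
    # layout beyond the asserted pieces is an assumption):
    #
    #   gdt_component = {
    #       'viz_product_type': 'google_realtime_dt',
    #       'data_table_description': [['time', ...], ...],  # first column is time
    #       'data_table_content': [...],                     # zero or more rows
    #   }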

    def _create_mpl_graphs_data_process_definition(self):

        #First look to see if it exists and if not, then create it
        dpd, _ = self.rrclient.find_resources(restype=RT.DataProcessDefinition,
                                              name='mpl_graphs_transform')
        if len(dpd) > 0:
            return dpd[0]

        #Data Process Definition
        log.debug("Create data process definition MatplotlibGraphsTransform")
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='mpl_graphs_transform',
            description='Convert data streams to Matplotlib graphs',
            module='ion.processes.data.transforms.viz.matplotlib_graphs',
            class_name='VizTransformMatplotlibGraphs',
            process_source='VizTransformMatplotlibGraphs source code here...')
        try:
            procdef_id = self.dataprocessclient.create_data_process_definition(
                dpd_obj)
        except Exception as ex:
            self.fail(
                "failed to create new VizTransformMatplotlibGraphs data process definition: %s"
                % ex)

        # create a stream definition for the data
        stream_def_id = self.pubsubclient.create_stream_definition(
            container=VizTransformMatplotlibGraphs.outgoing_stream_def,
            name='VizTransformMatplotlibGraphs')
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            stream_def_id, procdef_id)

        return procdef_id

    def _validate_mpl_graphs_results(self, results_stream_def, results):

        cc = self.container
        assertions = self.assertTrue

        for g in results:
            if isinstance(g, Granule):

                tx = TaxyTool.load_from_granule(g)
                rdt = RecordDictionaryTool.load_from_granule(g)
                #log.warn(tx.pretty_print())
                #log.warn(rdt.pretty_print())

                graphs = rdt['matplotlib_graphs']

                for graph in graphs:
                    assertions(
                        graph['viz_product_type'] == 'matplotlib_graphs')
                    # check to see if the list (numpy array) contains actual images
                    assertions(
                        imghdr.what(graph['image_name'], graph['image_obj']) ==
                        'png')

        return
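
    # For reference: imghdr.what() accepts the raw bytes via its second
    # argument (the filename is then ignored), so the PNG signature alone is
    # enough to identify the format. A minimal self-contained sketch:
    def _imghdr_sketch(self):
        png_signature = '\x89PNG\r\n\x1a\n'
        self.assertTrue(imghdr.what('ignored_name', png_signature) == 'png')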

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Not integrated for CEI')
    #@unittest.skip("Skipping for debugging ")
    def test_google_dt_transform_workflow(self):

        assertions = self.assertTrue

        # Build the workflow definition
        workflow_def_obj = IonObject(
            RT.WorkflowDefinition,
            name='GoogleDT_Test_Workflow',
            description=
            'Tests the workflow of converting stream data to Google DT')

        #Add a transformation process definition
        google_dt_procdef_id = self._create_google_dt_data_process_definition()
        workflow_step_obj = IonObject(
            'DataProcessWorkflowStep',
            data_process_definition_id=google_dt_procdef_id)
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #Create it in the resource registry
        workflow_def_id = self.workflowclient.create_workflow_definition(
            workflow_def_obj)

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the input data product
        ctd_stream_id, ctd_parsed_data_product_id = self._create_ctd_input_stream_and_data_product()
        data_product_stream_ids.append(ctd_stream_id)

        #Create and start the workflow
        workflow_id, workflow_product_id = self.workflowclient.create_data_process_workflow(
            workflow_def_id, ctd_parsed_data_product_id)

        workflow_output_ids, _ = self.rrclient.find_subjects(
            RT.Workflow, PRED.hasOutputProduct, workflow_product_id, True)
        assertions(len(workflow_output_ids) == 1)

        #Walk the associations to find the appropriate output data streams to validate the messages
        workflow_dp_ids, _ = self.rrclient.find_objects(
            workflow_id, PRED.hasDataProduct, RT.DataProduct, True)
        assertions(len(workflow_dp_ids) == 1)

        for dp_id in workflow_dp_ids:
            stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream,
                                                       None, True)
            assertions(len(stream_ids) == 1)
            data_product_stream_ids.append(stream_ids[0])

        #Start the input stream process
        ctd_sim_pid = self._start_simple_input_stream_process(ctd_stream_id)

        #Start the output stream listener to monitor and verify messages
        results = self._start_output_stream_listener(data_product_stream_ids)

        #Stop the workflow processes
        self.workflowclient.terminate_data_process_workflow(
            workflow_id, False)  # Should test true at some point

        # stop the flow and parse the messages...
        # kill the ctd simulator process - that is enough data
        self.process_dispatcher.cancel_process(ctd_sim_pid)

        #Validate the data from each of the messages along the way
        self._validate_google_dt_results(
            VizTransformGoogleDT.outgoing_stream_def, results)

        #Cleanup to make sure delete is correct.
        self.workflowclient.delete_workflow_definition(workflow_def_id)

        workflow_def_ids, _ = self.rrclient.find_resources(
            restype=RT.WorkflowDefinition)
        assertions(len(workflow_def_ids) == 0)

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Not integrated for CEI')
    #@unittest.skip("Skipping for debugging ")
    def test_mpl_graphs_transform_workflow(self):

        assertions = self.assertTrue

        # Build the workflow definition
        workflow_def_obj = IonObject(
            RT.WorkflowDefinition,
            name='Mpl_Graphs_Test_Workflow',
            description=
            'Tests the workflow of converting stream data to Matplotlib graphs'
        )

        #Add a transformation process definition
        mpl_graphs_procdef_id = self._create_mpl_graphs_data_process_definition()
        workflow_step_obj = IonObject(
            'DataProcessWorkflowStep',
            data_process_definition_id=mpl_graphs_procdef_id)
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #Create it in the resource registry
        workflow_def_id = self.workflowclient.create_workflow_definition(
            workflow_def_obj)

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the input data product
        ctd_stream_id, ctd_parsed_data_product_id = self._create_ctd_input_stream_and_data_product()
        data_product_stream_ids.append(ctd_stream_id)

        #Create and start the workflow
        workflow_id, workflow_product_id = self.workflowclient.create_data_process_workflow(
            workflow_def_id, ctd_parsed_data_product_id)

        workflow_output_ids, _ = self.rrclient.find_subjects(
            RT.Workflow, PRED.hasOutputProduct, workflow_product_id, True)
        assertions(len(workflow_output_ids) == 1)

        #Walk the associations to find the appropriate output data streams to validate the messages
        workflow_dp_ids, _ = self.rrclient.find_objects(
            workflow_id, PRED.hasDataProduct, RT.DataProduct, True)
        assertions(len(workflow_dp_ids) == 1)

        for dp_id in workflow_dp_ids:
            stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream,
                                                       None, True)
            assertions(len(stream_ids) == 1)
            data_product_stream_ids.append(stream_ids[0])

        #Start the input stream process
        ctd_sim_pid = self._start_sinusoidal_input_stream_process(
            ctd_stream_id)

        #Start the output stream listener to monitor and verify messages
        results = self._start_output_stream_listener(data_product_stream_ids)

        #Stop the workflow processes
        self.workflowclient.terminate_data_process_workflow(
            workflow_id, False)  # Should test true at some point

        # stop the flow and parse the messages...
        # kill the ctd simulator process - that is enough data
        self.process_dispatcher.cancel_process(ctd_sim_pid)

        #Validate the data from each of the messages along the way
        self._validate_mpl_graphs_results(
            VizTransformMatplotlibGraphs.outgoing_stream_def, results)

        #Cleanup to make sure delete is correct.
        self.workflowclient.delete_workflow_definition(workflow_def_id)

        workflow_def_ids, _ = self.rrclient.find_resources(
            restype=RT.WorkflowDefinition)
        assertions(len(workflow_def_ids) == 0)

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Not integrated for CEI')
    #@unittest.skip("Skipping for debugging ")
    def test_multiple_workflow_instances(self):

        assertions = self.assertTrue

        # Build the workflow definition
        workflow_def_obj = IonObject(
            RT.WorkflowDefinition,
            name='Multiple_Test_Workflow',
            description='Tests the workflow of converting stream data')

        #Add a transformation process definition
        google_dt_procdef_id = self._create_google_dt_data_process_definition()
        workflow_step_obj = IonObject(
            'DataProcessWorkflowStep',
            data_process_definition_id=google_dt_procdef_id)
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #Create it in the resource registry
        workflow_def_id = self.workflowclient.create_workflow_definition(
            workflow_def_obj)

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the first input data product
        ctd_stream_id1, ctd_parsed_data_product_id1 = self._create_ctd_input_stream_and_data_product(
            'ctd_parsed1')
        data_product_stream_ids.append(ctd_stream_id1)

        #Create and start the first workflow
        workflow_id1, workflow_product_id1 = self.workflowclient.create_data_process_workflow(
            workflow_def_id, ctd_parsed_data_product_id1)

        #Create the second input data product
        ctd_stream_id2, ctd_parsed_data_product_id2 = self._create_ctd_input_stream_and_data_product(
            'ctd_parsed2')
        data_product_stream_ids.append(ctd_stream_id2)

        #Create and start the second workflow
        workflow_id2, workflow_product_id2 = self.workflowclient.create_data_process_workflow(
            workflow_def_id, ctd_parsed_data_product_id2)

        #Walk the associations to find the appropriate output data streams to validate the messages
        workflow_ids, _ = self.rrclient.find_resources(restype=RT.Workflow)
        assertions(len(workflow_ids) == 2)

        #Start the first input stream process
        ctd_sim_pid1 = self._start_sinusoidal_input_stream_process(
            ctd_stream_id1)

        #Start the second input stream process
        ctd_sim_pid2 = self._start_simple_input_stream_process(ctd_stream_id2)

        #Start the output stream listener to monitor a set number of messages being sent through the workflows
        results = self._start_output_stream_listener(
            data_product_stream_ids, message_count_per_stream=5)

        # stop the flow of messages...
        # kill both ctd simulator processes - that is enough data
        self.process_dispatcher.cancel_process(ctd_sim_pid1)
        self.process_dispatcher.cancel_process(ctd_sim_pid2)

        #Stop the first workflow processes
        self.workflowclient.terminate_data_process_workflow(
            workflow_id1, False)  # Should test true at some point

        #Stop the second workflow processes
        self.workflowclient.terminate_data_process_workflow(
            workflow_id2, False)  # Should test true at some point

        workflow_ids, _ = self.rrclient.find_resources(restype=RT.Workflow)
        assertions(len(workflow_ids) == 0)

        #Cleanup to make sure delete is correct.
        self.workflowclient.delete_workflow_definition(workflow_def_id)

        workflow_def_ids, _ = self.rrclient.find_resources(
            restype=RT.WorkflowDefinition)
        assertions(len(workflow_def_ids) == 0)

        aid_list = self.rrclient.find_associations(
            workflow_def_id, PRED.hasDataProcessDefinition)
        assertions(len(aid_list) == 0)


class TestResourceRegistryAttachments(IonIntegrationTestCase):
    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        #container = Container()
        #print 'starting container'
        #container.start()
        #print 'started container'

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.RR = ResourceRegistryServiceClient(node=self.container.node)

        print 'started services'

    def test_resource_registry_blob_sanity(self):
        resource_id, _ = self.RR.create(IonObject(RT.Resource, name="foo"))

        MY_CONTENT = "the quick brown fox etc etc etc"

        #save
        att_id = self.RR.create_attachment(
            resource_id,
            IonObject(RT.Attachment,
                      name="test.txt",
                      content=MY_CONTENT,
                      content_type="text/plain",
                      keywords=["test1", "test2"],
                      attachment_type=AttachmentType.BLOB))

        #load
        attachment = self.RR.read_attachment(att_id, include_content=True)
        self.assertEqual("test.txt", attachment.name)
        self.assertEqual("text/plain", attachment.content_type)
        self.assertIn("test1", attachment.keywords)
        self.assertIn("test2", attachment.keywords)

        # content round-trips: it is stored base64-encoded on write and
        # decoded again on read (see the sketch after this test)
        self.assertEqual(MY_CONTENT, attachment.content)

        obj = self.RR.read(resource_id)
        self.assertEqual(obj.name, "foo")
        obj.name = "TheDudeAbides"
        obj = self.RR.update(obj)
        obj = self.RR.read(resource_id)
        self.assertEqual(obj.name, "TheDudeAbides")

        att = self.RR.find_attachments(resource_id)
        self.assertNotEqual(att, None)

        actor_identity_obj = IonObject("ActorIdentity", name="name")
        actor_identity_obj_id, actor_identity_obj_rev = self.RR.create(
            actor_identity_obj)
        user_info_obj = IonObject("UserInfo", name="name")
        user_info_obj_id, user_info_obj_rev = self.RR.create(user_info_obj)
        assoc_id, assoc_rev = self.RR.create_association(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertNotEqual(assoc_id, None)

        find_assoc = self.RR.find_associations(actor_identity_obj_id,
                                               PRED.hasInfo, user_info_obj_id)
        self.assertTrue(find_assoc[0]._id == assoc_id)
        subj = self.RR.find_subjects(RT.ActorIdentity, PRED.hasInfo,
                                     user_info_obj_id, True)

        res_obj1 = self.RR.read_object(actor_identity_obj_id, PRED.hasInfo,
                                       RT.UserInfo)
        self.assertEquals(res_obj1._id, user_info_obj_id)

        self.RR.delete_association(assoc_id)
        self.RR.delete_attachment(att_id)
        self.RR.delete(resource_id)
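
    # For reference: the attachment content is stored base64-encoded and
    # handed back decoded by read_attachment(include_content=True), which is
    # why MY_CONTENT compares equal above. The underlying round trip:
    #
    #   import base64
    #   payload = "the quick brown fox etc etc etc"
    #   assert base64.b64decode(base64.b64encode(payload)) == payload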


class TestFindRelatedResources(IonIntegrationTestCase, ResourceHelper):
    """
    assembly integration tests at the service level
    """

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.IMS  = InstrumentManagementServiceClient(node=self.container.node)
        self.OMS = ObservatoryManagementServiceClient(node=self.container.node)

        self.RR   = ResourceRegistryServiceClient(node=self.container.node)


        self.care = {}
        self.dontcare = {}
        self.realtype = {}


#    @unittest.skip('this test just for debugging setup')
#    def test_just_the_setup(self):
#        return


    def create_dummy_structure(self):
        """
        Create two observatories.
         - each observatory has 2 subsites
         - each subsite has 2 more subsites
         - each of those subsites has 2 platform sites
         - each of those platform sites has a model and 2 sub- platform sites
         - each of those sub- platform sites has a model, matching platform device, and 2 instrument sites
         - each of those instrument sites has a model and matching instrument device

         One of each resource type (observatory all the way down to instrument device/model) is what we "care" about
          - it goes in the self.care dict
         All the rest go in the self.dontcare dict

         To manage subsite/platform multiplicity, we alias them in the dict... the proper hierarchy is:
         Observatory-Site-Subsite-PlatformSite-SubPlatformSite-InstrumentSite

         self.realtype[alias] gives the real resource type of an alias
        """
        self.create_observatory(True)
        self.create_observatory(False)

        for rt in [RT.Observatory, RT_SITE, RT.Subsite,
                   RT.PlatformSite, RT_SUBPLATFORMSITE, RT.PlatformDevice, RT.PlatformModel,
                   RT.InstrumentSite, RT.InstrumentDevice, RT.InstrumentModel
        ]:
            self.assertIn(rt, self.care)

        self.expected_associations = [
            (RT.Observatory, PRED.hasSite, RT_SITE),
            (RT_SITE, PRED.hasSite, RT.Subsite),
            (RT.Subsite, PRED.hasSite, RT.PlatformSite),
            (RT.PlatformSite, PRED.hasSite, RT_SUBPLATFORMSITE),
            (RT_SUBPLATFORMSITE, PRED.hasSite, RT.InstrumentSite),

            (RT_SUBPLATFORMSITE, PRED.hasModel, RT.PlatformModel),
            (RT_SUBPLATFORMSITE, PRED.hasDevice, RT.PlatformDevice),
            (RT.PlatformDevice, PRED.hasModel, RT.PlatformModel),

            (RT.InstrumentSite, PRED.hasModel, RT.InstrumentModel),
            (RT.InstrumentSite, PRED.hasDevice, RT.InstrumentDevice),
            (RT.InstrumentDevice, PRED.hasModel, RT.InstrumentModel)
        ]

        log.info("Verifying created structure")
        for (st, p, ot) in self.expected_associations:
            rst = self.realtype[st]
            rot = self.realtype[ot]
            s = self.care[st]
            o = self.care[ot]
            log.debug("searching %s->%s->%s as %s->%s->%s" % (st, p, ot, rst, p, rot))
            log.debug(" - expecting %s %s" % (rot, o))
            a = self.RR.find_associations(subject=s, predicate=p, object=o)
            if not (0 < len(a) < 3):
                a2 = self.RR.find_associations(subject=s, predicate=p)
                a2content = [("(%s %s)" % (alt.ot, alt.o)) for alt in a2]
                self.fail("Expected 1-2 associations for %s->%s->%s, got %s: %s" % (st, p, ot, len(a2), a2content))
            self.assertIn(o, [aa.o for aa in a])
        log.info("CREATED STRUCTURE APPEARS CORRECT ===============================")

    def simplify_assn_resource_ids(self, assn_list):
        count = 0

        lookup = {}

        retval = []

        for a in assn_list:
            if not a.s in lookup:
                lookup[a.s] = count
                count += 1
            if not a.o in lookup:
                lookup[a.o] = count
                count += 1
            retval.append(DotDict({"s":lookup[a.s], "st":a.st, "p":a.p, "o":lookup[a.o], "ot":a.ot}))

        return retval
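
    # The relabeling above maps opaque resource ids to small integers so the
    # association graph is readable in logs. A toy illustration with plain
    # dicts (no registry needed):
    #
    #   lookup = {}
    #   for rid in ['res-abc', 'res-def', 'res-abc']:
    #       lookup.setdefault(rid, len(lookup))
    #   # lookup == {'res-abc': 0, 'res-def': 1}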

    def describe_assn_graph(self, assn_list):
        return [("%s %s -> %s -> %s %s" % (a.st, a.s, a.p, a.ot, a.o)) for a in assn_list]

    #@unittest.skip('refactoring')
    def test_related_resource_crawler(self):
        """

        """
        self.create_dummy_structure()

        r = RelatedResourcesCrawler()

        # test the basic forward-backward searches
        for (st, p, ot) in self.expected_associations:
            rst = self.realtype[st]
            rot = self.realtype[ot]
            s = self.care[st]
            o = self.care[ot]

            test_sto_fn = r.generate_get_related_resources_fn(self.RR, [rot], {p: (True, False)})
            sto_crawl = test_sto_fn(s, 1) # depth of 1
            if 2 < len(sto_crawl): # we get 2 because of care/dontcare
                self.fail("got %s" % self.describe_assn_graph(self.simplify_assn_resource_ids(sto_crawl)))

            self.assertIn(o, [t.o for t in sto_crawl])

            test_ots_fn = r.generate_get_related_resources_fn(self.RR, [rst], {p: (False, True)})
            ots_crawl = test_ots_fn(o, 1) # depth of 1
            if 1 != len(ots_crawl):
                self.fail("got %s" % self.describe_assn_graph(self.simplify_assn_resource_ids(ots_crawl)))


        # test a nontrivial lookup, in which we extract resources related to an instrument device
        rw = []
        pd = {}

        # we want things related to an instrument device
        rw.append(RT.PlatformModel)
        rw.append(RT.InstrumentModel)
        rw.append(RT.PlatformDevice)
        rw.append(RT.InstrumentSite)
        rw.append(RT.PlatformSite)
        rw.append(RT.Subsite)
        rw.append(RT.Observatory)
        rw.append(RT.InstrumentDevice)
        pd[PRED.hasModel] = (True, True)
        pd[PRED.hasDevice] = (False, True)
        pd[PRED.hasSite] = (False, True)

        test_real_fn = r.generate_get_related_resources_fn(self.RR, resource_whitelist=rw, predicate_dictionary=pd)
        related = test_real_fn(self.care[RT.InstrumentDevice])

        log.debug("========= Result is:")
        for l in self.describe_assn_graph(self.simplify_assn_resource_ids(related)):
            log.debug("    %s", l)

        # check that we only got things we care about
        for a in related:
            # special case for platform model, because we don't care about the top-level platform's model
            #  so it will blow up if we don't ignore it.  if we got an extra platform model, we'd have an
            #  extra platform anyway... so this special case is safe.
            if a.st != RT.PlatformModel:
                self.assertIn(a.s, self.care.values(), "%s %s not cared about" % (a.st, a.s))

            if a.ot != RT.PlatformModel:
                self.assertIn(a.o, self.care.values(), "%s %s not cared about" % (a.ot, a.o))


class TestIntDataProcessManagementServiceMultiOut(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubclient =  PubsubManagementServiceClient(node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.datasetclient =  DatasetManagementServiceClient(node=self.container.node)
        self.dataset_management = self.datasetclient
        self.process_dispatcher = ProcessDispatcherServiceClient(node=self.container.node)

    def test_createDataProcess(self):

        #---------------------------------------------------------------------------
        # Data Process Definition
        #---------------------------------------------------------------------------
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L0_all',
                            description='transform ctd package into three separate L0 streams',
                            module='ion.processes.data.transforms.ctd.ctd_L0_all',
                            class_name='ctd_L0_all')
        dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)

        # Make assertion on the newly registered data process definition
        data_process_definition = self.rrclient.read(dprocdef_id)
        self.assertEquals(data_process_definition.name, 'ctd_L0_all')
        self.assertEquals(data_process_definition.description, 'transform ctd package into three separate L0 streams')
        self.assertEquals(data_process_definition.module, 'ion.processes.data.transforms.ctd.ctd_L0_all')
        self.assertEquals(data_process_definition.class_name, 'ctd_L0_all')

        # Read the data process definition using data process management and make assertions
        dprocdef_obj = self.dataprocessclient.read_data_process_definition(dprocdef_id)
        self.assertEquals(dprocdef_obj.class_name,'ctd_L0_all')
        self.assertEquals(dprocdef_obj.module,'ion.processes.data.transforms.ctd.ctd_L0_all')

        #---------------------------------------------------------------------------
        # Create an input instrument
        #---------------------------------------------------------------------------

        instrument_obj = IonObject(RT.InstrumentDevice, name='Inst1',description='an instrument that is creating the data product')
        instrument_id, rev = self.rrclient.create(instrument_obj)

        # Register the instrument so that the data producer and stream object are created
        data_producer_id = self.damsclient.register_instrument(instrument_id)

        # create a stream definition for the data from the ctd simulator
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubclient.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=pdict_id)

        self.dataprocessclient.assign_input_stream_definition_to_data_process_definition(ctd_stream_def_id, dprocdef_id )

        # Assert that the link between the stream definition and the data process definition was done
        assocs = self.rrclient.find_associations(subject=dprocdef_id, predicate=PRED.hasInputStreamDefinition, object=ctd_stream_def_id, id_only=True)

        self.assertIsNotNone(assocs)

        #---------------------------------------------------------------------------
        # Input Data Product
        #---------------------------------------------------------------------------
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()


        input_dp_obj = IonObject(   RT.DataProduct,
                                    name='InputDataProduct',
                                    description='some new dp',
                                    temporal_domain = tdom,
                                    spatial_domain = sdom)

        input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj, stream_definition_id=ctd_stream_def_id, exchange_point='test')

        #Make assertions on the input data product created
        input_dp_obj = self.rrclient.read(input_dp_id)
        self.assertEquals(input_dp_obj.name, 'InputDataProduct')
        self.assertEquals(input_dp_obj.description, 'some new dp')

        self.damsclient.assign_data_product(instrument_id, input_dp_id)

        # Retrieve the stream via the DataProduct->Stream associations
        stream_ids, _ = self.rrclient.find_objects(input_dp_id, PRED.hasStream, None, True)

        self.in_stream_id = stream_ids[0]

        #---------------------------------------------------------------------------
        # Output Data Product
        #---------------------------------------------------------------------------

        outgoing_stream_conductivity_id = self.pubsubclient.create_stream_definition(name='conductivity', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_conductivity_id, dprocdef_id,binding='conductivity' )

        outgoing_stream_pressure_id = self.pubsubclient.create_stream_definition(name='pressure', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_pressure_id, dprocdef_id, binding='pressure' )

        outgoing_stream_temperature_id = self.pubsubclient.create_stream_definition(name='temperature', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_temperature_id, dprocdef_id, binding='temperature' )


        self.output_products={}

        output_dp_obj = IonObject(RT.DataProduct,
            name='conductivity',
            description='transform output conductivity',
            temporal_domain = tdom,
            spatial_domain = sdom)

        output_dp_id_1 = self.dataproductclient.create_data_product(output_dp_obj, outgoing_stream_conductivity_id)
        self.output_products['conductivity'] = output_dp_id_1

        output_dp_obj = IonObject(RT.DataProduct,
            name='pressure',
            description='transform output pressure',
            temporal_domain = tdom,
            spatial_domain = sdom)

        output_dp_id_2 = self.dataproductclient.create_data_product(output_dp_obj, outgoing_stream_pressure_id)
        self.output_products['pressure'] = output_dp_id_2

        output_dp_obj = IonObject(RT.DataProduct,
            name='temperature',
            description='transform output ',
            temporal_domain = tdom,
            spatial_domain = sdom)

        output_dp_id_3 = self.dataproductclient.create_data_product(output_dp_obj, outgoing_stream_temperature_id)
        self.output_products['temperature'] = output_dp_id_3


        #---------------------------------------------------------------------------
        # Create the data process
        #---------------------------------------------------------------------------
        def _create_data_process():
            dproc_id = self.dataprocessclient.create_data_process(dprocdef_id, [input_dp_id], self.output_products)
            return dproc_id

        dproc_id = _create_data_process()

        # Make assertions on the data process created
        data_process = self.dataprocessclient.read_data_process(dproc_id)

        # Assert that the data process has a process id attached
        self.assertIsNotNone(data_process.process_id)

        # Assert that the data process got the input data product's subscription id attached as its own input_susbcription_id attribute
        self.assertIsNotNone(data_process.input_subscription_id)

        output_data_product_ids, _ = self.rrclient.find_objects(subject=dproc_id, predicate=PRED.hasOutputProduct, object_type=RT.DataProduct, id_only=True)

        self.assertEquals(set(output_data_product_ids), set([output_dp_id_1, output_dp_id_2, output_dp_id_3]))


    @patch.dict(CFG, {'endpoint':{'receive':{'timeout': 60}}})
    def test_createDataProcessUsingSim(self):
        #-------------------------------
        # Create InstrumentModel
        #-------------------------------
        instModel_obj = IonObject(RT.InstrumentModel, name='SBE37IMModel', description="SBE37IMModel" )
        instModel_id = self.imsclient.create_instrument_model(instModel_obj)

        #-------------------------------
        # Create InstrumentAgent
        #-------------------------------
        instAgent_obj = IonObject(RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", driver_uri="http://sddevrepo.oceanobservatories.org/releases/seabird_sbe37smb_ooicore-0.0.1-py2.7.egg")
        instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)

        self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

        #-------------------------------
        # Create InstrumentDevice
        #-------------------------------
        instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345" )
        instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
        self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)

        #-------------------------------
        # Create InstrumentAgentInstance to hold configuration information
        #-------------------------------
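
        # The port agent configuration below is read from CFG. The intended meaning
        # of the keys (our reading; the original test does not document them):
        # device_addr/device_port point at the SBE37 instrument endpoint,
        # command_port/data_port are the local ports the port agent serves, and
        # PortAgentProcessType.UNIX launches the port agent as a local OS process.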


        port_agent_config = {
            'device_addr':  CFG.device.sbe37.host,
            'device_port':  CFG.device.sbe37.port,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': CFG.device.sbe37.port_agent_cmd_port,
            'data_port': CFG.device.sbe37.port_agent_data_port,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance', description="SBE37IMAgentInstance",
                                          port_agent_config = port_agent_config)
        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(instAgentInstance_obj, instAgent_id, instDevice_id)


        #-------------------------------
        # Create CTD Parsed as the first data product
        #-------------------------------
        # create a stream definition for the data from the ctd simulator
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubclient.create_stream_definition(name='SBE32_CDM', parameter_dictionary_id=pdict_id)

        # Construct temporal and spatial Coordinate Reference System objects
        tdom, sdom = time_series_domain()

        sdom = sdom.dump()
        tdom = tdom.dump()



        dp_obj = IonObject(RT.DataProduct,
            name='ctd_parsed',
            description='ctd stream test',
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_parsed_data_product = self.dataproductclient.create_data_product(dp_obj, ctd_stream_def_id)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_parsed_data_product)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_parsed_data_product, PRED.hasStream, None, True)

        #-------------------------------
        # Create CTD Raw as the second data product
        #-------------------------------
        raw_stream_def_id = self.pubsubclient.create_stream_definition(name='SBE37_RAW', parameter_dictionary_id=pdict_id)

        dp_obj.name = 'ctd_raw'
        ctd_raw_data_product = self.dataproductclient.create_data_product(dp_obj, raw_stream_def_id)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_raw_data_product)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_raw_data_product, PRED.hasStream, None, True)

        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Data Process Definition
        #-------------------------------
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L0_all',
                            description='transform ctd package into three separate L0 streams',
                            module='ion.processes.data.transforms.ctd.ctd_L0_all',
                            class_name='ctd_L0_all')
        ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
            
        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Output Data Products
        #-------------------------------

        outgoing_stream_l0_conductivity_id = self.pubsubclient.create_stream_definition(name='L0_Conductivity', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_conductivity_id, ctd_L0_all_dprocdef_id, binding='conductivity' )

        outgoing_stream_l0_pressure_id = self.pubsubclient.create_stream_definition(name='L0_Pressure', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_pressure_id, ctd_L0_all_dprocdef_id, binding='pressure' )

        outgoing_stream_l0_temperature_id = self.pubsubclient.create_stream_definition(name='L0_Temperature', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_temperature_id, ctd_L0_all_dprocdef_id, binding='temperature' )


        self.output_products={}

        ctd_l0_conductivity_output_dp_obj = IonObject(  RT.DataProduct,
                                                        name='L0_Conductivity',
                                                        description='transform output conductivity',
                                                        temporal_domain = tdom,
                                                        spatial_domain = sdom)


        ctd_l0_conductivity_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_conductivity_output_dp_obj,
                                                                                outgoing_stream_l0_conductivity_id)
        self.output_products['conductivity'] = ctd_l0_conductivity_output_dp_id

        ctd_l0_pressure_output_dp_obj = IonObject(RT.DataProduct,
            name='L0_Pressure',
            description='transform output pressure',
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_l0_pressure_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_pressure_output_dp_obj,
                                                                                    outgoing_stream_l0_pressure_id)
        self.output_products['pressure'] = ctd_l0_pressure_output_dp_id

        ctd_l0_temperature_output_dp_obj = IonObject(RT.DataProduct,
            name='L0_Temperature',
            description='transform output temperature',
            temporal_domain = tdom,
            spatial_domain = sdom)


        ctd_l0_temperature_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_temperature_output_dp_obj,
                                                                                    outgoing_stream_l0_temperature_id)
        self.output_products['temperature'] = ctd_l0_temperature_output_dp_id


        #-------------------------------
        # Create listener for data process events and verify that events are received.
        #-------------------------------

        # todo: add a validation for Req: L4-CI-SA-RQ-367  Data processing shall notify registered data product consumers about data processing workflow life cycle events
        # todo (contd): the capability does not exist yet; ANS and SA are not yet publishing any workflow life cycle events (Swarbhanu)
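
        # A minimal sketch of such a listener, assuming pyon's EventSubscriber and an
        # event type named 'DataProcessStatusEvent'. The event type name is an
        # assumption (no such event is confirmed to be published yet), so nothing is
        # asserted on the captured events; they are only collected for inspection.
        from pyon.event.event import EventSubscriber

        received_process_events = []

        def on_process_event(event, headers):
            # Capture every received life cycle event for later inspection.
            received_process_events.append(event)

        process_event_subscriber = EventSubscriber(event_type='DataProcessStatusEvent',
                                                   callback=on_process_event)
        process_event_subscriber.start()
        self.addCleanup(process_event_subscriber.stop)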
        
        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Create the data process
        #-------------------------------

        ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process(ctd_L0_all_dprocdef_id, [ctd_parsed_data_product], self.output_products)
        data_process = self.rrclient.read(ctd_l0_all_data_process_id)
        process_id = data_process.process_id
        self.addCleanup(self.process_dispatcher.cancel_process, process_id)

        #-------------------------------
        # Wait until the process launched in the create_data_process() method is actually running, before proceeding further in this test
        #-------------------------------

        gate = ProcessStateGate(self.process_dispatcher.read_process, process_id, ProcessStateEnum.RUNNING)
        self.assertTrue(gate.await(30), "The data process (%s) did not spawn in 30 seconds" % process_id)

        #-------------------------------
        # Retrieve a list of all data process definitions in RR and validate that the DPD is listed
        #-------------------------------

        # todo: Req: L4-CI-SA-RQ-366  Data processing shall manage data topic definitions
        # todo: data topics are being handled by pub sub at the level of streams
        self.dataprocessclient.activate_data_process(ctd_l0_all_data_process_id)
        

        # todo: check that an activate event is received (L4-CI-SA-RQ-367)
        # todo (contd): no event appears to be published when the data process is activated,
        # todo (contd): so for now we only check below that the subscription is indeed activated (Swarbhanu)


        # todo: monitor process to see if it is active (sa-rq-182)
        ctd_l0_all_data_process = self.rrclient.read(ctd_l0_all_data_process_id)
        input_subscription_id = ctd_l0_all_data_process.input_subscription_id
        subs = self.rrclient.read(input_subscription_id)
        self.assertTrue(subs.activated)

        # todo: This has not yet been completed by CEI, will probably surface through a DPMS call
        self.dataprocessclient.deactivate_data_process(ctd_l0_all_data_process_id)


        #-------------------------------
        # Retrieve the extended resources for data process definition and for data process
        #-------------------------------
        extended_process_definition = self.dataprocessclient.get_data_process_definition_extension(ctd_L0_all_dprocdef_id)
        self.assertEqual(1, len(extended_process_definition.data_processes))
        log.debug("test_createDataProcess: extended_process_definition  %s", str(extended_process_definition))

        extended_process = self.dataprocessclient.get_data_process_extension(ctd_l0_all_data_process_id)
        self.assertEqual(1, len(extended_process.input_data_products))
        log.debug("test_createDataProcess: extended_process  %s", str(extended_process))

        ################################ Test the removal of data processes ##################################

        #-------------------------------------------------------------------
        # Test the deleting of the data process
        #-------------------------------------------------------------------

        # Before deleting, get the input streams, output streams and the subscriptions so that they can be checked after deleting
#        dp_obj_1 = self.rrclient.read(ctd_l0_all_data_process_id)
#        input_subscription_id = dp_obj_1.input_subscription_id
#        out_prods, _ = self.rrclient.find_objects(subject=ctd_l0_all_data_process_id, predicate=PRED.hasOutputProduct, id_only=True)
#        in_prods, _ = self.rrclient.find_objects(ctd_l0_all_data_process_id, PRED.hasInputProduct, id_only=True)
#        in_streams = []
#        for in_prod in in_prods:
#            streams, _ = self.rrclient.find_objects(in_prod, PRED.hasStream, id_only=True)
#            in_streams.extend(streams)
#        out_streams = []
#        for out_prod in out_prods:
#            streams, _ = self.rrclient.find_objects(out_prod, PRED.hasStream, id_only=True)
#            out_streams.extend(streams)

        # Deleting the data process
        self.dataprocessclient.delete_data_process(ctd_l0_all_data_process_id)

        # Check that the data process was removed; its lcstate should now be RETIRED
        dp_obj = self.rrclient.read(ctd_l0_all_data_process_id)
        self.assertEquals(dp_obj.lcstate, LCS.RETIRED)

        # Check for process defs still attached to the data process
        dpd_assn_ids = self.rrclient.find_associations(subject=ctd_l0_all_data_process_id,  predicate=PRED.hasProcessDefinition, id_only=True)
        self.assertEquals(len(dpd_assn_ids), 0)

        # Check for output data product still attached to the data process
        out_products, assocs = self.rrclient.find_objects(subject=ctd_l0_all_data_process_id, predicate=PRED.hasOutputProduct, id_only=True)
        self.assertEquals(len(out_products), 0)
        self.assertEquals(len(assocs), 0)

        # Check for input data products still attached to the data process
        inprod_associations = self.rrclient.find_associations(ctd_l0_all_data_process_id, PRED.hasInputProduct)
        self.assertEquals(len(inprod_associations), 0)

        # Check that the data process has been deactivated
        self.assertIsNone(dp_obj.input_subscription_id)

        # Read the original subscription id of the data process and check that it has been deactivated
        with self.assertRaises(NotFound):
            self.pubsubclient.read_subscription(input_subscription_id)

        #-------------------------------------------------------------------
        # Delete the data process definition
        #-------------------------------------------------------------------

        # Before deleting, look up the associated process definition so we can later check that it gets deleted as it should
        proc_def_ids, proc_def_asocs = self.rrclient.find_objects(ctd_l0_all_data_process_id, PRED.hasProcessDefinition)
        self.dataprocessclient.delete_data_process_definition(ctd_L0_all_dprocdef_id)

        # check that the data process definition has been retired
        dp_proc_def = self.rrclient.read(ctd_L0_all_dprocdef_id)
        self.assertEquals(dp_proc_def.lcstate, LCS.RETIRED)

        # Check for old associations of this data process definition
        proc_defs, proc_def_asocs = self.rrclient.find_objects(ctd_L0_all_dprocdef_id, PRED.hasProcessDefinition)
        self.assertEquals(len(proc_defs), 0)

        # find all associations where this is the subject
        _, obj_assns = self.rrclient.find_objects(subject=ctd_L0_all_dprocdef_id, id_only=True)
        self.assertEquals(len(obj_assns), 0)

        ################################ Test the removal of data processes ##################################
        # Try force delete... This should simply delete the associations and the data process object
        # from the resource registry

        #---------------------------------------------------------------------------------------------------------------
        # Force deleting a data process
        #---------------------------------------------------------------------------------------------------------------
        self.dataprocessclient.force_delete_data_process(ctd_l0_all_data_process_id)

        # find all associations where this is the subject
        _, obj_assns = self.rrclient.find_objects(subject=ctd_l0_all_data_process_id, id_only=True)

        # find all associations where this is the object
        _, sbj_assns = self.rrclient.find_subjects(object=ctd_l0_all_data_process_id, id_only=True)

        self.assertEquals(len(obj_assns), 0)
        self.assertEquals(len(sbj_assns), 0)
        
        with self.assertRaises(NotFound):
            self.rrclient.read(ctd_l0_all_data_process_id)

        #---------------------------------------------------------------------------------------------------------------
        # Force deleting a data process definition
        #---------------------------------------------------------------------------------------------------------------
        self.dataprocessclient.force_delete_data_process_definition(ctd_L0_all_dprocdef_id)

        # find all associations where this is the subject
        _, obj_assns = self.rrclient.find_objects(subject=ctd_L0_all_dprocdef_id, id_only=True)

        # find all associations where this is the object
        _, sbj_assns = self.rrclient.find_subjects(object=ctd_L0_all_dprocdef_id, id_only=True)

        self.assertEquals(len(obj_assns), 0)
        self.assertEquals(len(sbj_assns), 0)

        with self.assertRaises(NotFound):
            self.rrclient.read(ctd_L0_all_dprocdef_id)


class TransformManagementServiceIntTest(IonIntegrationTestCase):

    def setUp(self):
        # set up the container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2dm.yml')

        self.pubsub_cli = PubsubManagementServiceClient(node=self.container.node)
        self.tms_cli = TransformManagementServiceClient(node=self.container.node)
        self.rr_cli = ResourceRegistryServiceClient(node=self.container.node)
        self.procd_cli = ProcessDispatcherServiceClient(node=self.container.node)

        self.input_stream_id = self.pubsub_cli.create_stream(name='input_stream',original=True)

        self.input_subscription_id = self.pubsub_cli.create_subscription(query=StreamQuery(stream_ids=[self.input_stream_id]),exchange_name='transform_input',name='input_subscription')

        self.output_stream_id = self.pubsub_cli.create_stream(name='output_stream',original=True)

        self.process_definition = ProcessDefinition(name='basic_transform_definition')
        self.process_definition.executable = {'module': 'ion.processes.data.transforms.transform_example',
                                              'class':'TransformExample'}
        self.process_definition_id = self.procd_cli.create_process_definition(process_definition=self.process_definition)

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),'Not integrated for CEI')
    def test_create_transform(self):
        configuration = {'program_args':{'arg1':'value'}}

        transform_id = self.tms_cli.create_transform(
              name='test_transform',
              in_subscription_id=self.input_subscription_id,
              out_streams={'output':self.output_stream_id},
              process_definition_id=self.process_definition_id)

        # test transform creation in rr
        transform = self.rr_cli.read(transform_id)
        self.assertEquals(transform.name,'test_transform')


        # test associations
        predicates = [PRED.hasSubscription, PRED.hasOutStream, PRED.hasProcessDefinition]
        assocs = []
        for p in predicates:
            assocs += self.rr_cli.find_associations(transform_id,p,id_only=True)
        self.assertEquals(len(assocs),3)

        # test process creation
        transform = self.tms_cli.read_transform(transform_id)
        pid = transform.process_id
        proc = self.container.proc_manager.procs.get(pid)
        self.assertIsInstance(proc,TransformExample)

        # clean up
        self.tms_cli.delete_transform(transform_id)

    def test_create_transform_no_procdef(self):
        with self.assertRaises(NotFound):
            self.tms_cli.create_transform(name='test',in_subscription_id=self.input_subscription_id)

    def test_create_transform_bad_procdef(self):
        with self.assertRaises(NotFound):
            self.tms_cli.create_transform(name='test',
                in_subscription_id=self.input_subscription_id,
                process_definition_id='bad')
    
    def test_create_transform_no_config(self):
        transform_id = self.tms_cli.create_transform(
            name='test_transform',
            in_subscription_id=self.input_subscription_id,
            out_streams={'output':self.output_stream_id},
            process_definition_id=self.process_definition_id,
        )
        self.tms_cli.delete_transform(transform_id)

    def test_create_transform_name_failure(self):
        transform_id = self.tms_cli.create_transform(
            name='test_transform',
            in_subscription_id=self.input_subscription_id,
            out_streams={'output':self.output_stream_id},
            process_definition_id=self.process_definition_id,
        )
        with self.assertRaises(BadRequest):
            transform_id = self.tms_cli.create_transform(
                name='test_transform',
                in_subscription_id=self.input_subscription_id,
                out_streams={'output':self.output_stream_id},
                process_definition_id=self.process_definition_id,
            )
        self.tms_cli.delete_transform(transform_id)

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),'Not integrated for CEI')
    def test_create_no_output(self):
        transform_id = self.tms_cli.create_transform(
            name='test_transform',
            in_subscription_id=self.input_subscription_id,
            process_definition_id=self.process_definition_id,
        )

        predicates = [PRED.hasSubscription, PRED.hasOutStream, PRED.hasProcessDefinition]
        assocs = []
        for p in predicates:
            assocs += self.rr_cli.find_associations(transform_id,p,id_only=True)
        self.assertEquals(len(assocs),2)

        # test process creation
        transform = self.tms_cli.read_transform(transform_id)
        pid = transform.process_id
        proc = self.container.proc_manager.procs.get(pid)
        self.assertIsInstance(proc,TransformExample)

        self.tms_cli.delete_transform(transform_id)

    def test_read_transform_exists(self):
        trans_obj = IonObject(RT.Transform,name='trans_obj')
        trans_id, _ = self.rr_cli.create(trans_obj)

        res = self.tms_cli.read_transform(trans_id)
        actual = self.rr_cli.read(trans_id)

        self.assertEquals(res._id,actual._id)


    def test_read_transform_nonexist(self):
        with self.assertRaises(NotFound) as e:
            self.tms_cli.read_transform('123')

    def test_activate_transform(self):

        transform_id = self.tms_cli.create_transform(
            name='test_transform',
            in_subscription_id=self.input_subscription_id,
            out_streams={'output':self.output_stream_id},
            process_definition_id=self.process_definition_id
        )

        self.tms_cli.activate_transform(transform_id)

        # pubsub check if activated?
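        # A sketch of that check, assuming activate_transform() activates the input
        # subscription and that the Subscription object carries an 'activated' flag,
        # as relied upon in the data process tests above:
        subscription_obj = self.rr_cli.read(self.input_subscription_id)
        self.assertTrue(subscription_obj.activated)
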
        self.tms_cli.delete_transform(transform_id)

    def test_activate_transform_nonexist(self):
        with self.assertRaises(NotFound):
            self.tms_cli.activate_transform('1234')

    def test_delete_transform(self):

        transform_id = self.tms_cli.create_transform(
            name='test_transform',
            in_subscription_id=self.input_subscription_id,
            process_definition_id=self.process_definition_id
        )
        self.tms_cli.delete_transform(transform_id)

        # assertions
        with self.assertRaises(NotFound):
            self.rr_cli.read(transform_id)


    def test_delete_transform_nonexist(self):
        with self.assertRaises(NotFound):
            self.tms_cli.delete_transform('123')

    def test_execute_transform(self):
        # set up
        process_definition = ProcessDefinition(name='procdef_execute')
        process_definition.executable['module'] = 'ion.processes.data.transforms.transform_example'
        process_definition.executable['class'] = 'ReverseTransform'
        data = [1,2,3]

        process_definition_id, _ = self.rr_cli.create(process_definition)

        retval = self.tms_cli.execute_transform(process_definition_id,data)

        self.assertEquals(retval,[3,2,1])


    def test_integrated_transform(self):
        '''
        This example script runs a chained three way transform:
            B
        A <
            C
        Where A is the even_odd transform (generates a stream of even and odd numbers from input)
        and B and C are the basic transforms that receive even and odd input
        '''
        cc = self.container
        assertions = self.assertTrue

        pubsub_cli = PubsubManagementServiceClient(node=cc.node)
        rr_cli = ResourceRegistryServiceClient(node=cc.node)
        tms_cli = TransformManagementServiceClient(node=cc.node)
        #-------------------------------
        # Process Definition
        #-------------------------------
        # Create the process definition for the basic transform
        process_definition = IonObject(RT.ProcessDefinition, name='basic_transform_definition')
        process_definition.executable = {
            'module': 'ion.processes.data.transforms.transform_example',
            'class':'TransformExample'
        }
        basic_transform_definition_id, _ = rr_cli.create(process_definition)

        # Create The process definition for the TransformEvenOdd
        process_definition = IonObject(RT.ProcessDefinition, name='evenodd_transform_definition')
        process_definition.executable = {
            'module': 'ion.processes.data.transforms.transform_example',
            'class':'TransformEvenOdd'
        }
        evenodd_transform_definition_id, _ = rr_cli.create(process_definition)

        #-------------------------------
        # Streams
        #-------------------------------
        streams = [pubsub_cli.create_stream() for i in xrange(5)]

        #-------------------------------
        # Subscriptions
        #-------------------------------

        query = StreamQuery(stream_ids=[streams[0]])
        input_subscription_id = pubsub_cli.create_subscription(query=query, exchange_name='input_queue')

        query = StreamQuery(stream_ids = [streams[1]]) # even output
        even_subscription_id = pubsub_cli.create_subscription(query=query, exchange_name='even_queue')

        query = StreamQuery(stream_ids = [streams[2]]) # odd output
        odd_subscription_id = pubsub_cli.create_subscription(query=query, exchange_name='odd_queue')


        #-------------------------------
        # Launch the EvenOdd Transform
        #-------------------------------

        evenodd_id = tms_cli.create_transform(name='even_odd',
            in_subscription_id=input_subscription_id,
            out_streams={'even':streams[1], 'odd':streams[2]},
            process_definition_id=evenodd_transform_definition_id,
            configuration={})
        tms_cli.activate_transform(evenodd_id)


        #-------------------------------
        # Launch the Even Processing Transform
        #-------------------------------

        even_transform_id = tms_cli.create_transform(name='even_transform',
            in_subscription_id = even_subscription_id,
            out_streams={'even_plus1':streams[3]},
            process_definition_id=basic_transform_definition_id,
            configuration={})
        tms_cli.activate_transform(even_transform_id)

        #-------------------------------
        # Launch the Odd Processing Transform
        #-------------------------------

        odd_transform_id = tms_cli.create_transform(name='odd_transform',
            in_subscription_id = odd_subscription_id,
            out_streams={'odd_plus1':streams[4]},
            process_definition_id=basic_transform_definition_id,
            configuration={})
        tms_cli.activate_transform(odd_transform_id)

        #-------------------------------
        # Set up final subscribers
        #-------------------------------

        evenplus1_subscription_id = pubsub_cli.create_subscription(
            query=StreamQuery([streams[3]]),
            exchange_name='evenplus1_queue',
            name='EvenPlus1Subscription',
            description='EvenPlus1 SubscriptionDescription'
        )
        oddplus1_subscription_id = pubsub_cli.create_subscription(
            query=StreamQuery([streams[4]]),
            exchange_name='oddplus1_queue',
            name='OddPlus1Subscription',
            description='OddPlus1 SubscriptionDescription'
        )

        total_msg_count = 2

        msgs = gevent.queue.Queue()


        def even1_message_received(message, headers):
            num = int(message.get('num'))
            assertions(num % 2 == 1) # Assert it is odd (the transform adds 1 to the even stream)
            msgs.put(True)


        def odd1_message_received(message, headers):
            num = int(message.get('num'))
            assertions(num % 2 == 0) # Assert it is even (the transform adds 1 to the odd stream)
            msgs.put(True)

        subscriber_registrar = StreamSubscriberRegistrar(process=cc, container=cc)
        even_subscriber = subscriber_registrar.create_subscriber(exchange_name='evenplus1_queue', callback=even1_message_received)
        odd_subscriber = subscriber_registrar.create_subscriber(exchange_name='oddplus1_queue', callback=odd1_message_received)

        # Start subscribers
        even_subscriber.start()
        odd_subscriber.start()

        # Activate subscriptions
        pubsub_cli.activate_subscription(evenplus1_subscription_id)
        pubsub_cli.activate_subscription(oddplus1_subscription_id)

        #-------------------------------
        # Set up fake stream producer
        #-------------------------------

        pid = cc.spawn_process(name='dummy_process_for_test',
            module='pyon.ion.process',
            cls='SimpleProcess',
            config={})
        dummy_process = cc.proc_manager.procs[pid]

        # Normally the user does not see or create the publisher, this is part of the containers business.
        # For the test we need to set it up explicitly
        publisher_registrar = StreamPublisherRegistrar(process=dummy_process, container=cc)
        stream_publisher = publisher_registrar.create_publisher(stream_id=streams[0])

        #-------------------------------
        # Start test
        #-------------------------------

        # Publish a stream
        for i in xrange(total_msg_count):
            stream_publisher.publish({'num':str(i)})

        time.sleep(0.5)

        for i in xrange(total_msg_count * 2):
            try:
                # block for a bounded time so that Empty can actually be raised
                msgs.get(timeout=4)
            except Empty:
                assertions(False, "Failed to process all messages correctly.")

    """
class TestIntDataProcessManagementServiceMultiOut(IonIntegrationTestCase):

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubclient =  PubsubManagementServiceClient(node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.datasetclient =  DatasetManagementServiceClient(node=self.container.node)
        self.dataset_management = self.datasetclient

    def test_createDataProcess(self):

        #---------------------------------------------------------------------------
        # Data Process Definition
        #---------------------------------------------------------------------------
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L0_all',
                            description='transform ctd package into three separate L0 streams',
                            module='ion.processes.data.transforms.ctd.ctd_L0_all',
                            class_name='ctd_L0_all')
        dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)

        # Make assertion on the newly registered data process definition
        data_process_definition = self.rrclient.read(dprocdef_id)
        self.assertEquals(data_process_definition.name, 'ctd_L0_all')
        self.assertEquals(data_process_definition.description, 'transform ctd package into three separate L0 streams')
        self.assertEquals(data_process_definition.module, 'ion.processes.data.transforms.ctd.ctd_L0_all')
        self.assertEquals(data_process_definition.class_name, 'ctd_L0_all')

        # Read the data process definition using data process management and make assertions
        dprocdef_obj = self.dataprocessclient.read_data_process_definition(dprocdef_id)
        self.assertEquals(dprocdef_obj.class_name,'ctd_L0_all')
        self.assertEquals(dprocdef_obj.module,'ion.processes.data.transforms.ctd.ctd_L0_all')

        #---------------------------------------------------------------------------
        # Create an input instrument
        #---------------------------------------------------------------------------

        instrument_obj = IonObject(RT.InstrumentDevice, name='Inst1',description='an instrument that is creating the data product')
        instrument_id, rev = self.rrclient.create(instrument_obj)

        # Register the instrument so that the data producer and stream object are created
        data_producer_id = self.damsclient.register_instrument(instrument_id)

        # create a stream definition for the data from the ctd simulator
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubclient.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=pdict_id)

        self.dataprocessclient.assign_input_stream_definition_to_data_process_definition(ctd_stream_def_id, dprocdef_id )

        # Assert that the link between the stream definition and the data process definition was done
        assocs = self.rrclient.find_associations(subject=dprocdef_id, predicate=PRED.hasInputStreamDefinition, object=ctd_stream_def_id, id_only=True)

        self.assertIsNotNone(assocs)

        #---------------------------------------------------------------------------
        # Input Data Product
        #---------------------------------------------------------------------------
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()


        input_dp_obj = IonObject(   RT.DataProduct,
                                    name='InputDataProduct',
                                    description='some new dp',
                                    temporal_domain = tdom,
                                    spatial_domain = sdom)

        input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj, stream_definition_id=ctd_stream_def_id, exchange_point='test')

        #Make assertions on the input data product created
        input_dp_obj = self.rrclient.read(input_dp_id)
        self.assertEquals(input_dp_obj.name, 'InputDataProduct')
        self.assertEquals(input_dp_obj.description, 'some new dp')

        self.damsclient.assign_data_product(instrument_id, input_dp_id)

        # Retrieve the stream via the DataProduct->Stream associations
        stream_ids, _ = self.rrclient.find_objects(input_dp_id, PRED.hasStream, None, True)

        self.in_stream_id = stream_ids[0]

        #---------------------------------------------------------------------------
        # Output Data Product
        #---------------------------------------------------------------------------

        outgoing_stream_conductivity_id = self.pubsubclient.create_stream_definition(name='conductivity', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_conductivity_id, dprocdef_id, binding='conductivity' )

        outgoing_stream_pressure_id = self.pubsubclient.create_stream_definition(name='pressure', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_pressure_id, dprocdef_id, binding='pressure' )

        outgoing_stream_temperature_id = self.pubsubclient.create_stream_definition(name='temperature', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_temperature_id, dprocdef_id, binding='temperature' )


        self.output_products={}

        output_dp_obj = IonObject(RT.DataProduct,
            name='conductivity',
            description='transform output conductivity',
            temporal_domain = tdom,
            spatial_domain = sdom)

        output_dp_id_1 = self.dataproductclient.create_data_product(output_dp_obj, outgoing_stream_conductivity_id)
        self.output_products['conductivity'] = output_dp_id_1

        output_dp_obj = IonObject(RT.DataProduct,
            name='pressure',
            description='transform output pressure',
            temporal_domain = tdom,
            spatial_domain = sdom)

        output_dp_id_2 = self.dataproductclient.create_data_product(output_dp_obj, outgoing_stream_pressure_id)
        self.output_products['pressure'] = output_dp_id_2

        output_dp_obj = IonObject(RT.DataProduct,
            name='temperature',
            description='transform output ',
            temporal_domain = tdom,
            spatial_domain = sdom)

        output_dp_id_3 = self.dataproductclient.create_data_product(output_dp_obj, outgoing_stream_temperature_id)
        self.output_products['temperature'] = output_dp_id_3


        #---------------------------------------------------------------------------
        # Create the data process
        #---------------------------------------------------------------------------
        def _create_data_process():
            dproc_id = self.dataprocessclient.create_data_process(dprocdef_id, [input_dp_id], self.output_products)
            return dproc_id

        dproc_id = _create_data_process()

        # Make assertions on the data process created
        data_process = self.dataprocessclient.read_data_process(dproc_id)

        # Assert that the data process has a process id attached
        self.assertIsNotNone(data_process.process_id)

        # Assert that the data process got the input data product's subscription id attached as its own input_subscription_id attribute
        self.assertIsNotNone(data_process.input_subscription_id)

        output_data_product_ids, _ = self.rrclient.find_objects(subject=dproc_id, predicate=PRED.hasOutputProduct, object_type=RT.DataProduct, id_only=True)

        self.assertEquals(set(output_data_product_ids), set([output_dp_id_1, output_dp_id_2, output_dp_id_3]))


    @patch.dict(CFG, {'endpoint':{'receive':{'timeout': 60}}})
    def test_createDataProcessUsingSim(self):
        #-------------------------------
        # Create InstrumentModel
        #-------------------------------
        instModel_obj = IonObject(RT.InstrumentModel, name='SBE37IMModel', description="SBE37IMModel" )
        instModel_id = self.imsclient.create_instrument_model(instModel_obj)

        #-------------------------------
        # Create InstrumentAgent
        #-------------------------------
        instAgent_obj = IonObject(RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", driver_module="mi.instrument.seabird.sbe37smb.ooicore.driver", driver_class="SBE37Driver" )
        instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)

        self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

        #-------------------------------
        # Create InstrumentDevice
        #-------------------------------
        instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345" )
        instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
        self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)

        #-------------------------------
        # Create InstrumentAgentInstance to hold configuration information
        #-------------------------------


        port_agent_config = {
            'device_addr': 'sbe37-simulator.oceanobservatories.org',
            'device_port': 4001,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'command_port': 4002,
            'data_port': 4003,
            'log_level': 5,
        }

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance', description="SBE37IMAgentInstance", svr_addr="localhost",
                                          comms_device_address=CFG.device.sbe37.host, comms_device_port=CFG.device.sbe37.port,
                                          port_agent_config = port_agent_config)
        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(instAgentInstance_obj, instAgent_id, instDevice_id)


        #-------------------------------
        # Create CTD Parsed as the first data product
        #-------------------------------
        # create a stream definition for the data from the ctd simulator
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubclient.create_stream_definition(name='SBE32_CDM', parameter_dictionary_id=pdict_id)

        # Construct temporal and spatial Coordinate Reference System objects
        tdom, sdom = time_series_domain()

        sdom = sdom.dump()
        tdom = tdom.dump()



        dp_obj = IonObject(RT.DataProduct,
            name='ctd_parsed',
            description='ctd stream test',
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_parsed_data_product = self.dataproductclient.create_data_product(dp_obj, ctd_stream_def_id)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_parsed_data_product)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_parsed_data_product, PRED.hasStream, None, True)

        #-------------------------------
        # Create CTD Raw as the second data product
        #-------------------------------
        raw_stream_def_id = self.pubsubclient.create_stream_definition(name='SBE37_RAW', parameter_dictionary_id=pdict_id)

        dp_obj.name = 'ctd_raw'
        ctd_raw_data_product = self.dataproductclient.create_data_product(dp_obj, raw_stream_def_id)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_raw_data_product)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_raw_data_product, PRED.hasStream, None, True)

        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Data Process Definition
        #-------------------------------
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L0_all',
                            description='transform ctd package into three separate L0 streams',
                            module='ion.processes.data.transforms.ctd.ctd_L0_all',
                            class_name='ctd_L0_all')
        ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
            
        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Output Data Products
        #-------------------------------

        outgoing_stream_l0_conductivity_id = self.pubsubclient.create_stream_definition(name='L0_Conductivity', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_conductivity_id, ctd_L0_all_dprocdef_id, binding='conductivity' )

        outgoing_stream_l0_pressure_id = self.pubsubclient.create_stream_definition(name='L0_Pressure', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_pressure_id, ctd_L0_all_dprocdef_id, binding='pressure' )

        outgoing_stream_l0_temperature_id = self.pubsubclient.create_stream_definition(name='L0_Temperature', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_temperature_id, ctd_L0_all_dprocdef_id, binding='temperature' )


        self.output_products={}

        ctd_l0_conductivity_output_dp_obj = IonObject(  RT.DataProduct,
                                                        name='L0_Conductivity',
                                                        description='transform output conductivity',
                                                        temporal_domain = tdom,
                                                        spatial_domain = sdom)


        ctd_l0_conductivity_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_conductivity_output_dp_obj,
                                                                                outgoing_stream_l0_conductivity_id)
        self.output_products['conductivity'] = ctd_l0_conductivity_output_dp_id

        ctd_l0_pressure_output_dp_obj = IonObject(RT.DataProduct,
            name='L0_Pressure',
            description='transform output pressure',
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_l0_pressure_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_pressure_output_dp_obj,
                                                                                    outgoing_stream_l0_pressure_id)
        self.output_products['pressure'] = ctd_l0_pressure_output_dp_id

        ctd_l0_temperature_output_dp_obj = IonObject(RT.DataProduct,
            name='L0_Temperature',
            description='transform output temperature',
            temporal_domain = tdom,
            spatial_domain = sdom)


        ctd_l0_temperature_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_temperature_output_dp_obj,
                                                                                    outgoing_stream_l0_temperature_id)
        self.output_products['temperature'] = ctd_l0_temperature_output_dp_id


        #-------------------------------
        # Create listener for data process events and verify that events are received.
        #-------------------------------

        # todo: add a validation for Req: L4-CI-SA-RQ-367  Data processing shall notify registered data product consumers about data processing workflow life cycle events
        # todo (contd): the capability does not exist yet; ANS and SA are not yet publishing any workflow life cycle events (Swarbhanu)
        
        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Create the data process
        #-------------------------------
        ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process(ctd_L0_all_dprocdef_id, [ctd_parsed_data_product], self.output_products)

        #-------------------------------
        # Retrieve a list of all data process definitions in RR and validate that the DPD is listed
        #-------------------------------

        # todo: add a validation for Req: L4-CI-SA-RQ-366  Data processing shall manage data topic definitions
        # todo: This capability is not yet completed (Swarbhanu)
        self.dataprocessclient.activate_data_process(ctd_l0_all_data_process_id)
        

        # todo: check that an activate event is received (L4-CI-SA-RQ-367)
        # todo (contd): no event appears to be published when the data process is activated,
        # todo (contd): so for now we only check below that the subscription is indeed activated (Swarbhanu)


        # todo: monitor process to see if it is active (sa-rq-182)
        ctd_l0_all_data_process = self.rrclient.read(ctd_l0_all_data_process_id)
        input_subscription_id = ctd_l0_all_data_process.input_subscription_id
        subs = self.rrclient.read(input_subscription_id)
        self.assertTrue(subs.activated)

        # todo: This has not yet been completed by CEI, will probably surface through a DPMS call
        self.dataprocessclient.deactivate_data_process(ctd_l0_all_data_process_id)


        #-------------------------------
        # Retrieve the extended resources for data process definition and for data process
        #-------------------------------
        extended_process_definition = self.dataprocessclient.get_data_process_definition_extension(ctd_L0_all_dprocdef_id)
        self.assertEqual(1, len(extended_process_definition.data_processes))
        log.debug("test_createDataProcess: extended_process_definition  %s", str(extended_process_definition))

        extended_process = self.dataprocessclient.get_data_process_extension(ctd_l0_all_data_process_id)
        self.assertEqual(1, len(extended_process.input_data_products))
        log.debug("test_createDataProcess: extended_process  %s", str(extended_process))

        #-------------------------------
        # Cleanup
        #-------------------------------

        self.dataprocessclient.delete_data_process(ctd_l0_all_data_process_id)
        self.dataprocessclient.delete_data_process_definition(ctd_L0_all_dprocdef_id)

        self.dataprocessclient.force_delete_data_process(ctd_l0_all_data_process_id)
        self.dataprocessclient.force_delete_data_process_definition(ctd_L0_all_dprocdef_id)


class TransformManagementServiceIntTest(IonIntegrationTestCase):

    def setUp(self):
        # set up the container
        self._start_container()

        self.cc = ContainerAgentClient(node=self.container.node,name=self.container.name)

        self.cc.start_rel_from_url('res/deploy/r2deploy.yml')

        self.pubsub_cli = PubsubManagementServiceClient(node=self.cc.node)
        self.tms_cli = TransformManagementServiceClient(node=self.cc.node)
        self.rr_cli = ResourceRegistryServiceClient(node=self.cc.node)

        self.input_stream = IonObject(RT.Stream,name='ctd1 output', description='output from a ctd')
        self.input_stream.original = True
        self.input_stream.mimetype = 'hdf'
        self.input_stream_id = self.pubsub_cli.create_stream(self.input_stream)

        self.input_subscription = IonObject(RT.Subscription,name='ctd1 subscription', description='subscribe to this if you want ctd1 data')
        self.input_subscription.query['stream_id'] = self.input_stream_id
        self.input_subscription.exchange_name = 'a queue'
        self.input_subscription_id = self.pubsub_cli.create_subscription(self.input_subscription)

        self.output_stream = IonObject(RT.Stream,name='transform output', description='output from the transform process')
        self.output_stream.original = True
        self.output_stream.mimetype='raw'
        self.output_stream_id = self.pubsub_cli.create_stream(self.output_stream)


        self.process_definition = IonObject(RT.ProcessDefinition,name='transform_process')
        self.process_definition.executable = {'module': 'ion.services.dm.transformation.example.transform_example',
                                              'class':'TransformExample'}
        self.process_definition_id, _= self.rr_cli.create(self.process_definition)



    def test_create_transform(self):
        configuration = {'program_args':{'arg1':'value'}}

        transform_id = self.tms_cli.create_transform(
              name='test_transform',
              in_subscription_id=self.input_subscription_id,
              out_streams={'output':self.output_stream_id},
              process_definition_id=self.process_definition_id)

        # test transform creation in rr
        transform = self.rr_cli.read(transform_id)
        self.assertEquals(transform.name,'test_transform')


        # test associations
        predicates = [AT.hasSubscription, AT.hasOutStream, AT.hasProcessDefinition]
        assocs = []
        for p in predicates:
            assocs += self.rr_cli.find_associations(transform_id,p,id_only=True)
        self.assertEquals(len(assocs),3)

        # test process creation
        transform = self.tms_cli.read_transform(transform_id)
        pid = transform.process_id
        proc = self.container.proc_manager.procs[pid]
        self.assertTrue(isinstance(proc,TransformExample))

    def test_create_transform_no_procdef(self):
        with self.assertRaises(NotFound):
            self.tms_cli.create_transform(name='test',in_subscription_id=self.input_subscription_id)

    def test_create_transform_bad_procdef(self):
        with self.assertRaises(NotFound):
            self.tms_cli.create_transform(name='test',
                in_subscription_id=self.input_subscription_id,
                process_definition_id='bad')

    def test_create_transform_no_config(self):
        transform_id = self.tms_cli.create_transform(
            name='test_transform',
            in_subscription_id=self.input_subscription_id,
            out_streams={'output':self.output_stream_id},
            process_definition_id=self.process_definition_id,
        )

    def test_create_transform_name_failure(self):
        transform_id = self.tms_cli.create_transform(
            name='test_transform',
            in_subscription_id=self.input_subscription_id,
            out_streams={'output':self.output_stream_id},
            process_definition_id=self.process_definition_id,
        )
        with self.assertRaises(BadRequest):
            transform_id = self.tms_cli.create_transform(
                name='test_transform',
                in_subscription_id=self.input_subscription_id,
                out_streams={'output':self.output_stream_id},
                process_definition_id=self.process_definition_id,
            )

    def test_create_no_output(self):
        transform_id = self.tms_cli.create_transform(
            name='test_transform',
            in_subscription_id=self.input_subscription_id,
            process_definition_id=self.process_definition_id,
        )

        predicates = [AT.hasSubscription, AT.hasOutStream, AT.hasProcessDefinition]
        assocs = []
        for p in predicates:
            assocs += self.rr_cli.find_associations(transform_id,p,id_only=True)
        self.assertEquals(len(assocs),2)

        # test process creation
        transform = self.tms_cli.read_transform(transform_id)
        pid = transform.process_id
        proc = self.container.proc_manager.procs[pid]
        self.assertTrue(isinstance(proc,TransformExample))

    def test_read_transform_exists(self):
        trans_obj = IonObject(RT.Transform,name='trans_obj')
        trans_id, _ = self.rr_cli.create(trans_obj)

        res = self.tms_cli.read_transform(trans_id)
        actual = self.rr_cli.read(trans_id)

        self.assertEquals(res._id,actual._id)

    def test_read_transform_nonexist(self):
        with self.assertRaises(NotFound) as e:
            res = self.tms_cli.read_transform('123')

    def test_activate_transform(self):

        transform_id = self.tms_cli.create_transform(
            name='test_transform',
            in_subscription_id=self.input_subscription_id,
            out_streams={'output':self.output_stream_id},
            process_definition_id=self.process_definition_id
        )

        self.tms_cli.activate_transform(transform_id)

        # pubsub check if activated?

    def test_activate_transform_nonexist(self):
        with self.assertRaises(NotFound):
            self.tms_cli.activate_transform('1234')

    def test_delete_transform(self):

        transform_id = self.tms_cli.create_transform(
            name='test_transform',
            in_subscription_id=self.input_subscription_id,
            process_definition_id=self.process_definition_id
        )
        self.tms_cli.delete_transform(transform_id)

        # assertions
        with self.assertRaises(NotFound):
            self.rr_cli.read(transform_id)


    def test_delete_transform_nonexist(self):
        with self.assertRaises(NotFound):
            self.tms_cli.delete_transform('123')