class TestDeployment(IonIntegrationTestCase):
    """Integration tests for Deployment resources via ObservatoryManagementService.

    Covers create/read/delete of deployments, the prepare-support resource,
    activation/deactivation lifecycle transitions, and site/device matchup
    resolution for the various deployment contexts.
    """

    def setUp(self):
        # Start container and deploy the standard service topology
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Service clients used throughout the tests
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dmpsclient = DataProductManagementServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.psmsclient = PubsubManagementServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()

        self.c = DotDict()
        self.c.resource_registry = self.rrclient

        self.RR2 = EnhancedResourceRegistryClient(self.rrclient)

        self.dsmsclient = DataProcessManagementServiceClient(node=self.container.node)

        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
                self.dsmsclient.deactivate_data_process(proc_id)
                self.dsmsclient.delete_data_process(proc_id)
        self.addCleanup(killAllDataProcesses)

    #@unittest.skip("targeting")
    def test_create_deployment(self):
        """Create a deployment with a site and device, verify associations, then delete it."""

        #create a deployment with metadata and an initial site and device
        platform_site__obj = IonObject(RT.PlatformSite,
                                       name='PlatformSite1',
                                       description='test platform site')
        site_id = self.omsclient.create_platform_site(platform_site__obj)

        platform_device__obj = IonObject(RT.PlatformDevice,
                                         name='PlatformDevice1',
                                         description='test platform device')
        device_id = self.imsclient.create_platform_device(platform_device__obj)

        # Planned temporal bounds are stored as stringified epoch seconds
        start = str(int(time.mktime(datetime.datetime(2013, 1, 1).timetuple())))
        end = str(int(time.mktime(datetime.datetime(2014, 1, 1).timetuple())))
        temporal_bounds = IonObject(OT.TemporalBounds, name='planned',
                                    start_datetime=start, end_datetime=end)
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment',
                                   description='some new deployment',
                                   constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)
        self.omsclient.assign_site_to_deployment(site_id, deployment_id)
        self.omsclient.assign_device_to_deployment(device_id, deployment_id)

        log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id))

        #retrieve the deployment objects and check that the assoc site and device are attached
        read_deployment_obj = self.omsclient.read_deployment(deployment_id)
        log.debug("test_create_deployment: created deployment obj: %s ", str(read_deployment_obj))

        site_ids, _ = self.rrclient.find_subjects(RT.PlatformSite, PRED.hasDeployment,
                                                  deployment_id, True)
        self.assertEqual(len(site_ids), 1)

        device_ids, _ = self.rrclient.find_subjects(RT.PlatformDevice, PRED.hasDeployment,
                                                    deployment_id, True)
        self.assertEqual(len(device_ids), 1)

        #delete the deployment
        self.omsclient.force_delete_deployment(deployment_id)
        # now try to get the deleted dp object
        try:
            self.omsclient.read_deployment(deployment_id)
        except NotFound:
            pass
        else:
            self.fail("deleted deployment was found during read")

    #@unittest.skip("targeting")
    def test_prepare_deployment_support(self):
        """Exercise prepare_deployment_support() before/after site+device assignment."""

        # With no deployment id: all association buckets exist and are empty
        deploy_sup = self.omsclient.prepare_deployment_support()
        self.assertTrue(deploy_sup)

        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentDevice'].type_,
                          "AssocDeploymentInstDevice")
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentDevice'].resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentDevice'].associated_resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasPlatformDevice'].type_,
                          "AssocDeploymentPlatDevice")
        self.assertEquals(deploy_sup.associations['DeploymentHasPlatformDevice'].resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasPlatformDevice'].associated_resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentSite'].type_,
                          "AssocDeploymentInstSite")
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentSite'].resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentSite'].associated_resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasPlatformSite'].type_,
                          "AssocDeploymentPlatSite")
        self.assertEquals(deploy_sup.associations['DeploymentHasPlatformSite'].resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasPlatformSite'].associated_resources, [])

        #create a deployment with metadata and an initial site and device
        platform_site__obj = IonObject(RT.PlatformSite,
                                       name='PlatformSite1',
                                       description='test platform site')
        site_id = self.omsclient.create_platform_site(platform_site__obj)

        platform_device__obj = IonObject(RT.PlatformDevice,
                                         name='PlatformDevice1',
                                         description='test platform device')
        device_id = self.imsclient.create_platform_device(platform_device__obj)

        start = str(int(time.mktime(datetime.datetime(2013, 1, 1).timetuple())))
        end = str(int(time.mktime(datetime.datetime(2014, 1, 1).timetuple())))
        temporal_bounds = IonObject(OT.TemporalBounds, name='planned',
                                    start_datetime=start, end_datetime=end)
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment',
                                   description='some new deployment',
                                   constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)

        # With a deployment id but nothing assigned: candidate resources are
        # populated but no associated_resources yet
        deploy_sup = self.omsclient.prepare_deployment_support(deployment_id)

        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentDevice'].resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentDevice'].associated_resources, [])
        self.assertEquals(len(deploy_sup.associations['DeploymentHasPlatformDevice'].resources), 1)
        self.assertEquals(deploy_sup.associations['DeploymentHasPlatformDevice'].associated_resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentSite'].resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentSite'].associated_resources, [])
        self.assertEquals(len(deploy_sup.associations['DeploymentHasPlatformSite'].resources), 1)
        self.assertEquals(deploy_sup.associations['DeploymentHasPlatformSite'].associated_resources, [])

        self.omsclient.assign_site_to_deployment(site_id, deployment_id)
        self.omsclient.assign_device_to_deployment(device_id, deployment_id)

        # After assignment: the platform site/device now appear as associated
        deploy_sup = self.omsclient.prepare_deployment_support(deployment_id)

        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentDevice'].resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentDevice'].associated_resources, [])
        self.assertEquals(len(deploy_sup.associations['DeploymentHasPlatformDevice'].resources), 1)
        self.assertEquals(len(deploy_sup.associations['DeploymentHasPlatformDevice'].associated_resources), 1)
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentSite'].resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentSite'].associated_resources, [])
        self.assertEquals(len(deploy_sup.associations['DeploymentHasPlatformSite'].resources), 1)
        self.assertEquals(len(deploy_sup.associations['DeploymentHasPlatformSite'].associated_resources), 1)

        #delete the deployment
        self.omsclient.force_delete_deployment(deployment_id)
        # now try to get the deleted dp object
        try:
            self.omsclient.read_deployment(deployment_id)
        except NotFound:
            pass
        else:
            self.fail("deleted deployment was found during read")

    #@unittest.skip("targeting")
    def base_activate_deployment(self, make_assigns=False):
        """Build the site/device/model fixture for activation tests.

        Creates a platform site+device+model, a child instrument site+device+model,
        and a deployment with port assignments. When make_assigns is True the
        models are assigned to sites/devices and the platform site/device are
        assigned to the deployment. Returns a DotDict of all created ids.
        """
        # Create platform site, platform device, platform model
        bounds = GeospatialBounds(geospatial_latitude_limit_north=float(5),
                                  geospatial_latitude_limit_south=float(5),
                                  geospatial_longitude_limit_west=float(15),
                                  geospatial_longitude_limit_east=float(15),
                                  geospatial_vertical_min=float(0),
                                  geospatial_vertical_max=float(1000))
        platform_site__obj = IonObject(RT.PlatformSite,
                                       name='PlatformSite1',
                                       description='test platform site',
                                       constraint_list=[bounds])
        platform_site_id = self.omsclient.create_platform_site(platform_site__obj)

        platform_device_obj = IonObject(RT.PlatformDevice,
                                        name='PlatformDevice1',
                                        description='test platform device')
        platform_device_id = self.imsclient.create_platform_device(platform_device_obj)

        platform_model__obj = IonObject(RT.PlatformModel,
                                        name='PlatformModel1',
                                        description='test platform model')
        platform_model_id = self.imsclient.create_platform_model(platform_model__obj)

        # Create instrument site
        #-------------------------------------------------------------------------------------
        bounds = GeospatialBounds(geospatial_latitude_limit_north=float(45),
                                  geospatial_latitude_limit_south=float(40),
                                  geospatial_longitude_limit_west=float(-75),
                                  geospatial_longitude_limit_east=float(-70),
                                  geospatial_vertical_min=float(0),
                                  geospatial_vertical_max=float(500))
        instrument_site_obj = IonObject(RT.InstrumentSite,
                                        name='InstrumentSite1',
                                        description='test instrument site',
                                        reference_designator='GA01SUMO-FI003-01-CTDMO0999',
                                        constraint_list=[bounds])
        instrument_site_id = self.omsclient.create_instrument_site(instrument_site_obj,
                                                                   platform_site_id)

        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.psmsclient.create_stream_definition(
            name='SBE37_CDM', parameter_dictionary_id=pdict_id)

        # Create an instrument device
        instrument_device_obj = IonObject(RT.InstrumentDevice,
                                          name='InstrumentDevice1',
                                          description='test instrument device')
        instrument_device_id = self.imsclient.create_instrument_device(instrument_device_obj)
        self.rrclient.create_association(platform_device_id, PRED.hasDevice,
                                         instrument_device_id)

        pp_obj = IonObject(OT.PlatformPort,
                           reference_designator='GA01SUMO-FI003-01-CTDMO0999',
                           port_type=PortTypeEnum.PAYLOAD,
                           ip_address='1')
        port_assignments = {instrument_device_id: pp_obj}

        #----------------------------------------------------------------------------------------------------
        # Create an instrument model
        instrument_model_obj = IonObject(RT.InstrumentModel,
                                         name='InstrumentModel1',
                                         description='test instrument model')
        instrument_model_id = self.imsclient.create_instrument_model(instrument_model_obj)

        # Create a deployment object
        #----------------------------------------------------------------------------------------------------
        start = str(int(time.mktime(datetime.datetime(2013, 1, 1).timetuple())))
        end = str(int(time.mktime(datetime.datetime(2020, 1, 1).timetuple())))
        temporal_bounds = IonObject(OT.TemporalBounds, name='planned',
                                    start_datetime=start, end_datetime=end)
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment',
                                   description='some new deployment',
                                   context=IonObject(OT.CabledNodeDeploymentContext),
                                   port_assignments=port_assignments,
                                   constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)

        log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id))

        if make_assigns:
            self.imsclient.assign_platform_model_to_platform_device(platform_model_id,
                                                                    platform_device_id)
            self.imsclient.assign_instrument_model_to_instrument_device(instrument_model_id,
                                                                        instrument_device_id)
            self.omsclient.assign_platform_model_to_platform_site(platform_model_id,
                                                                  platform_site_id)
            self.omsclient.assign_instrument_model_to_instrument_site(instrument_model_id,
                                                                      instrument_site_id)

            self.omsclient.assign_site_to_deployment(platform_site_id, deployment_id)
            self.omsclient.assign_device_to_deployment(platform_device_id, deployment_id)

        ret = DotDict(instrument_site_id=instrument_site_id,
                      instrument_device_id=instrument_device_id,
                      instrument_model_id=instrument_model_id,
                      platform_site_id=platform_site_id,
                      platform_device_id=platform_device_id,
                      platform_model_id=platform_model_id,
                      deployment_id=deployment_id)
        return ret

    def _create_subsequent_deployment(self, prior_dep_info):
        """Create a second deployment reusing the prior site but with new devices."""
        platform_device_obj = IonObject(RT.PlatformDevice,
                                        name='PlatformDevice2',
                                        description='test platform device')
        platform_device_id = self.imsclient.create_platform_device(platform_device_obj)

        instrument_device_obj = IonObject(RT.InstrumentDevice,
                                          name='InstrumentDevice2',
                                          description='test instrument device')
        instrument_device_id = self.imsclient.create_instrument_device(instrument_device_obj)
        self.rrclient.create_association(platform_device_id, PRED.hasDevice,
                                         instrument_device_id)

        # Reuse the models from the prior deployment so activation can match up
        self.imsclient.assign_platform_model_to_platform_device(
            prior_dep_info.platform_model_id, platform_device_id)
        self.imsclient.assign_instrument_model_to_instrument_device(
            prior_dep_info.instrument_model_id, instrument_device_id)

        start = str(int(time.mktime(datetime.datetime(2013, 6, 1).timetuple())))
        end = str(int(time.mktime(datetime.datetime(2020, 6, 1).timetuple())))
        temporal_bounds = IonObject(OT.TemporalBounds, name='planned',
                                    start_datetime=start, end_datetime=end)
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment2',
                                   description='some new deployment',
                                   context=IonObject(OT.CabledNodeDeploymentContext),
                                   constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)

        self.omsclient.assign_site_to_deployment(prior_dep_info.platform_site_id,
                                                 deployment_id)
        self.omsclient.assign_device_to_deployment(prior_dep_info.platform_device_id,
                                                   deployment_id)

        log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id))

        ret = DotDict(instrument_device_id=instrument_device_id,
                      platform_device_id=platform_device_id,
                      deployment_id=deployment_id)
        return ret

    #@unittest.skip("targeting")
    def test_activate_deployment_normal(self):
        """Activate then deactivate a fully-assigned deployment; verify lcstate and bounds updates."""

        res = self.base_activate_deployment(make_assigns=True)

        before_activate_instrument_device_obj = self.rrclient.read(res.instrument_device_id)
        self.assertNotEquals(before_activate_instrument_device_obj.lcstate, LCS.DEPLOYED)

        log.debug("activating deployment, expecting success")
        self.omsclient.activate_deployment(res.deployment_id)

        def assertGeospatialBoundsEquals(a, b):
            self.assertEquals(a['geospatial_latitude_limit_north'],
                              b['geospatial_latitude_limit_north'])
            self.assertEquals(a['geospatial_latitude_limit_south'],
                              b['geospatial_latitude_limit_south'])
            self.assertEquals(a['geospatial_longitude_limit_west'],
                              b['geospatial_longitude_limit_west'])
            self.assertEquals(a['geospatial_longitude_limit_east'],
                              b['geospatial_longitude_limit_east'])

        def assertGeospatialBoundsNotEquals(a, b):
            self.assertNotEquals(a['geospatial_latitude_limit_north'],
                                 b['geospatial_latitude_limit_north'])
            self.assertNotEquals(a['geospatial_latitude_limit_south'],
                                 b['geospatial_latitude_limit_south'])
            self.assertNotEquals(a['geospatial_longitude_limit_west'],
                                 b['geospatial_longitude_limit_west'])
            self.assertNotEquals(a['geospatial_longitude_limit_east'],
                                 b['geospatial_longitude_limit_east'])

        # Activation should stamp the site's bounds onto the device
        after_activate_instrument_device_obj = self.rrclient.read(res.instrument_device_id)
        assertGeospatialBoundsNotEquals(before_activate_instrument_device_obj.geospatial_bounds,
                                        after_activate_instrument_device_obj.geospatial_bounds)

        deployment_obj = self.RR2.read(res.deployment_id)
        self.assertEquals(deployment_obj.lcstate, LCS.DEPLOYED)

        # NOTE: fixed log-message typo ("deactivatin" -> "deactivating")
        log.debug("deactivating deployment, expecting success")
        self.omsclient.deactivate_deployment(res.deployment_id)

        after_deactivate_instrument_device_obj = self.rrclient.read(res.instrument_device_id)
        assertGeospatialBoundsNotEquals(after_activate_instrument_device_obj.geospatial_bounds,
                                        after_deactivate_instrument_device_obj.geospatial_bounds)

        deployment_obj = self.RR2.read(res.deployment_id)
        self.assertEquals(deployment_obj.lcstate, LCS.INTEGRATED)

    def test_activate_deployment_redeploy(self):
        """Activating a subsequent deployment retires the prior one (DEPLOYED -> INTEGRATED)."""
        dep_util = DeploymentUtil(self.container)
        res = self.base_activate_deployment(make_assigns=True)

        log.debug("activating first deployment, expecting success")
        self.omsclient.activate_deployment(res.deployment_id)

        deployment_obj1 = self.RR2.read(res.deployment_id)
        self.assertEquals(deployment_obj1.lcstate, LCS.DEPLOYED)

        next_dep_info = self._create_subsequent_deployment(res)

        deployment_obj2 = self.RR2.read(next_dep_info.deployment_id)
        self.assertNotEquals(deployment_obj2.lcstate, LCS.DEPLOYED)

        log.debug("activating subsequent deployment, expecting success")
        self.omsclient.activate_deployment(next_dep_info.deployment_id)

        deployment_obj1 = self.RR2.read(res.deployment_id)
        self.assertEquals(deployment_obj1.lcstate, LCS.INTEGRATED)

        deployment_obj2 = self.RR2.read(next_dep_info.deployment_id)
        self.assertEquals(deployment_obj2.lcstate, LCS.DEPLOYED)

        # The first deployment's temporal constraint must have been truncated
        # to end no later than the second deployment's end
        dep1_tc = dep_util.get_temporal_constraint(deployment_obj1)
        dep2_tc = dep_util.get_temporal_constraint(deployment_obj2)
        self.assertLessEqual(float(dep1_tc.end_datetime), float(dep2_tc.end_datetime))

        log.debug("deactivating second deployment, expecting success")
        self.omsclient.deactivate_deployment(next_dep_info.deployment_id)

        deployment_obj2 = self.RR2.read(next_dep_info.deployment_id)
        self.assertEquals(deployment_obj2.lcstate, LCS.INTEGRATED)

    #@unittest.skip("targeting")
    def test_activate_deployment_nomodels(self):
        """Activation fails with NotFound when site/device models are not assigned."""

        res = self.base_activate_deployment()

        self.omsclient.assign_site_to_deployment(res.platform_site_id, res.deployment_id)
        self.omsclient.assign_device_to_deployment(res.platform_device_id, res.deployment_id)

        log.debug("activating deployment without site+device models, expecting fail")
        self.assert_deploy_fail(res.deployment_id, NotFound, "Expected 1")

        log.debug("assigning instrument site model")
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id,
                                                                  res.instrument_site_id)

        log.debug("activating deployment without device models, expecting fail")
        self.assert_deploy_fail(res.deployment_id, NotFound, "Expected 1")

    #@unittest.skip("targeting")
    def test_activate_deployment_nosite(self):
        """Activation fails with BadRequest when only a device (no site) is deployed."""

        res = self.base_activate_deployment()

        log.debug("assigning instrument models")
        self.imsclient.assign_instrument_model_to_instrument_device(res.instrument_model_id,
                                                                    res.instrument_device_id)
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id,
                                                                  res.instrument_site_id)

        log.debug("deploying instrument device only")
        self.omsclient.assign_device_to_deployment(res.instrument_device_id, res.deployment_id)

        log.debug("activating deployment without instrument site, expecting fail")
        self.assert_deploy_fail(res.deployment_id, BadRequest)

    #@unittest.skip("targeting")
    def test_activate_deployment_nodevice(self):
        """Activation fails with BadRequest when only a site (no device) is deployed."""

        res = self.base_activate_deployment()

        log.debug("assigning platform and instrument models")
        self.imsclient.assign_instrument_model_to_instrument_device(res.instrument_model_id,
                                                                    res.instrument_device_id)
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id,
                                                                  res.instrument_site_id)

        log.debug("deploying instrument site only")
        self.omsclient.assign_site_to_deployment(res.instrument_site_id, res.deployment_id)

        log.debug("activating deployment without device, expecting fail")
        self.assert_deploy_fail(res.deployment_id, BadRequest,
                                "No devices were found in the deployment")

    def assert_deploy_fail(self, deployment_id, err_type=BadRequest, fail_message=""):
        """Assert that activating the given deployment raises err_type.

        When fail_message is non-empty, also assert it appears in the
        exception message.
        """
        with self.assertRaises(err_type) as cm:
            self.omsclient.activate_deployment(deployment_id)
            log.debug("assert_deploy_fail cm: %s", str(cm))
            if fail_message:
                self.assertIn(fail_message, cm.exception.message)

    def test_3x3_matchups_remoteplatform(self):
        self.base_3x3_matchups(IonObject(OT.RemotePlatformDeploymentContext))

    def test_3x3_matchups_cabledinstrument(self):
        self.base_3x3_matchups(IonObject(OT.CabledInstrumentDeploymentContext))

    def test_3x3_matchups_cablednode(self):
        self.base_3x3_matchups(IonObject(OT.CabledNodeDeploymentContext))

    def base_3x3_matchups(self, deployment_context):
        """
        This will be 1 root platform, 3 sub platforms (2 of one model, 1 of another)
        and 3 sub instruments each (2-to-1)
        """
        deployment_context_type = type(deployment_context).__name__

        instrument_model_id = [self.RR2.create(any_old(RT.InstrumentModel))
                               for _ in range(6)]
        platform_model_id = [self.RR2.create(any_old(RT.PlatformModel))
                             for _ in range(3)]

        instrument_device_id = [self.RR2.create(any_old(RT.InstrumentDevice))
                                for _ in range(9)]
        platform_device_id = [self.RR2.create(any_old(RT.PlatformDevice))
                              for _ in range(4)]

        instrument_site_id = [
            self.RR2.create(any_old(
                RT.InstrumentSite,
                {"reference_designator": "GA01SUMO-FI003-0%s-CTDMO0999" % (i + 1),
                 "planned_uplink_port": IonObject(
                     OT.PlatformPort,
                     reference_designator="GA01SUMO-FI003-0%s-CTDMO0999" % (i + 1))}))
            for i in range(9)]

        platform_site_id = [
            self.RR2.create(any_old(
                RT.PlatformSite,
                {"reference_designator": "GA01SUMO-FI003-0%s-CTDMO0888" % (i + 1),
                 "planned_uplink_port": IonObject(
                     OT.PlatformPort,
                     reference_designator="GA01SUMO-FI003-0%s-CTDMO0888" % (i + 1))}))
            for i in range(4)]

        def instrument_model_at(platform_idx, instrument_idx):
            # 2 instruments of one model, 1 of another, per platform
            m = platform_idx * 2
            if instrument_idx > 0:
                m += 1
            return m

        def platform_model_at(platform_idx):
            # platform 0 gets model 0, platforms 1-2 share model 1
            if platform_idx > 0:
                return 1
            return 0

        def instrument_at(platform_idx, instrument_idx):
            return platform_idx * 3 + instrument_idx

        # set up the structure
        for p in range(3):
            m = platform_model_at(p)
            self.RR2.assign_platform_model_to_platform_site_with_has_model(
                platform_model_id[m], platform_site_id[p])
            self.RR2.assign_platform_model_to_platform_device_with_has_model(
                platform_model_id[m], platform_device_id[p])
            self.RR2.assign_platform_device_to_platform_device_with_has_device(
                platform_device_id[p], platform_device_id[3])
            self.RR2.assign_platform_site_to_platform_site_with_has_site(
                platform_site_id[p], platform_site_id[3])

            for i in range(3):
                m = instrument_model_at(p, i)
                idx = instrument_at(p, i)
                self.RR2.assign_instrument_model_to_instrument_site_with_has_model(
                    instrument_model_id[m], instrument_site_id[idx])
                self.RR2.assign_instrument_model_to_instrument_device_with_has_model(
                    instrument_model_id[m], instrument_device_id[idx])
                self.RR2.assign_instrument_device_to_platform_device_with_has_device(
                    instrument_device_id[idx], platform_device_id[p])
                self.RR2.assign_instrument_site_to_platform_site_with_has_site(
                    instrument_site_id[idx], platform_site_id[p])

        # top level models
        self.RR2.assign_platform_model_to_platform_device_with_has_model(
            platform_model_id[2], platform_device_id[3])
        self.RR2.assign_platform_model_to_platform_site_with_has_model(
            platform_model_id[2], platform_site_id[3])

        # verify structure
        for p in range(3):
            parent_id = self.RR2.find_platform_device_id_by_platform_device_using_has_device(
                platform_device_id[p])
            self.assertEqual(platform_device_id[3], parent_id)

            parent_id = self.RR2.find_platform_site_id_by_platform_site_using_has_site(
                platform_site_id[p])
            self.assertEqual(platform_site_id[3], parent_id)

        for i in range(len(platform_site_id)):
            self.assertEqual(
                self.RR2.find_platform_model_of_platform_device_using_has_model(
                    platform_device_id[i]),
                self.RR2.find_platform_model_of_platform_site_using_has_model(
                    platform_site_id[i]))

        for i in range(len(instrument_site_id)):
            self.assertEqual(
                self.RR2.find_instrument_model_of_instrument_device_using_has_model(
                    instrument_device_id[i]),
                self.RR2.find_instrument_model_of_instrument_site_using_has_model(
                    instrument_site_id[i]))

        # OOIReferenceDesignator format: GA01SUMO-FI003-03-CTDMO0999 (site-platform_id-port-device_id)
        port_assignments = {}
        for p in range(3):
            ref_desig = "GA01SUMO-FI003-0%s-CTDMO0888" % (p + 1)
            pp_obj = IonObject(OT.PlatformPort,
                               reference_designator=ref_desig,
                               port_type=PortTypeEnum.PAYLOAD,
                               ip_address=str(p))
            port_assignments[platform_device_id[p]] = pp_obj
            for i in range(3):
                ref_desig = "GA01SUMO-FI003-0%s-CTDMO0999" % ((p * 3) + i + 1)
                pp_obj = IonObject(OT.PlatformPort,
                                   reference_designator=ref_desig,
                                   port_type=PortTypeEnum.PAYLOAD,
                                   ip_address=str(p))
                idx = instrument_at(p, i)
                port_assignments[instrument_device_id[idx]] = pp_obj

        deployment_id = self.RR2.create(any_old(
            RT.Deployment,
            {"context": deployment_context,
             "port_assignments": port_assignments}))

        log.debug("assigning device/site to %s deployment", deployment_context_type)
        if OT.RemotePlatformDeploymentContext == deployment_context_type:
            self.RR2.assign_deployment_to_platform_device_with_has_deployment(
                deployment_id, platform_device_id[3])
            self.RR2.assign_deployment_to_platform_site_with_has_deployment(
                deployment_id, platform_site_id[3])
        elif OT.CabledInstrumentDeploymentContext == deployment_context_type:
            self.RR2.assign_deployment_to_instrument_device_with_has_deployment(
                deployment_id, instrument_device_id[1])
            self.RR2.assign_deployment_to_instrument_site_with_has_deployment(
                deployment_id, instrument_site_id[1])
        elif OT.CabledNodeDeploymentContext == deployment_context_type:
            self.RR2.assign_deployment_to_platform_device_with_has_deployment(
                deployment_id, platform_device_id[1])
            self.RR2.assign_deployment_to_platform_site_with_has_deployment(
                deployment_id, platform_site_id[1])

        log.debug("activation of %s deployment", deployment_context_type)
        self.omsclient.activate_deployment(deployment_id)

        log.debug("validation of %s deployment", deployment_context_type)
        if OT.RemotePlatformDeploymentContext == deployment_context_type:
            # verify proper associations
            for i, d in enumerate(platform_device_id):
                self.assertEqual(
                    d,
                    self.RR2.find_platform_device_id_of_platform_site_using_has_device(
                        platform_site_id[i]))

            for i, d in enumerate(instrument_device_id):
                self.assertEqual(
                    d,
                    self.RR2.find_instrument_device_id_of_instrument_site_using_has_device(
                        instrument_site_id[i]))

        elif OT.CabledInstrumentDeploymentContext == deployment_context_type:
            self.assertEqual(
                instrument_device_id[1],
                self.RR2.find_instrument_device_id_of_instrument_site_using_has_device(
                    instrument_site_id[1]))

        elif OT.CabledNodeDeploymentContext == deployment_context_type:
            expected_platforms = [1]

            # verify proper associations
            for i, d in enumerate(platform_device_id):
                self.assertEqual(
                    i in expected_platforms,
                    d in self.RR2.find_platform_device_ids_of_platform_site_using_has_device(
                        platform_site_id[i]))
class TestObservatoryManagementServiceIntegration(IonIntegrationTestCase):
    """Integration tests for ObservatoryManagementService site hierarchies.

    Builds an Observatory/Subsite/PlatformSite/InstrumentSite tree and checks
    frame-of-reference traversal, org assignment, and site CRUD.
    """

    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        #container = Container()
        #print 'starting container'
        #container.start()
        #print 'started container'

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.RR = ResourceRegistryServiceClient(node=self.container.node)
        self.OMS = ObservatoryManagementServiceClient(node=self.container.node)
        #print 'TestObservatoryManagementServiceIntegration: started services'

    # @unittest.skip('this exists only for debugging the launch process')
    # def test_just_the_setup(self):
    #     return

    #@unittest.skip('targeting')
    def test_resources_associations(self):
        """Smoke test: all resources/associations used by OMS can be created."""
        self._make_associations()

    #@unittest.skip('targeting')
    def test_find_related_frames_of_reference(self):
        """Verify find_related_frames_of_reference traversals up and down the site tree."""

        # finding subordinates gives a dict of obj lists, convert objs to ids
        def idify(adict):
            ids = {}
            for k, v in adict.iteritems():
                ids[k] = []
                for obj in v:
                    ids[k].append(obj._id)
            return ids

        # a short version of the function we're testing, with id-ify
        def short(resource_id, output_types):
            ret = self.OMS.find_related_frames_of_reference(resource_id, output_types)
            return idify(ret)

        #set up associations first
        stuff = self._make_associations()

        #basic traversal of tree from instrument to platform
        ids = short(stuff.instrument_site_id, [RT.PlatformSite])
        self.assertIn(RT.PlatformSite, ids)
        self.assertIn(stuff.platform_site_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_siteb_id, ids[RT.PlatformSite])
        self.assertNotIn(stuff.platform_siteb2_id, ids[RT.PlatformSite])
        #since this is the first search, just make sure the input inst_id got stripped
        if RT.InstrumentSite in ids:
            self.assertNotIn(stuff.instrument_site_id, ids[RT.InstrumentSite])

        #basic traversal of tree from platform to instrument
        ids = short(stuff.platform_siteb_id, [RT.InstrumentSite])
        self.assertIn(RT.InstrumentSite, ids)
        self.assertIn(stuff.instrument_site_id, ids[RT.InstrumentSite])
        self.assertNotIn(stuff.instrument_site2_id, ids[RT.InstrumentSite])

        #full traversal of tree from observatory down to instrument
        ids = short(stuff.observatory_id, [RT.InstrumentSite])
        self.assertIn(RT.InstrumentSite, ids)
        self.assertIn(stuff.instrument_site_id, ids[RT.InstrumentSite])

        #full traversal of tree from instrument to observatory
        ids = short(stuff.instrument_site_id, [RT.Observatory])
        self.assertIn(RT.Observatory, ids)
        self.assertIn(stuff.observatory_id, ids[RT.Observatory])

        #partial traversal, only down to platform
        ids = short(stuff.observatory_id, [RT.Subsite, RT.PlatformSite])
        self.assertIn(RT.PlatformSite, ids)
        self.assertIn(RT.Subsite, ids)
        self.assertIn(stuff.platform_site_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_siteb_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_siteb2_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_site3_id, ids[RT.PlatformSite])
        self.assertIn(stuff.subsite_id, ids[RT.Subsite])
        self.assertIn(stuff.subsite2_id, ids[RT.Subsite])
        self.assertIn(stuff.subsitez_id, ids[RT.Subsite])
        self.assertIn(stuff.subsiteb_id, ids[RT.Subsite])
        self.assertNotIn(RT.InstrumentSite, ids)

        #partial traversal, only down to platform
        ids = short(stuff.instrument_site_id, [RT.Subsite, RT.PlatformSite])
        self.assertIn(RT.PlatformSite, ids)
        self.assertIn(RT.Subsite, ids)
        self.assertIn(stuff.platform_siteb_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_site_id, ids[RT.PlatformSite])
        self.assertIn(stuff.subsite_id, ids[RT.Subsite])
        self.assertIn(stuff.subsiteb_id, ids[RT.Subsite])
        self.assertNotIn(stuff.subsite2_id, ids[RT.Subsite])
        self.assertNotIn(stuff.subsitez_id, ids[RT.Subsite])
        self.assertNotIn(stuff.platform_siteb2_id, ids[RT.PlatformSite])
        self.assertNotIn(RT.Observatory, ids)

    def _make_associations(self):
        """
        create one of each resource and association used by OMS
        to guard against problems in ion-definitions
        """

        #raise unittest.SkipTest("https://jira.oceanobservatories.org/tasks/browse/CISWCORE-41")

        """
        the tree we're creating (observatory, sites, platforms, instruments)

        rows are lettered, colums numbered.
        - first row is implied a
        - first column is implied 1
        - site Z, just because

        O--Sz
        |
        S--S2--P3--I4
        |
        Sb-Pb2-Ib3
        |
        P--I2
        |
        Pb
        |
        I
        """

        #stuff we control
        observatory_id, _ = self.RR.create(any_old(RT.Observatory))
        subsite_id, _ = self.RR.create(any_old(RT.Subsite))
        subsite2_id, _ = self.RR.create(any_old(RT.Subsite))
        subsiteb_id, _ = self.RR.create(any_old(RT.Subsite))
        subsitez_id, _ = self.RR.create(any_old(RT.Subsite))
        platform_site_id, _ = self.RR.create(any_old(RT.PlatformSite))
        platform_siteb_id, _ = self.RR.create(any_old(RT.PlatformSite))
        platform_siteb2_id, _ = self.RR.create(any_old(RT.PlatformSite))
        platform_site3_id, _ = self.RR.create(any_old(RT.PlatformSite))
        instrument_site_id, _ = self.RR.create(any_old(RT.InstrumentSite))
        instrument_site2_id, _ = self.RR.create(any_old(RT.InstrumentSite))
        instrument_siteb3_id, _ = self.RR.create(any_old(RT.InstrumentSite))
        instrument_site4_id, _ = self.RR.create(any_old(RT.InstrumentSite))

        #stuff we associate to
        instrument_model_id, _ = self.RR.create(any_old(RT.InstrumentModel))
        instrument_device_id, _ = self.RR.create(any_old(RT.InstrumentDevice))
        platform_model_id, _ = self.RR.create(any_old(RT.PlatformModel))
        platform_device_id, _ = self.RR.create(any_old(RT.PlatformDevice))
        deployment_id, _ = self.RR.create(any_old(RT.Deployment))

        #observatory
        self.RR.create_association(observatory_id, PRED.hasSite, subsite_id)
        self.RR.create_association(observatory_id, PRED.hasSite, subsitez_id)

        #site
        self.RR.create_association(subsite_id, PRED.hasSite, subsite2_id)
        self.RR.create_association(subsite_id, PRED.hasSite, subsiteb_id)
        self.RR.create_association(subsite2_id, PRED.hasSite, platform_site3_id)
        self.RR.create_association(subsiteb_id, PRED.hasSite, platform_siteb2_id)
        self.RR.create_association(subsiteb_id, PRED.hasSite, platform_site_id)

        #platform_site
        self.RR.create_association(platform_site3_id, PRED.hasSite, instrument_site4_id)
        self.RR.create_association(platform_siteb2_id, PRED.hasSite, instrument_siteb3_id)
        self.RR.create_association(platform_site_id, PRED.hasSite, instrument_site2_id)
        self.RR.create_association(platform_site_id, PRED.hasSite, platform_siteb_id)
        self.RR.create_association(platform_siteb_id, PRED.hasSite, instrument_site_id)

        self.RR.create_association(platform_site_id, PRED.hasModel, platform_model_id)
        self.RR.create_association(platform_site_id, PRED.hasDevice, platform_device_id)
        self.RR.create_association(platform_site_id, PRED.hasDeployment, deployment_id)

        #instrument_site
        self.RR.create_association(instrument_site_id, PRED.hasModel, instrument_model_id)
        self.RR.create_association(instrument_site_id, PRED.hasDevice, instrument_device_id)
        self.RR.create_association(instrument_site_id, PRED.hasDeployment, deployment_id)

        ret = DotDict()
        ret.observatory_id = observatory_id
        ret.subsite_id = subsite_id
        ret.subsite2_id = subsite2_id
        ret.subsiteb_id = subsiteb_id
        ret.subsitez_id = subsitez_id
        ret.platform_site_id = platform_site_id
        ret.platform_siteb_id = platform_siteb_id
        ret.platform_siteb2_id = platform_siteb2_id
        ret.platform_site3_id = platform_site3_id
        ret.instrument_site_id = instrument_site_id
        ret.instrument_site2_id = instrument_site2_id
        ret.instrument_siteb3_id = instrument_siteb3_id
        ret.instrument_site4_id = instrument_site4_id

        return ret

    #@unittest.skip("targeting")
    def test_create_observatory(self):
        """Verify an Observatory resource can be created."""
        observatory_obj = IonObject(RT.Observatory,
                                    name='TestFacility',
                                    description='some new mf')
        self.OMS.create_observatory(observatory_obj)

    #@unittest.skip("targeting")
    def test_find_observatory_org(self):
        """Build an Org/Observatory/Subsite/Platform/Instrument chain and verify org links."""
        org_obj = IonObject(RT.Org,
                            name='TestOrg',
                            description='some new mf org')
        org_id = self.OMS.create_marine_facility(org_obj)

        observatory_obj = IonObject(RT.Observatory,
                                    name='TestObservatory',
                                    description='some new obs')
        observatory_id = self.OMS.create_observatory(observatory_obj)

        #make association
        self.OMS.assign_resource_to_observatory_org(observatory_id, org_id)

        #find association
        org_objs = self.OMS.find_org_by_observatory(observatory_id)
        self.assertEqual(1, len(org_objs))
        self.assertEqual(org_id, org_objs[0]._id)
        print("org_id=<" + org_id + ">")

        #create a subsite with parent Observatory
        subsite_obj = IonObject(RT.Subsite,
                                name='TestSubsite',
                                description='sample subsite')
        subsite_id = self.OMS.create_subsite(subsite_obj, observatory_id)
        self.assertIsNotNone(subsite_id, "Subsite not created.")

        # verify that Subsite is linked to Observatory
        mf_subsite_assoc = self.RR.get_association(observatory_id, PRED.hasSite, subsite_id)
        self.assertIsNotNone(mf_subsite_assoc, "Subsite not connected to Observatory.")

        # add the Subsite as a resource of this Observatory
        self.OMS.assign_resource_to_observatory_org(resource_id=subsite_id, org_id=org_id)
        # verify that Subsite is linked to Org
        org_subsite_assoc = self.RR.get_association(org_id, PRED.hasResource, subsite_id)
        self.assertIsNotNone(org_subsite_assoc, "Subsite not connected as resource to Org.")

        #create a logical platform with parent Subsite
        platform_site_obj = IonObject(RT.PlatformSite,
                                      name='TestPlatformSite',
                                      description='sample logical platform')
        platform_site_id = self.OMS.create_platform_site(platform_site_obj, subsite_id)
        self.assertIsNotNone(platform_site_id, "PlatformSite not created.")

        # verify that PlatformSite is linked to Site
        site_lp_assoc = self.RR.get_association(subsite_id, PRED.hasSite, platform_site_id)
        self.assertIsNotNone(site_lp_assoc, "PlatformSite not connected to Site.")

        # add the PlatformSite as a resource of this Observatory
        self.OMS.assign_resource_to_observatory_org(resource_id=platform_site_id, org_id=org_id)
        # verify that PlatformSite is linked to Org
        org_lp_assoc = self.RR.get_association(org_id, PRED.hasResource, platform_site_id)
        self.assertIsNotNone(org_lp_assoc, "PlatformSite not connected as resource to Org.")

        #create a logical instrument with parent logical platform
        instrument_site_obj = IonObject(RT.InstrumentSite,
                                        name='TestInstrumentSite',
                                        description='sample logical instrument')
        instrument_site_id = self.OMS.create_instrument_site(instrument_site_obj,
                                                             platform_site_id)
        self.assertIsNotNone(instrument_site_id, "InstrumentSite not created.")

        # verify that InstrumentSite is linked to PlatformSite
        li_lp_assoc = self.RR.get_association(platform_site_id, PRED.hasSite,
                                              instrument_site_id)
        self.assertIsNotNone(li_lp_assoc, "InstrumentSite not connected to PlatformSite.")

        # add the InstrumentSite as a resource of this Observatory
        self.OMS.assign_resource_to_observatory_org(resource_id=instrument_site_id,
                                                    org_id=org_id)
        # verify that InstrumentSite is linked to Org
        org_li_assoc = self.RR.get_association(org_id, PRED.hasResource, instrument_site_id)
        self.assertIsNotNone(org_li_assoc, "InstrumentSite not connected as resource to Org.")

        # remove the InstrumentSite as a resource of this Observatory
        self.OMS.unassign_resource_from_observatory_org(instrument_site_id, org_id)
        # verify that InstrumentSite is linked to Org
        assocs, _ = self.RR.find_objects(org_id, PRED.hasResource, RT.InstrumentSite,
                                         id_only=True)
        self.assertEqual(len(assocs), 0)

        # remove the InstrumentSite
        self.OMS.delete_instrument_site(instrument_site_id)
        assocs, _ = self.RR.find_objects(platform_site_id, PRED.hasSite, RT.InstrumentSite,
                                         id_only=True)
        self.assertEqual(len(assocs), 1)
        #todo: remove the dangling association

        # remove the PlatformSite as a resource of this Observatory
        self.OMS.unassign_resource_from_observatory_org(platform_site_id, org_id)
        # verify that PlatformSite is linked to Org
        assocs, _ = self.RR.find_objects(org_id, PRED.hasResource, RT.PlatformSite,
                                         id_only=True)
        self.assertEqual(len(assocs), 0)

        # remove the Site as a resource of this Observatory
        self.OMS.unassign_resource_from_observatory_org(subsite_id, org_id)
        # verify that Site is linked to Org
        assocs, _ = self.RR.find_objects(org_id, PRED.hasResource, RT.Subsite,
                                         id_only=True)
        self.assertEqual(len(assocs), 0)
class DiscoveryIntTest(IonIntegrationTestCase):
    """
    Integration tests for the Discovery service: resource-graph traversal and
    (when ElasticSearch is available, per the module-level `use_es` flag)
    view/catalog management and index-backed searching.
    """

    def setUp(self):
        super(DiscoveryIntTest, self).setUp()

        self._start_container()
        self.addCleanup(DiscoveryIntTest.es_cleanup)
        self.container.start_rel_from_url('res/deploy/r2dm.yml')

        self.discovery = DiscoveryServiceClient()
        self.catalog = CatalogManagementServiceClient()
        self.ims = IndexManagementServiceClient()
        self.rr = ResourceRegistryServiceClient()

        if use_es:
            self.es_host = CFG.get_safe('server.elasticsearch.host', 'localhost')
            self.es_port = CFG.get_safe('server.elasticsearch.port', '9200')
            # keep test indexes minimal: one shard, no replication
            CFG.server.elasticsearch.shards = 1
            CFG.server.elasticsearch.replicas = 0
            CFG.server.elasticsearch.river_shards = 1
            CFG.server.elasticsearch.river_replicas = 0
            self.es = ep.ElasticSearch(
                host=self.es_host,
                port=self.es_port,
                timeout=10,
                verbose=True
            )
            # rebuild the ES indexes from a clean slate for each test run
            op = DotDict(CFG)
            op.op = 'clean_bootstrap'
            self.container.spawn_process('index_bootstrap', 'ion.processes.bootstrap.index_bootstrap', 'IndexBootStrap', op)

    @staticmethod
    def es_cleanup():
        """Delete the ES rivers and indexes created during the test run."""
        es_host = CFG.get_safe('server.elasticsearch.host', 'localhost')
        es_port = CFG.get_safe('server.elasticsearch.port', '9200')
        es = ep.ElasticSearch(
            host=es_host,
            port=es_port,
            timeout=10
        )
        indexes = STD_INDEXES.keys()
        indexes.append('%s_resources_index' % get_sys_name().lower())
        indexes.append('%s_events_index' % get_sys_name().lower())
        for index in indexes:
            IndexManagementService._es_call(es.river_couchdb_delete, index)
            IndexManagementService._es_call(es.index_delete, index)

    def poll(self, tries, callback, *args, **kwargs):
        '''
        Polling wrapper for queries

        Elasticsearch may not index and cache the changes right away so we
        may need a couple of tries and a little time to go by before the
        results show.  Returns the first truthy callback result, or None
        after `tries` attempts (0.2 s apart).
        '''
        for i in xrange(tries):
            retval = callback(*args, **kwargs)
            if retval:
                return retval
            time.sleep(0.2)
        return None

    def test_traversal(self):
        # Build a 3-node chain: DataProcess -> Transform -> ProcessDefinition,
        # then verify traverse() reaches every node below the root.
        dp = DataProcess()
        transform = Transform()
        pd = ProcessDefinition()

        dp_id, _ = self.rr.create(dp)
        transform_id, _ = self.rr.create(transform)
        pd_id, _ = self.rr.create(pd)

        self.rr.create_association(subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.rr.create_association(subject=transform_id, object=pd_id, predicate=PRED.hasProcessDefinition)

        results = self.discovery.traverse(dp_id)
        results.sort()
        correct = [pd_id, transform_id]
        correct.sort()
        self.assertTrue(results == correct, '%s' % results)

    def test_iterative_traversal(self):
        # Same chain as test_traversal, but traversed one depth level at a time.
        dp = DataProcess()
        transform = Transform()
        pd = ProcessDefinition()

        dp_id, _ = self.rr.create(dp)
        transform_id, _ = self.rr.create(transform)
        pd_id, _ = self.rr.create(pd)

        self.rr.create_association(subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.rr.create_association(subject=transform_id, object=pd_id, predicate=PRED.hasProcessDefinition)

        # default depth: only the immediate neighbor
        results = self.discovery.iterative_traverse(dp_id)
        results.sort()
        correct = [transform_id]
        self.assertTrue(results == correct)

        # depth 1 (one extra hop): neighbor plus its neighbor
        results = self.discovery.iterative_traverse(dp_id, 1)
        results.sort()
        correct = [transform_id, pd_id]
        correct.sort()
        self.assertTrue(results == correct)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_view_crud(self):
        # Create / read / update / delete cycle for a discovery View.
        view_id = self.discovery.create_view('big_view', fields=['name'])
        catalog_id = self.discovery.list_catalogs(view_id)[0]
        index_ids = self.catalog.list_indexes(catalog_id)
        self.assertTrue(len(index_ids))

        view = self.discovery.read_view(view_id)
        self.assertIsInstance(view, View)
        self.assertTrue(view.name == 'big_view')

        view.name = 'not_so_big_view'
        self.discovery.update_view(view)
        view = self.discovery.read_view(view_id)
        self.assertTrue(view.name == 'not_so_big_view')

        self.discovery.delete_view(view_id)
        with self.assertRaises(NotFound):
            self.discovery.read_view(view_id)

    def test_view_best_match(self):
        #---------------------------------------------------------------
        # Matches the best catalog available OR creates a new one
        #---------------------------------------------------------------
        catalog_id = self.catalog.create_catalog('dev', keywords=['name', 'model'])
        view_id = self.discovery.create_view('exact_view', fields=['name', 'model'])
        catalog_ids = self.discovery.list_catalogs(view_id)
        self.assertTrue(catalog_ids == [catalog_id])

        # a second view with the same fields should reuse the same catalog
        view_id = self.discovery.create_view('another_view', fields=['name', 'model'])
        catalog_ids = self.discovery.list_catalogs(view_id)
        self.assertTrue(catalog_ids == [catalog_id])

        # a view with different fields must NOT match that catalog
        view_id = self.discovery.create_view('big_view', fields=['name'])
        catalog_ids = self.discovery.list_catalogs(view_id)
        self.assertTrue(catalog_ids != [catalog_id])

    @skipIf(not use_es, 'No ElasticSearch')
    def test_basic_searching(self):
        #- - - - - - - - - - - - - - - - -
        # set up the fake resources
        #- - - - - - - - - - - - - - - - -
        instrument_pool = [
            InstrumentDevice(name='sonobuoy1', hardware_version='1'),
            InstrumentDevice(name='sonobuoy2', hardware_version='2'),
            InstrumentDevice(name='sonobuoy3', hardware_version='3')
        ]
        for instrument in instrument_pool:
            self.rr.create(instrument)

        view_id = self.discovery.create_view('devices', fields=['hardware_version'])
        search_string = "search 'hardware_version' is '2' from '%s'" % view_id

        # poll because ES indexing is asynchronous (see self.poll)
        results = self.poll(5, self.discovery.parse, search_string)
        result = results[0]['_source']
        self.assertIsInstance(result, InstrumentDevice)
        self.assertTrue(result.name == 'sonobuoy2')
        self.assertTrue(result.hardware_version == '2')

    @skipIf(not use_es, 'No ElasticSearch')
    def test_associative_searching(self):
        # Search constrained by association: device must belong to the site.
        view_id = self.discovery.create_view('devices', fields=['model'])
        site_id, _ = self.rr.create(Site('my_site'))
        pd_id, _ = self.rr.create(PlatformDevice('my_device', model='abc123'))
        self.rr.create_association(subject=site_id, object=pd_id, predicate=PRED.hasDevice)

        search_string = "search 'model' is 'abc*' from '%s' and belongs to '%s'" % (view_id, site_id)

        results = self.poll(5, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(pd_id in results)

    def test_iterative_associative_searching(self):
        #--------------------------------------------------------------------------------
        # Tests the ability to limit the iterations
        #--------------------------------------------------------------------------------
        dp = DataProcess()
        transform = Transform()
        pd = ProcessDefinition()

        dp_id, _ = self.rr.create(dp)
        transform_id, _ = self.rr.create(transform)
        pd_id, _ = self.rr.create(pd)

        self.rr.create_association(subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.rr.create_association(subject=transform_id, object=pd_id, predicate=PRED.hasProcessDefinition)

        # depth 1: only direct neighbors
        search_string = "belongs to '%s' depth 1" % dp_id
        results = self.poll(5, self.discovery.parse, search_string)
        results = list([i._id for i in results])
        correct = [transform_id]
        self.assertTrue(results == correct, '%s' % results)

        # depth 2: neighbors of neighbors too
        search_string = "belongs to '%s' depth 2" % dp_id
        results = self.poll(5, self.discovery.parse, search_string)
        results = list([i._id for i in results])
        results.sort()
        correct = [transform_id, pd_id]
        correct.sort()
        self.assertTrue(results == correct)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_ranged_value_searching(self):
        discovery = self.discovery
        rr = self.rr

        view_id = discovery.create_view('bank_view', fields=['cash_balance'])
        bank_id, _ = rr.create(BankAccount(name='broke', cash_balance=10))

        # numeric range query: 0 <= cash_balance <= 100
        search_string = "search 'cash_balance' values from 0 to 100 from '%s'" % view_id

        results = self.poll(5, discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == bank_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_collections_searching(self):
        site_id, _ = self.rr.create(Site(name='black_mesa'))
        view_id = self.discovery.create_view('big', fields=['name'])

        # Add the site to a new collection
        collection_id = self.ims.create_collection('resource_collection', [site_id])

        search_string = "search 'name' is '*' from '%s' and in '%s'" % (view_id, collection_id)

        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0] == site_id, '%s' % results)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_search_by_name(self):
        inst_dev = InstrumentDevice(name='test_dev', serial_number='ABC123')

        dev_id, _ = self.rr.create(inst_dev)
        self.discovery.create_view('devs', fields=['name', 'serial_number'])

        # wildcard match is case-insensitive against 'ABC123'
        search_string = "search 'serial_number' is 'abc*' from 'devs'"
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dev_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_search_by_name_index(self):
        # Same queries as above but against the built-in system-wide indexes
        # rather than a user-created view.
        inst_dev = InstrumentDevice(name='test_dev', serial_number='ABC123')

        dev_id, _ = self.rr.create(inst_dev)
        search_string = "search 'serial_number' is 'abc*' from 'resources_index'"

        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dev_id)

        bank_acc = BankAccount(name='blah', cash_balance=10)
        res_id, _ = self.rr.create(bank_acc)

        search_string = "search 'cash_balance' values from 0 to 100 from 'resources_index'"

        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == res_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_data_product_search(self):
        # Create the dataproduct
        dp = DataProduct(name='test_product')
        dp.data_format.name = 'test_signal'
        dp.data_format.description = 'test signal'
        dp.data_format.character_set = 'utf8'
        dp.data_format.nominal_sampling_rate_maximum = '44000'
        dp.data_format.nominal_sampling_rate_minimum = '44000'
        dp.data_product_level = 'basic'
        dp_id, _ = self.rr.create(dp)

        # Search on a nested field of the data product
        search_string = "search 'data_format.name' is 'test_signal' from 'data_products_index'"
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)

        # Search on a top-level field
        search_string = "search 'data_product_level' is 'basic' from 'data_products_index'"
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)

        # Another nested field
        search_string = "search 'data_format.character_set' is 'utf8' from 'data_products_index'"
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_events_search(self):
        # Create a resource to force a new event
        dp = DataProcess()
        dp_id, rev = self.rr.create(dp)

        search_string = "SEARCH 'origin' IS '%s' FROM 'events_index'" % dp_id

        results = self.poll(9, self.discovery.parse, search_string)
        origin_type = results[0]['_source'].origin_type
        origin_id = results[0]['_source'].origin

        self.assertTrue(origin_type == RT.DataProcess)
        self.assertTrue(origin_id == dp_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_geo_distance_search(self):
        # Device defaults to lat/lon 0/0, inside the 20 km radius of (0, 0).
        pd = PlatformDevice(name='test_dev')

        pd_id, _ = self.rr.create(pd)

        search_string = "search 'nominal_location' geo distance 20 km from lat 0 lon 0 from 'devices_index'"

        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == pd_id)
        self.assertTrue(results[0]['_source'].name == 'test_dev')

    @skipIf(not use_es, 'No ElasticSearch')
    def test_geo_bbox_search(self):
        # Place the device at (5, 5), inside the (10,0)-(0,10) bounding box.
        pd = PlatformDevice(name='test_dev')
        pd.nominal_location.lat = 5
        pd.nominal_location.lon = 5

        pd_id, _ = self.rr.create(pd)

        search_string = "search 'nominal_location' geo box top-left lat 10 lon 0 bottom-right lat 0 lon 10 from 'devices_index'"

        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == pd_id)
        self.assertTrue(results[0]['_source'].name == 'test_dev')

    @skipIf(not use_es, 'No ElasticSearch')
    def test_user_search(self):
        user = UserInfo()
        user.name = 'test'
        user.contact.phone = '5551212'

        user_id, _ = self.rr.create(user)

        # top-level field search on the users index
        search_string = 'search "name" is "test" from "users_index"'

        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == user_id)
        self.assertTrue(results[0]['_source'].name == 'test')

        # nested contact field search
        search_string = 'search "contact.phone" is "5551212" from "users_index"'

        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == user_id)
        self.assertTrue(results[0]['_source'].name == 'test')
class TestOmsLaunch(IonIntegrationTestCase):
    """
    Integration test that builds an ION representation (sites, devices,
    agents, data products) of an RSN-OMS platform network, launches the base
    platform agent, drives it through its command lifecycle, and verifies
    that data samples and events are received.
    """

    def setUp(self):
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(node=self.container.node)
        self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()

        # set in _set_up_PlatformModel_obj; shared by all platforms
        self.platformModel_id = None

        # rsn_oms: to retrieve network structure and information from RSN-OMS:
        # Note that OmsClientFactory will create an "embedded" RSN OMS
        # simulator object by default.
        self.rsn_oms = OmsClientFactory.create_instance()

        # per-platform bookkeeping filled in by _traverse/_prepare_platform
        self.all_platforms = {}           # platform_id -> plat_objs dict
        self.topology = {}                # platform_id -> list of child ids
        self.agent_device_map = {}        # platform_id -> PlatformDevice obj
        self.agent_streamconfig_map = {}  # platform_id -> stream config dict

        # data-sample reception plumbing (see _start_data_subscriber)
        self._async_data_result = AsyncResult()
        self._data_subscribers = []
        self._samples_received = []
        self.addCleanup(self._stop_data_subscribers)

        # event reception plumbing (see _start_event_subscriber)
        self._async_event_result = AsyncResult()
        self._event_subscribers = []
        self._events_received = []
        self.addCleanup(self._stop_event_subscribers)
        self._start_event_subscriber()

        self._set_up_DataProduct_obj()
        self._set_up_PlatformModel_obj()

    def _set_up_DataProduct_obj(self):
        # Create data product object to be used for each of the platform log streams
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('platform_eng_parsed', id_only=True)
        self.platform_eng_stream_def_id = self.pubsubcli.create_stream_definition(
            name='platform_eng', parameter_dictionary_id=pdict_id)

        # template data product; _create_stream_config clones/renames it
        # per platform before creating the actual product
        self.dp_obj = IonObject(RT.DataProduct,
                                name='platform_eng data',
                                description='platform_eng test',
                                temporal_domain=tdom,
                                spatial_domain=sdom)

    def _set_up_PlatformModel_obj(self):
        # Create PlatformModel shared by every platform device/agent in the test
        platformModel_obj = IonObject(RT.PlatformModel,
                                      name='RSNPlatformModel',
                                      description="RSNPlatformModel")
        try:
            self.platformModel_id = self.imsclient.create_platform_model(platformModel_obj)
        except BadRequest as ex:
            self.fail("failed to create new PLatformModel: %s" % ex)
        log.debug('new PlatformModel id = %s', self.platformModel_id)

    def _traverse(self, platform_id, parent_platform_objs=None):
        """
        Recursive routine that repeatedly calls _prepare_platform to build
        the object dictionary for each platform.

        @param platform_id ID of the platform to be visited
        @param parent_platform_objs dict of objects associated to parent
                        platform, if any.

        @retval the dict returned by _prepare_platform at this level.
        """
        log.info("Starting _traverse for %r", platform_id)

        plat_objs = self._prepare_platform(platform_id, parent_platform_objs)

        self.all_platforms[platform_id] = plat_objs

        # now, traverse the children:
        retval = self.rsn_oms.getSubplatformIDs(platform_id)
        subplatform_ids = retval[platform_id]
        for subplatform_id in subplatform_ids:
            self._traverse(subplatform_id, plat_objs)

        # note, topology indexed by platform_id
        self.topology[platform_id] = plat_objs['children']

        return plat_objs

    def _prepare_platform(self, platform_id, parent_platform_objs):
        """
        This routine generalizes the manual construction currently done in
        test_oms_launch.py. It is called by the recursive _traverse method so
        all platforms starting from a given base platform are prepared.

        Note: For simplicity in this test, sites are organized in the same
        hierarchical way as the platforms themselves.

        @param platform_id ID of the platform to be visited
        @param parent_platform_objs dict of objects associated to parent
                        platform, if any.

        @retval a dict of associated objects similar to those in
                test_oms_launch
        """
        site__obj = IonObject(RT.PlatformSite,
                              name='%s_PlatformSite' % platform_id,
                              description='%s_PlatformSite platform site' % platform_id)

        site_id = self.omsclient.create_platform_site(site__obj)

        if parent_platform_objs:
            # establish hasSite association with the parent
            self.rrclient.create_association(
                subject=parent_platform_objs['site_id'],
                predicate=PRED.hasSite,
                object=site_id)

        # prepare platform attributes and ports:
        monitor_attributes = self._prepare_platform_attributes(platform_id)
        ports = self._prepare_platform_ports(platform_id)

        device__obj = IonObject(RT.PlatformDevice,
                                name='%s_PlatformDevice' % platform_id,
                                description='%s_PlatformDevice platform device' % platform_id,
                                ports=ports,
                                platform_monitor_attributes=monitor_attributes)

        device_id = self.imsclient.create_platform_device(device__obj)

        self.imsclient.assign_platform_model_to_platform_device(self.platformModel_id, device_id)
        self.rrclient.create_association(subject=site_id, predicate=PRED.hasDevice, object=device_id)
        self.damsclient.register_instrument(instrument_id=device_id)

        if parent_platform_objs:
            # establish hasDevice association with the parent
            self.rrclient.create_association(
                subject=parent_platform_objs['device_id'],
                predicate=PRED.hasDevice,
                object=device_id)

        agent__obj = IonObject(RT.PlatformAgent,
                               name='%s_PlatformAgent' % platform_id,
                               description='%s_PlatformAgent platform agent' % platform_id)

        agent_id = self.imsclient.create_platform_agent(agent__obj)

        if parent_platform_objs:
            # add this platform_id to parent's children:
            parent_platform_objs['children'].append(platform_id)

        self.imsclient.assign_platform_model_to_platform_agent(self.platformModel_id, agent_id)

        # NOTE: agent instances are created later, in
        # _set_platform_agent_instances, once the full topology is known:
        #        agent_instance_obj = IonObject(RT.PlatformAgentInstance,
        #                                name='%s_PlatformAgentInstance' % platform_id,
        #                                description="%s_PlatformAgentInstance" % platform_id)
        #
        #        agent_instance_id = self.imsclient.create_platform_agent_instance(
        #                            agent_instance_obj, agent_id, device_id)

        plat_objs = {
            'platform_id':        platform_id,
            'site__obj':          site__obj,
            'site_id':            site_id,
            'device__obj':        device__obj,
            'device_id':          device_id,
            'agent__obj':         agent__obj,
            'agent_id':           agent_id,
            #            'agent_instance_obj': agent_instance_obj,
            #            'agent_instance_id':  agent_instance_id,
            'children':           []
        }

        log.info("plat_objs for platform_id %r = %s", platform_id, str(plat_objs))

        self.agent_device_map[platform_id] = device__obj

        stream_config = self._create_stream_config(plat_objs)
        self.agent_streamconfig_map[platform_id] = stream_config
        #        self._start_data_subscriber(agent_instance_id, stream_config)

        return plat_objs

    def _prepare_platform_attributes(self, platform_id):
        """
        Returns the list of PlatformMonitorAttributes objects corresponding to
        the attributes associated to the given platform.
        """
        result = self.rsn_oms.getPlatformAttributes(platform_id)
        self.assertTrue(platform_id in result)

        ret_infos = result[platform_id]

        monitor_attributes = []
        for attrName, attrDfn in ret_infos.iteritems():
            log.debug("platform_id=%r: preparing attribute=%r", platform_id, attrName)

            monitor_rate = attrDfn['monitorCycleSeconds']
            units = attrDfn['units']

            plat_attr_obj = IonObject(OT.PlatformMonitorAttributes,
                                      id=attrName,
                                      monitor_rate=monitor_rate,
                                      units=units)

            monitor_attributes.append(plat_attr_obj)

        return monitor_attributes

    def _prepare_platform_ports(self, platform_id):
        """
        Returns the list of PlatformPort objects corresponding to the ports
        associated to the given platform.
        """
        result = self.rsn_oms.getPlatformPorts(platform_id)
        self.assertTrue(platform_id in result)

        port_dict = result[platform_id]

        ports = []
        for port_id, port in port_dict.iteritems():
            log.debug("platform_id=%r: preparing port=%r", platform_id, port_id)

            ip_address = port['comms']['ip']
            plat_port_obj = IonObject(OT.PlatformPort,
                                      port_id=port_id,
                                      ip_address=ip_address)

            ports.append(plat_port_obj)

        return ports

    def _create_stream_config(self, plat_objs):
        """
        Creates the per-platform platform_eng data product and returns the
        stream configuration dict for its output stream.
        """
        platform_id = plat_objs['platform_id']
        device_id = plat_objs['device_id']

        # create the log data product
        self.dp_obj.name = '%s platform_eng data' % platform_id
        data_product_id = self.dpclient.create_data_product(
            data_product=self.dp_obj,
            stream_definition_id=self.platform_eng_stream_def_id)
        self.damsclient.assign_data_product(
            input_resource_id=device_id,
            data_product_id=data_product_id)
        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(data_product_id, PRED.hasStream, None, True)

        stream_config = self._build_stream_config(stream_ids[0])
        return stream_config

    def _build_stream_config(self, stream_id=''):
        """Builds the agent stream-config dict for the given stream id."""
        platform_eng_dictionary = DatasetManagementService.get_parameter_dictionary_by_name('platform_eng_parsed')

        # get the streamroute object from pubsub by passing the stream_id
        stream_def_ids, _ = self.rrclient.find_objects(stream_id,
                                                       PRED.hasStreamDefinition,
                                                       RT.StreamDefinition,
                                                       True)

        stream_route = self.pubsubcli.read_stream_route(stream_id=stream_id)
        stream_config = {'routing_key': stream_route.routing_key,
                         'stream_id': stream_id,
                         'stream_definition_ref': stream_def_ids[0],
                         'exchange_point': stream_route.exchange_point,
                         'parameter_dictionary': platform_eng_dictionary.dump()}

        return stream_config

    def _set_platform_agent_instances(self):
        """
        Once most of the objs/defs associated with all platforms are in
        place, this method creates and associates the PlatformAgentInstance
        elements.
        """
        self.platform_configs = {}
        for platform_id, plat_objs in self.all_platforms.iteritems():

            PLATFORM_CONFIG = self.platform_configs[platform_id] = {
                'platform_id': platform_id,
                'platform_topology': self.topology,
                #                'agent_device_map':        self.agent_device_map,
                'agent_streamconfig_map': self.agent_streamconfig_map,
                'driver_config': DVR_CONFIG,
            }

            agent_config = {
                'platform_config': PLATFORM_CONFIG,
            }

            agent_instance_obj = IonObject(RT.PlatformAgentInstance,
                                           name='%s_PlatformAgentInstance' % platform_id,
                                           description="%s_PlatformAgentInstance" % platform_id,
                                           agent_config=agent_config)

            agent_id = plat_objs['agent_id']
            device_id = plat_objs['device_id']
            agent_instance_id = self.imsclient.create_platform_agent_instance(
                agent_instance_obj, agent_id, device_id)

            plat_objs['agent_instance_obj'] = agent_instance_obj
            plat_objs['agent_instance_id'] = agent_instance_id

            stream_config = self.agent_streamconfig_map[platform_id]
            self._start_data_subscriber(agent_instance_id, stream_config)

    def _start_data_subscriber(self, stream_name, stream_config):
        """
        Starts data subscriber for the given stream_name and stream_config
        """

        def consume_data(message, stream_route, stream_id):
            # A callback for processing subscribed-to data.
            log.info('Subscriber received data message: %s.', str(message))
            self._samples_received.append(message)
            self._async_data_result.set()

        log.info('_start_data_subscriber stream_name=%r', stream_name)

        stream_id = stream_config['stream_id']

        # Create subscription for the stream
        exchange_name = '%s_queue' % stream_name
        self.container.ex_manager.create_xn_queue(exchange_name).purge()
        sub = StandaloneStreamSubscriber(exchange_name, consume_data)
        sub.start()
        self._data_subscribers.append(sub)
        sub_id = self.pubsubcli.create_subscription(name=exchange_name, stream_ids=[stream_id])
        self.pubsubcli.activate_subscription(sub_id)
        sub.subscription_id = sub_id

    def _stop_data_subscribers(self):
        """
        Stop the data subscribers on cleanup.
        """
        try:
            for sub in self._data_subscribers:
                if hasattr(sub, 'subscription_id'):
                    try:
                        # deactivation may fail if already inactive; best-effort
                        self.pubsubcli.deactivate_subscription(sub.subscription_id)
                    except:
                        pass
                    self.pubsubcli.delete_subscription(sub.subscription_id)
                sub.stop()
        finally:
            self._data_subscribers = []

    def _start_event_subscriber(self, event_type="PlatformAlarmEvent", sub_type="power"):
        """
        Starts event subscriber for events of given event_type ("PlatformAlarmEvent"
        by default) and given sub_type ("power" by default).
        """
        # TODO note: ion-definitions still using 'PlatformAlarmEvent' but we
        # should probably define 'PlatformExternalEvent' or something like that.

        def consume_event(evt, *args, **kwargs):
            # A callback for consuming events.
            log.info('Event subscriber received evt: %s.', str(evt))
            self._events_received.append(evt)
            self._async_event_result.set(evt)

        sub = EventSubscriber(event_type=event_type,
                              sub_type=sub_type,
                              callback=consume_event)

        sub.start()
        log.info("registered event subscriber for event_type=%r, sub_type=%r",
                 event_type, sub_type)

        self._event_subscribers.append(sub)
        sub._ready_event.wait(timeout=EVENT_TIMEOUT)

    def _stop_event_subscribers(self):
        """
        Stops the event subscribers on cleanup.
        """
        try:
            for sub in self._event_subscribers:
                if hasattr(sub, 'subscription_id'):
                    try:
                        # best-effort, mirrors _stop_data_subscribers
                        self.pubsubcli.deactivate_subscription(sub.subscription_id)
                    except:
                        pass
                    self.pubsubcli.delete_subscription(sub.subscription_id)
                sub.stop()
        finally:
            self._event_subscribers = []

    def test_oms_create_and_launch(self):
        # pick a base platform:
        base_platform_id = BASE_PLATFORM_ID

        # and trigger the traversal of the branch rooted at that base platform
        # to create corresponding ION objects and configuration dictionaries:
        base_platform_objs = self._traverse(base_platform_id)

        # now that most of the topology information is there, add the
        # PlatformAgentInstance elements
        self._set_platform_agent_instances()

        base_platform_config = self.platform_configs[base_platform_id]

        log.info("base_platform_id = %r", base_platform_id)
        log.info("topology = %s", str(self.topology))

        #-------------------------------
        # Launch Base Platform AgentInstance, connect to the resource agent client
        #-------------------------------

        agent_instance_id = base_platform_objs['agent_instance_id']
        pid = self.imsclient.start_platform_agent_instance(platform_agent_instance_id=agent_instance_id)
        log.debug("start_platform_agent_instance returned pid=%s", pid)

        # wait for start
        instance_obj = self.imsclient.read_platform_agent_instance(agent_instance_id)
        gate = ProcessStateGate(self.processdispatchclient.read_process,
                                instance_obj.agent_process_id,
                                ProcessStateEnum.RUNNING)
        self.assertTrue(gate.await(90), "The platform agent instance did not spawn in 90 seconds")

        agent_instance_obj = self.imsclient.read_instrument_agent_instance(agent_instance_id)
        log.debug('test_oms_create_and_launch: Platform agent instance obj: %s', str(agent_instance_obj))

        # Start a resource agent client to talk with the instrument agent.
        self._pa_client = ResourceAgentClient('paclient',
                                              name=agent_instance_obj.agent_process_id,
                                              process=FakeProcess())
        log.debug(" test_oms_create_and_launch:: got pa client %s", str(self._pa_client))

        log.debug("base_platform_config =\n%s", base_platform_config)

        # ping_agent can be issued before INITIALIZE
        retval = self._pa_client.ping_agent(timeout=TIMEOUT)
        log.debug('Base Platform ping_agent = %s', str(retval))

        # issue INITIALIZE command to the base platform, which will launch the
        # creation of the whole platform hierarchy rooted at base_platform_config['platform_id']
        #        cmd = AgentCommand(command=PlatformAgentEvent.INITIALIZE, kwargs=dict(plat_config=base_platform_config))
        cmd = AgentCommand(command=PlatformAgentEvent.INITIALIZE)
        retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT)
        log.debug('Base Platform INITIALIZE = %s', str(retval))

        # GO_ACTIVE
        cmd = AgentCommand(command=PlatformAgentEvent.GO_ACTIVE)
        retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT)
        log.debug('Base Platform GO_ACTIVE = %s', str(retval))

        # RUN: this command includes the launch of the resource monitoring greenlets
        cmd = AgentCommand(command=PlatformAgentEvent.RUN)
        retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT)
        log.debug('Base Platform RUN = %s', str(retval))

        # START_EVENT_DISPATCH
        kwargs = dict(params="TODO set params")
        cmd = AgentCommand(command=PlatformAgentEvent.START_EVENT_DISPATCH, kwargs=kwargs)
        retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT)
        self.assertTrue(retval.result is not None)

        # wait for data sample
        # just wait for at least one -- see consume_data above
        log.info("waiting for reception of a data sample...")
        self._async_data_result.get(timeout=DATA_TIMEOUT)
        self.assertTrue(len(self._samples_received) >= 1)

        log.info("waiting a bit more for reception of more data samples...")
        sleep(10)
        log.info("Got data samples: %d", len(self._samples_received))

        # wait for event
        # just wait for at least one event -- see consume_event above
        log.info("waiting for reception of an event...")
        self._async_event_result.get(timeout=EVENT_TIMEOUT)
        log.info("Received events: %s", len(self._events_received))

        # STOP_EVENT_DISPATCH
        cmd = AgentCommand(command=PlatformAgentEvent.STOP_EVENT_DISPATCH)
        retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT)
        self.assertTrue(retval.result is not None)

        # GO_INACTIVE
        cmd = AgentCommand(command=PlatformAgentEvent.GO_INACTIVE)
        retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT)
        log.debug('Base Platform GO_INACTIVE = %s', str(retval))

        # RESET: Resets the base platform agent, which includes termination of
        # its sub-platforms processes:
        cmd = AgentCommand(command=PlatformAgentEvent.RESET)
        retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT)
        log.debug('Base Platform RESET = %s', str(retval))

        #-------------------------------
        # Stop Base Platform AgentInstance
        #-------------------------------
        self.imsclient.stop_platform_agent_instance(platform_agent_instance_id=agent_instance_id)
class TestResourceRegistry(IonIntegrationTestCase):
    """Integration tests for the ResourceRegistry service (CRUD, lifecycle,
    associations, find, attachments).

    Requires a running capability container; ``setUp`` launches the
    ``r2coi`` deployment and talks to the registry through its RPC client.
    NOTE(review): written for Python 2 — relies on ``exception.message``
    and the deprecated ``assertEquals`` alias.
    """

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url("res/deploy/r2coi.yml")

        # Now create client to bank service
        self.resource_registry_service = ResourceRegistryServiceClient(node=self.container.node)

    def test_crud(self):
        """Exercise create/read/update/delete plus ownership association on delete.

        Many asserts pin the registry's exact error-message strings, so this
        test is tightly coupled to the service implementation's wording.
        """
        # Some quick registry tests
        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", name="name", foo="bar")
        self.assertTrue(cm.exception.message == "__init__() got an unexpected keyword argument 'foo'")

        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name", "foo": "bar"})
        self.assertTrue(cm.exception.message == "__init__() got an unexpected keyword argument 'foo'")

        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name"}, foo="bar")
        self.assertTrue(cm.exception.message == "__init__() got an unexpected keyword argument 'foo'")

        # Instantiate an object
        obj = IonObject("UserInfo", name="name")

        # Can't set attributes that aren't in the object's schema
        # (IonObject rejects unknown attributes with AttributeError)
        with self.assertRaises(AttributeError) as cm:
            setattr(obj, "foo", "bar")
        self.assertTrue(cm.exception.message == "'UserInfo' object has no attribute 'foo'")

        # Can't call update with object that hasn't been persisted
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.update(obj)
        self.assertTrue(cm.exception.message.startswith("Object does not have required '_id' or '_rev' attribute"))

        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        # Cannot create object with _id and _rev fields pre-set
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create(read_obj)
        self.assertTrue(cm.exception.message.startswith("Doc must not have '_id'"))

        # Update object
        read_obj.name = "John Doe"
        self.resource_registry_service.update(read_obj)

        # Update should fail with revision mismatch
        # (read_obj still carries the pre-update _rev, so a second update conflicts)
        with self.assertRaises(Conflict) as cm:
            self.resource_registry_service.update(read_obj)
        self.assertTrue(cm.exception.message.startswith("Object not based on most current version"))

        # Re-read and update object
        read_obj = self.resource_registry_service.read(obj_id)
        self.resource_registry_service.update(read_obj)

        # Delete object
        self.resource_registry_service.delete(obj_id)

        # Make sure read, update and delete report error
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.read(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.update(read_obj)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        # Owner creation tests: creating a resource with an ion-actor-id header
        # should auto-create a hasOwner association to that actor
        user = IonObject("ActorIdentity", name="user")
        uid, _ = self.resource_registry_service.create(user)

        inst = IonObject("InstrumentDevice", name="instrument")
        iid, _ = self.resource_registry_service.create(inst, headers={"ion-actor-id": str(uid)})

        ids, _ = self.resource_registry_service.find_objects(iid, PRED.hasOwner, RT.ActorIdentity, id_only=True)
        self.assertEquals(len(ids), 1)

        assoc = self.resource_registry_service.read(ids[0])
        self.resource_registry_service.delete(iid)
        # deleting the resource should also remove it (and presumably its owner
        # association — TODO confirm association cleanup is covered elsewhere)
        with self.assertRaises(NotFound) as ex:
            assoc = self.resource_registry_service.read(iid)

    def test_lifecycle(self):
        """Walk a resource through lifecycle transitions and verify state changes.

        Covers: initial DRAFT_PRIVATE state, legal transitions (PLAN, DEVELOP),
        an illegal transition (UNANNOUNCE), an unknown event name, and a direct
        set_lifecycle_state call.
        """
        att = IonObject("InstrumentDevice", name="mine", description="desc")

        rid, rev = self.resource_registry_service.create(att)

        # Freshly created resources start in DRAFT_PRIVATE
        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.name, att.name)
        self.assertEquals(att1.lcstate, LCS.DRAFT_PRIVATE)

        # PLAN event: DRAFT_PRIVATE -> PLANNED_PRIVATE
        new_state = self.resource_registry_service.execute_lifecycle_transition(rid, LCE.PLAN)
        self.assertEquals(new_state, LCS.PLANNED_PRIVATE)

        att2 = self.resource_registry_service.read(rid)
        self.assertEquals(att2.lcstate, LCS.PLANNED_PRIVATE)

        # UNANNOUNCE is not a legal event from PLANNED_PRIVATE
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.execute_lifecycle_transition(rid, LCE.UNANNOUNCE)
        self.assertTrue(
            "type=InstrumentDevice, lcstate=PLANNED_PRIVATE has no transition for event unannounce"
            in cm.exception.message
        )

        # DEVELOP event: PLANNED_PRIVATE -> DEVELOPED_PRIVATE
        new_state = self.resource_registry_service.execute_lifecycle_transition(rid, LCE.DEVELOP)
        self.assertEquals(new_state, LCS.DEVELOPED_PRIVATE)

        # A completely unknown event name is rejected as BadRequest
        self.assertRaises(
            iex.BadRequest,
            self.resource_registry_service.execute_lifecycle_transition,
            resource_id=rid,
            transition_event="NONE##",
        )

        # Direct state assignment bypasses the transition matrix
        self.resource_registry_service.set_lifecycle_state(rid, LCS.INTEGRATED_PRIVATE)
        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.lcstate, LCS.INTEGRATED_PRIVATE)

    def test_association(self):
        """Exercise association create/read/find/delete between two resources.

        Uses an ActorIdentity -> hasInfo -> UserInfo pair, checking both the
        error paths (bad predicate, missing elements, wrong types, bogus ids)
        and the query paths (find_associations / find_subjects / find_objects,
        by id and by object, id_only True/False).
        """
        # Instantiate ActorIdentity object
        actor_identity_obj = IonObject("ActorIdentity", name="name")
        actor_identity_obj_id, actor_identity_obj_rev = self.resource_registry_service.create(actor_identity_obj)
        read_actor_identity_obj = self.resource_registry_service.read(actor_identity_obj_id)

        # Instantiate UserInfo object
        user_info_obj = IonObject("UserInfo", name="name")
        user_info_obj_id, user_info_obj_rev = self.resource_registry_service.create(user_info_obj)
        read_user_info_obj = self.resource_registry_service.read(user_info_obj_id)

        # Test create failures: unknown predicate raises AttributeError from PRED
        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.bogus, user_info_obj_id)
        self.assertTrue(cm.exception.message == "bogus")

        # Predicate not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Association must have all elements set")

        # Bad association type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, user_info_obj_id, "bogustype"
            )
        self.assertTrue(cm.exception.message == "Unsupported assoc_type: bogustype")

        # Subject id or object not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(None, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Association must have all elements set")

        # Object id or object not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, None)
        self.assertTrue(cm.exception.message == "Association must have all elements set")

        # Bad subject id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association("bogus", PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Object with id bogus does not exist.")

        # Bad object id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, "bogus")
        self.assertTrue(cm.exception.message == "Object with id bogus does not exist.")

        # _id missing from subject (passing the unpersisted object, not its id)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Subject id or rev not available")

        # _id missing from object
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, user_info_obj)
        self.assertTrue(cm.exception.message == "Object id or rev not available")

        # Wrong subject type for the hasInfo predicate
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(user_info_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Illegal subject type UserInfo for predicate hasInfo")

        # Wrong object type for the hasInfo predicate
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, actor_identity_obj_id
            )
        self.assertTrue(cm.exception.message == "Illegal object type ActorIdentity for predicate hasInfo")

        # Create two different association types between the same subject and predicate
        assoc_id1, assoc_rev1 = self.resource_registry_service.create_association(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id
        )

        # Read object, subject — both full-object and id_only forms
        res_obj1 = self.resource_registry_service.read_object(actor_identity_obj_id, PRED.hasInfo, RT.UserInfo)
        self.assertEquals(res_obj1._id, user_info_obj_id)
        res_obj1 = self.resource_registry_service.read_object(
            actor_identity_obj_id, PRED.hasInfo, RT.UserInfo, id_only=True
        )
        self.assertEquals(res_obj1, user_info_obj_id)
        res_obj2 = self.resource_registry_service.read_subject(RT.ActorIdentity, PRED.hasInfo, user_info_obj_id)
        self.assertEquals(res_obj2._id, actor_identity_obj_id)
        res_obj2 = self.resource_registry_service.read_subject(
            RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, id_only=True
        )
        self.assertEquals(res_obj2, actor_identity_obj_id)

        # Create a similar association to a specific revision
        # TODO: This is not a supported case so far
        assoc_id2, assoc_rev2 = self.resource_registry_service.create_association(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id, "H2R"
        )

        # Search for associations (good cases) — by id triple, by subject+predicate,
        # and by predicate alone should all return the same association set here
        ret1 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        ret2 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id, None, False
        )
        ret2 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, id_only=False)
        ret3 = self.resource_registry_service.find_associations(predicate=PRED.hasInfo, id_only=False)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations (good cases) — same queries but passing the
        # read (persisted) resource objects instead of bare ids
        ret1 = self.resource_registry_service.find_associations(
            read_actor_identity_obj, PRED.hasInfo, read_user_info_obj
        )
        ret2 = self.resource_registry_service.find_associations(read_actor_identity_obj, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, read_user_info_obj, None, True
        )
        ret2 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, id_only=True)
        ret3 = self.resource_registry_service.find_associations(predicate=PRED.hasInfo, id_only=True)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(None, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(actor_identity_obj_id, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(None, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(actor_identity_obj, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Object id not available in subject")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(actor_identity_obj_id, None, user_info_obj)
        self.assertTrue(cm.exception.message == "Object id not available in object")

        # Find subjects (good cases)
        # NOTE: find_subjects returns a (resources-or-ids, associations) pair
        subj_ret1 = self.resource_registry_service.find_subjects(RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, True)
        subj_ret2 = self.resource_registry_service.find_subjects(
            RT.ActorIdentity, PRED.hasInfo, read_user_info_obj, True
        )
        self.assertTrue(len(subj_ret1) == len(subj_ret2))
        self.assertTrue(subj_ret1[0] == subj_ret2[0])
        self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id)

        subj_ret3 = self.resource_registry_service.find_subjects(None, PRED.hasInfo, user_info_obj_id, True)
        subj_ret4 = self.resource_registry_service.find_subjects(None, None, read_user_info_obj, True)
        self.assertTrue(len(subj_ret3) == len(subj_ret4))
        self.assertTrue(subj_ret3[0] == subj_ret4[0])
        self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id)

        subj_ret5 = self.resource_registry_service.find_subjects(None, PRED.hasInfo, user_info_obj_id, False)
        subj_ret6 = self.resource_registry_service.find_subjects(None, None, read_user_info_obj, False)
        self.assertTrue(len(subj_ret5) == len(subj_ret6))
        self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id)
        self.assertTrue(subj_ret5[1][0]._id == subj_ret6[1][0]._id)

        # Find subjects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide object")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_subjects(RT.UserCredentials, PRED.bogus, user_info_obj_id, True)
        self.assertTrue(cm.exception.message == "bogus")

        # Non-matching predicate/type combinations yield empty result lists
        ret = self.resource_registry_service.find_subjects(RT.UserInfo, PRED.hasCredentials, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_subjects(RT.UserCredentials, PRED.hasInfo, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(RT.UserCredentials, PRED.hasInfo, user_info_obj, True)
        self.assertTrue(cm.exception.message == "Object id not available in object")

        # Find objects (good cases)
        subj_ret1 = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, RT.UserInfo, True)
        subj_ret2 = self.resource_registry_service.find_objects(
            read_actor_identity_obj, PRED.hasInfo, RT.UserInfo, True
        )
        self.assertTrue(len(subj_ret1) == len(subj_ret2))
        self.assertTrue(subj_ret1[0] == subj_ret2[0])
        self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id)

        subj_ret3 = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, None, True)
        subj_ret4 = self.resource_registry_service.find_objects(actor_identity_obj_id, None, None, True)
        self.assertTrue(len(subj_ret3) == len(subj_ret4))
        self.assertTrue(subj_ret3[0] == subj_ret4[0])
        self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id)

        subj_ret5 = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, None, False)
        subj_ret6 = self.resource_registry_service.find_objects(read_actor_identity_obj, None, None, False)
        self.assertTrue(len(subj_ret5) == len(subj_ret6))
        self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id)
        self.assertTrue(subj_ret5[1][0]._id == subj_ret6[1][0]._id)

        # Find objects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide subject")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.bogus, RT.UserCredentials, True)
        self.assertTrue(cm.exception.message == "bogus")

        ret = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasCredentials, RT.ActorIdentity, True
        )
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, RT.UserCredentials, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(actor_identity_obj, PRED.hasInfo, RT.UserInfo, True)
        self.assertTrue(cm.exception.message == "Object id not available in subject")

        # Get association (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(None, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(actor_identity_obj_id, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(None, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(actor_identity_obj, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Object id not available in subject")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(actor_identity_obj_id, None, user_info_obj)
        self.assertTrue(cm.exception.message == "Object id not available in object")

        # Delete one of the associations; get_association should now resolve
        # unambiguously to the remaining one
        self.resource_registry_service.delete_association(assoc_id2)
        assoc = self.resource_registry_service.get_association(actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(assoc._id == assoc_id1)

        # Delete (bad cases)
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete_association("bogus")
        self.assertTrue(cm.exception.message == "Object with id bogus does not exist.")

        # Delete other association
        self.resource_registry_service.delete_association(assoc_id1)

        # Delete resources
        self.resource_registry_service.delete(actor_identity_obj_id)
        self.resource_registry_service.delete(user_info_obj_id)

    def test_find_resources(self):
        """Verify find_resources by type/name and by type/lcstate, including
        the unsupported name+lcstate combination."""
        # Searching by name together with an lcstate filter is not supported
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_resources(RT.UserInfo, LCS.DRAFT, "name", False)
        self.assertTrue(cm.exception.message == "find by name does not support lcstate")

        ret = self.resource_registry_service.find_resources(RT.UserInfo, None, "name", False)
        self.assertTrue(len(ret[0]) == 0)

        # Instantiate an object
        obj = IonObject("InstrumentDevice", name="name")

        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        # Find by type + name
        ret = self.resource_registry_service.find_resources(RT.InstrumentDevice, None, "name", False)
        self.assertTrue(len(ret[0]) == 1)
        self.assertTrue(ret[0][0]._id == read_obj._id)

        # Find by type + lifecycle state (new resources start in DRAFT)
        ret = self.resource_registry_service.find_resources(RT.InstrumentDevice, LCS.DRAFT, None, False)
        self.assertTrue(len(ret[0]) == 1)
        self.assertTrue(ret[0][0]._id == read_obj._id)

    # @attr('INT', group='coirr1')
    # class TestResourceRegistry1(IonIntegrationTestCase):
    #
    #     def setUp(self):
    #         # Start container
    #         self._start_container()
    #         self.container.start_rel_from_url('res/deploy/r2coi.yml')
    #
    #         # Now create client to bank service
    #         self.resource_registry_service = ResourceRegistryServiceClient(node=self.container.node)

    def test_attach(self):
        """Exercise the attachment API: create BLOB and ASCII (base64)
        attachments, read them back, list with ordering/limit options, and
        delete them.

        The binary payload is a small PNG image embedded as an escaped string
        literal. NOTE(review): base64.encodestring/decodestring are the
        Python 2 spellings (removed in Python 3.9+).
        """
        binary = "\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x03\x00\x00\x00(-\x0fS\x00\x00\x00\x03sBIT\x08\x08\x08\xdb\xe1O\xe0\x00\x00\x00~PLTEf3\x00\xfc\xf7\xe0\xee\xcc\x00\xd3\xa0\x00\xcc\x99\x00\xec\xcdc\x9fl\x00\xdd\xb2\x00\xff\xff\xff|I\x00\xf9\xdb\x00\xdd\xb5\x19\xd9\xad\x10\xb6\x83\x00\xf8\xd6\x00\xf2\xc5\x00\xd8\xab\x00n;\x00\xff\xcc\x00\xd6\xa4\t\xeb\xb8\x00\x83Q\x00\xadz\x00\xff\xde\x00\xff\xd6\x00\xd6\xa3\x00\xdf\xaf\x00\xde\xad\x10\xbc\x8e\x00\xec\xbe\x00\xec\xd4d\xff\xe3\x00tA\x00\xf6\xc4\x00\xf6\xce\x00\xa5u\x00\xde\xa5\x00\xf7\xbd\x00\xd6\xad\x08\xdd\xaf\x19\x8cR\x00\xea\xb7\x00\xee\xe9\xdf\xc5\x00\x00\x00\tpHYs\x00\x00\n\xf0\x00\x00\n\xf0\x01B\xac4\x98\x00\x00\x00\x1ctEXtSoftware\x00Adobe Fireworks CS4\x06\xb2\xd3\xa0\x00\x00\x00\x15tEXtCreation Time\x0029/4/09Oq\xfdE\x00\x00\x00\xadIDAT\x18\x95M\x8f\x8d\x0e\x820\x0c\x84;ZdC~f\x07\xb2\x11D\x86\x89\xe8\xfb\xbf\xa0+h\xe2\x97\\\xd2^\x93\xb6\x07:1\x9f)q\x9e\xa5\x06\xad\xd5\x13\x8b\xac,\xb3\x02\x9d\x12C\xa1-\xef;M\x08*\x19\xce\x0e?\x1a\xeb4\xcc\xd4\x0c\x831\x87V\xca\xa1\x1a\xd3\x08@\xe4\xbd\xb7\x15P;\xc8\xd4{\x91\xbf\x11\x90\xffg\xdd\x8di\xfa\xb6\x0bs2Z\xff\xe8yg2\xdc\x11T\x96\xc7\x05\xa5\xef\x96+\xa7\xa59E\xae\xe1\x84cm^1\xa6\xb3\xda\x85\xc8\xd8/\x17se\x0eN^'\x8c\xc7\x8e\x88\xa8\xf6p\x8e\xc2;\xc6.\xd0\x11.\x91o\x12\x7f\xcb\xa5\xfe\x00\x89]\x10:\xf5\x00\x0e\xbf\x00\x00\x00\x00IEND\xaeB`\x82"

        # Owner creation tests
        instrument = IonObject("InstrumentDevice", name="instrument")
        iid, _ = self.resource_registry_service.create(instrument)

        # BLOB attachment: content stored as-is
        att = Attachment(content=binary, attachment_type=AttachmentType.BLOB)
        aid1 = self.resource_registry_service.create_attachment(iid, att)

        att1 = self.resource_registry_service.read_attachment(aid1)
        self.assertEquals(binary, att1.content)

        # ASCII attachment: content stored base64-encoded, decoded on read
        import base64
        att = Attachment(content=base64.encodestring(binary), attachment_type=AttachmentType.ASCII)
        aid2 = self.resource_registry_service.create_attachment(iid, att)

        att1 = self.resource_registry_service.read_attachment(aid2)
        self.assertEquals(binary, base64.decodestring(att1.content))

        # Listing: default order is creation order (aid1 first)
        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True)
        self.assertEquals(att_ids, [aid1, aid2])

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True, descending=True)
        self.assertEquals(att_ids, [aid2, aid1])

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True, descending=True, limit=1)
        self.assertEquals(att_ids, [aid2])

        atts = self.resource_registry_service.find_attachments(iid, id_only=False, limit=1)
        self.assertEquals(atts[0].content, att1.content)

        # Deleting attachments removes them from the listing
        self.resource_registry_service.delete_attachment(aid1)

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True)
        self.assertEquals(att_ids, [aid2])

        self.resource_registry_service.delete_attachment(aid2)

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True)
        self.assertEquals(att_ids, [])
class TestPlatformInstrument(BaseIntTestPlatform): def setUp(self): self._start_container() self._pp = pprint.PrettyPrinter() log.debug("oms_uri = %s", OMS_URI) self.oms = CIOMSClientFactory.create_instance(OMS_URI) #url = OmsTestMixin.start_http_server() #log.debug("TestPlatformInstrument:setup http url %s", url) # #result = self.oms.event.register_event_listener(url) #log.debug("TestPlatformInstrument:setup register_event_listener result %s", result) self.container.start_rel_from_url('res/deploy/r2deploy.yml') # Now create client to DataProductManagementService self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.pubsubclient = PubsubManagementServiceClient(node=self.container.node) self.imsclient = InstrumentManagementServiceClient(node=self.container.node) self.omsclient = ObservatoryManagementServiceClient(node=self.container.node) self.datasetclient = DatasetManagementServiceClient(node=self.container.node) self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node) self.dpclient = DataProductManagementServiceClient(node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node) self.dataset_management = DatasetManagementServiceClient() self.RR2 = EnhancedResourceRegistryClient(self.rrclient) self.org_id = self.RR2.create(any_old(RT.Org)) log.debug("Org created: %s", self.org_id) # see _set_receive_timeout self._receive_timeout = 300 self.instrument_device_id = '' self.platform_device_id = '' self.platform_site_id = '' self.platform_agent_instance_id = '' self._pa_client = '' def done(): CIOMSClientFactory.destroy_instance(self.oms) event_notifications = OmsTestMixin.stop_http_server() log.info("event_notifications = %s" % str(event_notifications)) self.addCleanup(done) @unittest.skip('Must be run locally...') def test_platform_with_instrument_streaming(self): # # The following is with just a single platform and the single # instrument "SBE37_SIM_08", which 
corresponds to the one on port 4008. # #load the paramaters and the param dicts necesssary for the VEL3D log.debug( "load params------------------------------------------------------------------------------") self._load_params() log.debug( " _register_oms_listener------------------------------------------------------------------------------") self._register_oms_listener() #create the instrument device/agent/mode log.debug( "---------- create_instrument_resources ----------" ) self._create_instrument_resources() #create the platform device, agent and instance log.debug( "---------- create_platform_configuration ----------" ) self._create_platform_configuration('LPJBox_CI_Ben_Hall') self.rrclient.create_association(subject=self.platform_device_id, predicate=PRED.hasDevice, object=self.instrument_device_id) log.debug( "---------- start_platform ----------" ) self._start_platform() self.addCleanup(self._stop_platform) # get everything in command mode: self._ping_agent() log.debug( " ---------- initialize ----------" ) self._initialize() _ia_client = ResourceAgentClient(self.instrument_device_id, process=FakeProcess()) state = _ia_client.get_agent_state() log.info("TestPlatformInstrument get_agent_state %s", state) log.debug( " ---------- go_active ----------" ) self._go_active() state = _ia_client.get_agent_state() log.info("TestPlatformInstrument get_agent_state %s", state) log.debug( "---------- run ----------" ) self._run() gevent.sleep(2) log.debug( " ---------- _start_resource_monitoring ----------" ) self._start_resource_monitoring() gevent.sleep(2) # # # verify the instrument is command state: # state = ia_client.get_agent_state() # log.debug(" TestPlatformInstrument get_agent_state: %s", state) # self.assertEqual(state, ResourceAgentState.COMMAND) _stop_resource_monitoring log.debug( " ---------- _stop_resource_monitoring ----------" ) self._stop_resource_monitoring() gevent.sleep(2) log.debug( " ---------- go_inactive ----------" ) self._go_inactive() state = 
_ia_client.get_agent_state() log.info("TestPlatformInstrument get_agent_state %s", state) self._reset() self._shutdown() def _get_platform_attributes(self): log.debug( " ----------get_platform_attributes ----------") attr_infos = self.oms.attr.get_platform_attributes('LPJBox_CI_Ben_Hall') log.debug('_get_platform_attributes: %s', self._pp.pformat(attr_infos)) attrs = attr_infos['LPJBox_CI_Ben_Hall'] for attrid, arrinfo in attrs.iteritems(): arrinfo['attr_id'] = attrid log.debug('_get_platform_attributes: %s', self._pp.pformat(attrs)) return attrs def _load_params(self): log.info(" ---------- load_params ----------") # load_parameter_scenarios self.container.spawn_process("Loader", "ion.processes.bootstrap.ion_loader", "IONLoader", config=dict( op="load", scenario="BETA", path="master", categories="ParameterFunctions,ParameterDefs,ParameterDictionary,StreamDefinition", clearcols="owner_id,org_ids", assets="res/preload/r2_ioc/ooi_assets", parseooi="True", )) def _create_platform_configuration(self, platform_id, parent_platform_id=None): """ This method is an adaptation of test_agent_instance_config in test_instrument_management_service_integration.py @param platform_id @param parent_platform_id @return a DotDict with various of the constructed elements associated to the platform. 
""" tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() param_dict_name = 'platform_eng_parsed' parsed_rpdict_id = self.dataset_management.read_parameter_dictionary_by_name( param_dict_name, id_only=True) self.parsed_stream_def_id = self.pubsubclient.create_stream_definition( name='parsed', parameter_dictionary_id=parsed_rpdict_id) driver_config = PLTFRM_DVR_CONFIG driver_config['attributes'] = self._get_platform_attributes() #self._platform_attributes[platform_id] #OMS returning an error for port.get_platform_ports #driver_config['ports'] = self._platform_ports[platform_id] log.debug("driver_config: %s", driver_config) # instance creation platform_agent_instance_obj = any_old(RT.PlatformAgentInstance, { 'driver_config': driver_config}) platform_agent_instance_obj.agent_config = { 'platform_config': { 'platform_id': 'LPJBox_CI_Ben_Hall', 'parent_platform_id': None } } self.platform_agent_instance_id = self.imsclient.create_platform_agent_instance(platform_agent_instance_obj) # agent creation platform_agent_obj = any_old(RT.PlatformAgent, { "stream_configurations": self._get_platform_stream_configs(), 'driver_module': PLTFRM_DVR_MOD, 'driver_class': PLTFRM_DVR_CLS}) platform_agent_id = self.imsclient.create_platform_agent(platform_agent_obj) # device creation self.platform_device_id = self.imsclient.create_platform_device(any_old(RT.PlatformDevice)) # data product creation dp_obj = any_old(RT.DataProduct, {"temporal_domain":tdom, "spatial_domain": sdom}) dp_id = self.dpclient.create_data_product(data_product=dp_obj, stream_definition_id=self.parsed_stream_def_id) self.damsclient.assign_data_product(input_resource_id=self.platform_device_id, data_product_id=dp_id) self.dpclient.activate_data_product_persistence(data_product_id=dp_id) self.addCleanup(self.dpclient.delete_data_product, dp_id) # assignments self.RR2.assign_platform_agent_instance_to_platform_device_with_has_agent_instance(self.platform_agent_instance_id, self.platform_device_id) 
# --- tail of _create_platform_configuration; its "def" line is above this chunk ---
# Wire the platform agent definition and org membership for the platform just built.
self.RR2.assign_platform_agent_to_platform_agent_instance_with_has_agent_definition(platform_agent_id, self.platform_agent_instance_id)
# NOTE(review): this passes the agent *instance* id where a device id looks expected
# from the helper name -- confirm against the RR2 helper signature.
self.RR2.assign_platform_device_to_org_with_has_resource(self.platform_agent_instance_id, self.org_id)

#######################################
# dataset
log.debug('data product = %s', dp_id)
stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, None, True)
log.debug('Data product stream_ids = %s', stream_ids)
stream_id = stream_ids[0]

# Retrieve the id of the OUTPUT stream from the out Data Product
dataset_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasDataset, RT.Dataset, True)
log.debug('Data set for data_product_id1 = %s', dataset_ids[0])
#######################################

# Create a PlatformSite plus a Deployment tying the platform device to it.
log.debug('_create_platform_site_and_deployment platform_device_id: %s', self.platform_device_id)

site_object = IonObject(RT.PlatformSite, name='PlatformSite1')
self.platform_site_id = self.omsclient.create_platform_site(platform_site=site_object, parent_id='')
log.debug('_create_platform_site_and_deployment site id: %s', self.platform_site_id)

#create supporting objects for the Deployment resource
# 1. temporal constraint
# find current deployment using time constraints
current_time = int( calendar.timegm(time.gmtime()) )
# two years on either side of current time (63115200 s ~= 2 years)
start = current_time - 63115200
end = current_time + 63115200
temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=str(start), end_datetime=str(end))

# 2. PlatformPort object which defines device to port map
platform_port_obj= IonObject(OT.PlatformPort, reference_designator = 'GA01SUMO-FI003-01-CTDMO0999', port_type=PortTypeEnum.UPLINK, ip_address='0')

# now create the Deployment
deployment_obj = IonObject(RT.Deployment,
                           name='TestPlatformDeployment',
                           description='some new deployment',
                           context=IonObject(OT.CabledNodeDeploymentContext),
                           constraint_list=[temporal_bounds],
                           port_assignments={self.platform_device_id:platform_port_obj})

platform_deployment_id = self.omsclient.create_deployment(deployment=deployment_obj, site_id=self.platform_site_id, device_id=self.platform_device_id)
log.debug('_create_platform_site_and_deployment deployment_id: %s', platform_deployment_id)

# read back for logging only
deploy_obj2 = self.omsclient.read_deployment(platform_deployment_id)
log.debug('_create_platform_site_and_deployment deploy_obj2 : %s', deploy_obj2)
return self.platform_site_id, platform_deployment_id

def _create_instrument_resources(self):
    """
    Build the full chain of instrument resources used by the test:
    model -> agent -> device -> agent instance, a running port agent,
    stream definitions, three persisted data products (two parsed, one
    raw), then an InstrumentSite and a Deployment mapping the device to
    a port.  Stores the new device id in self.instrument_device_id.
    """
    # Create InstrumentModel
    instModel_obj = IonObject(RT.InstrumentModel, name='VEL3D', description="VEL3D")
    instModel_id = self.imsclient.create_instrument_model(instModel_obj)
    log.debug( 'new InstrumentModel id = %s ', instModel_id)

    # One StreamConfiguration per expected output stream of the agent.
    raw_config = StreamConfiguration(stream_name='raw', parameter_dictionary_name='raw' )
    vel3d_b_sample = StreamConfiguration(stream_name='vel3d_b_sample', parameter_dictionary_name='vel3d_b_sample')
    vel3d_b_engineering = StreamConfiguration(stream_name='vel3d_b_engineering', parameter_dictionary_name='vel3d_b_engineering')

    # Create InstrumentAgent (driver egg is fetched from the OOI release repo)
    instAgent_obj = IonObject(RT.InstrumentAgent,
                              name='agent007',
                              description="SBE37IMAgent",
                              driver_uri="http://sddevrepo.oceanobservatories.org/releases/nobska_mavs4_ooicore-0.0.7-py2.7.egg",
                              stream_configurations = [raw_config, vel3d_b_sample, vel3d_b_engineering])
    instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)
    log.debug('new InstrumentAgent id = %s', instAgent_id)

    self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

    # Create InstrumentDevice
    instDevice_obj = IonObject(RT.InstrumentDevice, name='VEL3DDevice', description="VEL3DDevice", serial_number="12345" )
    self.instrument_device_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
    self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, self.instrument_device_id)

    # Hard-coded lab endpoint -- only reachable on the local test network.
    port_agent_config = {
        'device_addr': '10.180.80.6',
        'device_port': 2101,
        'process_type': PortAgentProcessType.UNIX,
        'binary_path': "port_agent",
        'port_agent_addr': 'localhost',
        'command_port': 1025,
        'data_port': 1026,
        'log_level': 5,
        'type': PortAgentType.ETHERNET
    }

    instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance,
                                      name='VEL3DAgentInstance',
                                      description="VEL3DAgentInstance",
                                      port_agent_config = port_agent_config,
                                      alerts= [])

    instAgentInstance_id = self.imsclient.create_instrument_agent_instance(instAgentInstance_obj, instAgent_id, self.instrument_device_id)

    # Launch the port agent now so the driver comms config gets filled in.
    self._start_port_agent(self.imsclient.read_instrument_agent_instance(instAgentInstance_id))

    # Stream definitions are looked up by (preloaded) parameter dictionary name.
    vel3d_b_sample_pdict_id = self.dataset_management.read_parameter_dictionary_by_name('vel3d_b_sample', id_only=True)
    vel3d_b_sample_stream_def_id = self.pubsubclient.create_stream_definition(name='vel3d_b_sample', parameter_dictionary_id=vel3d_b_sample_pdict_id)

    vel3d_b_engineering_pdict_id = self.dataset_management.read_parameter_dictionary_by_name('vel3d_b_engineering', id_only=True)
    vel3d_b_engineering_stream_def_id = self.pubsubclient.create_stream_definition(name='vel3d_b_engineering', parameter_dictionary_id=vel3d_b_engineering_pdict_id)

    raw_pdict_id = self.dataset_management.read_parameter_dictionary_by_name('raw', id_only=True)
    raw_stream_def_id = self.pubsubclient.create_stream_definition(name='raw', parameter_dictionary_id=raw_pdict_id)

    #-------------------------------
    # Create Raw and Parsed Data Products for the device
    #-------------------------------
    tdom, sdom = time_series_domain()
    sdom = sdom.dump()
    tdom = tdom.dump()

    dp_obj = IonObject(RT.DataProduct,
                       name='vel3d_b_sample',
                       description='vel3d_b_sample',
                       temporal_domain = tdom,
                       spatial_domain = sdom)

    data_product_id1 = self.dpclient.create_data_product(data_product=dp_obj, stream_definition_id=vel3d_b_sample_stream_def_id)
    self.damsclient.assign_data_product(input_resource_id=self.instrument_device_id, data_product_id=data_product_id1)
    self.dpclient.activate_data_product_persistence(data_product_id=data_product_id1)

    dp_obj = IonObject(RT.DataProduct,
                       name='vel3d_b_engineering',
                       description='vel3d_b_engineering',
                       temporal_domain = tdom,
                       spatial_domain = sdom)
    data_product_id2 = self.dpclient.create_data_product(data_product=dp_obj, stream_definition_id=vel3d_b_engineering_stream_def_id)
    self.damsclient.assign_data_product(input_resource_id=self.instrument_device_id, data_product_id=data_product_id2)
    self.dpclient.activate_data_product_persistence(data_product_id=data_product_id2)

    dp_obj = IonObject(RT.DataProduct,
                       name='the raw data',
                       description='raw stream test',
                       temporal_domain = tdom,
                       spatial_domain = sdom)
    data_product_id3 = self.dpclient.create_data_product(data_product=dp_obj, stream_definition_id=raw_stream_def_id)
    self.damsclient.assign_data_product(input_resource_id=self.instrument_device_id, data_product_id=data_product_id3)
    self.dpclient.activate_data_product_persistence(data_product_id=data_product_id3)

    #create instrument site and associated deployment
    site_object = IonObject(RT.InstrumentSite, name='InstrumentSite1')
    instrument_site_id = self.omsclient.create_instrument_site(instrument_site=site_object, parent_id=self.platform_site_id)
    log.debug('_create_instrument_site_and_deployment site id: %s', instrument_site_id)

    #create supporting objects for the Deployment resource
    # 1. temporal constraint
    # find current deployment using time constraints
    current_time = int( calendar.timegm(time.gmtime()) )
    # two years on either side of current time
    start = current_time - 63115200
    end = current_time + 63115200
    temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=str(start), end_datetime=str(end))
    # 2. PlatformPort object which defines device to port map
    platform_port_obj= IonObject(OT.PlatformPort, reference_designator = 'GA01SUMO-FI003-03-CTDMO0999', port_type=PortTypeEnum.PAYLOAD, ip_address='0')

    # now create the Deployment
    deployment_obj = IonObject(RT.Deployment,
                               name='TestInstrumentDeployment',
                               description='some new deployment',
                               context=IonObject(OT.CabledInstrumentDeploymentContext),
                               constraint_list=[temporal_bounds],
                               port_assignments={self.instrument_device_id:platform_port_obj})

    instrument_deployment_id = self.omsclient.create_deployment(deployment=deployment_obj, site_id=instrument_site_id, device_id=self.instrument_device_id)
    log.debug('_create_instrument_site_and_deployment deployment_id: %s', instrument_deployment_id)

def _start_port_agent(self, instrument_agent_instance_obj=None):
    """
    Construct and start the port agent, ONLY NEEDED FOR INSTRUMENT AGENTS.

    Blocks until the port agent is up (or PortAgentProcess times out),
    rewrites the instance's driver_config with the actual comms host,
    ports and pid, persists it via IMS, and returns the re-read instance.
    """
    _port_agent_config = instrument_agent_instance_obj.port_agent_config

    # It blocks until the port agent starts up or a timeout
    _pagent = PortAgentProcess.launch_process(_port_agent_config, test_mode = True)
    pid = _pagent.get_pid()
    port = _pagent.get_data_port()
    cmd_port = _pagent.get_command_port()
    log.info("IMS:_start_pagent returned from PortAgentProcess.launch_process pid: %s ", pid)

    # Hack to get ready for DEMO. Further though needs to be put int
    # how we pass this config info around.
    host = 'localhost'

    driver_config = instrument_agent_instance_obj.driver_config
    comms_config = driver_config.get('comms_config')
    if comms_config:
        host = comms_config.get('addr')
    else:
        log.warn("No comms_config specified, using '%s'" % host)

    # Configure driver to use port agent port number.
    instrument_agent_instance_obj.driver_config['comms_config'] = {
        'addr' : host,
        'cmd_port' : cmd_port,
        'port' : port
    }
    instrument_agent_instance_obj.driver_config['pagent_pid'] = pid
    self.imsclient.update_instrument_agent_instance(instrument_agent_instance_obj)
    return self.imsclient.read_instrument_agent_instance(instrument_agent_instance_obj._id)

def _start_platform(self):
    """
    Starts the given platform waiting for it to transition to the
    UNINITIALIZED state (note that the agent starts in the LAUNCHING state).

    More in concrete the sequence of steps here are:
    - prepares subscriber to receive the UNINITIALIZED state transition
    - launches the platform process
    - waits for the start of the process
    - waits for the transition to the UNINITIALIZED state
    """
    ##############################################################
    # prepare to receive the UNINITIALIZED state transition:
    async_res = AsyncResult()

    def consume_event(evt, *args, **kwargs):
        log.debug("Got ResourceAgentStateEvent %s from origin %r", evt.state, evt.origin)
        if evt.state == PlatformAgentState.UNINITIALIZED:
            async_res.set(evt)

    # start subscriber:
    sub = EventSubscriber(event_type="ResourceAgentStateEvent",
                          origin=self.platform_device_id,
                          callback=consume_event)
    sub.start()
    log.info("registered event subscriber to wait for state=%r from origin %r",
             PlatformAgentState.UNINITIALIZED, self.platform_device_id)
    #self._event_subscribers.append(sub)
    sub._ready_event.wait(timeout=EVENT_TIMEOUT)

    ##############################################################
    # now start the platform:
    agent_instance_id = self.platform_agent_instance_id
    log.debug("about to call start_platform_agent_instance with id=%s", agent_instance_id)
    pid = self.imsclient.start_platform_agent_instance(platform_agent_instance_id=agent_instance_id)
    log.debug("start_platform_agent_instance returned pid=%s", pid)

    #wait for start
    agent_instance_obj = self.imsclient.read_platform_agent_instance(agent_instance_id)
    # NOTE(review): "await" is a reserved word from Python 3.7 on; this attribute
    # call is only valid under the Python 2 runtime this file targets.
    gate = AgentProcessStateGate(self.processdispatchclient.read_process,
                                 self.platform_device_id,
                                 ProcessStateEnum.RUNNING)
    self.assertTrue(gate.await(90), "The platform agent instance did not spawn in 90 seconds")

    # Start a resource agent client to talk with the agent.
    self._pa_client = ResourceAgentClient(self.platform_device_id,
                                          name=gate.process_id,
                                          process=FakeProcess())
    log.debug("got platform agent client %s", str(self._pa_client))

    ##############################################################
    # wait for the UNINITIALIZED event:
    async_res.get(timeout=self._receive_timeout)

def _register_oms_listener(self):
    """
    Register an HTTP event listener with the external OMS endpoint,
    subscribe locally to OMSDeviceStatusEvent alerts, then fire a
    synthetic OMS test event.
    """
    #load the paramaters and the param dicts necesssary for the VEL3D
    log.debug( "---------- connect_to_oms ---------- ")
    log.debug("oms_uri = %s", OMS_URI)
    self.oms = CIOMSClientFactory.create_instance(OMS_URI)

    #buddha url
    url = "http://10.22.88.168:5000/ion-service/oms_event"
    log.info("test_oms_events_receive:setup http url %s", url)

    result = self.oms.event.register_event_listener(url)
    log.debug("_register_oms_listener register_event_listener result %s", result)

    #-------------------------------------------------------------------------------------
    # Set up the subscriber to catch the alert event
    #-------------------------------------------------------------------------------------

    def callback_for_alert(event, *args, **kwargs):
        log.debug("caught an OMSDeviceStatusEvent: %s", event)
        # NOTE(review): self.catch_alert is not initialized anywhere in this
        # chunk -- presumably a queue created elsewhere; confirm before relying on it.
        self.catch_alert.put(event)

    self.event_subscriber = EventSubscriber(event_type='OMSDeviceStatusEvent',
                                            callback=callback_for_alert)
    self.event_subscriber.start()
    self.addCleanup(self.event_subscriber.stop)

    # NOTE(review): the 'group ' key carries a trailing space -- looks like a
    # typo, but it is sent verbatim to the external OMS API, so left untouched.
    result = self.oms.event.generate_test_event({'platform_id': 'fake_platform_id', 'message': "fake event triggered from CI using OMS' generate_test_event", 'severity': '3', 'group ': 'power'})
    log.debug("_register_oms_listener generate_test_event result %s", result)

def _stop_platform(self):
    """Best-effort shutdown of the platform agent instance; tolerates an already-dead agent."""
    try:
        self.IMS.stop_platform_agent_instance(self.platform_agent_instance_id)
    except Exception:
        log.warn(
            "platform_id=%r: Exception in IMS.stop_platform_agent_instance with "
            "platform_agent_instance_id = %r. Perhaps already dead.",
            self.platform_device_id, self.platform_agent_instance_id)
class IngestionManagementIntTest(IonIntegrationTestCase):
    """Integration tests for the ingestion management service (CRUD + listing)."""

    def setUp(self):
        """Start the container, deploy r2, and build the service clients used by the tests."""
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.ingestion_management = IngestionManagementServiceClient()
        self.resource_registry = ResourceRegistryServiceClient()
        self.pubsub_management = PubsubManagementServiceClient()
        self.ingest_name = 'basic'      # name used for the test ingestion configuration
        self.exchange = 'testdata'      # exchange point the test config is bound to

    @staticmethod
    def clean_subscriptions():
        """
        Best-effort teardown helper: for every ingestion configuration, remove its
        subscription associations, deactivate and delete the subscriptions.
        Builds its own clients so it can be called outside a test instance.
        """
        ingestion_management = IngestionManagementServiceClient()
        pubsub = PubsubManagementServiceClient()
        rr = ResourceRegistryServiceClient()
        ingestion_config_ids = ingestion_management.list_ingestion_configurations(id_only=True)
        for ic in ingestion_config_ids:
            subscription_ids, assocs = rr.find_objects(subject=ic, predicate=PRED.hasSubscription, id_only=True)
            for subscription_id, assoc in zip(subscription_ids, assocs):
                rr.delete_association(assoc)
                try:
                    pubsub.deactivate_subscription(subscription_id)
                # was a bare "except:" -- narrowed so SystemExit/KeyboardInterrupt
                # are not swallowed; deactivation failure is still non-fatal since
                # the subscription may simply already be inactive.
                except Exception:
                    log.exception("Unable to decativate subscription: %s", subscription_id)
                pubsub.delete_subscription(subscription_id)

    def create_ingest_config(self):
        """Create and return the id of a one-queue ingestion configuration."""
        self.queue = IngestionQueue(name='test', type='testdata')

        # Create the ingestion config
        ingestion_config_id = self.ingestion_management.create_ingestion_configuration(
            name=self.ingest_name,
            exchange_point_id=self.exchange,
            queues=[self.queue])
        return ingestion_config_id

    def test_ingestion_config_crud(self):
        """Round-trip create/read/update/delete of an ingestion configuration."""
        ingestion_config_id = self.create_ingest_config()

        ingestion_config = self.ingestion_management.read_ingestion_configuration(ingestion_config_id)
        self.assertTrue(ingestion_config.name == self.ingest_name)
        self.assertTrue(ingestion_config.queues[0].name == 'test')
        self.assertTrue(ingestion_config.queues[0].type == 'testdata')

        ingestion_config.name = 'another'
        self.ingestion_management.update_ingestion_configuration(ingestion_config)

        # Create an association just to make sure that it will delete them
        sub = Subscription()
        sub_id, _ = self.resource_registry.create(sub)
        assoc_id, _ = self.resource_registry.create_association(
            subject=ingestion_config_id, predicate=PRED.hasSubscription, object=sub_id)

        self.ingestion_management.delete_ingestion_configuration(ingestion_config_id)

        # deleting the config must also have removed the hasSubscription association
        with self.assertRaises(NotFound):
            self.resource_registry.read(assoc_id)

    def test_list_ingestion(self):
        """A freshly created config must appear in the id-only listing."""
        # Create the ingest_config
        config_id = self.create_ingest_config()
        retval = self.ingestion_management.list_ingestion_configurations(id_only=True)
        # Nice thing about this is that it breaks if r2dm adds an ingest_config
        self.assertTrue(config_id in retval)
class TestPlatformInstrument(BaseIntTestPlatform):
    """
    Integration test driving a single platform ('LPJBox_CI_Ben_Hall') with a
    single VEL3D instrument attached, streaming through a locally running
    port agent and a live OMS endpoint.  Requires local lab resources, hence
    the @unittest.skip on the test method.
    """

    def setUp(self):
        """Start the container, the OMS client, all service clients, and register cleanup."""
        self._start_container()

        self._pp = pprint.PrettyPrinter()

        log.debug("oms_uri = %s", OMS_URI)
        self.oms = CIOMSClientFactory.create_instance(OMS_URI)

        #url = OmsTestMixin.start_http_server()
        #log.debug("TestPlatformInstrument:setup http url %s", url)
        #
        #result = self.oms.event.register_event_listener(url)
        #log.debug("TestPlatformInstrument:setup register_event_listener result %s", result)

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.pubsubclient = PubsubManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.datasetclient = DatasetManagementServiceClient(node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()
        self.RR2 = EnhancedResourceRegistryClient(self.rrclient)

        self.org_id = self.RR2.create(any_old(RT.Org))
        log.debug("Org created: %s", self.org_id)

        # see _set_receive_timeout
        self._receive_timeout = 300

        # ids filled in by the _create_* helpers below
        self.instrument_device_id = ''
        self.platform_device_id = ''
        self.platform_site_id = ''
        self.platform_agent_instance_id = ''
        self._pa_client = ''

        def done():
            CIOMSClientFactory.destroy_instance(self.oms)
            # NOTE(review): start_http_server is commented out above, yet
            # stop_http_server is still called here -- confirm it tolerates
            # never having been started.
            event_notifications = OmsTestMixin.stop_http_server()
            log.info("event_notifications = %s" % str(event_notifications))

        self.addCleanup(done)

    @unittest.skip('Must be run locally...')
    def test_platform_with_instrument_streaming(self):
        #
        # The following is with just a single platform and the single
        # instrument "SBE37_SIM_08", which corresponds to the one on port 4008.
        #

        #load the paramaters and the param dicts necesssary for the VEL3D
        log.debug("load params------------------------------------------------------------------------------")
        self._load_params()

        log.debug(" _register_oms_listener------------------------------------------------------------------------------")
        self._register_oms_listener()

        #create the instrument device/agent/mode
        log.debug("---------- create_instrument_resources ----------")
        self._create_instrument_resources()

        #create the platform device, agent and instance
        log.debug("---------- create_platform_configuration ----------")
        self._create_platform_configuration('LPJBox_CI_Ben_Hall')

        # attach the instrument device to the platform device
        self.rrclient.create_association(subject=self.platform_device_id, predicate=PRED.hasDevice, object=self.instrument_device_id)

        log.debug("---------- start_platform ----------")
        self._start_platform()
        self.addCleanup(self._stop_platform)

        # get everything in command mode:
        self._ping_agent()
        log.debug(" ---------- initialize ----------")
        self._initialize()

        _ia_client = ResourceAgentClient(self.instrument_device_id, process=FakeProcess())
        state = _ia_client.get_agent_state()
        log.info("TestPlatformInstrument get_agent_state %s", state)

        log.debug(" ---------- go_active ----------")
        self._go_active()
        state = _ia_client.get_agent_state()
        log.info("TestPlatformInstrument get_agent_state %s", state)

        log.debug("---------- run ----------")
        self._run()
        gevent.sleep(2)

        log.debug(" ---------- _start_resource_monitoring ----------")
        self._start_resource_monitoring()
        gevent.sleep(2)
        #
        # # verify the instrument is command state:
        # state = ia_client.get_agent_state()
        # log.debug(" TestPlatformInstrument get_agent_state: %s", state)
        # self.assertEqual(state, ResourceAgentState.COMMAND)

        # FIX: a stray bare "_stop_resource_monitoring" expression stood here,
        # which would raise NameError when this (locally run) test executes;
        # the intended call is the one below.
        log.debug(" ---------- _stop_resource_monitoring ----------")
        self._stop_resource_monitoring()
        gevent.sleep(2)

        log.debug(" ---------- go_inactive ----------")
        self._go_inactive()
        state = _ia_client.get_agent_state()
        log.info("TestPlatformInstrument get_agent_state %s", state)

        self._reset()
        self._shutdown()

    def _get_platform_attributes(self):
        """
        Fetch the attribute definitions for 'LPJBox_CI_Ben_Hall' from OMS,
        tag each with its attr_id, and return the resulting dict.
        """
        log.debug(" ----------get_platform_attributes ----------")
        attr_infos = self.oms.attr.get_platform_attributes('LPJBox_CI_Ben_Hall')
        log.debug('_get_platform_attributes: %s', self._pp.pformat(attr_infos))

        attrs = attr_infos['LPJBox_CI_Ben_Hall']
        for attrid, arrinfo in attrs.iteritems():
            arrinfo['attr_id'] = attrid

        log.debug('_get_platform_attributes: %s', self._pp.pformat(attrs))
        return attrs

    def _load_params(self):
        """Preload the parameter/stream definitions needed by the VEL3D streams."""
        log.info(" ---------- load_params ----------")
        # load_parameter_scenarios
        self.container.spawn_process(
            "Loader", "ion.processes.bootstrap.ion_loader", "IONLoader",
            config=dict(
                op="load",
                scenario="BETA",
                path="master",
                categories="ParameterFunctions,ParameterDefs,ParameterDictionary,StreamDefinition",
                clearcols="owner_id,org_ids",
                assets="res/preload/r2_ioc/ooi_assets",
                parseooi="True",
            ))

    def _create_platform_configuration(self, platform_id, parent_platform_id=None):
        """
        This method is an adaptation of test_agent_instance_config in
        test_instrument_management_service_integration.py

        Builds the platform agent instance, agent, device, data product and
        associations, then a PlatformSite and a Deployment for the device.

        @param platform_id          id of the platform to configure
        @param parent_platform_id   optional parent platform id
        @return (platform_site_id, platform_deployment_id)
        """
        param_dict_name = 'platform_eng_parsed'
        parsed_rpdict_id = self.dataset_management.read_parameter_dictionary_by_name(param_dict_name, id_only=True)
        self.parsed_stream_def_id = self.pubsubclient.create_stream_definition(name='parsed', parameter_dictionary_id=parsed_rpdict_id)

        # FIX: copy the shared module-level config instead of mutating it in
        # place -- the original assigned into PLTFRM_DVR_CONFIG itself, leaking
        # 'attributes' across tests.
        driver_config = dict(PLTFRM_DVR_CONFIG)
        driver_config['attributes'] = self._get_platform_attributes()  #self._platform_attributes[platform_id]
        #OMS returning an error for port.get_platform_ports
        #driver_config['ports'] = self._platform_ports[platform_id]
        log.debug("driver_config: %s", driver_config)

        # instance creation
        platform_agent_instance_obj = any_old(RT.PlatformAgentInstance, {'driver_config': driver_config})
        # FIX: honor the method parameters instead of hard-coding
        # 'LPJBox_CI_Ben_Hall'/None (identical values for the current caller).
        platform_agent_instance_obj.agent_config = {
            'platform_config': {
                'platform_id': platform_id,
                'parent_platform_id': parent_platform_id
            }
        }
        self.platform_agent_instance_id = self.imsclient.create_platform_agent_instance(platform_agent_instance_obj)

        # agent creation
        platform_agent_obj = any_old(RT.PlatformAgent, {
            "stream_configurations": self._get_platform_stream_configs(),
            'driver_module': PLTFRM_DVR_MOD,
            'driver_class': PLTFRM_DVR_CLS
        })
        platform_agent_id = self.imsclient.create_platform_agent(platform_agent_obj)

        # device creation
        self.platform_device_id = self.imsclient.create_platform_device(any_old(RT.PlatformDevice))

        # data product creation
        dp_obj = any_old(RT.DataProduct)
        dp_id = self.dpclient.create_data_product(data_product=dp_obj, stream_definition_id=self.parsed_stream_def_id)
        self.damsclient.assign_data_product(input_resource_id=self.platform_device_id, data_product_id=dp_id)
        self.dpclient.activate_data_product_persistence(data_product_id=dp_id)
        self.addCleanup(self.dpclient.delete_data_product, dp_id)

        # assignments
        self.RR2.assign_platform_agent_instance_to_platform_device_with_has_agent_instance(
            self.platform_agent_instance_id, self.platform_device_id)
        self.RR2.assign_platform_agent_to_platform_agent_instance_with_has_agent_definition(
            platform_agent_id, self.platform_agent_instance_id)
        self.RR2.assign_platform_device_to_org_with_has_resource(
            self.platform_agent_instance_id, self.org_id)

        #######################################
        # dataset
        log.debug('data product = %s', dp_id)
        stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, None, True)
        log.debug('Data product stream_ids = %s', stream_ids)
        stream_id = stream_ids[0]

        # Retrieve the id of the OUTPUT stream from the out Data Product
        dataset_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasDataset, RT.Dataset, True)
        log.debug('Data set for data_product_id1 = %s', dataset_ids[0])
        #######################################

        # Create a PlatformSite plus a Deployment tying the device to it.
        log.debug('_create_platform_site_and_deployment platform_device_id: %s', self.platform_device_id)

        site_object = IonObject(RT.PlatformSite, name='PlatformSite1')
        self.platform_site_id = self.omsclient.create_platform_site(platform_site=site_object, parent_id='')
        log.debug('_create_platform_site_and_deployment site id: %s', self.platform_site_id)

        #create supporting objects for the Deployment resource
        # 1. temporal constraint
        # find current deployment using time constraints
        current_time = int(calendar.timegm(time.gmtime()))
        # two years on either side of current time (63115200 s ~= 2 years)
        start = current_time - 63115200
        end = current_time + 63115200
        temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=str(start), end_datetime=str(end))
        # 2. PlatformPort object which defines device to port map
        platform_port_obj = IonObject(
            OT.PlatformPort,
            reference_designator='GA01SUMO-FI003-01-CTDMO0999',
            port_type=PortTypeEnum.UPLINK,
            ip_address='0')

        # now create the Deployment
        deployment_obj = IonObject(
            RT.Deployment,
            name='TestPlatformDeployment',
            description='some new deployment',
            context=IonObject(OT.CabledNodeDeploymentContext),
            constraint_list=[temporal_bounds],
            port_assignments={self.platform_device_id: platform_port_obj})

        platform_deployment_id = self.omsclient.create_deployment(
            deployment=deployment_obj, site_id=self.platform_site_id, device_id=self.platform_device_id)
        log.debug('_create_platform_site_and_deployment deployment_id: %s', platform_deployment_id)

        # read back for logging only
        deploy_obj2 = self.omsclient.read_deployment(platform_deployment_id)
        log.debug('_create_platform_site_and_deployment deploy_obj2 : %s', deploy_obj2)
        return self.platform_site_id, platform_deployment_id

    def _create_instrument_resources(self):
        """
        Build the full chain of instrument resources used by the test:
        model -> agent -> device -> agent instance, a running port agent,
        stream definitions, three persisted data products (two parsed, one
        raw), then an InstrumentSite and a Deployment mapping the device
        to a port.  Stores the new device id in self.instrument_device_id.
        """
        # Create InstrumentModel
        instModel_obj = IonObject(RT.InstrumentModel, name='VEL3D', description="VEL3D")
        instModel_id = self.imsclient.create_instrument_model(instModel_obj)
        log.debug('new InstrumentModel id = %s ', instModel_id)

        # One StreamConfiguration per expected output stream of the agent.
        raw_config = StreamConfiguration(stream_name='raw', parameter_dictionary_name='raw')
        vel3d_b_sample = StreamConfiguration(
            stream_name='vel3d_b_sample', parameter_dictionary_name='vel3d_b_sample')
        vel3d_b_engineering = StreamConfiguration(
            stream_name='vel3d_b_engineering', parameter_dictionary_name='vel3d_b_engineering')

        # Create InstrumentAgent (driver egg is fetched from the OOI release repo)
        instAgent_obj = IonObject(
            RT.InstrumentAgent,
            name='agent007',
            description="SBE37IMAgent",
            driver_uri="http://sddevrepo.oceanobservatories.org/releases/nobska_mavs4_ooicore-0.0.7-py2.7.egg",
            stream_configurations=[raw_config, vel3d_b_sample, vel3d_b_engineering])
        instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)
        log.debug('new InstrumentAgent id = %s', instAgent_id)

        self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

        # Create InstrumentDevice
        instDevice_obj = IonObject(RT.InstrumentDevice, name='VEL3DDevice', description="VEL3DDevice", serial_number="12345")
        self.instrument_device_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
        self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, self.instrument_device_id)

        # Hard-coded lab endpoint -- only reachable on the local test network.
        port_agent_config = {
            'device_addr': '10.180.80.6',
            'device_port': 2101,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': 1025,
            'data_port': 1026,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance,
                                          name='VEL3DAgentInstance',
                                          description="VEL3DAgentInstance",
                                          port_agent_config=port_agent_config,
                                          alerts=[])

        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(
            instAgentInstance_obj, instAgent_id, self.instrument_device_id)

        # Launch the port agent now so the driver comms config gets filled in.
        self._start_port_agent(
            self.imsclient.read_instrument_agent_instance(instAgentInstance_id))

        # Stream definitions are looked up by (preloaded) parameter dictionary name.
        vel3d_b_sample_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'vel3d_b_sample', id_only=True)
        vel3d_b_sample_stream_def_id = self.pubsubclient.create_stream_definition(
            name='vel3d_b_sample', parameter_dictionary_id=vel3d_b_sample_pdict_id)

        vel3d_b_engineering_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'vel3d_b_engineering', id_only=True)
        vel3d_b_engineering_stream_def_id = self.pubsubclient.create_stream_definition(
            name='vel3d_b_engineering', parameter_dictionary_id=vel3d_b_engineering_pdict_id)

        raw_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'raw', id_only=True)
        raw_stream_def_id = self.pubsubclient.create_stream_definition(
            name='raw', parameter_dictionary_id=raw_pdict_id)

        #-------------------------------
        # Create Raw and Parsed Data Products for the device
        #-------------------------------
        # NOTE(review): tdom/sdom are computed but no longer passed to the data
        # products below (unlike an older copy of this code); kept for fidelity.
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        dp_obj = IonObject(RT.DataProduct,
                           name='vel3d_b_sample',
                           description='vel3d_b_sample')

        data_product_id1 = self.dpclient.create_data_product(
            data_product=dp_obj, stream_definition_id=vel3d_b_sample_stream_def_id)
        self.damsclient.assign_data_product(
            input_resource_id=self.instrument_device_id, data_product_id=data_product_id1)
        self.dpclient.activate_data_product_persistence(data_product_id=data_product_id1)

        dp_obj = IonObject(RT.DataProduct,
                           name='vel3d_b_engineering',
                           description='vel3d_b_engineering')
        data_product_id2 = self.dpclient.create_data_product(
            data_product=dp_obj, stream_definition_id=vel3d_b_engineering_stream_def_id)
        self.damsclient.assign_data_product(
            input_resource_id=self.instrument_device_id, data_product_id=data_product_id2)
        self.dpclient.activate_data_product_persistence(data_product_id=data_product_id2)

        dp_obj = IonObject(RT.DataProduct,
                           name='the raw data',
                           description='raw stream test')
        data_product_id3 = self.dpclient.create_data_product(
            data_product=dp_obj, stream_definition_id=raw_stream_def_id)
        self.damsclient.assign_data_product(
            input_resource_id=self.instrument_device_id, data_product_id=data_product_id3)
        self.dpclient.activate_data_product_persistence(data_product_id=data_product_id3)

        #create instrument site and associated deployment
        site_object = IonObject(RT.InstrumentSite, name='InstrumentSite1')
        instrument_site_id = self.omsclient.create_instrument_site(
            instrument_site=site_object, parent_id=self.platform_site_id)
        log.debug('_create_instrument_site_and_deployment site id: %s', instrument_site_id)

        #create supporting objects for the Deployment resource
        # 1. temporal constraint
        # find current deployment using time constraints
        current_time = int(calendar.timegm(time.gmtime()))
        # two years on either side of current time (63115200 s ~= 2 years)
        start = current_time - 63115200
        end = current_time + 63115200
        temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=str(start), end_datetime=str(end))
        # 2. PlatformPort object which defines device to port map
        platform_port_obj = IonObject(
            OT.PlatformPort,
            reference_designator='GA01SUMO-FI003-03-CTDMO0999',
            port_type=PortTypeEnum.PAYLOAD,
            ip_address='0')

        # now create the Deployment
        deployment_obj = IonObject(
            RT.Deployment,
            name='TestInstrumentDeployment',
            description='some new deployment',
            context=IonObject(OT.CabledInstrumentDeploymentContext),
            constraint_list=[temporal_bounds],
            port_assignments={self.instrument_device_id: platform_port_obj})

        instrument_deployment_id = self.omsclient.create_deployment(
            deployment=deployment_obj, site_id=instrument_site_id, device_id=self.instrument_device_id)
        log.debug('_create_instrument_site_and_deployment deployment_id: %s', instrument_deployment_id)

    def _start_port_agent(self, instrument_agent_instance_obj=None):
        """
        Construct and start the port agent, ONLY NEEDED FOR INSTRUMENT AGENTS.

        Blocks until the port agent is up (or PortAgentProcess times out),
        rewrites the instance's driver_config with the actual comms host,
        ports and pid, persists it via IMS, and returns the re-read instance.
        """
        _port_agent_config = instrument_agent_instance_obj.port_agent_config

        # It blocks until the port agent starts up or a timeout
        _pagent = PortAgentProcess.launch_process(_port_agent_config, test_mode=True)
        pid = _pagent.get_pid()
        port = _pagent.get_data_port()
        cmd_port = _pagent.get_command_port()
        log.info(
            "IMS:_start_pagent returned from PortAgentProcess.launch_process pid: %s ", pid)

        # Hack to get ready for DEMO. Further though needs to be put int
        # how we pass this config info around.
        host = 'localhost'

        driver_config = instrument_agent_instance_obj.driver_config
        comms_config = driver_config.get('comms_config')
        if comms_config:
            host = comms_config.get('addr')
        else:
            log.warn("No comms_config specified, using '%s'" % host)

        # Configure driver to use port agent port number.
        instrument_agent_instance_obj.driver_config['comms_config'] = {
            'addr': host,
            'cmd_port': cmd_port,
            'port': port
        }
        instrument_agent_instance_obj.driver_config['pagent_pid'] = pid
        self.imsclient.update_instrument_agent_instance(instrument_agent_instance_obj)
        return self.imsclient.read_instrument_agent_instance(instrument_agent_instance_obj._id)

    def _start_platform(self):
        """
        Starts the given platform waiting for it to transition to the
        UNINITIALIZED state (note that the agent starts in the LAUNCHING state).

        More in concrete the sequence of steps here are:
        - prepares subscriber to receive the UNINITIALIZED state transition
        - launches the platform process
        - waits for the start of the process
        - waits for the transition to the UNINITIALIZED state
        """
        ##############################################################
        # prepare to receive the UNINITIALIZED state transition:
        async_res = AsyncResult()

        def consume_event(evt, *args, **kwargs):
            log.debug("Got ResourceAgentStateEvent %s from origin %r", evt.state, evt.origin)
            if evt.state == PlatformAgentState.UNINITIALIZED:
                async_res.set(evt)

        # start subscriber:
        sub = EventSubscriber(event_type="ResourceAgentStateEvent",
                              origin=self.platform_device_id,
                              callback=consume_event)
        sub.start()
        log.info(
            "registered event subscriber to wait for state=%r from origin %r",
            PlatformAgentState.UNINITIALIZED, self.platform_device_id)
        #self._event_subscribers.append(sub)
        sub._ready_event.wait(timeout=EVENT_TIMEOUT)

        ##############################################################
        # now start the platform:
        agent_instance_id = self.platform_agent_instance_id
        log.debug("about to call start_platform_agent_instance with id=%s", agent_instance_id)
        pid = self.imsclient.start_platform_agent_instance(
            platform_agent_instance_id=agent_instance_id)
        log.debug("start_platform_agent_instance returned pid=%s", pid)

        #wait for start
        agent_instance_obj = self.imsclient.read_platform_agent_instance(agent_instance_id)
        gate = AgentProcessStateGate(self.processdispatchclient.read_process,
                                     self.platform_device_id,
                                     ProcessStateEnum.RUNNING)
        # FIX: "await" became a reserved keyword in Python 3.7, making the
        # original "gate.await(90)" a syntax error there; getattr keeps the
        # module parseable while invoking the exact same method.
        self.assertTrue(
            getattr(gate, 'await')(90),
            "The platform agent instance did not spawn in 90 seconds")

        # Start a resource agent client to talk with the agent.
        self._pa_client = ResourceAgentClient(self.platform_device_id,
                                              name=gate.process_id,
                                              process=FakeProcess())
        log.debug("got platform agent client %s", str(self._pa_client))

        ##############################################################
        # wait for the UNINITIALIZED event:
        async_res.get(timeout=self._receive_timeout)

    def _register_oms_listener(self):
        """
        Register an HTTP event listener with the external OMS endpoint,
        subscribe locally to OMSDeviceStatusEvent alerts, then fire a
        synthetic OMS test event.
        """
        #load the paramaters and the param dicts necesssary for the VEL3D
        log.debug("---------- connect_to_oms ---------- ")
        log.debug("oms_uri = %s", OMS_URI)
        self.oms = CIOMSClientFactory.create_instance(OMS_URI)

        #buddha url
        url = "http://10.22.88.168:5000/ion-service/oms_event"
        log.info("test_oms_events_receive:setup http url %s", url)

        result = self.oms.event.register_event_listener(url)
        log.debug("_register_oms_listener register_event_listener result %s", result)

        #-------------------------------------------------------------------------------------
        # Set up the subscriber to catch the alert event
        #-------------------------------------------------------------------------------------

        def callback_for_alert(event, *args, **kwargs):
            log.debug("caught an OMSDeviceStatusEvent: %s", event)
            # NOTE(review): self.catch_alert is not initialized in this class --
            # presumably a queue created by the base class; confirm before relying on it.
            self.catch_alert.put(event)

        self.event_subscriber = EventSubscriber(
            event_type='OMSDeviceStatusEvent', callback=callback_for_alert)
        self.event_subscriber.start()
        self.addCleanup(self.event_subscriber.stop)

        # NOTE(review): the 'group ' key carries a trailing space -- looks like a
        # typo, but it is sent verbatim to the external OMS API, so left untouched.
        result = self.oms.event.generate_test_event({
            'platform_id': 'fake_platform_id',
            'message': "fake event triggered from CI using OMS' generate_test_event",
            'severity': '3',
            'group ': 'power'
        })
        log.debug("_register_oms_listener generate_test_event result %s", result)

    def _stop_platform(self):
        """Best-effort shutdown of the platform agent instance; tolerates an already-dead agent."""
        try:
            self.IMS.stop_platform_agent_instance(self.platform_agent_instance_id)
        except Exception:
            log.warn(
                "platform_id=%r: Exception in IMS.stop_platform_agent_instance with "
                "platform_agent_instance_id = %r. Perhaps already dead.",
                self.platform_device_id, self.platform_agent_instance_id)
class TestResourceRegistry(IonIntegrationTestCase):
    """
    Integration tests for the Resource Registry service: CRUD, lifecycle
    transitions, associations, finders, and attachments.

    Many assertions compare against exact service error-message strings, so
    they are tightly coupled to the registry implementation.
    """

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2coi.yml')

        # Now create client to bank service
        self.resource_registry_service = ResourceRegistryServiceClient(
            node=self.container.node)

    def test_crud(self):
        """Create/read/update/delete round-trips plus schema and owner checks."""
        # Some quick registry tests
        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", name="name", foo="bar")
        self.assertTrue(cm.exception.message ==
                        "__init__() got an unexpected keyword argument 'foo'")

        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name", "foo": "bar"})
        self.assertTrue(cm.exception.message ==
                        "__init__() got an unexpected keyword argument 'foo'")

        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name"}, foo="bar")
        self.assertTrue(cm.exception.message ==
                        "__init__() got an unexpected keyword argument 'foo'")

        # Instantiate an object
        obj = IonObject("UserInfo", name="name")

        # Can't set attributes that aren't in the object's schema
        with self.assertRaises(AttributeError) as cm:
            setattr(obj, "foo", "bar")
        self.assertTrue(
            cm.exception.message == "'UserInfo' object has no attribute 'foo'")

        # Can't call update with object that hasn't been persisted
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.update(obj)
        self.assertTrue(
            cm.exception.message.startswith(
                "Object does not have required '_id' or '_rev' attribute"))

        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        # Cannot create object with _id and _rev fields pre-set
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create(read_obj)
        self.assertTrue(
            cm.exception.message.startswith("Doc must not have '_id'"))

        # Update object
        read_obj.name = "John Doe"
        self.resource_registry_service.update(read_obj)

        # Update should fail with revision mismatch (read_obj still carries
        # the pre-update _rev)
        with self.assertRaises(Conflict) as cm:
            self.resource_registry_service.update(read_obj)
        self.assertTrue(
            cm.exception.message.startswith(
                "Object not based on most current version"))

        # Re-read and update object
        read_obj = self.resource_registry_service.read(obj_id)
        self.resource_registry_service.update(read_obj)

        # Delete object
        self.resource_registry_service.delete(obj_id)

        # Make sure read, update and delete report error
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.read(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.update(read_obj)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        # Owner creation tests: creating with an actor header should attach
        # a hasOwner association automatically.
        user = IonObject("ActorIdentity", name='user')
        uid, _ = self.resource_registry_service.create(user)

        inst = IonObject("InstrumentDevice", name='instrument')
        iid, _ = self.resource_registry_service.create(
            inst, headers={'ion-actor-id': str(uid)})

        ids, _ = self.resource_registry_service.find_objects(iid,
                                                             PRED.hasOwner,
                                                             RT.ActorIdentity,
                                                             id_only=True)
        self.assertEquals(len(ids), 1)

        assoc = self.resource_registry_service.read(ids[0])
        self.resource_registry_service.delete(iid)
        # Deleting the resource should cascade so that it is gone afterwards.
        with self.assertRaises(NotFound) as ex:
            assoc = self.resource_registry_service.read(iid)

    def test_lifecycle(self):
        """Lifecycle transitions: valid events, invalid events, direct set."""
        att = IonObject("InstrumentDevice", name='mine', description='desc')

        rid, rev = self.resource_registry_service.create(att)

        # New resources start in DRAFT_PRIVATE.
        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.name, att.name)
        self.assertEquals(att1.lcstate, LCS.DRAFT_PRIVATE)

        new_state = self.resource_registry_service.execute_lifecycle_transition(
            rid, LCE.PLAN)
        self.assertEquals(new_state, LCS.PLANNED_PRIVATE)

        att2 = self.resource_registry_service.read(rid)
        self.assertEquals(att2.lcstate, LCS.PLANNED_PRIVATE)

        # UNANNOUNCE is not a legal event from PLANNED_PRIVATE.
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.execute_lifecycle_transition(
                rid, LCE.UNANNOUNCE)
        self.assertTrue(
            "type=InstrumentDevice, lcstate=PLANNED_PRIVATE has no transition for event unannounce"
            in cm.exception.message)

        new_state = self.resource_registry_service.execute_lifecycle_transition(
            rid, LCE.DEVELOP)
        self.assertEquals(new_state, LCS.DEVELOPED_PRIVATE)

        # Completely unknown event names are rejected as BadRequest.
        self.assertRaises(
            iex.BadRequest,
            self.resource_registry_service.execute_lifecycle_transition,
            resource_id=rid,
            transition_event='NONE##')

        # Direct state set bypasses the event-based transitions.
        self.resource_registry_service.set_lifecycle_state(
            rid, LCS.INTEGRATED_PRIVATE)
        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.lcstate, LCS.INTEGRATED_PRIVATE)

    def test_association(self):
        """Association create/read/find/delete, including all rejection paths."""
        # Instantiate ActorIdentity object
        actor_identity_obj = IonObject("ActorIdentity", name="name")
        actor_identity_obj_id, actor_identity_obj_rev = self.resource_registry_service.create(
            actor_identity_obj)
        read_actor_identity_obj = self.resource_registry_service.read(
            actor_identity_obj_id)

        # Instantiate UserInfo object
        user_info_obj = IonObject("UserInfo", name="name")
        user_info_obj_id, user_info_obj_rev = self.resource_registry_service.create(
            user_info_obj)
        read_user_info_obj = self.resource_registry_service.read(
            user_info_obj_id)

        # Test create failures: unknown predicate
        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.bogus, user_info_obj_id)
        self.assertTrue(cm.exception.message == "bogus")

        # Predicate not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, None, user_info_obj_id)
        self.assertTrue(
            cm.exception.message == "Association must have all elements set")

        # Bad association type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, user_info_obj_id,
                'bogustype')
        self.assertTrue(
            cm.exception.message == "Unsupported assoc_type: bogustype")

        # Subject id or object not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                None, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(
            cm.exception.message == "Association must have all elements set")

        # Object id or object not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, None)
        self.assertTrue(
            cm.exception.message == "Association must have all elements set")

        # Bad subject id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association(
                "bogus", PRED.hasInfo, user_info_obj_id)
        self.assertTrue(
            cm.exception.message == "Object with id bogus does not exist.")

        # Bad object id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, "bogus")
        self.assertTrue(
            cm.exception.message == "Object with id bogus does not exist.")

        # _id missing from subject (passing the unpersisted object itself)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(
            cm.exception.message == "Subject id or rev not available")

        # _id missing from object
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, user_info_obj)
        self.assertTrue(
            cm.exception.message == "Object id or rev not available")

        # Wrong subject type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                user_info_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message ==
                        "Illegal subject type UserInfo for predicate hasInfo")

        # Wrong object type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(
                actor_identity_obj_id, PRED.hasInfo, actor_identity_obj_id)
        self.assertTrue(
            cm.exception.message ==
            "Illegal object type ActorIdentity for predicate hasInfo")

        # Create two different association types between the same subject and predicate
        assoc_id1, assoc_rev1 = self.resource_registry_service.create_association(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)

        # Read object, subject
        res_obj1 = self.resource_registry_service.read_object(
            actor_identity_obj_id, PRED.hasInfo, RT.UserInfo)
        self.assertEquals(res_obj1._id, user_info_obj_id)
        res_obj1 = self.resource_registry_service.read_object(
            actor_identity_obj_id, PRED.hasInfo, RT.UserInfo, id_only=True)
        self.assertEquals(res_obj1, user_info_obj_id)
        res_obj2 = self.resource_registry_service.read_subject(
            RT.ActorIdentity, PRED.hasInfo, user_info_obj_id)
        self.assertEquals(res_obj2._id, actor_identity_obj_id)
        res_obj2 = self.resource_registry_service.read_subject(
            RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, id_only=True)
        self.assertEquals(res_obj2, actor_identity_obj_id)

        # Create a similar association to a specific revision
        # TODO: This is not a supported case so far
        assoc_id2, assoc_rev2 = self.resource_registry_service.create_association(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id, "H2R")

        # Search for associations (good cases)
        ret1 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        ret2 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(
            None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id, None,
            False)
        ret2 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, id_only=False)
        ret3 = self.resource_registry_service.find_associations(
            predicate=PRED.hasInfo, id_only=False)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations (good cases) — same queries via full objects
        ret1 = self.resource_registry_service.find_associations(
            read_actor_identity_obj, PRED.hasInfo, read_user_info_obj)
        ret2 = self.resource_registry_service.find_associations(
            read_actor_identity_obj, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(
            None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, read_user_info_obj, None,
            True)
        ret2 = self.resource_registry_service.find_associations(
            actor_identity_obj_id, PRED.hasInfo, id_only=True)
        ret3 = self.resource_registry_service.find_associations(
            predicate=PRED.hasInfo, id_only=True)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(None, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(
                actor_identity_obj_id, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(
                None, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(
                actor_identity_obj, None, user_info_obj_id)
        self.assertTrue(
            cm.exception.message == "Object id not available in subject")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(
                actor_identity_obj_id, None, user_info_obj)
        self.assertTrue(
            cm.exception.message == "Object id not available in object")

        # Find subjects (good cases)
        subj_ret1 = self.resource_registry_service.find_subjects(
            RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, True)
        subj_ret2 = self.resource_registry_service.find_subjects(
            RT.ActorIdentity, PRED.hasInfo, read_user_info_obj, True)
        self.assertTrue(len(subj_ret1) == len(subj_ret2))
        self.assertTrue(subj_ret1[0] == subj_ret2[0])
        self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id)

        subj_ret3 = self.resource_registry_service.find_subjects(
            None, PRED.hasInfo, user_info_obj_id, True)
        subj_ret4 = self.resource_registry_service.find_subjects(
            None, None, read_user_info_obj, True)
        self.assertTrue(len(subj_ret3) == len(subj_ret4))
        self.assertTrue(subj_ret3[0] == subj_ret4[0])
        self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id)

        subj_ret5 = self.resource_registry_service.find_subjects(
            None, PRED.hasInfo, user_info_obj_id, False)
        subj_ret6 = self.resource_registry_service.find_subjects(
            None, None, read_user_info_obj, False)
        self.assertTrue(len(subj_ret5) == len(subj_ret6))
        self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id)
        self.assertTrue(subj_ret5[1][0]._id == subj_ret6[1][0]._id)

        # Find subjects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide object")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_subjects(
                RT.UserCredentials, PRED.bogus, user_info_obj_id, True)
        self.assertTrue(cm.exception.message == "bogus")

        # Mismatched type/predicate queries return empty results, not errors.
        ret = self.resource_registry_service.find_subjects(
            RT.UserInfo, PRED.hasCredentials, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_subjects(
            RT.UserCredentials, PRED.hasInfo, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(
                RT.UserCredentials, PRED.hasInfo, user_info_obj, True)
        self.assertTrue(
            cm.exception.message == "Object id not available in object")

        # Find objects (good cases)
        subj_ret1 = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasInfo, RT.UserInfo, True)
        subj_ret2 = self.resource_registry_service.find_objects(
            read_actor_identity_obj, PRED.hasInfo, RT.UserInfo, True)
        self.assertTrue(len(subj_ret1) == len(subj_ret2))
        self.assertTrue(subj_ret1[0] == subj_ret2[0])
        self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id)

        subj_ret3 = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasInfo, None, True)
        subj_ret4 = self.resource_registry_service.find_objects(
            actor_identity_obj_id, None, None, True)
        self.assertTrue(len(subj_ret3) == len(subj_ret4))
        self.assertTrue(subj_ret3[0] == subj_ret4[0])
        self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id)

        subj_ret5 = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasInfo, None, False)
        subj_ret6 = self.resource_registry_service.find_objects(
            read_actor_identity_obj, None, None, False)
        self.assertTrue(len(subj_ret5) == len(subj_ret6))
        self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id)
        self.assertTrue(subj_ret5[1][0]._id == subj_ret6[1][0]._id)

        # Find objects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide subject")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_objects(
                actor_identity_obj_id, PRED.bogus, RT.UserCredentials, True)
        self.assertTrue(cm.exception.message == "bogus")

        ret = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasCredentials, RT.ActorIdentity,
            True)
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_objects(
            actor_identity_obj_id, PRED.hasInfo, RT.UserCredentials, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(
                actor_identity_obj, PRED.hasInfo, RT.UserInfo, True)
        self.assertTrue(
            cm.exception.message == "Object id not available in subject")

        # Get association (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(None, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(
                actor_identity_obj_id, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(
                None, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(
                actor_identity_obj, None, user_info_obj_id)
        self.assertTrue(
            cm.exception.message == "Object id not available in subject")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(
                actor_identity_obj_id, None, user_info_obj)
        self.assertTrue(
            cm.exception.message == "Object id not available in object")

        # Delete one of the associations; the remaining one must still
        # resolve via get_association.
        self.resource_registry_service.delete_association(assoc_id2)
        assoc = self.resource_registry_service.get_association(
            actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(assoc._id == assoc_id1)

        # Delete (bad cases)
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete_association("bogus")
        self.assertTrue(
            cm.exception.message == "Object with id bogus does not exist.")

        # Delete other association
        self.resource_registry_service.delete_association(assoc_id1)

        # Delete resources
        self.resource_registry_service.delete(actor_identity_obj_id)
        self.resource_registry_service.delete(user_info_obj_id)

    def test_find_resources(self):
        """find_resources by type/name/lcstate, including the illegal combo."""
        # Name + lcstate together is rejected.
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_resources(
                RT.UserInfo, LCS.DRAFT, "name", False)
        self.assertTrue(
            cm.exception.message == "find by name does not support lcstate")

        ret = self.resource_registry_service.find_resources(
            RT.UserInfo, None, "name", False)
        self.assertTrue(len(ret[0]) == 0)

        # Instantiate an object
        obj = IonObject("InstrumentDevice", name="name")

        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        ret = self.resource_registry_service.find_resources(
            RT.InstrumentDevice, None, "name", False)
        self.assertTrue(len(ret[0]) == 1)
        self.assertTrue(ret[0][0]._id == read_obj._id)

        ret = self.resource_registry_service.find_resources(
            RT.InstrumentDevice, LCS.DRAFT, None, False)
        self.assertTrue(len(ret[0]) == 1)
        self.assertTrue(ret[0][0]._id == read_obj._id)

    def test_attach(self):
        """Attachment create/read/find/delete with BLOB and ASCII content."""
        # Small PNG image used as binary attachment payload.
        binary = "\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x03\x00\x00\x00(-\x0fS\x00\x00\x00\x03sBIT\x08\x08\x08\xdb\xe1O\xe0\x00\x00\x00~PLTEf3\x00\xfc\xf7\xe0\xee\xcc\x00\xd3\xa0\x00\xcc\x99\x00\xec\xcdc\x9fl\x00\xdd\xb2\x00\xff\xff\xff|I\x00\xf9\xdb\x00\xdd\xb5\x19\xd9\xad\x10\xb6\x83\x00\xf8\xd6\x00\xf2\xc5\x00\xd8\xab\x00n;\x00\xff\xcc\x00\xd6\xa4\t\xeb\xb8\x00\x83Q\x00\xadz\x00\xff\xde\x00\xff\xd6\x00\xd6\xa3\x00\xdf\xaf\x00\xde\xad\x10\xbc\x8e\x00\xec\xbe\x00\xec\xd4d\xff\xe3\x00tA\x00\xf6\xc4\x00\xf6\xce\x00\xa5u\x00\xde\xa5\x00\xf7\xbd\x00\xd6\xad\x08\xdd\xaf\x19\x8cR\x00\xea\xb7\x00\xee\xe9\xdf\xc5\x00\x00\x00\tpHYs\x00\x00\n\xf0\x00\x00\n\xf0\x01B\xac4\x98\x00\x00\x00\x1ctEXtSoftware\x00Adobe Fireworks CS4\x06\xb2\xd3\xa0\x00\x00\x00\x15tEXtCreation Time\x0029/4/09Oq\xfdE\x00\x00\x00\xadIDAT\x18\x95M\x8f\x8d\x0e\x820\x0c\x84;ZdC~f\x07\xb2\x11D\x86\x89\xe8\xfb\xbf\xa0+h\xe2\x97\\\xd2^\x93\xb6\x07:1\x9f)q\x9e\xa5\x06\xad\xd5\x13\x8b\xac,\xb3\x02\x9d\x12C\xa1-\xef;M\x08*\x19\xce\x0e?\x1a\xeb4\xcc\xd4\x0c\x831\x87V\xca\xa1\x1a\xd3\x08@\xe4\xbd\xb7\x15P;\xc8\xd4{\x91\xbf\x11\x90\xffg\xdd\x8di\xfa\xb6\x0bs2Z\xff\xe8yg2\xdc\x11T\x96\xc7\x05\xa5\xef\x96+\xa7\xa59E\xae\xe1\x84cm^1\xa6\xb3\xda\x85\xc8\xd8/\x17se\x0eN^'\x8c\xc7\x8e\x88\xa8\xf6p\x8e\xc2;\xc6.\xd0\x11.\x91o\x12\x7f\xcb\xa5\xfe\x00\x89]\x10:\xf5\x00\x0e\xbf\x00\x00\x00\x00IEND\xaeB`\x82"

        # Owner creation tests
        instrument = IonObject("InstrumentDevice", name='instrument')
        iid, _ = self.resource_registry_service.create(instrument)

        att = Attachment(content=binary, attachment_type=AttachmentType.BLOB)
        aid1 = self.resource_registry_service.create_attachment(iid, att)

        att1 = self.resource_registry_service.read_attachment(aid1)
        self.assertEquals(binary, att1.content)

        # NOTE(review): base64.encodestring/decodestring are the Python 2
        # names (removed in Python 3.9) — consistent with this module's
        # Python 2-era code.
        import base64
        att = Attachment(content=base64.encodestring(binary),
                         attachment_type=AttachmentType.ASCII)
        aid2 = self.resource_registry_service.create_attachment(iid, att)

        att1 = self.resource_registry_service.read_attachment(aid2)
        self.assertEquals(binary, base64.decodestring(att1.content))

        # find_attachments: default order, descending order, limit.
        att_ids = self.resource_registry_service.find_attachments(
            iid, id_only=True)
        self.assertEquals(att_ids, [aid1, aid2])

        att_ids = self.resource_registry_service.find_attachments(
            iid, id_only=True, descending=True)
        self.assertEquals(att_ids, [aid2, aid1])

        att_ids = self.resource_registry_service.find_attachments(
            iid, id_only=True, descending=True, limit=1)
        self.assertEquals(att_ids, [aid2])

        atts = self.resource_registry_service.find_attachments(
            iid, id_only=False, limit=1)
        self.assertEquals(atts[0].content, att1.content)

        self.resource_registry_service.delete_attachment(aid1)

        att_ids = self.resource_registry_service.find_attachments(
            iid, id_only=True)
        self.assertEquals(att_ids, [aid2])

        self.resource_registry_service.delete_attachment(aid2)

        att_ids = self.resource_registry_service.find_attachments(
            iid, id_only=True)
        self.assertEquals(att_ids, [])

    def test_read_mult(self):
        """read_mult returns Resource instances for every requested id."""
        test_resource1_id, _ = self.resource_registry_service.create(
            Resource(name='test1'))
        test_resource2_id, _ = self.resource_registry_service.create(
            Resource(name='test2'))
        res_list = [test_resource1_id, test_resource2_id]
        objects = self.resource_registry_service.read_mult(res_list)
        for o in objects:
            self.assertIsInstance(o, Resource)
            self.assertTrue(o._id in res_list)

    def test_find_associations_mult(self):
        """find_associations_mult over a chain dp -> transform -> pd."""
        dp = DataProcess()
        transform = Transform()
        pd = ProcessDefinition()
        dp_id, _ = self.resource_registry_service.create(dp)
        transform_id, _ = self.resource_registry_service.create(transform)
        pd_id, _ = self.resource_registry_service.create(pd)
        self.resource_registry_service.create_association(
            subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.resource_registry_service.create_association(
            subject=transform_id,
            object=pd_id,
            predicate=PRED.hasProcessDefinition)
        # Single subject: only its direct object comes back.
        results, _ = self.resource_registry_service.find_associations_mult(
            subjects=[dp_id], id_only=True)
        self.assertTrue(results == [transform_id])
        # Both subjects: union of their objects (order not guaranteed).
        results, _ = self.resource_registry_service.find_associations_mult(
            subjects=[dp_id, transform_id], id_only=True)
        results.sort()
        correct = [transform_id, pd_id]
        correct.sort()
        self.assertTrue(results == correct)
class TestDeployment(IonIntegrationTestCase):
    """
    Integration tests for Deployment resources via the Observatory and
    Instrument Management services (create/read/delete and activation).

    NOTE(review): another class with this same name appears further down in
    this file; if both really live in one module, the later definition
    shadows this one and these tests never run — confirm the intended split.
    """

    def setUp(self):
        # Start container and deploy the R2 service definitions.
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Service clients used by the tests.
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)

    #@unittest.skip("targeting")
    def test_create_deployment(self):
        """Create a deployment bound to a site and device, then delete it."""
        #create a deployment with metadata and an initial site and device
        platform_site__obj = IonObject(RT.PlatformSite,
                                       name='PlatformSite1',
                                       description='test platform site')
        site_id = self.omsclient.create_platform_site(platform_site__obj)

        platform_device__obj = IonObject(RT.PlatformDevice,
                                         name='PlatformDevice1',
                                         description='test platform device')
        device_id = self.imsclient.create_platform_device(platform_device__obj)

        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment',
                                   description='some new deployment')
        # create_deployment associates the site and device in one call here.
        deployment_id = self.omsclient.create_deployment(deployment_obj, site_id, device_id)

        log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id))

        #retrieve the deployment objects and check that the assoc site and device are attached
        read_deployment_obj = self.omsclient.read_deployment(deployment_id)
        log.debug("test_create_deployment: created deployment obj: %s ", str(read_deployment_obj))

        # Exactly one site and one device must point at the deployment.
        site_ids, _ = self.rrclient.find_subjects(RT.PlatformSite, PRED.hasDeployment, deployment_id, True)
        self.assertEqual(len(site_ids), 1)

        device_ids, _ = self.rrclient.find_subjects(RT.PlatformDevice, PRED.hasDeployment, deployment_id, True)
        self.assertEqual(len(device_ids), 1)

        #delete the deployment
        self.omsclient.delete_deployment(deployment_id)

        # Reading the deleted deployment must fail with NotFound.
        # (Replaces a try/except/else probe with the assertRaises context
        # manager used elsewhere in this file; the unused 'as ex' binding
        # and discarded read result are gone.)
        with self.assertRaises(NotFound):
            self.omsclient.read_deployment(deployment_id)

    #@unittest.skip("targeting")
    def test_activate_deployment(self):
        """Build a full site/device/model hierarchy and activate a deployment."""
        #create a deployment with metadata and an initial site and device
        platform_site__obj = IonObject(RT.PlatformSite,
                                       name='PlatformSite1',
                                       description='test platform site')
        site_id = self.omsclient.create_platform_site(platform_site__obj)

        platform_device__obj = IonObject(RT.PlatformDevice,
                                         name='PlatformDevice1',
                                         description='test platform device')
        device_id = self.imsclient.create_platform_device(platform_device__obj)

        # Shared platform model, assigned to both the device and the site so
        # that activation can match them up.
        platform_model__obj = IonObject(RT.PlatformModel,
                                        name='PlatformModel1',
                                        description='test platform model')
        model_id = self.imsclient.create_platform_model(platform_model__obj)
        self.imsclient.assign_platform_model_to_platform_device(model_id, device_id)
        self.omsclient.assign_platform_model_to_platform_site(model_id, site_id)

        # Child instrument site/device under the platform pair.
        instrument_site__obj = IonObject(RT.InstrumentSite,
                                         name='InstrumentSite1',
                                         description='test instrument site')
        instrument_site_id = self.omsclient.create_instrument_site(instrument_site__obj, site_id)

        instrument_device__obj = IonObject(RT.InstrumentDevice,
                                           name='InstrumentDevice1',
                                           description='test instrument device')
        instrument_device_id = self.imsclient.create_instrument_device(instrument_device__obj)
        self.rrclient.create_association(device_id, PRED.hasDevice, instrument_device_id)

        # Matching instrument model for device and site.
        instrument_model__obj = IonObject(RT.InstrumentModel,
                                          name='InstrumentModel1',
                                          description='test instrument model')
        instrument_model_id = self.imsclient.create_instrument_model(instrument_model__obj)
        self.imsclient.assign_instrument_model_to_instrument_device(instrument_model_id, instrument_device_id)
        self.omsclient.assign_instrument_model_to_instrument_site(instrument_model_id, instrument_site_id)
        #self.rrclient.create_association(instrument_site_id, PRED.hasModel, instrument_model_id)

        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment',
                                   description='some new deployment')
        deployment_id = self.omsclient.create_deployment(deployment_obj, site_id, device_id)

        log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id))

        # Activation must succeed given the consistent model assignments above.
        self.omsclient.activate_deployment(deployment_id)
class TestDeployment(IonIntegrationTestCase):
    """
    Integration tests for Deployment resources, including the data-product
    wiring needed before a deployment can be activated.

    NOTE(review): this duplicates the class name of an earlier TestDeployment
    in this file; if both are in one module, this definition shadows the
    earlier one — confirm the intended file split.
    """

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url("res/deploy/r2deploy.yml")

        # Service clients used by the tests.
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dmpsclient = DataProductManagementServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.psmsclient = PubsubManagementServiceClient(node=self.container.node)

    # @unittest.skip("targeting")
    def test_create_deployment(self):
        """Create a deployment, deploy a site and device to it, then delete."""
        # create a deployment with metadata and an initial site and device
        platform_site__obj = IonObject(RT.PlatformSite,
                                       name="PlatformSite1",
                                       description="test platform site")
        site_id = self.omsclient.create_platform_site(platform_site__obj)

        platform_device__obj = IonObject(RT.PlatformDevice,
                                         name="PlatformDevice1",
                                         description="test platform device")
        device_id = self.imsclient.create_platform_device(platform_device__obj)

        deployment_obj = IonObject(RT.Deployment,
                                   name="TestDeployment",
                                   description="some new deployment")
        # Here site/device are attached via separate deploy_* calls rather
        # than through create_deployment.
        deployment_id = self.omsclient.create_deployment(deployment_obj)
        self.omsclient.deploy_platform_site(site_id, deployment_id)
        self.imsclient.deploy_platform_device(device_id, deployment_id)

        log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id))

        # retrieve the deployment objects and check that the assoc site and device are attached
        read_deployment_obj = self.omsclient.read_deployment(deployment_id)
        log.debug("test_create_deployment: created deployment obj: %s ", str(read_deployment_obj))

        site_ids, _ = self.rrclient.find_subjects(RT.PlatformSite, PRED.hasDeployment, deployment_id, True)
        self.assertEqual(len(site_ids), 1)

        device_ids, _ = self.rrclient.find_subjects(RT.PlatformDevice, PRED.hasDeployment, deployment_id, True)
        self.assertEqual(len(device_ids), 1)

        # delete the deployment
        self.omsclient.delete_deployment(deployment_id)

        # now try to get the deleted dp object; read must raise NotFound,
        # otherwise the test fails.
        try:
            deployment_obj = self.omsclient.read_deployment(deployment_id)
        except NotFound as ex:
            pass
        else:
            self.fail("deleted deployment was found during read")

    # @unittest.skip("targeting")
    def test_activate_deployment(self):
        """Full activation path: sites, devices, models, and data products."""
        # create a deployment with metadata and an initial site and device
        platform_site__obj = IonObject(RT.PlatformSite,
                                       name="PlatformSite1",
                                       description="test platform site")
        site_id = self.omsclient.create_platform_site(platform_site__obj)

        platform_device_obj = IonObject(RT.PlatformDevice,
                                        name="PlatformDevice1",
                                        description="test platform device")
        platform_device_id = self.imsclient.create_platform_device(platform_device_obj)

        # Shared platform model on both device and site.
        platform_model__obj = IonObject(RT.PlatformModel,
                                        name="PlatformModel1",
                                        description="test platform model")
        model_id = self.imsclient.create_platform_model(platform_model__obj)
        self.imsclient.assign_platform_model_to_platform_device(model_id, platform_device_id)
        self.omsclient.assign_platform_model_to_platform_site(model_id, site_id)

        # create a deployment with metadata and an initial site and device
        instrument_site_obj = IonObject(RT.InstrumentSite,
                                        name="InstrumentSite1",
                                        description="test instrument site")
        instrument_site_id = self.omsclient.create_instrument_site(instrument_site_obj, site_id)

        # assign data products appropriately
        # set up stream (this would be preload)
        ctd_stream_def = SBE37_CDM_stream_definition()
        ctd_stream_def_id = self.psmsclient.create_stream_definition(container=ctd_stream_def)

        # Build spatial/temporal domains and the parameter dictionary the
        # data products need (dump() yields their serialized form).
        craft = CoverageCraft
        sdom, tdom = craft.create_domains()
        sdom = sdom.dump()
        tdom = tdom.dump()
        parameter_dictionary = craft.create_parameters()
        parameter_dictionary = parameter_dictionary.dump()

        # Site-side data product.
        dp_obj = IonObject(RT.DataProduct,
                           name="DP1",
                           description="some new dp",
                           temporal_domain=tdom,
                           spatial_domain=sdom)
        log_data_product_id = self.dmpsclient.create_data_product(dp_obj,
                                                                  ctd_stream_def_id,
                                                                  parameter_dictionary)
        self.omsclient.create_site_data_product(instrument_site_id, log_data_product_id)

        instrument_device_obj = IonObject(RT.InstrumentDevice,
                                          name="InstrumentDevice1",
                                          description="test instrument device")
        instrument_device_id = self.imsclient.create_instrument_device(instrument_device_obj)
        self.rrclient.create_association(platform_device_id, PRED.hasDevice, instrument_device_id)

        # Device-side data product (same definition as the site's).
        dp_obj = IonObject(RT.DataProduct,
                           name="DP1",
                           description="some new dp",
                           temporal_domain=tdom,
                           spatial_domain=sdom)
        inst_data_product_id = self.dmpsclient.create_data_product(dp_obj,
                                                                   ctd_stream_def_id,
                                                                   parameter_dictionary)

        # assign data products appropriately
        self.damsclient.assign_data_product(input_resource_id=instrument_device_id,
                                            data_product_id=inst_data_product_id)

        # Matching instrument model on both device and site.
        instrument_model_obj = IonObject(RT.InstrumentModel,
                                         name="InstrumentModel1",
                                         description="test instrument model")
        instrument_model_id = self.imsclient.create_instrument_model(instrument_model_obj)
        self.imsclient.assign_instrument_model_to_instrument_device(instrument_model_id, instrument_device_id)
        self.omsclient.assign_instrument_model_to_instrument_site(instrument_model_id, instrument_site_id)
        # self.rrclient.create_association(instrument_site_id, PRED.hasModel, instrument_model_id)

        deployment_obj = IonObject(RT.Deployment,
                                   name="TestDeployment",
                                   description="some new deployment")
        deployment_id = self.omsclient.create_deployment(deployment_obj)
        self.omsclient.deploy_instrument_site(instrument_site_id, deployment_id)
        self.imsclient.deploy_instrument_device(instrument_device_id, deployment_id)

        log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id))

        self.omsclient.activate_deployment(deployment_id)
class TestDataProductManagementServiceIntegration(IonIntegrationTestCase):
    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Service clients used throughout the tests below.
        self.dpsc_cli = DataProductManagementServiceClient()
        self.rrclient = ResourceRegistryServiceClient()
        self.damsclient = DataAcquisitionManagementServiceClient()
        self.pubsubcli = PubsubManagementServiceClient()
        self.ingestclient = IngestionManagementServiceClient()
        self.process_dispatcher = ProcessDispatcherServiceClient()
        self.dataset_management = DatasetManagementServiceClient()
        self.unsc = UserNotificationServiceClient()
        self.data_retriever = DataRetrieverServiceClient()

        #------------------------------------------
        # Create the environment
        #------------------------------------------
        datastore_name = CACHE_DATASTORE_NAME
        self.db = self.container.datastore_manager.get_datastore(
            datastore_name)
        self.stream_def_id = self.pubsubcli.create_stream_definition(
            name='SBE37_CDM')

        self.process_definitions = {}
        ingestion_worker_definition = ProcessDefinition(
            name='ingestion worker')
        ingestion_worker_definition.executable = {
            'module':
            'ion.processes.data.ingestion.science_granule_ingestion_worker',
            'class': 'ScienceGranuleIngestionWorker'
        }
        process_definition_id = self.process_dispatcher.create_process_definition(
            process_definition=ingestion_worker_definition)
        self.process_definitions['ingestion_worker'] = process_definition_id

        self.pids = []
        self.exchange_points = []
        self.exchange_names = []

        #------------------------------------------------------------------------------------------------
        # First launch the ingestors
        #------------------------------------------------------------------------------------------------
        self.exchange_space = 'science_granule_ingestion'
        self.exchange_point = 'science_data'
        config = DotDict()
        config.process.datastore_name = 'datasets'
        config.process.queue_name = self.exchange_space

        self.exchange_names.append(self.exchange_space)
        self.exchange_points.append(self.exchange_point)

        pid = self.process_dispatcher.schedule_process(
            self.process_definitions['ingestion_worker'],
            configuration=config)
        log.debug("the ingestion worker process id: %s", pid)
        self.pids.append(pid)

        self.addCleanup(self.cleaning_up)

    def cleaning_up(self):
        # Terminate every scheduled process; best-effort, so a failure to
        # cancel one pid must not abort cleanup of the rest.
        for pid in self.pids:
            log.debug("number of pids to be terminated: %s", len(self.pids))
            try:
                self.process_dispatcher.cancel_process(pid)
                log.debug("Terminated the process: %s", pid)
            except:
                log.debug("could not terminate the process id: %s" % pid)
        IngestionManagementIntTest.clean_subscriptions()

        # Drop the queues and exchange points created in setUp.
        for xn in self.exchange_names:
            xni = self.container.ex_manager.create_xn_queue(xn)
            xni.delete()
        for xp in self.exchange_points:
            xpi = self.container.ex_manager.create_xp(xp)
            xpi.delete()

    def get_datastore(self, dataset_id):
        # Resolve the science-data datastore backing the given dataset.
        dataset = self.dataset_management.read_dataset(dataset_id)
        datastore_name = dataset.datastore_name
        datastore = self.container.datastore_manager.get_datastore(
            datastore_name, DataStore.DS_PROFILE.SCIDATA)
        return datastore

    @attr('EXT')
    @attr('PREP')
    def test_create_data_product(self):
        #------------------------------------------------------------------------------------------------
        # create a stream definition for the data from the ctd simulator
        #------------------------------------------------------------------------------------------------
        parameter_dictionary = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict')
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(
            name='Simulated CTD data',
            parameter_dictionary_id=parameter_dictionary._id)
        log.debug("Created stream def id %s" % ctd_stream_def_id)

        #------------------------------------------------------------------------------------------------
        # test creating a new data product w/o a stream definition
        #------------------------------------------------------------------------------------------------

        # Generic time-series data domain creation
        tdom, sdom = time_series_domain()

        dp_obj = IonObject(RT.DataProduct,
                           name='DP1',
                           description='some new dp',
                           temporal_domain=tdom.dump(),
                           spatial_domain=sdom.dump())

        dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 10.0
        dp_obj.geospatial_bounds.geospatial_latitude_limit_south = -10.0
        dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 10.0
        dp_obj.geospatial_bounds.geospatial_longitude_limit_west = -10.0
        dp_obj.ooi_product_name = "PRODNAME"

        #------------------------------------------------------------------------------------------------
        # Create a set of ParameterContext objects to define the parameters in the coverage, add each to the ParameterDictionary
        #------------------------------------------------------------------------------------------------
        dp_id = self.dpsc_cli.create_data_product(
            data_product=dp_obj, stream_definition_id=ctd_stream_def_id)

        # Assert that the data product has an associated stream at this stage
        stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream,
                                                   RT.Stream, True)
        self.assertNotEquals(len(stream_ids), 0)

        # Assert that the data product has an associated stream def at this stage
        stream_ids, _ = self.rrclient.find_objects(dp_id,
                                                   PRED.hasStreamDefinition,
                                                   RT.StreamDefinition, True)
        self.assertNotEquals(len(stream_ids), 0)

        self.dpsc_cli.activate_data_product_persistence(dp_id)

        dp_obj = self.dpsc_cli.read_data_product(dp_id)
        self.assertIsNotNone(dp_obj)
        self.assertEquals(dp_obj.geospatial_point_center.lat, 0.0)
        log.debug('Created data product %s', dp_obj)

        #------------------------------------------------------------------------------------------------
        # test creating a new data product with a stream definition
        #------------------------------------------------------------------------------------------------
        log.debug('Creating new data product with a stream definition')
        dp_obj = IonObject(RT.DataProduct,
                           name='DP2',
                           description='some new dp',
                           temporal_domain=tdom.dump(),
                           spatial_domain=sdom.dump())

        dp_id2 = self.dpsc_cli.create_data_product(dp_obj, ctd_stream_def_id)
        self.dpsc_cli.activate_data_product_persistence(dp_id2)
        log.debug('new dp_id = %s' % dp_id2)

        #------------------------------------------------------------------------------------------------
        # make sure data product is associated with stream def
        #------------------------------------------------------------------------------------------------
        streamdefs = []
        streams, _ = self.rrclient.find_objects(dp_id2, PRED.hasStream,
                                                RT.Stream, True)
        for s in streams:
            log.debug("Checking stream %s" % s)
            sdefs, _ = self.rrclient.find_objects(s, PRED.hasStreamDefinition,
                                                  RT.StreamDefinition, True)
            for sd in sdefs:
                log.debug("Checking streamdef %s" % sd)
                streamdefs.append(sd)
        self.assertIn(ctd_stream_def_id, streamdefs)

        group_names = self.dpsc_cli.get_data_product_group_list()
        self.assertIn("PRODNAME", group_names)

        # test reading a non-existent data product
        log.debug('reading non-existent data product')
        with self.assertRaises(NotFound):
            dp_obj = self.dpsc_cli.read_data_product('some_fake_id')

        # update a data product (tests read also)
        log.debug('Updating data product')
        # first get the existing dp object
        dp_obj = self.dpsc_cli.read_data_product(dp_id)

        # now tweak the object
        dp_obj.description = 'the very first dp'
        dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 20.0
        dp_obj.geospatial_bounds.geospatial_latitude_limit_south = -20.0
        dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 20.0
        dp_obj.geospatial_bounds.geospatial_longitude_limit_west = -20.0
        # now write the dp back to the registry
        update_result = self.dpsc_cli.update_data_product(dp_obj)

        # now get the dp back to see if it was updated
        dp_obj = self.dpsc_cli.read_data_product(dp_id)
        self.assertEquals(dp_obj.description, 'the very first dp')
        self.assertEquals(dp_obj.geospatial_point_center.lat, 0.0)
        log.debug('Updated data product %s', dp_obj)

        # test extension
        extended_product = self.dpsc_cli.get_data_product_extension(dp_id)
        self.assertEqual(dp_id, extended_product._id)
        self.assertEqual(
            ComputedValueAvailability.PROVIDED,
            extended_product.computed.product_download_size_estimated.status)
        self.assertEqual(
            0, extended_product.computed.product_download_size_estimated.value)
        self.assertEqual(ComputedValueAvailability.PROVIDED,
                         extended_product.computed.parameters.status)
        #log.debug("test_create_data_product: parameters %s" % extended_product.computed.parameters.value)

        def ion_object_encoder(obj):
            return obj.__dict__

        # test prepare for create
        data_product_data = self.dpsc_cli.prepare_data_product_support()
        #print simplejson.dumps(data_product_data, default=ion_object_encoder, indent= 2)

        self.assertEqual(data_product_data._id, "")
        self.assertEqual(data_product_data.type_, OT.DataProductPrepareSupport)
        self.assertEqual(
            len(data_product_data.associations['StreamDefinition'].resources),
            2)
        self.assertEqual(
            len(data_product_data.associations['Dataset'].resources), 0)
        self.assertEqual(
            len(data_product_data.associations['StreamDefinition'].
                associated_resources), 0)
        self.assertEqual(
            len(data_product_data.associations['Dataset'].associated_resources
                ), 0)

        # test prepare for update
        data_product_data = self.dpsc_cli.prepare_data_product_support(dp_id)
        #print simplejson.dumps(data_product_data, default=ion_object_encoder, indent= 2)

        self.assertEqual(data_product_data._id, dp_id)
        self.assertEqual(data_product_data.type_, OT.DataProductPrepareSupport)
        self.assertEqual(
            len(data_product_data.associations['StreamDefinition'].resources),
            2)
        self.assertEqual(
            len(data_product_data.associations['Dataset'].resources), 1)
        self.assertEqual(
            len(data_product_data.associations['StreamDefinition'].
                associated_resources), 1)
        self.assertEqual(
            data_product_data.associations['StreamDefinition'].
            associated_resources[0].s, dp_id)
        self.assertEqual(
            len(data_product_data.associations['Dataset'].associated_resources
                ), 1)
        self.assertEqual(
            data_product_data.associations['Dataset'].associated_resources[0].
            s, dp_id)

        # now 'delete' the data product
        log.debug("deleting data product: %s" % dp_id)
        self.dpsc_cli.delete_data_product(dp_id)

        # Assert that there are no associated streams leftover after deleting the data product
        stream_ids, assoc_ids = self.rrclient.find_objects(
            dp_id, PRED.hasStream, RT.Stream, True)
        self.assertEquals(len(stream_ids), 0)
        self.assertEquals(len(assoc_ids), 0)

        self.dpsc_cli.force_delete_data_product(dp_id)

        # now try to get the deleted dp object
        with self.assertRaises(NotFound):
            dp_obj = self.dpsc_cli.read_data_product(dp_id)

        # Get the events corresponding to the data product
        ret = self.unsc.get_recent_events(resource_id=dp_id)
        events = ret.value
        for event in events:
            log.debug("event time: %s" % event.ts_created)
        self.assertTrue(len(events) > 0)

    def test_data_product_stream_def(self):
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(
            name='Simulated CTD data', parameter_dictionary_id=pdict_id)

        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        dp_obj = IonObject(RT.DataProduct,
                           name='DP1',
                           description='some new dp',
                           temporal_domain=tdom,
                           spatial_domain=sdom)
        dp_id = self.dpsc_cli.create_data_product(
            data_product=dp_obj, stream_definition_id=ctd_stream_def_id)

        # The stream definition resolved from the product must be the one
        # supplied at creation time.
        stream_def_id = self.dpsc_cli.get_data_product_stream_definition(dp_id)
        self.assertEquals(ctd_stream_def_id, stream_def_id)

    def test_derived_data_product(self):
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(
            name='ctd parsed', parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsubcli.delete_stream_definition,
                        ctd_stream_def_id)

        tdom, sdom = time_series_domain()

        dp = DataProduct(name='Instrument DP',
                         temporal_domain=tdom.dump(),
                         spatial_domain=sdom.dump())
        dp_id = self.dpsc_cli.create_data_product(
            dp, stream_definition_id=ctd_stream_def_id)
        self.addCleanup(self.dpsc_cli.force_delete_data_product, dp_id)

        self.dpsc_cli.activate_data_product_persistence(dp_id)
        self.addCleanup(self.dpsc_cli.suspend_data_product_persistence, dp_id)

        dataset_ids, _ = self.rrclient.find_objects(subject=dp_id,
                                                    predicate=PRED.hasDataset,
                                                    id_only=True)
        if not dataset_ids:
            raise NotFound("Data Product %s dataset does not exist" %
                           str(dp_id))
        dataset_id = dataset_ids[0]

        # Make the derived data product
        simple_stream_def_id = self.pubsubcli.create_stream_definition(
            name='TEMPWAT stream def',
            parameter_dictionary_id=pdict_id,
            available_fields=['time', 'temp'])
        tempwat_dp = DataProduct(name='TEMPWAT')
        tempwat_dp_id = self.dpsc_cli.create_data_product(
            tempwat_dp,
            stream_definition_id=simple_stream_def_id,
            parent_data_product_id=dp_id)
        self.addCleanup(self.dpsc_cli.delete_data_product, tempwat_dp_id)
        self.dpsc_cli.activate_data_product_persistence(tempwat_dp_id)
        self.addCleanup(self.dpsc_cli.suspend_data_product_persistence,
                        tempwat_dp_id)

        # Check that the streams associated with the data product are persisted with
        stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream,
                                                   RT.Stream, True)
        for stream_id in stream_ids:
            self.assertTrue(self.ingestclient.is_persisted(stream_id))
        stream_id = stream_ids[0]
        route = self.pubsubcli.read_stream_route(stream_id=stream_id)

        rdt = RecordDictionaryTool(stream_definition_id=ctd_stream_def_id)
        rdt['time'] = np.arange(20)
        rdt['temp'] = np.arange(20)
        rdt['pressure'] = np.arange(20)

        publisher = StandaloneStreamPublisher(stream_id, route)

        dataset_modified = Event()

        def cb(*args, **kwargs):
            dataset_modified.set()

        es = EventSubscriber(event_type=OT.DatasetModified,
                             callback=cb,
                             origin=dataset_id,
                             auto_delete=True)
        es.start()
        self.addCleanup(es.stop)

        publisher.publish(rdt.to_granule())
        self.assertTrue(dataset_modified.wait(30))

        tempwat_dataset_ids, _ = self.rrclient.find_objects(tempwat_dp_id,
                                                            PRED.hasDataset,
                                                            id_only=True)
        tempwat_dataset_id = tempwat_dataset_ids[0]
        granule = self.data_retriever.retrieve(
            tempwat_dataset_id, delivery_format=simple_stream_def_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        np.testing.assert_array_equal(rdt['time'], np.arange(20))
        # Only the two available_fields of the derived product survive.
        self.assertEquals(set(rdt.fields), set(['time', 'temp']))

    def test_activate_suspend_data_product(self):
        #------------------------------------------------------------------------------------------------
        # create a stream definition for the data from the ctd simulator
        #------------------------------------------------------------------------------------------------
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(
            name='Simulated CTD data', parameter_dictionary_id=pdict_id)
        log.debug("Created stream def id %s" % ctd_stream_def_id)

        #------------------------------------------------------------------------------------------------
        # test creating a new data product w/o a stream definition
        #------------------------------------------------------------------------------------------------
        # Construct temporal and spatial Coordinate Reference System objects
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        dp_obj = IonObject(RT.DataProduct,
                           name='DP1',
                           description='some new dp',
                           temporal_domain=tdom,
                           spatial_domain=sdom)
        log.debug("Created an IonObject for a data product: %s" % dp_obj)

        #------------------------------------------------------------------------------------------------
        # Create a set of ParameterContext objects to define the parameters in the coverage, add each to the ParameterDictionary
        #------------------------------------------------------------------------------------------------
        dp_id = self.dpsc_cli.create_data_product(
            data_product=dp_obj, stream_definition_id=ctd_stream_def_id)

        #------------------------------------------------------------------------------------------------
        # test activate and suspend data product persistence
        #------------------------------------------------------------------------------------------------
        self.dpsc_cli.activate_data_product_persistence(dp_id)

        dp_obj = self.dpsc_cli.read_data_product(dp_id)
        self.assertIsNotNone(dp_obj)

        dataset_ids, _ = self.rrclient.find_objects(subject=dp_id,
                                                    predicate=PRED.hasDataset,
                                                    id_only=True)
        if not dataset_ids:
            raise NotFound("Data Product %s dataset does not exist" %
                           str(dp_id))
        dataset_id = dataset_ids[0]

        # Check that the streams associated with the data product are persisted with
        stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream,
                                                   RT.Stream, True)
        for stream_id in stream_ids:
            self.assertTrue(self.ingestclient.is_persisted(stream_id))
        stream_id = stream_ids[0]
        route = self.pubsubcli.read_stream_route(stream_id=stream_id)

        rdt = RecordDictionaryTool(stream_definition_id=ctd_stream_def_id)
        rdt['time'] = np.arange(20)
        rdt['temp'] = np.arange(20)

        publisher = StandaloneStreamPublisher(stream_id, route)

        dataset_modified = Event()

        def cb(*args, **kwargs):
            dataset_modified.set()

        es = EventSubscriber(event_type=OT.DatasetModified,
                             callback=cb,
                             origin=dataset_id,
                             auto_delete=True)
        es.start()
        self.addCleanup(es.stop)

        publisher.publish(rdt.to_granule())
        self.assertTrue(dataset_modified.wait(30))

        #--------------------------------------------------------------------------------
        # Now get the data in one chunk using an RPC Call to start_retreive
        #--------------------------------------------------------------------------------
        replay_data = self.data_retriever.retrieve(dataset_ids[0])
        self.assertIsInstance(replay_data, Granule)
        log.debug(
            "The data retriever was able to replay the dataset that was attached to the data product "
            "we wanted to be persisted. Therefore the data product was indeed persisted with "
            "otherwise we could not have retrieved its dataset using the data retriever. Therefore "
            "this demonstration shows that L4-CI-SA-RQ-267 is satisfied: 'Data product management shall persist data products'"
        )

        data_product_object = self.rrclient.read(dp_id)
        self.assertEquals(data_product_object.name, 'DP1')
        self.assertEquals(data_product_object.description, 'some new dp')
        log.debug(
            "Towards L4-CI-SA-RQ-308: 'Data product management shall persist data product metadata'. "
            " Attributes in create for the data product obj, name= '%s', description='%s', match those of object from the "
            "resource registry, name='%s', desc='%s'" %
            (dp_obj.name, dp_obj.description, data_product_object.name,
             data_product_object.description))

        #------------------------------------------------------------------------------------------------
        # test suspend data product persistence
        #------------------------------------------------------------------------------------------------
        self.dpsc_cli.suspend_data_product_persistence(dp_id)

        dataset_modified.clear()

        rdt['time'] = np.arange(20, 40)

        publisher.publish(rdt.to_granule())
        # Persistence is suspended, so no dataset-modified event may fire.
        self.assertFalse(dataset_modified.wait(2))

        self.dpsc_cli.activate_data_product_persistence(dp_id)
        dataset_modified.clear()

        publisher.publish(rdt.to_granule())
        self.assertTrue(dataset_modified.wait(30))

        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        np.testing.assert_array_almost_equal(rdt['time'], np.arange(40))

        dataset_ids, _ = self.rrclient.find_objects(dp_id,
                                                    PRED.hasDataset,
                                                    id_only=True)
        self.assertEquals(len(dataset_ids), 1)

        self.dpsc_cli.suspend_data_product_persistence(dp_id)
        self.dpsc_cli.force_delete_data_product(dp_id)
        # now try to get the deleted dp object
        with self.assertRaises(NotFound):
            dp_obj = self.rrclient.read(dp_id)

    def test_lookup_values(self):
        ph = ParameterHelper(self.dataset_management, self.addCleanup)
        pdict_id = ph.create_lookups()
        stream_def_id = self.pubsubcli.create_stream_definition(
            'lookup', parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsubcli.delete_stream_definition, stream_def_id)

        data_product = DataProduct(name='lookup data product')
        tdom, sdom = time_series_domain()
        data_product.temporal_domain = tdom.dump()
        data_product.spatial_domain = sdom.dump()

        data_product_id = self.dpsc_cli.create_data_product(
            data_product, stream_definition_id=stream_def_id)
        self.addCleanup(self.dpsc_cli.delete_data_product, data_product_id)

        data_producer = DataProducer(name='producer')
        data_producer.producer_context = DataProcessProducerContext()
        data_producer.producer_context.configuration['qc_keys'] = [
            'offset_document'
        ]
        data_producer_id, _ = self.rrclient.create(data_producer)
        self.addCleanup(self.rrclient.delete, data_producer_id)
        assoc, _ = self.rrclient.create_association(
            subject=data_product_id,
            object=data_producer_id,
            predicate=PRED.hasDataProducer)
        self.addCleanup(self.rrclient.delete_association, assoc)

        document_keys = self.damsclient.list_qc_references(data_product_id)
        self.assertEquals(document_keys, ['offset_document'])

        svm = StoredValueManager(self.container)
        svm.stored_value_cas('offset_document', {'offset_a': 2.0})
        self.dpsc_cli.activate_data_product_persistence(data_product_id)
        dataset_ids, _ = self.rrclient.find_objects(subject=data_product_id,
                                                    predicate=PRED.hasDataset,
                                                    id_only=True)
        dataset_id = dataset_ids[0]

        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = [0]
        rdt['temp'] = [20.]
        granule = rdt.to_granule()

        stream_ids, _ = self.rrclient.find_objects(subject=data_product_id,
                                                   predicate=PRED.hasStream,
                                                   id_only=True)
        stream_id = stream_ids[0]
        route = self.pubsubcli.read_stream_route(stream_id=stream_id)

        publisher = StandaloneStreamPublisher(stream_id, route)
        publisher.publish(granule)

        self.assertTrue(dataset_monitor.event.wait(10))

        granule = self.data_retriever.retrieve(dataset_id)
        rdt2 = RecordDictionaryTool.load_from_granule(granule)
        np.testing.assert_array_equal(rdt['temp'], rdt2['temp'])
        # offset_a=2.0 applied to temp=20. yields calibrated=22.0
        np.testing.assert_array_almost_equal(rdt2['calibrated'],
                                             np.array([22.0]))

        svm.stored_value_cas('updated_document', {'offset_a': 3.0})
        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)
        ep = EventPublisher(event_type=OT.ExternalReferencesUpdatedEvent)
        ep.publish_event(origin=data_product_id,
                         reference_keys=['updated_document'])

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = [1]
        rdt['temp'] = [20.]
        granule = rdt.to_granule()
        gevent.sleep(2)  # Yield so that the event goes through
        publisher.publish(granule)
        self.assertTrue(dataset_monitor.event.wait(10))

        granule = self.data_retriever.retrieve(dataset_id)
        rdt2 = RecordDictionaryTool.load_from_granule(granule)
        np.testing.assert_array_equal(rdt2['temp'], np.array([20., 20.]))
        np.testing.assert_array_almost_equal(rdt2['calibrated'],
                                             np.array([22.0, 23.0]))
class TestPlatformInstrument(BaseIntTestPlatform): def setUp(self): self._start_container() self._pp = pprint.PrettyPrinter() log.debug("oms_uri = %s", OMS_URI) self.oms = CIOMSClientFactory.create_instance(OMS_URI) self._get_platform_attributes() url = OmsTestMixin.start_http_server() log.info("TestPlatformInstrument:setup http url %s", url) result = self.oms.event.register_event_listener(url) log.info( "TestPlatformInstrument:setup register_event_listener result %s", result) # response = self.oms.port.get_platform_ports('LPJBox_CI_Ben_Hall') # log.info("TestPlatformInstrument:setup get_platform_ports %s", response) self.container.start_rel_from_url('res/deploy/r2deploy.yml') # Now create client to DataProductManagementService self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.pubsubclient = PubsubManagementServiceClient( node=self.container.node) self.imsclient = InstrumentManagementServiceClient( node=self.container.node) self.datasetclient = DatasetManagementServiceClient( node=self.container.node) self.processdispatchclient = ProcessDispatcherServiceClient( node=self.container.node) self.dpclient = DataProductManagementServiceClient( node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient( node=self.container.node) self.dataset_management = DatasetManagementServiceClient() self.RR2 = EnhancedResourceRegistryClient(self.rrclient) self.org_id = self.RR2.create(any_old(RT.Org)) log.debug("Org created: %s", self.org_id) # see _set_receive_timeout self._receive_timeout = 177 self.instrument_device = '' self.platform_device = '' self.platform_agent_instance_id = '' self._pa_client = '' def done(): CIOMSClientFactory.destroy_instance(self.oms) event_notifications = OmsTestMixin.stop_http_server() log.info("event_notifications = %s" % str(event_notifications)) self.addCleanup(done) def _get_platform_attributes(self): attr_infos = self.oms.attr.get_platform_attributes( 'LPJBox_CI_Ben_Hall') 
log.debug('_get_platform_attributes: %s', self._pp.pformat(attr_infos)) # ret_infos = attr_infos['LPJBox_CI_Ben_Hall'] # for attrName, attr_defn in ret_infos.iteritems(): # attr = AttrNode(attrName, attr_defn) # pnode.add_attribute(attr) return attr_infos @unittest.skip('Still in construction...') def test_platform_with_instrument_streaming(self): # # The following is with just a single platform and the single # instrument "SBE37_SIM_08", which corresponds to the one on port 4008. # #load the paramaters and the param dicts necesssary for the VEL3D self._load_params() #create the instrument device/agent/mode self._create_instrument_resources() #create the platform device, agent and instance self._create_platform_configuration('LPJBox_CI_Ben_Hall') self.rrclient.create_association(subject=self.platform_device, predicate=PRED.hasDevice, object=self.instrument_device) self._start_platform() # self.addCleanup(self._stop_platform, p_root) # get everything in command mode: self._ping_agent() self._initialize() _ia_client = ResourceAgentClient(self.instrument_device, process=FakeProcess()) state = _ia_client.get_agent_state() log.info("TestPlatformInstrument get_agent_state %s", state) self._go_active() # self._run() gevent.sleep(3) # note that this includes the instrument also getting to the command state # self._stream_instruments() # get client to the instrument: # the i_obj is a DotDict with various pieces captured during the # set-up of the instrument, in particular instrument_device_id #i_obj = self._get_instrument(instr_key) # log.debug("KK creating ResourceAgentClient") # ia_client = ResourceAgentClient(i_obj.instrument_device_id, # process=FakeProcess()) # log.debug("KK got ResourceAgentClient: %s", ia_client) # # # verify the instrument is command state: # state = ia_client.get_agent_state() # log.debug("KK instrument state: %s", state) # self.assertEqual(state, ResourceAgentState.COMMAND) self._reset() self._shutdown() def _load_params(self): log.info( 
"--------------------------------------------------------------------------------------------------------" ) # load_parameter_scenarios self.container.spawn_process( "Loader", "ion.processes.bootstrap.ion_loader", "IONLoader", config=dict( op="load", scenario="BETA", path="master", categories= "ParameterFunctions,ParameterDefs,ParameterDictionary,StreamDefinition", clearcols="owner_id,org_ids", assets="res/preload/r2_ioc/ooi_assets", parseooi="True", )) def _create_platform_configuration(self, platform_id, parent_platform_id=None): """ This method is an adaptation of test_agent_instance_config in test_instrument_management_service_integration.py @param platform_id @param parent_platform_id @return a DotDict with various of the constructed elements associated to the platform. """ tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() param_dict_name = 'platform_eng_parsed' parsed_rpdict_id = self.dataset_management.read_parameter_dictionary_by_name( param_dict_name, id_only=True) self.parsed_stream_def_id = self.pubsubclient.create_stream_definition( name='parsed', parameter_dictionary_id=parsed_rpdict_id) driver_config = PLTFRM_DVR_CONFIG driver_config['attributes'] = self._get_platform_attributes( ) #self._platform_attributes[platform_id] #OMS returning an error for port.get_platform_ports #driver_config['ports'] = self._platform_ports[platform_id] log.debug("driver_config: %s", driver_config) # instance creation platform_agent_instance_obj = any_old(RT.PlatformAgentInstance, {'driver_config': driver_config}) platform_agent_instance_obj.agent_config = { 'platform_config': { 'platform_id': 'LPJBox_CI_Ben_Hall', 'parent_platform_id': None } } self.platform_agent_instance_id = self.imsclient.create_platform_agent_instance( platform_agent_instance_obj) # agent creation platform_agent_obj = any_old( RT.PlatformAgent, { "stream_configurations": self._get_platform_stream_configs(), 'driver_module': PLTFRM_DVR_MOD, 'driver_class': PLTFRM_DVR_CLS }) 
        # (tail of platform-setup method begun above) create the platform agent
        platform_agent_id = self.imsclient.create_platform_agent(
            platform_agent_obj)

        # device creation
        self.platform_device = self.imsclient.create_platform_device(
            any_old(RT.PlatformDevice))

        # data product creation
        dp_obj = any_old(RT.DataProduct, {
            "temporal_domain": tdom,
            "spatial_domain": sdom
        })
        dp_id = self.dpclient.create_data_product(
            data_product=dp_obj,
            stream_definition_id=self.parsed_stream_def_id)
        self.damsclient.assign_data_product(
            input_resource_id=self.platform_device, data_product_id=dp_id)
        self.dpclient.activate_data_product_persistence(data_product_id=dp_id)
        self.addCleanup(self.dpclient.delete_data_product, dp_id)

        # assignments: wire agent instance <-> device <-> agent definition
        self.RR2.assign_platform_agent_instance_to_platform_device_with_has_agent_instance(
            self.platform_agent_instance_id, self.platform_device)
        self.RR2.assign_platform_agent_to_platform_agent_instance_with_has_agent_definition(
            platform_agent_id, self.platform_agent_instance_id)
        # NOTE(review): passes the agent-instance id (not the device id) as the
        # device argument — confirm this is intended.
        self.RR2.assign_platform_device_to_org_with_has_resource(
            self.platform_agent_instance_id, self.org_id)

        #######################################
        # dataset
        log.debug('data product = %s', dp_id)

        stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, None,
                                                   True)
        log.debug('Data product stream_ids = %s', stream_ids)
        stream_id = stream_ids[0]

        # Retrieve the id of the OUTPUT stream from the out Data Product
        dataset_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasDataset,
                                                    RT.Dataset, True)
        log.debug('Data set for data_product_id1 = %s', dataset_ids[0])
        #######################################

        return

    def _create_instrument_resources(self):
        """
        Create the full chain of instrument resources used by the tests:
        InstrumentModel -> InstrumentAgent -> InstrumentDevice ->
        InstrumentAgentInstance (with a running port agent), plus the
        vel3d_b_sample / vel3d_b_engineering / raw stream definitions and
        their three persisted DataProducts assigned to the device.
        Stores the created device id in self.instrument_device.
        """
        # Create InstrumentModel
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='VEL3D',
                                  description="VEL3D")
        instModel_id = self.imsclient.create_instrument_model(instModel_obj)
        log.debug('new InstrumentModel id = %s ', instModel_id)

        # stream configurations carried by the agent definition
        raw_config = StreamConfiguration(stream_name='raw',
                                         parameter_dictionary_name='raw')
        vel3d_b_sample = StreamConfiguration(
            stream_name='vel3d_b_sample',
            parameter_dictionary_name='vel3d_b_sample')
        vel3d_b_engineering = StreamConfiguration(
            stream_name='vel3d_b_engineering',
            parameter_dictionary_name='vel3d_b_engineering')

        # Create InstrumentAgent
        instAgent_obj = IonObject(
            RT.InstrumentAgent,
            name='agent007',
            description="SBE37IMAgent",
            driver_uri=
            "http://sddevrepo.oceanobservatories.org/releases/nobska_mavs4_ooicore-0.0.7-py2.7.egg",
            stream_configurations=[
                raw_config, vel3d_b_sample, vel3d_b_engineering
            ])
        instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)
        log.debug('new InstrumentAgent id = %s', instAgent_id)

        self.imsclient.assign_instrument_model_to_instrument_agent(
            instModel_id, instAgent_id)

        # Create InstrumentDevice
        instDevice_obj = IonObject(RT.InstrumentDevice,
                                   name='VEL3DDevice',
                                   description="VEL3DDevice",
                                   serial_number="12345")
        self.instrument_device = self.imsclient.create_instrument_device(
            instrument_device=instDevice_obj)
        self.imsclient.assign_instrument_model_to_instrument_device(
            instModel_id, self.instrument_device)

        # port agent configuration for the agent instance
        port_agent_config = {
            'device_addr': '10.180.80.6',
            'device_port': 2101,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': 1025,
            'data_port': 1026,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance,
                                          name='VEL3DAgentInstance',
                                          description="VEL3DAgentInstance",
                                          port_agent_config=port_agent_config,
                                          alerts=[])

        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(
            instAgentInstance_obj, instAgent_id, self.instrument_device)
        # launch the port agent for this instance (blocks until up / timeout)
        self._start_port_agent(
            self.imsclient.read_instrument_agent_instance(
                instAgentInstance_id))

        # stream definitions for the three products
        vel3d_b_sample_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'vel3d_b_sample', id_only=True)
        vel3d_b_sample_stream_def_id = self.pubsubclient.create_stream_definition(
            name='vel3d_b_sample',
            parameter_dictionary_id=vel3d_b_sample_pdict_id)

        vel3d_b_engineering_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'vel3d_b_engineering', id_only=True)
        vel3d_b_engineering_stream_def_id = self.pubsubclient.create_stream_definition(
            name='vel3d_b_engineering',
            parameter_dictionary_id=vel3d_b_engineering_pdict_id)

        raw_pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'raw', id_only=True)
        raw_stream_def_id = self.pubsubclient.create_stream_definition(
            name='raw', parameter_dictionary_id=raw_pdict_id)

        #-------------------------------
        # Create Raw and Parsed Data Products for the device
        #-------------------------------
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        dp_obj = IonObject(RT.DataProduct,
                           name='vel3d_b_sample',
                           description='vel3d_b_sample',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        data_product_id1 = self.dpclient.create_data_product(
            data_product=dp_obj,
            stream_definition_id=vel3d_b_sample_stream_def_id)
        self.damsclient.assign_data_product(
            input_resource_id=self.instrument_device,
            data_product_id=data_product_id1)
        self.dpclient.activate_data_product_persistence(
            data_product_id=data_product_id1)

        dp_obj = IonObject(RT.DataProduct,
                           name='vel3d_b_engineering',
                           description='vel3d_b_engineering',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        data_product_id2 = self.dpclient.create_data_product(
            data_product=dp_obj,
            stream_definition_id=vel3d_b_engineering_stream_def_id)
        self.damsclient.assign_data_product(
            input_resource_id=self.instrument_device,
            data_product_id=data_product_id2)
        self.dpclient.activate_data_product_persistence(
            data_product_id=data_product_id2)

        dp_obj = IonObject(RT.DataProduct,
                           name='the raw data',
                           description='raw stream test',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        data_product_id3 = self.dpclient.create_data_product(
            data_product=dp_obj, stream_definition_id=raw_stream_def_id)
        self.damsclient.assign_data_product(
            input_resource_id=self.instrument_device,
            data_product_id=data_product_id3)
        self.dpclient.activate_data_product_persistence(
            data_product_id=data_product_id3)

    def _start_port_agent(self, instrument_agent_instance_obj=None):
        """
        Construct and start the port agent, ONLY NEEDED FOR INSTRUMENT AGENTS.

        @param instrument_agent_instance_obj  agent-instance resource whose
                   port_agent_config drives the launch; its driver_config is
                   updated in place with the live comms_config and pagent_pid.
        @retval the re-read (persisted) InstrumentAgentInstance resource.
        """
        _port_agent_config = instrument_agent_instance_obj.port_agent_config

        # It blocks until the port agent starts up or a timeout
        _pagent = PortAgentProcess.launch_process(_port_agent_config,
                                                  test_mode=True)
        pid = _pagent.get_pid()
        port = _pagent.get_data_port()
        cmd_port = _pagent.get_command_port()
        log.info(
            "IMS:_start_pagent returned from PortAgentProcess.launch_process pid: %s ",
            pid)

        # HACK to get ready for DEMO. Further thought needs to be put into
        # how we pass this config info around.
        host = 'localhost'

        driver_config = instrument_agent_instance_obj.driver_config
        comms_config = driver_config.get('comms_config')
        if comms_config:
            host = comms_config.get('addr')
        else:
            log.warn("No comms_config specified, using '%s'" % host)

        # Configure driver to use port agent port number.
        instrument_agent_instance_obj.driver_config['comms_config'] = {
            'addr': host,
            'cmd_port': cmd_port,
            'port': port
        }
        instrument_agent_instance_obj.driver_config['pagent_pid'] = pid
        self.imsclient.update_instrument_agent_instance(
            instrument_agent_instance_obj)
        return self.imsclient.read_instrument_agent_instance(
            instrument_agent_instance_obj._id)

    def _start_platform(self):
        """
        Starts the given platform waiting for it to transition to the
        UNINITIALIZED state (note that the agent starts in the LAUNCHING state).

        More in concrete the sequence of steps here are:
        - prepares subscriber to receive the UNINITIALIZED state transition
        - launches the platform process
        - waits for the start of the process
        - waits for the transition to the UNINITIALIZED state
        """
        ##############################################################
        # prepare to receive the UNINITIALIZED state transition:
        async_res = AsyncResult()

        def consume_event(evt, *args, **kwargs):
            # resolve the AsyncResult once the agent reaches UNINITIALIZED
            log.debug("Got ResourceAgentStateEvent %s from origin %r",
                      evt.state, evt.origin)
            if evt.state == PlatformAgentState.UNINITIALIZED:
                async_res.set(evt)

        # start subscriber:
        sub = EventSubscriber(event_type="ResourceAgentStateEvent",
                              origin=self.platform_device,
                              callback=consume_event)
        sub.start()
        log.info(
            "registered event subscriber to wait for state=%r from origin %r",
            PlatformAgentState.UNINITIALIZED, self.platform_device)
        #self._event_subscribers.append(sub)
        sub._ready_event.wait(timeout=EVENT_TIMEOUT)

        ##############################################################
        # now start the platform:
        agent_instance_id = self.platform_agent_instance_id
        log.debug("about to call start_platform_agent_instance with id=%s",
                  agent_instance_id)
        pid = self.imsclient.start_platform_agent_instance(
            platform_agent_instance_id=agent_instance_id)
        log.debug("start_platform_agent_instance returned pid=%s", pid)

        #wait for start
        agent_instance_obj = self.imsclient.read_platform_agent_instance(
            agent_instance_id)
        gate = AgentProcessStateGate(self.processdispatchclient.read_process,
                                     self.platform_device._id,
                                     ProcessStateEnum.RUNNING)
        # NOTE: 'await' is a plain method name here — valid in Python 2 only
        # (it is a reserved word from Python 3.7 on).
        self.assertTrue(
            gate. await (90),
            "The platform agent instance did not spawn in 90 seconds")

        # Start a resource agent client to talk with the agent.
        self._pa_client = ResourceAgentClient(self.platform_device,
                                              name=gate.process_id,
                                              process=FakeProcess())
        log.debug("got platform agent client %s", str(self._pa_client))

        ##############################################################
        # wait for the UNINITIALIZED event:
        async_res.get(timeout=self._receive_timeout)
class TestInstrumentManagementServiceIntegration(IonIntegrationTestCase): def setUp(self): # Start container #print 'instantiating container' self._start_container() #container = Container() #print 'starting container' #container.start() #print 'started container' self.container.start_rel_from_url('res/deploy/r2deploy.yml') self.RR = ResourceRegistryServiceClient(node=self.container.node) print 'started services' @unittest.skip('this test just for debugging setup') def test_just_the_setup(self): return def test_resources_associations(self): """ create one of each resource and association used by IMS to guard against problems in ion-definitions """ #stuff we control instrument_agent_instance_id, _ = self.RR.create( any_old(RT.InstrumentAgentInstance)) instrument_agent_id, _ = self.RR.create(any_old(RT.InstrumentAgent)) instrument_device_id, _ = self.RR.create(any_old(RT.InstrumentDevice)) instrument_model_id, _ = self.RR.create(any_old(RT.InstrumentModel)) platform_agent_instance_id, _ = self.RR.create( any_old(RT.PlatformAgentInstance)) platform_agent_id, _ = self.RR.create(any_old(RT.PlatformAgent)) platform_device_id, _ = self.RR.create(any_old(RT.PlatformDevice)) platform_model_id, _ = self.RR.create(any_old(RT.PlatformModel)) sensor_device_id, _ = self.RR.create(any_old(RT.SensorDevice)) sensor_model_id, _ = self.RR.create(any_old(RT.SensorModel)) #stuff we associate to data_producer_id, _ = self.RR.create(any_old(RT.DataProducer)) instrument_agent_instance_id #is only a target #instrument_agent self.RR.create_association(instrument_agent_id, PRED.hasModel, instrument_model_id) self.RR.create_association(instrument_agent_id, PRED.hasInstance, instrument_agent_instance_id) #instrument_device self.RR.create_association(instrument_device_id, PRED.hasModel, instrument_model_id) self.RR.create_association(instrument_device_id, PRED.hasAgentInstance, instrument_agent_instance_id) #self.RR.create_association(instrument_device_id, PRED.hasSensor, sensor_device_id) 
self.RR.create_association(instrument_device_id, PRED.hasDataProducer, data_producer_id) instrument_model_id #is only a target platform_agent_instance_id #is only a target #platform_agent self.RR.create_association(platform_agent_id, PRED.hasModel, platform_model_id) self.RR.create_association(platform_agent_id, PRED.hasInstance, platform_agent_instance_id) #platform_device self.RR.create_association(platform_device_id, PRED.hasModel, platform_model_id) self.RR.create_association(platform_device_id, PRED.hasAgentInstance, platform_agent_instance_id) self.RR.create_association(platform_device_id, PRED.hasInstrument, instrument_device_id) platform_model_id #is only a target #sensor_device self.RR.create_association(sensor_device_id, PRED.hasModel, sensor_model_id) sensor_model_id #is only a target
class DiscoveryIntTest(IonIntegrationTestCase):
    """
    Integration tests for the Discovery service: resource-graph traversal and
    (when ElasticSearch is available) view/catalog management and the query
    language used by discovery.parse.
    """

    def setUp(self):
        super(DiscoveryIntTest, self).setUp()
        config = DotDict()
        config.bootstrap.use_es = True

        self._start_container()
        self.addCleanup(DiscoveryIntTest.es_cleanup)
        self.container.start_rel_from_url('res/deploy/r2deploy.yml', config)

        self.discovery = DiscoveryServiceClient()
        self.catalog = CatalogManagementServiceClient()
        self.ims = IndexManagementServiceClient()
        self.rr = ResourceRegistryServiceClient()

        self.dataset_management = DatasetManagementServiceClient()
        self.pubsub_management = PubsubManagementServiceClient()
        self.data_product_management = DataProductManagementServiceClient()

    @staticmethod
    def es_cleanup():
        """Delete the test's ElasticSearch indexes (and their couchdb rivers)."""
        es_host = CFG.get_safe('server.elasticsearch.host', 'localhost')
        es_port = CFG.get_safe('server.elasticsearch.port', '9200')
        es = ep.ElasticSearch(host=es_host, port=es_port, timeout=10)
        indexes = STD_INDEXES.keys()
        indexes.append('%s_resources_index' % get_sys_name().lower())
        indexes.append('%s_events_index' % get_sys_name().lower())

        for index in indexes:
            IndexManagementService._es_call(es.river_couchdb_delete, index)
            IndexManagementService._es_call(es.index_delete, index)

    def poll(self, tries, callback, *args, **kwargs):
        '''
        Polling wrapper for queries
        Elasticsearch may not index and cache the changes right away so we may
        need a couple of tries and a little time to go by before the results
        show.
        '''
        for i in xrange(tries):
            retval = callback(*args, **kwargs)
            if retval:
                return retval
            time.sleep(0.2)
        return None

    def test_traversal(self):
        """traverse() returns all resources reachable from the start node."""
        dp = DataProcess()
        transform = Transform()
        pd = ProcessDefinition()

        dp_id, _ = self.rr.create(dp)
        transform_id, _ = self.rr.create(transform)
        pd_id, _ = self.rr.create(pd)

        self.rr.create_association(subject=dp_id,
                                   object=transform_id,
                                   predicate=PRED.hasTransform)
        self.rr.create_association(subject=transform_id,
                                   object=pd_id,
                                   predicate=PRED.hasProcessDefinition)

        results = self.discovery.traverse(dp_id)
        results.sort()
        correct = [pd_id, transform_id]
        correct.sort()
        self.assertTrue(results == correct, '%s' % results)

    def test_iterative_traversal(self):
        """iterative_traverse() honors its depth limit (default vs depth=1)."""
        dp = DataProcess()
        transform = Transform()
        pd = ProcessDefinition()

        dp_id, _ = self.rr.create(dp)
        transform_id, _ = self.rr.create(transform)
        pd_id, _ = self.rr.create(pd)

        self.rr.create_association(subject=dp_id,
                                   object=transform_id,
                                   predicate=PRED.hasTransform)
        self.rr.create_association(subject=transform_id,
                                   object=pd_id,
                                   predicate=PRED.hasProcessDefinition)

        results = self.discovery.iterative_traverse(dp_id)
        results.sort()
        correct = [transform_id]
        self.assertTrue(results == correct)

        results = self.discovery.iterative_traverse(dp_id, 1)
        results.sort()
        correct = [transform_id, pd_id]
        correct.sort()
        self.assertTrue(results == correct)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_view_crud(self):
        """Create/read/update/delete a discovery view and its backing catalog."""
        view_id = self.discovery.create_view('big_view', fields=['name'])
        catalog_id = self.discovery.list_catalogs(view_id)[0]
        index_ids = self.catalog.list_indexes(catalog_id)
        self.assertTrue(len(index_ids))

        view = self.discovery.read_view(view_id)
        self.assertIsInstance(view, View)
        self.assertTrue(view.name == 'big_view')

        view.name = 'not_so_big_view'
        self.discovery.update_view(view)
        view = self.discovery.read_view(view_id)
        self.assertTrue(view.name == 'not_so_big_view')

        self.discovery.delete_view(view_id)
        with self.assertRaises(NotFound):
            self.discovery.read_view(view_id)

    def test_view_best_match(self):
        #---------------------------------------------------------------
        # Matches the best catalog available OR creates a new one
        #---------------------------------------------------------------
        catalog_id = self.catalog.create_catalog('dev',
                                                 keywords=['name', 'model'])
        view_id = self.discovery.create_view('exact_view',
                                             fields=['name', 'model'])
        catalog_ids = self.discovery.list_catalogs(view_id)
        self.assertTrue(catalog_ids == [catalog_id])

        # a second view with the same fields should reuse the same catalog
        view_id = self.discovery.create_view('another_view',
                                             fields=['name', 'model'])
        catalog_ids = self.discovery.list_catalogs(view_id)
        self.assertTrue(catalog_ids == [catalog_id])

        # different fields -> a different catalog
        view_id = self.discovery.create_view('big_view', fields=['name'])
        catalog_ids = self.discovery.list_catalogs(view_id)
        self.assertTrue(catalog_ids != [catalog_id])

    @skipIf(not use_es, 'No ElasticSearch')
    def test_basic_searching(self):
        #- - - - - - - - - - - - - - - - -
        # set up the fake resources
        #- - - - - - - - - - - - - - - - -
        instrument_pool = [
            InstrumentDevice(name='sonobuoy1', firmware_version='1'),
            InstrumentDevice(name='sonobuoy2', firmware_version='2'),
            InstrumentDevice(name='sonobuoy3', firmware_version='3')
        ]
        for instrument in instrument_pool:
            self.rr.create(instrument)

        view_id = self.discovery.create_view('devices',
                                             fields=['firmware_version'])
        search_string = "search 'firmware_version' is '2' from '%s'" % view_id
        results = self.poll(5, self.discovery.parse, search_string)
        result = results[0]['_source']
        self.assertIsInstance(result, InstrumentDevice)
        self.assertTrue(result.name == 'sonobuoy2')
        self.assertTrue(result.firmware_version == '2')

    @skipIf(not use_es, 'No ElasticSearch')
    def test_associative_searching(self):
        """Search restricted to resources belonging to a given parent."""
        dp_id, _ = self.rr.create(DataProduct('test_foo'))
        ds_id, _ = self.rr.create(Dataset('test_bar', registered=True))
        self.rr.create_association(subject=dp_id,
                                   object=ds_id,
                                   predicate='hasDataset')

        search_string = "search 'type_' is 'Dataset' from 'resources_index' and belongs to '%s'" % dp_id
        results = self.poll(5, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(ds_id in results)

    def test_iterative_associative_searching(self):
        #--------------------------------------------------------------------------------
        # Tests the ability to limit the iterations
        #--------------------------------------------------------------------------------
        dp = DataProcess()
        transform = Transform()
        pd = ProcessDefinition()

        dp_id, _ = self.rr.create(dp)
        transform_id, _ = self.rr.create(transform)
        pd_id, _ = self.rr.create(pd)

        self.rr.create_association(subject=dp_id,
                                   object=transform_id,
                                   predicate=PRED.hasTransform)
        self.rr.create_association(subject=transform_id,
                                   object=pd_id,
                                   predicate=PRED.hasProcessDefinition)

        search_string = "belongs to '%s' depth 1" % dp_id
        results = self.poll(5, self.discovery.parse, search_string)
        results = list([i._id for i in results])
        correct = [transform_id]
        self.assertTrue(results == correct, '%s' % results)

        search_string = "belongs to '%s' depth 2" % dp_id
        results = self.poll(5, self.discovery.parse, search_string)
        results = list([i._id for i in results])
        results.sort()
        correct = [transform_id, pd_id]
        correct.sort()
        self.assertTrue(results == correct)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_ranged_value_searching(self):
        """Range queries: bounded (0..100) and open-ended (from 80)."""
        discovery = self.discovery
        rr = self.rr

        view_id = discovery.create_view('bank_view', fields=['cash_balance'])
        bank_id, _ = rr.create(BankAccount(name='broke', cash_balance=10))

        search_string = "search 'cash_balance' values from 0 to 100 from '%s'" % view_id

        results = self.poll(5, discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == bank_id)

        bank_id, _ = rr.create(BankAccount(name='broke', cash_balance=90))

        search_string = "search 'cash_balance' values from 80 from '%s'" % view_id

        results = self.poll(5, discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == bank_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_collections_searching(self):
        """Search results restricted to members of a collection."""
        site_id, _ = self.rr.create(Site(name='black_mesa'))
        view_id = self.discovery.create_view('big', fields=['name'])

        # Add the site to a new collection
        collection_id = self.ims.create_collection('resource_collection',
                                                   [site_id])

        search_string = "search 'name' is '*' from '%s' and in '%s'" % (
            view_id, collection_id)

        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0] == site_id, '%s' % results)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_search_by_name(self):
        """Wildcard search on a custom view (case-insensitive match)."""
        inst_dev = InstrumentDevice(name='test_dev', serial_number='ABC123')

        dev_id, _ = self.rr.create(inst_dev)
        self.discovery.create_view('devs', fields=['name', 'serial_number'])

        search_string = "search 'serial_number' is 'abc*' from 'devs'"
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dev_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_search_by_name_index(self):
        """Same searches against the built-in resources_index."""
        inst_dev = InstrumentDevice(name='test_dev', serial_number='ABC123')

        dev_id, _ = self.rr.create(inst_dev)
        search_string = "search 'serial_number' is 'abc*' from 'resources_index'"

        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dev_id)

        bank_acc = BankAccount(name='blah', cash_balance=10)
        res_id, _ = self.rr.create(bank_acc)

        search_string = "search 'cash_balance' values from 0 to 100 from 'resources_index'"

        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == res_id)

    #@skipIf(not use_es, 'No ElasticSearch')
    @skip('Skip until time to refactor, data_format is removed from DataProduct resource')
    def test_data_product_search(self):
        # Create the dataproduct
        dp = DataProduct(name='test_product')
        dp.data_format.name = 'test_signal'
        dp.data_format.description = 'test signal'
        dp.data_format.character_set = 'utf8'
        dp.data_format.nominal_sampling_rate_maximum = '44000'
        dp.data_format.nominal_sampling_rate_minimum = '44000'
        dp.CDM_data_type = 'basic'
        dp_id, _ = self.rr.create(dp)

        search_string = "search 'data_format.name' is 'test_signal' from 'data_products_index'"
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)

        search_string = "search 'CDM_data_type' is 'basic' from 'data_products_index'"
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)

        search_string = "search 'data_format.character_set' is 'utf8' from 'data_products_index'"
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_events_search(self):
        # Create a resource to force a new event
        dp = DataProcess()
        dp_id, rev = self.rr.create(dp)

        search_string = "SEARCH 'origin' IS '%s' FROM 'events_index'" % dp_id

        results = self.poll(9, self.discovery.parse, search_string)
        origin_type = results[0]['_source'].origin_type
        origin_id = results[0]['_source'].origin

        self.assertTrue(origin_type == RT.DataProcess)
        self.assertTrue(origin_id == dp_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_geo_distance_search(self):
        """Geo query: radius (km) around a lat/lon point."""
        pd = PlatformDevice(name='test_dev')

        pd_id, _ = self.rr.create(pd)

        search_string = "search 'index_location' geo distance 20 km from lat 0 lon 0 from 'devices_index'"

        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == pd_id)
        self.assertTrue(results[0]['_source'].name == 'test_dev')

    @skipIf(not use_es, 'No ElasticSearch')
    def test_geo_bbox_search(self):
        """Geo query: bounding box given by two lat/lon corners."""
        pd = PlatformDevice(name='test_dev')
        pd.index_location.lat = 5
        pd.index_location.lon = 5

        pd_id, _ = self.rr.create(pd)

        search_string = "search 'index_location' geo box top-left lat 10 lon 0 bottom-right lat 0 lon 10 from 'devices_index'"

        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == pd_id)
        self.assertTrue(results[0]['_source'].name == 'test_dev')

    @skipIf(not use_es, 'No ElasticSearch')
    def test_time_search(self):
        """Time-range query on ts_created: bounded and open-ended."""
        today = date.today()
        yesterday = today - timedelta(days=1)
        tomorrow = today + timedelta(days=1)

        data_product = DataProduct()
        dp_id, _ = self.rr.create(data_product)

        search_string = "search 'ts_created' time from '%s' to '%s' from 'data_products_index'" % (
            yesterday, tomorrow)

        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)

        search_string = "search 'ts_created' time from '%s' from 'data_products_index'" % yesterday

        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_user_search(self):
        """Search users_index by top-level and nested (contact) fields."""
        user = UserInfo()
        user.name = 'test'
        user.contact.phones.append('5551212')

        user_id, _ = self.rr.create(user)

        search_string = 'search "name" is "test" from "users_index"'

        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == user_id)
        self.assertTrue(results[0]['_source'].name == 'test')

        search_string = 'search "contact.phones" is "5551212" from "users_index"'

        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == user_id)
        self.assertTrue(results[0]['_source'].name == 'test')

    @skipIf(not use_es, 'No ElasticSearch')
    def test_subobject_search(self):
        contact = ContactInformation()
        contact.email = '*****@*****.**'
        contact.individual_name_family = 'Tester'
        contact.individual_names_given = 'Intern'

        dp = DataProduct(name='example')
        dp.contacts.append(contact)

        dp_id, _ = self.rr.create(dp)

        #--------------------------------------------------------------------------------
        # Example using the full field name
        #--------------------------------------------------------------------------------
        search_string = 'search "contacts.email" is "*****@*****.**" from "data_products"'
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)
        self.assertEquals(results[0]['_source'].name, 'example')

        #--------------------------------------------------------------------------------
        # Example using a sub-object's field name (ambiguous searching)
        #--------------------------------------------------------------------------------
        search_string = 'search "individual_names_given" is "Intern" from "data_products"'
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)
        self.assertEquals(results[0]['_source'].name, 'example')

    @skipIf(not use_es, 'No ElasticSearch')
    def test_descriptive_phrase_search(self):
        """Phrase ('like') search within a description field."""
        dp = DataProduct(
            name='example',
            description='This is simply a description for this data product')
        dp_id, _ = self.rr.create(dp)

        search_string = 'search "description" like "description for" from "data_products"'
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)
        self.assertEquals(results[0]['_source'].name, 'example')

    @skipIf(not use_es, 'No ElasticSearch')
    def test_ownership_searching(self):
        # Create two data products so that there is competition to the
        # search, one is parsed (with conductivity as a parameter) and the
        # other is raw
        dp = DataProduct(name='example dataproduct')
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict')
        stream_def_id = self.pubsub_management.create_stream_definition(
            'ctd parsed', parameter_dictionary_id=pdict_id)
        tdom, sdom = time_series_domain()
        dp.spatial_domain = sdom.dump()
        dp.temporal_domain = tdom.dump()
        dp_id = self.data_product_management.create_data_product(
            dp, stream_definition_id=stream_def_id, exchange_point='xp1')

        pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'ctd_raw_param_dict')
        stream_def_id = self.pubsub_management.create_stream_definition(
            'ctd raw', parameter_dictionary_id=pdict_id)
        dp = DataProduct(name='WRONG')
        dp.spatial_domain = sdom.dump()
        dp.temporal_domain = tdom.dump()
        self.data_product_management.create_data_product(
            dp, stream_definition_id=stream_def_id, exchange_point='xp1')

        # find the conductivity parameter, then the product that "has" it
        parameter_search = 'search "name" is "conductivity" from "resources_index"'
        results = self.poll(9, self.discovery.parse, parameter_search)
        param_id = results[0]['_id']

        data_product_search = 'search "name" is "*" from "data_products_index" and has "%s"' % param_id
        results = self.poll(9, self.discovery.parse, data_product_search)
        print results
        self.assertEquals(results[0], dp_id)
class TestOmsLaunch(IonIntegrationTestCase):
    """
    Integration test scaffolding that builds a platform network (sites,
    devices, agents) from an RSN OMS network definition and launches it.
    """

    def setUp(self):
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(
            node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(
            node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(
            node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(
            node=self.container.node)
        self.pubsubcli = PubsubManagementServiceClient(
            node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(
            node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(
            node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()

        # Use the network definition provided by RSN OMS directly.
        rsn_oms = CIOMSClientFactory.create_instance(DVR_CONFIG['oms_uri'])
        self._network_definition = RsnOmsUtil.build_network_definition(rsn_oms)
        # get serialized version for the configuration:
        self._network_definition_ser = NetworkUtil.serialize_network_definition(
            self._network_definition)
        if log.isEnabledFor(logging.DEBUG):
            log.debug("NetworkDefinition serialization:\n%s",
                      self._network_definition_ser)

        # populated by _set_up_PlatformModel_obj / _traverse below
        self.platformModel_id = None
        self.all_platforms = {}
        self.agent_streamconfig_map = {}

        # data-sample and event subscriber bookkeeping
        self._async_data_result = AsyncResult()
        self._data_subscribers = []
        self._samples_received = []
        self.addCleanup(self._stop_data_subscribers)

        self._async_event_result = AsyncResult()
        self._event_subscribers = []
        self._events_received = []
        self.addCleanup(self._stop_event_subscribers)
        self._start_event_subscriber()

        self._set_up_DataProduct_obj()
        self._set_up_PlatformModel_obj()

    def _set_up_DataProduct_obj(self):
        """Prepare the stream definition and DataProduct template shared by
        the platform_eng log streams."""
        # Create data product object to be used for each of the platform
        # log streams
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        self.pdict_id = self.dataset_management.read_parameter_dictionary_by_name(
            'platform_eng_parsed', id_only=True)
        self.platform_eng_stream_def_id = self.pubsubcli.create_stream_definition(
            name='platform_eng', parameter_dictionary_id=self.pdict_id)

        self.dp_obj = IonObject(RT.DataProduct,
                                name='platform_eng data',
                                description='platform_eng test',
                                temporal_domain=tdom,
                                spatial_domain=sdom)

    def _set_up_PlatformModel_obj(self):
        """Create the single PlatformModel shared by all platforms; fails the
        test on BadRequest."""
        # Create PlatformModel
        platformModel_obj = IonObject(RT.PlatformModel,
                                      name='RSNPlatformModel',
                                      description="RSNPlatformModel")
        try:
            self.platformModel_id = self.imsclient.create_platform_model(
                platformModel_obj)
        except BadRequest as ex:
            self.fail("failed to create new PLatformModel: %s" % ex)

        log.debug('new PlatformModel id = %s', self.platformModel_id)

    def _traverse(self, pnode, platform_id, parent_platform_objs=None):
        """
        Recursive routine that repeatedly calls _prepare_platform to build
        the object dictionary for each platform.

        @param pnode PlatformNode
        @param platform_id ID of the platform to be visited
        @param parent_platform_objs dict of objects associated to parent
               platform, if any.

        @retval the dict returned by _prepare_platform at this level.
        """
        log.info("Starting _traverse for %r", platform_id)

        plat_objs = self._prepare_platform(pnode, platform_id,
                                           parent_platform_objs)

        self.all_platforms[platform_id] = plat_objs

        # now, traverse the children:
        for sub_pnode in pnode.subplatforms.itervalues():
            subplatform_id = sub_pnode.platform_id
            self._traverse(sub_pnode, subplatform_id, plat_objs)

        return plat_objs

    def _prepare_platform(self, pnode, platform_id, parent_platform_objs):
        """
        This routine generalizes the manual construction originally done in
        test_oms_launch.py. It is called by the recursive _traverse method so
        all platforms starting from a given base platform are prepared.

        Note: For simplicity in this test, sites are organized in the same
        hierarchical way as the platforms themselves.

        @param pnode PlatformNode
        @param platform_id ID of the platform to be visited
        @param parent_platform_objs dict of objects associated to parent
               platform, if any.

        @retval a dict of associated objects similar to those in
                test_oms_launch
        """
        site__obj = IonObject(RT.PlatformSite,
                              name='%s_PlatformSite' % platform_id,
                              description='%s_PlatformSite platform site' %
                              platform_id)

        site_id = self.omsclient.create_platform_site(site__obj)

        if parent_platform_objs:
            # establish hasSite association with the parent
            self.rrclient.create_association(
                subject=parent_platform_objs['site_id'],
                predicate=PRED.hasSite,
                object=site_id)

        # prepare platform attributes and ports:
        monitor_attribute_objs, monitor_attribute_dicts = self._prepare_platform_attributes(
            pnode, platform_id)

        port_objs, port_dicts = self._prepare_platform_ports(
            pnode, platform_id)

        device__obj = IonObject(
            RT.PlatformDevice,
            name='%s_PlatformDevice' % platform_id,
            description='%s_PlatformDevice platform device' % platform_id,
            #                        ports=port_objs,
            #                        platform_monitor_attributes = monitor_attribute_objs
        )

        # NOTE(review): device__dict is built but not used in the visible
        # code; presumably consumed by agent configuration elsewhere — verify.
        device__dict = dict(
            ports=port_dicts,
            platform_monitor_attributes=monitor_attribute_dicts)

        self.device_id = self.imsclient.create_platform_device(device__obj)

        self.imsclient.assign_platform_model_to_platform_device(
            self.platformModel_id, self.device_id)
        self.rrclient.create_association(subject=site_id,
                                         predicate=PRED.hasDevice,
                                         object=self.device_id)
        self.damsclient.register_instrument(instrument_id=self.device_id)

        if parent_platform_objs:
            # establish hasDevice association with the parent
            self.rrclient.create_association(
                subject=parent_platform_objs['device_id'],
                predicate=PRED.hasDevice,
                object=self.device_id)

        agent__obj = IonObject(RT.PlatformAgent,
                               name='%s_PlatformAgent' % platform_id,
                               description='%s_PlatformAgent platform agent' %
                               platform_id)

        agent_id = self.imsclient.create_platform_agent(agent__obj)

        if parent_platform_objs:
            # add this platform_id to parent's children:
            parent_platform_objs['children'].append(platform_id)

        self.imsclient.assign_platform_model_to_platform_agent(
            self.platformModel_id, agent_id)

        #        agent_instance_obj = IonObject(RT.PlatformAgentInstance,
        #                        name='%s_PlatformAgentInstance' % platform_id,
        #                        description="%s_PlatformAgentInstance" % platform_id)
        #
        #        agent_instance_id = self.imsclient.create_platform_agent_instance(
        #                            agent_instance_obj, agent_id, device_id)

        plat_objs = {
            'platform_id': platform_id,
            'site__obj': site__obj,
            'site_id': site_id,
            'device__obj': device__obj,
            'device_id': self.device_id,
            'agent__obj': agent__obj,
            'agent_id': agent_id,
            #            'agent_instance_obj': agent_instance_obj,
            #            'agent_instance_id':  agent_instance_id,
            'children': []
        }

        log.info("plat_objs for platform_id %r = %s", platform_id,
                 str(plat_objs))

        stream_config = self._create_stream_config(plat_objs)
        self.agent_streamconfig_map[platform_id] = stream_config
        #        self.agent_streamconfig_map[platform_id] = None
        #        self._start_data_subscriber(agent_instance_id, stream_config)

        return plat_objs

    def _prepare_platform_attributes(self, pnode, platform_id):
        """
        Returns the list of PlatformMonitorAttributes objects corresponding to
        the attributes associated to the given platform.
""" # TODO complete the clean-up of this method ret_infos = dict((n, a.defn) for (n, a) in pnode.attrs.iteritems()) monitor_attribute_objs = [] monitor_attribute_dicts = [] for attrName, attrDfn in ret_infos.iteritems(): log.debug("platform_id=%r: preparing attribute=%r", platform_id, attrName) monitor_rate = attrDfn['monitorCycleSeconds'] units = attrDfn['units'] plat_attr_obj = IonObject(OT.PlatformMonitorAttributes, id=attrName, monitor_rate=monitor_rate, units=units) plat_attr_dict = dict(id=attrName, monitor_rate=monitor_rate, units=units) monitor_attribute_objs.append(plat_attr_obj) monitor_attribute_dicts.append(plat_attr_dict) return monitor_attribute_objs, monitor_attribute_dicts def _prepare_platform_ports(self, pnode, platform_id): """ Returns the list of PlatformPort objects corresponding to the ports associated to the given platform. """ # TODO complete the clean-up of this method port_objs = [] port_dicts = [] for port_id, network in pnode.ports.iteritems(): log.debug("platform_id=%r: preparing port=%r network=%s", platform_id, port_id, network) # # Note: the name "IP" address has been changed to "network" address # in the CI-OMS interface spec. 
# plat_port_obj = IonObject(OT.PlatformPort, port_id=port_id, ip_address=network) plat_port_dict = dict(port_id=port_id, network=network) port_objs.append(plat_port_obj) port_dicts.append(plat_port_dict) return port_objs, port_dicts def _create_stream_config(self, plat_objs): platform_id = plat_objs['platform_id'] device_id = plat_objs['device_id'] #create the log data product self.dp_obj.name = '%s platform_eng data' % platform_id self.data_product_id = self.dpclient.create_data_product( data_product=self.dp_obj, stream_definition_id=self.platform_eng_stream_def_id) self.damsclient.assign_data_product( input_resource_id=self.device_id, data_product_id=self.data_product_id) # Retrieve the id of the OUTPUT stream from the out Data Product stream_ids, _ = self.rrclient.find_objects(self.data_product_id, PRED.hasStream, None, True) stream_config = self._build_stream_config(stream_ids[0]) return stream_config def _build_stream_config(self, stream_id=''): platform_eng_dictionary = DatasetManagementService.get_parameter_dictionary_by_name( 'platform_eng_parsed') #get the streamroute object from pubsub by passing the stream_id stream_def_ids, _ = self.rrclient.find_objects( stream_id, PRED.hasStreamDefinition, RT.StreamDefinition, True) stream_route = self.pubsubcli.read_stream_route(stream_id=stream_id) stream_config = { 'routing_key': stream_route.routing_key, 'stream_id': stream_id, 'stream_definition_ref': stream_def_ids[0], 'exchange_point': stream_route.exchange_point, 'parameter_dictionary': platform_eng_dictionary.dump() } return stream_config def _set_platform_agent_instances(self): """ Once most of the objs/defs associated with all platforms are in place, this method creates and associates the PlatformAgentInstance elements. 
""" self.platform_configs = {} for platform_id, plat_objs in self.all_platforms.iteritems(): PLATFORM_CONFIG = { 'platform_id': platform_id, 'agent_streamconfig_map': None, #self.agent_streamconfig_map, 'driver_config': DVR_CONFIG, 'network_definition': self._network_definition_ser } self.platform_configs[platform_id] = { 'platform_id': platform_id, 'agent_streamconfig_map': self.agent_streamconfig_map, 'driver_config': DVR_CONFIG, 'network_definition': self._network_definition_ser } agent_config = { 'platform_config': PLATFORM_CONFIG, } self.stream_id = self.agent_streamconfig_map[platform_id][ 'stream_id'] # import pprint # print '============== platform id within unit test: %s ===========' % platform_id # pprint.pprint(agent_config) #agent_config['platform_config']['agent_streamconfig_map'] = None agent_instance_obj = IonObject( RT.PlatformAgentInstance, name='%s_PlatformAgentInstance' % platform_id, description="%s_PlatformAgentInstance" % platform_id, agent_config=agent_config) agent_id = plat_objs['agent_id'] device_id = plat_objs['device_id'] agent_instance_id = self.imsclient.create_platform_agent_instance( agent_instance_obj, agent_id, self.device_id) plat_objs['agent_instance_obj'] = agent_instance_obj plat_objs['agent_instance_id'] = agent_instance_id stream_config = self.agent_streamconfig_map[platform_id] self._start_data_subscriber(agent_instance_id, stream_config) def _start_data_subscriber(self, stream_name, stream_config): """ Starts data subscriber for the given stream_name and stream_config """ def consume_data(message, stream_route, stream_id): # A callback for processing subscribed-to data. 
log.info('Subscriber received data message: %s.', str(message)) self._samples_received.append(message) self._async_data_result.set() log.info('_start_data_subscriber stream_name=%r', stream_name) stream_id = self.stream_id #stream_config['stream_id'] # Create subscription for the stream exchange_name = '%s_queue' % stream_name self.container.ex_manager.create_xn_queue(exchange_name).purge() sub = StandaloneStreamSubscriber(exchange_name, consume_data) sub.start() self._data_subscribers.append(sub) sub_id = self.pubsubcli.create_subscription(name=exchange_name, stream_ids=[stream_id]) self.pubsubcli.activate_subscription(sub_id) sub.subscription_id = sub_id def _stop_data_subscribers(self): """ Stop the data subscribers on cleanup. """ try: for sub in self._data_subscribers: if hasattr(sub, 'subscription_id'): try: self.pubsubcli.deactivate_subscription( sub.subscription_id) except: pass self.pubsubcli.delete_subscription(sub.subscription_id) sub.stop() finally: self._data_subscribers = [] def _start_event_subscriber(self, event_type="DeviceEvent", sub_type="platform_event"): """ Starts event subscriber for events of given event_type ("DeviceEvent" by default) and given sub_type ("platform_event" by default). """ def consume_event(evt, *args, **kwargs): # A callback for consuming events. log.info('Event subscriber received evt: %s.', str(evt)) self._events_received.append(evt) self._async_event_result.set(evt) sub = EventSubscriber(event_type=event_type, sub_type=sub_type, callback=consume_event) sub.start() log.info("registered event subscriber for event_type=%r, sub_type=%r", event_type, sub_type) self._event_subscribers.append(sub) sub._ready_event.wait(timeout=EVENT_TIMEOUT) def _stop_event_subscribers(self): """ Stops the event subscribers on cleanup. 
""" try: for sub in self._event_subscribers: if hasattr(sub, 'subscription_id'): try: self.pubsubcli.deactivate_subscription( sub.subscription_id) except: pass self.pubsubcli.delete_subscription(sub.subscription_id) sub.stop() finally: self._event_subscribers = [] @skip("IMS does't net implement topology") def test_hierarchy(self): self._create_launch_verify(BASE_PLATFORM_ID) @skip("Needs alignment with recent IMS changes") def test_single_platform(self): self._create_launch_verify('LJ01D') def _create_launch_verify(self, base_platform_id): # and trigger the traversal of the branch rooted at that base platform # to create corresponding ION objects and configuration dictionaries: pnode = self._network_definition.pnodes[base_platform_id] base_platform_objs = self._traverse(pnode, base_platform_id) # now that most of the topology information is there, add the # PlatformAgentInstance elements self._set_platform_agent_instances() base_platform_config = self.platform_configs[base_platform_id] log.info("base_platform_id = %r", base_platform_id) #------------------------------------------------------------------------------------- # Create Data Process Definition and Data Process for the eng stream monitor process #------------------------------------------------------------------------------------- dpd_obj = IonObject( RT.DataProcessDefinition, name='DemoStreamAlertTransform', description='For testing EventTriggeredTransform_B', module='ion.processes.data.transforms.event_alert_transform', class_name='DemoStreamAlertTransform') self.platform_dprocdef_id = self.dataprocessclient.create_data_process_definition( dpd_obj) #THERE SHOULD BE NO STREAMDEF REQUIRED HERE. 
platform_streamdef_id = self.pubsubcli.create_stream_definition( name='platform_eng_parsed', parameter_dictionary_id=self.pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition( platform_streamdef_id, self.platform_dprocdef_id, binding='output') config = { 'process': { 'timer_interval': 5, 'queue_name': 'a_queue', 'variable_name': 'input_voltage', 'time_field_name': 'preferred_timestamp', 'valid_values': [-100, 100], 'timer_origin': 'Interval Timer' } } platform_data_process_id = self.dataprocessclient.create_data_process( self.platform_dprocdef_id, [self.data_product_id], {}, config) self.dataprocessclient.activate_data_process(platform_data_process_id) self.addCleanup(self.dataprocessclient.delete_data_process, platform_data_process_id) #------------------------------- # Launch Base Platform AgentInstance, connect to the resource agent client #------------------------------- agent_instance_id = base_platform_objs['agent_instance_id'] log.debug( "about to call imsclient.start_platform_agent_instance with id=%s", agent_instance_id) pid = self.imsclient.start_platform_agent_instance( platform_agent_instance_id=agent_instance_id) log.debug("start_platform_agent_instance returned pid=%s", pid) #wait for start instance_obj = self.imsclient.read_platform_agent_instance( agent_instance_id) gate = ProcessStateGate(self.processdispatchclient.read_process, instance_obj.agent_process_id, ProcessStateEnum.RUNNING) self.assertTrue( gate. await (90), "The platform agent instance did not spawn in 90 seconds") agent_instance_obj = self.imsclient.read_instrument_agent_instance( agent_instance_id) log.debug( 'test_oms_create_and_launch: Platform agent instance obj: %s', str(agent_instance_obj)) # Start a resource agent client to talk with the instrument agent. 
self._pa_client = ResourceAgentClient( 'paclient', name=agent_instance_obj.agent_process_id, process=FakeProcess()) log.debug(" test_oms_create_and_launch:: got pa client %s", str(self._pa_client)) log.debug("base_platform_config =\n%s", base_platform_config) # ping_agent can be issued before INITIALIZE retval = self._pa_client.ping_agent(timeout=TIMEOUT) log.debug('Base Platform ping_agent = %s', str(retval)) # issue INITIALIZE command to the base platform, which will launch the # creation of the whole platform hierarchy rooted at base_platform_config['platform_id'] # cmd = AgentCommand(command=PlatformAgentEvent.INITIALIZE, kwargs=dict(plat_config=base_platform_config)) cmd = AgentCommand(command=PlatformAgentEvent.INITIALIZE) retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT) log.debug('Base Platform INITIALIZE = %s', str(retval)) # GO_ACTIVE cmd = AgentCommand(command=PlatformAgentEvent.GO_ACTIVE) retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT) log.debug('Base Platform GO_ACTIVE = %s', str(retval)) # RUN: cmd = AgentCommand(command=PlatformAgentEvent.RUN) retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT) log.debug('Base Platform RUN = %s', str(retval)) # START_MONITORING: cmd = AgentCommand(command=PlatformAgentEvent.START_MONITORING) retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT) log.debug('Base Platform START_MONITORING = %s', str(retval)) # wait for data sample # just wait for at least one -- see consume_data above log.info("waiting for reception of a data sample...") self._async_data_result.get(timeout=DATA_TIMEOUT) self.assertTrue(len(self._samples_received) >= 1) log.info("waiting a bit more for reception of more data samples...") sleep(15) log.info("Got data samples: %d", len(self._samples_received)) # wait for event # just wait for at least one event -- see consume_event above log.info("waiting for reception of an event...") self._async_event_result.get(timeout=EVENT_TIMEOUT) log.info("Received events: 
%s", len(self._events_received)) #get the extended platfrom which wil include platform aggreate status fields extended_platform = self.imsclient.get_platform_device_extension( self.device_id) # log.debug( 'test_single_platform extended_platform: %s', str(extended_platform) ) # log.debug( 'test_single_platform power_status_roll_up: %s', str(extended_platform.computed.power_status_roll_up.value) ) # log.debug( 'test_single_platform comms_status_roll_up: %s', str(extended_platform.computed.communications_status_roll_up.value) ) # STOP_MONITORING: cmd = AgentCommand(command=PlatformAgentEvent.STOP_MONITORING) retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT) log.debug('Base Platform STOP_MONITORING = %s', str(retval)) # GO_INACTIVE cmd = AgentCommand(command=PlatformAgentEvent.GO_INACTIVE) retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT) log.debug('Base Platform GO_INACTIVE = %s', str(retval)) # RESET: Resets the base platform agent, which includes termination of # its sub-platforms processes: cmd = AgentCommand(command=PlatformAgentEvent.RESET) retval = self._pa_client.execute_agent(cmd, timeout=TIMEOUT) log.debug('Base Platform RESET = %s', str(retval)) #------------------------------- # Stop Base Platform AgentInstance #------------------------------- self.imsclient.stop_platform_agent_instance( platform_agent_instance_id=agent_instance_id)
class IngestionManagementIntTest(IonIntegrationTestCase):
    """
    Integration tests for the ingestion-management service: CRUD on ingestion
    configurations and cleanup of their subscriptions.
    """

    def setUp(self):
        # Start the capability container and deploy the r2 service set.
        self._start_container()
        self.container.start_rel_from_url("res/deploy/r2deploy.yml")

        self.ingestion_management = IngestionManagementServiceClient()
        self.resource_registry = ResourceRegistryServiceClient()
        self.pubsub_management = PubsubManagementServiceClient()
        # Fixture values shared by the tests below.
        self.ingest_name = "basic"
        self.exchange = "testdata"

    @staticmethod
    def clean_subscriptions():
        """Best-effort teardown of every subscription attached to any ingestion config."""
        ingestion_management = IngestionManagementServiceClient()
        pubsub = PubsubManagementServiceClient()
        rr = ResourceRegistryServiceClient()
        ingestion_config_ids = ingestion_management.list_ingestion_configurations(id_only=True)
        for ic in ingestion_config_ids:
            assocs = rr.find_associations(subject=ic, predicate=PRED.hasSubscription, id_only=False)
            for assoc in assocs:
                rr.delete_association(assoc)
                try:
                    # Deliberately best-effort: the subscription may already be
                    # inactive. Narrowed from a bare `except:` so that
                    # KeyboardInterrupt/SystemExit still propagate.
                    pubsub.deactivate_subscription(assoc.o)
                except Exception:
                    pass
                pubsub.delete_subscription(assoc.o)

    def create_ingest_config(self):
        """Create and return the id of a basic ingestion configuration."""
        self.queue = IngestionQueue(name="test", type="testdata")

        # Create the ingestion config
        ingestion_config_id = self.ingestion_management.create_ingestion_configuration(
            name=self.ingest_name,
            exchange_point_id=self.exchange,
            queues=[self.queue]
        )
        return ingestion_config_id

    def test_ingestion_config_crud(self):
        """Create/read/update/delete an ingestion configuration, including
        cascade-deletion of its subscription associations."""
        ingestion_config_id = self.create_ingest_config()

        ingestion_config = self.ingestion_management.read_ingestion_configuration(ingestion_config_id)
        # assertEqual instead of assertTrue(a == b): reports both values on failure.
        self.assertEqual(ingestion_config.name, self.ingest_name)
        self.assertEqual(ingestion_config.queues[0].name, "test")
        self.assertEqual(ingestion_config.queues[0].type, "testdata")

        ingestion_config.name = "another"
        self.ingestion_management.update_ingestion_configuration(ingestion_config)

        # Create an association just to make sure that it will delete them
        sub = Subscription()
        sub_id, _ = self.resource_registry.create(sub)
        assoc_id, _ = self.resource_registry.create_association(
            subject=ingestion_config_id, predicate=PRED.hasSubscription, object=sub_id)

        self.ingestion_management.delete_ingestion_configuration(ingestion_config_id)

        # Deleting the config must also remove the hasSubscription association.
        with self.assertRaises(NotFound):
            self.resource_registry.read(assoc_id)

    def test_list_ingestion(self):
        """The listing must include a freshly-created configuration."""
        # Create the ingest_config
        config_id = self.create_ingest_config()
        retval = self.ingestion_management.list_ingestion_configurations(id_only=True)

        # Nice thing about this is that it breaks if r2dm adds an ingest_config
        self.assertIn(config_id, retval)
class TestInstrumentManagementServiceIntegration(IonIntegrationTestCase):
    """
    Integration test exercising the IMS resource/association graph and the
    extended-instrument view built from it.
    """

    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        #container = Container()
        #print 'starting container'
        #container.start()
        #print 'started container'
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Service clients: resource registry, instrument mgmt, identity mgmt.
        self.RR = ResourceRegistryServiceClient(node=self.container.node)
        self.IMS = InstrumentManagementServiceClient(node=self.container.node)
        self.IDS = IdentityManagementServiceClient(node=self.container.node)

        print 'started services'

    # @unittest.skip('this test just for debugging setup')
    # def test_just_the_setup(self):
    #     return

    @attr('EXT')
    def test_resources_associations_extensions(self):
        """
        create one of each resource and association used by IMS
        to guard against problems in ion-definitions
        """

        #stuff we control
        # NOTE(review): InstrumentAgentInstance creation is commented out, and
        # so are the associations that would reference it below -- confirm
        # this pairing is intentional before re-enabling either.
        # instrument_agent_instance_id, _ = self.RR.create(any_old(RT.InstrumentAgentInstance))
        instrument_agent_id, _ = self.RR.create(any_old(RT.InstrumentAgent))
        instrument_device_id, _ = self.RR.create(any_old(RT.InstrumentDevice))
        instrument_model_id, _ = self.RR.create(any_old(RT.InstrumentModel))
        platform_agent_instance_id, _ = self.RR.create(any_old(RT.PlatformAgentInstance))
        platform_agent_id, _ = self.RR.create(any_old(RT.PlatformAgent))
        platform_device_id, _ = self.RR.create(any_old(RT.PlatformDevice))
        platform_model_id, _ = self.RR.create(any_old(RT.PlatformModel))
        sensor_device_id, _ = self.RR.create(any_old(RT.SensorDevice))
        sensor_model_id, _ = self.RR.create(any_old(RT.SensorModel))

        #stuff we associate to
        data_producer_id, _ = self.RR.create(any_old(RT.DataProducer))

        #instrument_agent_instance_id #is only a target

        #instrument_agent
        self.RR.create_association(instrument_agent_id, PRED.hasModel, instrument_model_id)
        # self.RR.create_association(instrument_agent_instance_id, PRED.hasAgentDefinition, instrument_agent_id)

        #instrument_device
        self.RR.create_association(instrument_device_id, PRED.hasModel, instrument_model_id)
        # self.RR.create_association(instrument_device_id, PRED.hasAgentInstance, instrument_agent_instance_id)
        self.RR.create_association(instrument_device_id, PRED.hasDataProducer, data_producer_id)
        self.RR.create_association(instrument_device_id, PRED.hasDevice, sensor_device_id)

        instrument_model_id #is only a target

        platform_agent_instance_id #is only a target

        #platform_agent
        self.RR.create_association(platform_agent_id, PRED.hasModel, platform_model_id)
        self.RR.create_association(platform_agent_instance_id, PRED.hasAgentDefinition, platform_agent_id)

        #platform_device
        self.RR.create_association(platform_device_id, PRED.hasModel, platform_model_id)
        self.RR.create_association(platform_device_id, PRED.hasAgentInstance, platform_agent_instance_id)
        self.RR.create_association(platform_device_id, PRED.hasDevice, instrument_device_id)

        platform_model_id #is only a target

        #sensor_device
        self.RR.create_association(sensor_device_id, PRED.hasModel, sensor_model_id)
        self.RR.create_association(sensor_device_id, PRED.hasDevice, instrument_device_id)

        sensor_model_id #is only a target

        def addInstOwner(inst_id, subject):
            # Create an actor identity + user info and attach it as an owner
            # of the given instrument resource.
            actor_identity_obj = any_old(RT.ActorIdentity, {"name": subject})
            user_id = self.IDS.create_actor_identity(actor_identity_obj)
            user_info_obj = any_old(RT.UserInfo)
            user_info_id = self.IDS.create_user_info(user_id, user_info_obj)
            self.RR.create_association(inst_id, PRED.hasOwner, user_id)

        #Testing multiple instrument owners
        addInstOwner(instrument_device_id,
                     "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254")
        addInstOwner(instrument_device_id,
                     "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Bob Cumbers A256")

        #testing data products
        dp_id, _ = self.RR.create(any_old(RT.DataProduct))
        self.RR.create_association(instrument_device_id, PRED.hasOutputProduct, dp_id)

        # Build the extended view and verify it reflects the graph above.
        extended_instrument = self.IMS.get_instrument_device_extension(instrument_device_id)
        self.assertEqual(instrument_device_id, extended_instrument._id)
        self.assertEqual(len(extended_instrument.owners), 2)
        self.assertEqual(extended_instrument.instrument_model._id, instrument_model_id)

        #check data products
        self.assertEqual(1, len(extended_instrument.data_products))

        #check model
        inst_model_obj = self.RR.read(instrument_model_id)
        self.assertEqual(inst_model_obj.name, extended_instrument.instrument_model.name)

        #check agent
        inst_agent_obj = self.RR.read(instrument_agent_id)
        self.assertEqual(inst_agent_obj.name, extended_instrument.instrument_agent.name)

        #check platform device
        plat_device_obj = self.RR.read(platform_device_id)
        self.assertEqual(plat_device_obj.name, extended_instrument.platform_device.name)

        #check sensor devices
        self.assertEqual(1, len(extended_instrument.sensor_devices))

        # Computed attributes: expected values appear to come from a mock/fake
        # agent backing the extension -- NOTE(review): confirm the fixture that
        # supplies "1.1", "42", etc.
        self.assertEqual("1.1", extended_instrument.computed.firmware_version)
        self.assertEqual("42", extended_instrument.computed.last_data_received_time)
        self.assertEqual("23", extended_instrument.computed.operational_state)
        self.assertEqual("34", extended_instrument.computed.last_command_status)
        self.assertEqual("45", extended_instrument.computed.last_command_date)
        self.assertEqual("56", extended_instrument.computed.last_command)
        self.assertEqual("67", extended_instrument.computed.last_commanded_by)
        self.assertEqual("78", extended_instrument.computed.power_status_roll_up)
        self.assertEqual("89", extended_instrument.computed.communications_status_roll_up)
        self.assertEqual("98", extended_instrument.computed.data_status_roll_up)
        self.assertEqual("87", extended_instrument.computed.location_status_roll_up)
        self.assertEqual(['mon', 'tue', 'wed'], extended_instrument.computed.recent_events)
class TestInstrumentManagementServiceIntegration(IonIntegrationTestCase): def setUp(self): # Start container #print 'instantiating container' self._start_container() #container = Container() #print 'starting container' #container.start() #print 'started container' self.container.start_rel_from_url('res/deploy/r2deploy.yml') self.RR = ResourceRegistryServiceClient(node=self.container.node) self.IMS = InstrumentManagementServiceClient(node=self.container.node) self.IDS = IdentityManagementServiceClient(node=self.container.node) self.PSC = PubsubManagementServiceClient(node=self.container.node) self.DP = DataProductManagementServiceClient(node=self.container.node) self.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node) self.DSC = DatasetManagementServiceClient(node=self.container.node) self.PDC = ProcessDispatcherServiceClient(node=self.container.node) self.RR2 = EnhancedResourceRegistryClient(self.RR) # @unittest.skip('this test just for debugging setup') # def test_just_the_setup(self): # return @attr('EXT') def test_resources_associations_extensions(self): """ create one of each resource and association used by IMS to guard against problems in ion-definitions """ #stuff we control instrument_agent_instance_id, _ = self.RR.create(any_old(RT.InstrumentAgentInstance)) instrument_agent_id, _ = self.RR.create(any_old(RT.InstrumentAgent)) instrument_model_id, _ = self.RR.create(any_old(RT.InstrumentModel)) instrument_device_id, _ = self.RR.create(any_old(RT.InstrumentDevice)) platform_agent_instance_id, _ = self.RR.create(any_old(RT.PlatformAgentInstance)) platform_agent_id, _ = self.RR.create(any_old(RT.PlatformAgent)) platform_device_id, _ = self.RR.create(any_old(RT.PlatformDevice)) platform_model_id, _ = self.RR.create(any_old(RT.PlatformModel)) sensor_device_id, _ = self.RR.create(any_old(RT.SensorDevice)) sensor_model_id, _ = self.RR.create(any_old(RT.SensorModel)) #stuff we associate to data_producer_id, _ = 
self.RR.create(any_old(RT.DataProducer)) org_id, _ = self.RR.create(any_old(RT.Org)) #instrument_agent_instance_id #is only a target #instrument_agent self.RR.create_association(instrument_agent_id, PRED.hasModel, instrument_model_id) self.RR.create_association(instrument_agent_instance_id, PRED.hasAgentDefinition, instrument_agent_id) #instrument_device self.RR.create_association(instrument_device_id, PRED.hasModel, instrument_model_id) self.RR.create_association(instrument_device_id, PRED.hasAgentInstance, instrument_agent_instance_id) self.RR.create_association(instrument_device_id, PRED.hasDataProducer, data_producer_id) self.RR.create_association(instrument_device_id, PRED.hasDevice, sensor_device_id) self.RR.create_association(org_id, PRED.hasResource, instrument_device_id) instrument_model_id #is only a target platform_agent_instance_id #is only a target #platform_agent self.RR.create_association(platform_agent_id, PRED.hasModel, platform_model_id) self.RR.create_association(platform_agent_instance_id, PRED.hasAgentDefinition, platform_agent_id) #platform_device self.RR.create_association(platform_device_id, PRED.hasModel, platform_model_id) self.RR.create_association(platform_device_id, PRED.hasAgentInstance, platform_agent_instance_id) self.RR.create_association(platform_device_id, PRED.hasDevice, instrument_device_id) platform_model_id #is only a target #sensor_device self.RR.create_association(sensor_device_id, PRED.hasModel, sensor_model_id) self.RR.create_association(sensor_device_id, PRED.hasDevice, instrument_device_id) sensor_model_id #is only a target #create a parsed product for this instrument output tdom, sdom = time_series_domain() tdom = tdom.dump() sdom = sdom.dump() dp_obj = IonObject(RT.DataProduct, name='the parsed data', description='ctd stream test', processing_level_code='Parsed_Canonical', temporal_domain = tdom, spatial_domain = sdom) pdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True) 
parsed_stream_def_id = self.PSC.create_stream_definition(name='parsed', parameter_dictionary_id=pdict_id) data_product_id1 = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=parsed_stream_def_id) log.debug( 'new dp_id = %s', data_product_id1) self.DAMS.assign_data_product(input_resource_id=instrument_device_id, data_product_id=data_product_id1) def addInstOwner(inst_id, subject): actor_identity_obj = any_old(RT.ActorIdentity, {"name": subject}) user_id = self.IDS.create_actor_identity(actor_identity_obj) user_info_obj = any_old(RT.UserInfo) user_info_id = self.IDS.create_user_info(user_id, user_info_obj) self.RR.create_association(inst_id, PRED.hasOwner, user_id) #Testing multiple instrument owners addInstOwner(instrument_device_id, "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254") addInstOwner(instrument_device_id, "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Bob Cumbers A256") extended_instrument = self.IMS.get_instrument_device_extension(instrument_device_id) self.assertEqual(instrument_device_id, extended_instrument._id) self.assertEqual(len(extended_instrument.owners), 2) self.assertEqual(extended_instrument.instrument_model._id, instrument_model_id) # Lifecycle self.assertEquals(len(extended_instrument.lcstate_transitions), 7) self.assertEquals(set(extended_instrument.lcstate_transitions.keys()), set(['enable', 'develop', 'deploy', 'retire', 'plan', 'integrate', 'announce'])) # Verify that computed attributes exist for the extended instrument self.assertIsInstance(extended_instrument.computed.firmware_version, ComputedFloatValue) self.assertIsInstance(extended_instrument.computed.last_data_received_datetime, ComputedFloatValue) self.assertIsInstance(extended_instrument.computed.last_calibration_datetime, ComputedFloatValue) self.assertIsInstance(extended_instrument.computed.uptime, ComputedStringValue) self.assertIsInstance(extended_instrument.computed.power_status_roll_up, ComputedIntValue) 
self.assertIsInstance(extended_instrument.computed.communications_status_roll_up, ComputedIntValue) self.assertIsInstance(extended_instrument.computed.data_status_roll_up, ComputedIntValue) self.assertIsInstance(extended_instrument.computed.location_status_roll_up, ComputedIntValue) log.debug("extended_instrument.computed: %s", extended_instrument.computed) #check model inst_model_obj = self.RR.read(instrument_model_id) self.assertEqual(inst_model_obj.name, extended_instrument.instrument_model.name) #check agent instance inst_agent_instance_obj = self.RR.read(instrument_agent_instance_id) self.assertEqual(inst_agent_instance_obj.name, extended_instrument.agent_instance.name) #check agent inst_agent_obj = self.RR.read(instrument_agent_id) #compound assoc return list of lists so check the first element self.assertEqual(inst_agent_obj.name, extended_instrument.instrument_agent.name) #check platform device plat_device_obj = self.RR.read(platform_device_id) self.assertEqual(plat_device_obj.name, extended_instrument.platform_device.name) extended_platform = self.IMS.get_platform_device_extension(platform_device_id) self.assertEqual(1, len(extended_platform.instrument_devices)) self.assertEqual(instrument_device_id, extended_platform.instrument_devices[0]._id) self.assertEqual(1, len(extended_platform.instrument_models)) self.assertEqual(instrument_model_id, extended_platform.instrument_models[0]._id) self.assertEquals(extended_platform.platform_agent._id, platform_agent_id) self.assertEquals(len(extended_platform.lcstate_transitions), 7) self.assertEquals(set(extended_platform.lcstate_transitions.keys()), set(['enable', 'develop', 'deploy', 'retire', 'plan', 'integrate', 'announce'])) #check sensor devices self.assertEqual(1, len(extended_instrument.sensor_devices)) #check data_product_parameters_set self.assertEqual(ComputedValueAvailability.PROVIDED, extended_instrument.computed.data_product_parameters_set.status) self.assertTrue( 'Parsed_Canonical' in 
extended_instrument.computed.data_product_parameters_set.value) # the ctd parameters should include 'temp' self.assertTrue( 'temp' in extended_instrument.computed.data_product_parameters_set.value['Parsed_Canonical']) #none of these will work because there is no agent self.assertEqual(ComputedValueAvailability.NOTAVAILABLE, extended_instrument.computed.firmware_version.status) # self.assertEqual(ComputedValueAvailability.NOTAVAILABLE, # extended_instrument.computed.operational_state.status) # self.assertEqual(ComputedValueAvailability.PROVIDED, # extended_instrument.computed.power_status_roll_up.status) # self.assertEqual(ComputedValueAvailability.PROVIDED, # extended_instrument.computed.communications_status_roll_up.status) # self.assertEqual(ComputedValueAvailability.PROVIDED, # extended_instrument.computed.data_status_roll_up.status) # self.assertEqual(StatusType.STATUS_OK, # extended_instrument.computed.data_status_roll_up.value) # self.assertEqual(ComputedValueAvailability.PROVIDED, # extended_instrument.computed.location_status_roll_up.status) # self.assertEqual(ComputedValueAvailability.PROVIDED, # extended_instrument.computed.recent_events.status) # self.assertEqual([], extended_instrument.computed.recent_events.value) # cleanup c = DotDict() c.resource_registry = self.RR self.RR2.pluck(instrument_agent_id) self.RR2.pluck(instrument_model_id) self.RR2.pluck(instrument_device_id) self.RR2.pluck(platform_agent_id) self.RR2.pluck(sensor_device_id) self.IMS.force_delete_instrument_agent(instrument_agent_id) self.IMS.force_delete_instrument_model(instrument_model_id) self.IMS.force_delete_instrument_device(instrument_device_id) self.IMS.force_delete_platform_agent_instance(platform_agent_instance_id) self.IMS.force_delete_platform_agent(platform_agent_id) self.IMS.force_delete_platform_device(platform_device_id) self.IMS.force_delete_platform_model(platform_model_id) self.IMS.force_delete_sensor_device(sensor_device_id) 
        # (tail of the preceding test's cleanup — that test's def is above this chunk)
        self.IMS.force_delete_sensor_model(sensor_model_id)

        #stuff we associate to
        self.RR.delete(data_producer_id)
        self.RR.delete(org_id)

    def test_custom_attributes(self):
        """
        Test assignment of custom attributes

        Creates a model whose custom_attributes dict declares the allowed
        attribute names, and a device that supplies one declared attribute
        plus one undeclared ("bogus_attr") expected to raise a warning when
        the model is assigned to the device.
        """
        instrument_model_id, _ = self.RR.create(any_old(RT.InstrumentModel,
                {"custom_attributes": {"favorite_color": "attr desc goes here"}}))
        instrument_device_id, _ = self.RR.create(any_old(RT.InstrumentDevice,
                {"custom_attributes": {"favorite_color": "red",
                                       "bogus_attr": "should raise warning"}}))

        # assignment triggers validation of device attrs against the model's declared set
        self.IMS.assign_instrument_model_to_instrument_device(instrument_model_id, instrument_device_id)

        # cleanup
        self.IMS.force_delete_instrument_device(instrument_device_id)
        self.IMS.force_delete_instrument_model(instrument_model_id)

    def _get_datastore(self, dataset_id):
        """Return the science-data datastore backing the given dataset id."""
        dataset = self.DSC.read_dataset(dataset_id)
        datastore_name = dataset.datastore_name
        datastore = self.container.datastore_manager.get_datastore(datastore_name, DataStore.DS_PROFILE.SCIDATA)
        return datastore

    def test_resource_state_save_restore(self):
        """
        Exercise save_resource_state / restore_resource_state on an instrument:
        build model + agent + device + agent instance with raw and parsed data
        products, launch the agent, snapshot the config as an attachment,
        corrupt the live config, then restore and verify the corruption is gone.
        """
        # Create InstrumentModel
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='SBE37IMModel',
                                  description="SBE37IMModel")
        instModel_id = self.IMS.create_instrument_model(instModel_obj)
        log.debug('new InstrumentModel id = %s ', instModel_id)

        # Create InstrumentAgent with one raw and one parsed stream configuration
        raw_config = StreamConfiguration(stream_name='raw',
                                         parameter_dictionary_name='ctd_raw_param_dict',
                                         records_per_granule=2,
                                         granule_publish_rate=5)
        parsed_config = StreamConfiguration(stream_name='parsed',
                                            parameter_dictionary_name='ctd_parsed_param_dict',
                                            records_per_granule=2,
                                            granule_publish_rate=5)
        instAgent_obj = IonObject(RT.InstrumentAgent,
                                  name='agent007',
                                  description="SBE37IMAgent",
                                  driver_uri="http://sddevrepo.oceanobservatories.org/releases/seabird_sbe37smb_ooicore-0.0.1-py2.7.egg",
                                  stream_configurations=[raw_config, parsed_config])
        instAgent_id = self.IMS.create_instrument_agent(instAgent_obj)
        log.debug('new InstrumentAgent id = %s', instAgent_id)

        self.IMS.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

        # Create InstrumentDevice
        log.debug('test_activateInstrumentSample: Create instrument resource to represent the SBE37 ' +
                  '(SA Req: L4-CI-SA-RQ-241) ')
        instDevice_obj = IonObject(RT.InstrumentDevice,
                                   name='SBE37IMDevice',
                                   description="SBE37IMDevice",
                                   serial_number="12345")
        instDevice_id = self.IMS.create_instrument_device(instrument_device=instDevice_obj)
        self.IMS.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)

        log.debug("test_activateInstrumentSample: new InstrumentDevice id = %s (SA Req: L4-CI-SA-RQ-241) ", instDevice_id)

        # port agent connection settings come from the container CFG (sbe37 simulator)
        port_agent_config = {
            'device_addr': CFG.device.sbe37.host,
            'device_port': CFG.device.sbe37.port,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': CFG.device.sbe37.port_agent_cmd_port,
            'data_port': CFG.device.sbe37.port_agent_data_port,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance,
                                          name='SBE37IMAgentInstance',
                                          description="SBE37IMAgentInstance",
                                          port_agent_config=port_agent_config)
        instAgentInstance_id = self.IMS.create_instrument_agent_instance(instAgentInstance_obj,
                                                                         instAgent_id,
                                                                         instDevice_id)

        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        spdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        parsed_stream_def_id = self.PSC.create_stream_definition(name='parsed', parameter_dictionary_id=spdict_id)

        rpdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_raw_param_dict', id_only=True)
        raw_stream_def_id = self.PSC.create_stream_definition(name='raw', parameter_dictionary_id=rpdict_id)

        #-------------------------------
        # Create Raw and Parsed Data Products for the device
        #-------------------------------

        dp_obj = IonObject(RT.DataProduct,
                           name='the parsed data',
                           description='ctd stream test',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        data_product_id1 = self.DP.create_data_product(data_product=dp_obj,
                                                       stream_definition_id=parsed_stream_def_id)
        log.debug('new dp_id = %s', data_product_id1)

        self.DAMS.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id1)
        self.DP.activate_data_product_persistence(data_product_id=data_product_id1)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.RR.find_objects(data_product_id1, PRED.hasStream, None, True)
        log.debug('Data product streams1 = %s', stream_ids)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        dataset_ids, _ = self.RR.find_objects(data_product_id1, PRED.hasDataset, RT.Dataset, True)
        log.debug('Data set for data_product_id1 = %s', dataset_ids[0])
        self.parsed_dataset = dataset_ids[0]
        #create the datastore at the beginning of each int test that persists data

        dp_obj = IonObject(RT.DataProduct,
                           name='the raw data',
                           description='raw stream test',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        data_product_id2 = self.DP.create_data_product(data_product=dp_obj,
                                                       stream_definition_id=raw_stream_def_id)
        log.debug('new dp_id = %s', str(data_product_id2))

        self.DAMS.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id2)
        self.DP.activate_data_product_persistence(data_product_id=data_product_id2)

        # spin up agent
        self.IMS.start_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id)
        self.addCleanup(self.IMS.stop_instrument_agent_instance,
                        instrument_agent_instance_id=instAgentInstance_id)

        #wait for start
        instance_obj = self.IMS.read_instrument_agent_instance(instAgentInstance_id)
        gate = ProcessStateGate(self.PDC.read_process,
                                instance_obj.agent_process_id,
                                ProcessStateEnum.RUNNING)
        self.assertTrue(gate.await(30),
                        "The instrument agent instance (%s) did not spawn in 30 seconds" %
                        instance_obj.agent_process_id)

        # take snapshot of config; the snapshot is stored as an attachment on the device
        snap_id = self.IMS.save_resource_state(instDevice_id, "xyzzy snapshot")
        snap_obj = self.RR.read_attachment(snap_id, include_content=True)
        print "Saved config:"
        print snap_obj.content

        #modify config
        instance_obj.driver_config["comms_config"] = "BAD_DATA"
        self.RR.update(instance_obj)

        #restore config — should overwrite the BAD_DATA we just injected
        self.IMS.restore_resource_state(instDevice_id, snap_id)
        instance_obj = self.RR.read(instAgentInstance_id)
        self.assertNotEqual("BAD_DATA", instance_obj.driver_config["comms_config"])

        self.DP.delete_data_product(data_product_id1)
        self.DP.delete_data_product(data_product_id2)

    def test_agent_instance_config(self):
        """
        Verify that agent configurations are being built properly

        Builds platform and instrument agent/device structures, then uses the
        platform and instrument configuration builders to prepare (but not
        launch) agent configs, checking the config dict contents at each
        nesting level: child alone, parent alone, parent+child, instrument
        alone, and the full parent+child+instrument tree.
        """
        clients = DotDict()
        clients.resource_registry = self.RR
        clients.pubsub_management = self.PSC
        clients.dataset_management = self.DSC
        pconfig_builder = PlatformAgentConfigurationBuilder(clients)
        iconfig_builder = InstrumentAgentConfigurationBuilder(clients)

        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        org_obj = any_old(RT.Org)
        org_id = self.RR2.create(org_obj)

        inst_startup_config = {'startup': 'config'}

        generic_alerts_config = {'lvl1': {'lvl2': 'lvl3val'}}

        # every prepared agent config must carry all of these keys
        required_config_keys = [
            'org_name',
            'device_type',
            'agent',
            'driver_config',
            'stream_config',
            'startup_config',
            'aparam_alert_config',
            'children']

        def verify_instrument_config(config, device_id):
            # check an instrument-level config dict prepared by iconfig_builder
            for key in required_config_keys:
                self.assertIn(key, config)
            self.assertEqual(org_obj.name, config['org_name'])
            self.assertEqual(RT.InstrumentDevice, config['device_type'])
            self.assertIn('driver_config', config)
            driver_config = config['driver_config']
            expected_driver_fields = {'process_type': ('ZMQPyClassDriverLauncher',),
                                      }
            for k, v in expected_driver_fields.iteritems():
                self.assertIn(k, driver_config)
                self.assertEqual(v, driver_config[k])
            # NOTE(review): stray bare reference below is a no-op (missing call args?) — leftover, has no effect
            self.assertEqual

            self.assertEqual({'resource_id': device_id}, config['agent'])
            self.assertEqual(inst_startup_config, config['startup_config'])
            self.assertIn('aparam_alert_config', config)
            self.assertEqual(generic_alerts_config, config['aparam_alert_config'])
            self.assertIn('stream_config', config)
            for key in ['children']:
                self.assertEqual({}, config[key])

        def verify_child_config(config, device_id, inst_device_id=None):
            # check a platform config; if inst_device_id given, also verify its nested instrument config
            for key in required_config_keys:
                self.assertIn(key, config)
            self.assertEqual(org_obj.name, config['org_name'])
            self.assertEqual(RT.PlatformDevice, config['device_type'])
            self.assertEqual({'resource_id': device_id}, config['agent'])
            self.assertIn('aparam_alert_config', config)
            self.assertEqual(generic_alerts_config, config['aparam_alert_config'])
            self.assertIn('stream_config', config)
            self.assertIn('driver_config', config)
            self.assertIn('foo', config['driver_config'])
            self.assertEqual('bar', config['driver_config']['foo'])
            self.assertIn('process_type', config['driver_config'])
            self.assertEqual(('ZMQPyClassDriverLauncher',), config['driver_config']['process_type'])

            if None is inst_device_id:
                for key in ['children', 'startup_config']:
                    self.assertEqual({}, config[key])
            else:
                for key in ['startup_config']:
                    self.assertEqual({}, config[key])
                self.assertIn(inst_device_id, config['children'])
                verify_instrument_config(config['children'][inst_device_id], inst_device_id)

        def verify_parent_config(config, parent_device_id, child_device_id, inst_device_id=None):
            # check a parent platform config and recurse into its child platform config
            for key in required_config_keys:
                self.assertIn(key, config)
            self.assertEqual(org_obj.name, config['org_name'])
            self.assertEqual(RT.PlatformDevice, config['device_type'])
            self.assertIn('process_type', config['driver_config'])
            self.assertEqual(('ZMQPyClassDriverLauncher',), config['driver_config']['process_type'])
            self.assertEqual({'resource_id': parent_device_id}, config['agent'])
            self.assertIn('aparam_alert_config', config)
            self.assertEqual(generic_alerts_config, config['aparam_alert_config'])
            self.assertIn('stream_config', config)
            for key in ['startup_config']:
                self.assertEqual({}, config[key])
            self.assertIn(child_device_id, config['children'])
            verify_child_config(config['children'][child_device_id], child_device_id, inst_device_id)

        rpdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_raw_param_dict', id_only=True)
        raw_stream_def_id = self.PSC.create_stream_definition(name='raw', parameter_dictionary_id=rpdict_id)
        #todo: create org and figure out which agent resource needs to get assigned to it

        def _make_platform_agent_structure(agent_config=None):
            # build agent instance + agent + device + data product, wire them together,
            # return (agent_instance_id, agent_id, device_id)
            if None is agent_config:
                agent_config = {}

            # instance creation
            platform_agent_instance_obj = any_old(RT.PlatformAgentInstance,
                                                  {'driver_config': {'foo': 'bar'},
                                                   'alerts': generic_alerts_config})
            platform_agent_instance_obj.agent_config = agent_config
            platform_agent_instance_id = self.IMS.create_platform_agent_instance(platform_agent_instance_obj)

            # agent creation
            raw_config = StreamConfiguration(stream_name='raw',
                                             parameter_dictionary_name='ctd_raw_param_dict',
                                             records_per_granule=2,
                                             granule_publish_rate=5)
            platform_agent_obj = any_old(RT.PlatformAgent, {"stream_configurations": [raw_config]})
            platform_agent_id = self.IMS.create_platform_agent(platform_agent_obj)

            # device creation
            platform_device_id = self.IMS.create_platform_device(any_old(RT.PlatformDevice))

            # data product creation
            dp_obj = any_old(RT.DataProduct, {"temporal_domain": tdom, "spatial_domain": sdom})
            dp_id = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=raw_stream_def_id)
            self.DAMS.assign_data_product(input_resource_id=platform_device_id, data_product_id=dp_id)
            self.DP.activate_data_product_persistence(data_product_id=dp_id)

            # assignments
            self.RR2.assign_platform_agent_instance_to_platform_device(platform_agent_instance_id, platform_device_id)
            self.RR2.assign_platform_agent_to_platform_agent_instance(platform_agent_id, platform_agent_instance_id)
            self.RR2.assign_platform_device_to_org_with_has_resource(platform_agent_instance_id, org_id)

            return platform_agent_instance_id, platform_agent_id, platform_device_id

        def _make_instrument_agent_structure(agent_config=None):
            # instrument analogue of _make_platform_agent_structure
            if None is agent_config:
                agent_config = {}

            # instance creation
            instrument_agent_instance_obj = any_old(RT.InstrumentAgentInstance,
                                                    {"startup_config": inst_startup_config,
                                                     'alerts': generic_alerts_config})
            instrument_agent_instance_obj.agent_config = agent_config
            instrument_agent_instance_id = self.IMS.create_instrument_agent_instance(instrument_agent_instance_obj)

            # agent creation
            raw_config = StreamConfiguration(stream_name='raw',
                                             parameter_dictionary_name='ctd_raw_param_dict',
                                             records_per_granule=2,
                                             granule_publish_rate=5)
            instrument_agent_obj = any_old(RT.InstrumentAgent, {"stream_configurations": [raw_config]})
            instrument_agent_id = self.IMS.create_instrument_agent(instrument_agent_obj)

            # device creation
            instrument_device_id = self.IMS.create_instrument_device(any_old(RT.InstrumentDevice))

            # data product creation
            dp_obj = any_old(RT.DataProduct, {"temporal_domain": tdom, "spatial_domain": sdom})
            dp_id = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=raw_stream_def_id)
            self.DAMS.assign_data_product(input_resource_id=instrument_device_id, data_product_id=dp_id)
            self.DP.activate_data_product_persistence(data_product_id=dp_id)

            # assignments
            self.RR2.assign_instrument_agent_instance_to_instrument_device(instrument_agent_instance_id, instrument_device_id)
            self.RR2.assign_instrument_agent_to_instrument_agent_instance(instrument_agent_id, instrument_agent_instance_id)
            self.RR2.assign_instrument_device_to_org_with_has_resource(instrument_agent_instance_id, org_id)

            return instrument_agent_instance_id, instrument_agent_id, instrument_device_id

        # can't do anything without an agent instance obj
        log.debug("Testing that preparing a launcher without agent instance raises an error")
        self.assertRaises(AssertionError, pconfig_builder.prepare, will_launch=False)

        log.debug("Making the structure for a platform agent, which will be the child")
        platform_agent_instance_child_id, _, platform_device_child_id = _make_platform_agent_structure()
        platform_agent_instance_child_obj = self.RR2.read(platform_agent_instance_child_id)

        log.debug("Preparing a valid agent instance launch, for config only")
        pconfig_builder.set_agent_instance_object(platform_agent_instance_child_obj)
        child_config = pconfig_builder.prepare(will_launch=False)
        verify_child_config(child_config, platform_device_child_id)

        log.debug("Making the structure for a platform agent, which will be the parent")
        platform_agent_instance_parent_id, _, platform_device_parent_id = _make_platform_agent_structure()
        platform_agent_instance_parent_obj = self.RR2.read(platform_agent_instance_parent_id)

        log.debug("Testing child-less parent as a child config")
        pconfig_builder.set_agent_instance_object(platform_agent_instance_parent_obj)
        parent_config = pconfig_builder.prepare(will_launch=False)
        verify_child_config(parent_config, platform_device_parent_id)

        log.debug("assigning child platform to parent")
        self.RR2.assign_platform_device_to_platform_device(platform_device_child_id, platform_device_parent_id)
        child_device_ids = self.RR2.find_platform_device_ids_of_device(platform_device_parent_id)
        self.assertNotEqual(0, len(child_device_ids))

        log.debug("Testing parent + child as parent config")
        pconfig_builder.set_agent_instance_object(platform_agent_instance_parent_obj)
        parent_config = pconfig_builder.prepare(will_launch=False)
        verify_parent_config(parent_config, platform_device_parent_id, platform_device_child_id)

        log.debug("making the structure for an instrument agent")
        instrument_agent_instance_id, _, instrument_device_id = _make_instrument_agent_structure()
        instrument_agent_instance_obj = self.RR2.read(instrument_agent_instance_id)

        log.debug("Testing instrument config")
        iconfig_builder.set_agent_instance_object(instrument_agent_instance_obj)
        instrument_config = iconfig_builder.prepare(will_launch=False)
        verify_instrument_config(instrument_config, instrument_device_id)

        log.debug("assigning instrument to platform")
        self.RR2.assign_instrument_device_to_platform_device(instrument_device_id, platform_device_child_id)
        child_device_ids = self.RR2.find_instrument_device_ids_of_device(platform_device_child_id)
        self.assertNotEqual(0, len(child_device_ids))

        log.debug("Testing entire config")
        pconfig_builder.set_agent_instance_object(platform_agent_instance_parent_obj)
        full_config = pconfig_builder.prepare(will_launch=False)
        verify_parent_config(full_config, platform_device_parent_id, platform_device_child_id, instrument_device_id)
class TestObservatoryManagementServiceIntegration(IonIntegrationTestCase):
    """
    Integration tests for ObservatoryManagementService: site/device tree
    creation, frame-of-reference traversal, org assignment, and extended
    resource views. Runs against a live container started from r2deploy.yml.
    """

    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        #container = Container()
        #print 'starting container'
        #container.start()
        #print 'started container'

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        # service clients used throughout the tests
        self.RR = ResourceRegistryServiceClient(node=self.container.node)
        self.OMS = ObservatoryManagementServiceClient(node=self.container.node)
        self.org_management_service = OrgManagementServiceClient(node=self.container.node)
        self.IMS = InstrumentManagementServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(node=self.container.node)
        self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()
        #print 'TestObservatoryManagementServiceIntegration: started services'

        self.event_publisher = EventPublisher()

    # @unittest.skip('this exists only for debugging the launch process')
    # def test_just_the_setup(self):
    #     return

    def destroy(self, resource_ids):
        """Force-delete every site resource created by _make_associations."""
        self.OMS.force_delete_observatory(resource_ids.observatory_id)
        self.OMS.force_delete_subsite(resource_ids.subsite_id)
        self.OMS.force_delete_subsite(resource_ids.subsite2_id)
        self.OMS.force_delete_subsite(resource_ids.subsiteb_id)
        self.OMS.force_delete_subsite(resource_ids.subsitez_id)
        self.OMS.force_delete_platform_site(resource_ids.platform_site_id)
        self.OMS.force_delete_platform_site(resource_ids.platform_siteb_id)
        self.OMS.force_delete_platform_site(resource_ids.platform_siteb2_id)
        self.OMS.force_delete_platform_site(resource_ids.platform_site3_id)
        self.OMS.force_delete_instrument_site(resource_ids.instrument_site_id)
        self.OMS.force_delete_instrument_site(resource_ids.instrument_site2_id)
        self.OMS.force_delete_instrument_site(resource_ids.instrument_siteb3_id)
        self.OMS.force_delete_instrument_site(resource_ids.instrument_site4_id)

    #@unittest.skip('targeting')
    def test_resources_associations(self):
        """Smoke test: the full association tree can be created and destroyed."""
        resources = self._make_associations()
        self.destroy(resources)

    #@unittest.skip('targeting')
    def test_find_related_frames_of_reference(self):
        """
        Verify find_related_frames_of_reference traverses the site tree
        correctly in both directions and respects the output-type filter.
        """
        # finding subordinates gives a dict of obj lists, convert objs to ids
        def idify(adict):
            ids = {}
            for k, v in adict.iteritems():
                ids[k] = []
                for obj in v:
                    ids[k].append(obj._id)
            return ids

        # a short version of the function we're testing, with id-ify
        def short(resource_id, output_types):
            ret = self.OMS.find_related_frames_of_reference(resource_id, output_types)
            return idify(ret)

        #set up associations first
        stuff = self._make_associations()

        #basic traversal of tree from instrument to platform
        ids = short(stuff.instrument_site_id, [RT.PlatformSite])
        self.assertIn(RT.PlatformSite, ids)
        self.assertIn(stuff.platform_site_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_siteb_id, ids[RT.PlatformSite])
        self.assertNotIn(stuff.platform_siteb2_id, ids[RT.PlatformSite])

        #since this is the first search, just make sure the input inst_id got stripped
        if RT.InstrumentSite in ids:
            self.assertNotIn(stuff.instrument_site_id, ids[RT.InstrumentSite])

        #basic traversal of tree from platform to instrument
        ids = short(stuff.platform_siteb_id, [RT.InstrumentSite])
        self.assertIn(RT.InstrumentSite, ids)
        self.assertIn(stuff.instrument_site_id, ids[RT.InstrumentSite])
        self.assertNotIn(stuff.instrument_site2_id, ids[RT.InstrumentSite])

        #full traversal of tree from observatory down to instrument
        ids = short(stuff.observatory_id, [RT.InstrumentSite])
        self.assertIn(RT.InstrumentSite, ids)
        self.assertIn(stuff.instrument_site_id, ids[RT.InstrumentSite])

        #full traversal of tree from instrument to observatory
        ids = short(stuff.instrument_site_id, [RT.Observatory])
        self.assertIn(RT.Observatory, ids)
        self.assertIn(stuff.observatory_id, ids[RT.Observatory])

        #partial traversal, only down to platform
        ids = short(stuff.observatory_id, [RT.Subsite, RT.PlatformSite])
        self.assertIn(RT.PlatformSite, ids)
        self.assertIn(RT.Subsite, ids)
        self.assertIn(stuff.platform_site_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_siteb_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_siteb2_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_site3_id, ids[RT.PlatformSite])
        self.assertIn(stuff.subsite_id, ids[RT.Subsite])
        self.assertIn(stuff.subsite2_id, ids[RT.Subsite])
        self.assertIn(stuff.subsitez_id, ids[RT.Subsite])
        self.assertIn(stuff.subsiteb_id, ids[RT.Subsite])
        self.assertNotIn(RT.InstrumentSite, ids)

        #partial traversal, only down to platform
        ids = short(stuff.instrument_site_id, [RT.Subsite, RT.PlatformSite])
        self.assertIn(RT.PlatformSite, ids)
        self.assertIn(RT.Subsite, ids)
        self.assertIn(stuff.platform_siteb_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_site_id, ids[RT.PlatformSite])
        self.assertIn(stuff.subsite_id, ids[RT.Subsite])
        self.assertIn(stuff.subsiteb_id, ids[RT.Subsite])
        self.assertNotIn(stuff.subsite2_id, ids[RT.Subsite])
        self.assertNotIn(stuff.subsitez_id, ids[RT.Subsite])
        self.assertNotIn(stuff.platform_siteb2_id, ids[RT.PlatformSite])
        self.assertNotIn(RT.Observatory, ids)

        self.destroy(stuff)

    def _make_associations(self):
        """
        create one of each resource and association used by OMS
        to guard against problems in ion-definitions
        """

        #raise unittest.SkipTest("https://jira.oceanobservatories.org/tasks/browse/CISWCORE-41")

        """
        the tree we're creating (observatory, sites, platforms, instruments)

        rows are lettered, colums numbered.
        - first row is implied a
        - first column is implied 1
        - site Z, just because

        O--Sz
        |
        S--S2--P3--I4
        |
        Sb-Pb2-Ib3
        |
        P--I2 <- PlatformDevice, InstrumentDevice2
        |
        Pb <- PlatformDevice b
        |
        I <- InstrumentDevice
        """

        org_id = self.OMS.create_marine_facility(any_old(RT.Org))

        def create_under_org(resource_type):
            # create a resource (via IMS for instrument devices) and register it with the org
            obj = any_old(resource_type)

            if RT.InstrumentDevice == resource_type:
                resource_id = self.IMS.create_instrument_device(obj)
            else:
                resource_id, _ = self.RR.create(obj)

            self.OMS.assign_resource_to_observatory_org(resource_id=resource_id, org_id=org_id)
            return resource_id

        #stuff we control
        observatory_id       = create_under_org(RT.Observatory)
        subsite_id           = create_under_org(RT.Subsite)
        subsite2_id          = create_under_org(RT.Subsite)
        subsiteb_id          = create_under_org(RT.Subsite)
        subsitez_id          = create_under_org(RT.Subsite)
        platform_site_id     = create_under_org(RT.PlatformSite)
        platform_siteb_id    = create_under_org(RT.PlatformSite)
        platform_siteb2_id   = create_under_org(RT.PlatformSite)
        platform_site3_id    = create_under_org(RT.PlatformSite)
        instrument_site_id   = create_under_org(RT.InstrumentSite)
        instrument_site2_id  = create_under_org(RT.InstrumentSite)
        instrument_siteb3_id = create_under_org(RT.InstrumentSite)
        instrument_site4_id  = create_under_org(RT.InstrumentSite)

        #stuff we associate to
        instrument_device_id  = create_under_org(RT.InstrumentDevice)
        instrument_device2_id = create_under_org(RT.InstrumentDevice)
        platform_device_id    = create_under_org(RT.PlatformDevice)
        platform_deviceb_id   = create_under_org(RT.PlatformDevice)
        instrument_model_id, _ = self.RR.create(any_old(RT.InstrumentModel))
        platform_model_id, _   = self.RR.create(any_old(RT.PlatformModel))
        deployment_id, _       = self.RR.create(any_old(RT.Deployment))

        #observatory
        self.RR.create_association(observatory_id, PRED.hasSite, subsite_id)
        self.RR.create_association(observatory_id, PRED.hasSite, subsitez_id)

        #site
        self.RR.create_association(subsite_id, PRED.hasSite, subsite2_id)
        self.RR.create_association(subsite_id, PRED.hasSite, subsiteb_id)
        self.RR.create_association(subsite2_id, PRED.hasSite, platform_site3_id)
        self.RR.create_association(subsiteb_id, PRED.hasSite, platform_siteb2_id)
        self.RR.create_association(subsiteb_id, PRED.hasSite, platform_site_id)

        #platform_site(s)
        self.RR.create_association(platform_site3_id, PRED.hasSite, instrument_site4_id)
        self.RR.create_association(platform_siteb2_id, PRED.hasSite, instrument_siteb3_id)
        self.RR.create_association(platform_site_id, PRED.hasSite, instrument_site2_id)
        self.RR.create_association(platform_site_id, PRED.hasSite, platform_siteb_id)
        self.RR.create_association(platform_siteb_id, PRED.hasSite, instrument_site_id)
        self.RR.create_association(platform_siteb_id, PRED.hasDevice, platform_deviceb_id)

        self.RR.create_association(platform_site_id, PRED.hasModel, platform_model_id)
        self.RR.create_association(platform_site_id, PRED.hasDevice, platform_device_id)
        self.RR.create_association(platform_site_id, PRED.hasDeployment, deployment_id)

        #instrument_site(s)
        self.RR.create_association(instrument_site_id, PRED.hasModel, instrument_model_id)
        self.RR.create_association(instrument_site_id, PRED.hasDevice, instrument_device_id)
        self.RR.create_association(instrument_site_id, PRED.hasDeployment, deployment_id)

        self.RR.create_association(instrument_site2_id, PRED.hasDevice, instrument_device2_id)

        #platform_device
        self.RR.create_association(platform_device_id, PRED.hasModel, platform_model_id)

        #instrument_device
        self.RR.create_association(instrument_device_id, PRED.hasModel, instrument_model_id)
        self.RR.create_association(instrument_device2_id, PRED.hasModel, instrument_model_id)

        # bundle every created id so tests (and destroy()) can reference them
        ret = DotDict()
        ret.org_id = org_id
        ret.observatory_id = observatory_id
        ret.subsite_id = subsite_id
        ret.subsite2_id = subsite2_id
        ret.subsiteb_id = subsiteb_id
        ret.subsitez_id = subsitez_id
        ret.platform_site_id = platform_site_id
        ret.platform_siteb_id = platform_siteb_id
        ret.platform_siteb2_id = platform_siteb2_id
        ret.platform_site3_id = platform_site3_id
        ret.instrument_site_id = instrument_site_id
        ret.instrument_site2_id = instrument_site2_id
        ret.instrument_siteb3_id = instrument_siteb3_id
        ret.instrument_site4_id = instrument_site4_id
        ret.instrument_device_id = instrument_device_id
        ret.instrument_device2_id = instrument_device2_id
        ret.platform_device_id = platform_device_id
        ret.platform_deviceb_id = platform_deviceb_id
        ret.instrument_model_id = instrument_model_id
        ret.platform_model_id = platform_model_id
        ret.deployment_id = deployment_id

        return ret

    #@unittest.skip("targeting")
    def test_create_observatory(self):
        """Create and force-delete a bare observatory."""
        observatory_obj = IonObject(RT.Observatory,
                                    name='TestFacility',
                                    description='some new mf')
        observatory_id = self.OMS.create_observatory(observatory_obj)
        self.OMS.force_delete_observatory(observatory_id)

    #@unittest.skip("targeting")
    def test_find_observatory_org(self):
        """
        Build an org -> observatory -> subsite -> platform site -> instrument
        site chain, verifying each hasSite/hasResource association as it is
        created, then unassign/delete and verify the links are gone.
        """
        org_obj = IonObject(RT.Org,
                            name='TestOrg',
                            description='some new mf org')

        org_id = self.OMS.create_marine_facility(org_obj)

        observatory_obj = IonObject(RT.Observatory,
                                    name='TestObservatory',
                                    description='some new obs')
        observatory_id = self.OMS.create_observatory(observatory_obj)

        #make association
        self.OMS.assign_resource_to_observatory_org(observatory_id, org_id)

        #find association
        org_objs = self.OMS.find_org_by_observatory(observatory_id)
        self.assertEqual(1, len(org_objs))
        self.assertEqual(org_id, org_objs[0]._id)
        print("org_id=<" + org_id + ">")

        #create a subsite with parent Observatory
        subsite_obj = IonObject(RT.Subsite,
                                name='TestSubsite',
                                description='sample subsite')
        subsite_id = self.OMS.create_subsite(subsite_obj, observatory_id)
        self.assertIsNotNone(subsite_id, "Subsite not created.")

        # verify that Subsite is linked to Observatory
        mf_subsite_assoc = self.RR.get_association(observatory_id, PRED.hasSite, subsite_id)
        self.assertIsNotNone(mf_subsite_assoc, "Subsite not connected to Observatory.")

        # add the Subsite as a resource of this Observatory
        self.OMS.assign_resource_to_observatory_org(resource_id=subsite_id, org_id=org_id)

        # verify that Subsite is linked to Org
        org_subsite_assoc = self.RR.get_association(org_id, PRED.hasResource, subsite_id)
        self.assertIsNotNone(org_subsite_assoc, "Subsite not connected as resource to Org.")

        #create a logical platform with parent Subsite
        platform_site_obj = IonObject(RT.PlatformSite,
                                      name='TestPlatformSite',
                                      description='sample logical platform')
        platform_site_id = self.OMS.create_platform_site(platform_site_obj, subsite_id)
        self.assertIsNotNone(platform_site_id, "PlatformSite not created.")

        # verify that PlatformSite is linked to Site
        site_lp_assoc = self.RR.get_association(subsite_id, PRED.hasSite, platform_site_id)
        self.assertIsNotNone(site_lp_assoc, "PlatformSite not connected to Site.")

        # add the PlatformSite as a resource of this Observatory
        self.OMS.assign_resource_to_observatory_org(resource_id=platform_site_id, org_id=org_id)

        # verify that PlatformSite is linked to Org
        org_lp_assoc = self.RR.get_association(org_id, PRED.hasResource, platform_site_id)
        self.assertIsNotNone(org_lp_assoc, "PlatformSite not connected as resource to Org.")

        #create a logical instrument with parent logical platform
        instrument_site_obj = IonObject(RT.InstrumentSite,
                                        name='TestInstrumentSite',
                                        description='sample logical instrument')
        instrument_site_id = self.OMS.create_instrument_site(instrument_site_obj, platform_site_id)
        self.assertIsNotNone(instrument_site_id, "InstrumentSite not created.")

        # verify that InstrumentSite is linked to PlatformSite
        li_lp_assoc = self.RR.get_association(platform_site_id, PRED.hasSite, instrument_site_id)
        self.assertIsNotNone(li_lp_assoc, "InstrumentSite not connected to PlatformSite.")

        # add the InstrumentSite as a resource of this Observatory
        self.OMS.assign_resource_to_observatory_org(resource_id=instrument_site_id, org_id=org_id)

        # verify that InstrumentSite is linked to Org
        org_li_assoc = self.RR.get_association(org_id, PRED.hasResource, instrument_site_id)
        self.assertIsNotNone(org_li_assoc, "InstrumentSite not connected as resource to Org.")

        # remove the InstrumentSite as a resource of this Observatory
        self.OMS.unassign_resource_from_observatory_org(instrument_site_id, org_id)

        # verify that InstrumentSite is linked to Org
        assocs, _ = self.RR.find_objects(org_id, PRED.hasResource, RT.InstrumentSite, id_only=True)
        self.assertEqual(len(assocs), 0)

        # remove the InstrumentSite
        self.OMS.delete_instrument_site(instrument_site_id)
        # the hasSite association is expected to remain after a (retire-style) delete
        assocs, _ = self.RR.find_objects(platform_site_id, PRED.hasSite, RT.InstrumentSite, id_only=True)
        self.assertEqual(len(assocs), 1)
        #todo: remove the dangling association

        # remove the PlatformSite as a resource of this Observatory
        self.OMS.unassign_resource_from_observatory_org(platform_site_id, org_id)

        # verify that PlatformSite is linked to Org
        assocs, _ = self.RR.find_objects(org_id, PRED.hasResource, RT.PlatformSite, id_only=True)
        self.assertEqual(len(assocs), 0)

        # remove the Site as a resource of this Observatory
        self.OMS.unassign_resource_from_observatory_org(subsite_id, org_id)

        # verify that Site is linked to Org
        assocs, _ = self.RR.find_objects(org_id, PRED.hasResource, RT.Subsite, id_only=True)
        self.assertEqual(len(assocs), 0)

        self.RR.delete(org_id)
        self.OMS.force_delete_observatory(observatory_id)
        self.OMS.force_delete_subsite(subsite_id)
        self.OMS.force_delete_platform_site(platform_site_id)
        self.OMS.force_delete_instrument_site(instrument_site_id)

    #@unittest.skip("in development...")
    @attr('EXT')
    def test_observatory_org_extended(self):
        """
        Verify extended-resource views: platform site extension (devices and
        models), marine facility extension counts, the ION org extension, and
        an instrument site extension after publishing device state events.
        """
        stuff = self._make_associations()

        parsed_pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict',
                                                                                    id_only=True)
        parsed_stream_def_id = self.pubsubcli.create_stream_definition(name='parsed',
                                                                       parameter_dictionary_id=parsed_pdict_id)

        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        dp_obj = IonObject(RT.DataProduct,
                           name='the parsed data',
                           description='ctd stream test',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        data_product_id1 = self.dpclient.create_data_product(data_product=dp_obj,
                                                             stream_definition_id=parsed_stream_def_id)

        self.damsclient.assign_data_product(input_resource_id=stuff.instrument_device_id,
                                            data_product_id=data_product_id1)

        #--------------------------------------------------------------------------------
        # Get the extended Site (platformSite)
        #--------------------------------------------------------------------------------

        extended_site = self.OMS.get_site_extension(stuff.platform_site_id)
        log.debug("extended_site: %s ", str(extended_site))
        self.assertEqual(1, len(extended_site.platform_devices))
        self.assertEqual(1, len(extended_site.platform_models))
        self.assertEqual(stuff.platform_device_id, extended_site.platform_devices[0]._id)
        self.assertEqual(stuff.platform_model_id, extended_site.platform_models[0]._id)

        #--------------------------------------------------------------------------------
        # Get the extended Org
        #--------------------------------------------------------------------------------
        #test the extended resource
        extended_org = self.org_management_service.get_marine_facility_extension(stuff.org_id)
        log.debug("test_observatory_org_extended: extended_org: %s ", str(extended_org))
        #self.assertEqual(2, len(extended_org.instruments_deployed) )
        #self.assertEqual(1, len(extended_org.platforms_not_deployed) )
        self.assertEqual(2, extended_org.number_of_platforms)
        self.assertEqual(2, len(extended_org.platform_models))

        self.assertEqual(2, extended_org.number_of_instruments)
        self.assertEqual(2, len(extended_org.instrument_models))

        #test the extended resource of the ION org
        ion_org_id = self.org_management_service.find_org()
        extended_org = self.org_management_service.get_marine_facility_extension(ion_org_id._id, user_id=12345)
        log.debug("test_observatory_org_extended: extended_ION_org: %s ", str(extended_org))
        self.assertEqual(0, len(extended_org.members))
        self.assertEqual(0, extended_org.number_of_platforms)
        #self.assertEqual(1, len(extended_org.sites))

        #--------------------------------------------------------------------------------
        # Get the extended Site
        #--------------------------------------------------------------------------------

        #create device state events to use for op /non-op filtering in extended
        t = get_ion_ts()
        self.event_publisher.publish_event(ts_created=t,
                                           event_type='ResourceAgentStateEvent',
                                           origin=stuff.instrument_device_id,
                                           state=ResourceAgentState.STREAMING)

        self.event_publisher.publish_event(ts_created=t,
                                           event_type='ResourceAgentStateEvent',
                                           origin=stuff.instrument_device2_id,
                                           state=ResourceAgentState.INACTIVE)

        extended_site = self.OMS.get_site_extension(stuff.instrument_site2_id)
        log.debug("test_observatory_org_extended: extended_site: %s ", str(extended_site))

        self.dpclient.delete_data_product(data_product_id1)
class TestDeployment(IonIntegrationTestCase):
    """Integration tests for Deployment resources: creation, activation
    (site/device matching via the CSP solver), and deactivation."""

    def setUp(self):
        # Start container and deploy the standard r2 service set
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # service clients used throughout the tests
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dmpsclient = DataProductManagementServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.psmsclient = PubsubManagementServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()

        self.c = DotDict()
        self.c.resource_registry = self.rrclient

        self.RR2 = EnhancedResourceRegistryClient(self.rrclient)

        self.dsmsclient = DataProcessManagementServiceClient(node=self.container.node)

        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
                self.dsmsclient.deactivate_data_process(proc_id)
                self.dsmsclient.delete_data_process(proc_id)
        self.addCleanup(killAllDataProcesses)

    #@unittest.skip("targeting")
    def test_create_deployment(self):
        """Create a deployment with a site and device, verify the
        hasDeployment associations, then force-delete and verify NotFound."""

        #create a deployment with metadata and an initial site and device
        platform_site__obj = IonObject(RT.PlatformSite,
                                       name='PlatformSite1',
                                       description='test platform site')
        site_id = self.omsclient.create_platform_site(platform_site__obj)

        platform_device__obj = IonObject(RT.PlatformDevice,
                                         name='PlatformDevice1',
                                         description='test platform device')
        device_id = self.imsclient.create_platform_device(platform_device__obj)

        # planned deployment window: calendar year 2013
        start = IonTime(datetime.datetime(2013, 1, 1))
        end = IonTime(datetime.datetime(2014, 1, 1))
        temporal_bounds = IonObject(OT.TemporalBounds, name='planned',
                                    start_datetime=start.to_string(),
                                    end_datetime=end.to_string())
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment',
                                   description='some new deployment',
                                   constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)
        self.omsclient.deploy_platform_site(site_id, deployment_id)
        self.imsclient.deploy_platform_device(device_id, deployment_id)

        log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id))

        #retrieve the deployment objects and check that the assoc site and device are attached
        read_deployment_obj = self.omsclient.read_deployment(deployment_id)
        log.debug("test_create_deployment: created deployment obj: %s ", str(read_deployment_obj))

        site_ids, _ = self.rrclient.find_subjects(RT.PlatformSite, PRED.hasDeployment, deployment_id, True)
        self.assertEqual(len(site_ids), 1)

        device_ids, _ = self.rrclient.find_subjects(RT.PlatformDevice, PRED.hasDeployment, deployment_id, True)
        self.assertEqual(len(device_ids), 1)

        #delete the deployment (pluck removes its associations first)
        self.RR2.pluck(deployment_id)
        self.omsclient.force_delete_deployment(deployment_id)
        # now try to get the deleted dp object
        try:
            self.omsclient.read_deployment(deployment_id)
        except NotFound:
            pass
        else:
            self.fail("deleted deployment was found during read")

    #@unittest.skip("targeting")
    def base_activate_deployment(self):
        """Build the shared fixture for the activation tests: a platform
        site/device/model, a child instrument site/device/model, and an
        (unactivated) deployment. Returns the created ids in a DotDict."""

        #-------------------------------------------------------------------------------------
        # Create platform site, platform device, platform model
        #-------------------------------------------------------------------------------------

        platform_site__obj = IonObject(RT.PlatformSite,
                                       name='PlatformSite1',
                                       description='test platform site')
        platform_site_id = self.omsclient.create_platform_site(platform_site__obj)

        platform_device_obj = IonObject(RT.PlatformDevice,
                                        name='PlatformDevice1',
                                        description='test platform device')
        platform_device_id = self.imsclient.create_platform_device(platform_device_obj)

        platform_model__obj = IonObject(RT.PlatformModel,
                                        name='PlatformModel1',
                                        description='test platform model')
        platform_model_id = self.imsclient.create_platform_model(platform_model__obj)

        #-------------------------------------------------------------------------------------
        # Create instrument site
        #-------------------------------------------------------------------------------------

        instrument_site_obj = IonObject(RT.InstrumentSite,
                                        name='InstrumentSite1',
                                        description='test instrument site')
        instrument_site_id = self.omsclient.create_instrument_site(instrument_site_obj, platform_site_id)

        # stream definition registered for completeness; its id is not used below
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.psmsclient.create_stream_definition(name='SBE37_CDM',
                                                                     parameter_dictionary_id=pdict_id)

        #----------------------------------------------------------------------------------------------------
        # Create an instrument device
        #----------------------------------------------------------------------------------------------------

        instrument_device_obj = IonObject(RT.InstrumentDevice,
                                          name='InstrumentDevice1',
                                          description='test instrument device')
        instrument_device_id = self.imsclient.create_instrument_device(instrument_device_obj)
        # instrument hangs off the platform device
        self.rrclient.create_association(platform_device_id, PRED.hasDevice, instrument_device_id)

        #----------------------------------------------------------------------------------------------------
        # Create an instrument model
        #----------------------------------------------------------------------------------------------------

        instrument_model_obj = IonObject(RT.InstrumentModel,
                                         name='InstrumentModel1',
                                         description='test instrument model')
        instrument_model_id = self.imsclient.create_instrument_model(instrument_model_obj)

        #----------------------------------------------------------------------------------------------------
        # Create a deployment object
        #----------------------------------------------------------------------------------------------------

        start = IonTime(datetime.datetime(2013, 1, 1))
        end = IonTime(datetime.datetime(2014, 1, 1))
        temporal_bounds = IonObject(OT.TemporalBounds, name='planned',
                                    start_datetime=start.to_string(),
                                    end_datetime=end.to_string())
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment',
                                   description='some new deployment',
                                   context=IonObject(OT.CabledNodeDeploymentContext),
                                   constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)

        log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id))

        ret = DotDict(instrument_site_id=instrument_site_id,
                      instrument_device_id=instrument_device_id,
                      instrument_model_id=instrument_model_id,
                      platform_site_id=platform_site_id,
                      platform_device_id=platform_device_id,
                      platform_model_id=platform_model_id,
                      deployment_id=deployment_id)

        return ret

    #@unittest.skip("targeting")
    def test_activate_deployment_normal(self):
        """Happy path: all models assigned, all sites/devices deployed —
        activation and deactivation both succeed."""

        res = self.base_activate_deployment()

        log.debug("assigning platform and instrument models")
        self.imsclient.assign_platform_model_to_platform_device(res.platform_model_id, res.platform_device_id)
        self.imsclient.assign_instrument_model_to_instrument_device(res.instrument_model_id, res.instrument_device_id)
        self.omsclient.assign_platform_model_to_platform_site(res.platform_model_id, res.platform_site_id)
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id, res.instrument_site_id)

        log.debug("adding instrument site and device to deployment")
        self.omsclient.deploy_instrument_site(res.instrument_site_id, res.deployment_id)
        self.imsclient.deploy_instrument_device(res.instrument_device_id, res.deployment_id)

        log.debug("adding platform site and device to deployment")
        self.omsclient.deploy_platform_site(res.platform_site_id, res.deployment_id)
        self.imsclient.deploy_platform_device(res.platform_device_id, res.deployment_id)

        log.debug("activating deployment, expecting success")
        self.omsclient.activate_deployment(res.deployment_id)

        # fixed typo: was "deactivatin deployment"
        log.debug("deactivating deployment, expecting success")
        self.omsclient.deactivate_deployment(res.deployment_id)

    #@unittest.skip("targeting")
    def test_activate_deployment_nomodels(self):
        """Activation must fail while site and/or device models are missing."""

        res = self.base_activate_deployment()

        self.omsclient.deploy_instrument_site(res.instrument_site_id, res.deployment_id)
        self.imsclient.deploy_instrument_device(res.instrument_device_id, res.deployment_id)

        log.debug("activating deployment without site+device models, expecting fail")
        self.assert_deploy_fail(res.deployment_id, NotFound, "Expected 1")

        log.debug("assigning instrument site model")
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id, res.instrument_site_id)

        log.debug("activating deployment without device models, expecting fail")
        self.assert_deploy_fail(res.deployment_id, NotFound, "Expected 1")

    #@unittest.skip("targeting")
    def test_activate_deployment_nosite(self):
        """Activation must fail when a device is deployed without a matching site."""

        res = self.base_activate_deployment()

        log.debug("assigning instrument models")
        self.imsclient.assign_instrument_model_to_instrument_device(res.instrument_model_id, res.instrument_device_id)
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id, res.instrument_site_id)

        log.debug("deploying instrument device only")
        self.imsclient.deploy_instrument_device(res.instrument_device_id, res.deployment_id)

        log.debug("activating deployment without instrument site, expecting fail")
        self.assert_deploy_fail(res.deployment_id, BadRequest, "Devices in this deployment outnumber sites")

    #@unittest.skip("targeting")
    def test_activate_deployment_nodevice(self):
        """Activation must fail when a site is deployed without any device."""

        res = self.base_activate_deployment()

        log.debug("assigning platform and instrument models")
        self.imsclient.assign_instrument_model_to_instrument_device(res.instrument_model_id, res.instrument_device_id)
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id, res.instrument_site_id)

        log.debug("deploying instrument site only")
        self.omsclient.deploy_instrument_site(res.instrument_site_id, res.deployment_id)

        log.debug("activating deployment without device, expecting fail")
        self.assert_deploy_fail(res.deployment_id, BadRequest, "No devices were found in the deployment")

    def test_activate_deployment_asymmetric_children(self):
        """
        P0
        |  \
        P1  P2
        |
        I1

        Complex deployment using CSP

        P0, P1, and P2 share the same platform model.  The CSP solver should be
        able to work this out based on relationships to parents

        (docstring fixed: previously referred to a nonexistent "P3")
        """

        log.debug("create models")
        imodel_id = self.RR2.create(any_old(RT.InstrumentModel))
        pmodel_id = self.RR2.create(any_old(RT.PlatformModel))

        log.debug("create devices")
        idevice_id = self.RR2.create(any_old(RT.InstrumentDevice))
        pdevice_id = [self.RR2.create(any_old(RT.PlatformDevice)) for _ in range(3)]

        log.debug("create sites")
        isite_id = self.RR2.create(any_old(RT.InstrumentSite))
        psite_id = [self.RR2.create(any_old(RT.PlatformSite)) for _ in range(3)]

        log.debug("assign models")
        self.RR2.assign_instrument_model_to_instrument_device_with_has_model(imodel_id, idevice_id)
        self.RR2.assign_instrument_model_to_instrument_site_with_has_model(imodel_id, isite_id)
        for x in range(3):
            self.RR2.assign_platform_model_to_platform_device_with_has_model(pmodel_id, pdevice_id[x])
            self.RR2.assign_platform_model_to_platform_site_with_has_model(pmodel_id, psite_id[x])

        log.debug("assign hierarchy")
        # instrument under P1; P1 and P2 under root P0 (sites mirror devices)
        self.RR2.assign_instrument_device_to_platform_device_with_has_device(idevice_id, pdevice_id[1])
        self.RR2.assign_instrument_site_to_platform_site_with_has_site(isite_id, psite_id[1])
        for x in range(1, 3):
            self.RR2.assign_platform_device_to_platform_device_with_has_device(pdevice_id[x], pdevice_id[0])
            self.RR2.assign_platform_site_to_platform_site_with_has_site(psite_id[x], psite_id[0])

        log.debug("create and activate deployment")
        dep_id = self.RR2.create(any_old(RT.Deployment,
                                         {"context": IonObject(OT.RemotePlatformDeploymentContext)}))
        self.RR2.assign_deployment_to_platform_device_with_has_deployment(dep_id, pdevice_id[0])
        self.RR2.assign_deployment_to_platform_site_with_has_deployment(dep_id, psite_id[0])
        self.omsclient.activate_deployment(dep_id)

        log.debug("verifying deployment")
        self.assertEqual(idevice_id,
                         self.RR2.find_instrument_device_id_of_instrument_site_using_has_device(isite_id),
                         "The instrument device was not assigned to the instrument site")
        for x in range(3):
            self.assertEqual(pdevice_id[x],
                             self.RR2.find_platform_device_id_of_platform_site_using_has_device(psite_id[x]),
                             "Platform device %d was not assigned to platform site %d" % (x, x))

    def assert_deploy_fail(self, deployment_id, err_type=BadRequest,
                           fail_message="did not specify fail_message"):
        """Activate the deployment and assert it raises err_type with
        fail_message contained in the exception text."""
        with self.assertRaises(err_type) as cm:
            self.omsclient.activate_deployment(deployment_id)
        self.assertIn(fail_message, cm.exception.message)

    def test_3x3_matchups_remoteplatform(self):
        self.base_3x3_matchups(IonObject(OT.RemotePlatformDeploymentContext))

    def test_3x3_matchups_cabledinstrument(self):
        self.base_3x3_matchups(IonObject(OT.CabledInstrumentDeploymentContext))

    def test_3x3_matchups_cablednode(self):
        self.base_3x3_matchups(IonObject(OT.CabledNodeDeploymentContext))

    def base_3x3_matchups(self, deployment_context):
        """
        This will be 1 root platform, 3 sub platforms (2 of one model, 1 of
        another) and 3 sub instruments each (2-to-1)

        The deployment_context argument selects which device/site pair the
        deployment is attached to and which associations are verified after
        activation.
        """

        deployment_context_type = type(deployment_context).__name__

        instrument_model_id = [self.RR2.create(any_old(RT.InstrumentModel)) for _ in range(6)]
        platform_model_id = [self.RR2.create(any_old(RT.PlatformModel)) for _ in range(3)]

        instrument_device_id = [self.RR2.create(any_old(RT.InstrumentDevice)) for _ in range(9)]
        platform_device_id = [self.RR2.create(any_old(RT.PlatformDevice)) for _ in range(4)]

        # sites carry a planned uplink port whose reference designator is later
        # matched against the deployment's port_assignments
        instrument_site_id = [self.RR2.create(any_old(RT.InstrumentSite,
                                                      {"planned_uplink_port":
                                                           IonObject(OT.PlatformPort,
                                                                     reference_designator="instport_%d" % (i + 1))}))
                              for i in range(9)]
        platform_site_id = [self.RR2.create(any_old(RT.PlatformSite,
                                                    {"planned_uplink_port":
                                                         IonObject(OT.PlatformPort,
                                                                   reference_designator="platport_%d" % (i + 1))}))
                            for i in range(4)]

        def instrument_model_at(platform_idx, instrument_idx):
            # 2 instrument models per platform: first instrument gets one, the other two share
            m = platform_idx * 2
            if instrument_idx > 0:
                m += 1
            return m

        def platform_model_at(platform_idx):
            # platform 0 has its own model; platforms 1 and 2 share model 1
            if platform_idx > 0:
                return 1
            return 0

        def instrument_at(platform_idx, instrument_idx):
            return platform_idx * 3 + instrument_idx

        # set up the structure
        for p in range(3):
            m = platform_model_at(p)
            self.RR2.assign_platform_model_to_platform_site_with_has_model(platform_model_id[m], platform_site_id[p])
            self.RR2.assign_platform_model_to_platform_device_with_has_model(platform_model_id[m], platform_device_id[p])
            self.RR2.assign_platform_device_to_platform_device_with_has_device(platform_device_id[p], platform_device_id[3])
            self.RR2.assign_platform_site_to_platform_site_with_has_site(platform_site_id[p], platform_site_id[3])

            for i in range(3):
                m = instrument_model_at(p, i)
                idx = instrument_at(p, i)
                self.RR2.assign_instrument_model_to_instrument_site_with_has_model(instrument_model_id[m], instrument_site_id[idx])
                self.RR2.assign_instrument_model_to_instrument_device_with_has_model(instrument_model_id[m], instrument_device_id[idx])
                self.RR2.assign_instrument_device_to_platform_device_with_has_device(instrument_device_id[idx], platform_device_id[p])
                self.RR2.assign_instrument_site_to_platform_site_with_has_site(instrument_site_id[idx], platform_site_id[p])

        # top level models
        self.RR2.assign_platform_model_to_platform_device_with_has_model(platform_model_id[2], platform_device_id[3])
        self.RR2.assign_platform_model_to_platform_site_with_has_model(platform_model_id[2], platform_site_id[3])

        # verify structure
        for p in range(3):
            parent_id = self.RR2.find_platform_device_id_by_platform_device_using_has_device(platform_device_id[p])
            self.assertEqual(platform_device_id[3], parent_id)

            parent_id = self.RR2.find_platform_site_id_by_platform_site_using_has_site(platform_site_id[p])
            self.assertEqual(platform_site_id[3], parent_id)

        for i in range(len(platform_site_id)):
            self.assertEqual(self.RR2.find_platform_model_of_platform_device_using_has_model(platform_device_id[i]),
                             self.RR2.find_platform_model_of_platform_site_using_has_model(platform_site_id[i]))

        for i in range(len(instrument_site_id)):
            self.assertEqual(self.RR2.find_instrument_model_of_instrument_device_using_has_model(instrument_device_id[i]),
                             self.RR2.find_instrument_model_of_instrument_site_using_has_model(instrument_site_id[i]))

        # pin each device to the port designator of its intended site
        port_assignments = {}
        for p in range(3):
            port_assignments[platform_device_id[p]] = "platport_%d" % (p + 1)
            for i in range(3):
                idx = instrument_at(p, i)
                port_assignments[instrument_device_id[idx]] = "instport_%d" % (idx + 1)

        deployment_id = self.RR2.create(any_old(RT.Deployment,
                                                {"context": deployment_context,
                                                 "port_assignments": port_assignments}))

        log.debug("assigning device/site to %s deployment", deployment_context_type)
        if OT.RemotePlatformDeploymentContext == deployment_context_type:
            self.RR2.assign_deployment_to_platform_device_with_has_deployment(deployment_id, platform_device_id[3])
            self.RR2.assign_deployment_to_platform_site_with_has_deployment(deployment_id, platform_site_id[3])

        elif OT.CabledInstrumentDeploymentContext == deployment_context_type:
            self.RR2.assign_deployment_to_instrument_device_with_has_deployment(deployment_id, instrument_device_id[1])
            self.RR2.assign_deployment_to_instrument_site_with_has_deployment(deployment_id, instrument_site_id[1])

        elif OT.CabledNodeDeploymentContext == deployment_context_type:
            self.RR2.assign_deployment_to_platform_device_with_has_deployment(deployment_id, platform_device_id[1])
            self.RR2.assign_deployment_to_platform_site_with_has_deployment(deployment_id, platform_site_id[1])

        log.debug("activation of %s deployment", deployment_context_type)
        self.omsclient.activate_deployment(deployment_id)

        log.debug("validation of %s deployment", deployment_context_type)
        if OT.RemotePlatformDeploymentContext == deployment_context_type:
            # verify proper associations
            for i, d in enumerate(platform_device_id):
                self.assertEqual(d, self.RR2.find_platform_device_id_of_platform_site_using_has_device(platform_site_id[i]))

            for i, d in enumerate(instrument_device_id):
                self.assertEqual(d, self.RR2.find_instrument_device_id_of_instrument_site_using_has_device(instrument_site_id[i]))

        elif OT.CabledInstrumentDeploymentContext == deployment_context_type:
            self.assertEqual(instrument_device_id[1],
                             self.RR2.find_instrument_device_id_of_instrument_site_using_has_device(instrument_site_id[1]))

        elif OT.CabledNodeDeploymentContext == deployment_context_type:
            expected_platforms = [1]
            expected_instruments = [3, 4, 5]

            # verify proper associations
            for i, d in enumerate(platform_device_id):
                self.assertEqual(i in expected_platforms,
                                 d in self.RR2.find_platform_device_ids_of_platform_site_using_has_device(platform_site_id[i]))

            for i, d in enumerate(instrument_device_id):
                self.assertEqual(i in expected_instruments,
                                 d in self.RR2.find_instrument_device_ids_of_instrument_site_using_has_device(instrument_site_id[i]))
class TestFindRelatedResources(IonIntegrationTestCase):
    """
    assembly integration tests at the service level

    Builds a two-observatory fixture tree and verifies that
    RelatedResourcesCrawler finds exactly the "cared-about" resources.
    """

    def setUp(self):
        # Start container and deploy the standard r2 service set
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.IMS = InstrumentManagementServiceClient(node=self.container.node)
        self.OMS = ObservatoryManagementServiceClient(node=self.container.node)

        self.RR = ResourceRegistryServiceClient(node=self.container.node)

        # care: label -> resource id for the single instance per type we track
        # dontcare: resource type -> list of ids for the decoys
        # realtype: label (possibly an alias) -> actual resource type
        self.care = {}
        self.dontcare = {}
        self.realtype = {}

    # @unittest.skip('this test just for debugging setup')
    # def test_just_the_setup(self):
    #     return

    def create_any(self, resourcetype, first, label=None):
        """Create one resource of resourcetype; if first, register it (under
        label, defaulting to the type) in the care/realtype dicts, otherwise
        record it as a decoy in dontcare. Returns the new resource id."""
        rsrc_id, rev = self.RR.create(any_old(resourcetype))

        if label is None:
            label = resourcetype

        if first:
            if label in self.care:
                self.fail("tried to add a duplicate %s" % label)
            log.debug("Creating %s as %s", resourcetype, label)
            self.care[label] = rsrc_id
            self.realtype[label] = resourcetype
            self.assertIn(label, self.care)
            self.assertIn(label, self.realtype)
        else:
            if not resourcetype in self.dontcare:
                self.dontcare[resourcetype] = []
            self.dontcare[resourcetype].append(rsrc_id)

        return rsrc_id

    def create_observatory(self, first=False):
        # Observatory with 2 child sites; only the `first` branch is cared about
        obs_id = self.create_any(RT.Observatory, first)

        site_id1 = self.create_site(first)
        site_id2 = self.create_site(False)

        self.RR.create_association(subject=obs_id, predicate=PRED.hasSite, object=site_id1)
        self.RR.create_association(subject=obs_id, predicate=PRED.hasSite, object=site_id2)

        return obs_id

    def create_site(self, first=False):
        # "Site" level is really a Subsite aliased as RT_SITE to disambiguate
        site_id = self.create_any(RT.Subsite, first, RT_SITE)
        subsite_id1 = self.create_subsite(first)
        subsite_id2 = self.create_subsite(False)
        self.RR.create_association(subject=site_id, predicate=PRED.hasSite, object=subsite_id1)
        self.RR.create_association(subject=site_id, predicate=PRED.hasSite, object=subsite_id2)
        return site_id

    def create_subsite(self, first=False):
        subsite_id = self.create_any(RT.Subsite, first)
        platformsite_id1 = self.create_platformsite(first)
        platformsite_id2 = self.create_platformsite(False)
        self.RR.create_association(subject=subsite_id, predicate=PRED.hasSite, object=platformsite_id1)
        self.RR.create_association(subject=subsite_id, predicate=PRED.hasSite, object=platformsite_id2)
        return subsite_id

    def create_platformsite(self, first=False):
        platform_model_id = self.create_any(RT.PlatformModel, False)  # we never care about this level

        platformsite_id = self.create_any(RT.PlatformSite, first)
        subplatformsite_id1 = self.create_subplatformsite(first)
        subplatformsite_id2 = self.create_subplatformsite(False)
        self.RR.create_association(subject=platformsite_id, predicate=PRED.hasSite, object=subplatformsite_id1)
        self.RR.create_association(subject=platformsite_id, predicate=PRED.hasSite, object=subplatformsite_id2)
        self.RR.create_association(subject=platformsite_id, predicate=PRED.hasModel, object=platform_model_id)

        return platformsite_id

    def create_subplatformsite(self, first=False):
        # sub-platform site is a PlatformSite aliased as RT_SUBPLATFORMSITE;
        # it gets a model, a matching device, and 2 instrument sites
        platform_model_id = self.create_any(RT.PlatformModel, first)

        subplatformsite_id = self.create_any(RT.PlatformSite, first, RT_SUBPLATFORMSITE)
        self.RR.create_association(subject=subplatformsite_id, predicate=PRED.hasModel, object=platform_model_id)

        platformdevice_id = self.create_platform_device(platform_model_id, first)
        self.RR.create_association(subject=subplatformsite_id, predicate=PRED.hasDevice, object=platformdevice_id)

        instrumentsite_id1 = self.create_instrumentsite(platformdevice_id, first)
        instrumentsite_id2 = self.create_instrumentsite(platformdevice_id, False)
        self.RR.create_association(subject=subplatformsite_id, predicate=PRED.hasSite, object=instrumentsite_id1)
        self.RR.create_association(subject=subplatformsite_id, predicate=PRED.hasSite, object=instrumentsite_id2)

        return subplatformsite_id

    def create_instrumentsite(self, platform_device_id, first=False):
        instrument_model_id = self.create_any(RT.InstrumentModel, first)

        instrumentsite_id = self.create_any(RT.InstrumentSite, first)
        self.RR.create_association(subject=instrumentsite_id, predicate=PRED.hasModel, object=instrument_model_id)

        instrument_device_id = self.create_instrumentdevice(instrument_model_id, first)
        # device hangs off both the parent platform device and the instrument site
        self.RR.create_association(subject=platform_device_id, predicate=PRED.hasDevice, object=instrument_device_id)
        self.RR.create_association(subject=instrumentsite_id, predicate=PRED.hasDevice, object=instrument_device_id)

        return instrumentsite_id

    def create_platform_device(self, platform_model_id, first=False):
        platformdevice_id = self.create_any(RT.PlatformDevice, first)
        self.RR.create_association(subject=platformdevice_id, predicate=PRED.hasModel, object=platform_model_id)
        return platformdevice_id

    def create_instrumentdevice(self, instrument_model_id, first=False):
        instrumentdevice_id = self.create_any(RT.InstrumentDevice, first)
        self.RR.create_association(subject=instrumentdevice_id, predicate=PRED.hasModel, object=instrument_model_id)
        return instrumentdevice_id

    def create_dummy_structure(self):
        """
        Create two observatories.
         - each observatory has 2 subsites
         - each subsite has 2 more subsites
         - each of those subsites has 2 platform sites
         - each of those platform sites has a model and 2 sub- platform sites
         - each of those sub- platform sites has a model, matching platform device, and 2 instrument sites
         - each of those instrument sites has a model and matching instrument device

        One of each resource type (observatory all the way down to instrument
        device/model) is what we "care" about -- it goes in the self.care dict
        All the rest go in the self.dontcare dict

        To manage subsite/platform multiplicity, we alias them in the dict...
        the proper hierarchy is:
        Observatory-Site-Subsite-PlatformSite-SubPlatformSite-InstrumentSite

        self.realtype[alias] gives the real resource type of an alias
        """
        self.create_observatory(True)
        self.create_observatory(False)

        for rt in [RT.Observatory, RT_SITE, RT.Subsite,
                   RT.PlatformSite, RT_SUBPLATFORMSITE, RT.PlatformDevice, RT.PlatformModel,
                   RT.InstrumentSite, RT.InstrumentDevice, RT.InstrumentModel]:
            self.assertIn(rt, self.care)

        # (subject label, predicate, object label) triples expected in the registry
        # NOTE(review): the second entry keys on RT.Site while creation labeled that
        # level RT_SITE -- confirm RT_SITE == RT.Site, else the realtype/care lookups
        # below raise KeyError for that entry.
        self.expected_associations = [
            (RT.Observatory, PRED.hasSite, RT_SITE),
            (RT.Site, PRED.hasSite, RT.Subsite),
            (RT.Subsite, PRED.hasSite, RT.PlatformSite),
            (RT.PlatformSite, PRED.hasSite, RT_SUBPLATFORMSITE),
            (RT_SUBPLATFORMSITE, PRED.hasSite, RT.InstrumentSite),
            (RT_SUBPLATFORMSITE, PRED.hasModel, RT.PlatformModel),
            (RT_SUBPLATFORMSITE, PRED.hasDevice, RT.PlatformDevice),
            (RT.PlatformDevice, PRED.hasModel, RT.PlatformModel),
            (RT.InstrumentSite, PRED.hasModel, RT.InstrumentModel),
            (RT.InstrumentSite, PRED.hasDevice, RT.InstrumentDevice),
            (RT.InstrumentDevice, PRED.hasModel, RT.InstrumentModel)]

        log.info("Verifying created structure")
        for (st, p, ot) in self.expected_associations:
            rst = self.realtype[st]
            rot = self.realtype[ot]
            s = self.care[st]
            o = self.care[ot]
            log.debug("searching %s->%s->%s as %s->%s->%s" % (st, p, ot, rst, p, rot))
            log.debug(" - expecting %s %s" % (rot, o))
            a = self.RR.find_associations(subject=s, predicate=p, object=o)
            if not (0 < len(a) < 3):
                a2 = self.RR.find_associations(subject=s, predicate=p)
                a2content = [("(%s %s)" % (alt.ot, alt.o)) for alt in a2]
                self.fail("Expected 1-2 associations for %s->%s->%s, got %s: %s" %
                          (st, p, ot, len(a2), a2content))
            self.assertIn(o, [aa.o for aa in a])
        log.info("CREATED STRUCTURE APPEARS CORRECT ===============================")

    def simplify_assn_resource_ids(self, assn_list):
        """Replace resource ids in an association list with small sequential
        integers (stable within the list) so graphs are readable in logs."""
        count = 0
        lookup = {}

        retval = []

        for a in assn_list:
            if not a.s in lookup:
                lookup[a.s] = count
                count += 1
            if not a.o in lookup:
                lookup[a.o] = count
                count += 1
            retval.append(DotDict({"s": lookup[a.s], "st": a.st, "p": a.p, "o": lookup[a.o], "ot": a.ot}))

        return retval

    def describe_assn_graph(self, assn_list):
        """Render each association as a one-line 'type id -> pred -> type id' string."""
        return [("%s %s -> %s -> %s %s" % (a.st, a.s, a.p, a.ot, a.o)) for a in assn_list]

    #@unittest.skip('refactoring')
    def test_related_resource_crawler(self):
        """Exercise RelatedResourcesCrawler: depth-1 forward/backward searches
        over every expected association, then a multi-predicate crawl from the
        cared-about instrument device."""
        self.create_dummy_structure()

        r = RelatedResourcesCrawler()

        # test the basic forward-backward searches
        for (st, p, ot) in self.expected_associations:
            rst = self.realtype[st]
            rot = self.realtype[ot]
            s = self.care[st]
            o = self.care[ot]

            # forward: subject -> object, depth 1
            test_sto_fn = r.generate_get_related_resources_fn(self.RR, [rot], {p: (True, False)})
            sto_crawl = test_sto_fn(s, 1)  # depth of 1
            if 2 < len(sto_crawl):  # we get 2 because of care/dontcare
                self.fail("got %s" % self.describe_assn_graph(self.simplify_assn_resource_ids(sto_crawl)))

            self.assertIn(o, [t.o for t in sto_crawl])

            # backward: object -> subject, depth 1
            test_ots_fn = r.generate_get_related_resources_fn(self.RR, [rst], {p: (False, True)})
            ots_crawl = test_ots_fn(o, 1)  # depth of 1
            if 1 != len(ots_crawl):
                self.fail("got %s" % self.describe_assn_graph(self.simplify_assn_resource_ids(ots_crawl)))

        # test a nontrivial lookup, in which we extract resources related to an instrument device
        rw = []
        pd = {}

        # we want things related to an instrument device
        rw.append(RT.PlatformModel)
        rw.append(RT.InstrumentModel)
        rw.append(RT.PlatformDevice)
        rw.append(RT.InstrumentSite)
        rw.append(RT.PlatformSite)
        rw.append(RT.Subsite)
        rw.append(RT.Observatory)
        rw.append(RT.InstrumentDevice)
        # predicate -> (follow subject->object, follow object->subject)
        pd[PRED.hasModel] = (True, True)
        pd[PRED.hasDevice] = (False, True)
        pd[PRED.hasSite] = (False, True)

        test_real_fn = r.generate_get_related_resources_fn(self.RR, resource_whitelist=rw,
                                                           predicate_dictionary=pd)
        related = test_real_fn(self.care[RT.InstrumentDevice])

        log.debug("========= Result is:")
        for l in self.describe_assn_graph(self.simplify_assn_resource_ids(related)):
            log.debug("    %s", l)

        # check that we only got things we care about
        for a in related:
            # special case for platform model, because we don't care about the top-level platform's model
            #  so it will blow up if we don't ignore it.  if we got an extra platform model, we'd have an
            #  extra platform anyway... so this special case is safe.
            if a.st != RT.PlatformModel:
                self.assertIn(a.s, self.care.values(), "%s %s not cared about" % (a.st, a.s))
            if a.ot != RT.PlatformModel:
                self.assertIn(a.o, self.care.values(), "%s %s not cared about" % (a.ot, a.o))
class TestDeployment(IonIntegrationTestCase):
    """Integration tests for Deployment resources (create/delete and activation).

    Older variant of this suite: uses IonTime for temporal bounds,
    deploy_platform_site/deploy_platform_device, and ResourceImpl.pluck
    for association cleanup before force-delete.
    """

    def setUp(self):
        # Start container and deploy the full r2 service set.
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Service clients used throughout the tests.
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dmpsclient = DataProductManagementServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.psmsclient = PubsubManagementServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()

        # Minimal clients bundle for ResourceImpl (only the registry is needed).
        self.c = DotDict()
        self.c.resource_registry = self.rrclient
        self.resource_impl = ResourceImpl(self.c)

    #@unittest.skip("targeting")
    def test_create_deployment(self):
        """Create a deployment with a site and device, verify associations, then delete."""

        #create a deployment with metadata and an initial site and device
        platform_site__obj = IonObject(RT.PlatformSite,
                                       name='PlatformSite1',
                                       description='test platform site')
        site_id = self.omsclient.create_platform_site(platform_site__obj)

        platform_device__obj = IonObject(RT.PlatformDevice,
                                         name='PlatformDevice1',
                                         description='test platform device')
        device_id = self.imsclient.create_platform_device(platform_device__obj)

        # Planned deployment window: calendar year 2013 (IonTime-serialized strings).
        start = IonTime(datetime.datetime(2013,1,1))
        end = IonTime(datetime.datetime(2014,1,1))
        temporal_bounds = IonObject(OT.TemporalBounds,
                                    name='planned',
                                    start_datetime=start.to_string(),
                                    end_datetime=end.to_string())
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment',
                                   description='some new deployment',
                                   constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)

        # Attach the site (via OMS) and the device (via IMS) to the deployment.
        self.omsclient.deploy_platform_site(site_id, deployment_id)
        self.imsclient.deploy_platform_device(device_id, deployment_id)

        log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id))

        #retrieve the deployment objects and check that the assoc site and device are attached
        read_deployment_obj = self.omsclient.read_deployment(deployment_id)
        log.debug("test_create_deployment: created deployment obj: %s ", str(read_deployment_obj))

        site_ids, _ = self.rrclient.find_subjects(RT.PlatformSite, PRED.hasDeployment, deployment_id, True)
        self.assertEqual(len(site_ids), 1)

        device_ids, _ = self.rrclient.find_subjects(RT.PlatformDevice, PRED.hasDeployment, deployment_id, True)
        self.assertEqual(len(device_ids), 1)

        #delete the deployment
        # pluck removes associations first so force_delete does not orphan them
        self.resource_impl.pluck(deployment_id)
        self.omsclient.force_delete_deployment(deployment_id)

        # now try to get the deleted dp object
        try:
            # NOTE(review): assignment is unused; only the NotFound raise matters here.
            deployment_obj = self.omsclient.read_deployment(deployment_id)
        except NotFound:
            pass
        else:
            self.fail("deleted deployment was found during read")

    #@unittest.skip("targeting")
    def test_activate_deployment(self):
        """Build a full platform/instrument site+device structure and activate a deployment."""

        #-------------------------------------------------------------------------------------
        # Create platform site, platform device, platform model
        #-------------------------------------------------------------------------------------
        platform_site__obj = IonObject(RT.PlatformSite,
                                       name='PlatformSite1',
                                       description='test platform site')
        site_id = self.omsclient.create_platform_site(platform_site__obj)

        platform_device_obj = IonObject(RT.PlatformDevice,
                                        name='PlatformDevice1',
                                        description='test platform device')
        platform_device_id = self.imsclient.create_platform_device(platform_device_obj)

        platform_model__obj = IonObject(RT.PlatformModel,
                                        name='PlatformModel1',
                                        description='test platform model')
        model_id = self.imsclient.create_platform_model(platform_model__obj)

        #-------------------------------------------------------------------------------------
        # Assign platform model to platform device and site
        #-------------------------------------------------------------------------------------
        self.imsclient.assign_platform_model_to_platform_device(model_id, platform_device_id)
        self.omsclient.assign_platform_model_to_platform_site(model_id, site_id)

        #-------------------------------------------------------------------------------------
        # Create instrument site (child of the platform site)
        #-------------------------------------------------------------------------------------
        instrument_site_obj = IonObject(RT.InstrumentSite,
                                        name='InstrumentSite1',
                                        description='test instrument site')
        instrument_site_id = self.omsclient.create_instrument_site(instrument_site_obj, site_id)

        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.psmsclient.create_stream_definition(name='SBE37_CDM', parameter_dictionary_id=pdict_id)

        # Construct temporal and spatial Coordinate Reference System objects
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        dp_obj = IonObject(RT.DataProduct,
                           name='Log Data Product',
                           description='some new dp',
                           temporal_domain = tdom,
                           spatial_domain = sdom)
        out_log_data_product_id = self.dmpsclient.create_data_product(dp_obj, ctd_stream_def_id)

        #----------------------------------------------------------------------------------------------------
        # Start the transform (a logical transform) that acts as an instrument site
        #----------------------------------------------------------------------------------------------------
        self.omsclient.create_site_data_product(site_id= instrument_site_id,
                                                data_product_id = out_log_data_product_id)

        #----------------------------------------------------------------------------------------------------
        # Create an instrument device, attached to the platform device
        #----------------------------------------------------------------------------------------------------
        instrument_device_obj = IonObject(RT.InstrumentDevice,
                                          name='InstrumentDevice1',
                                          description='test instrument device')
        instrument_device_id = self.imsclient.create_instrument_device(instrument_device_obj)
        self.rrclient.create_association(platform_device_id, PRED.hasDevice, instrument_device_id)

        dp_obj = IonObject(RT.DataProduct,
                           name='Instrument Data Product',
                           description='some new dp',
                           temporal_domain = tdom,
                           spatial_domain = sdom)
        inst_data_product_id = self.dmpsclient.create_data_product(dp_obj, ctd_stream_def_id)

        #assign data products appropriately
        self.damsclient.assign_data_product(input_resource_id=instrument_device_id,
                                            data_product_id=inst_data_product_id)

        #----------------------------------------------------------------------------------------------------
        # Create an instrument model and link it to both device and site
        #----------------------------------------------------------------------------------------------------
        instrument_model_obj = IonObject(RT.InstrumentModel,
                                         name='InstrumentModel1',
                                         description='test instrument model')
        instrument_model_id = self.imsclient.create_instrument_model(instrument_model_obj)
        self.imsclient.assign_instrument_model_to_instrument_device(instrument_model_id, instrument_device_id)
        self.omsclient.assign_instrument_model_to_instrument_site(instrument_model_id, instrument_site_id)

        #----------------------------------------------------------------------------------------------------
        # Create a deployment object covering calendar year 2013
        #----------------------------------------------------------------------------------------------------
        start = IonTime(datetime.datetime(2013,1,1))
        end = IonTime(datetime.datetime(2014,1,1))
        temporal_bounds = IonObject(OT.TemporalBounds,
                                    name='planned',
                                    start_datetime=start.to_string(),
                                    end_datetime=end.to_string())
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment',
                                   description='some new deployment',
                                   constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)
        self.omsclient.deploy_instrument_site(instrument_site_id, deployment_id)
        self.imsclient.deploy_instrument_device(instrument_device_id, deployment_id)

        log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id))

        # Activation is the behavior under test; no explicit assertion — success means no raise.
        self.omsclient.activate_deployment(deployment_id)
class TestInstrumentManagementServiceIntegration(IonIntegrationTestCase):
    """Smoke tests for IMS resource types and associations (r2sa deploy variant).

    Creates one of every IMS-related resource and association directly through
    the resource registry to catch breakage in ion-definitions.
    """

    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        #container = Container()
        #print 'starting container'
        #container.start()
        #print 'started container'

        self.container.start_rel_from_url('res/deploy/r2sa.yml')
        self.RR = ResourceRegistryServiceClient(node=self.container.node)

        print 'started services'

    def test_just_the_setup(self):
        # Intentionally empty: verifies setUp alone succeeds.
        return

    def test_resources_associations(self):
        """
        create one of each resource and association used by IMS
        to guard against problems in ion-definitions
        """

        #stuff we control
        instrument_agent_instance_id, _ = self.RR.create(any_old(RT.InstrumentAgentInstance))
        instrument_agent_id, _ = self.RR.create(any_old(RT.InstrumentAgent))
        instrument_device_id, _ = self.RR.create(any_old(RT.InstrumentDevice))
        instrument_model_id, _ = self.RR.create(any_old(RT.InstrumentModel))
        platform_agent_instance_id, _ = self.RR.create(any_old(RT.PlatformAgentInstance))
        platform_agent_id, _ = self.RR.create(any_old(RT.PlatformAgent))
        platform_device_id, _ = self.RR.create(any_old(RT.PlatformDevice))
        platform_model_id, _ = self.RR.create(any_old(RT.PlatformModel))
        sensor_device_id, _ = self.RR.create(any_old(RT.SensorDevice))
        sensor_model_id, _ = self.RR.create(any_old(RT.SensorModel))

        #stuff we associate to
        logical_platform_id, _ = self.RR.create(any_old(RT.LogicalPlatform))
        logical_instrument_id, _ = self.RR.create(any_old(RT.LogicalInstrument))
        data_producer_id, _ = self.RR.create(any_old(RT.DataProducer))

        # NOTE(review): bare name expressions below are deliberate no-op markers
        # documenting that the resource appears only as an association target.
        instrument_agent_instance_id #is only a target

        #instrument_agent
        self.RR.create_association(instrument_agent_id, PRED.hasModel, instrument_model_id)
        self.RR.create_association(instrument_agent_id, PRED.hasInstance, instrument_agent_instance_id)

        #instrument_device
        self.RR.create_association(instrument_device_id, PRED.hasModel, instrument_model_id)
        self.RR.create_association(instrument_device_id, PRED.hasAssignment, logical_instrument_id)
        self.RR.create_association(instrument_device_id, PRED.hasAgentInstance, instrument_agent_instance_id)
        #self.RR.create_association(instrument_device_id, PRED.hasSensor, sensor_device_id)
        self.RR.create_association(instrument_device_id, PRED.hasDataProducer, data_producer_id)

        instrument_model_id #is only a target

        platform_agent_instance_id #is only a target

        #platform_agent
        self.RR.create_association(platform_agent_id, PRED.hasModel, platform_model_id)
        self.RR.create_association(platform_agent_id, PRED.hasInstance, platform_agent_instance_id)

        #platform_device
        self.RR.create_association(platform_device_id, PRED.hasModel, platform_model_id)
        self.RR.create_association(platform_device_id, PRED.hasAssignment, logical_platform_id)
        self.RR.create_association(platform_device_id, PRED.hasAgentInstance, platform_agent_instance_id)
        self.RR.create_association(platform_device_id, PRED.hasInstrument, instrument_device_id)

        platform_model_id #is only a target

        #sensor_device
        self.RR.create_association(sensor_device_id, PRED.hasModel, sensor_model_id)

        sensor_model_id #is only a target
class TestInstrumentManagementServiceIntegration(IonIntegrationTestCase):
    """Integration tests for IMS (r2deploy variant).

    Covers resource/association creation with extended-resource verification,
    custom attributes, resource state save/restore against a live agent, and
    agent-instance configuration building for platform/instrument hierarchies.
    """

    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        #container = Container()
        #print 'starting container'
        #container.start()
        #print 'started container'

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Service clients used across the tests.
        self.RR = ResourceRegistryServiceClient(node=self.container.node)
        self.IMS = InstrumentManagementServiceClient(node=self.container.node)
        self.IDS = IdentityManagementServiceClient(node=self.container.node)
        self.PSC = PubsubManagementServiceClient(node=self.container.node)
        self.DP = DataProductManagementServiceClient(node=self.container.node)
        self.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.DSC = DatasetManagementServiceClient(node=self.container.node)
        self.PDC = ProcessDispatcherServiceClient(node=self.container.node)

        # Convenience wrapper with typed assign/find helpers over the registry.
        self.RR2 = EnhancedResourceRegistryClient(self.RR)

        print 'started services'

#    @unittest.skip('this test just for debugging setup')
#    def test_just_the_setup(self):
#        return

    @attr('EXT')
    def test_resources_associations_extensions(self):
        """
        create one of each resource and association used by IMS
        to guard against problems in ion-definitions
        """

        #stuff we control
        instrument_agent_instance_id, _ = self.RR.create(any_old(RT.InstrumentAgentInstance))
        instrument_agent_id, _ = self.RR.create(any_old(RT.InstrumentAgent))
        instrument_model_id, _ = self.RR.create(any_old(RT.InstrumentModel))
        instrument_device_id, _ = self.RR.create(any_old(RT.InstrumentDevice))
        platform_agent_instance_id, _ = self.RR.create(any_old(RT.PlatformAgentInstance))
        platform_agent_id, _ = self.RR.create(any_old(RT.PlatformAgent))
        platform_device_id, _ = self.RR.create(any_old(RT.PlatformDevice))
        platform_model_id, _ = self.RR.create(any_old(RT.PlatformModel))
        sensor_device_id, _ = self.RR.create(any_old(RT.SensorDevice))
        sensor_model_id, _ = self.RR.create(any_old(RT.SensorModel))

        #stuff we associate to
        data_producer_id, _ = self.RR.create(any_old(RT.DataProducer))
        org_id, _ = self.RR.create(any_old(RT.Org))

        #instrument_agent_instance_id #is only a target

        #instrument_agent
        self.RR.create_association(instrument_agent_id, PRED.hasModel, instrument_model_id)
        self.RR.create_association(instrument_agent_instance_id, PRED.hasAgentDefinition, instrument_agent_id)

        #instrument_device
        self.RR.create_association(instrument_device_id, PRED.hasModel, instrument_model_id)
        self.RR.create_association(instrument_device_id, PRED.hasAgentInstance, instrument_agent_instance_id)
        self.RR.create_association(instrument_device_id, PRED.hasDataProducer, data_producer_id)
        self.RR.create_association(instrument_device_id, PRED.hasDevice, sensor_device_id)
        self.RR.create_association(org_id, PRED.hasResource, instrument_device_id)

        # NOTE(review): bare name expressions are deliberate no-op markers
        # documenting that the resource appears only as an association target.
        instrument_model_id #is only a target

        platform_agent_instance_id #is only a target

        #platform_agent
        self.RR.create_association(platform_agent_id, PRED.hasModel, platform_model_id)
        self.RR.create_association(platform_agent_instance_id, PRED.hasAgentDefinition, platform_agent_id)

        #platform_device
        self.RR.create_association(platform_device_id, PRED.hasModel, platform_model_id)
        self.RR.create_association(platform_device_id, PRED.hasAgentInstance, platform_agent_instance_id)
        self.RR.create_association(platform_device_id, PRED.hasDevice, instrument_device_id)

        platform_model_id #is only a target

        #sensor_device
        self.RR.create_association(sensor_device_id, PRED.hasModel, sensor_model_id)
        self.RR.create_association(sensor_device_id, PRED.hasDevice, instrument_device_id)

        sensor_model_id #is only a target

        #create a parsed product for this instrument output
        tdom, sdom = time_series_domain()
        tdom = tdom.dump()
        sdom = sdom.dump()
        dp_obj = IonObject(RT.DataProduct,
                           name='the parsed data',
                           description='ctd stream test',
                           processing_level_code='Parsed_Canonical',
                           temporal_domain = tdom,
                           spatial_domain = sdom)
        pdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        parsed_stream_def_id = self.PSC.create_stream_definition(name='parsed', parameter_dictionary_id=pdict_id)
        data_product_id1 = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=parsed_stream_def_id)
        log.debug( 'new dp_id = %s', data_product_id1)

        self.DAMS.assign_data_product(input_resource_id=instrument_device_id, data_product_id=data_product_id1)

        def addInstOwner(inst_id, subject):
            # Create an actor identity + user info for `subject` and mark it as
            # an owner of the given instrument.
            actor_identity_obj = any_old(RT.ActorIdentity, {"name": subject})
            user_id = self.IDS.create_actor_identity(actor_identity_obj)
            user_info_obj = any_old(RT.UserInfo)
            user_info_id = self.IDS.create_user_info(user_id, user_info_obj)
            self.RR.create_association(inst_id, PRED.hasOwner, user_id)

        #Testing multiple instrument owners
        addInstOwner(instrument_device_id, "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254")
        addInstOwner(instrument_device_id, "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Bob Cumbers A256")

        extended_instrument = self.IMS.get_instrument_device_extension(instrument_device_id)

        self.assertEqual(instrument_device_id, extended_instrument._id)
        self.assertEqual(len(extended_instrument.owners), 2)
        self.assertEqual(extended_instrument.instrument_model._id, instrument_model_id)

        # Verify that computed attributes exist for the extended instrument
        self.assertIsInstance(extended_instrument.computed.firmware_version, ComputedFloatValue)
        self.assertIsInstance(extended_instrument.computed.last_data_received_datetime, ComputedFloatValue)
        self.assertIsInstance(extended_instrument.computed.last_calibration_datetime, ComputedFloatValue)
        self.assertIsInstance(extended_instrument.computed.uptime, ComputedStringValue)

        self.assertIsInstance(extended_instrument.computed.power_status_roll_up, ComputedIntValue)
        self.assertIsInstance(extended_instrument.computed.communications_status_roll_up, ComputedIntValue)
        self.assertIsInstance(extended_instrument.computed.data_status_roll_up, ComputedIntValue)
        self.assertIsInstance(extended_instrument.computed.location_status_roll_up, ComputedIntValue)

        log.debug("extended_instrument.computed: %s", extended_instrument.computed)

        #check model
        inst_model_obj = self.RR.read(instrument_model_id)
        self.assertEqual(inst_model_obj.name, extended_instrument.instrument_model.name)

        #check agent instance
        inst_agent_instance_obj = self.RR.read(instrument_agent_instance_id)
        self.assertEqual(inst_agent_instance_obj.name, extended_instrument.agent_instance.name)

        #check agent
        inst_agent_obj = self.RR.read(instrument_agent_id)
        #compound assoc return list of lists so check the first element
        self.assertEqual(inst_agent_obj.name, extended_instrument.instrument_agent[0].name)

        #check platform device
        plat_device_obj = self.RR.read(platform_device_id)
        self.assertEqual(plat_device_obj.name, extended_instrument.platform_device.name)

        extended_platform = self.IMS.get_platform_device_extension(platform_device_id)

        self.assertEqual(1, len(extended_platform.instrument_devices))
        self.assertEqual(instrument_device_id, extended_platform.instrument_devices[0]._id)
        self.assertEqual(1, len(extended_platform.instrument_models))
        self.assertEqual(instrument_model_id, extended_platform.instrument_models[0]._id)

        #check sensor devices
        self.assertEqual(1, len(extended_instrument.sensor_devices))

        #check data_product_parameters_set
        self.assertEqual(ComputedValueAvailability.PROVIDED,
                         extended_instrument.computed.data_product_parameters_set.status)
        self.assertTrue( 'Parsed_Canonical' in extended_instrument.computed.data_product_parameters_set.value)
        # the ctd parameters should include 'temp'
        self.assertTrue( 'temp' in extended_instrument.computed.data_product_parameters_set.value['Parsed_Canonical'])

        #none of these will work because there is no agent
        self.assertEqual(ComputedValueAvailability.NOTAVAILABLE,
                         extended_instrument.computed.firmware_version.status)
#        self.assertEqual(ComputedValueAvailability.NOTAVAILABLE,
#                         extended_instrument.computed.operational_state.status)
#        self.assertEqual(ComputedValueAvailability.PROVIDED,
#                         extended_instrument.computed.power_status_roll_up.status)
#        self.assertEqual(ComputedValueAvailability.PROVIDED,
#                         extended_instrument.computed.communications_status_roll_up.status)
#        self.assertEqual(ComputedValueAvailability.PROVIDED,
#                         extended_instrument.computed.data_status_roll_up.status)
#        self.assertEqual(StatusType.STATUS_OK,
#                         extended_instrument.computed.data_status_roll_up.value)
#        self.assertEqual(ComputedValueAvailability.PROVIDED,
#                         extended_instrument.computed.location_status_roll_up.status)
#        self.assertEqual(ComputedValueAvailability.PROVIDED,
#                         extended_instrument.computed.recent_events.status)
#        self.assertEqual([], extended_instrument.computed.recent_events.value)

        # cleanup
        # NOTE(review): this local DotDict appears to be an unused remnant;
        # RR2.pluck below operates on self.RR2 — confirm before removing.
        c = DotDict()
        c.resource_registry = self.RR
        # pluck removes associations so force_delete does not leave orphans
        self.RR2.pluck(instrument_agent_id)
        self.RR2.pluck(instrument_model_id)
        self.RR2.pluck(instrument_device_id)
        self.RR2.pluck(platform_agent_id)
        self.IMS.force_delete_instrument_agent(instrument_agent_id)
        self.IMS.force_delete_instrument_model(instrument_model_id)
        self.IMS.force_delete_instrument_device(instrument_device_id)
        self.IMS.force_delete_platform_agent_instance(platform_agent_instance_id)
        self.IMS.force_delete_platform_agent(platform_agent_id)
        self.IMS.force_delete_platform_device(platform_device_id)
        self.IMS.force_delete_platform_model(platform_model_id)
        self.IMS.force_delete_sensor_device(sensor_device_id)
        self.IMS.force_delete_sensor_model(sensor_model_id)

        #stuff we associate to
        self.RR.delete(data_producer_id)
        self.RR.delete(org_id)

    def test_custom_attributes(self):
        """
        Test assignment of custom attributes
        """

        # Model declares the allowed custom attribute; device supplies a valid
        # value plus an undeclared one ("bogus_attr") expected to log a warning.
        instrument_model_id, _ = self.RR.create(any_old(RT.InstrumentModel,
                {"custom_attributes":
                     {"favorite_color": "attr desc goes here"}
                 }))
        instrument_device_id, _ = self.RR.create(any_old(RT.InstrumentDevice,
                {"custom_attributes":
                     {"favorite_color": "red",
                      "bogus_attr": "should raise warning"
                      }
                 }))

        self.IMS.assign_instrument_model_to_instrument_device(instrument_model_id, instrument_device_id)

        # cleanup
        self.IMS.force_delete_instrument_device(instrument_device_id)
        self.IMS.force_delete_instrument_model(instrument_model_id)

    def _get_datastore(self, dataset_id):
        # Resolve the science-data datastore backing the given dataset.
        dataset = self.DSC.read_dataset(dataset_id)
        datastore_name = dataset.datastore_name
        datastore = self.container.datastore_manager.get_datastore(datastore_name, DataStore.DS_PROFILE.SCIDATA)
        return datastore

    def test_resource_state_save_restore(self):
        """Snapshot an instrument's agent config, corrupt it, then restore from the snapshot."""

        # Create InstrumentModel
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='SBE37IMModel',
                                  description="SBE37IMModel")
        instModel_id = self.IMS.create_instrument_model(instModel_obj)
        log.debug( 'new InstrumentModel id = %s ', instModel_id)

        # Create InstrumentAgent
        raw_config = StreamConfiguration(stream_name='raw', parameter_dictionary_name='ctd_raw_param_dict', records_per_granule=2, granule_publish_rate=5 )
        parsed_config = StreamConfiguration(stream_name='parsed', parameter_dictionary_name='ctd_parsed_param_dict', records_per_granule=2, granule_publish_rate=5 )
        instAgent_obj = IonObject(RT.InstrumentAgent,
                                  name='agent007',
                                  description="SBE37IMAgent",
                                  driver_uri="http://sddevrepo.oceanobservatories.org/releases/seabird_sbe37smb_ooicore-0.0.1-py2.7.egg",
                                  stream_configurations = [raw_config, parsed_config] )
        instAgent_id = self.IMS.create_instrument_agent(instAgent_obj)
        log.debug( 'new InstrumentAgent id = %s', instAgent_id)

        self.IMS.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

        # Create InstrumentDevice
        log.debug('test_activateInstrumentSample: Create instrument resource to represent the SBE37 ' +
                  '(SA Req: L4-CI-SA-RQ-241) ')
        instDevice_obj = IonObject(RT.InstrumentDevice,
                                   name='SBE37IMDevice',
                                   description="SBE37IMDevice",
                                   serial_number="12345" )
        instDevice_id = self.IMS.create_instrument_device(instrument_device=instDevice_obj)
        self.IMS.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)

        log.debug("test_activateInstrumentSample: new InstrumentDevice id = %s (SA Req: L4-CI-SA-RQ-241) ", instDevice_id)

        # Port agent wiring for the SBE37 simulator; endpoints come from CFG.
        port_agent_config = {
            'device_addr': CFG.device.sbe37.host,
            'device_port': CFG.device.sbe37.port,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': CFG.device.sbe37.port_agent_cmd_port,
            'data_port': CFG.device.sbe37.port_agent_data_port,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance,
                                          name='SBE37IMAgentInstance',
                                          description="SBE37IMAgentInstance",
                                          port_agent_config = port_agent_config)

        instAgentInstance_id = self.IMS.create_instrument_agent_instance(instAgentInstance_obj,
                                                                         instAgent_id,
                                                                         instDevice_id)

        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        spdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        parsed_stream_def_id = self.PSC.create_stream_definition(name='parsed', parameter_dictionary_id=spdict_id)

        rpdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_raw_param_dict', id_only=True)
        raw_stream_def_id = self.PSC.create_stream_definition(name='raw', parameter_dictionary_id=rpdict_id)

        #-------------------------------
        # Create Raw and Parsed Data Products for the device
        #-------------------------------

        dp_obj = IonObject(RT.DataProduct,
                           name='the parsed data',
                           description='ctd stream test',
                           temporal_domain = tdom,
                           spatial_domain = sdom)

        data_product_id1 = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=parsed_stream_def_id)
        log.debug( 'new dp_id = %s', data_product_id1)

        self.DAMS.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id1)
        self.DP.activate_data_product_persistence(data_product_id=data_product_id1)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.RR.find_objects(data_product_id1, PRED.hasStream, None, True)
        log.debug( 'Data product streams1 = %s', stream_ids)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        dataset_ids, _ = self.RR.find_objects(data_product_id1, PRED.hasDataset, RT.Dataset, True)
        log.debug( 'Data set for data_product_id1 = %s', dataset_ids[0])
        self.parsed_dataset = dataset_ids[0]
        #create the datastore at the beginning of each int test that persists data

        dp_obj = IonObject(RT.DataProduct,
                           name='the raw data',
                           description='raw stream test',
                           temporal_domain = tdom,
                           spatial_domain = sdom)

        data_product_id2 = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=raw_stream_def_id)
        log.debug( 'new dp_id = %s', str(data_product_id2))

        self.DAMS.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id2)
        self.DP.activate_data_product_persistence(data_product_id=data_product_id2)

        # spin up agent
        self.IMS.start_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id)

        self.addCleanup(self.IMS.stop_instrument_agent_instance,
                        instrument_agent_instance_id=instAgentInstance_id)

        #wait for start
        instance_obj = self.IMS.read_instrument_agent_instance(instAgentInstance_id)
        gate = ProcessStateGate(self.PDC.read_process,
                                instance_obj.agent_process_id,
                                ProcessStateEnum.RUNNING)
        # NOTE(review): `await` is a method name here (Python 2 code); it would
        # collide with the async keyword if this file were ported to Python 3.7+.
        self.assertTrue(gate.await(30), "The instrument agent instance (%s) did not spawn in 30 seconds" %
                                        instance_obj.agent_process_id)

        # take snapshot of config
        snap_id = self.IMS.save_resource_state(instDevice_id, "xyzzy snapshot")
        snap_obj = self.RR.read_attachment(snap_id, include_content=True)
        print "Saved config:"
        print snap_obj.content

        #modify config
        instance_obj.driver_config["comms_config"] = "BAD_DATA"
        self.RR.update(instance_obj)

        #restore config
        self.IMS.restore_resource_state(instDevice_id, snap_id)
        instance_obj = self.RR.read(instAgentInstance_id)
        self.assertNotEqual("BAD_DATA", instance_obj.driver_config["comms_config"])

        self.DP.delete_data_product(data_product_id1)
        self.DP.delete_data_product(data_product_id2)

    def test_agent_instance_config(self):
        """
        Verify that agent configurations are being built properly
        """
        clients = DotDict()
        clients.resource_registry = self.RR
        clients.pubsub_management = self.PSC
        clients.dataset_management = self.DSC
        pconfig_builder = PlatformAgentConfigurationBuilder(clients)
        iconfig_builder = InstrumentAgentConfigurationBuilder(clients)

        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        org_id = self.RR2.create(any_old(RT.Org))

        inst_startup_config = {'startup': 'config'}

        # Keys that every generated agent config must contain.
        required_config_keys = [
            'org_name',
            'device_type',
            'agent',
            'driver_config',
            'stream_config',
            'startup_config',
            'alarm_defs',
            'children']

        def verify_instrument_config(config, device_id):
            # Assert the instrument-level config shape.
            for key in required_config_keys:
                self.assertIn(key, config)
            self.assertEqual('Org_1', config['org_name'])
            self.assertEqual(RT.InstrumentDevice, config['device_type'])
            self.assertIn('driver_config', config)
            driver_config = config['driver_config']
            expected_driver_fields = {'process_type': ('ZMQPyClassDriverLauncher',),
                                      }
            for k, v in expected_driver_fields.iteritems():
                self.assertIn(k, driver_config)
                self.assertEqual(v, driver_config[k])
            # NOTE(review): stray no-op expression (missing call?); kept as-is.
            self.assertEqual

            self.assertEqual({'resource_id': device_id}, config['agent'])
            self.assertEqual(inst_startup_config, config['startup_config'])
            self.assertIn('stream_config', config)
            for key in ['alarm_defs', 'children']:
                self.assertEqual({}, config[key])

        def verify_child_config(config, device_id, inst_device_id=None):
            # Assert a (possibly leaf) platform config; recurses into an
            # instrument child when inst_device_id is given.
            for key in required_config_keys:
                self.assertIn(key, config)
            self.assertEqual('Org_1', config['org_name'])
            self.assertEqual(RT.PlatformDevice, config['device_type'])
            self.assertEqual({'process_type': ('ZMQPyClassDriverLauncher',)}, config['driver_config'])
            self.assertEqual({'resource_id': device_id}, config['agent'])
            self.assertIn('stream_config', config)

            if None is inst_device_id:
                for key in ['alarm_defs', 'children', 'startup_config']:
                    self.assertEqual({}, config[key])
            else:
                for key in ['alarm_defs', 'startup_config']:
                    self.assertEqual({}, config[key])
                self.assertIn(inst_device_id, config['children'])
                verify_instrument_config(config['children'][inst_device_id], inst_device_id)

        def verify_parent_config(config, parent_device_id, child_device_id, inst_device_id=None):
            # Assert a parent platform config and recurse into its child platform.
            for key in required_config_keys:
                self.assertIn(key, config)
            self.assertEqual('Org_1', config['org_name'])
            self.assertEqual(RT.PlatformDevice, config['device_type'])
            self.assertEqual({'process_type': ('ZMQPyClassDriverLauncher',)}, config['driver_config'])
            self.assertEqual({'resource_id': parent_device_id}, config['agent'])
            self.assertIn('stream_config', config)
            for key in ['alarm_defs', 'startup_config']:
                self.assertEqual({}, config[key])
            self.assertIn(child_device_id, config['children'])
            verify_child_config(config['children'][child_device_id], child_device_id, inst_device_id)

        rpdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_raw_param_dict', id_only=True)
        raw_stream_def_id = self.PSC.create_stream_definition(name='raw', parameter_dictionary_id=rpdict_id)
        #todo: create org and figure out which agent resource needs to get assigned to it

        def _make_platform_agent_structure(agent_config=None):
            # Build instance + agent + device + data product for one platform,
            # wire the associations, and return the three ids.
            if None is agent_config: agent_config = {}

            # instance creation
            platform_agent_instance_obj = any_old(RT.PlatformAgentInstance)
            platform_agent_instance_obj.agent_config = agent_config
            platform_agent_instance_id = self.IMS.create_platform_agent_instance(platform_agent_instance_obj)

            # agent creation
            raw_config = StreamConfiguration(stream_name='raw', parameter_dictionary_name='ctd_raw_param_dict', records_per_granule=2, granule_publish_rate=5 )
            platform_agent_obj = any_old(RT.PlatformAgent, {"stream_configurations":[raw_config]})
            platform_agent_id = self.IMS.create_platform_agent(platform_agent_obj)

            # device creation
            platform_device_id = self.IMS.create_platform_device(any_old(RT.PlatformDevice))

            # data product creation
            dp_obj = any_old(RT.DataProduct, {"temporal_domain":tdom, "spatial_domain": sdom})
            dp_id = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=raw_stream_def_id)
            self.DAMS.assign_data_product(input_resource_id=platform_device_id, data_product_id=dp_id)
            self.DP.activate_data_product_persistence(data_product_id=dp_id)

            # assignments
            self.RR2.assign_platform_agent_instance_to_platform_device(platform_agent_instance_id, platform_device_id)
            self.RR2.assign_platform_agent_to_platform_agent_instance(platform_agent_id, platform_agent_instance_id)
            self.RR2.assign_platform_device_to_org_with_has_resource(platform_agent_instance_id, org_id)

            return platform_agent_instance_id, platform_agent_id, platform_device_id

        def _make_instrument_agent_structure(agent_config=None):
            # Same as _make_platform_agent_structure but for an instrument.
            if None is agent_config: agent_config = {}

            # instance creation
            instrument_agent_instance_obj = any_old(RT.InstrumentAgentInstance, {"startup_config": inst_startup_config})
            instrument_agent_instance_obj.agent_config = agent_config
            instrument_agent_instance_id = self.IMS.create_instrument_agent_instance(instrument_agent_instance_obj)

            # agent creation
            raw_config = StreamConfiguration(stream_name='raw', parameter_dictionary_name='ctd_raw_param_dict', records_per_granule=2, granule_publish_rate=5 )
            instrument_agent_obj = any_old(RT.InstrumentAgent, {"stream_configurations":[raw_config]})
            instrument_agent_id = self.IMS.create_instrument_agent(instrument_agent_obj)

            # device creation
            instrument_device_id = self.IMS.create_instrument_device(any_old(RT.InstrumentDevice))

            # data product creation
            dp_obj = any_old(RT.DataProduct, {"temporal_domain":tdom, "spatial_domain": sdom})
            dp_id = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=raw_stream_def_id)
            self.DAMS.assign_data_product(input_resource_id=instrument_device_id, data_product_id=dp_id)
            self.DP.activate_data_product_persistence(data_product_id=dp_id)

            # assignments
            self.RR2.assign_instrument_agent_instance_to_instrument_device(instrument_agent_instance_id, instrument_device_id)
            self.RR2.assign_instrument_agent_to_instrument_agent_instance(instrument_agent_id, instrument_agent_instance_id)
            self.RR2.assign_instrument_device_to_org_with_has_resource(instrument_agent_instance_id, org_id)

            return instrument_agent_instance_id, instrument_agent_id, instrument_device_id

        # can't do anything without an agent instance obj
        log.debug("Testing that preparing a launcher without agent instance raises an error")
        self.assertRaises(AssertionError, pconfig_builder.prepare, will_launch=False)

        log.debug("Making the structure for a platform agent, which will be the child")
        platform_agent_instance_child_id, _, platform_device_child_id = _make_platform_agent_structure()
        platform_agent_instance_child_obj = self.RR2.read(platform_agent_instance_child_id)

        log.debug("Preparing a valid agent instance launch, for config only")
        pconfig_builder.set_agent_instance_object(platform_agent_instance_child_obj)
        child_config = pconfig_builder.prepare(will_launch=False)
        verify_child_config(child_config, platform_device_child_id)

        log.debug("Making the structure for a platform agent, which will be the parent")
        platform_agent_instance_parent_id, _, platform_device_parent_id = _make_platform_agent_structure()
        platform_agent_instance_parent_obj = self.RR2.read(platform_agent_instance_parent_id)

        log.debug("Testing child-less parent as a child config")
        pconfig_builder.set_agent_instance_object(platform_agent_instance_parent_obj)
        parent_config = pconfig_builder.prepare(will_launch=False)
        verify_child_config(parent_config, platform_device_parent_id)

        log.debug("assigning child platform to parent")
        self.RR2.assign_platform_device_to_platform_device(platform_device_child_id, platform_device_parent_id)
        child_device_ids = self.RR2.find_platform_device_ids_of_device(platform_device_parent_id)
        self.assertNotEqual(0, len(child_device_ids))

        log.debug("Testing parent + child as parent config")
        pconfig_builder.set_agent_instance_object(platform_agent_instance_parent_obj)
        parent_config = pconfig_builder.prepare(will_launch=False)
        verify_parent_config(parent_config, platform_device_parent_id, platform_device_child_id)

        log.debug("making the structure for an instrument agent")
        instrument_agent_instance_id, _, instrument_device_id = _make_instrument_agent_structure()
        instrument_agent_instance_obj = self.RR2.read(instrument_agent_instance_id)

        log.debug("Testing instrument config")
        iconfig_builder.set_agent_instance_object(instrument_agent_instance_obj)
        instrument_config = iconfig_builder.prepare(will_launch=False)
        verify_instrument_config(instrument_config, instrument_device_id)

        log.debug("assigning instrument to platform")
        self.RR2.assign_instrument_device_to_platform_device(instrument_device_id, platform_device_child_id)
        child_device_ids = self.RR2.find_instrument_device_ids_of_device(platform_device_child_id)
        self.assertNotEqual(0, len(child_device_ids))

        log.debug("Testing entire config")
        pconfig_builder.set_agent_instance_object(platform_agent_instance_parent_obj)
        full_config = pconfig_builder.prepare(will_launch=False)
        verify_parent_config(full_config, platform_device_parent_id, platform_device_child_id, instrument_device_id)

        #self.fail(parent_config)
        #plauncher.prepare(will_launch=False)

    def sample_nested_platform_agent_instance_config(self):
        """
        for informational purposes
        """
        # Reference example of a parent->child platform config; not executed
        # by any test, kept as documentation of the expected nesting shape.
        ret = {'org_name': 'Org_1',
               'alarm_defs': {},
               'driver_config': {'process_type': ('ZMQPyClassDriverLauncher',)},
               'stream_config': {'parameter_dictionary': 'lots of stuff'},
               'agent': {'resource_id': '33e54106c4444444862da082098bc123'},
               'startup_config': {},
               'device_type': 'PlatformDevice',
               'children': {'76a39596eeff4fd5b409c4cb93f0e581':
                                {'org_name': 'Org_1',
                                 'alarm_defs': {},
                                 'driver_config': {'process_type': ('ZMQPyClassDriverLauncher',)},
                                 'stream_config': {'parameter_dictionary': 'lots of stuff'},
                                 'agent': {'resource_id': '76a39596eeff4fd5b409c4cb93f0e581'},
                                 'startup_config': {},
                                 'device_type': 'PlatformDevice',
                                 'children': {}}}}
        return ret
class DiscoveryIntTest(IonIntegrationTestCase):
    """Integration tests for the Discovery service's search grammar.

    Exercises the text search DSL (``search ... from '<index>'``) against
    resources created through the resource registry, covering geo-distance,
    ranged-value, collection, sub-object, time, ownership and associative
    queries.

    NOTE: ``setUp`` raises ``SkipTest`` unconditionally, so every test in
    this class is currently skipped; the remaining setup code below the
    raise is unreachable until that line is removed.
    """

    def setUp(self):
        # Entire suite disabled: the Discovery backend has not been ported
        # to Postgres yet. Everything after this raise is dead code.
        raise SkipTest("Not yet ported to Postgres")

        super(DiscoveryIntTest, self).setUp()
        config = DotDict()

        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml', config)

        # Service clients used by the individual tests.
        self.discovery = DiscoveryServiceClient()
        self.catalog = CatalogManagementServiceClient()
        self.ims = IndexManagementServiceClient()
        self.rr = ResourceRegistryServiceClient()
        self.dataset_management = DatasetManagementServiceClient()
        self.pubsub_management = PubsubManagementServiceClient()
        self.data_product_management = DataProductManagementServiceClient()

    def test_geo_distance_search(self):
        """Geo-distance query on the devices index finds a created device."""
        pd = PlatformDevice(name='test_dev')
        pd_id, _ = self.rr.create(pd)
        search_string = "search 'index_location' geo distance 20 km from lat 0 lon 0 from 'devices_index'"
        # self.poll retries the parse until results appear (indexing is async).
        results = self.poll(9, self.discovery.parse,search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == pd_id)
        self.assertTrue(results[0]['_source'].name == 'test_dev')

    def test_ranged_value_searching(self):
        """Numeric range queries (bounded and lower-bounded) on a custom view."""
        discovery = self.discovery
        rr = self.rr
        view_id = discovery.create_view('bank_view', fields=['cash_balance'])
        bank_id, _ = rr.create(BankAccount(name='broke', cash_balance=10))

        # Bounded range: 0 <= cash_balance <= 100 matches the first account.
        search_string = "search 'cash_balance' values from 0 to 100 from '%s'" % view_id
        results = self.poll(5, discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == bank_id)

        bank_id, _ = rr.create(BankAccount(name='broke', cash_balance=90))

        # Lower-bounded range: cash_balance >= 80 matches only the second account.
        search_string = "search 'cash_balance' values from 80 from '%s'" % view_id
        results = self.poll(5, discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == bank_id)

    def test_collections_searching(self):
        """A wildcard search restricted to a collection returns its member."""
        site_id, _ = self.rr.create(Site(name='black_mesa'))
        view_id = self.discovery.create_view('big', fields=['name'])

        # Add the site to a new collection
        collection_id = self.ims.create_collection('resource_collection', [site_id])

        search_string = "search 'name' is '*' from '%s' and in '%s'" %(view_id, collection_id)
        results = self.poll(9, self.discovery.parse,search_string,id_only=True)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0] == site_id, '%s' % results)

    def test_search_by_name_index(self):
        """Wildcard and ranged-value queries against the generic resources index."""
        # Case-insensitive prefix match on a string field.
        inst_dev = InstrumentDevice(name='test_dev',serial_number='ABC123')
        dev_id, _ = self.rr.create(inst_dev)
        search_string = "search 'serial_number' is 'abc*' from 'resources_index'"
        results = self.poll(9, self.discovery.parse,search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dev_id)

        # Numeric range on a different resource type, same index.
        bank_acc = BankAccount(name='blah', cash_balance=10)
        res_id , _ = self.rr.create(bank_acc)
        search_string = "search 'cash_balance' values from 0 to 100 from 'resources_index'"
        results = self.poll(9, self.discovery.parse,search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == res_id)

    def test_data_product_search(self):
        """Queries on nested (dotted) fields and plain fields of a data product."""
        # Create the dataproduct
        dp = DataProduct(name='test_product')
        dp.data_format.name = 'test_signal'
        dp.data_format.description = 'test signal'
        dp.data_format.character_set = 'utf8'
        dp.data_format.nominal_sampling_rate_maximum = '44000'
        dp.data_format.nominal_sampling_rate_minimum = '44000'
        dp.CDM_data_type = 'basic'
        dp_id, _ = self.rr.create(dp)

        # Dotted path into the embedded data_format object.
        search_string = "search 'data_format.name' is 'test_signal' from 'data_products_index'"
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)

        # Top-level attribute.
        search_string = "search 'CDM_data_type' is 'basic' from 'data_products_index'"
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)

        # Another dotted path.
        search_string = "search 'data_format.character_set' is 'utf8' from 'data_products_index'"
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)

    def test_events_search(self):
        """Events index captures the creation event for a new resource."""
        # Create a resource to force a new event
        dp = DataProcess()
        dp_id, rev = self.rr.create(dp)

        # Keywords are case-insensitive: uppercase SEARCH/IS/FROM here.
        search_string = "SEARCH 'origin' IS '%s' FROM 'events_index'" % dp_id
        results = self.poll(9, self.discovery.parse,search_string)
        origin_type = results[0]['_source'].origin_type
        origin_id = results[0]['_source'].origin
        self.assertTrue(origin_type == RT.DataProcess)
        self.assertTrue(origin_id == dp_id)

    def test_time_search(self):
        """Compound query combining a type match with a ts_created time range."""
        today = date.today()
        past = today - timedelta(days=2)
        future = today + timedelta(days=2)

        data_product = DataProduct()
        dp_id, _ = self.rr.create(data_product)

        # Bounded time window around "now".
        search_string = "search 'type_' is 'DataProduct' from 'data_products_index' and search 'ts_created' time from '%s' to '%s' from 'data_products_index'" % (past, future)
        results = self.poll(9, self.discovery.parse,search_string,id_only=True)
        self.assertIsNotNone(results,'Results not found')
        self.assertIn(dp_id, results)

        # Open-ended window (from past onward).
        search_string = "search 'type_' is 'DataProduct' from 'data_products_index' and search 'ts_created' time from '%s' from 'data_products_index'" % past
        results = self.poll(9, self.discovery.parse,search_string,id_only=True)
        self.assertIsNotNone(results,'Results not found')
        self.assertIn(dp_id, results)

    def test_user_search(self):
        """User index is searchable by top-level and nested contact fields."""
        user = UserInfo()
        user.name = 'test'
        user.contact.phones.append('5551212')
        user_id, _ = self.rr.create(user)

        search_string = 'search "name" is "test" from "users_index"'
        results = self.poll(9, self.discovery.parse,search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == user_id)
        self.assertTrue(results[0]['_source'].name == 'test')

        # Dotted path into the contact sub-object's phone list.
        search_string = 'search "contact.phones" is "5551212" from "users_index"'
        results = self.poll(9, self.discovery.parse,search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == user_id)
        self.assertTrue(results[0]['_source'].name == 'test')

    def test_subobject_search(self):
        """Sub-object fields match both via full dotted path and bare field name."""
        contact = ContactInformation()
        contact.email = '*****@*****.**'
        contact.individual_name_family = 'Tester'
        contact.individual_names_given = 'Intern'

        dp = DataProduct(name='example')
        dp.contacts.append(contact)
        dp_id,_ = self.rr.create(dp)

        #--------------------------------------------------------------------------------
        # Example using the full field name
        #--------------------------------------------------------------------------------
        search_string = 'search "contacts.email" is "*****@*****.**" from "data_products"'
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)
        self.assertEquals(results[0]['_source'].name, 'example')

        #--------------------------------------------------------------------------------
        # Example using a sub-object's field name (ambiguous searching)
        #--------------------------------------------------------------------------------
        search_string = 'search "individual_names_given" is "Intern" from "data_products"'
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)
        self.assertEquals(results[0]['_source'].name, 'example')

    def test_descriptive_phrase_search(self):
        """'like' operator matches a phrase inside a description."""
        dp = DataProduct(name='example', description='This is simply a description for this data product')
        dp_id, _ = self.rr.create(dp)

        search_string = 'search "description" like "description for" from "data_products"'
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)
        self.assertEquals(results[0]['_source'].name, 'example')

    def test_match_search(self):
        """'match' operator matches analyzed text inside a description."""
        dp = DataProduct(name='example', description='This is simply a description for this data product')
        dp_id, _ = self.rr.create(dp)

        search_string = 'search "description" match "this data product" from "data_products"'
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)
        self.assertEquals(results[0]['_source'].name, 'example')

    def test_expected_match_results(self):
        """'match' returns only relevant hits from a pool of decoy device names."""
        names = [ 'Instrument for site1', 'Instrument for simulator', 'CTD1', 'SBE37', 'SSN-719', 'Submerssible Expendable Bathyothermograph', 'VELPT', 'VELO', 'Safire2 169' ]
        for name in names:
            res_id, _ = self.rr.create(InstrumentDevice(name=name))
            # Clean up every created device after the test.
            self.addCleanup(self.rr.delete, res_id)

        # Single-token match hits exactly one name.
        search_string = 'search "name" match "expendable" from "devices"'
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertEquals(len(results),1)
        self.assertEquals(results[0]['_source'].name, 'Submerssible Expendable Bathyothermograph')

        # Two-token match hits both "Instrument for ..." names.
        search_string = 'search "name" match "instrument for" from "devices"'
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertEquals(len(results),2)
        self.assertTrue('Instrument for' in results[0]['_source'].name)
        self.assertTrue('Instrument for' in results[1]['_source'].name)

        # Partial-phrase match still resolves to the single best candidate.
        search_string = 'search "name" match "velo for" from "devices"'
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertEquals(len(results),1)
        self.assertEquals(results[0]['_source'].name, 'VELO')

    def test_ownership_searching(self):
        """'has <param_id>' restricts data-product hits to owners of a parameter."""
        # Create two data products so that there is competition to the search, one is parsed
        # (with conductivity as a parameter) and the other is raw
        dp = DataProduct(name='example dataproduct')
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict')
        stream_def_id = self.pubsub_management.create_stream_definition('ctd parsed', parameter_dictionary_id=pdict_id)
        dp_id = self.data_product_management.create_data_product(dp, stream_definition_id=stream_def_id, exchange_point='xp1')

        # Decoy product built on the raw dictionary (no conductivity parameter).
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_raw_param_dict')
        stream_def_id = self.pubsub_management.create_stream_definition('ctd raw', parameter_dictionary_id=pdict_id)
        dp = DataProduct(name='WRONG')
        self.data_product_management.create_data_product(dp, stream_definition_id=stream_def_id, exchange_point='xp1')

        # First resolve the conductivity parameter's resource id ...
        parameter_search = 'search "name" is "conductivity" from "resources_index"'
        results = self.poll(9, self.discovery.parse, parameter_search)
        param_id = results[0]['_id']

        # ... then find data products that "have" that parameter.
        data_product_search = 'search "name" is "*" from "data_products_index" and has "%s"' % param_id
        results = self.poll(9, self.discovery.parse, data_product_search,id_only=True)
        self.assertIn(dp_id, results)
        #self.assertEquals(results[0], dp_id)

    def test_associative_searching(self):
        """'belongs to <id>' traverses an association from subject to object."""
        dp_id,_ = self.rr.create(DataProduct('test_foo'))
        ds_id,_ = self.rr.create(Dataset('test_bar', registered=True))
        self.rr.create_association(subject=dp_id, object=ds_id, predicate='hasDataset')

        search_string = "search 'type_' is 'Dataset' from 'resources_index' and belongs to '%s'" % dp_id
        results = self.poll(5, self.discovery.parse, search_string, id_only=True)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(ds_id in results)

    def test_iterative_associative_searching(self):
        """'depth N' bounds how many association hops a belongs-to search follows."""
        #--------------------------------------------------------------------------------
        # Tests the ability to limit the iterations
        #--------------------------------------------------------------------------------
        # Build a two-hop chain: DataProcess -> Transform -> ProcessDefinition.
        dp = DataProcess()
        transform = Transform()
        pd = ProcessDefinition()

        dp_id, _ = self.rr.create(dp)
        transform_id, _ = self.rr.create(transform)
        pd_id, _ = self.rr.create(pd)

        self.rr.create_association(subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.rr.create_association(subject=transform_id, object=pd_id, predicate=PRED.hasProcessDefinition)

        # Depth 1: only the directly associated transform is returned.
        search_string = "belongs to '%s' depth 1" % dp_id
        results = self.poll(5, self.discovery.parse,search_string)
        results = list([i._id for i in results])
        correct = [transform_id]
        self.assertTrue(results == correct, '%s' % results)

        # Depth 2: the second hop (process definition) is included as well.
        search_string = "belongs to '%s' depth 2" % dp_id
        results = self.poll(5, self.discovery.parse,search_string)
        results = list([i._id for i in results])
        results.sort()
        correct = [transform_id, pd_id]
        correct.sort()
        self.assertTrue(results == correct)
class TestResourceRegistry(IonIntegrationTestCase): def setUp(self): # Start container self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml') # Now create client to bank service self.resource_registry_service = ResourceRegistryServiceClient() @unittest.skip('Represents a bug in storage/retrieval') def test_tuple_in_dict(self): # create a resource with a tuple saved in a dict transform_obj = IonObject(RT.Transform) transform_obj.configuration = {} transform_obj.configuration["tuple"] = ('STRING',) transform_id, _ = self.resource_registry_service.create(transform_obj) # read the resource back returned_transform_obj = self.resource_registry_service.read(transform_id) self.assertEqual(transform_obj.configuration["tuple"], returned_transform_obj.configuration["tuple"]) def test_basics(self): # Sequence all the tests so that we can save numerous system start and stops self._do_test_crud() self._do_test_read_mult() self._do_test_lifecycle() self._do_test_attach() self._do_test_association() self._do_test_find_resources() self._do_test_find_objects_mult() def _do_test_crud(self): # Some quick registry tests # Can't call new with fields that aren't defined in the object's schema with self.assertRaises(TypeError) as cm: IonObject("UserInfo", name="name", foo="bar") self.assertTrue(cm.exception.message == "__init__() got an unexpected keyword argument 'foo'") # Can't call new with fields that aren't defined in the object's schema with self.assertRaises(TypeError) as cm: IonObject("UserInfo", {"name": "name", "foo": "bar"}) self.assertTrue(cm.exception.message == "__init__() got an unexpected keyword argument 'foo'") # Can't call new with fields that aren't defined in the object's schema with self.assertRaises(TypeError) as cm: IonObject("UserInfo", {"name": "name"}, foo="bar") self.assertTrue(cm.exception.message == "__init__() got an unexpected keyword argument 'foo'") # Instantiate an object obj = IonObject("UserInfo", name="name") # Can set 
attributes that aren't in the object's schema with self.assertRaises(AttributeError) as cm: setattr(obj, "foo", "bar") self.assertTrue(cm.exception.message == "'UserInfo' object has no attribute 'foo'") # Cam't call update with object that hasn't been persisted with self.assertRaises(BadRequest) as cm: self.resource_registry_service.update(obj) self.assertTrue(cm.exception.message.startswith("Object does not have required '_id' or '_rev' attribute")) # Persist object and read it back obj_id, obj_rev = self.resource_registry_service.create(obj) read_obj = self.resource_registry_service.read(obj_id) # Cannot create object with _id and _rev fields pre-set with self.assertRaises(BadRequest) as cm: self.resource_registry_service.create(read_obj) # Update object read_obj.name = "John Doe" self.resource_registry_service.update(read_obj) # Update should fail with revision mismatch with self.assertRaises(Conflict) as cm: self.resource_registry_service.update(read_obj) # Re-read and update object read_obj = self.resource_registry_service.read(obj_id) self.resource_registry_service.update(read_obj) # Delete object self.resource_registry_service.delete(obj_id) # Make sure read, update and delete report error with self.assertRaises(NotFound) as cm: self.resource_registry_service.read(obj_id) self.assertTrue(cm.exception.message.startswith("Object with id")) with self.assertRaises(NotFound) as cm: self.resource_registry_service.update(read_obj) self.assertTrue(cm.exception.message.startswith("Object with id")) with self.assertRaises(NotFound) as cm: self.resource_registry_service.delete(obj_id) self.assertTrue(cm.exception.message.startswith("Object with id")) # Owner creation tests user = IonObject("ActorIdentity", name='user') uid,_ = self.resource_registry_service.create(user) inst = IonObject("InstrumentDevice", name='instrument') iid,_ = self.resource_registry_service.create(inst, headers={'ion-actor-id':str(uid)}) ids,_ = self.resource_registry_service.find_objects(iid, 
PRED.hasOwner, RT.ActorIdentity, id_only=True) self.assertEquals(len(ids), 1) assoc = self.resource_registry_service.read(ids[0]) self.resource_registry_service.delete(iid) with self.assertRaises(NotFound) as ex: assoc = self.resource_registry_service.read(iid) def _do_test_read_mult(self): test_resource1_id,_ = self.resource_registry_service.create(Resource(name='test1')) test_resource2_id,_ = self.resource_registry_service.create(Resource(name='test2')) res_list = [test_resource1_id, test_resource2_id] objects = self.resource_registry_service.read_mult(res_list) for o in objects: self.assertIsInstance(o,Resource) self.assertTrue(o._id in res_list) def _do_test_lifecycle(self): # Lifecycle tests att = IonObject("InstrumentDevice", name='mine', description='desc') rid,rev = self.resource_registry_service.create(att) att1 = self.resource_registry_service.read(rid) self.assertEquals(att1.name, att.name) self.assertEquals(att1.lcstate, LCS.DRAFT) self.assertEquals(att1.availability, AS.PRIVATE) new_state = self.resource_registry_service.execute_lifecycle_transition(rid, LCE.PLAN) self.assertEquals(new_state, lcstate(LCS.PLANNED, AS.PRIVATE)) att2 = self.resource_registry_service.read(rid) self.assertEquals(att2.lcstate, LCS.PLANNED) self.assertEquals(att2.availability, AS.PRIVATE) with self.assertRaises(BadRequest) as cm: self.resource_registry_service.execute_lifecycle_transition(rid, LCE.UNANNOUNCE) self.assertTrue("type=InstrumentDevice, lcstate=PLANNED_PRIVATE has no transition for event unannounce" in cm.exception.message) new_state = self.resource_registry_service.execute_lifecycle_transition(rid, LCE.DEVELOP) self.assertEquals(new_state, lcstate(LCS.DEVELOPED, AS.PRIVATE)) with self.assertRaises(BadRequest): self.resource_registry_service.execute_lifecycle_transition( resource_id=rid, transition_event='NONE##') self.resource_registry_service.set_lifecycle_state(rid, lcstate(LCS.INTEGRATED, AS.PRIVATE)) att1 = self.resource_registry_service.read(rid) 
self.assertEquals(att1.lcstate, LCS.INTEGRATED) self.assertEquals(att1.availability, AS.PRIVATE) def _do_test_attach(self): binary = "\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x03\x00\x00\x00(-\x0fS\x00\x00\x00\x03sBIT\x08\x08\x08\xdb\xe1O\xe0\x00\x00\x00~PLTEf3\x00\xfc\xf7\xe0\xee\xcc\x00\xd3\xa0\x00\xcc\x99\x00\xec\xcdc\x9fl\x00\xdd\xb2\x00\xff\xff\xff|I\x00\xf9\xdb\x00\xdd\xb5\x19\xd9\xad\x10\xb6\x83\x00\xf8\xd6\x00\xf2\xc5\x00\xd8\xab\x00n;\x00\xff\xcc\x00\xd6\xa4\t\xeb\xb8\x00\x83Q\x00\xadz\x00\xff\xde\x00\xff\xd6\x00\xd6\xa3\x00\xdf\xaf\x00\xde\xad\x10\xbc\x8e\x00\xec\xbe\x00\xec\xd4d\xff\xe3\x00tA\x00\xf6\xc4\x00\xf6\xce\x00\xa5u\x00\xde\xa5\x00\xf7\xbd\x00\xd6\xad\x08\xdd\xaf\x19\x8cR\x00\xea\xb7\x00\xee\xe9\xdf\xc5\x00\x00\x00\tpHYs\x00\x00\n\xf0\x00\x00\n\xf0\x01B\xac4\x98\x00\x00\x00\x1ctEXtSoftware\x00Adobe Fireworks CS4\x06\xb2\xd3\xa0\x00\x00\x00\x15tEXtCreation Time\x0029/4/09Oq\xfdE\x00\x00\x00\xadIDAT\x18\x95M\x8f\x8d\x0e\x820\x0c\x84;ZdC~f\x07\xb2\x11D\x86\x89\xe8\xfb\xbf\xa0+h\xe2\x97\\\xd2^\x93\xb6\x07:1\x9f)q\x9e\xa5\x06\xad\xd5\x13\x8b\xac,\xb3\x02\x9d\x12C\xa1-\xef;M\x08*\x19\xce\x0e?\x1a\xeb4\xcc\xd4\x0c\x831\x87V\xca\xa1\x1a\xd3\x08@\xe4\xbd\xb7\x15P;\xc8\xd4{\x91\xbf\x11\x90\xffg\xdd\x8di\xfa\xb6\x0bs2Z\xff\xe8yg2\xdc\x11T\x96\xc7\x05\xa5\xef\x96+\xa7\xa59E\xae\xe1\x84cm^1\xa6\xb3\xda\x85\xc8\xd8/\x17se\x0eN^'\x8c\xc7\x8e\x88\xa8\xf6p\x8e\xc2;\xc6.\xd0\x11.\x91o\x12\x7f\xcb\xa5\xfe\x00\x89]\x10:\xf5\x00\x0e\xbf\x00\x00\x00\x00IEND\xaeB`\x82" # Owner creation tests instrument = IonObject("InstrumentDevice", name='instrument') iid,_ = self.resource_registry_service.create(instrument) att = Attachment(content=binary, attachment_type=AttachmentType.BLOB) aid1 = self.resource_registry_service.create_attachment(iid, att) att1 = self.resource_registry_service.read_attachment(aid1, include_content=True) self.assertEquals(binary, att1.content) import base64 att = Attachment(content=base64.encodestring(binary), 
attachment_type=AttachmentType.ASCII) aid2 = self.resource_registry_service.create_attachment(iid, att) att1 = self.resource_registry_service.read_attachment(aid2, include_content=True) self.assertEquals(binary, base64.decodestring(att1.content)) att_ids = self.resource_registry_service.find_attachments(iid, id_only=True) self.assertEquals(att_ids, [aid1, aid2]) att_ids = self.resource_registry_service.find_attachments(iid, id_only=True, descending=True) self.assertEquals(att_ids, [aid2, aid1]) att_ids = self.resource_registry_service.find_attachments(iid, id_only=True, descending=True, limit=1) self.assertEquals(att_ids, [aid2]) atts = self.resource_registry_service.find_attachments(iid, id_only=False, include_content=True, limit=1) self.assertEquals(atts[0].content, binary) self.resource_registry_service.delete_attachment(aid1) att_ids = self.resource_registry_service.find_attachments(iid, id_only=True) self.assertEquals(att_ids, [aid2]) self.resource_registry_service.delete_attachment(aid2) att_ids = self.resource_registry_service.find_attachments(iid, id_only=True) self.assertEquals(att_ids, []) def _do_test_association(self): # Instantiate ActorIdentity object actor_identity_obj = IonObject("ActorIdentity", name="name") actor_identity_obj_id, actor_identity_obj_rev = self.resource_registry_service.create(actor_identity_obj) read_actor_identity_obj = self.resource_registry_service.read(actor_identity_obj_id) # Instantiate UserInfo object user_info_obj = IonObject("UserInfo", name="name") user_info_obj_id, user_info_obj_rev = self.resource_registry_service.create(user_info_obj) read_user_info_obj = self.resource_registry_service.read(user_info_obj_id) # Test create failures with self.assertRaises(AttributeError) as cm: self.resource_registry_service.create_association(actor_identity_obj_id, PRED.bogus, user_info_obj_id) self.assertTrue(cm.exception.message == "bogus") # Predicate not provided with self.assertRaises(BadRequest) as cm: 
self.resource_registry_service.create_association(actor_identity_obj_id, None, user_info_obj_id) self.assertTrue(cm.exception.message == "Association must have all elements set") # Subject id or object not provided with self.assertRaises(BadRequest) as cm: self.resource_registry_service.create_association(None, PRED.hasInfo, user_info_obj_id) self.assertTrue(cm.exception.message == "Association must have all elements set") # Object id or object not provided with self.assertRaises(BadRequest) as cm: self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, None) self.assertTrue(cm.exception.message == "Association must have all elements set") # Bad subject id with self.assertRaises(NotFound) as cm: self.resource_registry_service.create_association("bogus", PRED.hasInfo, user_info_obj_id) self.assertTrue(cm.exception.message == "Object with id bogus does not exist.") # Bad object id with self.assertRaises(NotFound) as cm: self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, "bogus") self.assertTrue(cm.exception.message == "Object with id bogus does not exist.") # _id missing from subject with self.assertRaises(BadRequest) as cm: self.resource_registry_service.create_association(actor_identity_obj, PRED.hasInfo, user_info_obj_id) self.assertTrue(cm.exception.message.startswith("Subject id")) # _id missing from object with self.assertRaises(BadRequest) as cm: self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, user_info_obj) self.assertTrue(cm.exception.message.startswith("Object id")) # Wrong subject type with self.assertRaises(BadRequest) as cm: self.resource_registry_service.create_association(user_info_obj_id, PRED.hasInfo, user_info_obj_id) self.assertTrue(cm.exception.message == "Illegal subject type UserInfo for predicate hasInfo") # Wrong object type with self.assertRaises(BadRequest) as cm: self.resource_registry_service.create_association(actor_identity_obj_id, 
PRED.hasInfo, actor_identity_obj_id) self.assertTrue(cm.exception.message == "Illegal object type ActorIdentity for predicate hasInfo") # Create two different association types between the same subject and predicate assoc_id1, assoc_rev1 = self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, user_info_obj_id) # Read object, subject res_obj1 = self.resource_registry_service.read_object(actor_identity_obj_id, PRED.hasInfo, RT.UserInfo) self.assertEquals(res_obj1._id, user_info_obj_id) res_obj1 = self.resource_registry_service.read_object(actor_identity_obj_id, PRED.hasInfo, RT.UserInfo, id_only=True) self.assertEquals(res_obj1, user_info_obj_id) res_obj2 = self.resource_registry_service.read_subject(RT.ActorIdentity, PRED.hasInfo, user_info_obj_id) self.assertEquals(res_obj2._id, actor_identity_obj_id) res_obj2 = self.resource_registry_service.read_subject(RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, id_only=True) self.assertEquals(res_obj2, actor_identity_obj_id) # Search for associations (good cases) ret1 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, user_info_obj_id) ret2 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo) ret3 = self.resource_registry_service.find_associations(None, PRED.hasInfo) self.assertTrue(len(ret1) == len(ret2) == len(ret3)) self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id) ret1 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, user_info_obj_id, None, False) ret2 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, id_only=False) ret3 = self.resource_registry_service.find_associations(predicate=PRED.hasInfo, id_only=False) self.assertTrue(ret1 == ret2 == ret3) # Search for associations (good cases) ret1 = self.resource_registry_service.find_associations(read_actor_identity_obj, PRED.hasInfo, read_user_info_obj) ret2 = 
self.resource_registry_service.find_associations(read_actor_identity_obj, PRED.hasInfo) ret3 = self.resource_registry_service.find_associations(None, PRED.hasInfo) self.assertTrue(len(ret1) == len(ret2) == len(ret3)) self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id) ret1 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, read_user_info_obj, None, True) ret2 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, id_only=True) ret3 = self.resource_registry_service.find_associations(predicate=PRED.hasInfo, id_only=True) self.assertTrue(ret1 == ret2 == ret3) # Search for associations (bad cases) with self.assertRaises(BadRequest) as cm: self.resource_registry_service.find_associations(None, None, None) self.assertIn("Illegal parameters", cm.exception.message) # Find subjects (good cases) subj_ret1 = self.resource_registry_service.find_subjects(RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, True) subj_ret2 = self.resource_registry_service.find_subjects(RT.ActorIdentity, PRED.hasInfo, read_user_info_obj, True) self.assertTrue(len(subj_ret1) == len(subj_ret2)) self.assertTrue(subj_ret1[0] == subj_ret2[0]) self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id) subj_ret3 = self.resource_registry_service.find_subjects(None, PRED.hasInfo, user_info_obj_id, True) subj_ret4 = self.resource_registry_service.find_subjects(None, None, read_user_info_obj, True) self.assertTrue(len(subj_ret3) == len(subj_ret4)) self.assertTrue(subj_ret3[0] == subj_ret4[0]) self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id) subj_ret5 = self.resource_registry_service.find_subjects(None, PRED.hasInfo, user_info_obj_id, False) subj_ret6 = self.resource_registry_service.find_subjects(None, None, read_user_info_obj, False) self.assertTrue(len(subj_ret5) == len(subj_ret6)) self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id) self.assertTrue(subj_ret5[1][0]._id == subj_ret6[1][0]._id) # Find subjects (bad 
cases) with self.assertRaises(BadRequest) as cm: self.resource_registry_service.find_subjects(None, None, None) self.assertTrue(cm.exception.message == "Must provide object") with self.assertRaises(AttributeError) as cm: self.resource_registry_service.find_subjects(RT.UserCredentials, PRED.bogus, user_info_obj_id, True) self.assertTrue(cm.exception.message == "bogus") ret = self.resource_registry_service.find_subjects(RT.UserInfo, PRED.hasCredentials, user_info_obj_id, True) self.assertTrue(len(ret[0]) == 0) ret = self.resource_registry_service.find_subjects(RT.UserCredentials, PRED.hasInfo, user_info_obj_id, True) self.assertTrue(len(ret[0]) == 0) with self.assertRaises(BadRequest) as cm: self.resource_registry_service.find_subjects(RT.UserCredentials, PRED.hasInfo, user_info_obj, True) self.assertTrue(cm.exception.message == "Object id not available in object") # Find objects (good cases) subj_ret1 = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, RT.UserInfo, True) subj_ret2 = self.resource_registry_service.find_objects(read_actor_identity_obj, PRED.hasInfo, RT.UserInfo, True) self.assertTrue(len(subj_ret1) == len(subj_ret2)) self.assertTrue(subj_ret1[0] == subj_ret2[0]) self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id) subj_ret3 = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, None, True) subj_ret4 = self.resource_registry_service.find_objects(actor_identity_obj_id, None, None, True) self.assertTrue(len(subj_ret3) == len(subj_ret4)) self.assertTrue(subj_ret3[0] == subj_ret4[0]) self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id) subj_ret5 = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, None, False) subj_ret6 = self.resource_registry_service.find_objects(read_actor_identity_obj, None, None, False) self.assertTrue(len(subj_ret5) == len(subj_ret6)) self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id) self.assertTrue(subj_ret5[1][0]._id == 
subj_ret6[1][0]._id) # Find objects (bad cases) with self.assertRaises(BadRequest) as cm: self.resource_registry_service.find_objects(None, None, None) self.assertTrue(cm.exception.message == "Must provide subject") with self.assertRaises(AttributeError) as cm: self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.bogus, RT.UserCredentials, True) self.assertTrue(cm.exception.message == "bogus") ret = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasCredentials, RT.ActorIdentity, True) self.assertTrue(len(ret[0]) == 0) ret = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, RT.UserCredentials, True) self.assertTrue(len(ret[0]) == 0) with self.assertRaises(BadRequest) as cm: self.resource_registry_service.find_objects(actor_identity_obj, PRED.hasInfo, RT.UserInfo, True) self.assertTrue(cm.exception.message == "Object id not available in subject") # Get association (bad cases) with self.assertRaises(BadRequest) as cm: self.resource_registry_service.get_association(None, None, None) self.assertIn("Illegal parameters", cm.exception.message) assoc = self.resource_registry_service.get_association(actor_identity_obj_id, PRED.hasInfo, user_info_obj_id) self.assertTrue(assoc._id == assoc_id1) # Delete (bad cases) with self.assertRaises(NotFound) as cm: self.resource_registry_service.delete_association("bogus") self.assertTrue(cm.exception.message == "Object with id bogus does not exist.") # Delete other association self.resource_registry_service.delete_association(assoc_id1) # Delete resources self.resource_registry_service.delete(actor_identity_obj_id) self.resource_registry_service.delete(user_info_obj_id) def _do_test_find_resources(self): with self.assertRaises(BadRequest) as cm: self.resource_registry_service.find_resources(RT.UserInfo, LCS.DRAFT, "name", False) self.assertTrue(cm.exception.message == "find by name does not support lcstate") ret = 
self.resource_registry_service.find_resources(RT.UserInfo, None, "name", False) self.assertEquals(len(ret[0]), 0) # Instantiate an object obj = IonObject("InstrumentAgentInstance", name="name") # Persist object and read it back obj_id, obj_rev = self.resource_registry_service.create(obj) read_obj = self.resource_registry_service.read(obj_id) ret = self.resource_registry_service.find_resources(RT.InstrumentAgentInstance, None, "name", False) self.assertEquals(len(ret[0]), 1) self.assertEquals(ret[0][0]._id, read_obj._id) ret = self.resource_registry_service.find_resources(RT.InstrumentAgentInstance, LCS.DEPLOYED, None, False) self.assertEquals(len(ret[0]), 1) self.assertEquals(ret[0][0]._id, read_obj._id) def _do_test_find_objects_mult(self): dp = DataProcess() transform = Transform() pd = ProcessDefinition() dp_id, _ = self.resource_registry_service.create(dp) transform_id, _ = self.resource_registry_service.create(transform) pd_id, _ = self.resource_registry_service.create(pd) self.resource_registry_service.create_association(subject=dp_id, object=transform_id, predicate=PRED.hasTransform) self.resource_registry_service.create_association(subject=transform_id, object=pd_id, predicate=PRED.hasProcessDefinition) results, _ = self.resource_registry_service.find_objects_mult(subjects=[dp_id],id_only=True) self.assertTrue(results == [transform_id]) results, _ = self.resource_registry_service.find_objects_mult(subjects=[dp_id, transform_id], id_only=True) results.sort() correct = [transform_id, pd_id] correct.sort() self.assertTrue(results == correct) @attr('EXT') def test_get_resource_extension(self): #Testing multiple instrument owners subject1 = "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254" actor_identity_obj1 = IonObject(RT.ActorIdentity, {"name": subject1}) actor_id1,_ = self.resource_registry_service.create(actor_identity_obj1) user_info_obj1 = IonObject(RT.UserInfo, {"name": "Foo"}) user_info_id1,_ = 
self.resource_registry_service.create(user_info_obj1) self.resource_registry_service.create_association(actor_id1, PRED.hasInfo, user_info_id1) subject2 = "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Bob Cumbers A256" actor_identity_obj2 = IonObject(RT.ActorIdentity, {"name": subject2}) actor_id2,_ = self.resource_registry_service.create(actor_identity_obj2) user_info_obj2 = IonObject(RT.UserInfo, {"name": "Foo2"}) user_info_id2,_ = self.resource_registry_service.create(user_info_obj2) self.resource_registry_service.create_association(actor_id2, PRED.hasInfo, user_info_id2) test_obj = IonObject(RT.InformationResource, {"name": "TestResource"}) test_obj_id,_ = self.resource_registry_service.create(test_obj) self.resource_registry_service.create_association(test_obj_id, PRED.hasOwner, actor_id1) self.resource_registry_service.create_association(test_obj_id, PRED.hasOwner, actor_id2) extended_resource = self.resource_registry_service.get_resource_extension(test_obj_id, OT.ExtendedInformationResource ) self.assertEqual(test_obj_id,extended_resource._id) self.assertEqual(len(extended_resource.owners),2) extended_resource_list = self.resource_registry_service.get_resource_extension(str([user_info_id1,user_info_id2]), OT.ExtendedInformationResource) self.assertEqual(len(extended_resource_list), 2) optional_args = {'user_id': user_info_id1} extended_resource = self.resource_registry_service.get_resource_extension(test_obj_id, OT.TestExtendedInformationResource, optional_args=optional_args ) self.assertEqual(test_obj_id,extended_resource._id) self.assertEqual(len(extended_resource.owners),2) self.assertEqual(extended_resource.user_id, user_info_id1)
class TestPlatformInstrument(BaseIntTestPlatform): def setUp(self): self._start_container() self._pp = pprint.PrettyPrinter() log.debug("oms_uri = %s", OMS_URI) self.oms = CIOMSClientFactory.create_instance(OMS_URI) self._get_platform_attributes() url = OmsTestMixin.start_http_server() log.info("TestPlatformInstrument:setup http url %s", url) result = self.oms.event.register_event_listener(url) log.info("TestPlatformInstrument:setup register_event_listener result %s", result) # response = self.oms.port.get_platform_ports('LPJBox_CI_Ben_Hall') # log.info("TestPlatformInstrument:setup get_platform_ports %s", response) self.container.start_rel_from_url('res/deploy/r2deploy.yml') # Now create client to DataProductManagementService self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.pubsubclient = PubsubManagementServiceClient(node=self.container.node) self.imsclient = InstrumentManagementServiceClient(node=self.container.node) self.datasetclient = DatasetManagementServiceClient(node=self.container.node) self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node) self.dpclient = DataProductManagementServiceClient(node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node) self.dataset_management = DatasetManagementServiceClient() self.RR2 = EnhancedResourceRegistryClient(self.rrclient) self.org_id = self.RR2.create(any_old(RT.Org)) log.debug("Org created: %s", self.org_id) # see _set_receive_timeout self._receive_timeout = 177 self.instrument_device = '' self.platform_device = '' self.platform_agent_instance_id = '' self._pa_client = '' def done(): CIOMSClientFactory.destroy_instance(self.oms) event_notifications = OmsTestMixin.stop_http_server() log.info("event_notifications = %s" % str(event_notifications)) self.addCleanup(done) def _get_platform_attributes(self): attr_infos = self.oms.attr.get_platform_attributes('LPJBox_CI_Ben_Hall') 
log.debug('_get_platform_attributes: %s', self._pp.pformat(attr_infos)) # ret_infos = attr_infos['LPJBox_CI_Ben_Hall'] # for attrName, attr_defn in ret_infos.iteritems(): # attr = AttrNode(attrName, attr_defn) # pnode.add_attribute(attr) return attr_infos @unittest.skip('Still in construction...') def test_platform_with_instrument_streaming(self): # # The following is with just a single platform and the single # instrument "SBE37_SIM_08", which corresponds to the one on port 4008. # #load the paramaters and the param dicts necesssary for the VEL3D self._load_params() #create the instrument device/agent/mode self._create_instrument_resources() #create the platform device, agent and instance self._create_platform_configuration('LPJBox_CI_Ben_Hall') self.rrclient.create_association(subject=self.platform_device, predicate=PRED.hasDevice, object=self.instrument_device) self._start_platform() # self.addCleanup(self._stop_platform, p_root) # get everything in command mode: self._ping_agent() self._initialize() _ia_client = ResourceAgentClient(self.instrument_device, process=FakeProcess()) state = _ia_client.get_agent_state() log.info("TestPlatformInstrument get_agent_state %s", state) self._go_active() # self._run() gevent.sleep(3) # note that this includes the instrument also getting to the command state # self._stream_instruments() # get client to the instrument: # the i_obj is a DotDict with various pieces captured during the # set-up of the instrument, in particular instrument_device_id #i_obj = self._get_instrument(instr_key) # log.debug("KK creating ResourceAgentClient") # ia_client = ResourceAgentClient(i_obj.instrument_device_id, # process=FakeProcess()) # log.debug("KK got ResourceAgentClient: %s", ia_client) # # # verify the instrument is command state: # state = ia_client.get_agent_state() # log.debug("KK instrument state: %s", state) # self.assertEqual(state, ResourceAgentState.COMMAND) self._reset() self._shutdown() def _load_params(self): 
log.info("--------------------------------------------------------------------------------------------------------") # load_parameter_scenarios self.container.spawn_process("Loader", "ion.processes.bootstrap.ion_loader", "IONLoader", config=dict( op="load", scenario="BETA", path="master", categories="ParameterFunctions,ParameterDefs,ParameterDictionary,StreamDefinition", clearcols="owner_id,org_ids", assets="res/preload/r2_ioc/ooi_assets", parseooi="True", )) def _create_platform_configuration(self, platform_id, parent_platform_id=None): """ This method is an adaptation of test_agent_instance_config in test_instrument_management_service_integration.py @param platform_id @param parent_platform_id @return a DotDict with various of the constructed elements associated to the platform. """ tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() param_dict_name = 'platform_eng_parsed' parsed_rpdict_id = self.dataset_management.read_parameter_dictionary_by_name( param_dict_name, id_only=True) self.parsed_stream_def_id = self.pubsubclient.create_stream_definition( name='parsed', parameter_dictionary_id=parsed_rpdict_id) driver_config = PLTFRM_DVR_CONFIG driver_config['attributes'] = self._get_platform_attributes() #self._platform_attributes[platform_id] #OMS returning an error for port.get_platform_ports #driver_config['ports'] = self._platform_ports[platform_id] log.debug("driver_config: %s", driver_config) # instance creation platform_agent_instance_obj = any_old(RT.PlatformAgentInstance, { 'driver_config': driver_config}) platform_agent_instance_obj.agent_config = { 'platform_config': { 'platform_id': 'LPJBox_CI_Ben_Hall', 'parent_platform_id': None } } self.platform_agent_instance_id = self.imsclient.create_platform_agent_instance(platform_agent_instance_obj) # agent creation platform_agent_obj = any_old(RT.PlatformAgent, { "stream_configurations": self._get_platform_stream_configs(), 'driver_module': PLTFRM_DVR_MOD, 'driver_class': PLTFRM_DVR_CLS}) 
platform_agent_id = self.imsclient.create_platform_agent(platform_agent_obj) # device creation self.platform_device = self.imsclient.create_platform_device(any_old(RT.PlatformDevice)) # data product creation dp_obj = any_old(RT.DataProduct, {"temporal_domain":tdom, "spatial_domain": sdom}) dp_id = self.dpclient.create_data_product(data_product=dp_obj, stream_definition_id=self.parsed_stream_def_id) self.damsclient.assign_data_product(input_resource_id=self.platform_device, data_product_id=dp_id) self.dpclient.activate_data_product_persistence(data_product_id=dp_id) self.addCleanup(self.dpclient.delete_data_product, dp_id) # assignments self.RR2.assign_platform_agent_instance_to_platform_device_with_has_agent_instance(self.platform_agent_instance_id, self.platform_device) self.RR2.assign_platform_agent_to_platform_agent_instance_with_has_agent_definition(platform_agent_id, self.platform_agent_instance_id) self.RR2.assign_platform_device_to_org_with_has_resource(self.platform_agent_instance_id, self.org_id) ####################################### # dataset log.debug('data product = %s', dp_id) stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, None, True) log.debug('Data product stream_ids = %s', stream_ids) stream_id = stream_ids[0] # Retrieve the id of the OUTPUT stream from the out Data Product dataset_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasDataset, RT.Dataset, True) log.debug('Data set for data_product_id1 = %s', dataset_ids[0]) ####################################### return def _create_instrument_resources(self): # Create InstrumentModel instModel_obj = IonObject(RT.InstrumentModel, name='VEL3D', description="VEL3D") instModel_id = self.imsclient.create_instrument_model(instModel_obj) log.debug( 'new InstrumentModel id = %s ', instModel_id) raw_config = StreamConfiguration(stream_name='raw', parameter_dictionary_name='raw' ) vel3d_b_sample = StreamConfiguration(stream_name='vel3d_b_sample', parameter_dictionary_name='vel3d_b_sample') 
vel3d_b_engineering = StreamConfiguration(stream_name='vel3d_b_engineering', parameter_dictionary_name='vel3d_b_engineering') # Create InstrumentAgent instAgent_obj = IonObject(RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", driver_uri="http://sddevrepo.oceanobservatories.org/releases/nobska_mavs4_ooicore-0.0.7-py2.7.egg", stream_configurations = [raw_config, vel3d_b_sample, vel3d_b_engineering]) instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj) log.debug('new InstrumentAgent id = %s', instAgent_id) self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id) # Create InstrumentDevice instDevice_obj = IonObject(RT.InstrumentDevice, name='VEL3DDevice', description="VEL3DDevice", serial_number="12345" ) self.instrument_device = self.imsclient.create_instrument_device(instrument_device=instDevice_obj) self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, self.instrument_device) port_agent_config = { 'device_addr': '10.180.80.6', 'device_port': 2101, 'process_type': PortAgentProcessType.UNIX, 'binary_path': "port_agent", 'port_agent_addr': 'localhost', 'command_port': 1025, 'data_port': 1026, 'log_level': 5, 'type': PortAgentType.ETHERNET } instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='VEL3DAgentInstance', description="VEL3DAgentInstance", port_agent_config = port_agent_config, alerts= []) instAgentInstance_id = self.imsclient.create_instrument_agent_instance(instAgentInstance_obj, instAgent_id, self.instrument_device) self._start_port_agent(self.imsclient.read_instrument_agent_instance(instAgentInstance_id)) vel3d_b_sample_pdict_id = self.dataset_management.read_parameter_dictionary_by_name('vel3d_b_sample', id_only=True) vel3d_b_sample_stream_def_id = self.pubsubclient.create_stream_definition(name='vel3d_b_sample', parameter_dictionary_id=vel3d_b_sample_pdict_id) vel3d_b_engineering_pdict_id = 
self.dataset_management.read_parameter_dictionary_by_name('vel3d_b_engineering', id_only=True) vel3d_b_engineering_stream_def_id = self.pubsubclient.create_stream_definition(name='vel3d_b_engineering', parameter_dictionary_id=vel3d_b_engineering_pdict_id) raw_pdict_id = self.dataset_management.read_parameter_dictionary_by_name('raw', id_only=True) raw_stream_def_id = self.pubsubclient.create_stream_definition(name='raw', parameter_dictionary_id=raw_pdict_id) #------------------------------- # Create Raw and Parsed Data Products for the device #------------------------------- tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() dp_obj = IonObject(RT.DataProduct, name='vel3d_b_sample', description='vel3d_b_sample', temporal_domain = tdom, spatial_domain = sdom) data_product_id1 = self.dpclient.create_data_product(data_product=dp_obj, stream_definition_id=vel3d_b_sample_stream_def_id) self.damsclient.assign_data_product(input_resource_id=self.instrument_device, data_product_id=data_product_id1) self.dpclient.activate_data_product_persistence(data_product_id=data_product_id1) dp_obj = IonObject(RT.DataProduct, name='vel3d_b_engineering', description='vel3d_b_engineering', temporal_domain = tdom, spatial_domain = sdom) data_product_id2 = self.dpclient.create_data_product(data_product=dp_obj, stream_definition_id=vel3d_b_engineering_stream_def_id) self.damsclient.assign_data_product(input_resource_id=self.instrument_device, data_product_id=data_product_id2) self.dpclient.activate_data_product_persistence(data_product_id=data_product_id2) dp_obj = IonObject(RT.DataProduct, name='the raw data', description='raw stream test', temporal_domain = tdom, spatial_domain = sdom) data_product_id3 = self.dpclient.create_data_product(data_product=dp_obj, stream_definition_id=raw_stream_def_id) self.damsclient.assign_data_product(input_resource_id=self.instrument_device, data_product_id=data_product_id3) 
self.dpclient.activate_data_product_persistence(data_product_id=data_product_id3) def _start_port_agent(self, instrument_agent_instance_obj=None): """ Construct and start the port agent, ONLY NEEDED FOR INSTRUMENT AGENTS. """ _port_agent_config = instrument_agent_instance_obj.port_agent_config # It blocks until the port agent starts up or a timeout _pagent = PortAgentProcess.launch_process(_port_agent_config, test_mode = True) pid = _pagent.get_pid() port = _pagent.get_data_port() cmd_port = _pagent.get_command_port() log.info("IMS:_start_pagent returned from PortAgentProcess.launch_process pid: %s ", pid) # Hack to get ready for DEMO. Further though needs to be put int # how we pass this config info around. host = 'localhost' driver_config = instrument_agent_instance_obj.driver_config comms_config = driver_config.get('comms_config') if comms_config: host = comms_config.get('addr') else: log.warn("No comms_config specified, using '%s'" % host) # Configure driver to use port agent port number. instrument_agent_instance_obj.driver_config['comms_config'] = { 'addr' : host, 'cmd_port' : cmd_port, 'port' : port } instrument_agent_instance_obj.driver_config['pagent_pid'] = pid self.imsclient.update_instrument_agent_instance(instrument_agent_instance_obj) return self.imsclient.read_instrument_agent_instance(instrument_agent_instance_obj._id) def _start_platform(self): """ Starts the given platform waiting for it to transition to the UNINITIALIZED state (note that the agent starts in the LAUNCHING state). 
More in concrete the sequence of steps here are: - prepares subscriber to receive the UNINITIALIZED state transition - launches the platform process - waits for the start of the process - waits for the transition to the UNINITIALIZED state """ ############################################################## # prepare to receive the UNINITIALIZED state transition: async_res = AsyncResult() def consume_event(evt, *args, **kwargs): log.debug("Got ResourceAgentStateEvent %s from origin %r", evt.state, evt.origin) if evt.state == PlatformAgentState.UNINITIALIZED: async_res.set(evt) # start subscriber: sub = EventSubscriber(event_type="ResourceAgentStateEvent", origin=self.platform_device, callback=consume_event) sub.start() log.info("registered event subscriber to wait for state=%r from origin %r", PlatformAgentState.UNINITIALIZED, self.platform_device) #self._event_subscribers.append(sub) sub._ready_event.wait(timeout=EVENT_TIMEOUT) ############################################################## # now start the platform: agent_instance_id = self.platform_agent_instance_id log.debug("about to call start_platform_agent_instance with id=%s", agent_instance_id) pid = self.imsclient.start_platform_agent_instance(platform_agent_instance_id=agent_instance_id) log.debug("start_platform_agent_instance returned pid=%s", pid) #wait for start agent_instance_obj = self.imsclient.read_platform_agent_instance(agent_instance_id) gate = AgentProcessStateGate(self.processdispatchclient.read_process, self.platform_device._id, ProcessStateEnum.RUNNING) self.assertTrue(gate.await(90), "The platform agent instance did not spawn in 90 seconds") # Start a resource agent client to talk with the agent. 
self._pa_client = ResourceAgentClient(self.platform_device, name=gate.process_id, process=FakeProcess()) log.debug("got platform agent client %s", str(self._pa_client)) ############################################################## # wait for the UNINITIALIZED event: async_res.get(timeout=self._receive_timeout)
class TestInstrumentManagementServiceIntegration(IonIntegrationTestCase): def setUp(self): # Start container #print 'instantiating container' self._start_container() #container = Container() #print 'starting container' #container.start() #print 'started container' unittest # suppress an pycharm inspector error if all unittest.skip references are commented out self.container.start_rel_from_url('res/deploy/r2deploy.yml') self.RR = ResourceRegistryServiceClient(node=self.container.node) self.IMS = InstrumentManagementServiceClient(node=self.container.node) self.IDS = IdentityManagementServiceClient(node=self.container.node) self.PSC = PubsubManagementServiceClient(node=self.container.node) self.DP = DataProductManagementServiceClient(node=self.container.node) self.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node) self.DSC = DatasetManagementServiceClient(node=self.container.node) self.PDC = ProcessDispatcherServiceClient(node=self.container.node) self.OMS = ObservatoryManagementServiceClient(node=self.container.node) self.RR2 = EnhancedResourceRegistryClient(self.RR) # @unittest.skip('this test just for debugging setup') # def test_just_the_setup(self): # return @attr('EXT') @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode as it depends on modifying CFG on service side') def test_resources_associations_extensions(self): """ create one of each resource and association used by IMS to guard against problems in ion-definitions """ self.patch_cfg(CFG["container"], {"extended_resources": {"strip_results": False}}) #stuff we control instrument_agent_instance_id, _ = self.RR.create(any_old(RT.InstrumentAgentInstance)) instrument_agent_id, _ = self.RR.create(any_old(RT.InstrumentAgent)) instrument_model_id, _ = self.RR.create(any_old(RT.InstrumentModel)) instrument_device_id, _ = self.RR.create(any_old(RT.InstrumentDevice)) instrument_site_id, _ = self.RR.create(any_old(RT.InstrumentSite)) 
platform_agent_instance_id, _ = self.RR.create(any_old(RT.PlatformAgentInstance)) platform_agent_id, _ = self.RR.create(any_old(RT.PlatformAgent)) platform_site_id, _ = self.RR.create(any_old(RT.PlatformSite)) platform_device_id, _ = self.RR.create(any_old(RT.PlatformDevice)) platform_model_id, _ = self.RR.create(any_old(RT.PlatformModel)) sensor_device_id, _ = self.RR.create(any_old(RT.SensorDevice)) sensor_model_id, _ = self.RR.create(any_old(RT.SensorModel)) #stuff we associate to data_producer_id, _ = self.RR.create(any_old(RT.DataProducer)) org_id, _ = self.RR.create(any_old(RT.Org)) #instrument_agent_instance_id #is only a target #instrument_agent self.RR.create_association(instrument_agent_id, PRED.hasModel, instrument_model_id) self.RR.create_association(instrument_agent_instance_id, PRED.hasAgentDefinition, instrument_agent_id) #instrument_device self.RR.create_association(instrument_device_id, PRED.hasModel, instrument_model_id) self.RR.create_association(instrument_device_id, PRED.hasAgentInstance, instrument_agent_instance_id) self.RR.create_association(instrument_device_id, PRED.hasDataProducer, data_producer_id) self.RR.create_association(instrument_device_id, PRED.hasDevice, sensor_device_id) self.RR.create_association(org_id, PRED.hasResource, instrument_device_id) instrument_model_id #is only a target platform_agent_instance_id #is only a target #platform_agent self.RR.create_association(platform_agent_id, PRED.hasModel, platform_model_id) self.RR.create_association(platform_agent_instance_id, PRED.hasAgentDefinition, platform_agent_id) #platform_device self.RR.create_association(platform_device_id, PRED.hasModel, platform_model_id) self.RR.create_association(platform_device_id, PRED.hasAgentInstance, platform_agent_instance_id) self.RR.create_association(platform_device_id, PRED.hasDevice, instrument_device_id) self.RR.create_association(instrument_site_id, PRED.hasDevice, instrument_device_id) self.RR.create_association(platform_site_id, 
PRED.hasDevice, platform_device_id) self.RR.create_association(platform_site_id, PRED.hasSite, instrument_site_id) platform_model_id #is only a target #sensor_device self.RR.create_association(sensor_device_id, PRED.hasModel, sensor_model_id) self.RR.create_association(sensor_device_id, PRED.hasDevice, instrument_device_id) sensor_model_id #is only a target #create a parsed product for this instrument output dp_obj = IonObject(RT.DataProduct, name='the parsed data', description='ctd stream test', processing_level_code='Parsed_Canonical') pdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True) parsed_stream_def_id = self.PSC.create_stream_definition(name='parsed', parameter_dictionary_id=pdict_id) data_product_id1 = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=parsed_stream_def_id) log.debug( 'new dp_id = %s', data_product_id1) self.DAMS.assign_data_product(input_resource_id=instrument_device_id, data_product_id=data_product_id1) def addInstOwner(inst_id, subject): actor_identity_obj = any_old(RT.ActorIdentity, {"name": subject}) user_id = self.IDS.create_actor_identity(actor_identity_obj) user_info_obj = any_old(RT.UserInfo) user_info_id = self.IDS.create_user_info(user_id, user_info_obj) self.RR.create_association(inst_id, PRED.hasOwner, user_id) #Testing multiple instrument owners addInstOwner(instrument_device_id, "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254") addInstOwner(instrument_device_id, "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Bob Cumbers A256") extended_instrument = self.IMS.get_instrument_device_extension(instrument_device_id) self.assertEqual(instrument_device_id, extended_instrument._id) self.assertEqual(len(extended_instrument.owners), 2) self.assertEqual(extended_instrument.instrument_model._id, instrument_model_id) # Lifecycle self.assertEquals(len(extended_instrument.lcstate_transitions), 6) self.assertEquals(set(extended_instrument.lcstate_transitions.keys()), 
set(['develop', 'deploy', 'retire', 'plan', 'integrate', 'delete'])) self.assertEquals(len(extended_instrument.availability_transitions), 2) self.assertEquals(set(extended_instrument.availability_transitions.keys()), set(['enable', 'announce'])) # Verify that computed attributes exist for the extended instrument self.assertIsInstance(extended_instrument.computed.last_data_received_datetime, ComputedFloatValue) self.assertIsInstance(extended_instrument.computed.uptime, ComputedStringValue) self.assertIsInstance(extended_instrument.computed.power_status_roll_up, ComputedIntValue) self.assertIsInstance(extended_instrument.computed.communications_status_roll_up, ComputedIntValue) self.assertIsInstance(extended_instrument.computed.data_status_roll_up, ComputedIntValue) self.assertIsInstance(extended_instrument.computed.location_status_roll_up, ComputedIntValue) log.debug("extended_instrument.computed: %s", extended_instrument.computed) #check model inst_model_obj = self.RR.read(instrument_model_id) self.assertEqual(inst_model_obj.name, extended_instrument.instrument_model.name) #check agent instance inst_agent_instance_obj = self.RR.read(instrument_agent_instance_id) self.assertEqual(inst_agent_instance_obj.name, extended_instrument.agent_instance.name) #check agent inst_agent_obj = self.RR.read(instrument_agent_id) #compound assoc return list of lists so check the first element self.assertEqual(inst_agent_obj.name, extended_instrument.instrument_agent.name) #check platform device plat_device_obj = self.RR.read(platform_device_id) self.assertEqual(plat_device_obj.name, extended_instrument.platform_device.name) extended_platform = self.IMS.get_platform_device_extension(platform_device_id) self.assertEqual(1, len(extended_platform.portals)) self.assertEqual(1, len(extended_platform.portal_instruments)) #self.assertEqual(1, len(extended_platform.computed.portal_status.value)) # no agent started so NO statuses reported self.assertEqual(1, 
len(extended_platform.instrument_devices)) self.assertEqual(instrument_device_id, extended_platform.instrument_devices[0]._id) self.assertEqual(1, len(extended_platform.instrument_models)) self.assertEqual(instrument_model_id, extended_platform.instrument_models[0]._id) self.assertEquals(extended_platform.platform_agent._id, platform_agent_id) self.assertEquals(len(extended_platform.lcstate_transitions), 6) self.assertEquals(set(extended_platform.lcstate_transitions.keys()), set(['develop', 'deploy', 'retire', 'plan', 'integrate', 'delete'])) self.assertEquals(len(extended_platform.availability_transitions), 2) self.assertEquals(set(extended_platform.availability_transitions.keys()), set(['enable', 'announce'])) #check sensor devices self.assertEqual(1, len(extended_instrument.sensor_devices)) ##check data_product_parameters_set # !!! OOIION-1342 The UI does not use data_product_parameters_set and it is an expensive calc so the attribute calc was disabled # !!! Remove check in this test #self.assertEqual(ComputedValueAvailability.PROVIDED, # extended_instrument.computed.data_product_parameters_set.status) #self.assertTrue( 'Parsed_Canonical' in extended_instrument.computed.data_product_parameters_set.value) ## the ctd parameters should include 'temp' #self.assertTrue( 'temp' in extended_instrument.computed.data_product_parameters_set.value['Parsed_Canonical']) #none of these will work because there is no agent # self.assertEqual(ComputedValueAvailability.NOTAVAILABLE, # extended_instrument.computed.firmware_version.status) # self.assertEqual(ComputedValueAvailability.NOTAVAILABLE, # extended_instrument.computed.operational_state.status) # self.assertEqual(ComputedValueAvailability.PROVIDED, # extended_instrument.computed.power_status_roll_up.status) # self.assertEqual(ComputedValueAvailability.PROVIDED, # extended_instrument.computed.communications_status_roll_up.status) # self.assertEqual(ComputedValueAvailability.PROVIDED, # 
extended_instrument.computed.data_status_roll_up.status) # self.assertEqual(DeviceStatusType.STATUS_OK, # extended_instrument.computed.data_status_roll_up.value) # self.assertEqual(ComputedValueAvailability.PROVIDED, # extended_instrument.computed.location_status_roll_up.status) # self.assertEqual(ComputedValueAvailability.PROVIDED, # extended_instrument.computed.recent_events.status) # self.assertEqual([], extended_instrument.computed.recent_events.value) # cleanup c = DotDict() c.resource_registry = self.RR self.RR2.pluck(instrument_agent_id) self.RR2.pluck(instrument_model_id) self.RR2.pluck(instrument_device_id) self.RR2.pluck(platform_agent_id) self.RR2.pluck(sensor_device_id) self.IMS.force_delete_instrument_agent(instrument_agent_id) self.IMS.force_delete_instrument_model(instrument_model_id) self.IMS.force_delete_instrument_device(instrument_device_id) self.IMS.force_delete_platform_agent_instance(platform_agent_instance_id) self.IMS.force_delete_platform_agent(platform_agent_id) self.OMS.force_delete_instrument_site(instrument_site_id) self.OMS.force_delete_platform_site(platform_site_id) self.IMS.force_delete_platform_device(platform_device_id) self.IMS.force_delete_platform_model(platform_model_id) self.IMS.force_delete_sensor_device(sensor_device_id) self.IMS.force_delete_sensor_model(sensor_model_id) #stuff we associate to self.RR.delete(data_producer_id) self.RR.delete(org_id) def test_custom_attributes(self): """ Test assignment of custom attributes """ instModel_obj = IonObject(OT.CustomAttribute, name='SBE37IMModelAttr', description="model custom attr") instrument_model_id, _ = self.RR.create(any_old(RT.InstrumentModel, {"custom_attributes": [instModel_obj] })) instrument_device_id, _ = self.RR.create(any_old(RT.InstrumentDevice, {"custom_attributes": {"favorite_color": "red", "bogus_attr": "should raise warning" } })) self.IMS.assign_instrument_model_to_instrument_device(instrument_model_id, instrument_device_id) # cleanup 
self.IMS.force_delete_instrument_device(instrument_device_id) self.IMS.force_delete_instrument_model(instrument_model_id) def _get_datastore(self, dataset_id): dataset = self.DSC.read_dataset(dataset_id) datastore_name = dataset.datastore_name datastore = self.container.datastore_manager.get_datastore(datastore_name, DataStore.DS_PROFILE.SCIDATA) return datastore def test_data_producer(self): idevice_id = self.IMS.create_instrument_device(any_old(RT.InstrumentDevice)) self.assertEqual(1, len(self.RR2.find_data_producer_ids_of_instrument_device_using_has_data_producer(idevice_id))) pdevice_id = self.IMS.create_platform_device(any_old(RT.PlatformDevice)) self.assertEqual(1, len(self.RR2.find_data_producer_ids_of_platform_device_using_has_data_producer(pdevice_id))) @attr('PREP') def test_prepare_resource_support(self): """ create one of each resource and association used by IMS to guard against problems in ion-definitions """ #stuff we control instrument_agent_instance_id, _ = self.RR.create(any_old(RT.InstrumentAgentInstance)) instrument_agent_id, _ = self.RR.create(any_old(RT.InstrumentAgent)) instrument_model_id, _ = self.RR.create(any_old(RT.InstrumentModel)) instrument_device_id, _ = self.RR.create(any_old(RT.InstrumentDevice)) platform_agent_instance_id, _ = self.RR.create(any_old(RT.PlatformAgentInstance)) platform_agent_id, _ = self.RR.create(any_old(RT.PlatformAgent)) platform_device_id, _ = self.RR.create(any_old(RT.PlatformDevice)) platform_model_id, _ = self.RR.create(any_old(RT.PlatformModel)) sensor_device_id, _ = self.RR.create(any_old(RT.SensorDevice)) sensor_model_id, _ = self.RR.create(any_old(RT.SensorModel)) instrument_device2_id, _ = self.RR.create(any_old(RT.InstrumentDevice)) instrument_device3_id, _ = self.RR.create(any_old(RT.InstrumentDevice)) platform_device2_id, _ = self.RR.create(any_old(RT.PlatformDevice)) sensor_device2_id, _ = self.RR.create(any_old(RT.SensorDevice)) #stuff we associate to data_producer_id, _ = 
self.RR.create(any_old(RT.DataProducer)) org_id, _ = self.RR.create(any_old(RT.Org)) #instrument_agent_instance_id #is only a target #instrument_agent self.RR.create_association(instrument_agent_id, PRED.hasModel, instrument_model_id) self.RR.create_association(instrument_agent_instance_id, PRED.hasAgentDefinition, instrument_agent_id) #instrument_device self.RR.create_association(instrument_device_id, PRED.hasModel, instrument_model_id) self.RR.create_association(instrument_device_id, PRED.hasAgentInstance, instrument_agent_instance_id) self.RR.create_association(instrument_device_id, PRED.hasDataProducer, data_producer_id) self.RR.create_association(instrument_device_id, PRED.hasDevice, sensor_device_id) self.RR.create_association(org_id, PRED.hasResource, instrument_device_id) self.RR.create_association(instrument_device2_id, PRED.hasModel, instrument_model_id) self.RR.create_association(org_id, PRED.hasResource, instrument_device2_id) instrument_model_id #is only a target platform_agent_instance_id #is only a target #platform_agent self.RR.create_association(platform_agent_id, PRED.hasModel, platform_model_id) self.RR.create_association(platform_agent_instance_id, PRED.hasAgentDefinition, platform_agent_id) #platform_device self.RR.create_association(platform_device_id, PRED.hasModel, platform_model_id) self.RR.create_association(platform_device_id, PRED.hasAgentInstance, platform_agent_instance_id) self.RR.create_association(platform_device_id, PRED.hasDevice, instrument_device_id) self.RR.create_association(platform_device2_id, PRED.hasModel, platform_model_id) self.RR.create_association(platform_device2_id, PRED.hasDevice, instrument_device2_id) platform_model_id #is only a target #sensor_device self.RR.create_association(sensor_device_id, PRED.hasModel, sensor_model_id) self.RR.create_association(sensor_device_id, PRED.hasDevice, instrument_device_id) self.RR.create_association(sensor_device2_id, PRED.hasModel, sensor_model_id) 
self.RR.create_association(sensor_device2_id, PRED.hasDevice, instrument_device2_id) sensor_model_id #is only a target #set lcstate - used for testing prepare - not setting all to DEVELOP, only some self.RR.execute_lifecycle_transition(instrument_agent_id, LCE.DEVELOP) self.RR.execute_lifecycle_transition(instrument_device_id, LCE.DEVELOP) self.RR.execute_lifecycle_transition(instrument_device2_id, LCE.DEVELOP) self.RR.execute_lifecycle_transition(platform_device_id, LCE.DEVELOP) self.RR.execute_lifecycle_transition(platform_device2_id, LCE.DEVELOP) self.RR.execute_lifecycle_transition(platform_agent_id, LCE.DEVELOP) #create a parsed product for this instrument output dp_obj = IonObject(RT.DataProduct, name='the parsed data', description='ctd stream test', processing_level_code='Parsed_Canonical') pdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True) parsed_stream_def_id = self.PSC.create_stream_definition(name='parsed', parameter_dictionary_id=pdict_id) data_product_id1 = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=parsed_stream_def_id) log.debug( 'new dp_id = %s', data_product_id1) self.DAMS.assign_data_product(input_resource_id=instrument_device_id, data_product_id=data_product_id1) def addInstOwner(inst_id, subject): actor_identity_obj = any_old(RT.ActorIdentity, {"name": subject}) user_id = self.IDS.create_actor_identity(actor_identity_obj) user_info_obj = any_old(RT.UserInfo) user_info_id = self.IDS.create_user_info(user_id, user_info_obj) self.RR.create_association(inst_id, PRED.hasOwner, user_id) #Testing multiple instrument owners addInstOwner(instrument_device_id, "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254") addInstOwner(instrument_device_id, "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Bob Cumbers A256") def ion_object_encoder(obj): return obj.__dict__ #First call to create instrument_data = self.IMS.prepare_instrument_device_support() #print 
simplejson.dumps(instrument_data, default=ion_object_encoder, indent=2) self.assertEqual(instrument_data._id, '') self.assertEqual(instrument_data.type_, OT.InstrumentDevicePrepareSupport) self.assertEqual(len(instrument_data.associations['InstrumentModel'].resources), 1) self.assertEqual(instrument_data.associations['InstrumentModel'].resources[0]._id, instrument_model_id) self.assertEqual(len(instrument_data.associations['InstrumentAgentInstance'].resources), 1) self.assertEqual(instrument_data.associations['InstrumentAgentInstance'].resources[0]._id, instrument_agent_instance_id) self.assertEqual(len(instrument_data.associations['InstrumentModel'].associated_resources), 0) self.assertEqual(len(instrument_data.associations['InstrumentAgentInstance'].associated_resources), 0) self.assertEqual(len(instrument_data.associations['SensorDevice'].resources), 0) #Next call to update instrument_data = self.IMS.prepare_instrument_device_support(instrument_device_id) #print 'Update results' #print simplejson.dumps(instrument_data, default=ion_object_encoder, indent=2) self.assertEqual(instrument_data._id, instrument_device_id) self.assertEqual(instrument_data.type_, OT.InstrumentDevicePrepareSupport) self.assertEqual(len(instrument_data.associations['InstrumentModel'].resources), 1) self.assertEqual(instrument_data.associations['InstrumentModel'].resources[0]._id, instrument_model_id) self.assertEqual(len(instrument_data.associations['InstrumentAgentInstance'].resources), 1) self.assertEqual(instrument_data.associations['InstrumentAgentInstance'].resources[0]._id, instrument_agent_instance_id) self.assertEqual(len(instrument_data.associations['InstrumentModel'].associated_resources), 1) self.assertEqual(instrument_data.associations['InstrumentModel'].associated_resources[0].s, instrument_device_id) self.assertEqual(instrument_data.associations['InstrumentModel'].associated_resources[0].o, instrument_model_id) 
self.assertEqual(len(instrument_data.associations['InstrumentAgentInstance'].associated_resources), 1) self.assertEqual(instrument_data.associations['InstrumentAgentInstance'].associated_resources[0].o, instrument_agent_instance_id) self.assertEqual(instrument_data.associations['InstrumentAgentInstance'].associated_resources[0].s, instrument_device_id) self.assertEqual(len(instrument_data.associations['SensorDevice'].resources), 1) self.assertEqual(instrument_data.associations['SensorDevice'].resources[0]._id, sensor_device_id) self.assertEqual(len(instrument_data.associations['SensorDevice'].associated_resources), 1) self.assertEqual(instrument_data.associations['SensorDevice'].associated_resources[0].o, instrument_device_id) self.assertEqual(instrument_data.associations['SensorDevice'].associated_resources[0].s, sensor_device_id) self.assertEqual(instrument_data.associations['InstrumentModel'].assign_request.request_parameters['instrument_device_id'], instrument_device_id) #test prepare for create of instrument agent instance instrument_agent_data = self.IMS.prepare_instrument_agent_instance_support() #print 'Update results' #print simplejson.dumps(instrument_agent_data, default=ion_object_encoder, indent=2) self.assertEqual(instrument_agent_data._id, '') self.assertEqual(instrument_agent_data.type_, OT.InstrumentAgentInstancePrepareSupport) self.assertEqual(len(instrument_agent_data.associations['InstrumentDevice'].resources), 2) self.assertEqual(len(instrument_agent_data.associations['InstrumentAgent'].resources), 1) self.assertEqual(instrument_agent_data.associations['InstrumentAgent'].resources[0]._id, instrument_agent_id) self.assertEqual(len(instrument_agent_data.associations['InstrumentDevice'].associated_resources), 0) self.assertEqual(len(instrument_agent_data.associations['InstrumentAgent'].associated_resources), 0) #test prepare for update of instrument agent instance to see if it is associated with the instrument that was created instrument_agent_data 
= self.IMS.prepare_instrument_agent_instance_support(instrument_agent_instance_id=instrument_agent_instance_id) #print 'Update results' #print simplejson.dumps(instrument_agent_data, default=ion_object_encoder, indent=2) self.assertEqual(instrument_agent_data._id, instrument_agent_instance_id) self.assertEqual(instrument_agent_data.type_, OT.InstrumentAgentInstancePrepareSupport) self.assertEqual(len(instrument_agent_data.associations['InstrumentDevice'].resources), 3) self.assertEqual(len(instrument_agent_data.associations['InstrumentAgent'].resources), 1) self.assertEqual(instrument_agent_data.associations['InstrumentAgent'].resources[0]._id, instrument_agent_id) self.assertEqual(len(instrument_agent_data.associations['InstrumentDevice'].associated_resources), 1) self.assertEqual(instrument_agent_data.associations['InstrumentDevice'].associated_resources[0].s, instrument_device_id) self.assertEqual(instrument_agent_data.associations['InstrumentDevice'].associated_resources[0].o, instrument_agent_instance_id) self.assertEqual(len(instrument_agent_data.associations['InstrumentAgent'].associated_resources), 1) self.assertEqual(instrument_agent_data.associations['InstrumentAgent'].associated_resources[0].o, instrument_agent_id) self.assertEqual(instrument_agent_data.associations['InstrumentAgent'].associated_resources[0].s, instrument_agent_instance_id) self.assertEqual(instrument_agent_data.associations['InstrumentAgent'].assign_request.request_parameters['instrument_agent_instance_id'], instrument_agent_instance_id) #test prepare for update of data product to see if it is associated with the instrument that was created data_product_data = self.DP.prepare_data_product_support(data_product_id1) #print simplejson.dumps(data_product_data, default=ion_object_encoder, indent=2) self.assertEqual(data_product_data._id, data_product_id1) self.assertEqual(data_product_data.type_, OT.DataProductPrepareSupport) 
self.assertEqual(len(data_product_data.associations['StreamDefinition'].resources), 1) self.assertEqual(len(data_product_data.associations['Dataset'].resources), 0) self.assertEqual(len(data_product_data.associations['StreamDefinition'].associated_resources), 1) self.assertEqual(data_product_data.associations['StreamDefinition'].associated_resources[0].s, data_product_id1) self.assertEqual(len(data_product_data.associations['Dataset'].associated_resources), 0) self.assertEqual(len(data_product_data.associations['InstrumentDeviceHasOutputProduct'].resources), 3) self.assertEqual(len(data_product_data.associations['InstrumentDeviceHasOutputProduct'].associated_resources), 1) self.assertEqual(data_product_data.associations['InstrumentDeviceHasOutputProduct'].associated_resources[0].s, instrument_device_id) self.assertEqual(data_product_data.associations['InstrumentDeviceHasOutputProduct'].associated_resources[0].o, data_product_id1) self.assertEqual(len(data_product_data.associations['PlatformDevice'].resources), 2) platform_data = self.IMS.prepare_platform_device_support() #print simplejson.dumps(platform_data, default=ion_object_encoder, indent=2) self.assertEqual(platform_data._id, '') self.assertEqual(platform_data.type_, OT.PlatformDevicePrepareSupport) self.assertEqual(len(platform_data.associations['PlatformModel'].resources), 1) self.assertEqual(platform_data.associations['PlatformModel'].resources[0]._id, platform_model_id) self.assertEqual(len(platform_data.associations['PlatformAgentInstance'].resources), 1) self.assertEqual(platform_data.associations['PlatformAgentInstance'].resources[0]._id, platform_agent_instance_id) self.assertEqual(len(platform_data.associations['PlatformModel'].associated_resources), 0) self.assertEqual(len(platform_data.associations['PlatformAgentInstance'].associated_resources), 0) self.assertEqual(len(platform_data.associations['InstrumentDevice'].resources), 1) platform_data = 
self.IMS.prepare_platform_device_support(platform_device_id) #print simplejson.dumps(platform_data, default=ion_object_encoder, indent=2) self.assertEqual(platform_data._id, platform_device_id) self.assertEqual(platform_data.type_, OT.PlatformDevicePrepareSupport) self.assertEqual(len(platform_data.associations['PlatformModel'].resources), 1) self.assertEqual(platform_data.associations['PlatformModel'].resources[0]._id, platform_model_id) self.assertEqual(len(platform_data.associations['PlatformAgentInstance'].resources), 1) self.assertEqual(platform_data.associations['PlatformAgentInstance'].resources[0]._id, platform_agent_instance_id) self.assertEqual(len(platform_data.associations['PlatformModel'].associated_resources), 1) self.assertEqual(platform_data.associations['PlatformModel'].associated_resources[0].s, platform_device_id) self.assertEqual(platform_data.associations['PlatformModel'].associated_resources[0].o, platform_model_id) self.assertEqual(len(platform_data.associations['PlatformAgentInstance'].associated_resources), 1) self.assertEqual(platform_data.associations['PlatformAgentInstance'].associated_resources[0].o, platform_agent_instance_id) self.assertEqual(platform_data.associations['PlatformAgentInstance'].associated_resources[0].s, platform_device_id) self.assertEqual(len(platform_data.associations['InstrumentDevice'].resources), 2) #self.assertEqual(len(platform_data.associations['InstrumentDevice'].associated_resources), 1) #self.assertEqual(platform_data.associations['InstrumentDevice'].associated_resources[0].s, platform_device_id) #self.assertEqual(platform_data.associations['InstrumentDevice'].associated_resources[0].o, instrument_device_id) self.assertEqual(platform_data.associations['PlatformModel'].assign_request.request_parameters['platform_device_id'], platform_device_id) # cleanup c = DotDict() c.resource_registry = self.RR self.RR2.pluck(instrument_agent_id) self.RR2.pluck(instrument_model_id) self.RR2.pluck(instrument_device_id) 
self.RR2.pluck(platform_agent_id) self.RR2.pluck(sensor_device_id) self.RR2.pluck(sensor_device2_id) self.IMS.force_delete_instrument_agent(instrument_agent_id) self.IMS.force_delete_instrument_model(instrument_model_id) self.IMS.force_delete_instrument_device(instrument_device_id) self.IMS.force_delete_instrument_device(instrument_device2_id) self.IMS.force_delete_platform_agent_instance(platform_agent_instance_id) self.IMS.force_delete_platform_agent(platform_agent_id) self.IMS.force_delete_platform_device(platform_device_id) self.IMS.force_delete_platform_device(platform_device2_id) self.IMS.force_delete_platform_model(platform_model_id) self.IMS.force_delete_sensor_device(sensor_device_id) self.IMS.force_delete_sensor_device(sensor_device2_id) self.IMS.force_delete_sensor_model(sensor_model_id) #stuff we associate to self.RR.delete(data_producer_id) self.RR.delete(org_id)
class TestMarineFacilityManagementServiceIntegration(IonIntegrationTestCase): def setUp(self): # Start container #print 'instantiating container' self._start_container() #container = Container() #print 'starting container' #container.start() #print 'started container' self.container.start_rel_from_url('res/deploy/r2sa.yml') self.RR = ResourceRegistryServiceClient(node=self.container.node) print 'started services' def test_just_the_setup(self): return def test_resources_associations(self): """ create one of each resource and association used by MFMS to guard against problems in ion-definitions """ #raise unittest.SkipTest("https://jira.oceanobservatories.org/tasks/browse/CISWCORE-41") #stuff we control logical_instrument_id, _ = self.RR.create(any_old(RT.LogicalInstrument)) logical_platform_id, _ = self.RR.create(any_old(RT.LogicalPlatform)) logical_platform2_id, _ = self.RR.create(any_old(RT.LogicalPlatform)) marine_facility_id, _ = self.RR.create(any_old(RT.MarineFacility)) site_id, _ = self.RR.create(any_old(RT.Site)) site2_id, _ = self.RR.create(any_old(RT.Site)) #stuff we associate to instrument_agent_id, _ = self.RR.create(any_old(RT.InstrumentAgent)) platform_agent_id, _ = self.RR.create(any_old(RT.PlatformAgent)) #logical_instrument self.RR.create_association(logical_instrument_id, PRED.hasAgent, instrument_agent_id) #logical_platform self.RR.create_association(logical_platform_id, PRED.hasPlatform, logical_platform2_id) self.RR.create_association(logical_platform_id, PRED.hasInstrument, logical_instrument_id) self.RR.create_association(logical_platform_id, PRED.hasAgent, platform_agent_id) #marine_facility self.RR.create_association(marine_facility_id, PRED.hasSite, site_id) self.RR.create_association(marine_facility_id, PRED.hasPlatform, logical_platform_id) #site self.RR.create_association(site_id, PRED.hasSite, site2_id)
class TestInstrumentManagementServiceIntegration(IonIntegrationTestCase):
    """
    End-to-end integration tests for the Instrument Management Service (IMS).

    Each test runs against a live capability container with the full r2
    deployment, exercising IMS together with the resource registry, pubsub,
    data-product, data-acquisition, dataset, identity and process-dispatcher
    services through their real clients.
    """

    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        #container = Container()
        #print 'starting container'
        #container.start()
        #print 'started container'

        # Deploy the full r2 service set these tests depend on.
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Service clients used throughout the tests.
        self.RR = ResourceRegistryServiceClient(node=self.container.node)
        self.IMS = InstrumentManagementServiceClient(node=self.container.node)
        self.IDS = IdentityManagementServiceClient(node=self.container.node)
        self.PSC = PubsubManagementServiceClient(node=self.container.node)
        self.DP = DataProductManagementServiceClient(node=self.container.node)
        self.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.DSC = DatasetManagementServiceClient(node=self.container.node)
        self.PDC = ProcessDispatcherServiceClient(node=self.container.node)

        print 'started services'

    # @unittest.skip('this test just for debugging setup')
    # def test_just_the_setup(self):
    #     return

    @attr('EXT')
    def test_resources_associations_extensions(self):
        """
        create one of each resource and association used by IMS
        to guard against problems in ion-definitions
        """
        #stuff we control
        instrument_agent_instance_id, _ = self.RR.create(any_old(RT.InstrumentAgentInstance))
        instrument_agent_id, _ = self.RR.create(any_old(RT.InstrumentAgent))
        instrument_model_id, _ = self.RR.create(any_old(RT.InstrumentModel))
        instrument_device_id, _ = self.RR.create(any_old(RT.InstrumentDevice))
        platform_agent_instance_id, _ = self.RR.create(any_old(RT.PlatformAgentInstance))
        platform_agent_id, _ = self.RR.create(any_old(RT.PlatformAgent))
        platform_device_id, _ = self.RR.create(any_old(RT.PlatformDevice))
        platform_model_id, _ = self.RR.create(any_old(RT.PlatformModel))
        sensor_device_id, _ = self.RR.create(any_old(RT.SensorDevice))
        sensor_model_id, _ = self.RR.create(any_old(RT.SensorModel))

        #stuff we associate to
        data_producer_id, _ = self.RR.create(any_old(RT.DataProducer))
        org_id, _ = self.RR.create(any_old(RT.Org))

        #instrument_agent_instance_id #is only a target

        #instrument_agent
        self.RR.create_association(instrument_agent_id, PRED.hasModel, instrument_model_id)
        self.RR.create_association(instrument_agent_instance_id, PRED.hasAgentDefinition, instrument_agent_id)

        #instrument_device
        self.RR.create_association(instrument_device_id, PRED.hasModel, instrument_model_id)
        self.RR.create_association(instrument_device_id, PRED.hasAgentInstance, instrument_agent_instance_id)
        self.RR.create_association(instrument_device_id, PRED.hasDataProducer, data_producer_id)
        self.RR.create_association(instrument_device_id, PRED.hasDevice, sensor_device_id)
        self.RR.create_association(org_id, PRED.hasResource, instrument_device_id)

        instrument_model_id #is only a target

        platform_agent_instance_id #is only a target

        #platform_agent
        self.RR.create_association(platform_agent_id, PRED.hasModel, platform_model_id)
        self.RR.create_association(platform_agent_instance_id, PRED.hasAgentDefinition, platform_agent_id)

        #platform_device
        self.RR.create_association(platform_device_id, PRED.hasModel, platform_model_id)
        self.RR.create_association(platform_device_id, PRED.hasAgentInstance, platform_agent_instance_id)
        self.RR.create_association(platform_device_id, PRED.hasDevice, instrument_device_id)

        platform_model_id #is only a target

        #sensor_device
        self.RR.create_association(sensor_device_id, PRED.hasModel, sensor_model_id)
        self.RR.create_association(sensor_device_id, PRED.hasDevice, instrument_device_id)

        sensor_model_id #is only a target

        #create a parsed product for this instrument output
        tdom, sdom = time_series_domain()
        tdom = tdom.dump()
        sdom = sdom.dump()
        dp_obj = IonObject(RT.DataProduct,
            name='the parsed data',
            description='ctd stream test',
            processing_level_code='Parsed_Canonical',
            temporal_domain = tdom,
            spatial_domain = sdom)
        pdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        parsed_stream_def_id = self.PSC.create_stream_definition(name='parsed', parameter_dictionary_id=pdict_id)
        data_product_id1 = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=parsed_stream_def_id)
        log.debug( 'new dp_id = %s', data_product_id1)

        self.DAMS.assign_data_product(input_resource_id=instrument_device_id, data_product_id=data_product_id1)

        def addInstOwner(inst_id, subject):
            # Register an actor + user-info pair for `subject` and link it
            # to the instrument via hasOwner.
            actor_identity_obj = any_old(RT.ActorIdentity, {"name": subject})
            user_id = self.IDS.create_actor_identity(actor_identity_obj)
            user_info_obj = any_old(RT.UserInfo)
            user_info_id = self.IDS.create_user_info(user_id, user_info_obj)
            self.RR.create_association(inst_id, PRED.hasOwner, user_id)

        #Testing multiple instrument owners
        addInstOwner(instrument_device_id, "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254")
        addInstOwner(instrument_device_id, "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Bob Cumbers A256")

        extended_instrument = self.IMS.get_instrument_device_extension(instrument_device_id)

        self.assertEqual(instrument_device_id, extended_instrument._id)
        self.assertEqual(len(extended_instrument.owners), 2)
        self.assertEqual(extended_instrument.instrument_model._id, instrument_model_id)

        #check model
        inst_model_obj = self.RR.read(instrument_model_id)
        self.assertEqual(inst_model_obj.name, extended_instrument.instrument_model.name)

        #check agent instance
        inst_agent_instance_obj = self.RR.read(instrument_agent_instance_id)
        self.assertEqual(inst_agent_instance_obj.name, extended_instrument.agent_instance.name)

        #check agent
        inst_agent_obj = self.RR.read(instrument_agent_id)
        #compound assoc return list of lists so check the first element
        # NOTE(review): another test class in this file reads
        # extended_instrument.instrument_agent.name (no index) — confirm
        # which shape the extension actually returns.
        self.assertEqual(inst_agent_obj.name, extended_instrument.instrument_agent[0].name)

        #check platform device
        plat_device_obj = self.RR.read(platform_device_id)
        self.assertEqual(plat_device_obj.name, extended_instrument.platform_device.name)

        #check sensor devices
        self.assertEqual(1, len(extended_instrument.sensor_devices))

        #check data_product_parameters_set
        self.assertEqual(ComputedValueAvailability.PROVIDED,
                         extended_instrument.computed.data_product_parameters_set.status)
        self.assertTrue( 'Parsed_Canonical' in extended_instrument.computed.data_product_parameters_set.value)
        # the ctd parameters should include 'temp'
        self.assertTrue( 'temp' in extended_instrument.computed.data_product_parameters_set.value['Parsed_Canonical'])

        #none of these will work because there is no agent
        self.assertEqual(ComputedValueAvailability.NOTAVAILABLE,
                         extended_instrument.computed.firmware_version.status)
        self.assertEqual(ComputedValueAvailability.NOTAVAILABLE,
                         extended_instrument.computed.operational_state.status)
        self.assertEqual(ComputedValueAvailability.PROVIDED,
                         extended_instrument.computed.power_status_roll_up.status)
        self.assertEqual(ComputedValueAvailability.PROVIDED,
                         extended_instrument.computed.communications_status_roll_up.status)
        self.assertEqual(ComputedValueAvailability.PROVIDED,
                         extended_instrument.computed.data_status_roll_up.status)
        self.assertEqual(StatusType.STATUS_OK,
                         extended_instrument.computed.data_status_roll_up.value)
        self.assertEqual(ComputedValueAvailability.PROVIDED,
                         extended_instrument.computed.location_status_roll_up.status)

        # self.assertEqual(ComputedValueAvailability.PROVIDED,
        #                  extended_instrument.computed.recent_events.status)
        # self.assertEqual([], extended_instrument.computed.recent_events.value)

        # cleanup
        c = DotDict()
        c.resource_registry = self.RR
        resource_impl = ResourceImpl(c)
        # pluck removes the associations so force_delete below can succeed
        resource_impl.pluck(instrument_agent_id)
        resource_impl.pluck(instrument_model_id)
        resource_impl.pluck(instrument_device_id)
        resource_impl.pluck(platform_agent_id)
        self.IMS.force_delete_instrument_agent(instrument_agent_id)
        self.IMS.force_delete_instrument_model(instrument_model_id)
        self.IMS.force_delete_instrument_device(instrument_device_id)
        self.IMS.force_delete_platform_agent_instance(platform_agent_instance_id)
        self.IMS.force_delete_platform_agent(platform_agent_id)
        self.IMS.force_delete_platform_device(platform_device_id)
        self.IMS.force_delete_platform_model(platform_model_id)
        self.IMS.force_delete_sensor_device(sensor_device_id)
        self.IMS.force_delete_sensor_model(sensor_model_id)

        #stuff we associate to
        self.RR.delete(data_producer_id)
        self.RR.delete(org_id)

    def test_custom_attributes(self):
        """
        Test assignment of custom attributes
        """
        # NOTE(review): here custom_attributes on the model is a plain dict;
        # a sibling test class in this file passes a list of OT.CustomAttribute
        # objects instead — confirm which form the schema expects.
        instrument_model_id, _ = self.RR.create(any_old(RT.InstrumentModel,
            {"custom_attributes": {"favorite_color": "attr desc goes here"} }))
        instrument_device_id, _ = self.RR.create(any_old(RT.InstrumentDevice,
            {"custom_attributes": {"favorite_color": "red",
                                   "bogus_attr": "should raise warning" } }))

        self.IMS.assign_instrument_model_to_instrument_device(instrument_model_id, instrument_device_id)

        # cleanup
        self.IMS.force_delete_instrument_device(instrument_device_id)
        self.IMS.force_delete_instrument_model(instrument_model_id)

    def _get_datastore(self, dataset_id):
        # Resolve the science-data datastore backing the given dataset;
        # called for its side effect of ensuring the datastore exists.
        dataset = self.DSC.read_dataset(dataset_id)
        datastore_name = dataset.datastore_name
        datastore = self.container.datastore_manager.get_datastore(datastore_name, DataStore.DS_PROFILE.SCIDATA)
        return datastore

    def test_checkpoint_restore(self):
        """
        Spin up a (simulated SBE37) instrument agent, snapshot its agent
        state, corrupt the driver config, then restore from the snapshot
        and verify the corruption is gone.
        """
        # Create InstrumentModel
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='SBE37IMModel',
                                  description="SBE37IMModel",
                                  stream_configuration= {'raw': 'ctd_raw_param_dict' ,
                                                         'parsed': 'ctd_parsed_param_dict' })
        instModel_id = self.IMS.create_instrument_model(instModel_obj)
        log.debug( 'new InstrumentModel id = %s ', instModel_id)

        # Create InstrumentAgent
        instAgent_obj = IonObject(RT.InstrumentAgent,
                                  name='agent007',
                                  description="SBE37IMAgent",
                                  driver_module="mi.instrument.seabird.sbe37smb.ooicore.driver",
                                  driver_class="SBE37Driver" )
        instAgent_id = self.IMS.create_instrument_agent(instAgent_obj)
        log.debug( 'new InstrumentAgent id = %s', instAgent_id)

        self.IMS.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

        # Create InstrumentDevice
        log.debug('test_activateInstrumentSample: Create instrument resource to represent the SBE37 ' +
                  '(SA Req: L4-CI-SA-RQ-241) ')
        instDevice_obj = IonObject(RT.InstrumentDevice,
                                   name='SBE37IMDevice',
                                   description="SBE37IMDevice",
                                   serial_number="12345" )
        instDevice_id = self.IMS.create_instrument_device(instrument_device=instDevice_obj)
        self.IMS.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)

        log.debug("test_activateInstrumentSample: new InstrumentDevice id = %s (SA Req: L4-CI-SA-RQ-241) ", instDevice_id)

        # Port agent pointing at the hosted SBE37 simulator.
        port_agent_config = {
            'device_addr': 'sbe37-simulator.oceanobservatories.org',
            'device_port': 4001,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'command_port': 4002,
            'data_port': 4003,
            'log_level': 5,
        }

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance,
                                          name='SBE37IMAgentInstance',
                                          description="SBE37IMAgentInstance",
                                          comms_device_address='sbe37-simulator.oceanobservatories.org',
                                          comms_device_port=4001,
                                          port_agent_config = port_agent_config)

        instAgentInstance_id = self.IMS.create_instrument_agent_instance(instAgentInstance_obj,
                                                                         instAgent_id,
                                                                         instDevice_id)

        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        spdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_parsed_param_dict')
        parsed_stream_def_id = self.PSC.create_stream_definition(name='parsed', parameter_dictionary=spdict_id)

        rpdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_raw_param_dict')
        raw_stream_def_id = self.PSC.create_stream_definition(name='raw', parameter_dictionary=rpdict_id)

        #-------------------------------
        # Create Raw and Parsed Data Products for the device
        #-------------------------------
        dp_obj = IonObject(RT.DataProduct,
                           name='the parsed data',
                           description='ctd stream test',
                           temporal_domain = tdom,
                           spatial_domain = sdom)
        data_product_id1 = self.DP.create_data_product(data_product=dp_obj,
                                                       stream_definition_id=parsed_stream_def_id,
                                                       parameter_dictionary=spdict_id)
        log.debug( 'new dp_id = %s', data_product_id1)

        self.DAMS.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id1)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.RR.find_objects(data_product_id1, PRED.hasStream, None, True)
        log.debug( 'Data product streams1 = %s', stream_ids)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        dataset_ids, _ = self.RR.find_objects(data_product_id1, PRED.hasDataset, RT.Dataset, True)
        log.debug( 'Data set for data_product_id1 = %s', dataset_ids[0])
        self.parsed_dataset = dataset_ids[0]

        #create the datastore at the beginning of each int test that persists data
        self._get_datastore(self.parsed_dataset)

        self.DP.activate_data_product_persistence(data_product_id=data_product_id1)

        dp_obj = IonObject(RT.DataProduct,
                           name='the raw data',
                           description='raw stream test',
                           temporal_domain = tdom,
                           spatial_domain = sdom)
        data_product_id2 = self.DP.create_data_product(data_product=dp_obj,
                                                       stream_definition_id=raw_stream_def_id,
                                                       parameter_dictionary=rpdict_id)
        log.debug( 'new dp_id = %s', str(data_product_id2))

        self.DAMS.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id2)

        self.DP.activate_data_product_persistence(data_product_id=data_product_id2)

        # spin up agent
        self.IMS.start_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id)

        self.addCleanup(self.IMS.stop_instrument_agent_instance,
                        instrument_agent_instance_id=instAgentInstance_id)

        #wait for start
        instance_obj = self.IMS.read_instrument_agent_instance(instAgentInstance_id)
        gate = ProcessStateGate(self.PDC.read_process,
                                instance_obj.agent_process_id,
                                ProcessStateEnum.RUNNING)
        # NOTE: `await` as a method name is Python-2-only; it is a reserved
        # keyword from Python 3.7 on.
        self.assertTrue(gate.await(30), "The instrument agent instance (%s) did not spawn in 30 seconds" %
                                        instance_obj.agent_process_id)

        # take snapshot of config
        snap_id = self.IMS.agent_state_checkpoint(instDevice_id, "xyzzy snapshot")
        snap_obj = self.RR.read_attachment(snap_id, include_content=True)
        print "Saved config:"
        print snap_obj.content

        #modify config
        instance_obj.driver_config["comms_config"] = "BAD_DATA"
        self.RR.update(instance_obj)

        #restore config
        self.IMS.agent_state_restore(instDevice_id, snap_id)
        instance_obj = self.RR.read(instAgentInstance_id)
        # the restored config must no longer carry the corrupted value
        self.assertNotEqual("BAD_DATA", instance_obj.driver_config["comms_config"])
class TestResourceRegistry(IonIntegrationTestCase):
    """Integration tests for the ResourceRegistry service: CRUD, lifecycle
    transitions, associations, and find operations.

    Requires a running capability container (started in setUp); all calls go
    through the ResourceRegistryServiceClient RPC endpoint. Expected error
    strings asserted below mirror the messages raised by the registry service.
    NOTE(review): `cm.exception.message` is Python-2-only; would need
    `str(cm.exception)` under Python 3.
    """

    # service_dependencies = [('resource_registry', {'resource_registry': {'persistent': True, 'force_clean': True}})]

    def setUp(self):
        """Start the container, deploy the COI rel, and build the RR client."""
        # Start container
        self._start_container()

        # Establish endpoint with container
        container_client = ContainerAgentClient(node=self.container.node, name=self.container.name)
        container_client.start_rel_from_url('res/deploy/r2coi.yml')

        # Now create client to the resource registry service
        self.resource_registry_service = ResourceRegistryServiceClient(node=self.container.node)

    def test_crud(self):
        """Exercise create/read/update/delete, including schema and revision errors."""
        # Some quick registry tests
        # Can't construct an IonObject with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", name="name", foo="bar")
        self.assertTrue(cm.exception.message == "__init__() got an unexpected keyword argument 'foo'")

        # Same check with the dict-style constructor
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name", "foo": "bar"})
        self.assertTrue(cm.exception.message == "__init__() got an unexpected keyword argument 'foo'")

        # Same check mixing dict and keyword arguments
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name"}, foo="bar")
        self.assertTrue(cm.exception.message == "__init__() got an unexpected keyword argument 'foo'")

        # Instantiate a valid object
        obj = IonObject("UserInfo", name="name")

        # Can't set attributes that aren't in the object's schema
        with self.assertRaises(AttributeError) as cm:
            setattr(obj, "foo", "bar")
        self.assertTrue(cm.exception.message == "'UserInfo' object has no attribute 'foo'")

        # Can't call update with an object that hasn't been persisted (no _id/_rev yet)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.update(obj)
        self.assertTrue(cm.exception.message.startswith("Object does not have required '_id' or '_rev' attribute"))

        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        # Cannot create object with _id and _rev fields pre-set
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create(read_obj)
        self.assertTrue(cm.exception.message.startswith("Doc must not have '_id'"))

        # Update object
        read_obj.name = "John Doe"
        self.resource_registry_service.update(read_obj)

        # Update should fail with revision mismatch (read_obj still carries the old _rev)
        with self.assertRaises(Conflict) as cm:
            self.resource_registry_service.update(read_obj)
        self.assertTrue(cm.exception.message.startswith("Object not based on most current version"))

        # Re-read (picks up current _rev) and update object
        read_obj = self.resource_registry_service.read(obj_id)
        self.resource_registry_service.update(read_obj)

        # Delete object
        self.resource_registry_service.delete(obj_id)

        # Make sure read, update and delete report NotFound after deletion
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.read(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.update(read_obj)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

    def test_lifecycle(self):
        """Walk an Attachment through DRAFT -> PLANNED -> DEVELOPED and check
        that repeating a consumed transition raises Inconsistent."""
        att = IonObject("Attachment", name='mine', description='desc')

        rid, rev = self.resource_registry_service.create(att)

        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.name, att.name)
        # New resources start in DRAFT
        self.assertEquals(att1.lcstate, LCS.DRAFT)

        new_state = self.resource_registry_service.execute_lifecycle_transition(rid, LCE.register)
        self.assertEquals(new_state, LCS.PLANNED)

        att2 = self.resource_registry_service.read(rid)
        self.assertEquals(att2.lcstate, LCS.PLANNED)

        # 'register' is not a valid event from PLANNED
        with self.assertRaises(Inconsistent) as cm:
            self.resource_registry_service.execute_lifecycle_transition(rid, LCE.register)
        self.assertTrue("type=Attachment, lcstate=PLANNED has no transition for event register" in cm.exception.message)

        # Transition guarded by the expected current state succeeds
        new_state = self.resource_registry_service.execute_lifecycle_transition(rid, LCE.develop, LCS.PLANNED)
        self.assertEquals(new_state, LCS.DEVELOPED)

        # Same guarded transition again fails: resource is no longer in PLANNED
        self.assertRaises(iex.Inconsistent, self.resource_registry_service.execute_lifecycle_transition,
                          resource_id=rid, transition_event=LCE.develop, current_lcstate=LCS.PLANNED)

    def test_association(self):
        """Exercise create/find/get/delete of associations between a
        UserIdentity subject and a UserInfo object, including every
        argument-validation error path."""
        # Instantiate UserIdentity object
        user_identity_obj = IonObject("UserIdentity", name="name")
        user_identity_obj_id, user_identity_obj_rev = self.resource_registry_service.create(user_identity_obj)
        read_user_identity_obj = self.resource_registry_service.read(user_identity_obj_id)

        # Instantiate UserInfo object
        user_info_obj = IonObject("UserInfo", name="name")
        user_info_obj_id, user_info_obj_rev = self.resource_registry_service.create(user_info_obj)
        read_user_info_obj = self.resource_registry_service.read(user_info_obj_id)

        # Test create failures: unknown predicate
        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.create_association(user_identity_obj_id, PRED.bogus, user_info_obj_id)
        self.assertTrue(cm.exception.message == "bogus")

        # Predicate not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(user_identity_obj_id, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Association must have all elements set")

        # Bad association type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(user_identity_obj_id, PRED.hasInfo, user_info_obj_id, 'bogustype')
        self.assertTrue(cm.exception.message == "Unsupported assoc_type: bogustype")

        # Subject id or object not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(None, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Association must have all elements set")

        # Object id or object not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(user_identity_obj_id, PRED.hasInfo, None)
        self.assertTrue(cm.exception.message == "Association must have all elements set")

        # Bad subject id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association("bogus", PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Object with id bogus does not exist.")

        # Bad object id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association(user_identity_obj_id, PRED.hasInfo, "bogus")
        self.assertTrue(cm.exception.message == "Object with id bogus does not exist.")

        # _id missing from subject (unpersisted object passed instead of id)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(user_identity_obj, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Subject id or rev not available")

        # _id missing from object (unpersisted object passed instead of id)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(user_identity_obj_id, PRED.hasInfo, user_info_obj)
        self.assertTrue(cm.exception.message == "Object id or rev not available")

        # Wrong subject type for the hasInfo predicate
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(user_info_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Illegal subject type UserInfo for predicate hasInfo")

        # Wrong object type for the hasInfo predicate
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(user_identity_obj_id, PRED.hasInfo, user_identity_obj_id)
        self.assertTrue(cm.exception.message == "Illegal object type UserIdentity for predicate hasInfo")

        # Create duplicate associations (second one uses assoc_type "H2R")
        assoc_id1, assoc_rev1 = self.resource_registry_service.create_association(user_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        assoc_id2, assoc_rev2 = self.resource_registry_service.create_association(user_identity_obj_id, PRED.hasInfo, user_info_obj_id, "H2R")

        # Search for associations (good cases) — by ids
        ret1 = self.resource_registry_service.find_associations(user_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        ret2 = self.resource_registry_service.find_associations(user_identity_obj_id, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(user_identity_obj_id, PRED.hasInfo, user_info_obj_id, True)
        ret2 = self.resource_registry_service.find_associations(user_identity_obj_id, PRED.hasInfo, id_only=True)
        ret3 = self.resource_registry_service.find_associations(predicate=PRED.hasInfo, id_only=True)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations (good cases) — by read (persisted) objects
        ret1 = self.resource_registry_service.find_associations(read_user_identity_obj, PRED.hasInfo, read_user_info_obj)
        ret2 = self.resource_registry_service.find_associations(read_user_identity_obj, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(user_identity_obj_id, PRED.hasInfo, read_user_info_obj, True)
        ret2 = self.resource_registry_service.find_associations(user_identity_obj_id, PRED.hasInfo, id_only=True)
        ret3 = self.resource_registry_service.find_associations(predicate=PRED.hasInfo, id_only=True)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(None, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(user_identity_obj_id, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(None, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        # Unpersisted objects passed where ids are needed
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(user_identity_obj, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Object id not available in subject")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(user_identity_obj_id, None, user_info_obj)
        self.assertTrue(cm.exception.message == "Object id not available in object")

        # Find subjects (good cases) — returns (subjects, associations) tuple
        subj_ret1 = self.resource_registry_service.find_subjects(RT.UserIdentity, PRED.hasInfo, user_info_obj_id, True)
        subj_ret2 = self.resource_registry_service.find_subjects(RT.UserIdentity, PRED.hasInfo, read_user_info_obj, True)
        self.assertTrue(len(subj_ret1) == len(subj_ret2))
        self.assertTrue(subj_ret1[0] == subj_ret2[0])
        self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id)

        subj_ret3 = self.resource_registry_service.find_subjects(None, PRED.hasInfo, user_info_obj_id, True)
        subj_ret4 = self.resource_registry_service.find_subjects(None, None, read_user_info_obj, True)
        self.assertTrue(len(subj_ret3) == len(subj_ret4))
        self.assertTrue(subj_ret3[0] == subj_ret4[0])
        self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id)

        # id_only=False variants return full objects, so compare by _id
        subj_ret5 = self.resource_registry_service.find_subjects(None, PRED.hasInfo, user_info_obj_id, False)
        subj_ret6 = self.resource_registry_service.find_subjects(None, None, read_user_info_obj, False)
        self.assertTrue(len(subj_ret5) == len(subj_ret6))
        self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id)
        self.assertTrue(subj_ret5[1][0]._id == subj_ret6[1][0]._id)

        # Find subjects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide object")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_subjects(RT.UserCredentials, PRED.bogus, user_info_obj_id, True)
        self.assertTrue(cm.exception.message == "bogus")

        # Mismatched type/predicate combinations simply return empty results
        ret = self.resource_registry_service.find_subjects(RT.UserInfo, PRED.hasCredentials, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_subjects(RT.UserCredentials, PRED.hasInfo, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(RT.UserCredentials, PRED.hasInfo, user_info_obj, True)
        self.assertTrue(cm.exception.message == "Object id not available in object")

        # Find objects (good cases) — mirror of find_subjects above
        subj_ret1 = self.resource_registry_service.find_objects(user_identity_obj_id, PRED.hasInfo, RT.UserInfo, True)
        subj_ret2 = self.resource_registry_service.find_objects(read_user_identity_obj, PRED.hasInfo, RT.UserInfo, True)
        self.assertTrue(len(subj_ret1) == len(subj_ret2))
        self.assertTrue(subj_ret1[0] == subj_ret2[0])
        self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id)

        subj_ret3 = self.resource_registry_service.find_objects(user_identity_obj_id, PRED.hasInfo, None, True)
        subj_ret4 = self.resource_registry_service.find_objects(user_identity_obj_id, None, None, True)
        self.assertTrue(len(subj_ret3) == len(subj_ret4))
        self.assertTrue(subj_ret3[0] == subj_ret4[0])
        self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id)

        subj_ret5 = self.resource_registry_service.find_objects(user_identity_obj_id, PRED.hasInfo, None, False)
        subj_ret6 = self.resource_registry_service.find_objects(read_user_identity_obj, None, None, False)
        self.assertTrue(len(subj_ret5) == len(subj_ret6))
        self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id)
        self.assertTrue(subj_ret5[1][0]._id == subj_ret6[1][0]._id)

        # Find objects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide subject")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_objects(user_identity_obj_id, PRED.bogus, RT.UserCredentials, True)
        self.assertTrue(cm.exception.message == "bogus")

        ret = self.resource_registry_service.find_objects(user_identity_obj_id, PRED.hasCredentials, RT.UserIdentity, True)
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_objects(user_identity_obj_id, PRED.hasInfo, RT.UserCredentials, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(user_identity_obj, PRED.hasInfo, RT.UserInfo, True)
        self.assertTrue(cm.exception.message == "Object id not available in subject")

        # Get association (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(None, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(user_identity_obj_id, None, None)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(None, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Illegal parameters")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(user_identity_obj, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Object id not available in subject")

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(user_identity_obj_id, None, user_info_obj)
        self.assertTrue(cm.exception.message == "Object id not available in object")

        # get_association on a duplicated triple is ambiguous
        with self.assertRaises(Inconsistent) as cm:
            self.resource_registry_service.get_association(user_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message.startswith("Duplicate associations found for subject/predicate/object"))

        # Delete one of the associations; the remaining one is now unambiguous
        self.resource_registry_service.delete_association(assoc_id1)
        assoc = self.resource_registry_service.get_association(user_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(assoc._id == assoc_id2)

        # Delete (bad cases)
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete_association("bogus")
        self.assertTrue(cm.exception.message == "Object with id bogus does not exist.")

        # Delete other association
        self.resource_registry_service.delete_association(assoc_id2)

        # Delete resources
        self.resource_registry_service.delete(user_identity_obj_id)
        self.resource_registry_service.delete(user_info_obj_id)

    def test_find_resources(self):
        """find_resources by type/lcstate/name, including the rule that
        find-by-name cannot be combined with an lcstate filter."""
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_resources(RT.UserInfo, LCS.DRAFT, "name", False)
        self.assertTrue(cm.exception.message == "find by name does not support lcstate")

        # Nothing persisted yet, so the name search comes back empty
        ret = self.resource_registry_service.find_resources(RT.UserInfo, None, "name", False)
        self.assertTrue(len(ret[0]) == 0)

        # Instantiate an object
        obj = IonObject("UserInfo", name="name")

        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        # Now findable by name ...
        ret = self.resource_registry_service.find_resources(RT.UserInfo, None, "name", False)
        self.assertTrue(len(ret[0]) == 1)
        self.assertTrue(ret[0][0]._id == read_obj._id)

        # ... and by lifecycle state (new resources start in DRAFT)
        ret = self.resource_registry_service.find_resources(RT.UserInfo, LCS.DRAFT, None, False)
        self.assertTrue(len(ret[0]) == 1)
        self.assertTrue(ret[0][0]._id == read_obj._id)
class TestObservatoryManagementServiceIntegration(IonIntegrationTestCase):
    """Integration tests for the ObservatoryManagement service.

    Builds a small observatory/site/device association tree via
    `_make_associations` and verifies frame-of-reference traversal,
    org assignment, and extended-resource views. Requires a running
    capability container with the full r2deploy rel.
    """

    def setUp(self):
        """Start the container, deploy services, and build all service clients."""
        # Start container
        #print 'instantiating container'
        self._start_container()
        #container = Container()
        #print 'starting container'
        #container.start()
        #print 'started container'

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.RR = ResourceRegistryServiceClient(node=self.container.node)
        self.OMS = ObservatoryManagementServiceClient(node=self.container.node)
        self.org_management_service = OrgManagementServiceClient(node=self.container.node)
        self.IMS = InstrumentManagementServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(node=self.container.node)
        self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()
        #print 'TestObservatoryManagementServiceIntegration: started services'

        # Used to publish ResourceAgentStateEvents in test_observatory_org_extended
        self.event_publisher = EventPublisher()

    # @unittest.skip('this exists only for debugging the launch process')
    # def test_just_the_setup(self):
    #     return

    def destroy(self, resource_ids):
        """Force-delete every site resource created by `_make_associations`.

        :param resource_ids: DotDict of resource ids as returned by _make_associations
        """
        self.OMS.force_delete_observatory(resource_ids.observatory_id)
        self.OMS.force_delete_subsite(resource_ids.subsite_id)
        self.OMS.force_delete_subsite(resource_ids.subsite2_id)
        self.OMS.force_delete_subsite(resource_ids.subsiteb_id)
        self.OMS.force_delete_subsite(resource_ids.subsitez_id)
        self.OMS.force_delete_platform_site(resource_ids.platform_site_id)
        self.OMS.force_delete_platform_site(resource_ids.platform_siteb_id)
        self.OMS.force_delete_platform_site(resource_ids.platform_siteb2_id)
        self.OMS.force_delete_platform_site(resource_ids.platform_site3_id)
        self.OMS.force_delete_instrument_site(resource_ids.instrument_site_id)
        self.OMS.force_delete_instrument_site(resource_ids.instrument_site2_id)
        self.OMS.force_delete_instrument_site(resource_ids.instrument_siteb3_id)
        self.OMS.force_delete_instrument_site(resource_ids.instrument_site4_id)

    #@unittest.skip('targeting')
    def test_resources_associations(self):
        """Smoke test: creating and destroying the full association tree works."""
        resources = self._make_associations()
        self.destroy(resources)

    #@unittest.skip('targeting')
    def test_find_related_frames_of_reference(self):
        """Verify OMS.find_related_frames_of_reference traverses the site tree
        correctly in both directions, for full and type-limited traversals."""
        # finding subordinates gives a dict of obj lists; convert objs to ids
        def idify(adict):
            ids = {}
            for k, v in adict.iteritems():
                ids[k] = []
                for obj in v:
                    ids[k].append(obj._id)
            return ids

        # a short version of the function we're testing, with id-ify
        def short(resource_id, output_types):
            ret = self.OMS.find_related_frames_of_reference(resource_id, output_types)
            return idify(ret)

        #set up associations first
        stuff = self._make_associations()

        #basic traversal of tree from instrument to platform
        ids = short(stuff.instrument_site_id, [RT.PlatformSite])
        self.assertIn(RT.PlatformSite, ids)
        self.assertIn(stuff.platform_site_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_siteb_id, ids[RT.PlatformSite])
        self.assertNotIn(stuff.platform_siteb2_id, ids[RT.PlatformSite])

        #since this is the first search, just make sure the input inst_id got stripped
        if RT.InstrumentSite in ids:
            self.assertNotIn(stuff.instrument_site_id, ids[RT.InstrumentSite])

        #basic traversal of tree from platform to instrument
        ids = short(stuff.platform_siteb_id, [RT.InstrumentSite])
        self.assertIn(RT.InstrumentSite, ids)
        self.assertIn(stuff.instrument_site_id, ids[RT.InstrumentSite])
        self.assertNotIn(stuff.instrument_site2_id, ids[RT.InstrumentSite])

        #full traversal of tree from observatory down to instrument
        ids = short(stuff.observatory_id, [RT.InstrumentSite])
        self.assertIn(RT.InstrumentSite, ids)
        self.assertIn(stuff.instrument_site_id, ids[RT.InstrumentSite])

        #full traversal of tree from instrument up to observatory
        ids = short(stuff.instrument_site_id, [RT.Observatory])
        self.assertIn(RT.Observatory, ids)
        self.assertIn(stuff.observatory_id, ids[RT.Observatory])

        #partial traversal, only down to platform
        ids = short(stuff.observatory_id, [RT.Subsite, RT.PlatformSite])
        self.assertIn(RT.PlatformSite, ids)
        self.assertIn(RT.Subsite, ids)
        self.assertIn(stuff.platform_site_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_siteb_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_siteb2_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_site3_id, ids[RT.PlatformSite])
        self.assertIn(stuff.subsite_id, ids[RT.Subsite])
        self.assertIn(stuff.subsite2_id, ids[RT.Subsite])
        self.assertIn(stuff.subsitez_id, ids[RT.Subsite])
        self.assertIn(stuff.subsiteb_id, ids[RT.Subsite])
        self.assertNotIn(RT.InstrumentSite, ids)

        #partial traversal upward, only up to platform/subsite ancestors
        ids = short(stuff.instrument_site_id, [RT.Subsite, RT.PlatformSite])
        self.assertIn(RT.PlatformSite, ids)
        self.assertIn(RT.Subsite, ids)
        self.assertIn(stuff.platform_siteb_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_site_id, ids[RT.PlatformSite])
        self.assertIn(stuff.subsite_id, ids[RT.Subsite])
        self.assertIn(stuff.subsiteb_id, ids[RT.Subsite])
        self.assertNotIn(stuff.subsite2_id, ids[RT.Subsite])
        self.assertNotIn(stuff.subsitez_id, ids[RT.Subsite])
        self.assertNotIn(stuff.platform_siteb2_id, ids[RT.PlatformSite])
        self.assertNotIn(RT.Observatory, ids)

        self.destroy(stuff)

    def _make_associations(self):
        """
        create one of each resource and association used by OMS
        to guard against problems in ion-definitions

        Returns a DotDict mapping descriptive names (observatory_id,
        subsite_id, ..., deployment_id) to the created resource ids.
        """

        #raise unittest.SkipTest("https://jira.oceanobservatories.org/tasks/browse/CISWCORE-41")

        """
        the tree we're creating (observatory, sites, platforms, instruments)

        rows are lettered, colums numbered.
         - first row is implied a
         - first column is implied 1
         - site Z, just because

        O--Sz
        |
        S--S2--P3--I4
        |
        Sb-Pb2-Ib3
        |
        P--I2 <- PlatformDevice, InstrumentDevice2
        |
        Pb <- PlatformDevice b
        |
        I <- InstrumentDevice
        """

        org_id = self.OMS.create_marine_facility(any_old(RT.Org))

        def create_under_org(resource_type):
            # Create a resource of the given type and register it with the org.
            # InstrumentDevice must go through IMS (extra setup done by that service).
            obj = any_old(resource_type)

            if RT.InstrumentDevice == resource_type:
                resource_id = self.IMS.create_instrument_device(obj)
            else:
                resource_id, _ = self.RR.create(obj)

            self.OMS.assign_resource_to_observatory_org(resource_id=resource_id, org_id=org_id)
            return resource_id

        #stuff we control
        observatory_id = create_under_org(RT.Observatory)
        subsite_id = create_under_org(RT.Subsite)
        subsite2_id = create_under_org(RT.Subsite)
        subsiteb_id = create_under_org(RT.Subsite)
        subsitez_id = create_under_org(RT.Subsite)
        platform_site_id = create_under_org(RT.PlatformSite)
        platform_siteb_id = create_under_org(RT.PlatformSite)
        platform_siteb2_id = create_under_org(RT.PlatformSite)
        platform_site3_id = create_under_org(RT.PlatformSite)
        instrument_site_id = create_under_org(RT.InstrumentSite)
        instrument_site2_id = create_under_org(RT.InstrumentSite)
        instrument_siteb3_id = create_under_org(RT.InstrumentSite)
        instrument_site4_id = create_under_org(RT.InstrumentSite)

        #stuff we associate to
        instrument_device_id = create_under_org(RT.InstrumentDevice)
        instrument_device2_id = create_under_org(RT.InstrumentDevice)
        platform_device_id = create_under_org(RT.PlatformDevice)
        platform_deviceb_id = create_under_org(RT.PlatformDevice)
        instrument_model_id, _ = self.RR.create(any_old(RT.InstrumentModel))
        platform_model_id, _ = self.RR.create(any_old(RT.PlatformModel))
        deployment_id, _ = self.RR.create(any_old(RT.Deployment))

        #observatory
        self.RR.create_association(observatory_id, PRED.hasSite, subsite_id)
        self.RR.create_association(observatory_id, PRED.hasSite, subsitez_id)

        #site
        self.RR.create_association(subsite_id, PRED.hasSite, subsite2_id)
        self.RR.create_association(subsite_id, PRED.hasSite, subsiteb_id)
        self.RR.create_association(subsite2_id, PRED.hasSite, platform_site3_id)
        self.RR.create_association(subsiteb_id, PRED.hasSite, platform_siteb2_id)
        self.RR.create_association(subsiteb_id, PRED.hasSite, platform_site_id)

        #platform_site(s)
        self.RR.create_association(platform_site3_id, PRED.hasSite, instrument_site4_id)
        self.RR.create_association(platform_siteb2_id, PRED.hasSite, instrument_siteb3_id)
        self.RR.create_association(platform_site_id, PRED.hasSite, instrument_site2_id)
        self.RR.create_association(platform_site_id, PRED.hasSite, platform_siteb_id)
        self.RR.create_association(platform_siteb_id, PRED.hasSite, instrument_site_id)

        self.RR.create_association(platform_siteb_id, PRED.hasDevice, platform_deviceb_id)

        self.RR.create_association(platform_site_id, PRED.hasModel, platform_model_id)
        self.RR.create_association(platform_site_id, PRED.hasDevice, platform_device_id)
        self.RR.create_association(platform_site_id, PRED.hasDeployment, deployment_id)

        #instrument_site(s)
        self.RR.create_association(instrument_site_id, PRED.hasModel, instrument_model_id)
        self.RR.create_association(instrument_site_id, PRED.hasDevice, instrument_device_id)
        self.RR.create_association(instrument_site_id, PRED.hasDeployment, deployment_id)

        self.RR.create_association(instrument_site2_id, PRED.hasDevice, instrument_device2_id)

        #platform_device
        self.RR.create_association(platform_device_id, PRED.hasModel, platform_model_id)

        #instrument_device
        self.RR.create_association(instrument_device_id, PRED.hasModel, instrument_model_id)
        self.RR.create_association(instrument_device2_id, PRED.hasModel, instrument_model_id)

        # Bundle all created ids for the caller (and for destroy())
        ret = DotDict()
        ret.org_id = org_id
        ret.observatory_id = observatory_id
        ret.subsite_id = subsite_id
        ret.subsite2_id = subsite2_id
        ret.subsiteb_id = subsiteb_id
        ret.subsitez_id = subsitez_id
        ret.platform_site_id = platform_site_id
        ret.platform_siteb_id = platform_siteb_id
        ret.platform_siteb2_id = platform_siteb2_id
        ret.platform_site3_id = platform_site3_id
        ret.instrument_site_id = instrument_site_id
        ret.instrument_site2_id = instrument_site2_id
        ret.instrument_siteb3_id = instrument_siteb3_id
        ret.instrument_site4_id = instrument_site4_id
        ret.instrument_device_id = instrument_device_id
        ret.instrument_device2_id = instrument_device2_id
        ret.platform_device_id = platform_device_id
        ret.platform_deviceb_id = platform_deviceb_id
        ret.instrument_model_id = instrument_model_id
        ret.platform_model_id = platform_model_id
        ret.deployment_id = deployment_id

        return ret

    #@unittest.skip("targeting")
    def test_create_observatory(self):
        """Create an Observatory and force-delete it."""
        observatory_obj = IonObject(RT.Observatory,
                                    name='TestFacility',
                                    description='some new mf')
        observatory_id = self.OMS.create_observatory(observatory_obj)
        self.OMS.force_delete_observatory(observatory_id)

    #@unittest.skip("targeting")
    def test_find_observatory_org(self):
        """Build an Org with an Observatory/Subsite/PlatformSite/InstrumentSite
        chain, verify all hasSite/hasResource links, then unassign and delete."""
        org_obj = IonObject(RT.Org,
                            name='TestOrg',
                            description='some new mf org')

        org_id = self.OMS.create_marine_facility(org_obj)

        observatory_obj = IonObject(RT.Observatory,
                                    name='TestObservatory',
                                    description='some new obs')
        observatory_id = self.OMS.create_observatory(observatory_obj)

        #make association
        self.OMS.assign_resource_to_observatory_org(observatory_id, org_id)

        #find association
        org_objs = self.OMS.find_org_by_observatory(observatory_id)
        self.assertEqual(1, len(org_objs))
        self.assertEqual(org_id, org_objs[0]._id)
        print("org_id=<" + org_id + ">")

        #create a subsite with parent Observatory
        subsite_obj = IonObject(RT.Subsite,
                                name='TestSubsite',
                                description='sample subsite')
        subsite_id = self.OMS.create_subsite(subsite_obj, observatory_id)
        self.assertIsNotNone(subsite_id, "Subsite not created.")

        # verify that Subsite is linked to Observatory
        mf_subsite_assoc = self.RR.get_association(observatory_id, PRED.hasSite, subsite_id)
        self.assertIsNotNone(mf_subsite_assoc, "Subsite not connected to Observatory.")

        # add the Subsite as a resource of this Observatory
        self.OMS.assign_resource_to_observatory_org(resource_id=subsite_id, org_id=org_id)
        # verify that Subsite is linked to Org
        org_subsite_assoc = self.RR.get_association(org_id, PRED.hasResource, subsite_id)
        self.assertIsNotNone(org_subsite_assoc, "Subsite not connected as resource to Org.")

        #create a logical platform with parent Subsite
        platform_site_obj = IonObject(RT.PlatformSite,
                                      name='TestPlatformSite',
                                      description='sample logical platform')
        platform_site_id = self.OMS.create_platform_site(platform_site_obj, subsite_id)
        self.assertIsNotNone(platform_site_id, "PlatformSite not created.")

        # verify that PlatformSite is linked to Site
        site_lp_assoc = self.RR.get_association(subsite_id, PRED.hasSite, platform_site_id)
        self.assertIsNotNone(site_lp_assoc, "PlatformSite not connected to Site.")

        # add the PlatformSite as a resource of this Observatory
        self.OMS.assign_resource_to_observatory_org(resource_id=platform_site_id, org_id=org_id)
        # verify that PlatformSite is linked to Org
        org_lp_assoc = self.RR.get_association(org_id, PRED.hasResource, platform_site_id)
        self.assertIsNotNone(org_lp_assoc, "PlatformSite not connected as resource to Org.")

        #create a logical instrument with parent logical platform
        instrument_site_obj = IonObject(RT.InstrumentSite,
                                        name='TestInstrumentSite',
                                        description='sample logical instrument')
        instrument_site_id = self.OMS.create_instrument_site(instrument_site_obj, platform_site_id)
        self.assertIsNotNone(instrument_site_id, "InstrumentSite not created.")

        # verify that InstrumentSite is linked to PlatformSite
        li_lp_assoc = self.RR.get_association(platform_site_id, PRED.hasSite, instrument_site_id)
        self.assertIsNotNone(li_lp_assoc, "InstrumentSite not connected to PlatformSite.")

        # add the InstrumentSite as a resource of this Observatory
        self.OMS.assign_resource_to_observatory_org(resource_id=instrument_site_id, org_id=org_id)
        # verify that InstrumentSite is linked to Org
        org_li_assoc = self.RR.get_association(org_id, PRED.hasResource, instrument_site_id)
        self.assertIsNotNone(org_li_assoc, "InstrumentSite not connected as resource to Org.")

        # remove the InstrumentSite as a resource of this Observatory
        self.OMS.unassign_resource_from_observatory_org(instrument_site_id, org_id)
        # verify that InstrumentSite is no longer linked to Org
        assocs, _ = self.RR.find_objects(org_id, PRED.hasResource, RT.InstrumentSite, id_only=True)
        self.assertEqual(len(assocs), 0)

        # remove the InstrumentSite
        self.OMS.delete_instrument_site(instrument_site_id)
        # (soft) delete leaves the hasSite association behind
        assocs, _ = self.RR.find_objects(platform_site_id, PRED.hasSite, RT.InstrumentSite, id_only=True)
        self.assertEqual(len(assocs), 1)
        #todo: remove the dangling association

        # remove the PlatformSite as a resource of this Observatory
        self.OMS.unassign_resource_from_observatory_org(platform_site_id, org_id)
        # verify that PlatformSite is no longer linked to Org
        assocs, _ = self.RR.find_objects(org_id, PRED.hasResource, RT.PlatformSite, id_only=True)
        self.assertEqual(len(assocs), 0)

        # remove the Site as a resource of this Observatory
        self.OMS.unassign_resource_from_observatory_org(subsite_id, org_id)
        # verify that Site is no longer linked to Org
        assocs, _ = self.RR.find_objects(org_id, PRED.hasResource, RT.Subsite, id_only=True)
        self.assertEqual(len(assocs), 0)

        self.RR.delete(org_id)
        self.OMS.force_delete_observatory(observatory_id)
        self.OMS.force_delete_subsite(subsite_id)
        self.OMS.force_delete_platform_site(platform_site_id)
        self.OMS.force_delete_instrument_site(instrument_site_id)

    #@unittest.skip("in development...")
    @attr('EXT')
    def test_observatory_org_extended(self):
        """Check the extended-resource views for a PlatformSite, a marine
        facility Org, the ION org, and an InstrumentSite with device state events."""
        stuff = self._make_associations()

        parsed_pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)

        parsed_stream_def_id = self.pubsubcli.create_stream_definition(name='parsed', parameter_dictionary_id=parsed_pdict_id)

        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        dp_obj = IonObject(RT.DataProduct,
                           name='the parsed data',
                           description='ctd stream test',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        data_product_id1 = self.dpclient.create_data_product(data_product=dp_obj, stream_definition_id=parsed_stream_def_id)

        self.damsclient.assign_data_product(input_resource_id=stuff.instrument_device_id, data_product_id=data_product_id1)

        #--------------------------------------------------------------------------------
        # Get the extended Site (platformSite)
        #--------------------------------------------------------------------------------
        extended_site = self.OMS.get_site_extension(stuff.platform_site_id)
        log.debug("extended_site: %s ", str(extended_site))
        self.assertEqual(1, len(extended_site.platform_devices))
        self.assertEqual(1, len(extended_site.platform_models))
        self.assertEqual(stuff.platform_device_id, extended_site.platform_devices[0]._id)
        self.assertEqual(stuff.platform_model_id, extended_site.platform_models[0]._id)

        #--------------------------------------------------------------------------------
        # Get the extended Org
        #--------------------------------------------------------------------------------
        #test the extended resource
        extended_org = self.org_management_service.get_marine_facility_extension(stuff.org_id)
        log.debug("test_observatory_org_extended: extended_org: %s ", str(extended_org))
        #self.assertEqual(2, len(extended_org.instruments_deployed) )
        #self.assertEqual(1, len(extended_org.platforms_not_deployed) )
        # Counts match what _make_associations created: 2 platform devices,
        # 2 instrument devices, 2 models of each kind assigned to the org
        self.assertEqual(2, extended_org.number_of_platforms)
        self.assertEqual(2, len(extended_org.platform_models))
        self.assertEqual(2, extended_org.number_of_instruments)
        self.assertEqual(2, len(extended_org.instrument_models))

        #test the extended resource of the ION org, which owns none of the above
        ion_org_id = self.org_management_service.find_org()
        extended_org = self.org_management_service.get_marine_facility_extension(ion_org_id._id)
        log.debug("test_observatory_org_extended: extended_ION_org: %s ", str(extended_org))
        self.assertEqual(0, len(extended_org.members))
        self.assertEqual(0, extended_org.number_of_platforms)
        #self.assertEqual(1, len(extended_org.sites))

        #--------------------------------------------------------------------------------
        # Get the extended Site
        #--------------------------------------------------------------------------------

        #create device state events to use for op/non-op filtering in extended site
        t = get_ion_ts()
        self.event_publisher.publish_event(ts_created=t,
                                           event_type='ResourceAgentStateEvent',
                                           origin=stuff.instrument_device_id,
                                           state=ResourceAgentState.STREAMING)

        self.event_publisher.publish_event(ts_created=t,
                                           event_type='ResourceAgentStateEvent',
                                           origin=stuff.instrument_device2_id,
                                           state=ResourceAgentState.INACTIVE)
        # only logged here; no assertions on the event-driven fields
        extended_site = self.OMS.get_site_extension(stuff.instrument_site2_id)
        log.debug("test_observatory_org_extended: extended_site: %s ", str(extended_site))

        self.dpclient.delete_data_product(data_product_id1)
class TestDeployment(IonIntegrationTestCase):
    """Integration tests for Deployment resource lifecycle via the
    Observatory Management Service (create/read/delete and activation)."""

    def setUp(self):
        # Start container and deploy the standard r2 service rel.
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Service clients used by the tests below.
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)

    #@unittest.skip("targeting")
    def test_create_deployment(self):
        """Create a deployment with an initial site and device, verify the
        site/device associations, then delete it and verify it is gone."""

        #create a deployment with metadata and an initial site and device
        platform_site__obj = IonObject(RT.PlatformSite,
                                       name='PlatformSite1',
                                       description='test platform site')
        site_id = self.omsclient.create_platform_site(platform_site__obj)

        platform_device__obj = IonObject(RT.PlatformDevice,
                                         name='PlatformDevice1',
                                         description='test platform device')
        device_id = self.imsclient.create_platform_device(platform_device__obj)

        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment',
                                   description='some new deployment')
        deployment_id = self.omsclient.create_deployment(deployment_obj,
                                                         site_id,
                                                         device_id)
        log.debug("test_create_deployment: created deployment id: %s ",
                  str(deployment_id))

        #retrieve the deployment objects and check that the assoc site and device are attached
        read_deployment_obj = self.omsclient.read_deployment(deployment_id)
        log.debug("test_create_deployment: created deployment obj: %s ",
                  str(read_deployment_obj))

        # Exactly one site and one device should point at this deployment.
        site_ids, _ = self.rrclient.find_subjects(RT.PlatformSite,
                                                  PRED.hasDeployment,
                                                  deployment_id, True)
        self.assertEqual(len(site_ids), 1)

        device_ids, _ = self.rrclient.find_subjects(RT.PlatformDevice,
                                                    PRED.hasDeployment,
                                                    deployment_id, True)
        self.assertEqual(len(device_ids), 1)

        #delete the deployment
        self.omsclient.delete_deployment(deployment_id)

        # now try to get the deleted dp object; reading it must raise NotFound
        with self.assertRaises(NotFound):
            self.omsclient.read_deployment(deployment_id)

    #@unittest.skip("targeting")
    def test_activate_deployment(self):
        """Build a platform site/device pair (with models) plus a child
        instrument site/device pair, deploy them, and activate the deployment."""

        #create a deployment with metadata and an initial site and device
        platform_site__obj = IonObject(RT.PlatformSite,
                                       name='PlatformSite1',
                                       description='test platform site')
        site_id = self.omsclient.create_platform_site(platform_site__obj)

        platform_device__obj = IonObject(RT.PlatformDevice,
                                         name='PlatformDevice1',
                                         description='test platform device')
        device_id = self.imsclient.create_platform_device(platform_device__obj)

        platform_model__obj = IonObject(RT.PlatformModel,
                                        name='PlatformModel1',
                                        description='test platform model')
        model_id = self.imsclient.create_platform_model(platform_model__obj)

        # Bind the same model to both device and site so activation can match them.
        self.imsclient.assign_platform_model_to_platform_device(model_id, device_id)
        self.omsclient.assign_platform_model_to_platform_site(model_id, site_id)

        #create a deployment with metadata and an initial site and device
        instrument_site__obj = IonObject(RT.InstrumentSite,
                                         name='InstrumentSite1',
                                         description='test instrument site')
        instrument_site_id = self.omsclient.create_instrument_site(
            instrument_site__obj, site_id)

        instrument_device__obj = IonObject(RT.InstrumentDevice,
                                           name='InstrumentDevice1',
                                           description='test instrument device')
        instrument_device_id = self.imsclient.create_instrument_device(
            instrument_device__obj)
        # Hang the instrument device off the platform device.
        self.rrclient.create_association(device_id, PRED.hasDevice,
                                         instrument_device_id)

        instrument_model__obj = IonObject(RT.InstrumentModel,
                                          name='InstrumentModel1',
                                          description='test instrument model')
        instrument_model_id = self.imsclient.create_instrument_model(
            instrument_model__obj)
        self.imsclient.assign_instrument_model_to_instrument_device(
            instrument_model_id, instrument_device_id)
        self.omsclient.assign_instrument_model_to_instrument_site(
            instrument_model_id, instrument_site_id)
        #self.rrclient.create_association(instrument_site_id, PRED.hasModel, instrument_model_id)

        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment',
                                   description='some new deployment')
        deployment_id = self.omsclient.create_deployment(deployment_obj,
                                                         site_id,
                                                         device_id)
        log.debug("test_create_deployment: created deployment id: %s ",
                  str(deployment_id))

        self.omsclient.activate_deployment(deployment_id)
class TestDataProductManagementServiceIntegration(IonIntegrationTestCase):
    """Integration tests for DataProductManagementService: create/update/delete
    of data products, persistence activation/suspension, derived products,
    stream-definition association, and QC lookup-value handling."""

    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Service clients used across the tests.
        self.dpsc_cli = DataProductManagementServiceClient()
        self.rrclient = ResourceRegistryServiceClient()
        self.damsclient = DataAcquisitionManagementServiceClient()
        self.pubsubcli = PubsubManagementServiceClient()
        self.ingestclient = IngestionManagementServiceClient()
        self.process_dispatcher = ProcessDispatcherServiceClient()
        self.dataset_management = DatasetManagementServiceClient()
        self.unsc = UserNotificationServiceClient()
        self.data_retriever = DataRetrieverServiceClient()

        #------------------------------------------
        # Create the environment
        #------------------------------------------
        datastore_name = CACHE_DATASTORE_NAME
        self.db = self.container.datastore_manager.get_datastore(datastore_name)
        self.stream_def_id = self.pubsubcli.create_stream_definition(name='SBE37_CDM')

        self.process_definitions = {}
        ingestion_worker_definition = ProcessDefinition(name='ingestion worker')
        ingestion_worker_definition.executable = {
            'module': 'ion.processes.data.ingestion.science_granule_ingestion_worker',
            'class': 'ScienceGranuleIngestionWorker'
        }
        process_definition_id = self.process_dispatcher.create_process_definition(
            process_definition=ingestion_worker_definition)
        self.process_definitions['ingestion_worker'] = process_definition_id

        # Bookkeeping for teardown (see cleaning_up()).
        self.pids = []
        self.exchange_points = []
        self.exchange_names = []

        #------------------------------------------------------------------------------------------------
        # First launch the ingestors
        #------------------------------------------------------------------------------------------------
        self.exchange_space = 'science_granule_ingestion'
        self.exchange_point = 'science_data'
        config = DotDict()
        config.process.datastore_name = 'datasets'
        config.process.queue_name = self.exchange_space

        self.exchange_names.append(self.exchange_space)
        self.exchange_points.append(self.exchange_point)

        pid = self.process_dispatcher.schedule_process(
            self.process_definitions['ingestion_worker'], configuration=config)
        log.debug("the ingestion worker process id: %s", pid)
        self.pids.append(pid)

        self.addCleanup(self.cleaning_up)

    def cleaning_up(self):
        """Terminate spawned processes and delete the exchange queues/points
        created during setUp. Best-effort: failures are logged, not raised."""
        for pid in self.pids:
            log.debug("number of pids to be terminated: %s", len(self.pids))
            try:
                self.process_dispatcher.cancel_process(pid)
                log.debug("Terminated the process: %s", pid)
            except Exception:
                # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
                # still propagate; cancellation remains best-effort.
                log.debug("could not terminate the process id: %s" % pid)
        IngestionManagementIntTest.clean_subscriptions()

        for xn in self.exchange_names:
            xni = self.container.ex_manager.create_xn_queue(xn)
            xni.delete()
        for xp in self.exchange_points:
            xpi = self.container.ex_manager.create_xp(xp)
            xpi.delete()

    def get_datastore(self, dataset_id):
        """Return the science-data datastore backing the given dataset."""
        dataset = self.dataset_management.read_dataset(dataset_id)
        datastore_name = dataset.datastore_name
        datastore = self.container.datastore_manager.get_datastore(
            datastore_name, DataStore.DS_PROFILE.SCIDATA)
        return datastore

    @attr('EXT')
    @attr('PREP')
    def test_create_data_product(self):
        """Exercise the full data product lifecycle: create (with and without
        explicit stream-def call form), read, update, prepare-support, delete."""
        #------------------------------------------------------------------------------------------------
        # create a stream definition for the data from the ctd simulator
        #------------------------------------------------------------------------------------------------
        parameter_dictionary = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict')
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(
            name='Simulated CTD data',
            parameter_dictionary_id=parameter_dictionary._id)
        log.debug("Created stream def id %s" % ctd_stream_def_id)

        #------------------------------------------------------------------------------------------------
        # test creating a new data product w/o a stream definition
        #------------------------------------------------------------------------------------------------

        # Generic time-series data domain creation
        tdom, sdom = time_series_domain()

        dp_obj = IonObject(RT.DataProduct,
                           name='DP1',
                           description='some new dp',
                           temporal_domain=tdom.dump(),
                           spatial_domain=sdom.dump())

        dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 10.0
        dp_obj.geospatial_bounds.geospatial_latitude_limit_south = -10.0
        dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 10.0
        dp_obj.geospatial_bounds.geospatial_longitude_limit_west = -10.0
        dp_obj.ooi_product_name = "PRODNAME"

        #------------------------------------------------------------------------------------------------
        # Create a set of ParameterContext objects to define the parameters in the coverage, add each to the ParameterDictionary
        #------------------------------------------------------------------------------------------------
        dp_id = self.dpsc_cli.create_data_product(data_product=dp_obj,
                                                  stream_definition_id=ctd_stream_def_id)

        # Assert that the data product has an associated stream at this stage
        stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, RT.Stream, True)
        self.assertNotEquals(len(stream_ids), 0)

        # Assert that the data product has an associated stream def at this stage
        stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStreamDefinition, RT.StreamDefinition, True)
        self.assertNotEquals(len(stream_ids), 0)

        self.dpsc_cli.activate_data_product_persistence(dp_id)

        dp_obj = self.dpsc_cli.read_data_product(dp_id)
        self.assertIsNotNone(dp_obj)
        # Symmetric +/-10 bounds should yield a centroid at the origin.
        self.assertEquals(dp_obj.geospatial_point_center.lat, 0.0)
        log.debug('Created data product %s', dp_obj)

        #------------------------------------------------------------------------------------------------
        # test creating a new data product with a stream definition
        #------------------------------------------------------------------------------------------------
        log.debug('Creating new data product with a stream definition')
        dp_obj = IonObject(RT.DataProduct,
                           name='DP2',
                           description='some new dp',
                           temporal_domain=tdom.dump(),
                           spatial_domain=sdom.dump())

        dp_id2 = self.dpsc_cli.create_data_product(dp_obj, ctd_stream_def_id)
        self.dpsc_cli.activate_data_product_persistence(dp_id2)
        log.debug('new dp_id = %s' % dp_id2)

        #------------------------------------------------------------------------------------------------
        #make sure data product is associated with stream def
        #------------------------------------------------------------------------------------------------
        streamdefs = []
        streams, _ = self.rrclient.find_objects(dp_id2, PRED.hasStream, RT.Stream, True)
        for s in streams:
            log.debug("Checking stream %s" % s)
            sdefs, _ = self.rrclient.find_objects(s, PRED.hasStreamDefinition, RT.StreamDefinition, True)
            for sd in sdefs:
                log.debug("Checking streamdef %s" % sd)
                streamdefs.append(sd)
        self.assertIn(ctd_stream_def_id, streamdefs)

        group_names = self.dpsc_cli.get_data_product_group_list()
        self.assertIn("PRODNAME", group_names)

        # test reading a non-existent data product
        log.debug('reading non-existent data product')
        with self.assertRaises(NotFound):
            dp_obj = self.dpsc_cli.read_data_product('some_fake_id')

        # update a data product (tests read also)
        log.debug('Updating data product')
        # first get the existing dp object
        dp_obj = self.dpsc_cli.read_data_product(dp_id)

        # now tweak the object
        dp_obj.description = 'the very first dp'
        dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 20.0
        dp_obj.geospatial_bounds.geospatial_latitude_limit_south = -20.0
        dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 20.0
        dp_obj.geospatial_bounds.geospatial_longitude_limit_west = -20.0
        # now write the dp back to the registry
        update_result = self.dpsc_cli.update_data_product(dp_obj)

        # now get the dp back to see if it was updated
        dp_obj = self.dpsc_cli.read_data_product(dp_id)
        self.assertEquals(dp_obj.description, 'the very first dp')
        self.assertEquals(dp_obj.geospatial_point_center.lat, 0.0)
        log.debug('Updated data product %s', dp_obj)

        #test extension
        extended_product = self.dpsc_cli.get_data_product_extension(dp_id)
        self.assertEqual(dp_id, extended_product._id)
        self.assertEqual(ComputedValueAvailability.PROVIDED,
                         extended_product.computed.product_download_size_estimated.status)
        self.assertEqual(0, extended_product.computed.product_download_size_estimated.value)
        self.assertEqual(ComputedValueAvailability.PROVIDED,
                         extended_product.computed.parameters.status)
        #log.debug("test_create_data_product: parameters %s" % extended_product.computed.parameters.value)

        # Retained for the commented-out simplejson dumps below.
        def ion_object_encoder(obj):
            return obj.__dict__

        #test prepare for create
        data_product_data = self.dpsc_cli.prepare_data_product_support()

        #print simplejson.dumps(data_product_data, default=ion_object_encoder, indent= 2)

        self.assertEqual(data_product_data._id, "")
        self.assertEqual(data_product_data.type_, OT.DataProductPrepareSupport)
        self.assertEqual(len(data_product_data.associations['StreamDefinition'].resources), 2)
        self.assertEqual(len(data_product_data.associations['Dataset'].resources), 0)
        self.assertEqual(len(data_product_data.associations['StreamDefinition'].associated_resources), 0)
        self.assertEqual(len(data_product_data.associations['Dataset'].associated_resources), 0)

        #test prepare for update
        data_product_data = self.dpsc_cli.prepare_data_product_support(dp_id)

        #print simplejson.dumps(data_product_data, default=ion_object_encoder, indent= 2)

        self.assertEqual(data_product_data._id, dp_id)
        self.assertEqual(data_product_data.type_, OT.DataProductPrepareSupport)
        self.assertEqual(len(data_product_data.associations['StreamDefinition'].resources), 2)
        self.assertEqual(len(data_product_data.associations['Dataset'].resources), 1)
        self.assertEqual(len(data_product_data.associations['StreamDefinition'].associated_resources), 1)
        self.assertEqual(data_product_data.associations['StreamDefinition'].associated_resources[0].s, dp_id)
        self.assertEqual(len(data_product_data.associations['Dataset'].associated_resources), 1)
        self.assertEqual(data_product_data.associations['Dataset'].associated_resources[0].s, dp_id)

        # now 'delete' the data product
        log.debug("deleting data product: %s" % dp_id)
        self.dpsc_cli.delete_data_product(dp_id)

        # Assert that there are no associated streams leftover after deleting the data product
        stream_ids, assoc_ids = self.rrclient.find_objects(dp_id, PRED.hasStream, RT.Stream, True)
        self.assertEquals(len(stream_ids), 0)
        self.assertEquals(len(assoc_ids), 0)

        self.dpsc_cli.force_delete_data_product(dp_id)

        # now try to get the deleted dp object
        with self.assertRaises(NotFound):
            dp_obj = self.dpsc_cli.read_data_product(dp_id)

        # Get the events corresponding to the data product
        ret = self.unsc.get_recent_events(resource_id=dp_id)
        events = ret.value
        for event in events:
            log.debug("event time: %s" % event.ts_created)
        self.assertTrue(len(events) > 0)

    def test_data_product_stream_def(self):
        """A data product's stream definition must round-trip through
        get_data_product_stream_definition()."""
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(
            name='Simulated CTD data', parameter_dictionary_id=pdict_id)

        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        dp_obj = IonObject(RT.DataProduct,
                           name='DP1',
                           description='some new dp',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        dp_id = self.dpsc_cli.create_data_product(data_product=dp_obj,
                                                  stream_definition_id=ctd_stream_def_id)

        stream_def_id = self.dpsc_cli.get_data_product_stream_definition(dp_id)
        self.assertEquals(ctd_stream_def_id, stream_def_id)

    def test_derived_data_product(self):
        """A derived (child) product with a narrower stream definition should
        receive only the fields in its definition when the parent is published."""
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(
            name='ctd parsed', parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsubcli.delete_stream_definition, ctd_stream_def_id)

        tdom, sdom = time_series_domain()
        dp = DataProduct(name='Instrument DP',
                         temporal_domain=tdom.dump(),
                         spatial_domain=sdom.dump())
        dp_id = self.dpsc_cli.create_data_product(dp, stream_definition_id=ctd_stream_def_id)
        self.addCleanup(self.dpsc_cli.force_delete_data_product, dp_id)

        self.dpsc_cli.activate_data_product_persistence(dp_id)
        self.addCleanup(self.dpsc_cli.suspend_data_product_persistence, dp_id)

        dataset_ids, _ = self.rrclient.find_objects(subject=dp_id,
                                                    predicate=PRED.hasDataset,
                                                    id_only=True)
        if not dataset_ids:
            raise NotFound("Data Product %s dataset does not exist" % str(dp_id))
        dataset_id = dataset_ids[0]

        # Make the derived data product
        simple_stream_def_id = self.pubsubcli.create_stream_definition(
            name='TEMPWAT stream def',
            parameter_dictionary_id=pdict_id,
            available_fields=['time', 'temp'])
        tempwat_dp = DataProduct(name='TEMPWAT')
        tempwat_dp_id = self.dpsc_cli.create_data_product(
            tempwat_dp,
            stream_definition_id=simple_stream_def_id,
            parent_data_product_id=dp_id)
        self.addCleanup(self.dpsc_cli.delete_data_product, tempwat_dp_id)

        # Check that the streams associated with the data product are persisted with
        stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, RT.Stream, True)
        for stream_id in stream_ids:
            self.assertTrue(self.ingestclient.is_persisted(stream_id))

        stream_id = stream_ids[0]
        route = self.pubsubcli.read_stream_route(stream_id=stream_id)

        rdt = RecordDictionaryTool(stream_definition_id=ctd_stream_def_id)
        rdt['time'] = np.arange(20)
        rdt['temp'] = np.arange(20)
        rdt['pressure'] = np.arange(20)

        publisher = StandaloneStreamPublisher(stream_id, route)

        # Wait for ingestion to signal the dataset was modified before retrieving.
        dataset_modified = Event()

        def cb(*args, **kwargs):
            dataset_modified.set()
        es = EventSubscriber(event_type=OT.DatasetModified, callback=cb,
                             origin=dataset_id, auto_delete=True)
        es.start()
        self.addCleanup(es.stop)

        publisher.publish(rdt.to_granule())
        self.assertTrue(dataset_modified.wait(30))

        tempwat_dataset_ids, _ = self.rrclient.find_objects(tempwat_dp_id,
                                                            PRED.hasDataset,
                                                            id_only=True)
        tempwat_dataset_id = tempwat_dataset_ids[0]
        granule = self.data_retriever.retrieve(tempwat_dataset_id,
                                               delivery_format=simple_stream_def_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        np.testing.assert_array_equal(rdt['time'], np.arange(20))
        # Only the derived product's declared fields should be present.
        self.assertEquals(set(rdt.fields), set(['time', 'temp']))

    def test_activate_suspend_data_product(self):
        """Activating persistence stores published granules; suspending stops
        storage; re-activating resumes it (L4-CI-SA-RQ-267 / RQ-308)."""
        #------------------------------------------------------------------------------------------------
        # create a stream definition for the data from the ctd simulator
        #------------------------------------------------------------------------------------------------
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubcli.create_stream_definition(
            name='Simulated CTD data', parameter_dictionary_id=pdict_id)
        log.debug("Created stream def id %s" % ctd_stream_def_id)

        #------------------------------------------------------------------------------------------------
        # test creating a new data product w/o a stream definition
        #------------------------------------------------------------------------------------------------
        # Construct temporal and spatial Coordinate Reference System objects
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        dp_obj = IonObject(RT.DataProduct,
                           name='DP1',
                           description='some new dp',
                           temporal_domain=tdom,
                           spatial_domain=sdom)
        log.debug("Created an IonObject for a data product: %s" % dp_obj)

        #------------------------------------------------------------------------------------------------
        # Create a set of ParameterContext objects to define the parameters in the coverage, add each to the ParameterDictionary
        #------------------------------------------------------------------------------------------------
        dp_id = self.dpsc_cli.create_data_product(data_product=dp_obj,
                                                  stream_definition_id=ctd_stream_def_id)

        #------------------------------------------------------------------------------------------------
        # test activate and suspend data product persistence
        #------------------------------------------------------------------------------------------------
        self.dpsc_cli.activate_data_product_persistence(dp_id)

        dp_obj = self.dpsc_cli.read_data_product(dp_id)
        self.assertIsNotNone(dp_obj)

        dataset_ids, _ = self.rrclient.find_objects(subject=dp_id,
                                                    predicate=PRED.hasDataset,
                                                    id_only=True)
        if not dataset_ids:
            raise NotFound("Data Product %s dataset does not exist" % str(dp_id))
        dataset_id = dataset_ids[0]

        # Check that the streams associated with the data product are persisted with
        stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, RT.Stream, True)
        for stream_id in stream_ids:
            self.assertTrue(self.ingestclient.is_persisted(stream_id))

        stream_id = stream_ids[0]
        route = self.pubsubcli.read_stream_route(stream_id=stream_id)

        rdt = RecordDictionaryTool(stream_definition_id=ctd_stream_def_id)
        rdt['time'] = np.arange(20)
        rdt['temp'] = np.arange(20)

        publisher = StandaloneStreamPublisher(stream_id, route)

        dataset_modified = Event()

        def cb(*args, **kwargs):
            dataset_modified.set()
        es = EventSubscriber(event_type=OT.DatasetModified, callback=cb,
                             origin=dataset_id, auto_delete=True)
        es.start()
        self.addCleanup(es.stop)

        publisher.publish(rdt.to_granule())
        self.assertTrue(dataset_modified.wait(30))

        #--------------------------------------------------------------------------------
        # Now get the data in one chunk using an RPC Call to start_retreive
        #--------------------------------------------------------------------------------
        replay_data = self.data_retriever.retrieve(dataset_ids[0])
        self.assertIsInstance(replay_data, Granule)

        log.debug("The data retriever was able to replay the dataset that was attached to the data product "
                  "we wanted to be persisted. Therefore the data product was indeed persisted with "
                  "otherwise we could not have retrieved its dataset using the data retriever. Therefore "
                  "this demonstration shows that L4-CI-SA-RQ-267 is satisfied: 'Data product management shall persist data products'")

        data_product_object = self.rrclient.read(dp_id)
        self.assertEquals(data_product_object.name, 'DP1')
        self.assertEquals(data_product_object.description, 'some new dp')

        log.debug("Towards L4-CI-SA-RQ-308: 'Data product management shall persist data product metadata'. "
                  " Attributes in create for the data product obj, name= '%s', description='%s', match those of object from the "
                  "resource registry, name='%s', desc='%s'" % (dp_obj.name, dp_obj.description, data_product_object.name, data_product_object.description))

        #------------------------------------------------------------------------------------------------
        # test suspend data product persistence
        #------------------------------------------------------------------------------------------------
        self.dpsc_cli.suspend_data_product_persistence(dp_id)

        # While suspended, a publish must NOT trigger a dataset modification.
        dataset_modified.clear()

        rdt['time'] = np.arange(20, 40)

        publisher.publish(rdt.to_granule())
        self.assertFalse(dataset_modified.wait(2))

        self.dpsc_cli.activate_data_product_persistence(dp_id)
        dataset_modified.clear()

        publisher.publish(rdt.to_granule())
        self.assertTrue(dataset_modified.wait(30))

        granule = self.data_retriever.retrieve(dataset_id)
        rdt = RecordDictionaryTool.load_from_granule(granule)
        np.testing.assert_array_almost_equal(rdt['time'], np.arange(40))

        dataset_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasDataset, id_only=True)
        self.assertEquals(len(dataset_ids), 1)

        self.dpsc_cli.suspend_data_product_persistence(dp_id)
        self.dpsc_cli.force_delete_data_product(dp_id)
        # now try to get the deleted dp object
        with self.assertRaises(NotFound):
            dp_obj = self.rrclient.read(dp_id)

    def test_lookup_values(self):
        """QC lookup documents stored via StoredValueManager should be applied
        to the 'calibrated' parameter, and updates should take effect after an
        ExternalReferencesUpdatedEvent."""
        ph = ParameterHelper(self.dataset_management, self.addCleanup)
        pdict_id = ph.create_lookups()
        stream_def_id = self.pubsubcli.create_stream_definition('lookup',
                                                                parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsubcli.delete_stream_definition, stream_def_id)

        data_product = DataProduct(name='lookup data product')
        tdom, sdom = time_series_domain()
        data_product.temporal_domain = tdom.dump()
        data_product.spatial_domain = sdom.dump()

        data_product_id = self.dpsc_cli.create_data_product(data_product,
                                                            stream_definition_id=stream_def_id)
        self.addCleanup(self.dpsc_cli.delete_data_product, data_product_id)

        data_producer = DataProducer(name='producer')
        data_producer.producer_context = DataProcessProducerContext()
        data_producer.producer_context.configuration['qc_keys'] = ['offset_document']
        data_producer_id, _ = self.rrclient.create(data_producer)
        self.addCleanup(self.rrclient.delete, data_producer_id)
        assoc, _ = self.rrclient.create_association(subject=data_product_id,
                                                    object=data_producer_id,
                                                    predicate=PRED.hasDataProducer)
        self.addCleanup(self.rrclient.delete_association, assoc)

        document_keys = self.damsclient.list_qc_references(data_product_id)
        self.assertEquals(document_keys, ['offset_document'])

        svm = StoredValueManager(self.container)
        svm.stored_value_cas('offset_document', {'offset_a': 2.0})

        self.dpsc_cli.activate_data_product_persistence(data_product_id)
        dataset_ids, _ = self.rrclient.find_objects(subject=data_product_id,
                                                    predicate=PRED.hasDataset,
                                                    id_only=True)
        dataset_id = dataset_ids[0]

        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = [0]
        rdt['temp'] = [20.]
        granule = rdt.to_granule()

        stream_ids, _ = self.rrclient.find_objects(subject=data_product_id,
                                                   predicate=PRED.hasStream,
                                                   id_only=True)
        stream_id = stream_ids[0]
        route = self.pubsubcli.read_stream_route(stream_id=stream_id)

        publisher = StandaloneStreamPublisher(stream_id, route)
        publisher.publish(granule)

        self.assertTrue(dataset_monitor.event.wait(10))

        granule = self.data_retriever.retrieve(dataset_id)
        rdt2 = RecordDictionaryTool.load_from_granule(granule)
        np.testing.assert_array_equal(rdt['temp'], rdt2['temp'])
        # temp 20.0 + offset_a 2.0 = 22.0
        np.testing.assert_array_almost_equal(rdt2['calibrated'], np.array([22.0]))

        svm.stored_value_cas('updated_document', {'offset_a': 3.0})
        dataset_monitor = DatasetMonitor(dataset_id)
        self.addCleanup(dataset_monitor.stop)

        # Notify the system that the lookup document changed.
        ep = EventPublisher(event_type=OT.ExternalReferencesUpdatedEvent)
        ep.publish_event(origin=data_product_id, reference_keys=['updated_document'])

        rdt = RecordDictionaryTool(stream_definition_id=stream_def_id)
        rdt['time'] = [1]
        rdt['temp'] = [20.]
        granule = rdt.to_granule()
        gevent.sleep(2)  # Yield so that the event goes through
        publisher.publish(granule)
        self.assertTrue(dataset_monitor.event.wait(10))

        granule = self.data_retriever.retrieve(dataset_id)
        rdt2 = RecordDictionaryTool.load_from_granule(granule)
        np.testing.assert_array_equal(rdt2['temp'], np.array([20., 20.]))
        # First sample used offset 2.0, second used the updated offset 3.0.
        np.testing.assert_array_almost_equal(rdt2['calibrated'], np.array([22.0, 23.0]))
class DiscoveryIntTest(IonIntegrationTestCase):
    """Integration tests for the Discovery service.

    Exercises resource-graph traversal, view/catalog management, and the
    search DSL (``search ... from ...``) backed by ElasticSearch. Tests that
    require a live ElasticSearch are guarded with ``@skipIf(not use_es, ...)``.
    """

    def setUp(self):
        # Boot a container with the DM deployment and grab the service clients
        # this suite drives. ES cleanup is registered first so indexes created
        # by the bootstrap below are torn down even if setUp later fails.
        super(DiscoveryIntTest, self).setUp()
        self._start_container()
        self.addCleanup(DiscoveryIntTest.es_cleanup)
        self.container.start_rel_from_url('res/deploy/r2dm.yml')
        self.discovery = DiscoveryServiceClient()
        self.catalog = CatalogManagementServiceClient()
        self.ims = IndexManagementServiceClient()
        self.rr = ResourceRegistryServiceClient()
        if use_es:
            self.es_host = CFG.get_safe('server.elasticsearch.host', 'localhost')
            self.es_port = CFG.get_safe('server.elasticsearch.port', '9200')
            # Minimal single-node ES topology for test runs: one shard, no
            # replicas, for both regular and river indexes.
            CFG.server.elasticsearch.shards = 1
            CFG.server.elasticsearch.replicas = 0
            CFG.server.elasticsearch.river_shards = 1
            CFG.server.elasticsearch.river_replicas = 0
            self.es = ep.ElasticSearch(
                host=self.es_host,
                port=self.es_port,
                timeout=10,
                verbose=True
            )
            # Run the index bootstrap in 'clean_bootstrap' mode so every test
            # starts from freshly created indexes.
            op = DotDict(CFG)
            op.op = 'clean_bootstrap'
            self.container.spawn_process('index_bootstrap','ion.processes.bootstrap.index_bootstrap','IndexBootStrap', op)

    @staticmethod
    def es_cleanup():
        """Delete all standard and system-name-scoped ES indexes and their CouchDB rivers."""
        es_host = CFG.get_safe('server.elasticsearch.host', 'localhost')
        es_port = CFG.get_safe('server.elasticsearch.port', '9200')
        es = ep.ElasticSearch(
            host=es_host,
            port=es_port,
            timeout=10
        )
        # Python 2: dict.keys() returns a list, so append() is valid here.
        indexes = STD_INDEXES.keys()
        indexes.append('%s_resources_index' % get_sys_name().lower())
        indexes.append('%s_events_index' % get_sys_name().lower())
        for index in indexes:
            IndexManagementService._es_call(es.river_couchdb_delete,index)
            IndexManagementService._es_call(es.index_delete,index)

    def poll(self, tries, callback, *args, **kwargs):
        '''
        Polling wrapper for queries.

        ElasticSearch may not index and cache the changes right away, so we may
        need a couple of tries and a little time to go by before the results
        show. Calls ``callback(*args, **kwargs)`` up to ``tries`` times with a
        0.2s pause between attempts; returns the first truthy result, or None
        if every attempt came back empty/falsy.
        '''
        for i in xrange(tries):
            retval = callback(*args, **kwargs)
            if retval:
                return retval
            time.sleep(0.2)
        return None

    def test_traversal(self):
        """Full traversal from a root resource returns every associated resource id."""
        # Build DataProcess -> Transform -> ProcessDefinition association chain.
        dp = DataProcess()
        transform = Transform()
        pd = ProcessDefinition()
        dp_id, _ = self.rr.create(dp)
        transform_id, _ = self.rr.create(transform)
        pd_id, _ = self.rr.create(pd)
        self.rr.create_association(subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.rr.create_association(subject=transform_id, object=pd_id, predicate=PRED.hasProcessDefinition)
        # Traverse from the chain root; both downstream resources must appear.
        results = self.discovery.traverse(dp_id)
        results.sort()
        correct = [pd_id, transform_id]
        correct.sort()
        self.assertTrue(results == correct, '%s' % results)

    def test_iterative_traversal(self):
        """Iterative traversal returns only resources within the requested depth."""
        dp = DataProcess()
        transform = Transform()
        pd = ProcessDefinition()
        dp_id, _ = self.rr.create(dp)
        transform_id, _ = self.rr.create(transform)
        pd_id, _ = self.rr.create(pd)
        self.rr.create_association(subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.rr.create_association(subject=transform_id, object=pd_id, predicate=PRED.hasProcessDefinition)
        # Default depth: only the immediate neighbor is reached.
        results = self.discovery.iterative_traverse(dp_id)
        results.sort()
        correct = [transform_id]
        self.assertTrue(results == correct)
        # One extra hop reaches the process definition too.
        results = self.discovery.iterative_traverse(dp_id, 1)
        results.sort()
        correct = [transform_id, pd_id]
        correct.sort()
        self.assertTrue(results == correct)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_view_crud(self):
        """Create/read/update/delete round-trip for a discovery view."""
        view_id = self.discovery.create_view('big_view',fields=['name'])
        catalog_id = self.discovery.list_catalogs(view_id)[0]
        index_ids = self.catalog.list_indexes(catalog_id)
        self.assertTrue(len(index_ids))
        view = self.discovery.read_view(view_id)
        self.assertIsInstance(view,View)
        self.assertTrue(view.name == 'big_view')
        view.name = 'not_so_big_view'
        self.discovery.update_view(view)
        view = self.discovery.read_view(view_id)
        self.assertTrue(view.name == 'not_so_big_view')
        self.discovery.delete_view(view_id)
        with self.assertRaises(NotFound):
            self.discovery.read_view(view_id)

    # NOTE(review): unlike the surrounding ES-backed tests, this method has no
    # @skipIf(not use_es, ...) guard — confirm it can run without ElasticSearch.
    def test_view_best_match(self):
        #---------------------------------------------------------------
        # Matches the best catalog available OR creates a new one
        #---------------------------------------------------------------
        catalog_id = self.catalog.create_catalog('dev', keywords=['name','model'])
        # A view with exactly the catalog's fields reuses that catalog...
        view_id = self.discovery.create_view('exact_view', fields=['name','model'])
        catalog_ids = self.discovery.list_catalogs(view_id)
        self.assertTrue(catalog_ids == [catalog_id])
        # ...as does a second view with the same field set...
        view_id = self.discovery.create_view('another_view', fields=['name','model'])
        catalog_ids = self.discovery.list_catalogs(view_id)
        self.assertTrue(catalog_ids == [catalog_id])
        # ...but a view with a different field set gets a different catalog.
        view_id = self.discovery.create_view('big_view', fields=['name'])
        catalog_ids = self.discovery.list_catalogs(view_id)
        self.assertTrue(catalog_ids != [catalog_id])

    @skipIf(not use_es, 'No ElasticSearch')
    def test_basic_searching(self):
        """Exact-match search on a custom view returns the matching resource."""
        #- - - - - - - - - - - - - - - - -
        # set up the fake resources
        #- - - - - - - - - - - - - - - - -
        instrument_pool = [
            InstrumentDevice(name='sonobuoy1', hardware_version='1'),
            InstrumentDevice(name='sonobuoy2', hardware_version='2'),
            InstrumentDevice(name='sonobuoy3', hardware_version='3')
        ]
        for instrument in instrument_pool:
            self.rr.create(instrument)
        view_id = self.discovery.create_view('devices', fields=['hardware_version'])
        search_string = "search 'hardware_version' is '2' from '%s'"%view_id
        # Poll because ES indexing is asynchronous (see self.poll).
        results = self.poll(5, self.discovery.parse,search_string)
        result = results[0]['_source']
        self.assertIsInstance(result, InstrumentDevice)
        self.assertTrue(result.name == 'sonobuoy2')
        self.assertTrue(result.hardware_version == '2')

    @skipIf(not use_es, 'No ElasticSearch')
    def test_associative_searching(self):
        """Search constrained by association ('belongs to') finds the attached device."""
        view_id = self.discovery.create_view('devices', fields=['model'])
        site_id,_ = self.rr.create(Site('my_site'))
        pd_id, _ = self.rr.create(PlatformDevice('my_device', model='abc123'))
        self.rr.create_association(subject=site_id, object=pd_id, predicate=PRED.hasDevice)
        search_string = "search 'model' is 'abc*' from '%s' and belongs to '%s'"%(view_id, site_id)
        results = self.poll(5, self.discovery.parse,search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(pd_id in results)

    # NOTE(review): no @skipIf(not use_es, ...) guard here either — verify this
    # 'belongs to ... depth N' query path works without ElasticSearch.
    def test_iterative_associative_searching(self):
        #--------------------------------------------------------------------------------
        # Tests the ability to limit the iterations
        #--------------------------------------------------------------------------------
        dp = DataProcess()
        transform = Transform()
        pd = ProcessDefinition()
        dp_id, _ = self.rr.create(dp)
        transform_id, _ = self.rr.create(transform)
        pd_id, _ = self.rr.create(pd)
        self.rr.create_association(subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.rr.create_association(subject=transform_id, object=pd_id, predicate=PRED.hasProcessDefinition)
        # depth 1: only the directly associated transform is returned.
        search_string = "belongs to '%s' depth 1" % dp_id
        results = self.poll(5, self.discovery.parse,search_string)
        results = list([i._id for i in results])
        correct = [transform_id]
        self.assertTrue(results == correct, '%s' % results)
        # depth 2: the process definition one hop further is included.
        search_string = "belongs to '%s' depth 2" % dp_id
        results = self.poll(5, self.discovery.parse,search_string)
        results = list([i._id for i in results])
        results.sort()
        correct = [transform_id, pd_id]
        correct.sort()
        self.assertTrue(results == correct)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_ranged_value_searching(self):
        """Numeric range query ('values from X to Y') matches a resource in range."""
        discovery = self.discovery
        rr = self.rr
        view_id = discovery.create_view('bank_view', fields=['cash_balance'])
        bank_id, _ = rr.create(BankAccount(name='broke', cash_balance=10))
        search_string = "search 'cash_balance' values from 0 to 100 from '%s'" % view_id
        results = self.poll(5, discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == bank_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_collections_searching(self):
        """Search restricted to a collection ('and in ...') returns only its members."""
        site_id, _ = self.rr.create(Site(name='black_mesa'))
        view_id = self.discovery.create_view('big', fields=['name'])
        # Add the site to a new collection
        collection_id = self.ims.create_collection('resource_collection', [site_id])
        search_string = "search 'name' is '*' from '%s' and in '%s'" %(view_id, collection_id)
        results = self.poll(9, self.discovery.parse,search_string)
        self.assertIsNotNone(results, 'Results not found')
        # NOTE(review): collection-scoped results are compared as bare ids here,
        # not dicts with '_id' as in the other tests — confirm this is the
        # intended return shape for 'and in' queries.
        self.assertTrue(results[0] == site_id, '%s' % results)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_search_by_name(self):
        """Wildcard search on a named view; matching is case-insensitive ('abc*' finds 'ABC123')."""
        inst_dev = InstrumentDevice(name='test_dev',serial_number='ABC123')
        dev_id, _ = self.rr.create(inst_dev)
        self.discovery.create_view('devs',fields=['name','serial_number'])
        search_string = "search 'serial_number' is 'abc*' from 'devs'"
        results = self.poll(9, self.discovery.parse,search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dev_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_search_by_name_index(self):
        """Searching directly against the built-in 'resources_index' (no custom view)."""
        inst_dev = InstrumentDevice(name='test_dev',serial_number='ABC123')
        dev_id, _ = self.rr.create(inst_dev)
        search_string = "search 'serial_number' is 'abc*' from 'resources_index'"
        results = self.poll(9, self.discovery.parse,search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dev_id)
        # Range queries also work against the built-in index.
        bank_acc = BankAccount(name='blah', cash_balance=10)
        res_id , _ = self.rr.create(bank_acc)
        search_string = "search 'cash_balance' values from 0 to 100 from 'resources_index'"
        results = self.poll(9, self.discovery.parse,search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == res_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_data_product_search(self):
        """Nested-field and flat-field searches against 'data_products_index'."""
        # Create the dataproduct
        dp = DataProduct(name='test_product')
        dp.data_format.name = 'test_signal'
        dp.data_format.description = 'test signal'
        dp.data_format.character_set = 'utf8'
        dp.data_format.nominal_sampling_rate_maximum = '44000'
        dp.data_format.nominal_sampling_rate_minimum = '44000'
        dp.data_product_level = 'basic'
        dp_id, _ = self.rr.create(dp)
        # Nested field (dotted path) search.
        search_string = "search 'data_format.name' is 'test_signal' from 'data_products_index'"
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)
        # Top-level field search.
        search_string = "search 'data_product_level' is 'basic' from 'data_products_index'"
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)
        search_string = "search 'data_format.character_set' is 'utf8' from 'data_products_index'"
        results = self.poll(9, self.discovery.parse, search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == dp_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_events_search(self):
        """Resource creation emits an event that is findable in 'events_index' by origin."""
        # Create a resource to force a new event
        dp = DataProcess()
        dp_id, rev = self.rr.create(dp)
        # Keywords are case-insensitive: uppercase SEARCH/IS/FROM parse too.
        search_string = "SEARCH 'origin' IS '%s' FROM 'events_index'" % dp_id
        results = self.poll(9, self.discovery.parse,search_string)
        origin_type = results[0]['_source'].origin_type
        origin_id = results[0]['_source'].origin
        self.assertTrue(origin_type == RT.DataProcess)
        self.assertTrue(origin_id == dp_id)

    @skipIf(not use_es, 'No ElasticSearch')
    def test_geo_distance_search(self):
        """Geo distance query: a default-located device falls within 20 km of (0, 0)."""
        pd = PlatformDevice(name='test_dev')
        pd_id, _ = self.rr.create(pd)
        search_string = "search 'nominal_location' geo distance 20 km from lat 0 lon 0 from 'devices_index'"
        results = self.poll(9, self.discovery.parse,search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == pd_id)
        self.assertTrue(results[0]['_source'].name == 'test_dev')

    @skipIf(not use_es, 'No ElasticSearch')
    def test_geo_bbox_search(self):
        """Geo bounding-box query: a device at (5, 5) falls inside the (10,0)-(0,10) box."""
        pd = PlatformDevice(name='test_dev')
        pd.nominal_location.lat = 5
        pd.nominal_location.lon = 5
        pd_id, _ = self.rr.create(pd)
        search_string = "search 'nominal_location' geo box top-left lat 10 lon 0 bottom-right lat 0 lon 10 from 'devices_index'"
        results = self.poll(9, self.discovery.parse,search_string)
        self.assertIsNotNone(results, 'Results not found')
        self.assertTrue(results[0]['_id'] == pd_id)
        self.assertTrue(results[0]['_source'].name == 'test_dev')
class TestResourceRegistryAttachments(IonIntegrationTestCase):
    """Sanity checks for resource-registry attachment (BLOB) handling plus a
    smoke test of basic resource CRUD and association round-trips."""

    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        #container = Container()
        #print 'starting container'
        #container.start()
        #print 'started container'
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.RR = ResourceRegistryServiceClient(node=self.container.node)
        print 'started services'

    def test_resource_registry_blob_sanity(self):
        """Create a resource with a BLOB attachment, read it back, mutate the
        resource, exercise associations, then clean everything up."""
        resource_id, _ = self.RR.create(IonObject(RT.Resource, name="foo"))
        MY_CONTENT = "the quick brown fox etc etc etc"
        #save
        att_id = self.RR.create_attachment(resource_id, IonObject(RT.Attachment, name="test.txt", content=MY_CONTENT, content_type="text/plain", keywords=["test1", "test2"], attachment_type=AttachmentType.BLOB))
        #load
        attachment = self.RR.read_attachment(att_id, include_content=True)
        self.assertEqual("test.txt", attachment.name)
        self.assertEqual("text/plain", attachment.content_type)
        self.assertIn("test1", attachment.keywords)
        self.assertIn("test2", attachment.keywords)
        # Round-trip check: the content read back equals what was stored
        # (presumably stored base64-encoded internally, with the read path
        # decoding it — the assertion below shows the caller never sees the
        # encoded form).
        self.assertEqual(MY_CONTENT, attachment.content)
        # Basic resource read/update/read round-trip.
        obj = self.RR.read(resource_id)
        self.assertEqual(obj.name, "foo")
        obj.name = "TheDudeAbides"
        obj = self.RR.update(obj)
        obj = self.RR.read(resource_id)
        self.assertEqual(obj.name, "TheDudeAbides")
        att = self.RR.find_attachments(resource_id)
        self.assertNotEqual(att, None)
        # Association round-trip: ActorIdentity --hasInfo--> UserInfo.
        actor_identity_obj = IonObject("ActorIdentity", name="name")
        actor_identity_obj_id, actor_identity_obj_rev = self.RR.create(actor_identity_obj)
        user_info_obj = IonObject("UserInfo", name="name")
        user_info_obj_id, user_info_obj_rev = self.RR.create(user_info_obj)
        assoc_id, assoc_rev = self.RR.create_association(actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertNotEqual(assoc_id, None)
        find_assoc = self.RR.find_associations(actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(find_assoc[0]._id == assoc_id)
        # NOTE(review): subj is never asserted on — the call only exercises
        # find_subjects; consider asserting it contains actor_identity_obj_id.
        subj = self.RR.find_subjects(RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, True)
        res_obj1 = self.RR.read_object(actor_identity_obj_id, PRED.hasInfo, RT.UserInfo)
        self.assertEquals(res_obj1._id, user_info_obj_id)
        # Cleanup: association, attachment, then the resource itself.
        self.RR.delete_association(assoc_id)
        self.RR.delete_attachment(att_id)
        self.RR.delete(resource_id)
class TestIntExternalObservatoryAgentService(IonIntegrationTestCase):
    """Integration tests for the External Observatory Agent service.

    setUp registers two external datasets directly in the resource registry —
    an NCOM DAP dataset (``self.ncom_ds_id``) and a UCSD HFR dataset
    (``self.hfr_ds_id``) — then the tests spawn EOA workers against them and
    execute agent commands.

    BUG FIX applied below: the original result-type checks were written as
    ``self.assertTrue(type(ret.result[0]), dict)``, which passes ``dict`` as
    the failure *message* and asserts on ``type(...)`` (always truthy), so
    they could never fail. They are now ``assertIsInstance`` calls that
    actually verify the result type.
    """

    def setUp(self):
        self._start_container()
#        self.container.start_rel_from_url(rel_url='res/deploy/r2deploy.yml')
        self.container.start_rel_from_url(rel_url='res/deploy/r2eoi.yml')
        self.eoas_cli = ExternalObservatoryAgentServiceClient()
        self.rr_cli = ResourceRegistryServiceClient()
        self.pubsub_cli = PubsubManagementServiceClient()
        self.dams_cli = DataAcquisitionManagementServiceClient()
        self._setup_ncom()
        self._setup_hfr()

    def _setup_ncom(self):
        """Register the NCOM test dataset (DAP) and its provider/source/model,
        wiring the associations by hand; sets self.ncom_ds_id."""
        # TODO: some or all of this (or some variation) should move to DAMS
        # Create and register the necessary resources/objects

        # Create DataProvider
        dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
#        dprov.institution.name = "OOI CGSN"
        dprov.contact.name = "Robert Weller"
        dprov.contact.email = "*****@*****.**"

        # Create DataSource
        dsrc = DataSource(protocol_type="DAP", institution=Institution(), contact=ContactInformation())
#        dsrc.connection_params["base_data_url"] = "http://ooi.whoi.edu/thredds/dodsC/"
        dsrc.connection_params["base_data_url"] = ""
        dsrc.contact.name="Rich Signell"
        dsrc.contact.email = "*****@*****.**"

        # Create ExternalDataset pointing at a local test file (not the live URL).
        dset = ExternalDataset(name="test", dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation())
#        dset.dataset_description.parameters["dataset_path"] = "ooi/AS02CPSM_R_M.nc"
        dset.dataset_description.parameters["dataset_path"] = "test_data/ncom.nc"
        dset.dataset_description.parameters["temporal_dimension"] = "time"
        dset.dataset_description.parameters["zonal_dimension"] = "lon"
        dset.dataset_description.parameters["meridional_dimension"] = "lat"

        # Create ExternalDataSourceModel naming the DAP handler implementation.
        dsrc_model = ExternalDataSourceModel(name="dap_model")
        dsrc_model.model = "DAP"
        dsrc_model.data_handler_module = "eoi.agent.handler.dap_external_data_handler"
        dsrc_model.data_handler_class = "DapExternalDataHandler"

        ## Run everything through DAMS
        #TODO: Uncomment when CRUD methods in DAMS are implemented
#        self.ncom_ds_id = self.dams_cli.create_external_dataset(external_dataset=dset)
#        ext_dprov_id = self.dams_cli.create_external_data_provider(external_data_provider=dprov)
#        ext_dsrc_id = self.dams_cli.create_data_source(data_source=dsrc)
        self.ncom_ds_id, _ = self.rr_cli.create(dset)
        ext_dprov_id, _ = self.rr_cli.create(dprov)
        ext_dsrc_id, _ = self.rr_cli.create(dsrc)
        #TODO: this needs to be added to DAMS
        ext_dsrc_model_id, _ = self.rr_cli.create(dsrc_model)

        ## Associate everything
        self.rr_cli.create_association(self.ncom_ds_id, PRED.hasSource, ext_dsrc_id)
        log.debug("Associated ExternalDataset %s with DataSource %s" % (self.ncom_ds_id, ext_dsrc_id))
        self.rr_cli.create_association(ext_dsrc_id, PRED.hasProvider, ext_dprov_id)
        log.debug("Associated DataSource %s with ExternalDataProvider %s" % (ext_dsrc_id, ext_dprov_id))
        self.rr_cli.create_association(ext_dsrc_id, PRED.hasModel, ext_dsrc_model_id)
        log.debug("Associated DataSource %s with ExternalDataSourceModel %s" % (ext_dsrc_id, ext_dsrc_model_id))

        data_prod_id = self.dams_cli.register_external_data_set(self.ncom_ds_id)
        log.debug("Registered ExternalDataset {%s}: DataProducer ID = %s" % (self.ncom_ds_id, data_prod_id))

    def _setup_hfr(self):
        """Register the UCSD HFR test dataset (DAP) and its provider/source/model;
        sets self.hfr_ds_id. Mirrors _setup_ncom."""
        # TODO: some or all of this (or some variation) should move to DAMS
        # Create and register the necessary resources/objects

        # Create DataProvider
        dprov = ExternalDataProvider(institution=Institution(), contact=ContactInformation())
#        dprov.institution.name = "HFR UCSD"

        # Create DataSource
        dsrc = DataSource(protocol_type="DAP", institution=Institution(), contact=ContactInformation())
        dsrc.connection_params["base_data_url"] = "http://hfrnet.ucsd.edu:8080/thredds/dodsC/"

        # Create ExternalDataset
        dset = ExternalDataset(name="UCSD HFR", dataset_description=DatasetDescription(), update_description=UpdateDescription(), contact=ContactInformation())
        dset.dataset_description.parameters["dataset_path"] = "HFRNet/USEGC/6km/hourly/RTV"
        dset.dataset_description.parameters["temporal_dimension"] = "time"
        dset.dataset_description.parameters["zonal_dimension"] = "lon"
        dset.dataset_description.parameters["meridional_dimension"] = "lat"

        # Create ExternalDataSourceModel
        dsrc_model = ExternalDataSourceModel(name="dap_model")
        dsrc_model.model = "DAP"
        dsrc_model.data_handler_module = "eoi.agent.handler.dap_external_data_handler"
        dsrc_model.data_handler_class = "DapExternalDataHandler"

        ## Run everything through DAMS
        #TODO: Uncomment when CRUD methods in DAMS are implemented
#        self.ncom_ds_id = self.dams_cli.create_external_dataset(external_dataset=dset)
#        ext_dprov_id = self.dams_cli.create_external_data_provider(external_data_provider=dprov)
#        ext_dsrc_id = self.dams_cli.create_data_source(data_source=dsrc)
        self.hfr_ds_id, _ = self.rr_cli.create(dset)
        ext_dprov_id, _ = self.rr_cli.create(dprov)
        ext_dsrc_id, _ = self.rr_cli.create(dsrc)
        #TODO: this needs to be added to DAMS
        ext_dsrc_model_id, _ = self.rr_cli.create(dsrc_model)

        self.rr_cli.create_association(self.hfr_ds_id, PRED.hasSource, ext_dsrc_id)
        log.debug("Associated ExternalDataset %s with DataSource %s" % (self.hfr_ds_id, ext_dsrc_id))
        self.rr_cli.create_association(ext_dsrc_id, PRED.hasProvider, ext_dprov_id)
        log.debug("Associated DataSource %s with ExternalDataProvider %s" % (ext_dsrc_id, ext_dprov_id))
        self.rr_cli.create_association(ext_dsrc_id, PRED.hasModel, ext_dsrc_model_id)
        log.debug("Associated DataSource %s with ExternalDataSourceModel %s" % (ext_dsrc_id, ext_dsrc_model_id))

        data_prod_id = self.dams_cli.register_external_data_set(self.hfr_ds_id)
        log.debug("Registered ExternalDataset {%s}: DataProducer ID = %s" % (self.hfr_ds_id, data_prod_id))

    ########## Tests ##########

#    @unittest.skip("")
    def test_spawn_worker(self):
        """A spawned worker is named '<dataset_id>_worker'."""
        log.debug("test_spawn_worker with ds_id: %s" % self.ncom_ds_id)
        proc_name, pid, queue_id = self.eoas_cli.spawn_worker(self.ncom_ds_id)
        log.debug("proc_name: %s\tproc_id: %s\tqueue_id: %s" % (proc_name, pid, queue_id))
        self.assertEquals(proc_name, self.ncom_ds_id+'_worker')

    @unittest.skip("Underlying method not yet implemented")
    def test_get_worker(self, resource_id=''):
        # NOTE: the extra resource_id parameter is unused (defaulted so the
        # unittest runner can still call this with no arguments).
        log.debug("test_spawn_worker with ds_id: %s" % self.ncom_ds_id)
        proc_name, pid, queue_id = self.eoas_cli.spawn_worker(self.ncom_ds_id)
        log.debug("proc_name: %s\tproc_id: %s\tqueue_id: %s" % (proc_name, pid, queue_id))
        with self.assertRaises(IonException):
            self.eoas_cli.get_worker(resource_id=self.ncom_ds_id)

    @unittest.skip("Underlying method not yet implemented")
    def test_get_capabilities(self):
        log.debug("test_spawn_worker with ds_id: %s" % self.ncom_ds_id)
        proc_name, pid, queue_id = self.eoas_cli.spawn_worker(self.ncom_ds_id)
        log.debug("proc_name: %s\tproc_id: %s\tqueue_id: %s" % (proc_name, pid, queue_id))
        with self.assertRaises(IonException):
            self.eoas_cli.get_capabilities()

#    @unittest.skip("")
    def test_execute_single_worker(self):
        """Execute get_attributes/get_signature agent commands through one worker."""
        ds_id = self.ncom_ds_id
        log.debug("test_spawn_worker with ds_id: %s" % ds_id)
        proc_name, pid, queue_id = self.eoas_cli.spawn_worker(ds_id)
        log.debug("proc_name: %s\tproc_id: %s\tqueue_id: %s" % (proc_name, pid, queue_id))
#        with self.assertRaises(IonException):
#            self.eoas_cli.execute()
        cmd = AgentCommand(command_id="111", command="get_attributes", kwargs={"ds_id":ds_id})
        log.debug("Execute AgentCommand: %s" % cmd)
        ret = self.eoas_cli.execute(command=cmd)
        log.debug("Returned: %s" % ret)
        self.assertEquals(ret.status, "SUCCESS")
        # Was assertTrue(type(...), dict) — always-true; now a real type check.
        self.assertIsInstance(ret.result[0], dict)

        cmd = AgentCommand(command_id="112", command="get_signature", kwargs={"ds_id":ds_id})
        log.debug("Execute AgentCommand: %s" % cmd)
        ret = self.eoas_cli.execute(command=cmd)
        log.debug("Returned: %s" % ret)
        self.assertEquals(ret.status, "SUCCESS")
        self.assertIsInstance(ret.result[0], dict)

#    @unittest.skip("")
    def test_execute_multi_worker(self):
        """Spawn workers for both datasets and run get_signature through each."""
        log.debug("test_spawn_worker #1 with ds_id: %s" % self.ncom_ds_id)
        proc_name, pid, queue_id = self.eoas_cli.spawn_worker(self.ncom_ds_id)
        log.debug("proc_name: %s\tproc_id: %s\tqueue_id: %s" % (proc_name, pid, queue_id))

        log.debug("test_spawn_worker #2 with ds_id: %s" % self.hfr_ds_id)
        proc_name, pid, queue_id = self.eoas_cli.spawn_worker(self.hfr_ds_id)
        log.debug("proc_name: %s\tproc_id: %s\tqueue_id: %s" % (proc_name, pid, queue_id))

        cmd = AgentCommand(command_id="112", command="get_signature", kwargs={"ds_id":self.ncom_ds_id})
        log.debug("Execute AgentCommand: %s" % cmd)
        ret = self.eoas_cli.execute(command=cmd)
        log.debug("Returned: %s" % ret)
        self.assertEquals(ret.status, "SUCCESS")
        # Was assertTrue(type(...), dict) — always-true; now a real type check.
        self.assertIsInstance(ret.result[0], dict)

        cmd = AgentCommand(command_id="112", command="get_signature", kwargs={"ds_id":self.hfr_ds_id})
        log.debug("Execute AgentCommand: %s" % cmd)
        ret = self.eoas_cli.execute(command=cmd)
        log.debug("Returned: %s" % ret)
        self.assertEquals(ret.status, "SUCCESS")
        self.assertIsInstance(ret.result[0], dict)

#    @unittest.skip("")
    def test_execute_acquire_data(self):
        """acquire_data agent command succeeds against the NCOM dataset worker."""
        ds_id = self.ncom_ds_id
        log.debug("test_spawn_worker with ds_id: %s" % ds_id)
        proc_name, pid, queue_id = self.eoas_cli.spawn_worker(ds_id)
        log.debug("proc_name: %s\tproc_id: %s\tqueue_id: %s" % (proc_name, pid, queue_id))
        cmd = AgentCommand(command_id="113", command="acquire_data", kwargs={"ds_id":ds_id})
        log.debug("Execute AgentCommand: %s" % cmd)
        ret = self.eoas_cli.execute(command=cmd)
        log.debug("Returned: %s" % ret)
        self.assertEquals(ret.status, "SUCCESS")

    @unittest.skip("Underlying method not yet implemented")
    def test_set_param(self):
        log.debug("test_spawn_worker with ds_id: %s" % self.ncom_ds_id)
        proc_name, pid, queue_id = self.eoas_cli.spawn_worker(self.ncom_ds_id)
        log.debug("proc_name: %s\tproc_id: %s\tqueue_id: %s" % (proc_name, pid, queue_id))
        with self.assertRaises(IonException):
            self.eoas_cli.set_param(name="param", value="value")

    @unittest.skip("Underlying method not yet implemented")
    def test_get_param(self):
        log.debug("test_spawn_worker with ds_id: %s" % self.ncom_ds_id)
        proc_name, pid, queue_id = self.eoas_cli.spawn_worker(self.ncom_ds_id)
        log.debug("proc_name: %s\tproc_id: %s\tqueue_id: %s" % (proc_name, pid, queue_id))
        with self.assertRaises(IonException):
            self.eoas_cli.get_param(name="param")

    @unittest.skip("Underlying method not yet implemented")
    def test_execute_agent(self):
        log.debug("test_spawn_worker with ds_id: %s" % self.ncom_ds_id)
        proc_name, pid, queue_id = self.eoas_cli.spawn_worker(self.ncom_ds_id)
        log.debug("proc_name: %s\tproc_id: %s\tqueue_id: %s" % (proc_name, pid, queue_id))
        with self.assertRaises(IonException):
            self.eoas_cli.execute_agent()

    @unittest.skip("Underlying method not yet implemented")
    def test_set_agent_param(self):
        log.debug("test_spawn_worker with ds_id: %s" % self.ncom_ds_id)
        proc_name, pid, queue_id = self.eoas_cli.spawn_worker(self.ncom_ds_id)
        log.debug("proc_name: %s\tproc_id: %s\tqueue_id: %s" % (proc_name, pid, queue_id))
        with self.assertRaises(IonException):
            self.eoas_cli.set_agent_param(name="param", value="value")

    @unittest.skip("Underlying method not yet implemented")
    def test_get_agent_param(self):
        log.debug("test_spawn_worker with ds_id: %s" % self.ncom_ds_id)
        proc_name, pid, queue_id = self.eoas_cli.spawn_worker(self.ncom_ds_id)
        log.debug("proc_name: %s\tproc_id: %s\tqueue_id: %s" % (proc_name, pid, queue_id))
        with self.assertRaises(IonException):
            self.eoas_cli.get_agent_param(name="param")
class TestInstrumentManagementServiceIntegration(IonIntegrationTestCase): def setUp(self): # Start container #print 'instantiating container' self._start_container() #container = Container() #print 'starting container' #container.start() #print 'started container' unittest # suppress an pycharm inspector error if all unittest.skip references are commented out self.container.start_rel_from_url('res/deploy/r2deploy.yml') self.RR = ResourceRegistryServiceClient(node=self.container.node) self.IMS = InstrumentManagementServiceClient(node=self.container.node) self.IDS = IdentityManagementServiceClient(node=self.container.node) self.PSC = PubsubManagementServiceClient(node=self.container.node) self.DP = DataProductManagementServiceClient(node=self.container.node) self.DAMS = DataAcquisitionManagementServiceClient( node=self.container.node) self.DSC = DatasetManagementServiceClient(node=self.container.node) self.PDC = ProcessDispatcherServiceClient(node=self.container.node) self.OMS = ObservatoryManagementServiceClient(node=self.container.node) self.RR2 = EnhancedResourceRegistryClient(self.RR) # @unittest.skip('this test just for debugging setup') # def test_just_the_setup(self): # return @attr('EXT') def test_resources_associations_extensions(self): """ create one of each resource and association used by IMS to guard against problems in ion-definitions """ #stuff we control instrument_agent_instance_id, _ = self.RR.create( any_old(RT.InstrumentAgentInstance)) instrument_agent_id, _ = self.RR.create(any_old(RT.InstrumentAgent)) instrument_model_id, _ = self.RR.create(any_old(RT.InstrumentModel)) instrument_device_id, _ = self.RR.create(any_old(RT.InstrumentDevice)) instrument_site_id, _ = self.RR.create(any_old(RT.InstrumentSite)) platform_agent_instance_id, _ = self.RR.create( any_old(RT.PlatformAgentInstance)) platform_agent_id, _ = self.RR.create(any_old(RT.PlatformAgent)) platform_site_id, _ = self.RR.create(any_old(RT.PlatformSite)) platform_device_id, _ = 
self.RR.create(any_old(RT.PlatformDevice)) platform_model_id, _ = self.RR.create(any_old(RT.PlatformModel)) sensor_device_id, _ = self.RR.create(any_old(RT.SensorDevice)) sensor_model_id, _ = self.RR.create(any_old(RT.SensorModel)) #stuff we associate to data_producer_id, _ = self.RR.create(any_old(RT.DataProducer)) org_id, _ = self.RR.create(any_old(RT.Org)) #instrument_agent_instance_id #is only a target #instrument_agent self.RR.create_association(instrument_agent_id, PRED.hasModel, instrument_model_id) self.RR.create_association(instrument_agent_instance_id, PRED.hasAgentDefinition, instrument_agent_id) #instrument_device self.RR.create_association(instrument_device_id, PRED.hasModel, instrument_model_id) self.RR.create_association(instrument_device_id, PRED.hasAgentInstance, instrument_agent_instance_id) self.RR.create_association(instrument_device_id, PRED.hasDataProducer, data_producer_id) self.RR.create_association(instrument_device_id, PRED.hasDevice, sensor_device_id) self.RR.create_association(org_id, PRED.hasResource, instrument_device_id) instrument_model_id #is only a target platform_agent_instance_id #is only a target #platform_agent self.RR.create_association(platform_agent_id, PRED.hasModel, platform_model_id) self.RR.create_association(platform_agent_instance_id, PRED.hasAgentDefinition, platform_agent_id) #platform_device self.RR.create_association(platform_device_id, PRED.hasModel, platform_model_id) self.RR.create_association(platform_device_id, PRED.hasAgentInstance, platform_agent_instance_id) self.RR.create_association(platform_device_id, PRED.hasDevice, instrument_device_id) self.RR.create_association(instrument_site_id, PRED.hasDevice, instrument_device_id) self.RR.create_association(platform_site_id, PRED.hasDevice, platform_device_id) self.RR.create_association(platform_site_id, PRED.hasSite, instrument_site_id) platform_model_id #is only a target #sensor_device self.RR.create_association(sensor_device_id, PRED.hasModel, sensor_model_id) 
self.RR.create_association(sensor_device_id, PRED.hasDevice, instrument_device_id) sensor_model_id #is only a target #create a parsed product for this instrument output tdom, sdom = time_series_domain() tdom = tdom.dump() sdom = sdom.dump() dp_obj = IonObject(RT.DataProduct, name='the parsed data', description='ctd stream test', processing_level_code='Parsed_Canonical', temporal_domain=tdom, spatial_domain=sdom) pdict_id = self.DSC.read_parameter_dictionary_by_name( 'ctd_parsed_param_dict', id_only=True) parsed_stream_def_id = self.PSC.create_stream_definition( name='parsed', parameter_dictionary_id=pdict_id) data_product_id1 = self.DP.create_data_product( data_product=dp_obj, stream_definition_id=parsed_stream_def_id) log.debug('new dp_id = %s', data_product_id1) self.DAMS.assign_data_product(input_resource_id=instrument_device_id, data_product_id=data_product_id1) def addInstOwner(inst_id, subject): actor_identity_obj = any_old(RT.ActorIdentity, {"name": subject}) user_id = self.IDS.create_actor_identity(actor_identity_obj) user_info_obj = any_old(RT.UserInfo) user_info_id = self.IDS.create_user_info(user_id, user_info_obj) self.RR.create_association(inst_id, PRED.hasOwner, user_id) #Testing multiple instrument owners addInstOwner( instrument_device_id, "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254") addInstOwner( instrument_device_id, "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Bob Cumbers A256") extended_instrument = self.IMS.get_instrument_device_extension( instrument_device_id) self.assertEqual(instrument_device_id, extended_instrument._id) self.assertEqual(len(extended_instrument.owners), 2) self.assertEqual(extended_instrument.instrument_model._id, instrument_model_id) # Lifecycle self.assertEquals(len(extended_instrument.lcstate_transitions), 5) self.assertEquals( set(extended_instrument.lcstate_transitions.keys()), set(['develop', 'deploy', 'retire', 'plan', 'integrate'])) 
self.assertEquals(len(extended_instrument.availability_transitions), 2) self.assertEquals( set(extended_instrument.availability_transitions.keys()), set(['enable', 'announce'])) # Verify that computed attributes exist for the extended instrument self.assertIsInstance( extended_instrument.computed.last_data_received_datetime, ComputedFloatValue) self.assertIsInstance(extended_instrument.computed.uptime, ComputedStringValue) self.assertIsInstance( extended_instrument.computed.power_status_roll_up, ComputedIntValue) self.assertIsInstance( extended_instrument.computed.communications_status_roll_up, ComputedIntValue) self.assertIsInstance(extended_instrument.computed.data_status_roll_up, ComputedIntValue) self.assertIsInstance( extended_instrument.computed.location_status_roll_up, ComputedIntValue) log.debug("extended_instrument.computed: %s", extended_instrument.computed) #check model inst_model_obj = self.RR.read(instrument_model_id) self.assertEqual(inst_model_obj.name, extended_instrument.instrument_model.name) #check agent instance inst_agent_instance_obj = self.RR.read(instrument_agent_instance_id) self.assertEqual(inst_agent_instance_obj.name, extended_instrument.agent_instance.name) #check agent inst_agent_obj = self.RR.read(instrument_agent_id) #compound assoc return list of lists so check the first element self.assertEqual(inst_agent_obj.name, extended_instrument.instrument_agent.name) #check platform device plat_device_obj = self.RR.read(platform_device_id) self.assertEqual(plat_device_obj.name, extended_instrument.platform_device.name) extended_platform = self.IMS.get_platform_device_extension( platform_device_id) self.assertEqual(1, len(extended_platform.portals)) self.assertEqual(1, len(extended_platform.portal_instruments)) #self.assertEqual(1, len(extended_platform.computed.portal_status.value)) # no agent started so NO statuses reported self.assertEqual(1, len(extended_platform.instrument_devices)) self.assertEqual(instrument_device_id, 
extended_platform.instrument_devices[0]._id) self.assertEqual(1, len(extended_platform.instrument_models)) self.assertEqual(instrument_model_id, extended_platform.instrument_models[0]._id) self.assertEquals(extended_platform.platform_agent._id, platform_agent_id) self.assertEquals(len(extended_platform.lcstate_transitions), 5) self.assertEquals( set(extended_platform.lcstate_transitions.keys()), set(['develop', 'deploy', 'retire', 'plan', 'integrate'])) self.assertEquals(len(extended_platform.availability_transitions), 2) self.assertEquals( set(extended_platform.availability_transitions.keys()), set(['enable', 'announce'])) #check sensor devices self.assertEqual(1, len(extended_instrument.sensor_devices)) #check data_product_parameters_set self.assertEqual( ComputedValueAvailability.PROVIDED, extended_instrument.computed.data_product_parameters_set.status) self.assertTrue('Parsed_Canonical' in extended_instrument.computed. data_product_parameters_set.value) # the ctd parameters should include 'temp' self.assertTrue('temp' in extended_instrument.computed. 
data_product_parameters_set.value['Parsed_Canonical']) #none of these will work because there is no agent # self.assertEqual(ComputedValueAvailability.NOTAVAILABLE, # extended_instrument.computed.firmware_version.status) # self.assertEqual(ComputedValueAvailability.NOTAVAILABLE, # extended_instrument.computed.operational_state.status) # self.assertEqual(ComputedValueAvailability.PROVIDED, # extended_instrument.computed.power_status_roll_up.status) # self.assertEqual(ComputedValueAvailability.PROVIDED, # extended_instrument.computed.communications_status_roll_up.status) # self.assertEqual(ComputedValueAvailability.PROVIDED, # extended_instrument.computed.data_status_roll_up.status) # self.assertEqual(DeviceStatusType.STATUS_OK, # extended_instrument.computed.data_status_roll_up.value) # self.assertEqual(ComputedValueAvailability.PROVIDED, # extended_instrument.computed.location_status_roll_up.status) # self.assertEqual(ComputedValueAvailability.PROVIDED, # extended_instrument.computed.recent_events.status) # self.assertEqual([], extended_instrument.computed.recent_events.value) # cleanup c = DotDict() c.resource_registry = self.RR self.RR2.pluck(instrument_agent_id) self.RR2.pluck(instrument_model_id) self.RR2.pluck(instrument_device_id) self.RR2.pluck(platform_agent_id) self.RR2.pluck(sensor_device_id) self.IMS.force_delete_instrument_agent(instrument_agent_id) self.IMS.force_delete_instrument_model(instrument_model_id) self.IMS.force_delete_instrument_device(instrument_device_id) self.IMS.force_delete_platform_agent_instance( platform_agent_instance_id) self.IMS.force_delete_platform_agent(platform_agent_id) self.OMS.force_delete_instrument_site(instrument_site_id) self.OMS.force_delete_platform_site(platform_site_id) self.IMS.force_delete_platform_device(platform_device_id) self.IMS.force_delete_platform_model(platform_model_id) self.IMS.force_delete_sensor_device(sensor_device_id) self.IMS.force_delete_sensor_model(sensor_model_id) #stuff we associate to 
self.RR.delete(data_producer_id) self.RR.delete(org_id) def test_custom_attributes(self): """ Test assignment of custom attributes """ instModel_obj = IonObject(OT.CustomAttribute, name='SBE37IMModelAttr', description="model custom attr") instrument_model_id, _ = self.RR.create( any_old(RT.InstrumentModel, {"custom_attributes": [instModel_obj]})) instrument_device_id, _ = self.RR.create( any_old( RT.InstrumentDevice, { "custom_attributes": { "favorite_color": "red", "bogus_attr": "should raise warning" } })) self.IMS.assign_instrument_model_to_instrument_device( instrument_model_id, instrument_device_id) # cleanup self.IMS.force_delete_instrument_device(instrument_device_id) self.IMS.force_delete_instrument_model(instrument_model_id) def _get_datastore(self, dataset_id): dataset = self.DSC.read_dataset(dataset_id) datastore_name = dataset.datastore_name datastore = self.container.datastore_manager.get_datastore( datastore_name, DataStore.DS_PROFILE.SCIDATA) return datastore def test_data_producer(self): idevice_id = self.IMS.create_instrument_device( any_old(RT.InstrumentDevice)) self.assertEqual( 1, len( self.RR2. find_data_producer_ids_of_instrument_device_using_has_data_producer( idevice_id))) pdevice_id = self.IMS.create_platform_device(any_old( RT.PlatformDevice)) self.assertEqual( 1, len( self.RR2. 
find_data_producer_ids_of_platform_device_using_has_data_producer( pdevice_id))) @attr('PREP') def test_prepare_resource_support(self): """ create one of each resource and association used by IMS to guard against problems in ion-definitions """ #stuff we control instrument_agent_instance_id, _ = self.RR.create( any_old(RT.InstrumentAgentInstance)) instrument_agent_id, _ = self.RR.create(any_old(RT.InstrumentAgent)) instrument_model_id, _ = self.RR.create(any_old(RT.InstrumentModel)) instrument_device_id, _ = self.RR.create(any_old(RT.InstrumentDevice)) platform_agent_instance_id, _ = self.RR.create( any_old(RT.PlatformAgentInstance)) platform_agent_id, _ = self.RR.create(any_old(RT.PlatformAgent)) platform_device_id, _ = self.RR.create(any_old(RT.PlatformDevice)) platform_model_id, _ = self.RR.create(any_old(RT.PlatformModel)) sensor_device_id, _ = self.RR.create(any_old(RT.SensorDevice)) sensor_model_id, _ = self.RR.create(any_old(RT.SensorModel)) instrument_device2_id, _ = self.RR.create(any_old(RT.InstrumentDevice)) instrument_device3_id, _ = self.RR.create(any_old(RT.InstrumentDevice)) platform_device2_id, _ = self.RR.create(any_old(RT.PlatformDevice)) sensor_device2_id, _ = self.RR.create(any_old(RT.SensorDevice)) #stuff we associate to data_producer_id, _ = self.RR.create(any_old(RT.DataProducer)) org_id, _ = self.RR.create(any_old(RT.Org)) #instrument_agent_instance_id #is only a target #instrument_agent self.RR.create_association(instrument_agent_id, PRED.hasModel, instrument_model_id) self.RR.create_association(instrument_agent_instance_id, PRED.hasAgentDefinition, instrument_agent_id) #instrument_device self.RR.create_association(instrument_device_id, PRED.hasModel, instrument_model_id) self.RR.create_association(instrument_device_id, PRED.hasAgentInstance, instrument_agent_instance_id) self.RR.create_association(instrument_device_id, PRED.hasDataProducer, data_producer_id) self.RR.create_association(instrument_device_id, PRED.hasDevice, sensor_device_id) 
self.RR.create_association(org_id, PRED.hasResource, instrument_device_id) self.RR.create_association(instrument_device2_id, PRED.hasModel, instrument_model_id) self.RR.create_association(org_id, PRED.hasResource, instrument_device2_id) instrument_model_id #is only a target platform_agent_instance_id #is only a target #platform_agent self.RR.create_association(platform_agent_id, PRED.hasModel, platform_model_id) self.RR.create_association(platform_agent_instance_id, PRED.hasAgentDefinition, platform_agent_id) #platform_device self.RR.create_association(platform_device_id, PRED.hasModel, platform_model_id) self.RR.create_association(platform_device_id, PRED.hasAgentInstance, platform_agent_instance_id) self.RR.create_association(platform_device_id, PRED.hasDevice, instrument_device_id) self.RR.create_association(platform_device2_id, PRED.hasModel, platform_model_id) self.RR.create_association(platform_device2_id, PRED.hasDevice, instrument_device2_id) platform_model_id #is only a target #sensor_device self.RR.create_association(sensor_device_id, PRED.hasModel, sensor_model_id) self.RR.create_association(sensor_device_id, PRED.hasDevice, instrument_device_id) self.RR.create_association(sensor_device2_id, PRED.hasModel, sensor_model_id) self.RR.create_association(sensor_device2_id, PRED.hasDevice, instrument_device2_id) sensor_model_id #is only a target #set lcstate - used for testing prepare - not setting all to DEVELOP, only some self.RR.execute_lifecycle_transition(instrument_agent_id, LCE.DEVELOP) self.RR.execute_lifecycle_transition(instrument_device_id, LCE.DEVELOP) self.RR.execute_lifecycle_transition(instrument_device2_id, LCE.DEVELOP) self.RR.execute_lifecycle_transition(platform_device_id, LCE.DEVELOP) self.RR.execute_lifecycle_transition(platform_device2_id, LCE.DEVELOP) self.RR.execute_lifecycle_transition(platform_agent_id, LCE.DEVELOP) #create a parsed product for this instrument output tdom, sdom = time_series_domain() tdom = tdom.dump() sdom = 
sdom.dump() dp_obj = IonObject(RT.DataProduct, name='the parsed data', description='ctd stream test', processing_level_code='Parsed_Canonical', temporal_domain=tdom, spatial_domain=sdom) pdict_id = self.DSC.read_parameter_dictionary_by_name( 'ctd_parsed_param_dict', id_only=True) parsed_stream_def_id = self.PSC.create_stream_definition( name='parsed', parameter_dictionary_id=pdict_id) data_product_id1 = self.DP.create_data_product( data_product=dp_obj, stream_definition_id=parsed_stream_def_id) log.debug('new dp_id = %s', data_product_id1) self.DAMS.assign_data_product(input_resource_id=instrument_device_id, data_product_id=data_product_id1) def addInstOwner(inst_id, subject): actor_identity_obj = any_old(RT.ActorIdentity, {"name": subject}) user_id = self.IDS.create_actor_identity(actor_identity_obj) user_info_obj = any_old(RT.UserInfo) user_info_id = self.IDS.create_user_info(user_id, user_info_obj) self.RR.create_association(inst_id, PRED.hasOwner, user_id) #Testing multiple instrument owners addInstOwner( instrument_device_id, "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254") addInstOwner( instrument_device_id, "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Bob Cumbers A256") def ion_object_encoder(obj): return obj.__dict__ #First call to create instrument_data = self.IMS.prepare_instrument_device_support() #print simplejson.dumps(instrument_data, default=ion_object_encoder, indent=2) self.assertEqual(instrument_data._id, '') self.assertEqual(instrument_data.type_, OT.InstrumentDevicePrepareSupport) self.assertEqual( len(instrument_data.associations['InstrumentModel'].resources), 1) self.assertEqual( instrument_data.associations['InstrumentModel'].resources[0]._id, instrument_model_id) self.assertEqual( len(instrument_data.associations['InstrumentAgentInstance']. resources), 1) self.assertEqual( instrument_data.associations['InstrumentAgentInstance']. 
resources[0]._id, instrument_agent_instance_id) self.assertEqual( len(instrument_data.associations['InstrumentModel']. associated_resources), 0) self.assertEqual( len(instrument_data.associations['InstrumentAgentInstance']. associated_resources), 0) self.assertEqual( len(instrument_data.associations['SensorDevice'].resources), 0) #Next call to update instrument_data = self.IMS.prepare_instrument_device_support( instrument_device_id) #print 'Update results' #print simplejson.dumps(instrument_data, default=ion_object_encoder, indent=2) self.assertEqual(instrument_data._id, instrument_device_id) self.assertEqual(instrument_data.type_, OT.InstrumentDevicePrepareSupport) self.assertEqual( len(instrument_data.associations['InstrumentModel'].resources), 1) self.assertEqual( instrument_data.associations['InstrumentModel'].resources[0]._id, instrument_model_id) self.assertEqual( len(instrument_data.associations['InstrumentAgentInstance']. resources), 1) self.assertEqual( instrument_data.associations['InstrumentAgentInstance']. resources[0]._id, instrument_agent_instance_id) self.assertEqual( len(instrument_data.associations['InstrumentModel']. associated_resources), 1) self.assertEqual( instrument_data.associations['InstrumentModel']. associated_resources[0].s, instrument_device_id) self.assertEqual( instrument_data.associations['InstrumentModel']. associated_resources[0].o, instrument_model_id) self.assertEqual( len(instrument_data.associations['InstrumentAgentInstance']. associated_resources), 1) self.assertEqual( instrument_data.associations['InstrumentAgentInstance']. associated_resources[0].o, instrument_agent_instance_id) self.assertEqual( instrument_data.associations['InstrumentAgentInstance']. 
associated_resources[0].s, instrument_device_id) self.assertEqual( len(instrument_data.associations['SensorDevice'].resources), 1) self.assertEqual( instrument_data.associations['SensorDevice'].resources[0]._id, sensor_device_id) self.assertEqual( len(instrument_data.associations['SensorDevice']. associated_resources), 1) self.assertEqual( instrument_data.associations['SensorDevice']. associated_resources[0].o, instrument_device_id) self.assertEqual( instrument_data.associations['SensorDevice']. associated_resources[0].s, sensor_device_id) self.assertEqual( instrument_data.associations['InstrumentModel'].assign_request. request_parameters['instrument_device_id'], instrument_device_id) #test prepare for create of instrument agent instance instrument_agent_data = self.IMS.prepare_instrument_agent_instance_support( ) #print 'Update results' #print simplejson.dumps(instrument_agent_data, default=ion_object_encoder, indent=2) self.assertEqual(instrument_agent_data._id, '') self.assertEqual(instrument_agent_data.type_, OT.InstrumentAgentInstancePrepareSupport) self.assertEqual( len(instrument_agent_data.associations['InstrumentDevice']. resources), 2) self.assertEqual( len(instrument_agent_data.associations['InstrumentAgent'].resources ), 1) self.assertEqual( instrument_agent_data.associations['InstrumentAgent'].resources[0]. _id, instrument_agent_id) self.assertEqual( len(instrument_agent_data.associations['InstrumentDevice']. associated_resources), 0) self.assertEqual( len(instrument_agent_data.associations['InstrumentAgent']. 
associated_resources), 0) #test prepare for update of instrument agent instance to see if it is associated with the instrument that was created instrument_agent_data = self.IMS.prepare_instrument_agent_instance_support( instrument_agent_instance_id=instrument_agent_instance_id) #print 'Update results' #print simplejson.dumps(instrument_agent_data, default=ion_object_encoder, indent=2) self.assertEqual(instrument_agent_data._id, instrument_agent_instance_id) self.assertEqual(instrument_agent_data.type_, OT.InstrumentAgentInstancePrepareSupport) self.assertEqual( len(instrument_agent_data.associations['InstrumentDevice']. resources), 3) self.assertEqual( len(instrument_agent_data.associations['InstrumentAgent'].resources ), 1) self.assertEqual( instrument_agent_data.associations['InstrumentAgent'].resources[0]. _id, instrument_agent_id) self.assertEqual( len(instrument_agent_data.associations['InstrumentDevice']. associated_resources), 1) self.assertEqual( instrument_agent_data.associations['InstrumentDevice']. associated_resources[0].s, instrument_device_id) self.assertEqual( instrument_agent_data.associations['InstrumentDevice']. associated_resources[0].o, instrument_agent_instance_id) self.assertEqual( len(instrument_agent_data.associations['InstrumentAgent']. associated_resources), 1) self.assertEqual( instrument_agent_data.associations['InstrumentAgent']. associated_resources[0].o, instrument_agent_id) self.assertEqual( instrument_agent_data.associations['InstrumentAgent']. associated_resources[0].s, instrument_agent_instance_id) self.assertEqual( instrument_agent_data.associations['InstrumentAgent']. 
assign_request.request_parameters['instrument_agent_instance_id'], instrument_agent_instance_id) #test prepare for update of data product to see if it is associated with the instrument that was created data_product_data = self.DP.prepare_data_product_support( data_product_id1) #print simplejson.dumps(data_product_data, default=ion_object_encoder, indent=2) self.assertEqual(data_product_data._id, data_product_id1) self.assertEqual(data_product_data.type_, OT.DataProductPrepareSupport) self.assertEqual( len(data_product_data.associations['StreamDefinition'].resources), 1) self.assertEqual( len(data_product_data.associations['Dataset'].resources), 0) self.assertEqual( len(data_product_data.associations['StreamDefinition']. associated_resources), 1) self.assertEqual( data_product_data.associations['StreamDefinition']. associated_resources[0].s, data_product_id1) self.assertEqual( len(data_product_data.associations['Dataset'].associated_resources ), 0) self.assertEqual( len(data_product_data. associations['InstrumentDeviceHasOutputProduct'].resources), 3) self.assertEqual( len(data_product_data.associations[ 'InstrumentDeviceHasOutputProduct'].associated_resources), 1) self.assertEqual( data_product_data.associations['InstrumentDeviceHasOutputProduct']. associated_resources[0].s, instrument_device_id) self.assertEqual( data_product_data.associations['InstrumentDeviceHasOutputProduct']. 
associated_resources[0].o, data_product_id1) self.assertEqual( len(data_product_data.associations['PlatformDevice'].resources), 2) platform_data = self.IMS.prepare_platform_device_support() #print simplejson.dumps(platform_data, default=ion_object_encoder, indent=2) self.assertEqual(platform_data._id, '') self.assertEqual(platform_data.type_, OT.PlatformDevicePrepareSupport) self.assertEqual( len(platform_data.associations['PlatformModel'].resources), 1) self.assertEqual( platform_data.associations['PlatformModel'].resources[0]._id, platform_model_id) self.assertEqual( len(platform_data.associations['PlatformAgentInstance'].resources), 1) self.assertEqual( platform_data.associations['PlatformAgentInstance'].resources[0]. _id, platform_agent_instance_id) self.assertEqual( len(platform_data.associations['PlatformModel']. associated_resources), 0) self.assertEqual( len(platform_data.associations['PlatformAgentInstance']. associated_resources), 0) self.assertEqual( len(platform_data.associations['InstrumentDevice'].resources), 1) platform_data = self.IMS.prepare_platform_device_support( platform_device_id) #print simplejson.dumps(platform_data, default=ion_object_encoder, indent=2) self.assertEqual(platform_data._id, platform_device_id) self.assertEqual(platform_data.type_, OT.PlatformDevicePrepareSupport) self.assertEqual( len(platform_data.associations['PlatformModel'].resources), 1) self.assertEqual( platform_data.associations['PlatformModel'].resources[0]._id, platform_model_id) self.assertEqual( len(platform_data.associations['PlatformAgentInstance'].resources), 1) self.assertEqual( platform_data.associations['PlatformAgentInstance'].resources[0]. _id, platform_agent_instance_id) self.assertEqual( len(platform_data.associations['PlatformModel']. associated_resources), 1) self.assertEqual( platform_data.associations['PlatformModel']. associated_resources[0].s, platform_device_id) self.assertEqual( platform_data.associations['PlatformModel']. 
associated_resources[0].o, platform_model_id) self.assertEqual( len(platform_data.associations['PlatformAgentInstance']. associated_resources), 1) self.assertEqual( platform_data.associations['PlatformAgentInstance']. associated_resources[0].o, platform_agent_instance_id) self.assertEqual( platform_data.associations['PlatformAgentInstance']. associated_resources[0].s, platform_device_id) self.assertEqual( len(platform_data.associations['InstrumentDevice'].resources), 2) #self.assertEqual(len(platform_data.associations['InstrumentDevice'].associated_resources), 1) #self.assertEqual(platform_data.associations['InstrumentDevice'].associated_resources[0].s, platform_device_id) #self.assertEqual(platform_data.associations['InstrumentDevice'].associated_resources[0].o, instrument_device_id) self.assertEqual( platform_data.associations['PlatformModel'].assign_request. request_parameters['platform_device_id'], platform_device_id) # cleanup c = DotDict() c.resource_registry = self.RR self.RR2.pluck(instrument_agent_id) self.RR2.pluck(instrument_model_id) self.RR2.pluck(instrument_device_id) self.RR2.pluck(platform_agent_id) self.RR2.pluck(sensor_device_id) self.RR2.pluck(sensor_device2_id) self.IMS.force_delete_instrument_agent(instrument_agent_id) self.IMS.force_delete_instrument_model(instrument_model_id) self.IMS.force_delete_instrument_device(instrument_device_id) self.IMS.force_delete_instrument_device(instrument_device2_id) self.IMS.force_delete_platform_agent_instance( platform_agent_instance_id) self.IMS.force_delete_platform_agent(platform_agent_id) self.IMS.force_delete_platform_device(platform_device_id) self.IMS.force_delete_platform_device(platform_device2_id) self.IMS.force_delete_platform_model(platform_model_id) self.IMS.force_delete_sensor_device(sensor_device_id) self.IMS.force_delete_sensor_device(sensor_device2_id) self.IMS.force_delete_sensor_model(sensor_model_id) #stuff we associate to self.RR.delete(data_producer_id) self.RR.delete(org_id)
class TestDeployment(IonIntegrationTestCase):
    """Integration tests for creating and activating Deployment resources
    via the Observatory Management Service (OMS) and Instrument Management
    Service (IMS)."""

    def setUp(self):
        # Start container and deploy the standard r2 service set
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Service clients used throughout the tests
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dmpsclient = DataProductManagementServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.psmsclient = PubsubManagementServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()

        self.c = DotDict()
        self.c.resource_registry = self.rrclient

        self.RR2 = EnhancedResourceRegistryClient(self.rrclient)

        # create missing data process definition
        # (the logical transform definition is normally installed by preload)
        self.dsmsclient = DataProcessManagementServiceClient(node=self.container.node)
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name=LOGICAL_TRANSFORM_DEFINITION_NAME,
                            description="normally in preload",
                            module='ion.processes.data.transforms.logical_transform',
                            class_name='logical_transform')
        self.dsmsclient.create_data_process_definition(dpd_obj)

        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
                self.dsmsclient.deactivate_data_process(proc_id)
                self.dsmsclient.delete_data_process(proc_id)
        self.addCleanup(killAllDataProcesses)

    #@unittest.skip("targeting")
    def test_create_deployment(self):
        """Create a deployment, attach a platform site and device, verify the
        associations, then force-delete and verify the read fails."""

        # create a deployment with metadata and an initial site and device
        platform_site__obj = IonObject(RT.PlatformSite,
                                       name='PlatformSite1',
                                       description='test platform site')
        site_id = self.omsclient.create_platform_site(platform_site__obj)

        platform_device__obj = IonObject(RT.PlatformDevice,
                                         name='PlatformDevice1',
                                         description='test platform device')
        device_id = self.imsclient.create_platform_device(platform_device__obj)

        # planned deployment window: calendar year 2013
        start = IonTime(datetime.datetime(2013,1,1))
        end = IonTime(datetime.datetime(2014,1,1))
        temporal_bounds = IonObject(OT.TemporalBounds, name='planned',
                                    start_datetime=start.to_string(),
                                    end_datetime=end.to_string())
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment',
                                   description='some new deployment',
                                   constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)
        self.omsclient.deploy_platform_site(site_id, deployment_id)
        self.imsclient.deploy_platform_device(device_id, deployment_id)

        log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id) )

        # retrieve the deployment objects and check that the assoc site and device are attached
        read_deployment_obj = self.omsclient.read_deployment(deployment_id)
        log.debug("test_create_deployment: created deployment obj: %s ", str(read_deployment_obj) )

        site_ids, _ = self.rrclient.find_subjects(RT.PlatformSite, PRED.hasDeployment,
                                                  deployment_id, True)
        self.assertEqual(len(site_ids), 1)

        device_ids, _ = self.rrclient.find_subjects(RT.PlatformDevice, PRED.hasDeployment,
                                                    deployment_id, True)
        self.assertEqual(len(device_ids), 1)

        # delete the deployment: pluck removes associations first so the
        # force-delete can succeed
        self.RR2.pluck(deployment_id)
        self.omsclient.force_delete_deployment(deployment_id)
        # now try to get the deleted dp object
        try:
            self.omsclient.read_deployment(deployment_id)
        except NotFound:
            pass
        else:
            self.fail("deleted deployment was found during read")

    #@unittest.skip("targeting")
    def base_activate_deployment(self):
        """Build the resource fixture shared by the activate-deployment tests.

        Creates a platform site/device/model, an instrument site/device/model,
        the supporting stream definition and data products, and a Deployment.
        Returns a DotDict of the created resource ids. Note: no models are
        assigned and nothing is added to the deployment here - each test does
        that itself.
        """

        #-------------------------------------------------------------------------------------
        # Create platform site, platform device, platform model
        #-------------------------------------------------------------------------------------

        platform_site__obj = IonObject(RT.PlatformSite,
                                       name='PlatformSite1',
                                       description='test platform site')
        platform_site_id = self.omsclient.create_platform_site(platform_site__obj)

        platform_device_obj = IonObject(RT.PlatformDevice,
                                        name='PlatformDevice1',
                                        description='test platform device')
        platform_device_id = self.imsclient.create_platform_device(platform_device_obj)

        platform_model__obj = IonObject(RT.PlatformModel,
                                        name='PlatformModel1',
                                        description='test platform model')
        platform_model_id = self.imsclient.create_platform_model(platform_model__obj)

        #-------------------------------------------------------------------------------------
        # Create instrument site
        #-------------------------------------------------------------------------------------

        instrument_site_obj = IonObject(RT.InstrumentSite,
                                        name='InstrumentSite1',
                                        description='test instrument site')
        instrument_site_id = self.omsclient.create_instrument_site(instrument_site_obj,
                                                                   platform_site_id)

        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict',
                                                                             id_only=True)
        ctd_stream_def_id = self.psmsclient.create_stream_definition(name='SBE37_CDM',
                                                                     parameter_dictionary_id=pdict_id)

        # Construct temporal and spatial Coordinate Reference System objects
        tdom, sdom = time_series_domain()

        sdom = sdom.dump()
        tdom = tdom.dump()

        dp_obj = IonObject(RT.DataProduct,
                           name='Log Data Product',
                           description='some new dp',
                           temporal_domain = tdom,
                           spatial_domain = sdom)

        out_log_data_product_id = self.dmpsclient.create_data_product(dp_obj, ctd_stream_def_id)

        #----------------------------------------------------------------------------------------------------
        # Start the transform (a logical transform) that acts as an instrument site
        #----------------------------------------------------------------------------------------------------
        self.omsclient.create_site_data_product(site_id= instrument_site_id,
                                                data_product_id =  out_log_data_product_id)

        #----------------------------------------------------------------------------------------------------
        # Create an instrument device
        #----------------------------------------------------------------------------------------------------
        instrument_device_obj = IonObject(RT.InstrumentDevice,
                                          name='InstrumentDevice1',
                                          description='test instrument device')
        instrument_device_id = self.imsclient.create_instrument_device(instrument_device_obj)
        # hang the instrument off the platform
        self.rrclient.create_association(platform_device_id, PRED.hasDevice, instrument_device_id)

        dp_obj = IonObject(RT.DataProduct,
                           name='Instrument Data Product',
                           description='some new dp',
                           temporal_domain = tdom,
                           spatial_domain = sdom)

        inst_data_product_id = self.dmpsclient.create_data_product(dp_obj, ctd_stream_def_id)

        # assign data products appropriately
        self.damsclient.assign_data_product(input_resource_id=instrument_device_id,
                                            data_product_id=inst_data_product_id)

        #----------------------------------------------------------------------------------------------------
        # Create an instrument model
        #----------------------------------------------------------------------------------------------------
        instrument_model_obj = IonObject(RT.InstrumentModel,
                                         name='InstrumentModel1',
                                         description='test instrument model')
        instrument_model_id = self.imsclient.create_instrument_model(instrument_model_obj)

        #----------------------------------------------------------------------------------------------------
        # Create a deployment object
        #----------------------------------------------------------------------------------------------------

        # planned deployment window: calendar year 2013
        start = IonTime(datetime.datetime(2013,1,1))
        end = IonTime(datetime.datetime(2014,1,1))
        temporal_bounds = IonObject(OT.TemporalBounds, name='planned',
                                    start_datetime=start.to_string(),
                                    end_datetime=end.to_string())
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment',
                                   description='some new deployment',
                                   constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)

        log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id) )

        ret = DotDict(instrument_site_id=instrument_site_id,
                      instrument_device_id=instrument_device_id,
                      instrument_model_id=instrument_model_id,
                      platform_site_id=platform_site_id,
                      platform_device_id=platform_device_id,
                      platform_model_id=platform_model_id,
                      deployment_id=deployment_id)

        return ret

    #@unittest.skip("targeting")
    def test_activate_deployment_normal(self):
        """Fully-wired deployment (all models assigned, both site+device pairs
        deployed) should activate without error."""

        res = self.base_activate_deployment()

        log.debug("assigning platform and instrument models")
        self.imsclient.assign_platform_model_to_platform_device(res.platform_model_id,
                                                                res.platform_device_id)
        self.imsclient.assign_instrument_model_to_instrument_device(res.instrument_model_id,
                                                                    res.instrument_device_id)
        self.omsclient.assign_platform_model_to_platform_site(res.platform_model_id,
                                                              res.platform_site_id)
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id,
                                                                  res.instrument_site_id)

        log.debug("adding instrument site and device to deployment")
        self.omsclient.deploy_instrument_site(res.instrument_site_id, res.deployment_id)
        self.imsclient.deploy_instrument_device(res.instrument_device_id, res.deployment_id)

        log.debug("adding platform site and device to deployment")
        self.omsclient.deploy_platform_site(res.platform_site_id, res.deployment_id)
        self.imsclient.deploy_platform_device(res.platform_device_id, res.deployment_id)

        log.debug("activating deployment, expecting success")
        self.omsclient.activate_deployment(res.deployment_id)

    #@unittest.skip("targeting")
    def test_activate_deployment_nomodels(self):
        """Activation must fail when neither the site nor (after the site is
        fixed) the device has a model assigned."""

        res = self.base_activate_deployment()

        self.omsclient.deploy_instrument_site(res.instrument_site_id, res.deployment_id)
        self.imsclient.deploy_instrument_device(res.instrument_device_id, res.deployment_id)

        log.debug("activating deployment without site+device models, expecting fail")
        self.assert_deploy_fail(res.deployment_id,
                                "Expected at least 1 model for InstrumentSite")

        log.debug("assigning instrument site model")
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id,
                                                                  res.instrument_site_id)

        log.debug("activating deployment without device models, expecting fail")
        self.assert_deploy_fail(res.deployment_id,
                                "Expected 1 model for InstrumentDevice")

    #@unittest.skip("targeting")
    def test_activate_deployment_nosite(self):
        """Activation must fail when only a device (no site) is deployed."""

        res = self.base_activate_deployment()

        log.debug("assigning instrument models")
        self.imsclient.assign_instrument_model_to_instrument_device(res.instrument_model_id,
                                                                    res.instrument_device_id)
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id,
                                                                  res.instrument_site_id)

        log.debug("deploying instrument device only")
        self.imsclient.deploy_instrument_device(res.instrument_device_id, res.deployment_id)

        log.debug("activating deployment without device models, expecting fail")
        self.assert_deploy_fail(res.deployment_id,
                                "No sites were found in the deployment")

    #@unittest.skip("targeting")
    def test_activate_deployment_nodevice(self):
        """Activation must fail when only a site (no device) is deployed."""

        res = self.base_activate_deployment()

        log.debug("assigning platform and instrument models")
        self.imsclient.assign_instrument_model_to_instrument_device(res.instrument_model_id,
                                                                    res.instrument_device_id)
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id,
                                                                  res.instrument_site_id)

        log.debug("deploying instrument site only")
        self.omsclient.deploy_instrument_site(res.instrument_site_id, res.deployment_id)

        log.debug("activating deployment without device models, expecting fail")
        self.assert_deploy_fail(res.deployment_id,
                                "The set of devices could not be mapped to the set of sites")

    def assert_deploy_fail(self, deployment_id, fail_message="did not specify fail_message"):
        # Activation is expected to raise BadRequest whose message contains
        # `fail_message` (py2-style exception .message attribute).
        with self.assertRaises(BadRequest) as cm:
            self.omsclient.activate_deployment(deployment_id)
        self.assertIn(fail_message, cm.exception.message)
class TestDeployment(IonIntegrationTestCase):
    """Integration tests for Deployment resources: CRUD, prepare-support, and activation."""

    def setUp(self):
        # Start container and deploy the standard r2 service set
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Service clients used throughout the tests
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dmpsclient = DataProductManagementServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.psmsclient = PubsubManagementServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()
        self.c = DotDict()
        self.c.resource_registry = self.rrclient
        self.RR2 = EnhancedResourceRegistryClient(self.rrclient)
        self.dsmsclient = DataProcessManagementServiceClient(node=self.container.node)

        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
                self.dsmsclient.deactivate_data_process(proc_id)
                self.dsmsclient.delete_data_process(proc_id)
        self.addCleanup(killAllDataProcesses)

    #@unittest.skip("targeting")
    def test_create_deployment(self):
        """Create a deployment with site+device, verify associations, then force-delete it."""

        #create a deployment with metadata and an initial site and device
        platform_site__obj = IonObject(RT.PlatformSite,
                                       name='PlatformSite1',
                                       description='test platform site')
        site_id = self.omsclient.create_platform_site(platform_site__obj)

        platform_device__obj = IonObject(RT.PlatformDevice,
                                         name='PlatformDevice1',
                                         description='test platform device')
        device_id = self.imsclient.create_platform_device(platform_device__obj)

        start = IonTime(datetime.datetime(2013, 1, 1))
        end = IonTime(datetime.datetime(2014, 1, 1))
        temporal_bounds = IonObject(OT.TemporalBounds, name='planned',
                                    start_datetime=start.to_string(),
                                    end_datetime=end.to_string())
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment',
                                   description='some new deployment',
                                   constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)
        self.omsclient.deploy_platform_site(site_id, deployment_id)
        self.imsclient.deploy_platform_device(device_id, deployment_id)

        log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id))

        #retrieve the deployment objects and check that the assoc site and device are attached
        read_deployment_obj = self.omsclient.read_deployment(deployment_id)
        log.debug("test_create_deployment: created deployment obj: %s ", str(read_deployment_obj))

        site_ids, _ = self.rrclient.find_subjects(RT.PlatformSite, PRED.hasDeployment, deployment_id, True)
        self.assertEqual(len(site_ids), 1)

        device_ids, _ = self.rrclient.find_subjects(RT.PlatformDevice, PRED.hasDeployment, deployment_id, True)
        self.assertEqual(len(device_ids), 1)

        #delete the deployment
        self.RR2.pluck(deployment_id)
        self.omsclient.force_delete_deployment(deployment_id)
        # now try to get the deleted dp object
        try:
            self.omsclient.read_deployment(deployment_id)
        except NotFound:
            pass
        else:
            self.fail("deleted deployment was found during read")

    #@unittest.skip("targeting")
    def test_prepare_deployment_support(self):
        """Exercise prepare_deployment_support before/after associating a site and device."""

        deploy_sup = self.omsclient.prepare_deployment_support()
        self.assertTrue(deploy_sup)

        # with no deployment id, every association bucket is empty
        self.assertEqual(deploy_sup.associations['DeploymentHasInstrumentDevice'].type_, "AssocDeploymentInstDevice")
        self.assertEqual(deploy_sup.associations['DeploymentHasInstrumentDevice'].resources, [])
        self.assertEqual(deploy_sup.associations['DeploymentHasInstrumentDevice'].associated_resources, [])
        self.assertEqual(deploy_sup.associations['DeploymentHasPlatformDevice'].type_, "AssocDeploymentPlatDevice")
        self.assertEqual(deploy_sup.associations['DeploymentHasPlatformDevice'].resources, [])
        self.assertEqual(deploy_sup.associations['DeploymentHasPlatformDevice'].associated_resources, [])
        self.assertEqual(deploy_sup.associations['DeploymentHasInstrumentSite'].type_, "AssocDeploymentInstSite")
        self.assertEqual(deploy_sup.associations['DeploymentHasInstrumentSite'].resources, [])
        self.assertEqual(deploy_sup.associations['DeploymentHasInstrumentSite'].associated_resources, [])
        self.assertEqual(deploy_sup.associations['DeploymentHasPlatformSite'].type_, "AssocDeploymentPlatSite")
        self.assertEqual(deploy_sup.associations['DeploymentHasPlatformSite'].resources, [])
        self.assertEqual(deploy_sup.associations['DeploymentHasPlatformSite'].associated_resources, [])

        #create a deployment with metadata and an initial site and device
        platform_site__obj = IonObject(RT.PlatformSite,
                                       name='PlatformSite1',
                                       description='test platform site')
        site_id = self.omsclient.create_platform_site(platform_site__obj)

        platform_device__obj = IonObject(RT.PlatformDevice,
                                         name='PlatformDevice1',
                                         description='test platform device')
        device_id = self.imsclient.create_platform_device(platform_device__obj)

        start = IonTime(datetime.datetime(2013, 1, 1))
        end = IonTime(datetime.datetime(2014, 1, 1))
        temporal_bounds = IonObject(OT.TemporalBounds, name='planned',
                                    start_datetime=start.to_string(),
                                    end_datetime=end.to_string())
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment',
                                   description='some new deployment',
                                   constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)

        deploy_sup = self.omsclient.prepare_deployment_support(deployment_id)

        # candidate resources are now visible, but nothing is associated yet
        self.assertEqual(deploy_sup.associations['DeploymentHasInstrumentDevice'].resources, [])
        self.assertEqual(deploy_sup.associations['DeploymentHasInstrumentDevice'].associated_resources, [])
        self.assertEqual(len(deploy_sup.associations['DeploymentHasPlatformDevice'].resources), 1)
        self.assertEqual(deploy_sup.associations['DeploymentHasPlatformDevice'].associated_resources, [])
        self.assertEqual(deploy_sup.associations['DeploymentHasInstrumentSite'].resources, [])
        self.assertEqual(deploy_sup.associations['DeploymentHasInstrumentSite'].associated_resources, [])
        self.assertEqual(len(deploy_sup.associations['DeploymentHasPlatformSite'].resources), 1)
        self.assertEqual(deploy_sup.associations['DeploymentHasPlatformSite'].associated_resources, [])

        self.omsclient.assign_site_to_deployment(site_id, deployment_id)
        self.omsclient.assign_device_to_deployment(device_id, deployment_id)

        deploy_sup = self.omsclient.prepare_deployment_support(deployment_id)

        # after assignment, the platform site and device show as associated
        self.assertEqual(deploy_sup.associations['DeploymentHasInstrumentDevice'].resources, [])
        self.assertEqual(deploy_sup.associations['DeploymentHasInstrumentDevice'].associated_resources, [])
        self.assertEqual(len(deploy_sup.associations['DeploymentHasPlatformDevice'].resources), 1)
        self.assertEqual(len(deploy_sup.associations['DeploymentHasPlatformDevice'].associated_resources), 1)
        self.assertEqual(deploy_sup.associations['DeploymentHasInstrumentSite'].resources, [])
        self.assertEqual(deploy_sup.associations['DeploymentHasInstrumentSite'].associated_resources, [])
        self.assertEqual(len(deploy_sup.associations['DeploymentHasPlatformSite'].resources), 1)
        self.assertEqual(len(deploy_sup.associations['DeploymentHasPlatformSite'].associated_resources), 1)

        #delete the deployment
        self.RR2.pluck(deployment_id)
        self.omsclient.force_delete_deployment(deployment_id)
        # now try to get the deleted dp object
        try:
            self.omsclient.read_deployment(deployment_id)
        except NotFound:
            pass
        else:
            self.fail("deleted deployment was found during read")

    #@unittest.skip("targeting")
    def base_activate_deployment(self):
        """Build the common fixture (platform+instrument sites/devices/models and a deployment).

        Returns a DotDict of all created resource ids.
        """

        # Create platform site, platform device, platform model
        platform_site__obj = IonObject(RT.PlatformSite,
                                       name='PlatformSite1',
                                       description='test platform site')
        platform_site_id = self.omsclient.create_platform_site(platform_site__obj)

        platform_device_obj = IonObject(RT.PlatformDevice,
                                        name='PlatformDevice1',
                                        description='test platform device')
        platform_device_id = self.imsclient.create_platform_device(platform_device_obj)

        platform_model__obj = IonObject(RT.PlatformModel,
                                        name='PlatformModel1',
                                        description='test platform model')
        platform_model_id = self.imsclient.create_platform_model(platform_model__obj)

        # Create instrument site
        instrument_site_obj = IonObject(RT.InstrumentSite,
                                        name='InstrumentSite1',
                                        description='test instrument site')
        instrument_site_id = self.omsclient.create_instrument_site(instrument_site_obj, platform_site_id)

        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        # stream definition is created for its registry side effect; the id itself is not used here
        self.psmsclient.create_stream_definition(name='SBE37_CDM', parameter_dictionary_id=pdict_id)

        # Create an instrument device, attached under the platform device
        instrument_device_obj = IonObject(RT.InstrumentDevice,
                                          name='InstrumentDevice1',
                                          description='test instrument device')
        instrument_device_id = self.imsclient.create_instrument_device(instrument_device_obj)
        self.rrclient.create_association(platform_device_id, PRED.hasDevice, instrument_device_id)

        # Create an instrument model
        instrument_model_obj = IonObject(RT.InstrumentModel,
                                         name='InstrumentModel1',
                                         description='test instrument model')
        instrument_model_id = self.imsclient.create_instrument_model(instrument_model_obj)

        # Create a deployment object with a planned one-year temporal constraint
        start = IonTime(datetime.datetime(2013, 1, 1))
        end = IonTime(datetime.datetime(2014, 1, 1))
        temporal_bounds = IonObject(OT.TemporalBounds, name='planned',
                                    start_datetime=start.to_string(),
                                    end_datetime=end.to_string())
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment',
                                   description='some new deployment',
                                   context=IonObject(OT.CabledNodeDeploymentContext),
                                   constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)

        log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id))

        ret = DotDict(instrument_site_id=instrument_site_id,
                      instrument_device_id=instrument_device_id,
                      instrument_model_id=instrument_model_id,
                      platform_site_id=platform_site_id,
                      platform_device_id=platform_device_id,
                      platform_model_id=platform_model_id,
                      deployment_id=deployment_id)
        return ret

    #@unittest.skip("targeting")
    def test_activate_deployment_normal(self):
        """Happy path: activation and deactivation both succeed with a fully-wired fixture."""
        res = self.base_activate_deployment()

        log.debug("assigning platform and instrument models")
        self.imsclient.assign_platform_model_to_platform_device(res.platform_model_id, res.platform_device_id)
        self.imsclient.assign_instrument_model_to_instrument_device(res.instrument_model_id, res.instrument_device_id)
        self.omsclient.assign_platform_model_to_platform_site(res.platform_model_id, res.platform_site_id)
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id, res.instrument_site_id)

        log.debug("adding instrument site and device to deployment")
        self.omsclient.deploy_instrument_site(res.instrument_site_id, res.deployment_id)
        self.imsclient.deploy_instrument_device(res.instrument_device_id, res.deployment_id)

        log.debug("adding platform site and device to deployment")
        self.omsclient.deploy_platform_site(res.platform_site_id, res.deployment_id)
        self.imsclient.deploy_platform_device(res.platform_device_id, res.deployment_id)

        log.debug("activating deployment, expecting success")
        self.omsclient.activate_deployment(res.deployment_id)

        # fixed typo: "deactivatin" -> "deactivating"
        log.debug("deactivating deployment, expecting success")
        self.omsclient.deactivate_deployment(res.deployment_id)

    #@unittest.skip("targeting")
    def test_activate_deployment_nomodels(self):
        """Activation must fail while site/device lack model associations."""
        res = self.base_activate_deployment()

        self.omsclient.deploy_instrument_site(res.instrument_site_id, res.deployment_id)
        self.imsclient.deploy_instrument_device(res.instrument_device_id, res.deployment_id)

        log.debug("activating deployment without site+device models, expecting fail")
        self.assert_deploy_fail(res.deployment_id, NotFound, "Expected 1")

        log.debug("assigning instrument site model")
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id, res.instrument_site_id)

        log.debug("activating deployment without device models, expecting fail")
        self.assert_deploy_fail(res.deployment_id, NotFound, "Expected 1")

    #@unittest.skip("targeting")
    def test_activate_deployment_nosite(self):
        """Activation must fail when a device is deployed without a corresponding site."""
        res = self.base_activate_deployment()

        log.debug("assigning instrument models")
        self.imsclient.assign_instrument_model_to_instrument_device(res.instrument_model_id, res.instrument_device_id)
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id, res.instrument_site_id)

        log.debug("deploying instrument device only")
        self.imsclient.deploy_instrument_device(res.instrument_device_id, res.deployment_id)

        log.debug("activating deployment without instrument site, expecting fail")
        self.assert_deploy_fail(res.deployment_id, BadRequest, "Devices in this deployment outnumber sites")

    #@unittest.skip("targeting")
    def test_activate_deployment_nodevice(self):
        """Activation must fail when a site is deployed without any device."""
        res = self.base_activate_deployment()

        log.debug("assigning platform and instrument models")
        self.imsclient.assign_instrument_model_to_instrument_device(res.instrument_model_id, res.instrument_device_id)
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id, res.instrument_site_id)

        log.debug("deploying instrument site only")
        self.omsclient.deploy_instrument_site(res.instrument_site_id, res.deployment_id)

        log.debug("activating deployment without device, expecting fail")
        self.assert_deploy_fail(res.deployment_id, BadRequest, "No devices were found in the deployment")

    def test_activate_deployment_asymmetric_children(self):
        """
        P0
        |  \\
        P1  P2
        |
        I1

        Complex deployment using CSP

        P1, P2, and P3 share the same platform model.  The CSP solver should be
        able to work this out based on relationships to parents
        """
        log.debug("create models")
        imodel_id = self.RR2.create(any_old(RT.InstrumentModel))
        pmodel_id = self.RR2.create(any_old(RT.PlatformModel))

        log.debug("create devices")
        idevice_id = self.RR2.create(any_old(RT.InstrumentDevice))
        pdevice_id = [self.RR2.create(any_old(RT.PlatformDevice)) for _ in range(3)]

        log.debug("create sites")
        isite_id = self.RR2.create(any_old(RT.InstrumentSite))
        psite_id = [self.RR2.create(any_old(RT.PlatformSite)) for _ in range(3)]

        log.debug("assign models")
        self.RR2.assign_instrument_model_to_instrument_device_with_has_model(imodel_id, idevice_id)
        self.RR2.assign_instrument_model_to_instrument_site_with_has_model(imodel_id, isite_id)
        for x in range(3):
            self.RR2.assign_platform_model_to_platform_device_with_has_model(pmodel_id, pdevice_id[x])
            self.RR2.assign_platform_model_to_platform_site_with_has_model(pmodel_id, psite_id[x])

        log.debug("assign hierarchy")
        self.RR2.assign_instrument_device_to_platform_device_with_has_device(idevice_id, pdevice_id[1])
        self.RR2.assign_instrument_site_to_platform_site_with_has_site(isite_id, psite_id[1])
        for x in range(1, 3):
            self.RR2.assign_platform_device_to_platform_device_with_has_device(pdevice_id[x], pdevice_id[0])
            self.RR2.assign_platform_site_to_platform_site_with_has_site(psite_id[x], psite_id[0])

        log.debug("create and activate deployment")
        dep_id = self.RR2.create(any_old(RT.Deployment, {"context": IonObject(OT.RemotePlatformDeploymentContext)}))
        self.RR2.assign_deployment_to_platform_device_with_has_deployment(dep_id, pdevice_id[0])
        self.RR2.assign_deployment_to_platform_site_with_has_deployment(dep_id, psite_id[0])
        self.omsclient.activate_deployment(dep_id)

        log.debug("verifying deployment")
        self.assertEqual(idevice_id,
                         self.RR2.find_instrument_device_id_of_instrument_site_using_has_device(isite_id),
                         "The instrument device was not assigned to the instrument site")
        for x in range(3):
            self.assertEqual(pdevice_id[x],
                             self.RR2.find_platform_device_id_of_platform_site_using_has_device(psite_id[x]),
                             "Platform device %d was not assigned to platform site %d" % (x, x))

    def assert_deploy_fail(self, deployment_id, err_type=BadRequest, fail_message="did not specify fail_message"):
        """Assert that activate_deployment raises err_type whose message contains fail_message."""
        with self.assertRaises(err_type) as cm:
            self.omsclient.activate_deployment(deployment_id)
        self.assertIn(fail_message, cm.exception.message)

    def test_3x3_matchups_remoteplatform(self):
        self.base_3x3_matchups(IonObject(OT.RemotePlatformDeploymentContext))

    def test_3x3_matchups_cabledinstrument(self):
        self.base_3x3_matchups(IonObject(OT.CabledInstrumentDeploymentContext))

    def test_3x3_matchups_cablednode(self):
        self.base_3x3_matchups(IonObject(OT.CabledNodeDeploymentContext))

    def base_3x3_matchups(self, deployment_context):
        """
        This will be 1 root platform, 3 sub platforms (2 of one model, 1 of another)
        and 3 sub instruments each (2-to-1)
        """

        deployment_context_type = type(deployment_context).__name__

        instrument_model_id = [self.RR2.create(any_old(RT.InstrumentModel)) for _ in range(6)]
        platform_model_id = [self.RR2.create(any_old(RT.PlatformModel)) for _ in range(3)]

        instrument_device_id = [self.RR2.create(any_old(RT.InstrumentDevice)) for _ in range(9)]
        platform_device_id = [self.RR2.create(any_old(RT.PlatformDevice)) for _ in range(4)]

        instrument_site_id = [self.RR2.create(any_old(RT.InstrumentSite,
                                                      {"planned_uplink_port":
                                                           IonObject(OT.PlatformPort,
                                                                     reference_designator="instport_%d" % (i + 1))}))
                              for i in range(9)]

        platform_site_id = [self.RR2.create(any_old(RT.PlatformSite,
                                                    {"planned_uplink_port":
                                                         IonObject(OT.PlatformPort,
                                                                   reference_designator="platport_%d" % (i + 1))}))
                            for i in range(4)]

        def instrument_model_at(platform_idx, instrument_idx):
            # each platform gets 2 instrument models: index 0 uses the first, 1 and 2 share the second
            m = platform_idx * 2
            if instrument_idx > 0:
                m += 1
            return m

        def platform_model_at(platform_idx):
            # platform 0 has its own model; platforms 1 and 2 share a model
            if platform_idx > 0:
                return 1
            return 0

        def instrument_at(platform_idx, instrument_idx):
            # flatten (platform, instrument) into a 0..8 index
            return platform_idx * 3 + instrument_idx

        # set up the structure
        for p in range(3):
            m = platform_model_at(p)
            self.RR2.assign_platform_model_to_platform_site_with_has_model(platform_model_id[m], platform_site_id[p])
            self.RR2.assign_platform_model_to_platform_device_with_has_model(platform_model_id[m], platform_device_id[p])
            self.RR2.assign_platform_device_to_platform_device_with_has_device(platform_device_id[p], platform_device_id[3])
            self.RR2.assign_platform_site_to_platform_site_with_has_site(platform_site_id[p], platform_site_id[3])

            for i in range(3):
                m = instrument_model_at(p, i)
                idx = instrument_at(p, i)
                self.RR2.assign_instrument_model_to_instrument_site_with_has_model(instrument_model_id[m], instrument_site_id[idx])
                self.RR2.assign_instrument_model_to_instrument_device_with_has_model(instrument_model_id[m], instrument_device_id[idx])
                self.RR2.assign_instrument_device_to_platform_device_with_has_device(instrument_device_id[idx], platform_device_id[p])
                self.RR2.assign_instrument_site_to_platform_site_with_has_site(instrument_site_id[idx], platform_site_id[p])

        # top level models
        self.RR2.assign_platform_model_to_platform_device_with_has_model(platform_model_id[2], platform_device_id[3])
        self.RR2.assign_platform_model_to_platform_site_with_has_model(platform_model_id[2], platform_site_id[3])

        # verify structure
        for p in range(3):
            parent_id = self.RR2.find_platform_device_id_by_platform_device_using_has_device(platform_device_id[p])
            self.assertEqual(platform_device_id[3], parent_id)

            parent_id = self.RR2.find_platform_site_id_by_platform_site_using_has_site(platform_site_id[p])
            self.assertEqual(platform_site_id[3], parent_id)

        for i in range(len(platform_site_id)):
            self.assertEqual(self.RR2.find_platform_model_of_platform_device_using_has_model(platform_device_id[i]),
                             self.RR2.find_platform_model_of_platform_site_using_has_model(platform_site_id[i]))

        for i in range(len(instrument_site_id)):
            self.assertEqual(self.RR2.find_instrument_model_of_instrument_device_using_has_model(instrument_device_id[i]),
                             self.RR2.find_instrument_model_of_instrument_site_using_has_model(instrument_site_id[i]))

        # pin each device to the port whose reference designator matches its site
        port_assignments = {}
        for p in range(3):
            port_assignments[platform_device_id[p]] = "platport_%d" % (p + 1)
            for i in range(3):
                idx = instrument_at(p, i)
                port_assignments[instrument_device_id[idx]] = "instport_%d" % (idx + 1)

        deployment_id = self.RR2.create(any_old(RT.Deployment,
                                                {"context": deployment_context,
                                                 "port_assignments": port_assignments}))

        log.debug("assigning device/site to %s deployment", deployment_context_type)
        if OT.RemotePlatformDeploymentContext == deployment_context_type:
            self.RR2.assign_deployment_to_platform_device_with_has_deployment(deployment_id, platform_device_id[3])
            self.RR2.assign_deployment_to_platform_site_with_has_deployment(deployment_id, platform_site_id[3])

        elif OT.CabledInstrumentDeploymentContext == deployment_context_type:
            self.RR2.assign_deployment_to_instrument_device_with_has_deployment(deployment_id, instrument_device_id[1])
            self.RR2.assign_deployment_to_instrument_site_with_has_deployment(deployment_id, instrument_site_id[1])

        elif OT.CabledNodeDeploymentContext == deployment_context_type:
            self.RR2.assign_deployment_to_platform_device_with_has_deployment(deployment_id, platform_device_id[1])
            self.RR2.assign_deployment_to_platform_site_with_has_deployment(deployment_id, platform_site_id[1])

        log.debug("activation of %s deployment", deployment_context_type)
        self.omsclient.activate_deployment(deployment_id)

        log.debug("validation of %s deployment", deployment_context_type)
        if OT.RemotePlatformDeploymentContext == deployment_context_type:
            # verify proper associations
            for i, d in enumerate(platform_device_id):
                self.assertEqual(d, self.RR2.find_platform_device_id_of_platform_site_using_has_device(platform_site_id[i]))

            for i, d in enumerate(instrument_device_id):
                self.assertEqual(d, self.RR2.find_instrument_device_id_of_instrument_site_using_has_device(instrument_site_id[i]))

        elif OT.CabledInstrumentDeploymentContext == deployment_context_type:
            self.assertEqual(instrument_device_id[1],
                             self.RR2.find_instrument_device_id_of_instrument_site_using_has_device(instrument_site_id[1]))

        elif OT.CabledNodeDeploymentContext == deployment_context_type:
            expected_platforms = [1]
            expected_instruments = [3, 4, 5]

            # verify proper associations: only the cabled node's subtree is matched
            for i, d in enumerate(platform_device_id):
                self.assertEqual(i in expected_platforms,
                                 d in self.RR2.find_platform_device_ids_of_platform_site_using_has_device(platform_site_id[i]))

            for i, d in enumerate(instrument_device_id):
                self.assertEqual(i in expected_instruments,
                                 d in self.RR2.find_instrument_device_ids_of_instrument_site_using_has_device(instrument_site_id[i]))
class TestObservatoryManagementServiceIntegration(IonIntegrationTestCase):
    """Integration tests for ObservatoryManagementService traversal/status queries.

    NOTE(review): this class continues past the end of this chunk
    (_make_associations is truncated below).
    """

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Service clients used throughout the tests
        self.RR = ResourceRegistryServiceClient(node=self.container.node)
        self.RR2 = EnhancedResourceRegistryClient(self.RR)
        self.OMS = ObservatoryManagementServiceClient(node=self.container.node)
        self.org_management_service = OrgManagementServiceClient(node=self.container.node)
        self.IMS = InstrumentManagementServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(node=self.container.node)
        self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()

        self.event_publisher = EventPublisher()

    # @unittest.skip('this exists only for debugging the launch process')
    # def test_just_the_setup(self):
    #     return

    def destroy(self, resource_ids):
        """Force-delete every site resource created by _make_associations."""
        self.OMS.force_delete_observatory(resource_ids.observatory_id)
        self.OMS.force_delete_subsite(resource_ids.subsite_id)
        self.OMS.force_delete_subsite(resource_ids.subsite2_id)
        self.OMS.force_delete_subsite(resource_ids.subsiteb_id)
        self.OMS.force_delete_subsite(resource_ids.subsitez_id)
        self.OMS.force_delete_platform_site(resource_ids.platform_site_id)
        self.OMS.force_delete_platform_site(resource_ids.platform_siteb_id)
        self.OMS.force_delete_platform_site(resource_ids.platform_siteb2_id)
        self.OMS.force_delete_platform_site(resource_ids.platform_site3_id)
        self.OMS.force_delete_instrument_site(resource_ids.instrument_site_id)
        self.OMS.force_delete_instrument_site(resource_ids.instrument_site2_id)
        self.OMS.force_delete_instrument_site(resource_ids.instrument_siteb3_id)
        self.OMS.force_delete_instrument_site(resource_ids.instrument_site4_id)

    #@unittest.skip('targeting')
    def test_observatory_management(self):
        """Build the fixture once and run all sub-tests against it, then tear down."""
        resources = self._make_associations()

        self._do_test_find_related_sites(resources)

        self._do_test_get_sites_devices_status(resources)

        self._do_test_find_site_data_products(resources)

        self._do_test_find_related_frames_of_reference(resources)

        self._do_test_create_geospatial_point_center(resources)

        self._do_test_find_observatory_org(resources)

        self.destroy(resources)

    def _do_test_find_related_sites(self, resources):
        """Verify find_related_sites returns the expected site tree under the org."""

        site_resources, site_children, _, _ = self.OMS.find_related_sites(resources.org_id)

        #import sys, pprint
        #print >> sys.stderr, pprint.pformat(site_resources)
        #print >> sys.stderr, pprint.pformat(site_children)

        #self.assertIn(resources.org_id, site_resources)
        self.assertIn(resources.observatory_id, site_resources)
        self.assertIn(resources.subsite_id, site_resources)
        self.assertIn(resources.subsite_id, site_resources)
        self.assertIn(resources.subsite2_id, site_resources)
        self.assertIn(resources.platform_site_id, site_resources)
        self.assertIn(resources.instrument_site_id, site_resources)
        # 13 site resources total — matches the count created in _make_associations
        self.assertEquals(len(site_resources), 13)

        self.assertEquals(site_resources[resources.observatory_id].type_, RT.Observatory)

        self.assertIn(resources.org_id, site_children)
        self.assertIn(resources.observatory_id, site_children)
        self.assertIn(resources.subsite_id, site_children)
        self.assertIn(resources.subsite_id, site_children)
        self.assertIn(resources.subsite2_id, site_children)
        self.assertIn(resources.platform_site_id, site_children)
        # instrument sites are leaves, so they have no children entry
        self.assertNotIn(resources.instrument_site_id, site_children)
        self.assertEquals(len(site_children), 9)

        self.assertIsInstance(site_children[resources.subsite_id], list)
        self.assertEquals(len(site_children[resources.subsite_id]), 2)

    def _do_test_get_sites_devices_status(self, resources):
        """Verify get_sites_devices_status counts for org-rooted and observatory-rooted queries."""
        #bin/nosetests -s -v --nologcapture ion/services/sa/observatory/test/test_observatory_management_service_integration.py:TestObservatoryManagementServiceIntegration.test_observatory_management

        full_result_dict = self.OMS.get_sites_devices_status(parent_resource_ids=[resources.org_id], include_sites=True)

        result_dict = full_result_dict[resources.org_id]

        site_resources = result_dict.get("site_resources", None)
        site_children = result_dict.get("site_children", None)

        # org-rooted result includes the org itself: 13 sites + 1 = 14
        self.assertEquals(len(site_resources), 14)
        self.assertEquals(len(site_children), 9)

        full_result_dict = self.OMS.get_sites_devices_status(parent_resource_ids=[resources.org_id], include_sites=True, include_devices=True, include_status=True)

        result_dict = full_result_dict[resources.org_id]

        log.debug("RESULT DICT: %s", result_dict.keys())

        site_resources = result_dict.get("site_resources", None)
        site_children = result_dict.get("site_children", None)
        site_status = result_dict.get("site_status", None)

        self.assertEquals(len(site_resources), 14)
        self.assertEquals(len(site_children), 9)

        full_result_dict = self.OMS.get_sites_devices_status(parent_resource_ids=[resources.observatory_id], include_sites=True, include_devices=True, include_status=True)

        result_dict = full_result_dict[resources.observatory_id]

        site_resources = result_dict.get("site_resources")
        site_children = result_dict.get("site_children")
        site_status = result_dict.get("site_status")

        # observatory-rooted result excludes the org: one fewer resource and child entry
        self.assertEquals(len(site_resources), 13)
        self.assertEquals(len(site_children), 8)

    def _do_test_find_site_data_products(self, resources):
        """Verify find_site_data_products maps devices (not raw data products) under the org."""
        res_dict = self.OMS.find_site_data_products(resources.org_id)

        #import sys, pprint
        #print >> sys.stderr, pprint.pformat(res_dict)

        self.assertIsNone(res_dict['data_product_resources'])
        self.assertIn(resources.platform_device_id, res_dict['device_data_products'])
        self.assertIn(resources.instrument_device_id, res_dict['device_data_products'])

    #@unittest.skip('targeting')
    def _do_test_find_related_frames_of_reference(self, stuff):
        """Traverse the site tree up and down via find_related_frames_of_reference."""
        # finding subordinates gives a dict of obj lists, convert objs to ids
        def idify(adict):
            ids = {}
            for k, v in adict.iteritems():
                ids[k] = []
                for obj in v:
                    ids[k].append(obj._id)
            return ids

        # a short version of the function we're testing, with id-ify
        def short(resource_id, output_types):
            ret = self.OMS.find_related_frames_of_reference(resource_id, output_types)
            return idify(ret)

        #set up associations first
        # NOTE(review): the `stuff` parameter is immediately shadowed here — the
        # caller's argument is unused; confirm whether that is intentional.
        stuff = self._make_associations()

        #basic traversal of tree from instrument to platform
        ids = short(stuff.instrument_site_id, [RT.PlatformSite])
        self.assertIn(RT.PlatformSite, ids)
        self.assertIn(stuff.platform_site_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_siteb_id, ids[RT.PlatformSite])
        self.assertNotIn(stuff.platform_siteb2_id, ids[RT.PlatformSite])

        #since this is the first search, just make sure the input inst_id got stripped
        if RT.InstrumentSite in ids:
            self.assertNotIn(stuff.instrument_site_id, ids[RT.InstrumentSite])

        #basic traversal of tree from platform to instrument
        ids = short(stuff.platform_siteb_id, [RT.InstrumentSite])
        self.assertIn(RT.InstrumentSite, ids)
        self.assertIn(stuff.instrument_site_id, ids[RT.InstrumentSite])
        self.assertNotIn(stuff.instrument_site2_id, ids[RT.InstrumentSite])

        #full traversal of tree from observatory down to instrument
        ids = short(stuff.observatory_id, [RT.InstrumentSite])
        self.assertIn(RT.InstrumentSite, ids)
        self.assertIn(stuff.instrument_site_id, ids[RT.InstrumentSite])

        #full traversal of tree from instrument to observatory
        ids = short(stuff.instrument_site_id, [RT.Observatory])
        self.assertIn(RT.Observatory, ids)
        self.assertIn(stuff.observatory_id, ids[RT.Observatory])

        #partial traversal, only down to platform
        ids = short(stuff.observatory_id, [RT.Subsite, RT.PlatformSite])
        self.assertIn(RT.PlatformSite, ids)
        self.assertIn(RT.Subsite, ids)
        self.assertIn(stuff.platform_site_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_siteb_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_siteb2_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_site3_id, ids[RT.PlatformSite])
        self.assertIn(stuff.subsite_id, ids[RT.Subsite])
        self.assertIn(stuff.subsite2_id, ids[RT.Subsite])
        self.assertIn(stuff.subsitez_id, ids[RT.Subsite])
        self.assertIn(stuff.subsiteb_id, ids[RT.Subsite])
        self.assertNotIn(RT.InstrumentSite, ids)

        #partial traversal, only down to platform
        ids = short(stuff.instrument_site_id, [RT.Subsite, RT.PlatformSite])
        self.assertIn(RT.PlatformSite, ids)
        self.assertIn(RT.Subsite, ids)
        self.assertIn(stuff.platform_siteb_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_site_id, ids[RT.PlatformSite])
        self.assertIn(stuff.subsite_id, ids[RT.Subsite])
        self.assertIn(stuff.subsiteb_id, ids[RT.Subsite])
        self.assertNotIn(stuff.subsite2_id, ids[RT.Subsite])
        self.assertNotIn(stuff.subsitez_id, ids[RT.Subsite])
        self.assertNotIn(stuff.platform_siteb2_id, ids[RT.PlatformSite])
        self.assertNotIn(RT.Observatory, ids)

        self.destroy(stuff)

    def _make_associations(self):
        """
        create one of each resource and association used by OMS
        to guard against problems in ion-definitions
        """

        #raise unittest.SkipTest("https://jira.oceanobservatories.org/tasks/browse/CISWCORE-41")

        """
        the tree we're creating (observatory, sites, platforms, instruments)

        rows are lettered, colums numbered.
        - first row is implied a
        - first column is implied 1
        - site Z, just because

        O--Sz
        |
        S--S2--P3--I4
        |
        Sb-Pb2-Ib3
        |
        P--I2 <- PlatformDevice, InstrumentDevice2
        |
        Pb <- PlatformDevice b
        |
        I <- InstrumentDevice
        """

        org_id = self.OMS.create_marine_facility(any_old(RT.Org))

        def create_under_org(resource_type, extra_fields=None):
            # create via IMS for instrument devices (extra bookkeeping), RR otherwise
            obj = any_old(resource_type, extra_fields)

            if RT.InstrumentDevice == resource_type:
                resource_id = self.IMS.create_instrument_device(obj)
            else:
                resource_id, _ = self.RR.create(obj)

            self.OMS.assign_resource_to_observatory_org(resource_id=resource_id, org_id=org_id)
            return resource_id

        #stuff we control
        observatory_id = create_under_org(RT.Observatory)
        subsite_id = create_under_org(RT.Subsite)
        subsite2_id = create_under_org(RT.Subsite)
        subsiteb_id = create_under_org(RT.Subsite)
        subsitez_id = create_under_org(RT.Subsite)
        platform_site_id = create_under_org(RT.PlatformSite)
        platform_siteb_id = create_under_org(RT.PlatformSite)
        platform_siteb2_id = create_under_org(RT.PlatformSite)
        platform_site3_id = create_under_org(RT.PlatformSite)
        instrument_site_id = create_under_org(RT.InstrumentSite)
        instrument_site2_id = create_under_org(RT.InstrumentSite)
        instrument_siteb3_id = create_under_org(RT.InstrumentSite)
        instrument_site4_id = create_under_org(RT.InstrumentSite)

        #stuff we associate to
        instrument_device_id = create_under_org(RT.InstrumentDevice)
        instrument_device2_id = create_under_org(RT.InstrumentDevice)
        platform_device_id = create_under_org(RT.PlatformDevice)
        platform_deviceb_id = create_under_org(RT.PlatformDevice)
        instrument_model_id, _ = self.RR.create(any_old(RT.InstrumentModel))
        platform_model_id, _ = self.RR.create(any_old(RT.PlatformModel))
        deployment_id, _ = self.RR.create(any_old(RT.Deployment))

        # marine tracking resources
        asset_id = create_under_org(RT.Asset)
        asset_type_id = create_under_org(RT.AssetType)
        event_duration_id = create_under_org(RT.EventDuration)
        event_duration_type_id = create_under_org(RT.EventDurationType)

        #observatory
        self.RR.create_association(observatory_id, PRED.hasSite, subsite_id)
        self.RR.create_association(observatory_id, PRED.hasSite, subsitez_id)

        #site
        self.RR.create_association(subsite_id, PRED.hasSite, subsite2_id)
        self.RR.create_association(subsite_id, PRED.hasSite, subsiteb_id)
        self.RR.create_association(subsite2_id, PRED.hasSite, platform_site3_id)
        self.RR.create_association(subsiteb_id, PRED.hasSite, platform_siteb2_id)
        self.RR.create_association(subsiteb_id, PRED.hasSite, platform_site_id)

        #platform_site(s)
        self.RR.create_association(platform_site3_id, PRED.hasSite, instrument_site4_id)
        self.RR.create_association(platform_siteb2_id, PRED.hasSite, instrument_siteb3_id)
        self.RR.create_association(platform_site_id, PRED.hasSite, instrument_site2_id)
        self.RR.create_association(platform_site_id, PRED.hasSite, platform_siteb_id)
        self.RR.create_association(platform_siteb_id, PRED.hasSite, instrument_site_id)

        self.RR.create_association(platform_siteb_id, PRED.hasDevice, platform_deviceb_id)
        #test network parent link
        self.OMS.assign_device_to_network_parent(platform_device_id, platform_deviceb_id)

        self.RR.create_association(platform_site_id, PRED.hasModel, platform_model_id)
        self.RR.create_association(platform_site_id, PRED.hasDevice, platform_device_id)
        self.RR.create_association(platform_site_id, PRED.hasDeployment, deployment_id)

        #instrument_site(s)
        self.RR.create_association(instrument_site_id, PRED.hasModel, instrument_model_id)
        self.RR.create_association(instrument_site_id, PRED.hasDevice, instrument_device_id)
        self.RR.create_association(instrument_site_id, PRED.hasDeployment, deployment_id)

        self.RR.create_association(instrument_site2_id, PRED.hasDevice, instrument_device2_id)

        #platform_device
        self.RR.create_association(platform_device_id, PRED.hasModel, platform_model_id)

        #instrument_device
        self.RR.create_association(instrument_device_id, PRED.hasModel, instrument_model_id)
        self.RR.create_association(instrument_device2_id, PRED.hasModel, instrument_model_id)

        # (truncated at chunk boundary — the returned DotDict continues past this chunk)
        ret = 
DotDict() ret.org_id = org_id ret.observatory_id = observatory_id ret.subsite_id = subsite_id ret.subsite2_id = subsite2_id ret.subsiteb_id = subsiteb_id ret.subsitez_id = subsitez_id ret.platform_site_id = platform_site_id ret.platform_siteb_id = platform_siteb_id ret.platform_siteb2_id = platform_siteb2_id ret.platform_site3_id = platform_site3_id ret.instrument_site_id = instrument_site_id ret.instrument_site2_id = instrument_site2_id ret.instrument_siteb3_id = instrument_siteb3_id ret.instrument_site4_id = instrument_site4_id ret.instrument_device_id = instrument_device_id ret.instrument_device2_id = instrument_device2_id ret.platform_device_id = platform_device_id ret.platform_deviceb_id = platform_deviceb_id ret.instrument_model_id = instrument_model_id ret.platform_model_id = platform_model_id ret.deployment_id = deployment_id ret.asset_id = asset_id ret.asset_type_id = asset_type_id ret.event_duration_id = event_duration_id ret.event_duration_type_id = event_duration_type_id return ret #@unittest.skip("targeting") def test_create_observatory(self): observatory_obj = IonObject(RT.Observatory, name='TestFacility', description='some new mf') observatory_id = self.OMS.create_observatory(observatory_obj) self.OMS.force_delete_observatory(observatory_id) #@unittest.skip("targeting") def _do_test_create_geospatial_point_center(self, resources): platformsite_obj = IonObject(RT.PlatformSite, name='TestPlatformSite', description='some new TestPlatformSite') geo_index_obj = IonObject(OT.GeospatialBounds) geo_index_obj.geospatial_latitude_limit_north = 20.0 geo_index_obj.geospatial_latitude_limit_south = 10.0 geo_index_obj.geospatial_longitude_limit_east = 15.0 geo_index_obj.geospatial_longitude_limit_west = 20.0 platformsite_obj.constraint_list = [geo_index_obj] platformsite_id = self.OMS.create_platform_site(platformsite_obj) # now get the dp back to see if it was updated platformsite_obj = self.OMS.read_platform_site(platformsite_id) self.assertEquals('some new 
TestPlatformSite', platformsite_obj.description) self.assertAlmostEqual(15.0, platformsite_obj.geospatial_point_center.lat, places=1) #now adjust a few params platformsite_obj.description = 'some old TestPlatformSite' geo_index_obj = IonObject(OT.GeospatialBounds) geo_index_obj.geospatial_latitude_limit_north = 30.0 geo_index_obj.geospatial_latitude_limit_south = 20.0 platformsite_obj.constraint_list = [geo_index_obj] update_result = self.OMS.update_platform_site(platformsite_obj) # now get the dp back to see if it was updated platformsite_obj = self.OMS.read_platform_site(platformsite_id) self.assertEquals('some old TestPlatformSite', platformsite_obj.description) self.assertAlmostEqual(25.0, platformsite_obj.geospatial_point_center.lat, places=1) self.OMS.force_delete_platform_site(platformsite_id) #@unittest.skip("targeting") def _do_test_find_observatory_org(self, resources): log.debug("Make TestOrg") org_obj = IonObject(RT.Org, name='TestOrg', description='some new mf org') org_id = self.OMS.create_marine_facility(org_obj) log.debug("Make Observatory") observatory_obj = IonObject(RT.Observatory, name='TestObservatory', description='some new obs') observatory_id = self.OMS.create_observatory(observatory_obj) log.debug("assign observatory to org") self.OMS.assign_resource_to_observatory_org(observatory_id, org_id) log.debug("verify assigment") org_objs = self.OMS.find_org_by_observatory(observatory_id) self.assertEqual(1, len(org_objs)) self.assertEqual(org_id, org_objs[0]._id) log.debug("org_id=<" + org_id + ">") log.debug("create a subsite with parent Observatory") subsite_obj = IonObject(RT.Subsite, name='TestSubsite', description='sample subsite') subsite_id = self.OMS.create_subsite(subsite_obj, observatory_id) self.assertIsNotNone(subsite_id, "Subsite not created.") log.debug("verify that Subsite is linked to Observatory") mf_subsite_assoc = self.RR.get_association(observatory_id, PRED.hasSite, subsite_id) self.assertIsNotNone(mf_subsite_assoc, "Subsite 
not connected to Observatory.") log.debug("add the Subsite as a resource of this Observatory") self.OMS.assign_resource_to_observatory_org(resource_id=subsite_id, org_id=org_id) log.debug("verify that Subsite is linked to Org") org_subsite_assoc = self.RR.get_association(org_id, PRED.hasResource, subsite_id) self.assertIsNotNone(org_subsite_assoc, "Subsite not connected as resource to Org.") log.debug("create a logical platform with parent Subsite") platform_site_obj = IonObject(RT.PlatformSite, name='TestPlatformSite', description='sample logical platform') platform_site_id = self.OMS.create_platform_site( platform_site_obj, subsite_id) self.assertIsNotNone(platform_site_id, "PlatformSite not created.") log.debug("verify that PlatformSite is linked to Site") site_lp_assoc = self.RR.get_association(subsite_id, PRED.hasSite, platform_site_id) self.assertIsNotNone(site_lp_assoc, "PlatformSite not connected to Site.") log.debug("add the PlatformSite as a resource of this Observatory") self.OMS.assign_resource_to_observatory_org( resource_id=platform_site_id, org_id=org_id) log.debug("verify that PlatformSite is linked to Org") org_lp_assoc = self.RR.get_association(org_id, PRED.hasResource, platform_site_id) self.assertIsNotNone(org_lp_assoc, "PlatformSite not connected as resource to Org.") log.debug("create a logical instrument with parent logical platform") instrument_site_obj = IonObject( RT.InstrumentSite, name='TestInstrumentSite', description='sample logical instrument') instrument_site_id = self.OMS.create_instrument_site( instrument_site_obj, platform_site_id) self.assertIsNotNone(instrument_site_id, "InstrumentSite not created.") log.debug("verify that InstrumentSite is linked to PlatformSite") li_lp_assoc = self.RR.get_association(platform_site_id, PRED.hasSite, instrument_site_id) self.assertIsNotNone(li_lp_assoc, "InstrumentSite not connected to PlatformSite.") log.debug("add the InstrumentSite as a resource of this Observatory") 
self.OMS.assign_resource_to_observatory_org( resource_id=instrument_site_id, org_id=org_id) log.debug("verify that InstrumentSite is linked to Org") org_li_assoc = self.RR.get_association(org_id, PRED.hasResource, instrument_site_id) self.assertIsNotNone( org_li_assoc, "InstrumentSite not connected as resource to Org.") log.debug( "remove the InstrumentSite as a resource of this Observatory") self.OMS.unassign_resource_from_observatory_org( instrument_site_id, org_id) log.debug("verify that InstrumentSite is linked to Org") assocs, _ = self.RR.find_objects(org_id, PRED.hasResource, RT.InstrumentSite, id_only=True) self.assertEqual(0, len(assocs)) log.debug( "remove the InstrumentSite, association should drop automatically") self.OMS.delete_instrument_site(instrument_site_id) assocs, _ = self.RR.find_objects(platform_site_id, PRED.hasSite, RT.InstrumentSite, id_only=True) self.assertEqual(0, len(assocs)) log.debug("remove the PlatformSite as a resource of this Observatory") self.OMS.unassign_resource_from_observatory_org( platform_site_id, org_id) log.debug("verify that PlatformSite is linked to Org") assocs, _ = self.RR.find_objects(org_id, PRED.hasResource, RT.PlatformSite, id_only=True) self.assertEqual(0, len(assocs)) log.debug("remove the Site as a resource of this Observatory") self.OMS.unassign_resource_from_observatory_org(subsite_id, org_id) log.debug("verify that Site is linked to Org") assocs, _ = self.RR.find_objects(org_id, PRED.hasResource, RT.Subsite, id_only=True) self.assertEqual(0, len(assocs)) self.RR.delete(org_id) self.OMS.force_delete_observatory(observatory_id) self.OMS.force_delete_subsite(subsite_id) self.OMS.force_delete_platform_site(platform_site_id) self.OMS.force_delete_instrument_site(instrument_site_id) @attr('EXT') @unittest.skipIf(os.getenv( 'CEI_LAUNCH_TEST', False ), 'Skip test while in CEI LAUNCH mode as it depends on modifying CFG on service side' ) def test_observatory_extensions(self): self.patch_cfg(CFG["container"], 
{"extended_resources": { "strip_results": False }}) obs_id = self.RR2.create(any_old(RT.Observatory)) pss_id = self.RR2.create( any_old(RT.PlatformSite, dict(alt_resource_type="StationSite"))) pas_id = self.RR2.create( any_old(RT.PlatformSite, dict(alt_resource_type="PlatformAssemblySite"))) pcs_id = self.RR2.create( any_old(RT.PlatformSite, dict(alt_resource_type="PlatformComponentSite"))) ins_id = self.RR2.create(any_old(RT.InstrumentSite)) obs_obj = self.RR2.read(obs_id) pss_obj = self.RR2.read(pss_id) pas_obj = self.RR2.read(pas_id) pcs_obj = self.RR2.read(pcs_id) ins_obj = self.RR2.read(ins_id) self.RR2.create_association(obs_id, PRED.hasSite, pss_id) self.RR2.create_association(pss_id, PRED.hasSite, pas_id) self.RR2.create_association(pas_id, PRED.hasSite, pcs_id) self.RR2.create_association(pcs_id, PRED.hasSite, ins_id) extended_obs = self.OMS.get_observatory_site_extension(obs_id, user_id=12345) self.assertEqual([pss_obj], extended_obs.platform_station_sites) self.assertEqual([pas_obj], extended_obs.platform_assembly_sites) self.assertEqual([pcs_obj], extended_obs.platform_component_sites) self.assertEqual([ins_obj], extended_obs.instrument_sites) extended_pss = self.OMS.get_observatory_site_extension(obs_id, user_id=12345) self.assertEqual([pas_obj], extended_pss.platform_assembly_sites) self.assertEqual([pcs_obj], extended_pss.platform_component_sites) self.assertEqual([ins_obj], extended_pss.instrument_sites) extended_pas = self.OMS.get_observatory_site_extension(pas_id, user_id=12345) self.assertEqual([pcs_obj], extended_pas.platform_component_sites) self.assertEqual([ins_obj], extended_pas.instrument_sites) extended_pcs = self.OMS.get_platform_component_site_extension( pcs_id, user_id=12345) self.assertEqual([ins_obj], extended_pcs.instrument_sites) #@unittest.skip("in development...") @attr('EXT') @attr('EXT1') @unittest.skipIf(os.getenv( 'CEI_LAUNCH_TEST', False ), 'Skip test while in CEI LAUNCH mode as it depends on modifying CFG on service side' ) 
def test_observatory_org_extended(self): self.patch_cfg(CFG["container"], {"extended_resources": { "strip_results": False }}) stuff = self._make_associations() parsed_pdict_id = self.dataset_management.read_parameter_dictionary_by_name( 'ctd_parsed_param_dict', id_only=True) parsed_stream_def_id = self.pubsubcli.create_stream_definition( name='parsed', parameter_dictionary_id=parsed_pdict_id) dp_obj = IonObject(RT.DataProduct, name='the parsed data', description='ctd stream test') data_product_id1 = self.dpclient.create_data_product( data_product=dp_obj, stream_definition_id=parsed_stream_def_id) self.damsclient.assign_data_product( input_resource_id=stuff.instrument_device_id, data_product_id=data_product_id1) #Create a user to be used as regular member member_actor_obj = IonObject(RT.ActorIdentity, name='org member actor') member_actor_id, _ = self.RR.create(member_actor_obj) assert (member_actor_id) member_actor_header = get_actor_header(member_actor_id) member_user_obj = IonObject(RT.UserInfo, name='org member user') member_user_id, _ = self.RR.create(member_user_obj) assert (member_user_id) self.RR.create_association(subject=member_actor_id, predicate=PRED.hasInfo, object=member_user_id) #Build the Service Agreement Proposal to enroll a user actor sap = IonObject(OT.EnrollmentProposal, consumer=member_actor_id, provider=stuff.org_id) sap_response = self.org_management_service.negotiate( sap, headers=member_actor_header) #enroll the member without using negotiation self.org_management_service.enroll_member(org_id=stuff.org_id, actor_id=member_actor_id) #-------------------------------------------------------------------------------- # Get the extended Site (platformSite) #-------------------------------------------------------------------------------- try: extended_site = self.OMS.get_site_extension(stuff.platform_site_id) except: log.error('failed to get extended site', exc_info=True) raise log.debug("extended_site: %r ", extended_site) 
self.assertEquals(stuff.subsiteb_id, extended_site.parent_site._id) self.assertEqual(2, len(extended_site.sites)) self.assertEqual(2, len(extended_site.platform_devices)) self.assertEqual(2, len(extended_site.platform_models)) self.assertIn(stuff.platform_device_id, [o._id for o in extended_site.platform_devices]) self.assertIn( stuff.platform_model_id, [o._id for o in extended_site.platform_models if o is not None]) log.debug( "verify that PlatformDeviceb is linked to PlatformDevice with hasNetworkParent link" ) associations = self.RR.find_associations( subject=stuff.platform_deviceb_id, predicate=PRED.hasNetworkParent, object=stuff.platform_device_id, id_only=True) self.assertIsNotNone( associations, "PlatformDevice child not connected to PlatformDevice parent.") #-------------------------------------------------------------------------------- # Get the extended Org #-------------------------------------------------------------------------------- #test the extended resource extended_org = self.OMS.get_marine_facility_extension(stuff.org_id) log.debug("test_observatory_org_extended: extended_org: %s ", str(extended_org)) #self.assertEqual(2, len(extended_org.instruments_deployed) ) #self.assertEqual(1, len(extended_org.platforms_not_deployed) ) self.assertEqual(2, extended_org.number_of_platforms) self.assertEqual(2, len(extended_org.platform_models)) self.assertEqual(2, extended_org.number_of_instruments) self.assertEqual(2, len(extended_org.instrument_models)) self.assertEqual(1, len(extended_org.members)) self.assertNotEqual(extended_org.members[0]._id, member_actor_id) self.assertEqual(extended_org.members[0]._id, member_user_id) self.assertEqual(1, len(extended_org.open_requests)) self.assertTrue(len(extended_site.deployments) > 0) self.assertEqual(len(extended_site.deployments), len(extended_site.deployment_info)) self.assertEqual(1, extended_org.number_of_assets) self.assertEqual(1, extended_org.number_of_asset_types) self.assertEqual(1, 
extended_org.number_of_event_durations) self.assertEqual(1, extended_org.number_of_event_duration_types) #test the extended resource of the ION org ion_org_id = self.org_management_service.find_org() extended_org = self.OMS.get_marine_facility_extension(ion_org_id._id, user_id=12345) log.debug("test_observatory_org_extended: extended_ION_org: %s ", str(extended_org)) self.assertEqual(1, len(extended_org.members)) self.assertEqual(0, extended_org.number_of_platforms) #self.assertEqual(1, len(extended_org.sites)) #-------------------------------------------------------------------------------- # Get the extended Site #-------------------------------------------------------------------------------- #create device state events to use for op /non-op filtering in extended t = get_ion_ts() self.event_publisher.publish_event( ts_created=t, event_type='ResourceAgentStateEvent', origin=stuff.instrument_device_id, state=ResourceAgentState.STREAMING) self.event_publisher.publish_event( ts_created=t, event_type='ResourceAgentStateEvent', origin=stuff.instrument_device2_id, state=ResourceAgentState.INACTIVE) extended_site = self.OMS.get_site_extension(stuff.instrument_site2_id) log.debug("test_observatory_org_extended: extended_site: %s ", str(extended_site)) self.dpclient.delete_data_product(data_product_id1)
class TestResourceRegistryAttachments(IonIntegrationTestCase): def setUp(self): # Start container #print 'instantiating container' self._start_container() #container = Container() #print 'starting container' #container.start() #print 'started container' self.container.start_rel_from_url('res/deploy/r2deploy.yml') self.RR = ResourceRegistryServiceClient(node=self.container.node) print 'started services' def test_resource_registry_blob_sanity(self): resource_id, _ = self.RR.create(IonObject(RT.Resource, name="foo")) MY_CONTENT = "the quick brown fox etc etc etc" #save att_id = self.RR.create_attachment( resource_id, IonObject(RT.Attachment, name="test.txt", content=MY_CONTENT, content_type="text/plain", keywords=["test1", "test2"], attachment_type=AttachmentType.BLOB)) #load attachment = self.RR.read_attachment(att_id, include_content=True) self.assertEqual("test.txt", attachment.name) self.assertEqual("text/plain", attachment.content_type) self.assertIn("test1", attachment.keywords) self.assertIn("test2", attachment.keywords) #content has changed; it's base64-encoded from what we put in self.assertEqual(MY_CONTENT, attachment.content) obj = self.RR.read(resource_id) self.assertEqual(obj.name, "foo") obj.name = "TheDudeAbides" obj = self.RR.update(obj) obj = self.RR.read(resource_id) self.assertEqual(obj.name, "TheDudeAbides") att = self.RR.find_attachments(resource_id) self.assertNotEqual(att, None) actor_identity_obj = IonObject("ActorIdentity", name="name") actor_identity_obj_id, actor_identity_obj_rev = self.RR.create( actor_identity_obj) user_info_obj = IonObject("UserInfo", name="name") user_info_obj_id, user_info_obj_rev = self.RR.create(user_info_obj) assoc_id, assoc_rev = self.RR.create_association( actor_identity_obj_id, PRED.hasInfo, user_info_obj_id) self.assertNotEqual(assoc_id, None) find_assoc = self.RR.find_associations(actor_identity_obj_id, PRED.hasInfo, user_info_obj_id) self.assertTrue(find_assoc[0]._id == assoc_id) subj = 
self.RR.find_subjects(RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, True) res_obj1 = self.RR.read_object(actor_identity_obj_id, PRED.hasInfo, RT.UserInfo) self.assertEquals(res_obj1._id, user_info_obj_id) self.RR.delete_association(assoc_id) self.RR.delete_attachment(att_id) self.RR.delete(resource_id)
class TestDataProductProvenance(IonIntegrationTestCase): def setUp(self): # Start container #print 'instantiating container' self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml') # Now create client to DataProductManagementService self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient( node=self.container.node) self.pubsubclient = PubsubManagementServiceClient( node=self.container.node) self.ingestclient = IngestionManagementServiceClient( node=self.container.node) self.dpmsclient = DataProductManagementServiceClient( node=self.container.node) self.dataprocessclient = DataProcessManagementServiceClient( node=self.container.node) self.imsclient = InstrumentManagementServiceClient( node=self.container.node) self.omsclient = ObservatoryManagementServiceClient( node=self.container.node) self.process_dispatcher = ProcessDispatcherServiceClient() self.dataset_management = DatasetManagementServiceClient() # deactivate all data processes when tests are complete def killAllDataProcesses(): for proc_id in self.rrclient.find_resources( RT.DataProcess, None, None, True)[0]: self.dataprocessclient.deactivate_data_process(proc_id) self.dataprocessclient.delete_data_process(proc_id) self.addCleanup(killAllDataProcesses) def test_get_data_product_provenance_report(self): #Create a test device device_obj = Device(name='Device1', description='test instrument site') device_id, _ = self.rrclient.create(device_obj) self.addCleanup(self.rrclient.delete, device_id) #Create a test DataProduct data_product1_obj = DataProduct(name='DataProduct1', description='test data product 1') data_product1_id, _ = self.rrclient.create(data_product1_obj) self.addCleanup(self.rrclient.delete, data_product1_id) #Create a test DataProcess data_process_obj = DataProcess(name='DataProcess', description='test data process') data_process_id, _ = self.rrclient.create(data_process_obj) 
self.addCleanup(self.rrclient.delete, data_process_id) #Create a second test DataProduct data_product2_obj = DataProduct(name='DataProduct2', description='test data product 2') data_product2_id, _ = self.rrclient.create(data_product2_obj) self.addCleanup(self.rrclient.delete, data_product2_id) #Create a test DataProducer data_producer_obj = DataProducer(name='DataProducer', description='test data producer') data_producer_id, rev = self.rrclient.create(data_producer_obj) #Link the DataProcess to the second DataProduct manually assoc_id, _ = self.rrclient.create_association( subject=data_process_id, predicate=PRED.hasInputProduct, object=data_product2_id) self.addCleanup(self.rrclient.delete_association, assoc_id) # Register the instrument and process. This links the device and the data process # with their own producers self.damsclient.register_instrument(device_id) self.addCleanup(self.damsclient.unregister_instrument, device_id) self.damsclient.register_process(data_process_id) self.addCleanup(self.damsclient.unregister_process, data_process_id) #Manually link the first DataProduct with the test DataProducer assoc_id, _ = self.rrclient.create_association( subject=data_product1_id, predicate=PRED.hasDataProducer, object=data_producer_id) #Get the DataProducer linked to the DataProcess (created in register_process above) #Associate that with with DataProduct1's DataProducer data_process_producer_ids, _ = self.rrclient.find_objects( subject=data_process_id, predicate=PRED.hasDataProducer, object_type=RT.DataProducer, id_only=True) assoc_id, _ = self.rrclient.create_association( subject=data_process_producer_ids[0], predicate=PRED.hasParent, object=data_producer_id) self.addCleanup(self.rrclient.delete_association, assoc_id) #Get the DataProducer linked to the Device (created in register_instrument #Associate that with the DataProcess's DataProducer device_producer_ids, _ = self.rrclient.find_objects( subject=device_id, predicate=PRED.hasDataProducer, 
object_type=RT.DataProducer, id_only=True) assoc_id, _ = self.rrclient.create_association( subject=data_producer_id, predicate=PRED.hasParent, object=device_producer_ids[0]) #Create the links between the Device, DataProducts, DataProcess, and all DataProducers self.damsclient.assign_data_product(input_resource_id=device_id, data_product_id=data_product1_id) self.addCleanup(self.damsclient.unassign_data_product, device_id, data_product1_id) self.damsclient.assign_data_product(input_resource_id=data_process_id, data_product_id=data_product2_id) self.addCleanup(self.damsclient.unassign_data_product, data_process_id, data_product2_id) #Traverse through the relationships to get the links between objects res = self.dpmsclient.get_data_product_provenance_report( data_product2_id) #Make sure there are four keys self.assertEqual(len(res.keys()), 4) parent_count = 0 config_count = 0 for v in res.itervalues(): if 'parent' in v: parent_count += 1 if 'config' in v: config_count += 1 #Make sure there are three parents and four configs self.assertEqual(parent_count, 3) self.assertEqual(config_count, 4) @unittest.skip('This test is obsolete with new framework') def test_get_provenance(self): #create a deployment with metadata and an initial site and device instrument_site_obj = IonObject(RT.InstrumentSite, name='InstrumentSite1', description='test instrument site') instrument_site_id = self.omsclient.create_instrument_site( instrument_site_obj, "") log.debug('test_get_provenance: new instrument_site_id id = %s ', str(instrument_site_id)) # Create InstrumentModel instModel_obj = IonObject(RT.InstrumentModel, name='SBE37IMModel', description="SBE37IMModel") try: instModel_id = self.imsclient.create_instrument_model( instModel_obj) except BadRequest as ex: self.fail("failed to create new InstrumentModel: %s" % ex) log.debug('test_get_provenance: new InstrumentModel id = %s ', str(instModel_id)) self.omsclient.assign_instrument_model_to_instrument_site( instModel_id, 
instrument_site_id) # Create InstrumentAgent parsed_config = StreamConfiguration( stream_name='parsed', parameter_dictionary_name='ctd_parsed_param_dict') instAgent_obj = IonObject(RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", driver_uri=DRV_URI_GOOD, stream_configurations=[parsed_config]) try: instAgent_id = self.imsclient.create_instrument_agent( instAgent_obj) except BadRequest as ex: self.fail("failed to create new InstrumentAgent: %s" % ex) log.debug('test_get_provenance:new InstrumentAgent id = %s', instAgent_id) self.imsclient.assign_instrument_model_to_instrument_agent( instModel_id, instAgent_id) # Create InstrumentDevice log.debug( 'test_get_provenance: Create instrument resource to represent the SBE37 (SA Req: L4-CI-SA-RQ-241) ' ) instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345") try: instDevice_id = self.imsclient.create_instrument_device( instrument_device=instDevice_obj) self.imsclient.assign_instrument_model_to_instrument_device( instModel_id, instDevice_id) except BadRequest as ex: self.fail("failed to create new InstrumentDevice: %s" % ex) log.debug( "test_get_provenance: new InstrumentDevice id = %s (SA Req: L4-CI-SA-RQ-241) ", instDevice_id) #------------------------------- # Create CTD Parsed data product #------------------------------- tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() pdict_id = self.dataset_management.read_parameter_dictionary_by_name( 'ctd_parsed_param_dict', id_only=True) parsed_stream_def_id = self.pubsubclient.create_stream_definition( name='parsed', parameter_dictionary_id=pdict_id) log.debug( 'test_get_provenance:Creating new CDM data product with a stream definition' ) dp_obj = IonObject(RT.DataProduct, name='the parsed data', description='ctd stream test', temporal_domain=tdom, spatial_domain=sdom) ctd_parsed_data_product = self.dpmsclient.create_data_product( data_product=dp_obj, 
stream_definition_id=parsed_stream_def_id) log.debug('new dp_id = %s', ctd_parsed_data_product) self.damsclient.assign_data_product( input_resource_id=instDevice_id, data_product_id=ctd_parsed_data_product) self.dpmsclient.activate_data_product_persistence( data_product_id=ctd_parsed_data_product) #------------------------------- # create a data product for the site to pass the OMS check.... we need to remove this check #------------------------------- dp_obj = IonObject(RT.DataProduct, name='DP1', description='some new dp', temporal_domain=tdom, spatial_domain=sdom) log_data_product_id = self.dpmsclient.create_data_product( dp_obj, parsed_stream_def_id) #------------------------------- # Deploy instrument device to instrument site #------------------------------- deployment_obj = IonObject(RT.Deployment, name='TestDeployment', description='some new deployment') deployment_id = self.omsclient.create_deployment(deployment_obj) self.omsclient.deploy_instrument_site(instrument_site_id, deployment_id) self.imsclient.deploy_instrument_device(instDevice_id, deployment_id) log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id)) self.omsclient.activate_deployment(deployment_id) inst_device_objs, _ = self.rrclient.find_objects( subject=instrument_site_id, predicate=PRED.hasDevice, object_type=RT.InstrumetDevice, id_only=False) log.debug("test_create_deployment: deployed device: %s ", str(inst_device_objs[0])) #------------------------------- # Create the agent instance #------------------------------- port_agent_config = { 'device_addr': CFG.device.sbe37.host, 'device_port': CFG.device.sbe37.port, 'process_type': PortAgentProcessType.UNIX, 'binary_path': "port_agent", 'port_agent_addr': 'localhost', 'command_port': CFG.device.sbe37.port_agent_cmd_port, 'data_port': CFG.device.sbe37.port_agent_data_port, 'log_level': 5, 'type': PortAgentType.ETHERNET } instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance', 
description="SBE37IMAgentInstance", port_agent_config=port_agent_config) instAgentInstance_id = self.imsclient.create_instrument_agent_instance( instAgentInstance_obj, instAgent_id, instDevice_id) #------------------------------- # L0 Conductivity - Temperature - Pressure: Data Process Definition #------------------------------- log.debug( "TestDataProductProvenance: create data process definition ctd_L0_all" ) dpd_obj = IonObject( RT.DataProcessDefinition, name='ctd_L0_all', description='transform ctd package into three separate L0 streams', module='ion.processes.data.transforms.ctd.ctd_L0_all', class_name='ctd_L0_all') try: ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition( dpd_obj) except BadRequest as ex: self.fail( "failed to create new ctd_L0_all data process definition: %s" % ex) #------------------------------- # L1 Conductivity: Data Process Definition #------------------------------- log.debug( "TestDataProductProvenance: create data process definition CTDL1ConductivityTransform" ) dpd_obj = IonObject( RT.DataProcessDefinition, name='ctd_L1_conductivity', description='create the L1 conductivity data product', module='ion.processes.data.transforms.ctd.ctd_L1_conductivity', class_name='CTDL1ConductivityTransform') try: ctd_L1_conductivity_dprocdef_id = self.dataprocessclient.create_data_process_definition( dpd_obj) except BadRequest as ex: self.fail( "failed to create new CTDL1ConductivityTransform data process definition: %s" % ex) #------------------------------- # L1 Pressure: Data Process Definition #------------------------------- log.debug( "TestDataProductProvenance: create data process definition CTDL1PressureTransform" ) dpd_obj = IonObject( RT.DataProcessDefinition, name='ctd_L1_pressure', description='create the L1 pressure data product', module='ion.processes.data.transforms.ctd.ctd_L1_pressure', class_name='CTDL1PressureTransform') try: ctd_L1_pressure_dprocdef_id = 
self.dataprocessclient.create_data_process_definition( dpd_obj) except BadRequest as ex: self.fail( "failed to create new CTDL1PressureTransform data process definition: %s" % ex) #------------------------------- # L1 Temperature: Data Process Definition #------------------------------- log.debug( "TestDataProductProvenance: create data process definition CTDL1TemperatureTransform" ) dpd_obj = IonObject( RT.DataProcessDefinition, name='ctd_L1_temperature', description='create the L1 temperature data product', module='ion.processes.data.transforms.ctd.ctd_L1_temperature', class_name='CTDL1TemperatureTransform') try: ctd_L1_temperature_dprocdef_id = self.dataprocessclient.create_data_process_definition( dpd_obj) except BadRequest as ex: self.fail( "failed to create new CTDL1TemperatureTransform data process definition: %s" % ex) #------------------------------- # L2 Salinity: Data Process Definition #------------------------------- log.debug( "TestDataProductProvenance: create data process definition SalinityTransform" ) dpd_obj = IonObject( RT.DataProcessDefinition, name='ctd_L2_salinity', description='create the L1 temperature data product', module='ion.processes.data.transforms.ctd.ctd_L2_salinity', class_name='SalinityTransform') try: ctd_L2_salinity_dprocdef_id = self.dataprocessclient.create_data_process_definition( dpd_obj) except BadRequest as ex: self.fail( "failed to create new SalinityTransform data process definition: %s" % ex) #------------------------------- # L2 Density: Data Process Definition #------------------------------- log.debug( "TestDataProductProvenance: create data process definition DensityTransform" ) dpd_obj = IonObject( RT.DataProcessDefinition, name='ctd_L2_density', description='create the L1 temperature data product', module='ion.processes.data.transforms.ctd.ctd_L2_density', class_name='DensityTransform') try: ctd_L2_density_dprocdef_id = self.dataprocessclient.create_data_process_definition( dpd_obj) except BadRequest as ex: 
self.fail( "failed to create new DensityTransform data process definition: %s" % ex) #------------------------------- # L0 Conductivity - Temperature - Pressure: Output Data Products #------------------------------- outgoing_stream_l0_conductivity_id = self.pubsubclient.create_stream_definition( name='L0_Conductivity', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition( outgoing_stream_l0_conductivity_id, ctd_L0_all_dprocdef_id, binding='conductivity') outgoing_stream_l0_pressure_id = self.pubsubclient.create_stream_definition( name='L0_Pressure', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition( outgoing_stream_l0_pressure_id, ctd_L0_all_dprocdef_id, binding='pressure') outgoing_stream_l0_temperature_id = self.pubsubclient.create_stream_definition( name='L0_Temperature', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition( outgoing_stream_l0_temperature_id, ctd_L0_all_dprocdef_id, binding='temperature') log.debug( "TestDataProductProvenance: create output data product L0 conductivity" ) ctd_l0_conductivity_output_dp_obj = IonObject( RT.DataProduct, name='L0_Conductivity', description='transform output conductivity', temporal_domain=tdom, spatial_domain=sdom) ctd_l0_conductivity_output_dp_id = self.dpmsclient.create_data_product( ctd_l0_conductivity_output_dp_obj, outgoing_stream_l0_conductivity_id) log.debug( "TestDataProductProvenance: create output data product L0 pressure" ) ctd_l0_pressure_output_dp_obj = IonObject( RT.DataProduct, name='L0_Pressure', description='transform output pressure', temporal_domain=tdom, spatial_domain=sdom) ctd_l0_pressure_output_dp_id = self.dpmsclient.create_data_product( ctd_l0_pressure_output_dp_obj, outgoing_stream_l0_pressure_id) log.debug( "TestDataProductProvenance: create output data product L0 temperature" ) ctd_l0_temperature_output_dp_obj = 
IonObject( RT.DataProduct, name='L0_Temperature', description='transform output temperature', temporal_domain=tdom, spatial_domain=sdom) ctd_l0_temperature_output_dp_id = self.dpmsclient.create_data_product( ctd_l0_temperature_output_dp_obj, outgoing_stream_l0_temperature_id) #------------------------------- # L1 Conductivity - Temperature - Pressure: Output Data Products #------------------------------- outgoing_stream_l1_conductivity_id = self.pubsubclient.create_stream_definition( name='L1_conductivity', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition( outgoing_stream_l1_conductivity_id, ctd_L1_conductivity_dprocdef_id, binding='conductivity') outgoing_stream_l1_pressure_id = self.pubsubclient.create_stream_definition( name='L1_Pressure', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition( outgoing_stream_l1_pressure_id, ctd_L1_pressure_dprocdef_id, binding='pressure') outgoing_stream_l1_temperature_id = self.pubsubclient.create_stream_definition( name='L1_Temperature', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition( outgoing_stream_l1_temperature_id, ctd_L1_temperature_dprocdef_id, binding='temperature') log.debug( "TestDataProductProvenance: create output data product L1 conductivity" ) ctd_l1_conductivity_output_dp_obj = IonObject( RT.DataProduct, name='L1_Conductivity', description='transform output L1 conductivity', temporal_domain=tdom, spatial_domain=sdom) ctd_l1_conductivity_output_dp_id = self.dpmsclient.create_data_product( ctd_l1_conductivity_output_dp_obj, outgoing_stream_l1_conductivity_id) log.debug( "TestDataProductProvenance: create output data product L1 pressure" ) ctd_l1_pressure_output_dp_obj = IonObject( RT.DataProduct, name='L1_Pressure', description='transform output L1 pressure', temporal_domain=tdom, spatial_domain=sdom) ctd_l1_pressure_output_dp_id = 
self.dpmsclient.create_data_product( ctd_l1_pressure_output_dp_obj, outgoing_stream_l1_pressure_id) log.debug( "TestDataProductProvenance: create output data product L1 temperature" ) ctd_l1_temperature_output_dp_obj = IonObject( RT.DataProduct, name='L1_Temperature', description='transform output L1 temperature', temporal_domain=tdom, spatial_domain=sdom) ctd_l1_temperature_output_dp_id = self.dpmsclient.create_data_product( ctd_l1_temperature_output_dp_obj, outgoing_stream_l1_temperature_id) #------------------------------- # L2 Salinity - Density: Output Data Products #------------------------------- outgoing_stream_l2_salinity_id = self.pubsubclient.create_stream_definition( name='L2_salinity', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition( outgoing_stream_l2_salinity_id, ctd_L2_salinity_dprocdef_id, binding='salinity') outgoing_stream_l2_density_id = self.pubsubclient.create_stream_definition( name='L2_Density', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition( outgoing_stream_l2_density_id, ctd_L2_density_dprocdef_id, binding='density') log.debug( "TestDataProductProvenance: create output data product L2 Salinity" ) ctd_l2_salinity_output_dp_obj = IonObject( RT.DataProduct, name='L2_Salinity', description='transform output L2 salinity', temporal_domain=tdom, spatial_domain=sdom) ctd_l2_salinity_output_dp_id = self.dpmsclient.create_data_product( ctd_l2_salinity_output_dp_obj, outgoing_stream_l2_salinity_id) log.debug( "TestDataProductProvenance: create output data product L2 Density") # ctd_l2_density_output_dp_obj = IonObject( RT.DataProduct, # name='L2_Density', # description='transform output pressure', # temporal_domain = tdom, # spatial_domain = sdom) # # ctd_l2_density_output_dp_id = self.dpmsclient.create_data_product(ctd_l2_density_output_dp_obj, # outgoing_stream_l2_density_id, # parameter_dictionary) contactInfo = 
ContactInformation() contactInfo.individual_names_given = "Bill" contactInfo.individual_name_family = "Smith" contactInfo.street_address = "111 First St" contactInfo.city = "San Diego" contactInfo.email = "*****@*****.**" contactInfo.phones = ["858-555-6666"] contactInfo.country = "USA" contactInfo.postal_code = "92123" ctd_l2_density_output_dp_obj = IonObject( RT.DataProduct, name='L2_Density', description='transform output pressure', contacts=[contactInfo], iso_topic_category="my_iso_topic_category_here", quality_control_level="1", temporal_domain=tdom, spatial_domain=sdom) ctd_l2_density_output_dp_id = self.dpmsclient.create_data_product( ctd_l2_density_output_dp_obj, outgoing_stream_l2_density_id) #------------------------------- # L0 Conductivity - Temperature - Pressure: Create the data process #------------------------------- log.debug( "TestDataProductProvenance: create L0 all data_process start") try: input_data_products = [ctd_parsed_data_product] output_data_products = [ ctd_l0_conductivity_output_dp_id, ctd_l0_pressure_output_dp_id, ctd_l0_temperature_output_dp_id ] ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process( data_process_definition_id=ctd_L0_all_dprocdef_id, in_data_product_ids=input_data_products, out_data_product_ids=output_data_products) #activate only this data process just for coverage self.dataprocessclient.activate_data_process( ctd_l0_all_data_process_id) except BadRequest as ex: self.fail("failed to create new data process: %s" % ex) contents = "this is the lookup table contents, replace with a file..." 
att = IonObject(RT.Attachment, name='deviceLookupTable', content=base64.encodestring(contents), keywords=['DataProcessInput'], attachment_type=AttachmentType.ASCII) deviceAttachment = self.rrclient.create_attachment( ctd_l0_all_data_process_id, att) log.info( 'test_createTransformsThenActivateInstrument: InstrumentDevice attachment id = %s', deviceAttachment) log.debug( "TestDataProductProvenance: create L0 all data_process return") #------------------------------- # L1 Conductivity: Create the data process #------------------------------- log.debug( "TestDataProductProvenance: create L1 Conductivity data_process start" ) try: l1_conductivity_data_process_id = self.dataprocessclient.create_data_process( data_process_definition_id=ctd_L1_conductivity_dprocdef_id, in_data_product_ids=[ctd_l0_conductivity_output_dp_id], out_data_product_ids=[ctd_l1_conductivity_output_dp_id]) self.dataprocessclient.activate_data_process( l1_conductivity_data_process_id) except BadRequest as ex: self.fail("failed to create new data process: %s" % ex) #------------------------------- # L1 Pressure: Create the data process #------------------------------- log.debug( "TestDataProductProvenance: create L1_Pressure data_process start") try: l1_pressure_data_process_id = self.dataprocessclient.create_data_process( data_process_definition_id=ctd_L1_pressure_dprocdef_id, in_data_product_ids=[ctd_l0_pressure_output_dp_id], out_data_product_ids=[ctd_l1_pressure_output_dp_id]) self.dataprocessclient.activate_data_process( l1_pressure_data_process_id) except BadRequest as ex: self.fail("failed to create new data process: %s" % ex) #------------------------------- # L1 Temperature: Create the data process #------------------------------- log.debug( "TestDataProductProvenance: create L1_Pressure data_process start") try: l1_temperature_all_data_process_id = self.dataprocessclient.create_data_process( data_process_definition_id=ctd_L1_temperature_dprocdef_id, 
in_data_product_ids=[ctd_l0_temperature_output_dp_id], out_data_product_ids=[ctd_l1_temperature_output_dp_id]) self.dataprocessclient.activate_data_process( l1_temperature_all_data_process_id) except BadRequest as ex: self.fail("failed to create new data process: %s" % ex) #------------------------------- # L2 Salinity: Create the data process #------------------------------- log.debug( "TestDataProductProvenance: create L2_salinity data_process start") try: l2_salinity_all_data_process_id = self.dataprocessclient.create_data_process( data_process_definition_id=ctd_L2_salinity_dprocdef_id, in_data_product_ids=[ ctd_l1_conductivity_output_dp_id, ctd_l1_pressure_output_dp_id, ctd_l1_temperature_output_dp_id ], out_data_product_ids=[ctd_l2_salinity_output_dp_id]) self.dataprocessclient.activate_data_process( l2_salinity_all_data_process_id) except BadRequest as ex: self.fail("failed to create new data process: %s" % ex) #------------------------------- # L2 Density: Create the data process #------------------------------- log.debug( "TestDataProductProvenance: create L2_Density data_process start") try: in_dp_ids = [ ctd_l1_conductivity_output_dp_id, ctd_l1_pressure_output_dp_id, ctd_l1_temperature_output_dp_id ] out_dp_ids = [ctd_l2_density_output_dp_id] l2_density_all_data_process_id = self.dataprocessclient.create_data_process( data_process_definition_id=ctd_L2_density_dprocdef_id, in_data_product_ids=in_dp_ids, out_data_product_ids=out_dp_ids) self.dataprocessclient.activate_data_process( l2_density_all_data_process_id) except BadRequest as ex: self.fail("failed to create new data process: %s" % ex) #------------------------------- # Launch InstrumentAgentInstance, connect to the resource agent client #------------------------------- self.imsclient.start_instrument_agent_instance( instrument_agent_instance_id=instAgentInstance_id) inst_agent_instance_obj = self.imsclient.read_instrument_agent_instance( instAgentInstance_id) print 'TestDataProductProvenance: 
Instrument agent instance obj: = ', inst_agent_instance_obj # Start a resource agent client to talk with the instrument agent. # self._ia_client = ResourceAgentClient('iaclient', name=ResourceAgentClient._get_agent_process_id(instDevice_id, process=FakeProcess()) # print 'activate_instrument: got ia client %s', self._ia_client # log.debug(" test_createTransformsThenActivateInstrument:: got ia client %s", str(self._ia_client)) #------------------------------- # Deactivate InstrumentAgentInstance #------------------------------- self.imsclient.stop_instrument_agent_instance( instrument_agent_instance_id=instAgentInstance_id) self.dataprocessclient.deactivate_data_process( l2_density_all_data_process_id) self.dataprocessclient.deactivate_data_process( l2_salinity_all_data_process_id) self.dataprocessclient.deactivate_data_process( l1_temperature_all_data_process_id) self.dataprocessclient.deactivate_data_process( l1_pressure_data_process_id) self.dataprocessclient.deactivate_data_process( l1_conductivity_data_process_id) self.dataprocessclient.deactivate_data_process( ctd_l0_all_data_process_id) #------------------------------- # Retrieve the provenance info for the ctd density data product #------------------------------- provenance_dict = self.dpmsclient.get_data_product_provenance( ctd_l2_density_output_dp_id) log.debug("TestDataProductProvenance: provenance_dict %s", str(provenance_dict)) #validate that products are represented self.assertTrue(provenance_dict[str(ctd_l1_conductivity_output_dp_id)]) self.assertTrue(provenance_dict[str(ctd_l0_conductivity_output_dp_id)]) self.assertTrue(provenance_dict[str(ctd_l2_density_output_dp_id)]) self.assertTrue(provenance_dict[str(ctd_l1_temperature_output_dp_id)]) self.assertTrue(provenance_dict[str(ctd_l0_temperature_output_dp_id)]) density_dict = (provenance_dict[str(ctd_l2_density_output_dp_id)]) self.assertEquals(density_dict['producer'], [l2_density_all_data_process_id]) #------------------------------- # Retrieve the 
extended resource for this data product #------------------------------- extended_product = self.dpmsclient.get_data_product_extension( ctd_l2_density_output_dp_id) self.assertEqual(1, len(extended_product.data_processes)) self.assertEqual(3, len(extended_product.process_input_data_products)) # log.debug("TestDataProductProvenance: DataProduct provenance_product_list %s", str(extended_product.provenance_product_list)) # log.debug("TestDataProductProvenance: DataProduct data_processes %s", str(extended_product.data_processes)) # log.debug("TestDataProductProvenance: DataProduct process_input_data_products %s", str(extended_product.process_input_data_products)) # log.debug("TestDataProductProvenance: provenance %s", str(extended_product.computed.provenance.value)) #------------------------------- # Retrieve the extended resource for this data process #------------------------------- extended_process_def = self.dataprocessclient.get_data_process_definition_extension( ctd_L0_all_dprocdef_id) # log.debug("TestDataProductProvenance: DataProcess extended_process_def %s", str(extended_process_def)) # log.debug("TestDataProductProvenance: DataProcess data_processes %s", str(extended_process_def.data_processes)) # log.debug("TestDataProductProvenance: DataProcess data_products %s", str(extended_process_def.data_products)) self.assertEqual(1, len(extended_process_def.data_processes)) self.assertEqual(3, len(extended_process_def.output_stream_definitions)) self.assertEqual(3, len(extended_process_def.data_products) ) #one list because of one data process #------------------------------- # Request the xml report #------------------------------- results = self.dpmsclient.get_data_product_provenance_report( ctd_l2_density_output_dp_id) print results #------------------------------- # Cleanup #------------------------------- self.dpmsclient.delete_data_product(ctd_parsed_data_product) self.dpmsclient.delete_data_product(log_data_product_id) 
        # Cleanup (continued): remove every derived data product created by
        # this test, one per transform level (L0/L1/L2).
        self.dpmsclient.delete_data_product(ctd_l0_conductivity_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l0_pressure_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l0_temperature_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l1_conductivity_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l1_pressure_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l1_temperature_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l2_salinity_output_dp_id)
        self.dpmsclient.delete_data_product(ctd_l2_density_output_dp_id)
class TestObservatoryManagementServiceIntegration(IonIntegrationTestCase):
    """Integration tests for the Observatory Management Service.

    Starts a full capability container (r2deploy.yml), builds a tree of
    observatory/site/device resources via _make_associations(), then runs
    a series of sub-tests against that tree (test_observatory_management).
    """

    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        #container = Container()
        #print 'starting container'
        #container.start()
        #print 'started container'
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Service clients used throughout the tests.
        self.RR = ResourceRegistryServiceClient(node=self.container.node)
        self.RR2 = EnhancedResourceRegistryClient(self.RR)
        self.OMS = ObservatoryManagementServiceClient(node=self.container.node)
        self.org_management_service = OrgManagementServiceClient(node=self.container.node)
        self.IMS = InstrumentManagementServiceClient(node=self.container.node)
        self.dpclient = DataProductManagementServiceClient(node=self.container.node)
        self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()
        #print 'TestObservatoryManagementServiceIntegration: started services'

        self.event_publisher = EventPublisher()

    # @unittest.skip('this exists only for debugging the launch process')
    # def test_just_the_setup(self):
    #     return

    def destroy(self, resource_ids):
        """Force-delete every site resource created by _make_associations().

        resource_ids is the DotDict returned by _make_associations().
        Device/model/deployment resources created there are not deleted here.
        """
        self.OMS.force_delete_observatory(resource_ids.observatory_id)
        self.OMS.force_delete_subsite(resource_ids.subsite_id)
        self.OMS.force_delete_subsite(resource_ids.subsite2_id)
        self.OMS.force_delete_subsite(resource_ids.subsiteb_id)
        self.OMS.force_delete_subsite(resource_ids.subsitez_id)
        self.OMS.force_delete_platform_site(resource_ids.platform_site_id)
        self.OMS.force_delete_platform_site(resource_ids.platform_siteb_id)
        self.OMS.force_delete_platform_site(resource_ids.platform_siteb2_id)
        self.OMS.force_delete_platform_site(resource_ids.platform_site3_id)
        self.OMS.force_delete_instrument_site(resource_ids.instrument_site_id)
        self.OMS.force_delete_instrument_site(resource_ids.instrument_site2_id)
        self.OMS.force_delete_instrument_site(resource_ids.instrument_siteb3_id)
        self.OMS.force_delete_instrument_site(resource_ids.instrument_site4_id)

    #@unittest.skip('targeting')
    def test_observatory_management(self):
        """Single entry point: build the resource tree once, run each
        sub-test against it, then tear everything down."""
        resources = self._make_associations()

        self._do_test_find_related_sites(resources)

        self._do_test_get_sites_devices_status(resources)

        self._do_test_find_site_data_products(resources)

        self._do_test_find_related_frames_of_reference(resources)

        self._do_test_create_geospatial_point_center(resources)

        self._do_test_find_observatory_org(resources)

        self.destroy(resources)

    def _do_test_find_related_sites(self, resources):
        """Verify OMS.find_related_sites() returns the expected
        (site_resources, site_children) structures for the org."""
        site_resources, site_children = self.OMS.find_related_sites(resources.org_id)

        #import sys, pprint
        #print >> sys.stderr, pprint.pformat(site_resources)
        #print >> sys.stderr, pprint.pformat(site_children)

        #self.assertIn(resources.org_id, site_resources)
        self.assertIn(resources.observatory_id, site_resources)
        self.assertIn(resources.subsite_id, site_resources)
        # NOTE(review): duplicated assertion (subsite_id checked twice) —
        # harmless, but one of the two could be removed.
        self.assertIn(resources.subsite_id, site_resources)
        self.assertIn(resources.subsite2_id, site_resources)
        self.assertIn(resources.platform_site_id, site_resources)
        self.assertIn(resources.instrument_site_id, site_resources)
        # 13 = the full site tree from _make_associations (org excluded).
        self.assertEquals(len(site_resources), 13)
        self.assertEquals(site_resources[resources.observatory_id].type_, RT.Observatory)

        self.assertIn(resources.org_id, site_children)
        self.assertIn(resources.observatory_id, site_children)
        self.assertIn(resources.subsite_id, site_children)
        # NOTE(review): duplicated assertion here as well.
        self.assertIn(resources.subsite_id, site_children)
        self.assertIn(resources.subsite2_id, site_children)
        self.assertIn(resources.platform_site_id, site_children)
        # Instrument sites are leaves: they must not appear as parents.
        self.assertNotIn(resources.instrument_site_id, site_children)
        self.assertEquals(len(site_children), 9)
        self.assertIsInstance(site_children[resources.subsite_id], list)
        self.assertEquals(len(site_children[resources.subsite_id]), 2)

    def _do_test_get_sites_devices_status(self, resources):
        """Verify get_sites_devices_status() counts for org-rooted and
        observatory-rooted queries, with and without device/status info."""
        result_dict = self.OMS.get_sites_devices_status(resources.org_id)

        site_resources = result_dict.get("site_resources", None)
        site_children = result_dict.get("site_children", None)

        # Org-rooted view: 14 resources (sites + org), 9 parent entries.
        self.assertEquals(len(site_resources), 14)
        self.assertEquals(len(site_children), 9)

        result_dict = self.OMS.get_sites_devices_status(resources.org_id, include_devices=True, include_status=True)
        log.debug("RESULT DICT: %s", result_dict.keys())
        site_resources = result_dict.get("site_resources", None)
        site_children = result_dict.get("site_children", None)
        # NOTE(review): site_status is retrieved but never asserted on.
        site_status = result_dict.get("site_status", None)

        self.assertEquals(len(site_resources), 14)
        self.assertEquals(len(site_children), 9)

        # Observatory-rooted view drops the org: one fewer of each.
        result_dict = self.OMS.get_sites_devices_status(resources.observatory_id, include_devices=True, include_status=True)
        site_resources = result_dict.get("site_resources")
        site_children = result_dict.get("site_children")
        site_status = result_dict.get("site_status")

        self.assertEquals(len(site_resources), 13)
        self.assertEquals(len(site_children), 8)

    def _do_test_find_site_data_products(self, resources):
        """Verify find_site_data_products() reports device data products for
        both the platform and instrument devices (no product resources yet)."""
        res_dict = self.OMS.find_site_data_products(resources.org_id)

        #import sys, pprint
        #print >> sys.stderr, pprint.pformat(res_dict)

        self.assertIsNone(res_dict['data_product_resources'])
        self.assertIn(resources.platform_device_id, res_dict['device_data_products'])
        self.assertIn(resources.instrument_device_id, res_dict['device_data_products'])

    #@unittest.skip('targeting')
    def _do_test_find_related_frames_of_reference(self, stuff):
        """Exercise find_related_frames_of_reference() traversals in both
        directions (instrument<->platform<->observatory) and with filters.

        NOTE(review): the 'stuff' parameter is immediately shadowed below by
        a fresh _make_associations() call, so the caller's resources are
        ignored and a second resource tree is created (and destroyed at the
        end of this sub-test).
        """
        # finding subordinates gives a dict of obj lists, convert objs to ids
        def idify(adict):
            ids = {}
            for k, v in adict.iteritems():
                ids[k] = []
                for obj in v:
                    ids[k].append(obj._id)
            return ids

        # a short version of the function we're testing, with id-ify
        def short(resource_id, output_types):
            ret = self.OMS.find_related_frames_of_reference(resource_id, output_types)
            return idify(ret)

        #set up associations first
        stuff = self._make_associations()

        #basic traversal of tree from instrument to platform
        ids = short(stuff.instrument_site_id, [RT.PlatformSite])
        self.assertIn(RT.PlatformSite, ids)
        self.assertIn(stuff.platform_site_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_siteb_id, ids[RT.PlatformSite])
        self.assertNotIn(stuff.platform_siteb2_id, ids[RT.PlatformSite])

        #since this is the first search, just make sure the input inst_id got stripped
        if RT.InstrumentSite in ids:
            self.assertNotIn(stuff.instrument_site_id, ids[RT.InstrumentSite])

        #basic traversal of tree from platform to instrument
        ids = short(stuff.platform_siteb_id, [RT.InstrumentSite])
        self.assertIn(RT.InstrumentSite, ids)
        self.assertIn(stuff.instrument_site_id, ids[RT.InstrumentSite])
        self.assertNotIn(stuff.instrument_site2_id, ids[RT.InstrumentSite])

        #full traversal of tree from observatory down to instrument
        ids = short(stuff.observatory_id, [RT.InstrumentSite])
        self.assertIn(RT.InstrumentSite, ids)
        self.assertIn(stuff.instrument_site_id, ids[RT.InstrumentSite])

        #full traversal of tree from instrument to observatory
        ids = short(stuff.instrument_site_id, [RT.Observatory])
        self.assertIn(RT.Observatory, ids)
        self.assertIn(stuff.observatory_id, ids[RT.Observatory])

        #partial traversal, only down to platform
        ids = short(stuff.observatory_id, [RT.Subsite, RT.PlatformSite])
        self.assertIn(RT.PlatformSite, ids)
        self.assertIn(RT.Subsite, ids)
        self.assertIn(stuff.platform_site_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_siteb_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_siteb2_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_site3_id, ids[RT.PlatformSite])
        self.assertIn(stuff.subsite_id, ids[RT.Subsite])
        self.assertIn(stuff.subsite2_id, ids[RT.Subsite])
        self.assertIn(stuff.subsitez_id, ids[RT.Subsite])
        self.assertIn(stuff.subsiteb_id, ids[RT.Subsite])
        self.assertNotIn(RT.InstrumentSite, ids)

        #partial traversal, only down to platform
        ids = short(stuff.instrument_site_id, [RT.Subsite, RT.PlatformSite])
        self.assertIn(RT.PlatformSite, ids)
        self.assertIn(RT.Subsite, ids)
        self.assertIn(stuff.platform_siteb_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_site_id, ids[RT.PlatformSite])
        # (continued) upward partial traversal: only the ancestors of the
        # instrument site should appear, not its cousins.
        self.assertIn(stuff.subsite_id, ids[RT.Subsite])
        self.assertIn(stuff.subsiteb_id, ids[RT.Subsite])
        self.assertNotIn(stuff.subsite2_id, ids[RT.Subsite])
        self.assertNotIn(stuff.subsitez_id, ids[RT.Subsite])
        self.assertNotIn(stuff.platform_siteb2_id, ids[RT.PlatformSite])
        self.assertNotIn(RT.Observatory, ids)

        # This sub-test built its own tree above, so it tears it down itself.
        self.destroy(stuff)

    def _make_associations(self):
        """
        create one of each resource and association used by OMS
        to guard against problems in ion-definitions
        """

        #raise unittest.SkipTest("https://jira.oceanobservatories.org/tasks/browse/CISWCORE-41")

        """
        the tree we're creating (observatory, sites, platforms, instruments)

        rows are lettered, colums numbered.
         - first row is implied a
         - first column is implied 1
         - site Z, just because

        O--Sz
        |
        S--S2--P3--I4
        |
        Sb-Pb2-Ib3
        |
        P--I2 <- PlatformDevice, InstrumentDevice2
        |
        Pb <- PlatformDevice b
        |
        I <- InstrumentDevice
        """
        org_id = self.OMS.create_marine_facility(any_old(RT.Org))

        def create_under_org(resource_type, extra_fields=None):
            # Create a resource (via IMS for instrument devices, RR otherwise)
            # and immediately share it with the marine-facility org.
            obj = any_old(resource_type, extra_fields)

            if RT.InstrumentDevice == resource_type:
                resource_id = self.IMS.create_instrument_device(obj)
            else:
                resource_id, _ = self.RR.create(obj)

            self.OMS.assign_resource_to_observatory_org(resource_id=resource_id, org_id=org_id)
            return resource_id

        #stuff we control
        observatory_id = create_under_org(RT.Observatory)
        subsite_id = create_under_org(RT.Subsite)
        subsite2_id = create_under_org(RT.Subsite)
        subsiteb_id = create_under_org(RT.Subsite)
        subsitez_id = create_under_org(RT.Subsite)
        platform_site_id = create_under_org(RT.PlatformSite)
        platform_siteb_id = create_under_org(RT.PlatformSite)
        platform_siteb2_id = create_under_org(RT.PlatformSite)
        platform_site3_id = create_under_org(RT.PlatformSite)
        instrument_site_id = create_under_org(RT.InstrumentSite)
        instrument_site2_id = create_under_org(RT.InstrumentSite)
        instrument_siteb3_id = create_under_org(RT.InstrumentSite)
        instrument_site4_id = create_under_org(RT.InstrumentSite)

        #stuff we associate to
        instrument_device_id = create_under_org(RT.InstrumentDevice)
        instrument_device2_id = create_under_org(RT.InstrumentDevice)
        platform_device_id = create_under_org(RT.PlatformDevice)
        platform_deviceb_id = create_under_org(RT.PlatformDevice)
        instrument_model_id, _ = self.RR.create(any_old(RT.InstrumentModel))
        platform_model_id, _ = self.RR.create(any_old(RT.PlatformModel))
        deployment_id, _ = self.RR.create(any_old(RT.Deployment))

        #observatory
        self.RR.create_association(observatory_id, PRED.hasSite, subsite_id)
        self.RR.create_association(observatory_id, PRED.hasSite, subsitez_id)

        #site
        self.RR.create_association(subsite_id, PRED.hasSite, subsite2_id)
        self.RR.create_association(subsite_id, PRED.hasSite, subsiteb_id)
        self.RR.create_association(subsite2_id, PRED.hasSite, platform_site3_id)
        self.RR.create_association(subsiteb_id, PRED.hasSite, platform_siteb2_id)
        self.RR.create_association(subsiteb_id, PRED.hasSite, platform_site_id)

        #platform_site(s)
        self.RR.create_association(platform_site3_id, PRED.hasSite, instrument_site4_id)
        self.RR.create_association(platform_siteb2_id, PRED.hasSite, instrument_siteb3_id)
        self.RR.create_association(platform_site_id, PRED.hasSite, instrument_site2_id)
        self.RR.create_association(platform_site_id, PRED.hasSite, platform_siteb_id)
        self.RR.create_association(platform_siteb_id, PRED.hasSite, instrument_site_id)

        self.RR.create_association(platform_siteb_id, PRED.hasDevice, platform_deviceb_id)
        #test network parent link
        self.OMS.assign_device_to_network_parent(platform_device_id, platform_deviceb_id)

        self.RR.create_association(platform_site_id, PRED.hasModel, platform_model_id)
        self.RR.create_association(platform_site_id, PRED.hasDevice, platform_device_id)
        self.RR.create_association(platform_site_id, PRED.hasDeployment, deployment_id)

        #instrument_site(s)
        self.RR.create_association(instrument_site_id, PRED.hasModel, instrument_model_id)
        self.RR.create_association(instrument_site_id, PRED.hasDevice, instrument_device_id)
        self.RR.create_association(instrument_site_id, PRED.hasDeployment, deployment_id)

        self.RR.create_association(instrument_site2_id, PRED.hasDevice, instrument_device2_id)

        #platform_device
        self.RR.create_association(platform_device_id, PRED.hasModel, platform_model_id)

        #instrument_device
        self.RR.create_association(instrument_device_id, PRED.hasModel, instrument_model_id)
        self.RR.create_association(instrument_device2_id, PRED.hasModel, instrument_model_id)

        # Bundle every created id so sub-tests (and destroy()) can reach them.
        ret = DotDict()
        ret.org_id = org_id
        ret.observatory_id = observatory_id
        ret.subsite_id = subsite_id
        ret.subsite2_id = subsite2_id
        ret.subsiteb_id = subsiteb_id
        ret.subsitez_id = subsitez_id
        ret.platform_site_id = platform_site_id
        ret.platform_siteb_id = platform_siteb_id
        ret.platform_siteb2_id = platform_siteb2_id
        ret.platform_site3_id = platform_site3_id
        ret.instrument_site_id = instrument_site_id
        ret.instrument_site2_id = instrument_site2_id
        ret.instrument_siteb3_id = instrument_siteb3_id
        ret.instrument_site4_id = instrument_site4_id
        ret.instrument_device_id = instrument_device_id
        ret.instrument_device2_id = instrument_device2_id
        ret.platform_device_id = platform_device_id
        ret.platform_deviceb_id = platform_deviceb_id
        ret.instrument_model_id = instrument_model_id
        ret.platform_model_id = platform_model_id
        ret.deployment_id = deployment_id

        return ret

    #@unittest.skip("targeting")
    def test_create_observatory(self):
        """Smoke test: an Observatory can be created and force-deleted."""
        observatory_obj = IonObject(RT.Observatory,
                                    name='TestFacility',
                                    description='some new mf')
        observatory_id = self.OMS.create_observatory(observatory_obj)

        self.OMS.force_delete_observatory(observatory_id)

    #@unittest.skip("targeting")
    def _do_test_create_geospatial_point_center(self, resources):
        """Verify that create/update of a PlatformSite recomputes its
        geospatial_point_center from the GeospatialBounds constraint."""
        platformsite_obj = IonObject(RT.PlatformSite,
                                     name='TestPlatformSite',
                                     description='some new TestPlatformSite')
        geo_index_obj = IonObject(OT.GeospatialBounds)
        geo_index_obj.geospatial_latitude_limit_north = 20.0
        geo_index_obj.geospatial_latitude_limit_south = 10.0
        geo_index_obj.geospatial_longitude_limit_east = 15.0
        # NOTE(review): east (15.0) < west (20.0) — presumably intentional
        # test data; only the latitude midpoint is asserted below.
        geo_index_obj.geospatial_longitude_limit_west = 20.0
        platformsite_obj.constraint_list = [geo_index_obj]

        platformsite_id = self.OMS.create_platform_site(platformsite_obj)

        # now get the dp back to see if it was updated
        platformsite_obj = self.OMS.read_platform_site(platformsite_id)
        self.assertEquals('some new TestPlatformSite', platformsite_obj.description)
        # lat center = (north 20 + south 10) / 2 = 15.0
        self.assertAlmostEqual(15.0, platformsite_obj.geospatial_point_center.lat, places=1)

        #now adjust a few params
        platformsite_obj.description = 'some old TestPlatformSite'
        geo_index_obj = IonObject(OT.GeospatialBounds)
        geo_index_obj.geospatial_latitude_limit_north = 30.0
        geo_index_obj.geospatial_latitude_limit_south = 20.0
        platformsite_obj.constraint_list = [geo_index_obj]
        # NOTE(review): update_result is never used or asserted on.
        update_result = self.OMS.update_platform_site(platformsite_obj)

        # now get the dp back to see if it was updated
        platformsite_obj = self.OMS.read_platform_site(platformsite_id)
        self.assertEquals('some old TestPlatformSite', platformsite_obj.description)
        # lat center = (30 + 20) / 2 = 25.0 after the update
        self.assertAlmostEqual(25.0, platformsite_obj.geospatial_point_center.lat, places=1)

        self.OMS.force_delete_platform_site(platformsite_id)

    #@unittest.skip("targeting")
    def _do_test_find_observatory_org(self, resources):
        """Build an Org -> Observatory -> Subsite -> PlatformSite ->
        InstrumentSite chain, verifying each association as it is made."""
        log.debug("Make TestOrg")
        org_obj = IonObject(RT.Org,
                            name='TestOrg',
                            description='some new mf org')

        org_id = self.OMS.create_marine_facility(org_obj)

        log.debug("Make Observatory")
        observatory_obj = IonObject(RT.Observatory,
                                    name='TestObservatory',
                                    description='some new obs')
        observatory_id = self.OMS.create_observatory(observatory_obj)

        log.debug("assign observatory to org")
        self.OMS.assign_resource_to_observatory_org(observatory_id, org_id)

        log.debug("verify assigment")
        org_objs = self.OMS.find_org_by_observatory(observatory_id)
        self.assertEqual(1, len(org_objs))
        self.assertEqual(org_id, org_objs[0]._id)
        log.debug("org_id=<" + org_id + ">")

        log.debug("create a subsite with parent Observatory")
        subsite_obj = IonObject(RT.Subsite,
                                name='TestSubsite',
                                description='sample subsite')
        subsite_id = self.OMS.create_subsite(subsite_obj, observatory_id)
        self.assertIsNotNone(subsite_id, "Subsite not created.")

        log.debug("verify that Subsite is linked to Observatory")
        mf_subsite_assoc = self.RR.get_association(observatory_id, PRED.hasSite, subsite_id)
        self.assertIsNotNone(mf_subsite_assoc, "Subsite not connected to Observatory.")

        log.debug("add the Subsite as a resource of this Observatory")
        self.OMS.assign_resource_to_observatory_org(resource_id=subsite_id, org_id=org_id)
        log.debug("verify that Subsite is linked to Org")
        org_subsite_assoc = self.RR.get_association(org_id, PRED.hasResource, subsite_id)
        self.assertIsNotNone(org_subsite_assoc, "Subsite not connected as resource to Org.")

        log.debug("create a logical platform with parent Subsite")
        platform_site_obj = IonObject(RT.PlatformSite,
                                      name='TestPlatformSite',
                                      description='sample logical platform')
        platform_site_id = self.OMS.create_platform_site(platform_site_obj, subsite_id)
        self.assertIsNotNone(platform_site_id, "PlatformSite not created.")

        log.debug("verify that PlatformSite is linked to Site")
        site_lp_assoc = self.RR.get_association(subsite_id, PRED.hasSite, platform_site_id)
        self.assertIsNotNone(site_lp_assoc, "PlatformSite not connected to Site.")

        log.debug("add the PlatformSite as a resource of this Observatory")
        self.OMS.assign_resource_to_observatory_org(resource_id=platform_site_id, org_id=org_id)
        log.debug("verify that PlatformSite is linked to Org")
        org_lp_assoc = self.RR.get_association(org_id, PRED.hasResource, platform_site_id)
        self.assertIsNotNone(org_lp_assoc, "PlatformSite not connected as resource to Org.")

        log.debug("create a logical instrument with parent logical platform")
        instrument_site_obj = IonObject(RT.InstrumentSite,
                                        name='TestInstrumentSite',
                                        description='sample logical instrument')
        instrument_site_id = self.OMS.create_instrument_site(instrument_site_obj, platform_site_id)
        self.assertIsNotNone(instrument_site_id, "InstrumentSite not created.")

        log.debug("verify that InstrumentSite is linked to PlatformSite")
        li_lp_assoc = self.RR.get_association(platform_site_id, PRED.hasSite, instrument_site_id)
        self.assertIsNotNone(li_lp_assoc, "InstrumentSite not connected to PlatformSite.")

        log.debug("add the InstrumentSite as a resource of this Observatory")
        self.OMS.assign_resource_to_observatory_org(resource_id=instrument_site_id, org_id=org_id)
        log.debug("verify that InstrumentSite is linked to Org")
        org_li_assoc = self.RR.get_association(org_id, PRED.hasResource, instrument_site_id)
        self.assertIsNotNone(org_li_assoc, "InstrumentSite not connected as resource to Org.")

        # Now tear the graph down again, verifying associations disappear.
        log.debug("remove the InstrumentSite as a resource of this Observatory")
        self.OMS.unassign_resource_from_observatory_org(instrument_site_id, org_id)
        log.debug("verify that InstrumentSite is linked to Org")
        assocs, _ = self.RR.find_objects(org_id, PRED.hasResource, RT.InstrumentSite, id_only=True)
        self.assertEqual(0, len(assocs))

        log.debug("remove the InstrumentSite, association should drop automatically")
        self.OMS.delete_instrument_site(instrument_site_id)
        assocs, _ = self.RR.find_objects(platform_site_id, PRED.hasSite, RT.InstrumentSite, id_only=True)
        self.assertEqual(0, len(assocs))

        log.debug("remove the PlatformSite as a resource of this Observatory")
        self.OMS.unassign_resource_from_observatory_org(platform_site_id, org_id)
        log.debug("verify that PlatformSite is linked to Org")
        assocs, _ = self.RR.find_objects(org_id, PRED.hasResource, RT.PlatformSite, id_only=True)
        self.assertEqual(0, len(assocs))

        log.debug("remove the Site as a resource of this Observatory")
        self.OMS.unassign_resource_from_observatory_org(subsite_id, org_id)
        log.debug("verify that Site is linked to Org")
        assocs, _ = self.RR.find_objects(org_id, PRED.hasResource, RT.Subsite, id_only=True)
        self.assertEqual(0, len(assocs))

        self.RR.delete(org_id)
        self.OMS.force_delete_observatory(observatory_id)
        self.OMS.force_delete_subsite(subsite_id)
        self.OMS.force_delete_platform_site(platform_site_id)
        self.OMS.force_delete_instrument_site(instrument_site_id)

    @attr('EXT')
    def test_observatory_extensions(self):
        """Build a 4-level site chain (Observatory -> StationSite ->
        PlatformAssemblySite -> PlatformComponentSite -> InstrumentSite) and
        verify each extension's computed site lists."""
        obs_id = self.RR2.create(any_old(RT.Observatory))
        pss_id = self.RR2.create(any_old(RT.PlatformSite, dict(alt_resource_type="StationSite")))
        pas_id = self.RR2.create(any_old(RT.PlatformSite, dict(alt_resource_type="PlatformAssemblySite")))
        pcs_id = self.RR2.create(any_old(RT.PlatformSite, dict(alt_resource_type="PlatformComponentSite")))
        ins_id = self.RR2.create(any_old(RT.InstrumentSite))

        obs_obj = self.RR2.read(obs_id)
        pss_obj = self.RR2.read(pss_id)
        pas_obj = self.RR2.read(pas_id)
        pcs_obj = self.RR2.read(pcs_id)
        ins_obj = self.RR2.read(ins_id)

        # chain them together with hasSite
        self.RR2.create_association(obs_id, PRED.hasSite, pss_id)
        self.RR2.create_association(pss_id, PRED.hasSite, pas_id)
        self.RR2.create_association(pas_id, PRED.hasSite, pcs_id)
        self.RR2.create_association(pcs_id, PRED.hasSite, ins_id)

        extended_obs = self.OMS.get_observatory_site_extension(obs_id, user_id=12345)
        self.assertEqual([pss_obj], extended_obs.computed.platform_station_sites.value)
        self.assertEqual(ComputedValueAvailability.PROVIDED, extended_obs.computed.platform_station_sites.status)
        self.assertEqual([pas_obj], extended_obs.computed.platform_assembly_sites.value)
        self.assertEqual(ComputedValueAvailability.PROVIDED, extended_obs.computed.platform_assembly_sites.status)
        self.assertEqual([pcs_obj], extended_obs.computed.platform_component_sites.value)
        self.assertEqual(ComputedValueAvailability.PROVIDED, extended_obs.computed.platform_component_sites.status)
        self.assertEqual([ins_obj], extended_obs.computed.instrument_sites.value)

        # NOTE(review): this passes obs_id, not pss_id, so it re-fetches the
        # observatory extension; the assertions happen to hold either way.
        # Probably meant pss_id — verify before changing.
        extended_pss = self.OMS.get_observatory_site_extension(obs_id, user_id=12345)
        self.assertEqual([pas_obj], extended_pss.computed.platform_assembly_sites.value)
        self.assertEqual([pcs_obj], extended_pss.computed.platform_component_sites.value)
        self.assertEqual([ins_obj], extended_pss.computed.instrument_sites.value)

        extended_pas = self.OMS.get_observatory_site_extension(pas_id, user_id=12345)
        self.assertEqual([pcs_obj], extended_pas.computed.platform_component_sites.value)
        self.assertEqual([ins_obj], extended_pas.computed.instrument_sites.value)

        extended_pcs = self.OMS.get_platform_component_site_extension(pcs_id, user_id=12345)
        self.assertEqual([ins_obj], extended_pcs.computed.instrument_sites.value)

    #@unittest.skip("in development...")
    @attr('EXT')
    @attr('EXT1')
    def test_observatory_org_extended(self):
        """Exercise the extended Site and extended Org (marine facility)
        resources over the fixture graph from _make_associations."""
        stuff = self._make_associations()

        parsed_pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict',
                                                                                    id_only=True)
        parsed_stream_def_id = self.pubsubcli.create_stream_definition(name='parsed',
                                                                       parameter_dictionary_id=parsed_pdict_id)
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()
        dp_obj = IonObject(RT.DataProduct,
                           name='the parsed data',
                           description='ctd stream test',
                           temporal_domain = tdom,
                           spatial_domain = sdom)

        data_product_id1 = self.dpclient.create_data_product(data_product=dp_obj,
                                                             stream_definition_id=parsed_stream_def_id)
        self.damsclient.assign_data_product(input_resource_id=stuff.instrument_device_id,
                                            data_product_id=data_product_id1)

        #Create a user to be used as regular member
        member_actor_obj = IonObject(RT.ActorIdentity, name='org member actor')
        member_actor_id,_ = self.RR.create(member_actor_obj)
        assert(member_actor_id)
        member_actor_header = get_actor_header(member_actor_id)

        member_user_obj = IonObject(RT.UserInfo, name='org member user')
        member_user_id,_ = self.RR.create(member_user_obj)
        assert(member_user_id)

        self.RR.create_association(subject=member_actor_id, predicate=PRED.hasInfo, object=member_user_id)

        #Build the Service Agreement Proposal to enroll a user actor
        sap = IonObject(OT.EnrollmentProposal,consumer=member_actor_id, provider=stuff.org_id )
        sap_response = self.org_management_service.negotiate(sap, headers=member_actor_header )

        #enroll the member without using negotiation
        self.org_management_service.enroll_member(org_id=stuff.org_id, actor_id=member_actor_id)

        #--------------------------------------------------------------------------------
        # Get the extended Site (platformSite)
        #--------------------------------------------------------------------------------
        try:
            extended_site = self.OMS.get_site_extension(stuff.platform_site_id)
        except:
            # log with traceback before re-raising so the failure is diagnosable
            log.error('failed to get extended site', exc_info=True)
            raise
        log.debug("extended_site: %r ", extended_site)
        self.assertEqual(1, len(extended_site.platform_devices))
        self.assertEqual(1, len(extended_site.platform_models))
        self.assertEqual(stuff.platform_device_id, extended_site.platform_devices[0]._id)
        self.assertEqual(stuff.platform_model_id, extended_site.platform_models[0]._id)

        log.debug("verify that PlatformDeviceb is linked to PlatformDevice with hasNetworkParent link")
        associations = self.RR.find_associations(subject=stuff.platform_deviceb_id,
                                                 predicate=PRED.hasNetworkParent,
                                                 object=stuff.platform_device_id,
                                                 id_only=True)
        self.assertIsNotNone(associations, "PlatformDevice child not connected to PlatformDevice parent.")

        #--------------------------------------------------------------------------------
        # Get the extended Org
        #--------------------------------------------------------------------------------
        #test the extended resource
        extended_org = self.OMS.get_marine_facility_extension(stuff.org_id)
        log.debug("test_observatory_org_extended: extended_org: %s ", str(extended_org))
        #self.assertEqual(2, len(extended_org.instruments_deployed) )
        #self.assertEqual(1, len(extended_org.platforms_not_deployed) )
        self.assertEqual(2, extended_org.number_of_platforms)
        self.assertEqual(2, len(extended_org.platform_models) )
        self.assertEqual(2, extended_org.number_of_instruments)
        self.assertEqual(2, len(extended_org.instrument_models) )
        self.assertEqual(1, len(extended_org.members))
        # members are reported as UserInfo, not ActorIdentity
        self.assertNotEqual(extended_org.members[0]._id, member_actor_id)
        self.assertEqual(extended_org.members[0]._id, member_user_id)
        self.assertEqual(1, len(extended_org.open_requests))

        self.assertTrue(len(extended_site.deployments)>0)
        self.assertEqual(len(extended_site.deployments), len(extended_site.deployment_info))

        #test the extended resource of the ION org
        ion_org_id = self.org_management_service.find_org()
        extended_org = self.OMS.get_marine_facility_extension(ion_org_id._id, user_id=12345)
        log.debug("test_observatory_org_extended: extended_ION_org: %s ", str(extended_org))
        self.assertEqual(1, len(extended_org.members))
        self.assertEqual(0, extended_org.number_of_platforms)
        #self.assertEqual(1, len(extended_org.sites))

        #--------------------------------------------------------------------------------
        # Get the extended Site
        #--------------------------------------------------------------------------------

        #create device state events to use for op /non-op filtering in extended
        t = get_ion_ts()
        self.event_publisher.publish_event( ts_created= t,  event_type = 'ResourceAgentStateEvent',
                                            origin = stuff.instrument_device_id,
                                            state=ResourceAgentState.STREAMING )

        self.event_publisher.publish_event( ts_created= t,  event_type = 'ResourceAgentStateEvent',
                                            origin = stuff.instrument_device2_id,
                                            state=ResourceAgentState.INACTIVE )
        extended_site = self.OMS.get_site_extension(stuff.instrument_site2_id)
        log.debug("test_observatory_org_extended: extended_site: %s ", str(extended_site))

        self.dpclient.delete_data_product(data_product_id1)
class TestDeployment(IonIntegrationTestCase):
    """Integration tests for the Deployment resource lifecycle in the
    Observatory Management Service."""

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Service clients used by the tests below
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dmpsclient = DataProductManagementServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.psmsclient = PubsubManagementServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()
        self.c = DotDict()
        self.c.resource_registry = self.rrclient
        self.RR2 = EnhancedResourceRegistryClient(self.rrclient)
        self.dsmsclient = DataProcessManagementServiceClient(node=self.container.node)

        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
                self.dsmsclient.deactivate_data_process(proc_id)
                self.dsmsclient.delete_data_process(proc_id)
        self.addCleanup(killAllDataProcesses)

    #@unittest.skip("targeting")
    def test_create_deployment(self):
        """Create a Deployment with a planned TemporalBounds constraint,
        attach a site and a device, verify the hasDeployment associations,
        then force-delete and confirm reads raise NotFound."""
        #create a deployment with metadata and an initial site and device
        platform_site__obj = IonObject(RT.PlatformSite,
                                       name='PlatformSite1',
                                       description='test platform site')
        site_id = self.omsclient.create_platform_site(platform_site__obj)

        platform_device__obj = IonObject(RT.PlatformDevice,
                                         name='PlatformDevice1',
                                         description='test platform device')
        device_id = self.imsclient.create_platform_device(platform_device__obj)

        # planned window: 2013-01-01 .. 2014-01-01 as epoch-second strings
        start = str(int(time.mktime(datetime.datetime(2013, 1, 1).timetuple())))
        end = str(int(time.mktime(datetime.datetime(2014, 1, 1).timetuple())))
        temporal_bounds = IonObject(OT.TemporalBounds, name='planned',
                                    start_datetime=start, end_datetime=end)
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment',
                                   description='some new deployment',
                                   constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)
        self.omsclient.assign_site_to_deployment(site_id, deployment_id)
        self.omsclient.assign_device_to_deployment(device_id, deployment_id)

        log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id) )

        #retrieve the deployment objects and check that the assoc site and device are attached
        read_deployment_obj = self.omsclient.read_deployment(deployment_id)
        log.debug("test_create_deployment: created deployment obj: %s ", str(read_deployment_obj) )

        site_ids, _ = self.rrclient.find_subjects(RT.PlatformSite, PRED.hasDeployment, deployment_id, True)
        self.assertEqual(len(site_ids), 1)

        device_ids, _ = self.rrclient.find_subjects(RT.PlatformDevice, PRED.hasDeployment, deployment_id, True)
        self.assertEqual(len(device_ids), 1)

        #delete the deployment
        self.omsclient.force_delete_deployment(deployment_id)
        # now try to get the deleted dp object
        try:
            self.omsclient.read_deployment(deployment_id)
        except NotFound:
            pass
        else:
            self.fail("deleted deployment was found during read")

    #@unittest.skip("targeting")
    def test_prepare_deployment_support(self):
        """Verify prepare_deployment_support with and without a deployment:
        empty association lists first, then populated resource/association
        lists as site and device get assigned."""
        deploy_sup = self.omsclient.prepare_deployment_support()
        self.assertTrue(deploy_sup)

        # without a deployment id, every association bucket is empty
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentDevice'].type_,
                          "AssocDeploymentInstDevice")
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentDevice'].resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentDevice'].associated_resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasPlatformDevice'].type_,
                          "AssocDeploymentPlatDevice")
        self.assertEquals(deploy_sup.associations['DeploymentHasPlatformDevice'].resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasPlatformDevice'].associated_resources, [])
        self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentSite'].type_,
                          "AssocDeploymentInstSite")
self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentSite'].resources, []) self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentSite'].associated_resources, []) self.assertEquals(deploy_sup.associations['DeploymentHasPlatformSite'].type_, "AssocDeploymentPlatSite") self.assertEquals(deploy_sup.associations['DeploymentHasPlatformSite'].resources, []) self.assertEquals(deploy_sup.associations['DeploymentHasPlatformSite'].associated_resources, []) #create a deployment with metadata and an initial site and device platform_site__obj = IonObject(RT.PlatformSite, name='PlatformSite1', description='test platform site') site_id = self.omsclient.create_platform_site(platform_site__obj) platform_device__obj = IonObject(RT.PlatformDevice, name='PlatformDevice1', description='test platform device') device_id = self.imsclient.create_platform_device(platform_device__obj) start = str(int(time.mktime(datetime.datetime(2013, 1, 1).timetuple()))) end = str(int(time.mktime(datetime.datetime(2014, 1, 1).timetuple()))) temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=start, end_datetime=end) deployment_obj = IonObject(RT.Deployment, name='TestDeployment', description='some new deployment', constraint_list=[temporal_bounds]) deployment_id = self.omsclient.create_deployment(deployment_obj) deploy_sup = self.omsclient.prepare_deployment_support(deployment_id) self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentDevice'].resources, []) self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentDevice'].associated_resources, []) self.assertEquals(len(deploy_sup.associations['DeploymentHasPlatformDevice'].resources), 1) self.assertEquals(deploy_sup.associations['DeploymentHasPlatformDevice'].associated_resources, []) self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentSite'].resources, []) self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentSite'].associated_resources, []) 
self.assertEquals(len(deploy_sup.associations['DeploymentHasPlatformSite'].resources), 1) self.assertEquals(deploy_sup.associations['DeploymentHasPlatformSite'].associated_resources, []) self.omsclient.assign_site_to_deployment(site_id, deployment_id) self.omsclient.assign_device_to_deployment(device_id, deployment_id) deploy_sup = self.omsclient.prepare_deployment_support(deployment_id) self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentDevice'].resources, []) self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentDevice'].associated_resources, []) self.assertEquals(len(deploy_sup.associations['DeploymentHasPlatformDevice'].resources), 1) self.assertEquals(len(deploy_sup.associations['DeploymentHasPlatformDevice'].associated_resources), 1) self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentSite'].resources, []) self.assertEquals(deploy_sup.associations['DeploymentHasInstrumentSite'].associated_resources, []) self.assertEquals(len(deploy_sup.associations['DeploymentHasPlatformSite'].resources), 1) self.assertEquals(len(deploy_sup.associations['DeploymentHasPlatformSite'].associated_resources), 1) #delete the deployment self.omsclient.force_delete_deployment(deployment_id) # now try to get the deleted dp object try: self.omsclient.read_deployment(deployment_id) except NotFound: pass else: self.fail("deleted deployment was found during read") #@unittest.skip("targeting") def base_activate_deployment(self, make_assigns=False): # Create platform site, platform device, platform model bounds = GeospatialBounds(geospatial_latitude_limit_north=float(5), geospatial_latitude_limit_south=float(5), geospatial_longitude_limit_west=float(15), geospatial_longitude_limit_east=float(15), geospatial_vertical_min=float(0), geospatial_vertical_max=float(1000)) platform_site__obj = IonObject(RT.PlatformSite, name='PlatformSite1', description='test platform site', constraint_list=[bounds]) platform_site_id = 
self.omsclient.create_platform_site(platform_site__obj) platform_device_obj = IonObject(RT.PlatformDevice, name='PlatformDevice1', description='test platform device') platform_device_id = self.imsclient.create_platform_device(platform_device_obj) platform_model__obj = IonObject(RT.PlatformModel, name='PlatformModel1', description='test platform model') platform_model_id = self.imsclient.create_platform_model(platform_model__obj) # Create instrument site #------------------------------------------------------------------------------------- bounds = GeospatialBounds(geospatial_latitude_limit_north=float(45), geospatial_latitude_limit_south=float(40), geospatial_longitude_limit_west=float(-75), geospatial_longitude_limit_east=float(-70), geospatial_vertical_min=float(0), geospatial_vertical_max=float(500)) instrument_site_obj = IonObject(RT.InstrumentSite, name='InstrumentSite1', description='test instrument site', reference_designator='GA01SUMO-FI003-01-CTDMO0999', constraint_list=[bounds]) instrument_site_id = self.omsclient.create_instrument_site(instrument_site_obj, platform_site_id) pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True) ctd_stream_def_id = self.psmsclient.create_stream_definition(name='SBE37_CDM', parameter_dictionary_id=pdict_id) # Create an instrument device instrument_device_obj = IonObject(RT.InstrumentDevice, name='InstrumentDevice1', description='test instrument device') instrument_device_id = self.imsclient.create_instrument_device(instrument_device_obj) self.rrclient.create_association(platform_device_id, PRED.hasDevice, instrument_device_id) pp_obj = IonObject(OT.PlatformPort, reference_designator='GA01SUMO-FI003-01-CTDMO0999', port_type= PortTypeEnum.PAYLOAD, ip_address='1' ) port_assignments = {instrument_device_id : pp_obj} #---------------------------------------------------------------------------------------------------- # Create an instrument model instrument_model_obj = 
IonObject(RT.InstrumentModel, name='InstrumentModel1', description='test instrument model') instrument_model_id = self.imsclient.create_instrument_model(instrument_model_obj) # Create a deployment object #---------------------------------------------------------------------------------------------------- start = str(int(time.mktime(datetime.datetime(2013, 1, 1).timetuple()))) end = str(int(time.mktime(datetime.datetime(2020, 1, 1).timetuple()))) temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=start, end_datetime=end) deployment_obj = IonObject(RT.Deployment, name='TestDeployment', description='some new deployment', context=IonObject(OT.CabledNodeDeploymentContext), port_assignments=port_assignments, constraint_list=[temporal_bounds]) deployment_id = self.omsclient.create_deployment(deployment_obj) log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id) ) if make_assigns: self.imsclient.assign_platform_model_to_platform_device(platform_model_id, platform_device_id) self.imsclient.assign_instrument_model_to_instrument_device(instrument_model_id, instrument_device_id) self.omsclient.assign_platform_model_to_platform_site(platform_model_id, platform_site_id) self.omsclient.assign_instrument_model_to_instrument_site(instrument_model_id, instrument_site_id) self.omsclient.assign_site_to_deployment(platform_site_id, deployment_id) self.omsclient.assign_device_to_deployment(platform_device_id, deployment_id) ret = DotDict(instrument_site_id=instrument_site_id, instrument_device_id=instrument_device_id, instrument_model_id=instrument_model_id, platform_site_id=platform_site_id, platform_device_id=platform_device_id, platform_model_id=platform_model_id, deployment_id=deployment_id) return ret def _create_subsequent_deployment(self, prior_dep_info): platform_device_obj = IonObject(RT.PlatformDevice, name='PlatformDevice2', description='test platform device') platform_device_id = 
self.imsclient.create_platform_device(platform_device_obj) instrument_device_obj = IonObject(RT.InstrumentDevice, name='InstrumentDevice2', description='test instrument device') instrument_device_id = self.imsclient.create_instrument_device(instrument_device_obj) self.rrclient.create_association(platform_device_id, PRED.hasDevice, instrument_device_id) self.imsclient.assign_platform_model_to_platform_device(prior_dep_info.platform_model_id, platform_device_id) self.imsclient.assign_instrument_model_to_instrument_device(prior_dep_info.instrument_model_id, instrument_device_id) start = str(int(time.mktime(datetime.datetime(2013, 6, 1).timetuple()))) end = str(int(time.mktime(datetime.datetime(2020, 6, 1).timetuple()))) temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=start, end_datetime=end) deployment_obj = IonObject(RT.Deployment, name='TestDeployment2', description='some new deployment', context=IonObject(OT.CabledNodeDeploymentContext), constraint_list=[temporal_bounds]) deployment_id = self.omsclient.create_deployment(deployment_obj) self.omsclient.assign_site_to_deployment(prior_dep_info.platform_site_id, deployment_id) self.omsclient.assign_device_to_deployment(prior_dep_info.platform_device_id, deployment_id) log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id) ) ret = DotDict(instrument_device_id=instrument_device_id, platform_device_id=platform_device_id, deployment_id=deployment_id) return ret #@unittest.skip("targeting") def test_activate_deployment_normal(self): res = self.base_activate_deployment(make_assigns=True) before_activate_instrument_device_obj = self.rrclient.read(res.instrument_device_id) self.assertNotEquals(before_activate_instrument_device_obj.lcstate, LCS.DEPLOYED) log.debug("activating deployment, expecting success") self.omsclient.activate_deployment(res.deployment_id) # OOIION-1239: retrieve the extended resource and validate that only two sites are in the list of portals 
extended_deployment = self.omsclient.get_deployment_extension(res.deployment_id) self.assertEquals( len(extended_deployment.computed.portals.value), 2) def assertGeospatialBoundsEquals(a, b): self.assertEquals(a['geospatial_latitude_limit_north'],b['geospatial_latitude_limit_north']) self.assertEquals(a['geospatial_latitude_limit_south'],b['geospatial_latitude_limit_south']) self.assertEquals(a['geospatial_longitude_limit_west'],b['geospatial_longitude_limit_west']) self.assertEquals(a['geospatial_longitude_limit_east'],b['geospatial_longitude_limit_east']) def assertGeospatialBoundsNotEquals(a, b): self.assertNotEquals(a['geospatial_latitude_limit_north'],b['geospatial_latitude_limit_north']) self.assertNotEquals(a['geospatial_latitude_limit_south'],b['geospatial_latitude_limit_south']) self.assertNotEquals(a['geospatial_longitude_limit_west'],b['geospatial_longitude_limit_west']) self.assertNotEquals(a['geospatial_longitude_limit_east'],b['geospatial_longitude_limit_east']) after_activate_instrument_device_obj = self.rrclient.read(res.instrument_device_id) assertGeospatialBoundsNotEquals(before_activate_instrument_device_obj.geospatial_bounds,after_activate_instrument_device_obj.geospatial_bounds) deployment_obj = self.RR2.read(res.deployment_id) self.assertEquals(deployment_obj.lcstate, LCS.DEPLOYED) extended_deployment = self.omsclient.get_deployment_extension(res.deployment_id) # two sites in this test self.assertEquals(len(extended_deployment.computed.portals.value), 2) # only one portal instrument self.assertEquals(len(extended_deployment.portal_instruments), 1) log.debug("deactivatin deployment, expecting success") self.omsclient.deactivate_deployment(res.deployment_id) after_deactivate_instrument_device_obj = self.rrclient.read(res.instrument_device_id) assertGeospatialBoundsNotEquals(after_activate_instrument_device_obj.geospatial_bounds, after_deactivate_instrument_device_obj.geospatial_bounds) deployment_obj = self.RR2.read(res.deployment_id) 
        self.assertEquals(deployment_obj.lcstate, LCS.INTEGRATED)

    def test_activate_deployment_redeploy(self):
        """Activate a deployment, then activate a subsequent deployment on
        the same site: the first should roll back to INTEGRATED, the second
        become DEPLOYED, and their temporal bounds stay ordered."""
        dep_util = DeploymentUtil(self.container)
        res = self.base_activate_deployment(make_assigns=True)

        log.debug("activating first deployment, expecting success")
        self.omsclient.activate_deployment(res.deployment_id)
        deployment_obj1 = self.RR2.read(res.deployment_id)
        self.assertEquals(deployment_obj1.lcstate, LCS.DEPLOYED)

        next_dep_info = self._create_subsequent_deployment(res)
        deployment_obj2 = self.RR2.read(next_dep_info.deployment_id)
        self.assertNotEquals(deployment_obj2.lcstate, LCS.DEPLOYED)

        log.debug("activating subsequent deployment, expecting success")
        self.omsclient.activate_deployment(next_dep_info.deployment_id)

        deployment_obj1 = self.RR2.read(res.deployment_id)
        self.assertEquals(deployment_obj1.lcstate, LCS.INTEGRATED)
        deployment_obj2 = self.RR2.read(next_dep_info.deployment_id)
        self.assertEquals(deployment_obj2.lcstate, LCS.DEPLOYED)

        # redeploy must truncate the first deployment's window before the second's
        dep1_tc = dep_util.get_temporal_constraint(deployment_obj1)
        dep2_tc = dep_util.get_temporal_constraint(deployment_obj2)
        self.assertLessEqual(float(dep1_tc.end_datetime), float(dep2_tc.end_datetime))

        log.debug("deactivating second deployment, expecting success")
        self.omsclient.deactivate_deployment(next_dep_info.deployment_id)
        deployment_obj2 = self.RR2.read(next_dep_info.deployment_id)
        self.assertEquals(deployment_obj2.lcstate, LCS.INTEGRATED)

    #@unittest.skip("targeting")
    def test_activate_deployment_nomodels(self):
        """Activation must fail with NotFound while site/device model
        associations are missing."""
        res = self.base_activate_deployment()

        self.omsclient.assign_site_to_deployment(res.platform_site_id, res.deployment_id)
        self.omsclient.assign_device_to_deployment(res.platform_device_id, res.deployment_id)

        log.debug("activating deployment without site+device models, expecting fail")
        self.assert_deploy_fail(res.deployment_id, NotFound, "Expected 1")

        log.debug("assigning instrument site model")
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id,
                                                                  res.instrument_site_id)

        log.debug("activating deployment without device models, expecting fail")
        self.assert_deploy_fail(res.deployment_id, NotFound, "Expected 1")

    #@unittest.skip("targeting")
    def test_activate_deployment_nosite(self):
        """Activation must fail with BadRequest when only a device (no site)
        is attached to the deployment."""
        res = self.base_activate_deployment()

        log.debug("assigning instrument models")
        self.imsclient.assign_instrument_model_to_instrument_device(res.instrument_model_id,
                                                                    res.instrument_device_id)
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id,
                                                                  res.instrument_site_id)

        log.debug("deploying instrument device only")
        self.omsclient.assign_device_to_deployment(res.instrument_device_id, res.deployment_id)

        log.debug("activating deployment without instrument site, expecting fail")
        self.assert_deploy_fail(res.deployment_id, BadRequest)

    #@unittest.skip("targeting")
    def test_activate_deployment_nodevice(self):
        """Activation must fail with BadRequest when only a site (no device)
        is attached to the deployment."""
        res = self.base_activate_deployment()

        log.debug("assigning platform and instrument models")
        self.imsclient.assign_instrument_model_to_instrument_device(res.instrument_model_id,
                                                                    res.instrument_device_id)
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id,
                                                                  res.instrument_site_id)

        log.debug("deploying instrument site only")
        self.omsclient.assign_site_to_deployment(res.instrument_site_id, res.deployment_id)

        log.debug("activating deployment without device, expecting fail")
        self.assert_deploy_fail(res.deployment_id, BadRequest,
                                "No devices were found in the deployment")

    def assert_deploy_fail(self, deployment_id, err_type=BadRequest, fail_message=""):
        """Assert that activate_deployment raises err_type; optionally check
        that fail_message appears in the exception text."""
        with self.assertRaises(err_type) as cm:
            self.omsclient.activate_deployment(deployment_id)
            log.debug("assert_deploy_fail cm: %s", str(cm) )
            if fail_message:
                self.assertIn(fail_message, cm.exception.message)

    def test_3x3_matchups_remoteplatform(self):
        self.base_3x3_matchups(IonObject(OT.RemotePlatformDeploymentContext))

    def test_3x3_matchups_cabledinstrument(self):
        self.base_3x3_matchups(IonObject(OT.CabledInstrumentDeploymentContext))

    def
test_3x3_matchups_cablednode(self): self.base_3x3_matchups(IonObject(OT.CabledNodeDeploymentContext)) def base_3x3_matchups(self, deployment_context): """ This will be 1 root platform, 3 sub platforms (2 of one model, 1 of another) and 3 sub instruments each (2-to-1) """ deployment_context_type = type(deployment_context).__name__ instrument_model_id = [self.RR2.create(any_old(RT.InstrumentModel)) for _ in range(6)] platform_model_id = [self.RR2.create(any_old(RT.PlatformModel)) for _ in range(3)] instrument_device_id = [self.RR2.create(any_old(RT.InstrumentDevice)) for _ in range(9)] platform_device_id = [self.RR2.create(any_old(RT.PlatformDevice)) for _ in range(4)] instrument_site_id = [self.RR2.create(any_old(RT.InstrumentSite, { "reference_designator" : "GA01SUMO-FI003-0%s-CTDMO0999" % (i+1), "planned_uplink_port": IonObject(OT.PlatformPort, reference_designator="GA01SUMO-FI003-0%s-CTDMO0999" % (i+1) )})) for i in range(9)] platform_site_id = [self.RR2.create(any_old(RT.PlatformSite, { "reference_designator" : "GA01SUMO-FI003-0%s-CTDMO0888" % (i+1) , "planned_uplink_port": IonObject(OT.PlatformPort, reference_designator="GA01SUMO-FI003-0%s-CTDMO0888" % (i+1))})) for i in range(4)] def instrument_model_at(platform_idx, instrument_idx): m = platform_idx * 2 if instrument_idx > 0: m += 1 return m def platform_model_at(platform_idx): if platform_idx > 0: return 1 return 0 def instrument_at(platform_idx, instrument_idx): return platform_idx * 3 + instrument_idx # set up the structure for p in range(3): m = platform_model_at(p) self.RR2.assign_platform_model_to_platform_site_with_has_model(platform_model_id[m], platform_site_id[p]) self.RR2.assign_platform_model_to_platform_device_with_has_model(platform_model_id[m], platform_device_id[p]) self.RR2.assign_platform_device_to_platform_device_with_has_device(platform_device_id[p], platform_device_id[3]) self.RR2.assign_platform_site_to_platform_site_with_has_site(platform_site_id[p], platform_site_id[3]) for i in 
range(3): m = instrument_model_at(p, i) idx = instrument_at(p, i) self.RR2.assign_instrument_model_to_instrument_site_with_has_model(instrument_model_id[m], instrument_site_id[idx]) self.RR2.assign_instrument_model_to_instrument_device_with_has_model(instrument_model_id[m], instrument_device_id[idx]) self.RR2.assign_instrument_device_to_platform_device_with_has_device(instrument_device_id[idx], platform_device_id[p]) self.RR2.assign_instrument_site_to_platform_site_with_has_site(instrument_site_id[idx], platform_site_id[p]) # top level models self.RR2.assign_platform_model_to_platform_device_with_has_model(platform_model_id[2], platform_device_id[3]) self.RR2.assign_platform_model_to_platform_site_with_has_model(platform_model_id[2], platform_site_id[3]) # verify structure for p in range(3): parent_id = self.RR2.find_platform_device_id_by_platform_device_using_has_device(platform_device_id[p]) self.assertEqual(platform_device_id[3], parent_id) parent_id = self.RR2.find_platform_site_id_by_platform_site_using_has_site(platform_site_id[p]) self.assertEqual(platform_site_id[3], parent_id) for i in range(len(platform_site_id)): self.assertEqual(self.RR2.find_platform_model_of_platform_device_using_has_model(platform_device_id[i]), self.RR2.find_platform_model_of_platform_site_using_has_model(platform_site_id[i])) for i in range(len(instrument_site_id)): self.assertEqual(self.RR2.find_instrument_model_of_instrument_device_using_has_model(instrument_device_id[i]), self.RR2.find_instrument_model_of_instrument_site_using_has_model(instrument_site_id[i])) # OOIReferenceDesignator format: GA01SUMO-FI003-03-CTDMO0999 (site-platform_id-port-device_id) port_assignments = {} for p in range(3): ref_desig = "GA01SUMO-FI003-0%s-CTDMO0888" % (p+1) pp_obj = IonObject(OT.PlatformPort, reference_designator=ref_desig, port_type= PortTypeEnum.PAYLOAD, ip_address=str(p) ) port_assignments[platform_device_id[p]] = pp_obj for i in range(3): ref_desig = "GA01SUMO-FI003-0%s-CTDMO0999" % 
((p*3)+i+1) pp_obj = IonObject(OT.PlatformPort, reference_designator=ref_desig, port_type= PortTypeEnum.PAYLOAD, ip_address=str(p) ) idx = instrument_at(p, i) port_assignments[instrument_device_id[idx]] = pp_obj deployment_id = self.RR2.create(any_old(RT.Deployment, {"context": deployment_context, "port_assignments": port_assignments})) log.debug("assigning device/site to %s deployment", deployment_context_type) if OT.RemotePlatformDeploymentContext == deployment_context_type: self.RR2.assign_deployment_to_platform_device_with_has_deployment(deployment_id, platform_device_id[3]) self.RR2.assign_deployment_to_platform_site_with_has_deployment(deployment_id, platform_site_id[3]) elif OT.CabledInstrumentDeploymentContext == deployment_context_type: self.RR2.assign_deployment_to_instrument_device_with_has_deployment(deployment_id, instrument_device_id[1]) self.RR2.assign_deployment_to_instrument_site_with_has_deployment(deployment_id, instrument_site_id[1]) elif OT.CabledNodeDeploymentContext == deployment_context_type: self.RR2.assign_deployment_to_platform_device_with_has_deployment(deployment_id, platform_device_id[1]) self.RR2.assign_deployment_to_platform_site_with_has_deployment(deployment_id, platform_site_id[1]) log.debug("activation of %s deployment", deployment_context_type) self.omsclient.activate_deployment(deployment_id) log.debug("validation of %s deployment", deployment_context_type) if OT.RemotePlatformDeploymentContext == deployment_context_type: # verify proper associations for i, d in enumerate(platform_device_id): self.assertEqual(d, self.RR2.find_platform_device_id_of_platform_site_using_has_device(platform_site_id[i])) for i, d in enumerate(instrument_device_id): self.assertEqual(d, self.RR2.find_instrument_device_id_of_instrument_site_using_has_device(instrument_site_id[i])) elif OT.CabledInstrumentDeploymentContext == deployment_context_type: self.assertEqual(instrument_device_id[1], 
self.RR2.find_instrument_device_id_of_instrument_site_using_has_device(instrument_site_id[1])) elif OT.CabledNodeDeploymentContext == deployment_context_type: expected_platforms = [1] # verify proper associations for i, d in enumerate(platform_device_id): self.assertEqual(i in expected_platforms, d in self.RR2.find_platform_device_ids_of_platform_site_using_has_device(platform_site_id[i]))
# NOTE(review): this is a second `class TestDeployment` definition in the same file
# (another appears earlier); in Python the later definition replaces the earlier one,
# so one of the two never runs -- looks like a merge/concatenation artifact; confirm.
class TestDeployment(IonIntegrationTestCase):
    """Integration tests for deployment creation/activation (older API variant:
    deploy_platform_site / deploy_instrument_device)."""

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dmpsclient = DataProductManagementServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.psmsclient = PubsubManagementServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()

        self.c = DotDict()
        self.c.resource_registry = self.rrclient
        self.RR2 = EnhancedResourceRegistryClient(self.rrclient)

        # create missing data process definition
        self.dsmsclient = DataProcessManagementServiceClient(node=self.container.node)
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name=LOGICAL_TRANSFORM_DEFINITION_NAME,
                            description="normally in preload",
                            module='ion.processes.data.transforms.logical_transform',
                            class_name='logical_transform')
        self.dsmsclient.create_data_process_definition(dpd_obj)

        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
                self.dsmsclient.deactivate_data_process(proc_id)
                self.dsmsclient.delete_data_process(proc_id)
        self.addCleanup(killAllDataProcesses)

    #@unittest.skip("targeting")
    def test_create_deployment(self):
        """Create a deployment with site+device, verify associations, then force-delete."""

        # create a deployment with metadata and an initial site and device
        platform_site__obj = IonObject(RT.PlatformSite,
                                       name='PlatformSite1',
                                       description='test platform site')
        site_id = self.omsclient.create_platform_site(platform_site__obj)

        platform_device__obj = IonObject(RT.PlatformDevice,
                                         name='PlatformDevice1',
                                         description='test platform device')
        device_id = self.imsclient.create_platform_device(platform_device__obj)

        start = IonTime(datetime.datetime(2013, 1, 1))
        end = IonTime(datetime.datetime(2014, 1, 1))
        temporal_bounds = IonObject(OT.TemporalBounds, name='planned',
                                    start_datetime=start.to_string(), end_datetime=end.to_string())
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment',
                                   description='some new deployment',
                                   constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)
        self.omsclient.deploy_platform_site(site_id, deployment_id)
        self.imsclient.deploy_platform_device(device_id, deployment_id)

        log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id))

        #retrieve the deployment objects and check that the assoc site and device are attached
        read_deployment_obj = self.omsclient.read_deployment(deployment_id)
        log.debug("test_create_deployment: created deployment obj: %s ", str(read_deployment_obj))

        site_ids, _ = self.rrclient.find_subjects(RT.PlatformSite, PRED.hasDeployment, deployment_id, True)
        self.assertEqual(len(site_ids), 1)

        device_ids, _ = self.rrclient.find_subjects(RT.PlatformDevice, PRED.hasDeployment, deployment_id, True)
        self.assertEqual(len(device_ids), 1)

        #delete the deployment
        self.RR2.pluck(deployment_id)
        self.omsclient.force_delete_deployment(deployment_id)
        # now try to get the deleted dp object
        try:
            self.omsclient.read_deployment(deployment_id)
        except NotFound:
            pass
        else:
            self.fail("deleted deployment was found during read")

    #@unittest.skip("targeting")
    def base_activate_deployment(self):
        """Build the common fixture (platform/instrument sites, devices, models and a
        deployment) used by the activate-deployment tests; returns ids in a DotDict."""

        #-------------------------------------------------------------------------------------
        # Create platform site, platform device, platform model
        #-------------------------------------------------------------------------------------

        platform_site__obj = IonObject(RT.PlatformSite,
                                       name='PlatformSite1',
                                       description='test platform site')
        platform_site_id = self.omsclient.create_platform_site(platform_site__obj)

        platform_device_obj = IonObject(RT.PlatformDevice,
                                        name='PlatformDevice1',
                                        description='test platform device')
        platform_device_id = self.imsclient.create_platform_device(platform_device_obj)

        platform_model__obj = IonObject(RT.PlatformModel,
                                        name='PlatformModel1',
                                        description='test platform model')
        platform_model_id = self.imsclient.create_platform_model(platform_model__obj)

        #-------------------------------------------------------------------------------------
        # Create instrument site
        #-------------------------------------------------------------------------------------

        instrument_site_obj = IonObject(RT.InstrumentSite,
                                        name='InstrumentSite1',
                                        description='test instrument site')
        instrument_site_id = self.omsclient.create_instrument_site(instrument_site_obj, platform_site_id)

        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.psmsclient.create_stream_definition(name='SBE37_CDM', parameter_dictionary_id=pdict_id)

        # Construct temporal and spatial Coordinate Reference System objects
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        dp_obj = IonObject(RT.DataProduct,
                           name='Log Data Product',
                           description='some new dp',
                           temporal_domain=tdom,
                           spatial_domain=sdom)
        out_log_data_product_id = self.dmpsclient.create_data_product(dp_obj, ctd_stream_def_id)

        #----------------------------------------------------------------------------------------------------
        # Start the transform (a logical transform) that acts as an instrument site
        #----------------------------------------------------------------------------------------------------
        self.omsclient.create_site_data_product(site_id=instrument_site_id,
                                                data_product_id=out_log_data_product_id)

        #----------------------------------------------------------------------------------------------------
        # Create an instrument device
        #----------------------------------------------------------------------------------------------------
        instrument_device_obj = IonObject(RT.InstrumentDevice,
                                          name='InstrumentDevice1',
                                          description='test instrument device')
        instrument_device_id = self.imsclient.create_instrument_device(instrument_device_obj)
        self.rrclient.create_association(platform_device_id, PRED.hasDevice, instrument_device_id)

        dp_obj = IonObject(RT.DataProduct,
                           name='Instrument Data Product',
                           description='some new dp',
                           temporal_domain=tdom,
                           spatial_domain=sdom)
        inst_data_product_id = self.dmpsclient.create_data_product(dp_obj, ctd_stream_def_id)

        #assign data products appropriately
        self.damsclient.assign_data_product(input_resource_id=instrument_device_id,
                                            data_product_id=inst_data_product_id)

        #----------------------------------------------------------------------------------------------------
        # Create an instrument model
        #----------------------------------------------------------------------------------------------------
        instrument_model_obj = IonObject(RT.InstrumentModel,
                                         name='InstrumentModel1',
                                         description='test instrument model')
        instrument_model_id = self.imsclient.create_instrument_model(instrument_model_obj)

        #----------------------------------------------------------------------------------------------------
        # Create a deployment object
        #----------------------------------------------------------------------------------------------------
        start = IonTime(datetime.datetime(2013, 1, 1))
        end = IonTime(datetime.datetime(2014, 1, 1))
        temporal_bounds = IonObject(OT.TemporalBounds, name='planned',
                                    start_datetime=start.to_string(), end_datetime=end.to_string())
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment',
                                   description='some new deployment',
                                   constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)
        log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id))

        ret = DotDict(instrument_site_id=instrument_site_id,
                      instrument_device_id=instrument_device_id,
                      instrument_model_id=instrument_model_id,
                      platform_site_id=platform_site_id,
                      platform_device_id=platform_device_id,
                      platform_model_id=platform_model_id,
                      deployment_id=deployment_id)

        return ret

    #@unittest.skip("targeting")
    def test_activate_deployment_normal(self):
        """Full happy path: models assigned, sites+devices deployed, activation succeeds."""
        res = self.base_activate_deployment()

        log.debug("assigning platform and instrument models")
        self.imsclient.assign_platform_model_to_platform_device(res.platform_model_id, res.platform_device_id)
        self.imsclient.assign_instrument_model_to_instrument_device(res.instrument_model_id, res.instrument_device_id)
        self.omsclient.assign_platform_model_to_platform_site(res.platform_model_id, res.platform_site_id)
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id, res.instrument_site_id)

        log.debug("adding instrument site and device to deployment")
        self.omsclient.deploy_instrument_site(res.instrument_site_id, res.deployment_id)
        self.imsclient.deploy_instrument_device(res.instrument_device_id, res.deployment_id)

        log.debug("adding platform site and device to deployment")
        self.omsclient.deploy_platform_site(res.platform_site_id, res.deployment_id)
        self.imsclient.deploy_platform_device(res.platform_device_id, res.deployment_id)

        log.debug("activating deployment, expecting success")
        self.omsclient.activate_deployment(res.deployment_id)

    #@unittest.skip("targeting")
    def test_activate_deployment_nomodels(self):
        """Activation must fail while site/device models are missing."""
        res = self.base_activate_deployment()

        self.omsclient.deploy_instrument_site(res.instrument_site_id, res.deployment_id)
        self.imsclient.deploy_instrument_device(res.instrument_device_id, res.deployment_id)

        log.debug("activating deployment without site+device models, expecting fail")
        self.assert_deploy_fail(res.deployment_id, "Expected at least 1 model for InstrumentSite")

        log.debug("assigning instrument site model")
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id, res.instrument_site_id)

        log.debug("activating deployment without device models, expecting fail")
        self.assert_deploy_fail(res.deployment_id, "Expected 1 model for InstrumentDevice")

    #@unittest.skip("targeting")
    def test_activate_deployment_nosite(self):
        """Activation must fail when only a device (no site) is deployed."""
        res = self.base_activate_deployment()

        log.debug("assigning instrument models")
        self.imsclient.assign_instrument_model_to_instrument_device(res.instrument_model_id, res.instrument_device_id)
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id, res.instrument_site_id)

        log.debug("deploying instrument device only")
        self.imsclient.deploy_instrument_device(res.instrument_device_id, res.deployment_id)

        log.debug("activating deployment without device models, expecting fail")
        self.assert_deploy_fail(res.deployment_id, "No sites were found in the deployment")

    #@unittest.skip("targeting")
    def test_activate_deployment_nodevice(self):
        """Activation must fail when only a site (no device) is deployed."""
        res = self.base_activate_deployment()

        log.debug("assigning platform and instrument models")
        self.imsclient.assign_instrument_model_to_instrument_device(res.instrument_model_id, res.instrument_device_id)
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id, res.instrument_site_id)

        log.debug("deploying instrument site only")
        self.omsclient.deploy_instrument_site(res.instrument_site_id, res.deployment_id)

        log.debug("activating deployment without device models, expecting fail")
        self.assert_deploy_fail(res.deployment_id, "The set of devices could not be mapped to the set of sites")

    def assert_deploy_fail(self, deployment_id, fail_message="did not specify fail_message"):
        """Assert that activate_deployment raises BadRequest containing fail_message."""
        with self.assertRaises(BadRequest) as cm:
            self.omsclient.activate_deployment(deployment_id)
        self.assertIn(fail_message, cm.exception.message)
class TestResourceRegistry(IonIntegrationTestCase):
    """Integration tests for the resource registry service: CRUD, lifecycle,
    attachments, associations and find operations."""

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to bank service
        self.resource_registry_service = ResourceRegistryServiceClient()

    @unittest.skip('Represents a bug in storage/retrieval')
    def test_tuple_in_dict(self):
        # create a resource with a tuple saved in a dict
        transform_obj = IonObject(RT.Transform)
        transform_obj.configuration = {}
        transform_obj.configuration["tuple"] = ('STRING', )
        transform_id, _ = self.resource_registry_service.create(transform_obj)

        # read the resource back
        returned_transform_obj = self.resource_registry_service.read(transform_id)

        self.assertEqual(transform_obj.configuration["tuple"],
                         returned_transform_obj.configuration["tuple"])

    def test_basics(self):
        # Sequence all the tests so that we can save numerous system start and stops
        self._do_test_crud()
        self._do_test_read_mult()
        self._do_test_lifecycle()
        self._do_test_attach()
        self._do_test_association()
        self._do_test_find_resources()
        self._do_test_find_objects_mult()

    def _do_test_crud(self):
        """Exercise create/read/update/delete plus schema and ownership checks."""
        # Some quick registry tests
        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", name="name", foo="bar")
        self.assertTrue(cm.exception.message == "__init__() got an unexpected keyword argument 'foo'")

        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name", "foo": "bar"})
        self.assertTrue(cm.exception.message == "__init__() got an unexpected keyword argument 'foo'")

        # Can't call new with fields that aren't defined in the object's schema
        with self.assertRaises(TypeError) as cm:
            IonObject("UserInfo", {"name": "name"}, foo="bar")
        self.assertTrue(cm.exception.message == "__init__() got an unexpected keyword argument 'foo'")

        # Instantiate an object
        obj = IonObject("UserInfo", name="name")

        # Can't set attributes that aren't in the object's schema
        with self.assertRaises(AttributeError) as cm:
            setattr(obj, "foo", "bar")
        self.assertTrue(cm.exception.message == "'UserInfo' object has no attribute 'foo'")

        # Can't call update with object that hasn't been persisted
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.update(obj)
        self.assertTrue(cm.exception.message.startswith("Object does not have required '_id' or '_rev' attribute"))

        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        # Cannot create object with _id and _rev fields pre-set
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create(read_obj)

        # Update object
        read_obj.name = "John Doe"
        self.resource_registry_service.update(read_obj)

        # Update should fail with revision mismatch
        with self.assertRaises(Conflict) as cm:
            self.resource_registry_service.update(read_obj)

        # Re-read and update object
        read_obj = self.resource_registry_service.read(obj_id)
        self.resource_registry_service.update(read_obj)

        # Delete object
        self.resource_registry_service.delete(obj_id)

        # Make sure read, update and delete report error
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.read(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.update(read_obj)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete(obj_id)
        self.assertTrue(cm.exception.message.startswith("Object with id"))

        # Owner creation tests
        user = IonObject("ActorIdentity", name='user')
        uid, _ = self.resource_registry_service.create(user)

        inst = IonObject("InstrumentDevice", name='instrument')
        iid, _ = self.resource_registry_service.create(inst, headers={'ion-actor-id': str(uid)})

        ids, _ = self.resource_registry_service.find_objects(iid, PRED.hasOwner, RT.ActorIdentity, id_only=True)
        self.assertEquals(len(ids), 1)

        assoc = self.resource_registry_service.read(ids[0])
        self.resource_registry_service.delete(iid)

        with self.assertRaises(NotFound) as ex:
            assoc = self.resource_registry_service.read(iid)

    def _do_test_read_mult(self):
        """read_mult returns all requested resources as Resource instances."""
        test_resource1_id, _ = self.resource_registry_service.create(Resource(name='test1'))
        test_resource2_id, _ = self.resource_registry_service.create(Resource(name='test2'))

        res_list = [test_resource1_id, test_resource2_id]

        objects = self.resource_registry_service.read_mult(res_list)

        for o in objects:
            self.assertIsInstance(o, Resource)
            self.assertTrue(o._id in res_list)

    def _do_test_lifecycle(self):
        # Lifecycle tests
        att = IonObject("InstrumentDevice", name='mine', description='desc')

        rid, rev = self.resource_registry_service.create(att)

        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.name, att.name)
        self.assertEquals(att1.lcstate, LCS.DRAFT)
        self.assertEquals(att1.availability, AS.PRIVATE)

        new_state = self.resource_registry_service.execute_lifecycle_transition(rid, LCE.PLAN)
        self.assertEquals(new_state, lcstate(LCS.PLANNED, AS.PRIVATE))

        att2 = self.resource_registry_service.read(rid)
        self.assertEquals(att2.lcstate, LCS.PLANNED)
        self.assertEquals(att2.availability, AS.PRIVATE)

        # illegal transition for the current state must be rejected
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.execute_lifecycle_transition(rid, LCE.UNANNOUNCE)
        self.assertTrue("type=InstrumentDevice, lcstate=PLANNED_PRIVATE has no transition for event unannounce" in cm.exception.message)

        new_state = self.resource_registry_service.execute_lifecycle_transition(rid, LCE.DEVELOP)
        self.assertEquals(new_state, lcstate(LCS.DEVELOPED, AS.PRIVATE))

        # unknown transition event must be rejected
        with self.assertRaises(BadRequest):
            self.resource_registry_service.execute_lifecycle_transition(resource_id=rid, transition_event='NONE##')

        self.resource_registry_service.set_lifecycle_state(rid, lcstate(LCS.INTEGRATED, AS.PRIVATE))
        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.lcstate, LCS.INTEGRATED)
        self.assertEquals(att1.availability, AS.PRIVATE)

    def _do_test_attach(self):
        """Exercise attachment create/read/find/delete with binary and base64 content."""
        # small PNG image used as binary attachment payload
        binary = "\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x03\x00\x00\x00(-\x0fS\x00\x00\x00\x03sBIT\x08\x08\x08\xdb\xe1O\xe0\x00\x00\x00~PLTEf3\x00\xfc\xf7\xe0\xee\xcc\x00\xd3\xa0\x00\xcc\x99\x00\xec\xcdc\x9fl\x00\xdd\xb2\x00\xff\xff\xff|I\x00\xf9\xdb\x00\xdd\xb5\x19\xd9\xad\x10\xb6\x83\x00\xf8\xd6\x00\xf2\xc5\x00\xd8\xab\x00n;\x00\xff\xcc\x00\xd6\xa4\t\xeb\xb8\x00\x83Q\x00\xadz\x00\xff\xde\x00\xff\xd6\x00\xd6\xa3\x00\xdf\xaf\x00\xde\xad\x10\xbc\x8e\x00\xec\xbe\x00\xec\xd4d\xff\xe3\x00tA\x00\xf6\xc4\x00\xf6\xce\x00\xa5u\x00\xde\xa5\x00\xf7\xbd\x00\xd6\xad\x08\xdd\xaf\x19\x8cR\x00\xea\xb7\x00\xee\xe9\xdf\xc5\x00\x00\x00\tpHYs\x00\x00\n\xf0\x00\x00\n\xf0\x01B\xac4\x98\x00\x00\x00\x1ctEXtSoftware\x00Adobe Fireworks CS4\x06\xb2\xd3\xa0\x00\x00\x00\x15tEXtCreation Time\x0029/4/09Oq\xfdE\x00\x00\x00\xadIDAT\x18\x95M\x8f\x8d\x0e\x820\x0c\x84;ZdC~f\x07\xb2\x11D\x86\x89\xe8\xfb\xbf\xa0+h\xe2\x97\\\xd2^\x93\xb6\x07:1\x9f)q\x9e\xa5\x06\xad\xd5\x13\x8b\xac,\xb3\x02\x9d\x12C\xa1-\xef;M\x08*\x19\xce\x0e?\x1a\xeb4\xcc\xd4\x0c\x831\x87V\xca\xa1\x1a\xd3\x08@\xe4\xbd\xb7\x15P;\xc8\xd4{\x91\xbf\x11\x90\xffg\xdd\x8di\xfa\xb6\x0bs2Z\xff\xe8yg2\xdc\x11T\x96\xc7\x05\xa5\xef\x96+\xa7\xa59E\xae\xe1\x84cm^1\xa6\xb3\xda\x85\xc8\xd8/\x17se\x0eN^'\x8c\xc7\x8e\x88\xa8\xf6p\x8e\xc2;\xc6.\xd0\x11.\x91o\x12\x7f\xcb\xa5\xfe\x00\x89]\x10:\xf5\x00\x0e\xbf\x00\x00\x00\x00IEND\xaeB`\x82"

        # Owner creation tests
        instrument = IonObject("InstrumentDevice", name='instrument')
        iid, _ = self.resource_registry_service.create(instrument)

        att = Attachment(content=binary, attachment_type=AttachmentType.BLOB)
        aid1 = self.resource_registry_service.create_attachment(iid, att)

        att1 = self.resource_registry_service.read_attachment(aid1, include_content=True)
        self.assertEquals(binary, att1.content)

        # NOTE(review): base64.encodestring/decodestring are the Python 2 names
        # (removed in Python 3.9; use encodebytes/decodebytes there)
        import base64
        att = Attachment(content=base64.encodestring(binary), attachment_type=AttachmentType.ASCII)
        aid2 = self.resource_registry_service.create_attachment(iid, att)

        att1 = self.resource_registry_service.read_attachment(aid2, include_content=True)
        self.assertEquals(binary, base64.decodestring(att1.content))

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True)
        self.assertEquals(att_ids, [aid1, aid2])

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True, descending=True)
        self.assertEquals(att_ids, [aid2, aid1])

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True, descending=True, limit=1)
        self.assertEquals(att_ids, [aid2])

        atts = self.resource_registry_service.find_attachments(iid, id_only=False, include_content=True, limit=1)
        self.assertEquals(atts[0].content, binary)

        self.resource_registry_service.delete_attachment(aid1)

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True)
        self.assertEquals(att_ids, [aid2])

        self.resource_registry_service.delete_attachment(aid2)

        att_ids = self.resource_registry_service.find_attachments(iid, id_only=True)
        self.assertEquals(att_ids, [])

    def _do_test_association(self):
        """Exercise association create/find/read/delete including all failure modes."""
        # Instantiate ActorIdentity object
        actor_identity_obj = IonObject("ActorIdentity", name="name")
        actor_identity_obj_id, actor_identity_obj_rev = self.resource_registry_service.create(actor_identity_obj)
        read_actor_identity_obj = self.resource_registry_service.read(actor_identity_obj_id)

        # Instantiate UserInfo object
        user_info_obj = IonObject("UserInfo", name="name")
        user_info_obj_id, user_info_obj_rev = self.resource_registry_service.create(user_info_obj)
        read_user_info_obj = self.resource_registry_service.read(user_info_obj_id)

        # Test create failures
        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.bogus, user_info_obj_id)
        self.assertTrue(cm.exception.message == "bogus")

        # Predicate not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, None, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Association must have all elements set")

        # Subject id or object not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(None, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Association must have all elements set")

        # Object id or object not provided
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, None)
        self.assertTrue(cm.exception.message == "Association must have all elements set")

        # Bad subject id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association("bogus", PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Object with id bogus does not exist.")

        # Bad object id
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, "bogus")
        self.assertTrue(cm.exception.message == "Object with id bogus does not exist.")

        # _id missing from subject
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message.startswith("Subject id"))

        # _id missing from object
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, user_info_obj)
        self.assertTrue(cm.exception.message.startswith("Object id"))

        # Wrong subject type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(user_info_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(cm.exception.message == "Illegal subject type UserInfo for predicate hasInfo")

        # Wrong object type
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, actor_identity_obj_id)
        self.assertTrue(cm.exception.message == "Illegal object type ActorIdentity for predicate hasInfo")

        # Create two different association types between the same subject and predicate
        assoc_id1, assoc_rev1 = self.resource_registry_service.create_association(actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)

        # Read object, subject
        res_obj1 = self.resource_registry_service.read_object(actor_identity_obj_id, PRED.hasInfo, RT.UserInfo)
        self.assertEquals(res_obj1._id, user_info_obj_id)
        res_obj1 = self.resource_registry_service.read_object(actor_identity_obj_id, PRED.hasInfo, RT.UserInfo, id_only=True)
        self.assertEquals(res_obj1, user_info_obj_id)
        res_obj2 = self.resource_registry_service.read_subject(RT.ActorIdentity, PRED.hasInfo, user_info_obj_id)
        self.assertEquals(res_obj2._id, actor_identity_obj_id)
        res_obj2 = self.resource_registry_service.read_subject(RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, id_only=True)
        self.assertEquals(res_obj2, actor_identity_obj_id)

        # Search for associations (good cases)
        ret1 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        ret2 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, user_info_obj_id, None, False)
        ret2 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, id_only=False)
        ret3 = self.resource_registry_service.find_associations(predicate=PRED.hasInfo, id_only=False)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations (good cases)
        ret1 = self.resource_registry_service.find_associations(read_actor_identity_obj, PRED.hasInfo, read_user_info_obj)
        ret2 = self.resource_registry_service.find_associations(read_actor_identity_obj, PRED.hasInfo)
        ret3 = self.resource_registry_service.find_associations(None, PRED.hasInfo)
        self.assertTrue(len(ret1) == len(ret2) == len(ret3))
        self.assertTrue(ret1[0]._id == ret2[0]._id == ret3[0]._id)

        ret1 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, read_user_info_obj, None, True)
        ret2 = self.resource_registry_service.find_associations(actor_identity_obj_id, PRED.hasInfo, id_only=True)
        ret3 = self.resource_registry_service.find_associations(predicate=PRED.hasInfo, id_only=True)
        self.assertTrue(ret1 == ret2 == ret3)

        # Search for associations (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_associations(None, None, None)
        self.assertIn("Illegal parameters", cm.exception.message)

        # Find subjects (good cases)
        subj_ret1 = self.resource_registry_service.find_subjects(RT.ActorIdentity, PRED.hasInfo, user_info_obj_id, True)
        subj_ret2 = self.resource_registry_service.find_subjects(RT.ActorIdentity, PRED.hasInfo, read_user_info_obj, True)
        self.assertTrue(len(subj_ret1) == len(subj_ret2))
        self.assertTrue(subj_ret1[0] == subj_ret2[0])
        self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id)

        subj_ret3 = self.resource_registry_service.find_subjects(None, PRED.hasInfo, user_info_obj_id, True)
        subj_ret4 = self.resource_registry_service.find_subjects(None, None, read_user_info_obj, True)
        self.assertTrue(len(subj_ret3) == len(subj_ret4))
        self.assertTrue(subj_ret3[0] == subj_ret4[0])
        self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id)

        subj_ret5 = self.resource_registry_service.find_subjects(None, PRED.hasInfo, user_info_obj_id, False)
        subj_ret6 = self.resource_registry_service.find_subjects(None, None, read_user_info_obj, False)
        self.assertTrue(len(subj_ret5) == len(subj_ret6))
        self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id)
        self.assertTrue(subj_ret5[1][0]._id == subj_ret6[1][0]._id)

        # Find subjects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide object")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_subjects(RT.UserCredentials, PRED.bogus, user_info_obj_id, True)
        self.assertTrue(cm.exception.message == "bogus")

        ret = self.resource_registry_service.find_subjects(RT.UserInfo, PRED.hasCredentials, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_subjects(RT.UserCredentials, PRED.hasInfo, user_info_obj_id, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_subjects(RT.UserCredentials, PRED.hasInfo, user_info_obj, True)
        self.assertTrue(cm.exception.message == "Object id not available in object")

        # Find objects (good cases)
        subj_ret1 = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, RT.UserInfo, True)
        subj_ret2 = self.resource_registry_service.find_objects(read_actor_identity_obj, PRED.hasInfo, RT.UserInfo, True)
        self.assertTrue(len(subj_ret1) == len(subj_ret2))
        self.assertTrue(subj_ret1[0] == subj_ret2[0])
        self.assertTrue(subj_ret1[1][0]._id == subj_ret2[1][0]._id)

        subj_ret3 = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, None, True)
        subj_ret4 = self.resource_registry_service.find_objects(actor_identity_obj_id, None, None, True)
        self.assertTrue(len(subj_ret3) == len(subj_ret4))
        self.assertTrue(subj_ret3[0] == subj_ret4[0])
        self.assertTrue(subj_ret3[1][0]._id == subj_ret4[1][0]._id)

        subj_ret5 = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, None, False)
        subj_ret6 = self.resource_registry_service.find_objects(read_actor_identity_obj, None, None, False)
        self.assertTrue(len(subj_ret5) == len(subj_ret6))
        self.assertTrue(subj_ret5[0][0]._id == subj_ret6[0][0]._id)
        self.assertTrue(subj_ret5[1][0]._id == subj_ret6[1][0]._id)

        # Find objects (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(None, None, None)
        self.assertTrue(cm.exception.message == "Must provide subject")

        with self.assertRaises(AttributeError) as cm:
            self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.bogus, RT.UserCredentials, True)
        self.assertTrue(cm.exception.message == "bogus")

        ret = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasCredentials, RT.ActorIdentity, True)
        self.assertTrue(len(ret[0]) == 0)

        ret = self.resource_registry_service.find_objects(actor_identity_obj_id, PRED.hasInfo, RT.UserCredentials, True)
        self.assertTrue(len(ret[0]) == 0)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_objects(actor_identity_obj, PRED.hasInfo, RT.UserInfo, True)
        self.assertTrue(cm.exception.message == "Object id not available in subject")

        # Get association (bad cases)
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.get_association(None, None, None)
        self.assertIn("Illegal parameters", cm.exception.message)

        assoc = self.resource_registry_service.get_association(actor_identity_obj_id, PRED.hasInfo, user_info_obj_id)
        self.assertTrue(assoc._id == assoc_id1)

        # Delete (bad cases)
        with self.assertRaises(NotFound) as cm:
            self.resource_registry_service.delete_association("bogus")
        self.assertTrue(cm.exception.message == "Object with id bogus does not exist.")

        # Delete other association
        self.resource_registry_service.delete_association(assoc_id1)

        # Delete resources
        self.resource_registry_service.delete(actor_identity_obj_id)
        self.resource_registry_service.delete(user_info_obj_id)

    def _do_test_find_resources(self):
        """find_resources by name and by lifecycle state."""
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_resources(RT.UserInfo, LCS.DRAFT, "name", False)
        self.assertTrue(cm.exception.message == "find by name does not support lcstate")

        ret = self.resource_registry_service.find_resources(RT.UserInfo, None, "name", False)
        self.assertEquals(len(ret[0]), 0)

        # Instantiate an object
        obj = IonObject("InstrumentAgentInstance", name="name")

        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        ret = self.resource_registry_service.find_resources(RT.InstrumentAgentInstance, None, "name", False)
        self.assertEquals(len(ret[0]), 1)
        self.assertEquals(ret[0][0]._id, read_obj._id)

        ret = self.resource_registry_service.find_resources(RT.InstrumentAgentInstance, LCS.DEPLOYED, None, False)
        self.assertEquals(len(ret[0]), 1)
        self.assertEquals(ret[0][0]._id, read_obj._id)

    def _do_test_find_objects_mult(self):
        """find_objects_mult over a small chain dp -> transform -> process definition."""
        dp = DataProcess()
        transform = Transform()
        pd = ProcessDefinition()

        dp_id, _ = self.resource_registry_service.create(dp)
        transform_id, _ = self.resource_registry_service.create(transform)
        pd_id, _ = self.resource_registry_service.create(pd)

        self.resource_registry_service.create_association(subject=dp_id, object=transform_id, predicate=PRED.hasTransform)
        self.resource_registry_service.create_association(subject=transform_id, object=pd_id, predicate=PRED.hasProcessDefinition)

        results, _ = self.resource_registry_service.find_objects_mult(subjects=[dp_id], id_only=True)
        self.assertTrue(results == [transform_id])

        results, _ = self.resource_registry_service.find_objects_mult(subjects=[dp_id, transform_id], id_only=True)
        results.sort()
        correct = [transform_id, pd_id]
        correct.sort()
        self.assertTrue(results == correct)

    # NOTE(review): this method is truncated at the chunk boundary below; the
    # remainder of its body lies beyond the visible portion of the file.
    @attr('EXT')
    def test_get_resource_extension(self):
        #Testing multiple instrument owners
        subject1 = "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254"

        actor_identity_obj1 = IonObject(RT.ActorIdentity, {"name": subject1})
        actor_id1, _ = self.resource_registry_service.create(actor_identity_obj1)

        user_info_obj1 = IonObject(RT.UserInfo,
{"name": "Foo"}) user_info_id1, _ = self.resource_registry_service.create( user_info_obj1) self.resource_registry_service.create_association( actor_id1, PRED.hasInfo, user_info_id1) subject2 = "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Bob Cumbers A256" actor_identity_obj2 = IonObject(RT.ActorIdentity, {"name": subject2}) actor_id2, _ = self.resource_registry_service.create( actor_identity_obj2) user_info_obj2 = IonObject(RT.UserInfo, {"name": "Foo2"}) user_info_id2, _ = self.resource_registry_service.create( user_info_obj2) self.resource_registry_service.create_association( actor_id2, PRED.hasInfo, user_info_id2) test_obj = IonObject(RT.InformationResource, {"name": "TestResource"}) test_obj_id, _ = self.resource_registry_service.create(test_obj) self.resource_registry_service.create_association( test_obj_id, PRED.hasOwner, actor_id1) self.resource_registry_service.create_association( test_obj_id, PRED.hasOwner, actor_id2) extended_resource = self.resource_registry_service.get_resource_extension( test_obj_id, OT.ExtendedInformationResource) self.assertEqual(test_obj_id, extended_resource._id) self.assertEqual(len(extended_resource.owners), 2) extended_resource_list = self.resource_registry_service.get_resource_extension( str([user_info_id1, user_info_id2]), OT.ExtendedInformationResource) self.assertEqual(len(extended_resource_list), 2) optional_args = {'user_id': user_info_id1} extended_resource = self.resource_registry_service.get_resource_extension( test_obj_id, OT.TestExtendedInformationResource, optional_args=optional_args) self.assertEqual(test_obj_id, extended_resource._id) self.assertEqual(len(extended_resource.owners), 2) self.assertEqual(extended_resource.user_id, user_info_id1) @attr('PREP') def test_prepare_resource_support(self): prepare_data = self.resource_registry_service.prepare_resource_support( resource_type=RT.StreamDefinition) self.assertEqual(prepare_data.create_request.service_name, "resource_registry") 
self.assertEqual(prepare_data.create_request.service_operation, "create") self.assertEqual(prepare_data.create_request.request_parameters, {"object": "$(object)"}) self.assertEqual(prepare_data.update_request.service_name, "resource_registry") self.assertEqual(prepare_data.update_request.service_operation, "update") self.assertEqual(prepare_data.update_request.request_parameters, {"object": "$(object)"}) res_id, _ = self.resource_registry_service.create( prepare_data.resource) prepare_data = self.resource_registry_service.prepare_resource_support( resource_type=RT.StreamDefinition, resource_id=res_id) prepare_data.resource.name = "test_stream_def" prepare_data.resource.stream_type = "test_type" stream_def_id, _ = self.resource_registry_service.update( prepare_data.resource) #def ion_object_encoder(obj): # return obj.__dict__ #print simplejson.dumps(prepare_data, default=ion_object_encoder, indent=2) stream_def = self.resource_registry_service.read(stream_def_id) self.assertEqual(stream_def.name, prepare_data.resource.name) self.assertEqual(stream_def.stream_type, prepare_data.resource.stream_type)
class TestDataProductProvenance(IonIntegrationTestCase):
    """Integration tests for data product provenance reporting."""

    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubclient = PubsubManagementServiceClient(node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
        self.dpmsclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.process_dispatcher = ProcessDispatcherServiceClient()

        self.dataset_management = DatasetManagementServiceClient()

        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
                self.dataprocessclient.deactivate_data_process(proc_id)
                self.dataprocessclient.delete_data_process(proc_id)
        self.addCleanup(killAllDataProcesses)

    def test_get_data_product_provenance_report(self):
        """Hand-build a Device -> DataProduct / DataProcess -> DataProduct
        provenance graph and verify the report walks all of it."""
        #Create a test device
        device_obj = Device(name='Device1',
                            description='test instrument site')
        device_id, _ = self.rrclient.create(device_obj)
        self.addCleanup(self.rrclient.delete, device_id)

        #Create a test DataProduct
        data_product1_obj = DataProduct(name='DataProduct1',
                                        description='test data product 1')
        data_product1_id, _ = self.rrclient.create(data_product1_obj)
        self.addCleanup(self.rrclient.delete, data_product1_id)

        #Create a test DataProcess
        data_process_obj = DataProcess(name='DataProcess',
                                       description='test data process')
        data_process_id, _ = self.rrclient.create(data_process_obj)
        self.addCleanup(self.rrclient.delete, data_process_id)

        #Create a second test DataProduct
        data_product2_obj = DataProduct(name='DataProduct2',
                                        description='test data product 2')
        data_product2_id, _ = self.rrclient.create(data_product2_obj)
        self.addCleanup(self.rrclient.delete, data_product2_id)

        #Create a test DataProducer
        data_producer_obj = DataProducer(name='DataProducer',
                                         description='test data producer')
        data_producer_id, rev = self.rrclient.create(data_producer_obj)

        #Link the DataProcess to the second DataProduct manually
        assoc_id, _ = self.rrclient.create_association(
            subject=data_process_id, predicate=PRED.hasInputProduct,
            object=data_product2_id)
        self.addCleanup(self.rrclient.delete_association, assoc_id)

        # Register the instrument and process. This links the device and the
        # data process with their own producers
        self.damsclient.register_instrument(device_id)
        self.addCleanup(self.damsclient.unregister_instrument, device_id)
        self.damsclient.register_process(data_process_id)
        self.addCleanup(self.damsclient.unregister_process, data_process_id)

        #Manually link the first DataProduct with the test DataProducer
        assoc_id, _ = self.rrclient.create_association(
            subject=data_product1_id, predicate=PRED.hasDataProducer,
            object=data_producer_id)

        #Get the DataProducer linked to the DataProcess (created in
        #register_process above). Associate that with DataProduct1's
        #DataProducer
        data_process_producer_ids, _ = self.rrclient.find_objects(
            subject=data_process_id, predicate=PRED.hasDataProducer,
            object_type=RT.DataProducer, id_only=True)
        assoc_id, _ = self.rrclient.create_association(
            subject=data_process_producer_ids[0], predicate=PRED.hasParent,
            object=data_producer_id)
        self.addCleanup(self.rrclient.delete_association, assoc_id)

        #Get the DataProducer linked to the Device (created in
        #register_instrument). Associate that with the DataProcess's
        #DataProducer
        device_producer_ids, _ = self.rrclient.find_objects(
            subject=device_id, predicate=PRED.hasDataProducer,
            object_type=RT.DataProducer, id_only=True)
        assoc_id, _ = self.rrclient.create_association(
            subject=data_producer_id, predicate=PRED.hasParent,
            object=device_producer_ids[0])

        #Create the links between the Device, DataProducts, DataProcess,
        #and all DataProducers
        self.damsclient.assign_data_product(
            input_resource_id=device_id, data_product_id=data_product1_id)
        self.addCleanup(self.damsclient.unassign_data_product,
                        device_id, data_product1_id)
        self.damsclient.assign_data_product(
            input_resource_id=data_process_id,
            data_product_id=data_product2_id)
        self.addCleanup(self.damsclient.unassign_data_product,
                        data_process_id, data_product2_id)

        #Traverse through the relationships to get the links between objects
        res = self.dpmsclient.get_data_product_provenance_report(
            data_product2_id)

        #Make sure there are four keys
        self.assertEqual(len(res.keys()), 4)

        parent_count = 0
        config_count = 0
        for v in res.itervalues():
            if 'parent' in v:
                parent_count += 1
            if 'config' in v:
                config_count += 1

        #Make sure there are three parents and four configs
        self.assertEqual(parent_count, 3)
        self.assertEqual(config_count, 4)

    @unittest.skip('This test is obsolete with new framework')
    def test_get_provenance(self):
        """Obsolete end-to-end provenance test (skipped): stands up a full
        instrument + CTD transform chain and checks provenance output."""
        #create a deployment with metadata and an initial site and device
        instrument_site_obj = IonObject(RT.InstrumentSite,
                                        name='InstrumentSite1',
                                        description='test instrument site')
        instrument_site_id = self.omsclient.create_instrument_site(
            instrument_site_obj, "")
        log.debug('test_get_provenance: new instrument_site_id id = %s ',
                  str(instrument_site_id))

        # Create InstrumentModel
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='SBE37IMModel',
                                  description="SBE37IMModel")
        try:
            instModel_id = self.imsclient.create_instrument_model(instModel_obj)
        except BadRequest as ex:
            self.fail("failed to create new InstrumentModel: %s" %ex)
        log.debug('test_get_provenance: new InstrumentModel id = %s ',
                  str(instModel_id))

        self.omsclient.assign_instrument_model_to_instrument_site(
            instModel_id, instrument_site_id)
# Create InstrumentAgent parsed_config = StreamConfiguration(stream_name='parsed', parameter_dictionary_name='ctd_parsed_param_dict' ) instAgent_obj = IonObject(RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", driver_uri=DRV_URI_GOOD, stream_configurations = [parsed_config] ) try: instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj) except BadRequest as ex: self.fail("failed to create new InstrumentAgent: %s" %ex) log.debug( 'test_get_provenance:new InstrumentAgent id = %s', instAgent_id) self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id) # Create InstrumentDevice log.debug('test_get_provenance: Create instrument resource to represent the SBE37 (SA Req: L4-CI-SA-RQ-241) ') instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345" ) try: instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj) self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id) except BadRequest as ex: self.fail("failed to create new InstrumentDevice: %s" %ex) log.debug("test_get_provenance: new InstrumentDevice id = %s (SA Req: L4-CI-SA-RQ-241) ", instDevice_id) #------------------------------- # Create CTD Parsed data product #------------------------------- tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True) parsed_stream_def_id = self.pubsubclient.create_stream_definition(name='parsed', parameter_dictionary_id=pdict_id) log.debug( 'test_get_provenance:Creating new CDM data product with a stream definition') dp_obj = IonObject(RT.DataProduct, name='the parsed data', description='ctd stream test', temporal_domain = tdom, spatial_domain = sdom) ctd_parsed_data_product = self.dpmsclient.create_data_product(data_product=dp_obj, stream_definition_id=parsed_stream_def_id) 
log.debug( 'new dp_id = %s', ctd_parsed_data_product) self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_parsed_data_product) self.dpmsclient.activate_data_product_persistence(data_product_id=ctd_parsed_data_product) #------------------------------- # create a data product for the site to pass the OMS check.... we need to remove this check #------------------------------- dp_obj = IonObject(RT.DataProduct, name='DP1', description='some new dp', temporal_domain = tdom, spatial_domain = sdom) log_data_product_id = self.dpmsclient.create_data_product(dp_obj, parsed_stream_def_id) #------------------------------- # Deploy instrument device to instrument site #------------------------------- deployment_obj = IonObject(RT.Deployment, name='TestDeployment', description='some new deployment') deployment_id = self.omsclient.create_deployment(deployment_obj) self.omsclient.deploy_instrument_site(instrument_site_id, deployment_id) self.imsclient.deploy_instrument_device(instDevice_id, deployment_id) log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id) ) self.omsclient.activate_deployment(deployment_id) inst_device_objs, _ = self.rrclient.find_objects(subject=instrument_site_id, predicate=PRED.hasDevice, object_type=RT.InstrumetDevice, id_only=False) log.debug("test_create_deployment: deployed device: %s ", str(inst_device_objs[0]) ) #------------------------------- # Create the agent instance #------------------------------- port_agent_config = { 'device_addr': CFG.device.sbe37.host, 'device_port': CFG.device.sbe37.port, 'process_type': PortAgentProcessType.UNIX, 'binary_path': "port_agent", 'port_agent_addr': 'localhost', 'command_port': CFG.device.sbe37.port_agent_cmd_port, 'data_port': CFG.device.sbe37.port_agent_data_port, 'log_level': 5, 'type': PortAgentType.ETHERNET } instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance', description="SBE37IMAgentInstance", 
port_agent_config = port_agent_config) instAgentInstance_id = self.imsclient.create_instrument_agent_instance(instAgentInstance_obj, instAgent_id, instDevice_id) #------------------------------- # L0 Conductivity - Temperature - Pressure: Data Process Definition #------------------------------- log.debug("TestDataProductProvenance: create data process definition ctd_L0_all") dpd_obj = IonObject(RT.DataProcessDefinition, name='ctd_L0_all', description='transform ctd package into three separate L0 streams', module='ion.processes.data.transforms.ctd.ctd_L0_all', class_name='ctd_L0_all') try: ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj) except BadRequest as ex: self.fail("failed to create new ctd_L0_all data process definition: %s" %ex) #------------------------------- # L1 Conductivity: Data Process Definition #------------------------------- log.debug("TestDataProductProvenance: create data process definition CTDL1ConductivityTransform") dpd_obj = IonObject(RT.DataProcessDefinition, name='ctd_L1_conductivity', description='create the L1 conductivity data product', module='ion.processes.data.transforms.ctd.ctd_L1_conductivity', class_name='CTDL1ConductivityTransform') try: ctd_L1_conductivity_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj) except BadRequest as ex: self.fail("failed to create new CTDL1ConductivityTransform data process definition: %s" %ex) #------------------------------- # L1 Pressure: Data Process Definition #------------------------------- log.debug("TestDataProductProvenance: create data process definition CTDL1PressureTransform") dpd_obj = IonObject(RT.DataProcessDefinition, name='ctd_L1_pressure', description='create the L1 pressure data product', module='ion.processes.data.transforms.ctd.ctd_L1_pressure', class_name='CTDL1PressureTransform') try: ctd_L1_pressure_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj) except BadRequest as ex: 
self.fail("failed to create new CTDL1PressureTransform data process definition: %s" %ex) #------------------------------- # L1 Temperature: Data Process Definition #------------------------------- log.debug("TestDataProductProvenance: create data process definition CTDL1TemperatureTransform") dpd_obj = IonObject(RT.DataProcessDefinition, name='ctd_L1_temperature', description='create the L1 temperature data product', module='ion.processes.data.transforms.ctd.ctd_L1_temperature', class_name='CTDL1TemperatureTransform') try: ctd_L1_temperature_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj) except BadRequest as ex: self.fail("failed to create new CTDL1TemperatureTransform data process definition: %s" %ex) #------------------------------- # L2 Salinity: Data Process Definition #------------------------------- log.debug("TestDataProductProvenance: create data process definition SalinityTransform") dpd_obj = IonObject(RT.DataProcessDefinition, name='ctd_L2_salinity', description='create the L1 temperature data product', module='ion.processes.data.transforms.ctd.ctd_L2_salinity', class_name='SalinityTransform') try: ctd_L2_salinity_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj) except BadRequest as ex: self.fail("failed to create new SalinityTransform data process definition: %s" %ex) #------------------------------- # L2 Density: Data Process Definition #------------------------------- log.debug("TestDataProductProvenance: create data process definition DensityTransform") dpd_obj = IonObject(RT.DataProcessDefinition, name='ctd_L2_density', description='create the L1 temperature data product', module='ion.processes.data.transforms.ctd.ctd_L2_density', class_name='DensityTransform') try: ctd_L2_density_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj) except BadRequest as ex: self.fail("failed to create new DensityTransform data process definition: %s" %ex) 
#------------------------------- # L0 Conductivity - Temperature - Pressure: Output Data Products #------------------------------- outgoing_stream_l0_conductivity_id = self.pubsubclient.create_stream_definition(name='L0_Conductivity', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_conductivity_id, ctd_L0_all_dprocdef_id, binding='conductivity' ) outgoing_stream_l0_pressure_id = self.pubsubclient.create_stream_definition(name='L0_Pressure', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_pressure_id, ctd_L0_all_dprocdef_id, binding='pressure' ) outgoing_stream_l0_temperature_id = self.pubsubclient.create_stream_definition(name='L0_Temperature', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_temperature_id, ctd_L0_all_dprocdef_id, binding='temperature' ) log.debug("TestDataProductProvenance: create output data product L0 conductivity") ctd_l0_conductivity_output_dp_obj = IonObject( RT.DataProduct, name='L0_Conductivity', description='transform output conductivity', temporal_domain = tdom, spatial_domain = sdom) ctd_l0_conductivity_output_dp_id = self.dpmsclient.create_data_product(ctd_l0_conductivity_output_dp_obj, outgoing_stream_l0_conductivity_id) log.debug("TestDataProductProvenance: create output data product L0 pressure") ctd_l0_pressure_output_dp_obj = IonObject( RT.DataProduct, name='L0_Pressure', description='transform output pressure', temporal_domain = tdom, spatial_domain = sdom) ctd_l0_pressure_output_dp_id = self.dpmsclient.create_data_product(ctd_l0_pressure_output_dp_obj, outgoing_stream_l0_pressure_id) log.debug("TestDataProductProvenance: create output data product L0 temperature") ctd_l0_temperature_output_dp_obj = IonObject( RT.DataProduct, name='L0_Temperature', description='transform output temperature', 
temporal_domain = tdom, spatial_domain = sdom) ctd_l0_temperature_output_dp_id = self.dpmsclient.create_data_product(ctd_l0_temperature_output_dp_obj, outgoing_stream_l0_temperature_id) #------------------------------- # L1 Conductivity - Temperature - Pressure: Output Data Products #------------------------------- outgoing_stream_l1_conductivity_id = self.pubsubclient.create_stream_definition(name='L1_conductivity', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l1_conductivity_id, ctd_L1_conductivity_dprocdef_id, binding='conductivity' ) outgoing_stream_l1_pressure_id = self.pubsubclient.create_stream_definition(name='L1_Pressure', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l1_pressure_id, ctd_L1_pressure_dprocdef_id, binding='pressure' ) outgoing_stream_l1_temperature_id = self.pubsubclient.create_stream_definition(name='L1_Temperature', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l1_temperature_id, ctd_L1_temperature_dprocdef_id, binding='temperature' ) log.debug("TestDataProductProvenance: create output data product L1 conductivity") ctd_l1_conductivity_output_dp_obj = IonObject(RT.DataProduct, name='L1_Conductivity', description='transform output L1 conductivity', temporal_domain = tdom, spatial_domain = sdom) ctd_l1_conductivity_output_dp_id = self.dpmsclient.create_data_product(ctd_l1_conductivity_output_dp_obj, outgoing_stream_l1_conductivity_id) log.debug("TestDataProductProvenance: create output data product L1 pressure") ctd_l1_pressure_output_dp_obj = IonObject( RT.DataProduct, name='L1_Pressure', description='transform output L1 pressure', temporal_domain = tdom, spatial_domain = sdom) ctd_l1_pressure_output_dp_id = self.dpmsclient.create_data_product(ctd_l1_pressure_output_dp_obj, outgoing_stream_l1_pressure_id) 
log.debug("TestDataProductProvenance: create output data product L1 temperature") ctd_l1_temperature_output_dp_obj = IonObject( RT.DataProduct, name='L1_Temperature', description='transform output L1 temperature', temporal_domain = tdom, spatial_domain = sdom) ctd_l1_temperature_output_dp_id = self.dpmsclient.create_data_product(ctd_l1_temperature_output_dp_obj, outgoing_stream_l1_temperature_id) #------------------------------- # L2 Salinity - Density: Output Data Products #------------------------------- outgoing_stream_l2_salinity_id = self.pubsubclient.create_stream_definition(name='L2_salinity', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l2_salinity_id, ctd_L2_salinity_dprocdef_id, binding='salinity' ) outgoing_stream_l2_density_id = self.pubsubclient.create_stream_definition(name='L2_Density', parameter_dictionary_id=pdict_id) self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l2_density_id, ctd_L2_density_dprocdef_id, binding='density' ) log.debug("TestDataProductProvenance: create output data product L2 Salinity") ctd_l2_salinity_output_dp_obj = IonObject( RT.DataProduct, name='L2_Salinity', description='transform output L2 salinity', temporal_domain = tdom, spatial_domain = sdom) ctd_l2_salinity_output_dp_id = self.dpmsclient.create_data_product(ctd_l2_salinity_output_dp_obj, outgoing_stream_l2_salinity_id) log.debug("TestDataProductProvenance: create output data product L2 Density") # ctd_l2_density_output_dp_obj = IonObject( RT.DataProduct, # name='L2_Density', # description='transform output pressure', # temporal_domain = tdom, # spatial_domain = sdom) # # ctd_l2_density_output_dp_id = self.dpmsclient.create_data_product(ctd_l2_density_output_dp_obj, # outgoing_stream_l2_density_id, # parameter_dictionary) contactInfo = ContactInformation() contactInfo.individual_names_given = "Bill" contactInfo.individual_name_family = "Smith" 
contactInfo.street_address = "111 First St" contactInfo.city = "San Diego" contactInfo.email = "*****@*****.**" contactInfo.phones = ["858-555-6666"] contactInfo.country = "USA" contactInfo.postal_code = "92123" ctd_l2_density_output_dp_obj = IonObject( RT.DataProduct, name='L2_Density', description='transform output pressure', contacts = [contactInfo], iso_topic_category = "my_iso_topic_category_here", quality_control_level = "1", temporal_domain = tdom, spatial_domain = sdom) ctd_l2_density_output_dp_id = self.dpmsclient.create_data_product(ctd_l2_density_output_dp_obj, outgoing_stream_l2_density_id) #------------------------------- # L0 Conductivity - Temperature - Pressure: Create the data process #------------------------------- log.debug("TestDataProductProvenance: create L0 all data_process start") try: input_data_products = [ctd_parsed_data_product] output_data_products = [ctd_l0_conductivity_output_dp_id, ctd_l0_pressure_output_dp_id, ctd_l0_temperature_output_dp_id] ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process( data_process_definition_id = ctd_L0_all_dprocdef_id, in_data_product_ids = input_data_products, out_data_product_ids = output_data_products ) #activate only this data process just for coverage self.dataprocessclient.activate_data_process(ctd_l0_all_data_process_id) except BadRequest as ex: self.fail("failed to create new data process: %s" %ex) contents = "this is the lookup table contents, replace with a file..." 
att = IonObject(RT.Attachment, name='deviceLookupTable', content=base64.encodestring(contents), keywords=['DataProcessInput'], attachment_type=AttachmentType.ASCII) deviceAttachment = self.rrclient.create_attachment(ctd_l0_all_data_process_id, att) log.info( 'test_createTransformsThenActivateInstrument: InstrumentDevice attachment id = %s', deviceAttachment) log.debug("TestDataProductProvenance: create L0 all data_process return") #------------------------------- # L1 Conductivity: Create the data process #------------------------------- log.debug("TestDataProductProvenance: create L1 Conductivity data_process start") try: l1_conductivity_data_process_id = self.dataprocessclient.create_data_process( data_process_definition_id = ctd_L1_conductivity_dprocdef_id, in_data_product_ids = [ctd_l0_conductivity_output_dp_id], out_data_product_ids = [ctd_l1_conductivity_output_dp_id]) self.dataprocessclient.activate_data_process(l1_conductivity_data_process_id) except BadRequest as ex: self.fail("failed to create new data process: %s" %ex) #------------------------------- # L1 Pressure: Create the data process #------------------------------- log.debug("TestDataProductProvenance: create L1_Pressure data_process start") try: l1_pressure_data_process_id = self.dataprocessclient.create_data_process( data_process_definition_id = ctd_L1_pressure_dprocdef_id, in_data_product_ids = [ctd_l0_pressure_output_dp_id], out_data_product_ids = [ctd_l1_pressure_output_dp_id]) self.dataprocessclient.activate_data_process(l1_pressure_data_process_id) except BadRequest as ex: self.fail("failed to create new data process: %s" %ex) #------------------------------- # L1 Temperature: Create the data process #------------------------------- log.debug("TestDataProductProvenance: create L1_Pressure data_process start") try: l1_temperature_all_data_process_id = self.dataprocessclient.create_data_process( data_process_definition_id = ctd_L1_temperature_dprocdef_id, in_data_product_ids = 
[ctd_l0_temperature_output_dp_id], out_data_product_ids = [ctd_l1_temperature_output_dp_id]) self.dataprocessclient.activate_data_process(l1_temperature_all_data_process_id) except BadRequest as ex: self.fail("failed to create new data process: %s" %ex) #------------------------------- # L2 Salinity: Create the data process #------------------------------- log.debug("TestDataProductProvenance: create L2_salinity data_process start") try: l2_salinity_all_data_process_id = self.dataprocessclient.create_data_process( data_process_definition_id = ctd_L2_salinity_dprocdef_id, in_data_product_ids = [ctd_l1_conductivity_output_dp_id, ctd_l1_pressure_output_dp_id, ctd_l1_temperature_output_dp_id], out_data_product_ids = [ctd_l2_salinity_output_dp_id]) self.dataprocessclient.activate_data_process(l2_salinity_all_data_process_id) except BadRequest as ex: self.fail("failed to create new data process: %s" %ex) #------------------------------- # L2 Density: Create the data process #------------------------------- log.debug("TestDataProductProvenance: create L2_Density data_process start") try: in_dp_ids = [ctd_l1_conductivity_output_dp_id, ctd_l1_pressure_output_dp_id, ctd_l1_temperature_output_dp_id] out_dp_ids = [ctd_l2_density_output_dp_id] l2_density_all_data_process_id = self.dataprocessclient.create_data_process( data_process_definition_id = ctd_L2_density_dprocdef_id, in_data_product_ids = in_dp_ids, out_data_product_ids = out_dp_ids) self.dataprocessclient.activate_data_process(l2_density_all_data_process_id) except BadRequest as ex: self.fail("failed to create new data process: %s" %ex) #------------------------------- # Launch InstrumentAgentInstance, connect to the resource agent client #------------------------------- self.imsclient.start_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id) inst_agent_instance_obj= self.imsclient.read_instrument_agent_instance(instAgentInstance_id) print 'TestDataProductProvenance: Instrument agent instance 
obj: = ', inst_agent_instance_obj # Start a resource agent client to talk with the instrument agent. # self._ia_client = ResourceAgentClient('iaclient', name=ResourceAgentClient._get_agent_process_id(instDevice_id, process=FakeProcess()) # print 'activate_instrument: got ia client %s', self._ia_client # log.debug(" test_createTransformsThenActivateInstrument:: got ia client %s", str(self._ia_client)) #------------------------------- # Deactivate InstrumentAgentInstance #------------------------------- self.imsclient.stop_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id) self.dataprocessclient.deactivate_data_process(l2_density_all_data_process_id) self.dataprocessclient.deactivate_data_process(l2_salinity_all_data_process_id) self.dataprocessclient.deactivate_data_process(l1_temperature_all_data_process_id) self.dataprocessclient.deactivate_data_process(l1_pressure_data_process_id) self.dataprocessclient.deactivate_data_process(l1_conductivity_data_process_id) self.dataprocessclient.deactivate_data_process(ctd_l0_all_data_process_id) #------------------------------- # Retrieve the provenance info for the ctd density data product #------------------------------- provenance_dict = self.dpmsclient.get_data_product_provenance(ctd_l2_density_output_dp_id) log.debug("TestDataProductProvenance: provenance_dict %s", str(provenance_dict)) #validate that products are represented self.assertTrue (provenance_dict[str(ctd_l1_conductivity_output_dp_id)]) self.assertTrue (provenance_dict[str(ctd_l0_conductivity_output_dp_id)]) self.assertTrue (provenance_dict[str(ctd_l2_density_output_dp_id)]) self.assertTrue (provenance_dict[str(ctd_l1_temperature_output_dp_id)]) self.assertTrue (provenance_dict[str(ctd_l0_temperature_output_dp_id)]) density_dict = (provenance_dict[str(ctd_l2_density_output_dp_id)]) self.assertEquals(density_dict['producer'], [l2_density_all_data_process_id]) #------------------------------- # Retrieve the extended resource for this 
data product #------------------------------- extended_product = self.dpmsclient.get_data_product_extension(ctd_l2_density_output_dp_id) self.assertEqual(1, len(extended_product.data_processes) ) self.assertEqual(3, len(extended_product.process_input_data_products) ) # log.debug("TestDataProductProvenance: DataProduct provenance_product_list %s", str(extended_product.provenance_product_list)) # log.debug("TestDataProductProvenance: DataProduct data_processes %s", str(extended_product.data_processes)) # log.debug("TestDataProductProvenance: DataProduct process_input_data_products %s", str(extended_product.process_input_data_products)) # log.debug("TestDataProductProvenance: provenance %s", str(extended_product.computed.provenance.value)) #------------------------------- # Retrieve the extended resource for this data process #------------------------------- extended_process_def = self.dataprocessclient.get_data_process_definition_extension(ctd_L0_all_dprocdef_id) # log.debug("TestDataProductProvenance: DataProcess extended_process_def %s", str(extended_process_def)) # log.debug("TestDataProductProvenance: DataProcess data_processes %s", str(extended_process_def.data_processes)) # log.debug("TestDataProductProvenance: DataProcess data_products %s", str(extended_process_def.data_products)) self.assertEqual(1, len(extended_process_def.data_processes) ) self.assertEqual(3, len(extended_process_def.output_stream_definitions) ) self.assertEqual(3, len(extended_process_def.data_products) ) #one list because of one data process #------------------------------- # Request the xml report #------------------------------- results = self.dpmsclient.get_data_product_provenance_report(ctd_l2_density_output_dp_id) print results #------------------------------- # Cleanup #------------------------------- self.dpmsclient.delete_data_product(ctd_parsed_data_product) self.dpmsclient.delete_data_product(log_data_product_id) 
self.dpmsclient.delete_data_product(ctd_l0_conductivity_output_dp_id) self.dpmsclient.delete_data_product(ctd_l0_pressure_output_dp_id) self.dpmsclient.delete_data_product(ctd_l0_temperature_output_dp_id) self.dpmsclient.delete_data_product(ctd_l1_conductivity_output_dp_id) self.dpmsclient.delete_data_product(ctd_l1_pressure_output_dp_id) self.dpmsclient.delete_data_product(ctd_l1_temperature_output_dp_id) self.dpmsclient.delete_data_product(ctd_l2_salinity_output_dp_id) self.dpmsclient.delete_data_product(ctd_l2_density_output_dp_id)
class TestObservatoryManagementServiceIntegration(IonIntegrationTestCase):
    """Integration tests for ObservatoryManagementService.

    Exercises creation of observatory/site resources, the associations between
    them in the resource registry, and traversal of the site hierarchy via
    find_related_frames_of_reference.
    """

    def setUp(self):
        # Start the capability container and deploy the full set of r2 services,
        # then grab direct clients for the registry and observatory services.
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.RR = ResourceRegistryServiceClient(node=self.container.node)
        self.OMS = ObservatoryManagementServiceClient(node=self.container.node)

    @unittest.skip('this exists only for debugging the launch process')
    def test_just_the_setup(self):
        return

    #@unittest.skip('targeting')
    def test_resources_associations(self):
        # Creating one of each resource/association guards against problems
        # in ion-definitions; the helper raises on any failure.
        self._make_associations()

    #@unittest.skip('targeting')
    def test_find_related_frames_of_reference(self):
        """Traverse the site tree up and down and verify the reachable sets."""

        # finding subordinates gives a dict of obj lists; convert objs to ids
        def idify(adict):
            ids = {}
            for k, v in adict.items():
                ids[k] = [obj._id for obj in v]
            return ids

        # a short version of the function we're testing, with id-ify
        def short(resource_id, output_types):
            ret = self.OMS.find_related_frames_of_reference(resource_id,
                                                            output_types)
            return idify(ret)

        # set up associations first
        stuff = self._make_associations()

        # basic traversal of tree from instrument to platform
        ids = short(stuff.instrument_site_id, [RT.PlatformSite])
        self.assertIn(RT.PlatformSite, ids)
        self.assertIn(stuff.platform_site_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_siteb_id, ids[RT.PlatformSite])
        self.assertNotIn(stuff.platform_siteb2_id, ids[RT.PlatformSite])
        # since this is the first search, just make sure the input inst_id got stripped
        if RT.InstrumentSite in ids:
            self.assertNotIn(stuff.instrument_site_id, ids[RT.InstrumentSite])

        # basic traversal of tree from platform to instrument
        ids = short(stuff.platform_siteb_id, [RT.InstrumentSite])
        self.assertIn(RT.InstrumentSite, ids)
        self.assertIn(stuff.instrument_site_id, ids[RT.InstrumentSite])
        self.assertNotIn(stuff.instrument_site2_id, ids[RT.InstrumentSite])

        # full traversal of tree from observatory down to instrument
        ids = short(stuff.observatory_id, [RT.InstrumentSite])
        self.assertIn(RT.InstrumentSite, ids)
        self.assertIn(stuff.instrument_site_id, ids[RT.InstrumentSite])

        # full traversal of tree from instrument to observatory
        ids = short(stuff.instrument_site_id, [RT.Observatory])
        self.assertIn(RT.Observatory, ids)
        self.assertIn(stuff.observatory_id, ids[RT.Observatory])

        # partial traversal, only down to platform
        ids = short(stuff.observatory_id, [RT.Subsite, RT.PlatformSite])
        self.assertIn(RT.PlatformSite, ids)
        self.assertIn(RT.Subsite, ids)
        self.assertIn(stuff.platform_site_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_siteb_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_siteb2_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_site3_id, ids[RT.PlatformSite])
        self.assertIn(stuff.subsite_id, ids[RT.Subsite])
        self.assertIn(stuff.subsite2_id, ids[RT.Subsite])
        self.assertIn(stuff.subsitez_id, ids[RT.Subsite])
        self.assertIn(stuff.subsiteb_id, ids[RT.Subsite])
        self.assertNotIn(RT.InstrumentSite, ids)

        # partial traversal, only down to platform
        ids = short(stuff.instrument_site_id, [RT.Subsite, RT.PlatformSite])
        self.assertIn(RT.PlatformSite, ids)
        self.assertIn(RT.Subsite, ids)
        self.assertIn(stuff.platform_siteb_id, ids[RT.PlatformSite])
        self.assertIn(stuff.platform_site_id, ids[RT.PlatformSite])
        self.assertIn(stuff.subsite_id, ids[RT.Subsite])
        self.assertIn(stuff.subsiteb_id, ids[RT.Subsite])
        self.assertNotIn(stuff.subsite2_id, ids[RT.Subsite])
        self.assertNotIn(stuff.subsitez_id, ids[RT.Subsite])
        self.assertNotIn(stuff.platform_siteb2_id, ids[RT.PlatformSite])
        self.assertNotIn(RT.Observatory, ids)

    def _make_associations(self):
        """Create one of each resource and association used by OMS.

        Guards against problems in ion-definitions.
        (Previously skipped; see
        https://jira.oceanobservatories.org/tasks/browse/CISWCORE-41)

        The tree we're creating (observatory, sites, platforms, instruments);
        rows are lettered, columns numbered.
        - first row is implied a
        - first column is implied 1
        - site Z, just because

        O--Sz
        |
        S--S2--P3--I4
        |
        Sb-Pb2-Ib3
        |
        P--I2
        |
        Pb
        |
        I

        Returns a DotDict mapping descriptive names to the created resource ids.
        """
        # stuff we control
        observatory_id,      _ = self.RR.create(any_old(RT.Observatory))
        subsite_id,          _ = self.RR.create(any_old(RT.Subsite))
        subsite2_id,         _ = self.RR.create(any_old(RT.Subsite))
        subsiteb_id,         _ = self.RR.create(any_old(RT.Subsite))
        subsitez_id,         _ = self.RR.create(any_old(RT.Subsite))
        platform_site_id,    _ = self.RR.create(any_old(RT.PlatformSite))
        platform_siteb_id,   _ = self.RR.create(any_old(RT.PlatformSite))
        platform_siteb2_id,  _ = self.RR.create(any_old(RT.PlatformSite))
        platform_site3_id,   _ = self.RR.create(any_old(RT.PlatformSite))
        instrument_site_id,  _ = self.RR.create(any_old(RT.InstrumentSite))
        instrument_site2_id, _ = self.RR.create(any_old(RT.InstrumentSite))
        instrument_siteb3_id, _ = self.RR.create(any_old(RT.InstrumentSite))
        instrument_site4_id, _ = self.RR.create(any_old(RT.InstrumentSite))

        # stuff we associate to
        instrument_model_id,  _ = self.RR.create(any_old(RT.InstrumentModel))
        instrument_device_id, _ = self.RR.create(any_old(RT.InstrumentDevice))
        platform_model_id,    _ = self.RR.create(any_old(RT.PlatformModel))
        platform_device_id,   _ = self.RR.create(any_old(RT.PlatformDevice))
        deployment_id,        _ = self.RR.create(any_old(RT.Deployment))

        # observatory
        self.RR.create_association(observatory_id, PRED.hasSite, subsite_id)
        self.RR.create_association(observatory_id, PRED.hasSite, subsitez_id)

        # site
        self.RR.create_association(subsite_id, PRED.hasSite, subsite2_id)
        self.RR.create_association(subsite_id, PRED.hasSite, subsiteb_id)
        self.RR.create_association(subsite2_id, PRED.hasSite, platform_site3_id)
        self.RR.create_association(subsiteb_id, PRED.hasSite, platform_siteb2_id)
        self.RR.create_association(subsiteb_id, PRED.hasSite, platform_site_id)

        # platform_site
        self.RR.create_association(platform_site3_id, PRED.hasSite, instrument_site4_id)
        self.RR.create_association(platform_siteb2_id, PRED.hasSite, instrument_siteb3_id)
        self.RR.create_association(platform_site_id, PRED.hasSite, instrument_site2_id)
        self.RR.create_association(platform_site_id, PRED.hasSite, platform_siteb_id)
        self.RR.create_association(platform_siteb_id, PRED.hasSite, instrument_site_id)

        self.RR.create_association(platform_site_id, PRED.hasModel, platform_model_id)
        self.RR.create_association(platform_site_id, PRED.hasDevice, platform_device_id)
        self.RR.create_association(platform_site_id, PRED.hasDeployment, deployment_id)

        # instrument_site
        self.RR.create_association(instrument_site_id, PRED.hasModel, instrument_model_id)
        self.RR.create_association(instrument_site_id, PRED.hasDevice, instrument_device_id)
        self.RR.create_association(instrument_site_id, PRED.hasDeployment, deployment_id)

        ret = DotDict()
        ret.observatory_id       = observatory_id
        ret.subsite_id           = subsite_id
        ret.subsite2_id          = subsite2_id
        ret.subsiteb_id          = subsiteb_id
        ret.subsitez_id          = subsitez_id
        ret.platform_site_id     = platform_site_id
        ret.platform_siteb_id    = platform_siteb_id
        ret.platform_siteb2_id   = platform_siteb2_id
        ret.platform_site3_id    = platform_site3_id
        ret.instrument_site_id   = instrument_site_id
        ret.instrument_site2_id  = instrument_site2_id
        ret.instrument_siteb3_id = instrument_siteb3_id
        ret.instrument_site4_id  = instrument_site4_id
        return ret

    #@unittest.skip("targeting")
    def test_create_observatory(self):
        observatory_obj = IonObject(RT.Observatory,
                                    name='TestFacility',
                                    description='some new mf')
        self.OMS.create_observatory(observatory_obj)

    #@unittest.skip('targeting')
    def test_find_observatory_org(self):
        """Build an Org/Observatory/Subsite/Platform/Instrument chain, then
        verify the org-resource associations and their removal."""
        org_obj = IonObject(RT.Org,
                            name='TestOrg',
                            description='some new mf org')
        org_id = self.OMS.create_marine_facility(org_obj)

        observatory_obj = IonObject(RT.Observatory,
                                    name='TestObservatory',
                                    description='some new obs')
        observatory_id = self.OMS.create_observatory(observatory_obj)

        # make association
        self.OMS.assign_resource_to_observatory_org(observatory_id, org_id)

        # find association
        org_objs = self.OMS.find_org_by_observatory(observatory_id)
        self.assertEqual(1, len(org_objs))
        self.assertEqual(org_id, org_objs[0]._id)
        print("org_id=<" + org_id + ">")

        # create a subsite with parent Observatory
        subsite_obj = IonObject(RT.Subsite,
                                name='TestSubsite',
                                description='sample subsite')
        subsite_id = self.OMS.create_subsite(subsite_obj, observatory_id)
        self.assertIsNotNone(subsite_id, "Subsite not created.")

        # verify that Subsite is linked to Observatory
        mf_subsite_assoc = self.RR.get_association(observatory_id,
                                                   PRED.hasSite,
                                                   subsite_id)
        self.assertIsNotNone(mf_subsite_assoc,
                             "Subsite not connected to Observatory.")

        # add the Subsite as a resource of this Observatory
        self.OMS.assign_resource_to_observatory_org(resource_id=subsite_id,
                                                    org_id=org_id)
        # verify that Subsite is linked to Org
        org_subsite_assoc = self.RR.get_association(org_id,
                                                    PRED.hasResource,
                                                    subsite_id)
        self.assertIsNotNone(org_subsite_assoc,
                             "Subsite not connected as resource to Org.")

        # create a logical platform with parent Subsite
        platform_site_obj = IonObject(RT.PlatformSite,
                                      name='TestPlatformSite',
                                      description='sample logical platform')
        platform_site_id = self.OMS.create_platform_site(platform_site_obj,
                                                         subsite_id)
        self.assertIsNotNone(platform_site_id, "PlatformSite not created.")

        # verify that PlatformSite is linked to Site
        site_lp_assoc = self.RR.get_association(subsite_id,
                                                PRED.hasSite,
                                                platform_site_id)
        self.assertIsNotNone(site_lp_assoc,
                             "PlatformSite not connected to Site.")

        # add the PlatformSite as a resource of this Observatory
        self.OMS.assign_resource_to_observatory_org(
            resource_id=platform_site_id, org_id=org_id)
        # verify that PlatformSite is linked to Org
        org_lp_assoc = self.RR.get_association(org_id,
                                               PRED.hasResource,
                                               platform_site_id)
        self.assertIsNotNone(org_lp_assoc,
                             "PlatformSite not connected as resource to Org.")

        # create a logical instrument with parent logical platform
        instrument_site_obj = IonObject(RT.InstrumentSite,
                                        name='TestInstrumentSite',
                                        description='sample logical instrument')
        instrument_site_id = self.OMS.create_instrument_site(
            instrument_site_obj, platform_site_id)
        self.assertIsNotNone(instrument_site_id, "InstrumentSite not created.")

        # verify that InstrumentSite is linked to PlatformSite
        li_lp_assoc = self.RR.get_association(platform_site_id,
                                              PRED.hasSite,
                                              instrument_site_id)
        self.assertIsNotNone(li_lp_assoc,
                             "InstrumentSite not connected to PlatformSite.")

        # add the InstrumentSite as a resource of this Observatory
        self.OMS.assign_resource_to_observatory_org(
            resource_id=instrument_site_id, org_id=org_id)
        # verify that InstrumentSite is linked to Org
        org_li_assoc = self.RR.get_association(org_id,
                                               PRED.hasResource,
                                               instrument_site_id)
        self.assertIsNotNone(
            org_li_assoc, "InstrumentSite not connected as resource to Org.")

        # remove the InstrumentSite as a resource of this Observatory
        self.OMS.unassign_resource_from_observatory_org(instrument_site_id,
                                                        org_id)
        # verify that InstrumentSite is linked to Org
        assocs, _ = self.RR.find_objects(org_id, PRED.hasResource,
                                         RT.InstrumentSite, id_only=True)
        self.assertEqual(len(assocs), 0)

        # remove the InstrumentSite
        self.OMS.delete_instrument_site(instrument_site_id)
        assocs, _ = self.RR.find_objects(platform_site_id, PRED.hasInstrument,
                                         RT.InstrumentSite, id_only=True)
        self.assertEqual(len(assocs), 0)

        # remove the PlatformSite as a resource of this Observatory
        self.OMS.unassign_resource_from_observatory_org(platform_site_id,
                                                        org_id)
        # verify that PlatformSite is linked to Org
        assocs, _ = self.RR.find_objects(org_id, PRED.hasResource,
                                         RT.PlatformSite, id_only=True)
        self.assertEqual(len(assocs), 0)

        # remove the Site as a resource of this Observatory
        self.OMS.unassign_resource_from_observatory_org(subsite_id, org_id)
        # verify that Site is linked to Org
        assocs, _ = self.RR.find_objects(org_id, PRED.hasResource,
                                         RT.Subsite, id_only=True)
        self.assertEqual(len(assocs), 0)