def setUp(self):
    """Bring up a capability container with the standard R2 deploy and
    construct the service clients used by the transform-worker tests."""
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # A stand-in process so process-clients have a caller identity
    test_process = TransformWorkerTestProcess()

    node = self.container.node
    self.dataset_management_client = DatasetManagementServiceClient(node=node)
    self.pubsub_client = PubsubManagementServiceClient(node=node)
    self.dataproductclient = DataProductManagementServiceClient(node=node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=node)
    self.rrclient = ResourceRegistryServiceClient(node=node)
    self.imsclient = InstrumentManagementServiceProcessClient(node=node, process=test_process)

    # Shared fixtures for the individual tests
    self.time_dom, self.spatial_dom = time_series_domain()
    self.ph = ParameterHelper(self.dataset_management_client, self.addCleanup)
    self.wait_time = CFG.get_safe('endpoint.receive.timeout', 10)
def setUp(self):
    """Start a container with r2deploy.yml, preload ION parameters, and
    build the process clients needed by the visualization tests."""
    # Quiet the log while the container boots and preloads, then restore it
    logging.disable(logging.ERROR)
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')
    # simulate preloading
    preload_ion_params(self.container)
    logging.disable(logging.NOTSET)

    # Process standing in for the test as the message sender
    test_process = VisualizationServiceTestProcess()

    node = self.container.node
    self.rrclient = ResourceRegistryServiceProcessClient(node=node, process=test_process)
    self.damsclient = DataAcquisitionManagementServiceProcessClient(node=node, process=test_process)
    self.pubsubclient = PubsubManagementServiceProcessClient(node=node, process=test_process)
    self.ingestclient = IngestionManagementServiceProcessClient(node=node, process=test_process)
    self.imsclient = InstrumentManagementServiceProcessClient(node=node, process=test_process)
    self.dataproductclient = DataProductManagementServiceProcessClient(node=node, process=test_process)
    self.dataprocessclient = DataProcessManagementServiceProcessClient(node=node, process=test_process)
    self.datasetclient = DatasetManagementServiceProcessClient(node=node, process=test_process)
    self.workflowclient = WorkflowManagementServiceProcessClient(node=node, process=test_process)
    self.process_dispatcher = ProcessDispatcherServiceProcessClient(node=node, process=test_process)
    self.data_retriever = DataRetrieverServiceProcessClient(node=node, process=test_process)
    self.vis_client = VisualizationServiceProcessClient(node=node, process=test_process)

    self.ctd_stream_def = SBE37_CDM_stream_definition()
def setUp(self):
    """Start a container with the governance deploy (which also loads basic
    policy) and cache the root Org and system-actor request headers."""
    self._start_container()
    # r2gov.yml deploys services AND loads basic policy
    self.container.start_rel_from_url('res/deploy/r2gov.yml')

    caller = FakeProcess()

    node = self.container.node
    self.rr_client = ResourceRegistryServiceProcessClient(node=node, process=caller)
    self.id_client = IdentityManagementServiceProcessClient(node=node, process=caller)
    self.org_client = OrgManagementServiceProcessClient(node=node, process=caller)
    self.ims_client = InstrumentManagementServiceProcessClient(node=node, process=caller)

    self.ion_org = self.org_client.find_org()
    self.system_actor = self.id_client.find_actor_identity_by_name(name=CFG.system.system_actor)
    log.debug('system actor:' + self.system_actor._id)

    # Headers that let later calls be made as the privileged system actor
    sa_header_roles = get_role_message_headers(self.org_client.find_all_roles_by_user(self.system_actor._id))
    self.sa_user_header = {'ion-actor-id': self.system_actor._id, 'ion-actor-roles': sa_header_roles}
def setUp(self):
    """Start a container, deploy the R2 services, load the system policies,
    and build the governance service clients plus system-actor headers."""
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Process standing in for the test as the caller
    caller = GovernanceTestProcess()

    # Policies can only be loaded once every service is up
    LoadSystemPolicy.op_load_system_policies(caller)

    node = self.container.node
    self.rr_client = ResourceRegistryServiceProcessClient(node=node, process=caller)
    self.id_client = IdentityManagementServiceProcessClient(node=node, process=caller)
    self.pol_client = PolicyManagementServiceProcessClient(node=node, process=caller)
    self.org_client = OrgManagementServiceProcessClient(node=node, process=caller)
    self.ims_client = InstrumentManagementServiceProcessClient(node=node, process=caller)
    self.ems_client = ExchangeManagementServiceProcessClient(node=node, process=caller)

    self.ion_org = self.org_client.find_org()
    self.system_actor = self.id_client.find_actor_identity_by_name(name=CFG.system.system_actor)
    log.debug('system actor:' + self.system_actor._id)

    # Headers for issuing requests as the privileged system actor
    sa_header_roles = get_role_message_headers(self.org_client.find_all_roles_by_user(self.system_actor._id))
    self.sa_user_header = {'ion-actor-id': self.system_actor._id, 'ion-actor-roles': sa_header_roles}
def seed_gov(container, process=None):
    """Seed the system with sample governance resources: three data products,
    two instrument agents, and an 'Instrument Operator' role on the root Org,
    finishing with a deliberate duplicate-role attempt that is expected to fail.

    @param container  capability container whose node the clients talk to
    @param process    optional caller process; a fresh FakeProcess is created
                      per call when omitted (backward-compatible default)
    """
    # BUGFIX: the signature used to be `process=FakeProcess()` — a mutable
    # default evaluated once at import time and shared across all calls.
    if process is None:
        process = FakeProcess()

    dp_client = DataProductManagementServiceProcessClient(node=container.node, process=process)
    for dp_name, dp_desc in [('DataProd1', 'some new dp'),
                             ('DataProd2', 'and of course another new dp'),
                             ('DataProd3', 'yet another new dp')]:
        dp_client.create_data_product(IonObject(RT.DataProduct, name=dp_name, description=dp_desc))

    log.debug('Data Products')
    for dp_obj in dp_client.find_data_products():
        log.debug(str(dp_obj))

    ims_client = InstrumentManagementServiceProcessClient(node=container.node, process=process)
    for ia_name, ia_desc in [('Instrument Agent1', 'The first Instrument Agent'),
                             ('Instrument Agent2', 'The second Instrument Agent')]:
        ims_client.create_instrument_agent(IonObject(RT.InstrumentAgent, name=ia_name, description=ia_desc))

    log.debug('Instrument Agents')
    for ia_obj in ims_client.find_instrument_agents():
        log.debug(str(ia_obj))

    org_client = OrgManagementServiceProcessClient(node=container.node, process=process)
    ion_org = org_client.find_org()

    policy_client = PolicyManagementServiceProcessClient(node=container.node, process=process)
    role_obj = IonObject(RT.UserRole, name='Instrument Operator', description='Users assigned to this role are instrument operators')
    role_id = policy_client.create_role(role_obj)
    org_client.add_user_role(ion_org._id, role_id)

    # Creating/adding the same role a second time is expected to be rejected.
    try:
        role_id = policy_client.create_role(role_obj)
        org_client.add_user_role(ion_org._id, role_id)
    except Exception as e:  # modernized from py2-only `except Exception, e` (PEP 3110)
        log.info("This should fail")
        log.info(e.message)
def start_agent(self, agent_instance_id, resource_id):
    """Start the agent described by the agent-instance resource and, unless
    CFG 'activate' is False, drive it through INITIALIZE/GO_ACTIVE/RUN and
    into streaming (autosample) mode.

    @param agent_instance_id  id of the agent instance resource; dispatch is
                              on its type_ (dataset/instrument/platform)
    @param resource_id        id of the device resource the agent manages
    @raises BadRequest        if the agent instance type is unsupported
    """
    if not agent_instance_id or not resource_id:
        # BUGFIX: message was "Could not op=%s ..." — inconsistent with the
        # sibling start/stop methods' "Could not %s ..." wording.
        log.warn("Could not %s agent %s for device %s", self.op, agent_instance_id, resource_id)
        return
    res_obj = self.rr.read(resource_id)    # also validates the device resource exists
    ai_obj = self.rr.read(agent_instance_id)

    try:
        client = ResourceAgentClient(resource_id, process=self)
        # Reaching here means an agent already answers for this resource.
        if self.force:
            log.warn("Agent for resource %s seems running - continuing", resource_id)
            if self.autoclean:
                self.cleanup_agent(agent_instance_id, resource_id)
        else:
            log.warn("Agent for resource %s seems running", resource_id)
            return
    except NotFound:
        pass  # This is expected: no agent currently running

    log.info('Starting agent...')
    sys_headers = self._get_system_actor_headers()
    if ai_obj.type_ == RT.ExternalDatasetAgentInstance:
        dams = DataAcquisitionManagementServiceProcessClient(process=self)
        dams.start_external_dataset_agent_instance(agent_instance_id, headers=sys_headers, timeout=self.timeout)
    elif ai_obj.type_ == RT.InstrumentAgentInstance:
        ims = InstrumentManagementServiceProcessClient(process=self)
        ims.start_instrument_agent_instance(agent_instance_id, headers=sys_headers, timeout=self.timeout)
    elif ai_obj.type_ == RT.PlatformAgentInstance:
        ims = InstrumentManagementServiceProcessClient(process=self)
        ims.start_platform_agent_instance(agent_instance_id, headers=sys_headers, timeout=self.timeout)
    else:
        # BUGFIX: the exception was constructed but never raised, so
        # unsupported types fell through and logged 'Agent started!'.
        raise BadRequest("Attempt to start unsupported agent type: %s" % ai_obj.type_)
    log.info('Agent started!')

    activate = self.CFG.get("activate", True)
    if activate:
        log.info('Activating agent...')
        client = ResourceAgentClient(resource_id, process=self)
        client.execute_agent(AgentCommand(command=ResourceAgentEvent.INITIALIZE), headers=sys_headers, timeout=self.timeout)
        client.execute_agent(AgentCommand(command=ResourceAgentEvent.GO_ACTIVE), headers=sys_headers, timeout=self.timeout)
        client.execute_agent(AgentCommand(command=ResourceAgentEvent.RUN), headers=sys_headers, timeout=self.timeout)
        client.execute_resource(command=AgentCommand(command=DriverEvent.START_AUTOSAMPLE), headers=sys_headers, timeout=self.timeout)
        log.info('Agent in auto-sample mode!')
def setUp(self):
    """Launch a container, deploy r2deploy.yml and wire up every service
    client this transform-worker test suite needs."""
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Instantiate a process to represent the test as the caller
    worker_process = TransformWorkerTestProcess()

    container_node = self.container.node
    self.dataset_management_client = DatasetManagementServiceClient(node=container_node)
    self.pubsub_client = PubsubManagementServiceClient(node=container_node)
    self.dataproductclient = DataProductManagementServiceClient(node=container_node)
    self.dataprocessclient = DataProcessManagementServiceClient(node=container_node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=container_node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=container_node)
    self.rrclient = ResourceRegistryServiceClient(node=container_node)
    self.imsclient = InstrumentManagementServiceProcessClient(node=container_node, process=worker_process)

    # Common domain/parameter fixtures and endpoint timeout
    self.time_dom, self.spatial_dom = time_series_domain()
    self.ph = ParameterHelper(self.dataset_management_client, self.addCleanup)
    self.wait_time = CFG.get_safe('endpoint.receive.timeout', 10)
def setUp(self):
    """Start a container, deploy R2 services, load system policies (waiting
    for the policy-update events), and build the governance clients."""
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Process standing in for the test as the caller
    caller = GovernanceTestProcess()

    # Policies are loaded after all services have started
    LoadSystemPolicy.op_load_system_policies(caller)
    gevent.sleep(1)  # Wait for events to be fired and policy updated

    node = self.container.node
    self.rr_client = ResourceRegistryServiceProcessClient(node=node, process=caller)
    self.id_client = IdentityManagementServiceProcessClient(node=node, process=caller)
    self.pol_client = PolicyManagementServiceProcessClient(node=node, process=caller)
    self.org_client = OrgManagementServiceProcessClient(node=node, process=caller)
    self.ims_client = InstrumentManagementServiceProcessClient(node=node, process=caller)
    self.ems_client = ExchangeManagementServiceProcessClient(node=node, process=caller)

    self.ion_org = self.org_client.find_org()
    self.system_actor = self.id_client.find_actor_identity_by_name(name=CFG.system.system_actor)
    log.debug('system actor:' + self.system_actor._id)

    # Headers for making requests as the privileged system actor
    sa_header_roles = get_role_message_headers(self.org_client.find_all_roles_by_user(self.system_actor._id))
    self.sa_user_header = {'ion-actor-id': self.system_actor._id, 'ion-actor-roles': sa_header_roles}
def stop_agent(self, agent_instance_id, resource_id):
    """Stop the running agent for the given agent instance / device resource.

    Dispatch is on the device resource's type_. A missing (not running)
    agent is logged and stopping is still attempted so the instance record
    gets cleaned up; an unsupported resource type raises BadRequest.
    """
    # BUGFIX: the empty-id guard now runs BEFORE the registry read; the old
    # order called self.rr.read(resource_id) first, which blew up on a falsy
    # id with an unhelpful error instead of the intended warning.
    if not agent_instance_id or not resource_id:
        log.warn("Could not %s agent %s for device %s", self.op, agent_instance_id, resource_id)
        return
    res_obj = self.rr.read(resource_id)

    try:
        ResourceAgentClient(resource_id, process=self)
    except NotFound:
        # Not fatal — proceed so the stop call can clean up the instance.
        log.warn("Agent for resource %s seems not running", resource_id)

    log.info('Stopping agent...')
    sys_headers = self._get_system_actor_headers()
    # BUGFIX: second condition was `res_obj == RT.ExternalDataset`, comparing
    # the resource OBJECT to a type constant (always False); compare type_.
    if res_obj.type_ in (RT.ExternalDatasetAgentInstance, RT.ExternalDataset):
        dams = DataAcquisitionManagementServiceProcessClient(process=self)
        try:
            dams.stop_external_dataset_agent_instance(agent_instance_id, headers=sys_headers, timeout=self.timeout)
        except NotFound:
            log.warn("Agent for resource %s not found", resource_id)
    elif res_obj.type_ == RT.InstrumentDevice:
        ims = InstrumentManagementServiceProcessClient(process=self)
        try:
            ims.stop_instrument_agent_instance(agent_instance_id, headers=sys_headers, timeout=self.timeout)
        except NotFound:
            log.warn("Agent for resource %s not found", resource_id)
    elif res_obj.type_ == RT.PlatformDevice:
        ims = InstrumentManagementServiceProcessClient(process=self)
        try:
            ims.stop_platform_agent_instance(agent_instance_id, headers=sys_headers, timeout=self.timeout)
        except NotFound:
            log.warn("Agent for resource %s not found", resource_id)
    else:
        # BUGFIX: exception was constructed but never raised, and the message
        # said "start" in this stop routine (copy-paste).
        raise BadRequest("Attempt to stop unsupported agent type: %s" % res_obj.type_)
    log.info('Agent stopped!')
def start_agent(self, agent_instance_id, resource_id):
    """Start the agent for the given agent instance / device resource and,
    unless CFG 'activate' is False, drive it to RUN and autosample.

    Dispatch is on the device resource's type_; an unsupported type raises
    BadRequest.
    """
    if not agent_instance_id or not resource_id:
        log.warn("Could not %s agent %s for device %s", self.op, agent_instance_id, resource_id)
        return
    res_obj = self.rr.read(resource_id)

    log.info('Starting agent...')
    sys_headers = self._get_system_actor_headers()
    # BUGFIX: second condition was `res_obj == RT.ExternalDataset`, comparing
    # the resource OBJECT to a type constant (always False); compare type_.
    if res_obj.type_ in (RT.ExternalDatasetAgentInstance, RT.ExternalDataset):
        dams = DataAcquisitionManagementServiceProcessClient(process=self)
        dams.start_external_dataset_agent_instance(agent_instance_id, headers=sys_headers, timeout=self.timeout)
    elif res_obj.type_ == RT.InstrumentDevice:
        ims = InstrumentManagementServiceProcessClient(process=self)
        ims.start_instrument_agent_instance(agent_instance_id, headers=sys_headers, timeout=self.timeout)
    elif res_obj.type_ == RT.PlatformDevice:
        ims = InstrumentManagementServiceProcessClient(process=self)
        ims.start_platform_agent_instance(agent_instance_id, headers=sys_headers, timeout=self.timeout)
    else:
        # BUGFIX: the exception was constructed but never raised, so
        # unsupported types fell through and logged 'Agent started!'.
        raise BadRequest("Attempt to start unsupported agent type: %s" % res_obj.type_)
    log.info('Agent started!')

    activate = self.CFG.get("activate", True)
    if activate:
        # Step the agent through its lifecycle and into streaming mode.
        log.info('Activating agent...')
        client = ResourceAgentClient(resource_id, process=self)
        client.execute_agent(AgentCommand(command=ResourceAgentEvent.INITIALIZE), headers=sys_headers, timeout=self.timeout)
        client.execute_agent(AgentCommand(command=ResourceAgentEvent.GO_ACTIVE), headers=sys_headers, timeout=self.timeout)
        client.execute_agent(AgentCommand(command=ResourceAgentEvent.RUN), headers=sys_headers, timeout=self.timeout)
        client.execute_resource(command=AgentCommand(command=DriverEvent.START_AUTOSAMPLE), headers=sys_headers, timeout=self.timeout)
        log.info('Agent active!')
description='yet another new dp') dp_client.create_data_product(dp_obj, headers={'ion-actor-id': system_actor._id}) log.info('Data Products') dp_list = dp_client.find_data_products() for dp_obj in dp_list: log.debug( str(dp_obj)) results = find_data_products() log.info(results) header_roles = get_role_message_headers(org_client.find_all_roles_by_user(user._id)) ims_client = InstrumentManagementServiceProcessClient(node=container.node, process=process) try: ia_obj = IonObject(RT.InstrumentAgent, name='Instrument Agent1', description='The first Instrument Agent') ims_client.create_instrument_agent(ia_obj, headers={'ion-actor-id': user._id, 'ion-actor-roles': header_roles }) except Exception, e: log.info('This operation should be denied: ' + e.message) header_roles = get_role_message_headers(org_client.find_all_roles_by_user(system_actor._id)) org_client.grant_role(ion_org._id, user._id, 'INSTRUMENT_OPERATOR', headers={'ion-actor-id': system_actor._id, 'ion-actor-roles': header_roles }) header_roles = get_role_message_headers(org_client.find_all_roles_by_user(user._id))
class TestGovernanceInt(IonIntegrationTestCase):
    """Integration tests for system governance: service-level policy
    enforcement, Org enrollment/role/resource requests, and commitments.

    NOTE(review): these tests depend on exact policy-denial message strings
    and on gevent sleeps for policy-update event propagation — timing-
    sensitive by design.
    """

    def setUp(self):
        # Start container
        self._start_container()

        #Load a deploy file
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        #Instantiate a process to represent the test
        process = GovernanceTestProcess()

        #Load system policies after container has started all of the services
        LoadSystemPolicy.op_load_system_policies(process)

        # Process clients for every service these tests exercise
        self.rr_client = ResourceRegistryServiceProcessClient(node=self.container.node, process=process)
        self.id_client = IdentityManagementServiceProcessClient(node=self.container.node, process=process)
        self.pol_client = PolicyManagementServiceProcessClient(node=self.container.node, process=process)
        self.org_client = OrgManagementServiceProcessClient(node=self.container.node, process=process)
        self.ims_client = InstrumentManagementServiceProcessClient(node=self.container.node, process=process)
        self.ems_client = ExchangeManagementServiceProcessClient(node=self.container.node, process=process)

        self.ion_org = self.org_client.find_org()
        self.system_actor = self.id_client.find_actor_identity_by_name(name=CFG.system.system_actor)
        log.debug('system actor:' + self.system_actor._id)

        # Request headers identifying the privileged system actor and its roles
        sa_header_roles = get_role_message_headers(self.org_client.find_all_roles_by_user(self.system_actor._id))
        self.sa_user_header = {'ion-actor-id': self.system_actor._id, 'ion-actor-roles': sa_header_roles}

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Not integrated for CEI')
    def test_basic_policy(self):
        """Service-level policy lifecycle: an anonymous call is denied by
        default, then allowed/denied as a test policy is created, disabled,
        enabled and finally removed."""

        #Make sure that the system policies have been loaded
        policy_list, _ = self.rr_client.find_resources(restype=RT.Policy)
        self.assertNotEqual(len(policy_list), 0, "The system policies have not been loaded into the Resource Registry")

        #Attempt to access an operation in service which does not have specific policies set
        es_obj = IonObject(RT.ExchangeSpace, description='ION test XS', name='ioncore2')
        with self.assertRaises(Unauthorized) as cm:
            self.ems_client.create_exchange_space(es_obj)
        self.assertIn('exchange_management(create_exchange_space) has been denied', cm.exception.message)

        #Add a new policy to allow the the above service call.
        policy_obj = IonObject(RT.Policy, name='Exchange_Management_Test_Policy', definition_type="Service", rule=TEST_POLICY_TEXT,
                               description='Allow specific operations in the Exchange Management Service for anonymous user')

        test_policy_id = self.pol_client.create_policy(policy_obj, headers=self.sa_user_header)
        self.pol_client.add_service_policy('exchange_management', test_policy_id, headers=self.sa_user_header)
        log.info('Policy created: ' + policy_obj.name)

        gevent.sleep(2)  # Wait for events to be fired and policy updated

        #The previous attempt at this operations should now be allowed.
        # (BadRequest from inside the service proves the call got past policy.)
        es_obj = IonObject(RT.ExchangeSpace, description='ION test XS', name='ioncore2')
        with self.assertRaises(BadRequest) as cm:
            self.ems_client.create_exchange_space(es_obj)
        self.assertIn('Arguments not set', cm.exception.message)

        #disable the test policy to try again
        self.pol_client.disable_policy(test_policy_id, headers=self.sa_user_header)

        gevent.sleep(2)  # Wait for events to be fired and policy updated

        #The same request that previously was allowed should not be denied
        es_obj = IonObject(RT.ExchangeSpace, description='ION test XS', name='ioncore2')
        with self.assertRaises(Unauthorized) as cm:
            self.ems_client.create_exchange_space(es_obj)
        self.assertIn('exchange_management(create_exchange_space) has been denied', cm.exception.message)

        #now enable the test policy to try again
        self.pol_client.enable_policy(test_policy_id, headers=self.sa_user_header)

        gevent.sleep(2)  # Wait for events to be fired and policy updated

        #The previous attempt at this operations should now be allowed.
        es_obj = IonObject(RT.ExchangeSpace, description='ION test XS', name='ioncore2')
        with self.assertRaises(BadRequest) as cm:
            self.ems_client.create_exchange_space(es_obj)
        self.assertIn('Arguments not set', cm.exception.message)

        self.pol_client.remove_service_policy('exchange_management', test_policy_id, headers=self.sa_user_header)
        self.pol_client.delete_policy(test_policy_id, headers=self.sa_user_header)

        gevent.sleep(2)  # Wait for events to be fired and policy updated

        #The same request that previously was allowed should not be denied
        es_obj = IonObject(RT.ExchangeSpace, description='ION test XS', name='ioncore2')
        with self.assertRaises(Unauthorized) as cm:
            self.ems_client.create_exchange_space(es_obj)
        self.assertIn('exchange_management(create_exchange_space) has been denied', cm.exception.message)

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Not integrated for CEI')
    def test_org_policy(self):
        """Org-scoped governance: enrollment, role and acquire-resource
        requests, manager approval/denial, user acceptance, and resource
        commitment bookkeeping."""

        #Make sure that the system policies have been loaded
        policy_list, _ = self.rr_client.find_resources(restype=RT.Policy)
        self.assertNotEqual(len(policy_list), 0, "The system policies have not been loaded into the Resource Registry")

        with self.assertRaises(BadRequest) as cm:
            myorg = self.org_client.read_org()
        self.assertTrue(cm.exception.message == 'The org_id parameter is missing')

        # Register a test user via certificate sign-on
        user_id, valid_until, registered = self.id_client.signon(USER1_CERTIFICATE, True)
        log.debug("user id=" + user_id)

        user_roles = get_role_message_headers(self.org_client.find_all_roles_by_user(user_id))
        user_header = {'ion-actor-id': user_id, 'ion-actor-roles': user_roles}

        #Attempt to enroll a user anonymously - should not be allowed
        with self.assertRaises(Unauthorized) as cm:
            self.org_client.enroll_member(self.ion_org._id, user_id)
        self.assertIn('org_management(enroll_member) has been denied', cm.exception.message)

        #Attempt to let a user enroll themselves - should not be allowed
        with self.assertRaises(Unauthorized) as cm:
            self.org_client.enroll_member(self.ion_org._id, user_id, headers=user_header)
        self.assertIn('org_management(enroll_member) has been denied', cm.exception.message)

        #Attept to enroll the user in the ION Root org as a manager - should not be allowed since
        #registration with the system implies membership in the ROOT Org.
        with self.assertRaises(BadRequest) as cm:
            self.org_client.enroll_member(self.ion_org._id, user_id, headers=self.sa_user_header)
        self.assertTrue(cm.exception.message == 'A request to enroll in the root ION Org is not allowed')

        with self.assertRaises(Unauthorized) as cm:
            users = self.org_client.find_enrolled_users(self.ion_org._id)
        self.assertIn('org_management(find_enrolled_users) has been denied', cm.exception.message)

        with self.assertRaises(Unauthorized) as cm:
            users = self.org_client.find_enrolled_users(self.ion_org._id, headers=user_header)
        self.assertIn('org_management(find_enrolled_users) has been denied', cm.exception.message)

        users = self.org_client.find_enrolled_users(self.ion_org._id, headers=self.sa_user_header)
        self.assertEqual(len(users), 2)

        ## test_org_roles and policies

        roles = self.org_client.find_org_roles(self.ion_org._id)
        self.assertEqual(len(roles), 3)
        self.assertItemsEqual([r.name for r in roles], [MANAGER_ROLE, MEMBER_ROLE, ION_MANAGER])

        roles = self.org_client.find_roles_by_user(self.ion_org._id, self.system_actor._id, headers=self.sa_user_header)
        self.assertEqual(len(roles), 3)
        self.assertItemsEqual([r.name for r in roles], [MEMBER_ROLE, MANAGER_ROLE, ION_MANAGER])

        roles = self.org_client.find_roles_by_user(self.ion_org._id, user_id, headers=self.sa_user_header)
        self.assertEqual(len(roles), 1)
        self.assertItemsEqual([r.name for r in roles], [MEMBER_ROLE])

        with self.assertRaises(NotFound) as nf:
            org2 = self.org_client.find_org(ORG2)
        self.assertIn('The Org with name Org2 does not exist', nf.exception.message)

        # Create a second Org to exercise non-root enrollment/role flows
        org2 = IonObject(RT.Org, name=ORG2, description='A second Org')
        org2_id = self.org_client.create_org(org2, headers=self.sa_user_header)
        org2 = self.org_client.find_org(ORG2)
        self.assertEqual(org2_id, org2._id)

        roles = self.org_client.find_org_roles(org2_id)
        self.assertEqual(len(roles), 2)
        self.assertItemsEqual([r.name for r in roles], [MANAGER_ROLE, MEMBER_ROLE])

        operator_role = IonObject(RT.UserRole, name=INSTRUMENT_OPERATOR, label='Instrument Operator', description='Instrument Operator')

        #First try to add the user role anonymously
        with self.assertRaises(Unauthorized) as cm:
            self.org_client.add_user_role(org2_id, operator_role)
        self.assertIn('org_management(add_user_role) has been denied', cm.exception.message)

        self.org_client.add_user_role(org2_id, operator_role, headers=self.sa_user_header)

        roles = self.org_client.find_org_roles(org2_id)
        self.assertEqual(len(roles), 3)
        self.assertItemsEqual([r.name for r in roles], [MANAGER_ROLE, MEMBER_ROLE, INSTRUMENT_OPERATOR])

        # test requests for enrollments and roles.

        #First try to find user requests anonymously
        with self.assertRaises(Unauthorized) as cm:
            requests = self.org_client.find_requests(org2_id)
        self.assertIn('org_management(find_requests) has been denied', cm.exception.message)

        #Next try to find user requests as as a basic member
        with self.assertRaises(Unauthorized) as cm:
            requests = self.org_client.find_requests(org2_id, headers=user_header)
        self.assertIn('org_management(find_requests) has been denied', cm.exception.message)

        requests = self.org_client.find_requests(org2_id, headers=self.sa_user_header)
        self.assertEqual(len(requests), 0)

        # First try to request a role without being a member
        with self.assertRaises(BadRequest) as cm:
            req_id = self.org_client.request_role(org2_id, user_id, INSTRUMENT_OPERATOR, headers=user_header)
        self.assertIn('A precondition for this request has not been satisfied: is_enrolled(org_id,user_id)', cm.exception.message)

        requests = self.org_client.find_requests(org2_id, headers=self.sa_user_header)
        self.assertEqual(len(requests), 0)

        req_id = self.org_client.request_enroll(org2_id, user_id, headers=user_header)

        requests = self.org_client.find_requests(org2_id, headers=self.sa_user_header)
        self.assertEqual(len(requests), 1)

        requests = self.org_client.find_user_requests(user_id, org2_id, headers=self.sa_user_header)
        self.assertEqual(len(requests), 1)

        #User tried requesting enrollment again - this should fail
        with self.assertRaises(BadRequest) as cm:
            req_id = self.org_client.request_enroll(org2_id, user_id, headers=user_header)
        self.assertIn('A precondition for this request has not been satisfied: enroll_req_not_exist(org_id,user_id)', cm.exception.message)

        #Manager denies the request
        self.org_client.deny_request(org2_id, req_id, 'To test the deny process', headers=self.sa_user_header)

        requests = self.org_client.find_requests(org2_id, headers=self.sa_user_header)
        self.assertEqual(len(requests), 1)
        self.assertEqual(requests[0].status, REQUEST_DENIED)

        #Manager approves request
        self.org_client.approve_request(org2_id, req_id, headers=self.sa_user_header)

        users = self.org_client.find_enrolled_users(org2_id, headers=self.sa_user_header)
        self.assertEqual(len(users), 0)

        #User Accepts request
        self.org_client.accept_request(org2_id, req_id, headers=user_header)

        users = self.org_client.find_enrolled_users(org2_id, headers=self.sa_user_header)
        self.assertEqual(len(users), 1)

        #User tried requesting enrollment again - this should fail
        with self.assertRaises(BadRequest) as cm:
            req_id = self.org_client.request_enroll(org2_id, user_id, headers=user_header)
        self.assertIn('A precondition for this request has not been satisfied: is_not_enrolled(org_id,user_id)', cm.exception.message)

        req_id = self.org_client.request_role(org2_id, user_id, INSTRUMENT_OPERATOR, headers=user_header)

        requests = self.org_client.find_requests(org2_id, headers=self.sa_user_header)
        self.assertEqual(len(requests), 2)

        requests = self.org_client.find_requests(org2_id, request_status='Open', headers=self.sa_user_header)
        self.assertEqual(len(requests), 1)

        requests = self.org_client.find_user_requests(user_id, org2_id, headers=user_header)
        self.assertEqual(len(requests), 2)

        requests = self.org_client.find_user_requests(user_id, org2_id, request_type=RT.RoleRequest, headers=user_header)
        self.assertEqual(len(requests), 1)

        requests = self.org_client.find_user_requests(user_id, org2_id, request_status="Open", headers=user_header)
        self.assertEqual(len(requests), 1)

        ia_list, _ = self.rr_client.find_resources(restype=RT.InstrumentAgent)
        self.assertEqual(len(ia_list), 0)

        ia_obj = IonObject(RT.InstrumentAgent, name='Instrument Agent1', description='The first Instrument Agent')

        # Creating an instrument agent is denied both anonymously and as a
        # plain member (the role request is still pending at this point)
        with self.assertRaises(Unauthorized) as cm:
            self.ims_client.create_instrument_agent(ia_obj)
        self.assertIn('instrument_management(create_instrument_agent) has been denied', cm.exception.message)

        with self.assertRaises(Unauthorized) as cm:
            self.ims_client.create_instrument_agent(ia_obj, headers=user_header)
        self.assertIn('instrument_management(create_instrument_agent) has been denied', cm.exception.message)

        #Manager approves request
        self.org_client.approve_request(org2_id, req_id, headers=self.sa_user_header)

        requests = self.org_client.find_user_requests(user_id, org2_id, request_status="Open", headers=user_header)
        self.assertEqual(len(requests), 0)

        #User accepts request
        self.org_client.accept_request(org2_id, req_id, headers=user_header)

        #Refresh headers with new role
        user_roles = get_role_message_headers(self.org_client.find_all_roles_by_user(user_id))
        user_header = {'ion-actor-id': user_id, 'ion-actor-roles': user_roles}

        # With the operator role granted, creation now succeeds
        self.ims_client.create_instrument_agent(ia_obj, headers=user_header)

        ia_obj = IonObject(RT.InstrumentAgent, name='Instrument Agent2', description='The second Instrument Agent')
        self.ims_client.create_instrument_agent(ia_obj, headers=user_header)

        ia_list, _ = self.rr_client.find_resources(restype=RT.InstrumentAgent)
        self.assertEqual(len(ia_list), 2)

        #First make a acquire resource request with an non-enrolled user.
        with self.assertRaises(BadRequest) as cm:
            req_id = self.org_client.request_acquire_resource(org2_id, self.system_actor._id, ia_list[0]._id, headers=self.sa_user_header)
        self.assertIn('A precondition for this request has not been satisfied: is_enrolled(org_id,user_id)', cm.exception.message)

        req_id = self.org_client.request_acquire_resource(org2_id, user_id, ia_list[0]._id, headers=user_header)

        requests = self.org_client.find_requests(org2_id, headers=self.sa_user_header)
        self.assertEqual(len(requests), 3)

        requests = self.org_client.find_user_requests(user_id, org2_id, headers=user_header)
        self.assertEqual(len(requests), 3)

        requests = self.org_client.find_user_requests(user_id, org2_id, request_type=RT.ResourceRequest, headers=user_header)
        self.assertEqual(len(requests), 1)

        requests = self.org_client.find_user_requests(user_id, org2_id, request_status="Open", headers=user_header)
        self.assertEqual(len(requests), 1)
        self.assertEqual(requests[0]._id, req_id)

        #Manager approves Instrument request
        self.org_client.approve_request(org2_id, req_id, headers=self.sa_user_header)

        requests = self.org_client.find_user_requests(user_id, org2_id, request_status="Open", headers=user_header)
        self.assertEqual(len(requests), 0)

        #User accepts request
        self.org_client.accept_request(org2_id, req_id, headers=user_header)

        #Check commitments
        commitments, _ = self.rr_client.find_objects(ia_list[0]._id, PRED.hasCommitment, RT.ResourceCommitment)
        self.assertEqual(len(commitments), 1)

        commitments, _ = self.rr_client.find_objects(user_id, PRED.hasCommitment, RT.ResourceCommitment)
        self.assertEqual(len(commitments), 1)

        #Release the resource
        self.org_client.release_resource(org2_id, user_id, ia_list[0]._id, headers=self.sa_user_header, timeout=15)  #TODO - Refactor release_resource

        #Check commitments
        commitments, _ = self.rr_client.find_objects(ia_list[0]._id, PRED.hasCommitment, RT.ResourceCommitment)
        self.assertEqual(len(commitments), 0)

        commitments, _ = self.rr_client.find_objects(user_id, PRED.hasCommitment, RT.ResourceCommitment)
        self.assertEqual(len(commitments), 0)
class AgentManagementService(BaseAgentManagementService):
    """The Agent Management Service manages Agent Definitions, Agent Instance
    configurations and running Agents in the system.

    It is a thin dispatch facade: every operation resolves the agent resource
    type and delegates to the type-specific service — IMS for instrument and
    platform agents, DAMS for external dataset agents.
    @see https://confluence.oceanobservatories.org/display/syseng/CIAD+COI+OV+Agent+Management+Service
    """

    def on_init(self):
        # Process clients for the two services this facade delegates to.
        self.ims_client = InstrumentManagementServiceProcessClient(process=self)
        self.dams_client = DataAcquisitionManagementServiceProcessClient(process=self)

    def create_agent_definition(self, agent_definition=None):
        """Creates an Agent Definition resource from the parameter AgentDefinition object.
        @param agent_definition    AgentDefinition
        @retval agent_definition_id    str
        @throws BadRequest    if object passed has _id or _rev attribute
        """
        atype, ainst, svc_client = self._get_agent_type(agent_definition)
        if atype == AT_INSTUMENT_AGENT:
            res = self.ims_client.create_instrument_agent(agent_definition)
        elif atype == AT_PLATFORM_AGENT:
            res = self.ims_client.create_platform_agent(agent_definition)
        elif atype == AT_EXTDATASET_AGENT:
            res = self.dams_client.create_external_dataset_agent(agent_definition)
        else:
            raise BadRequest("Unknown agent type: %s" % atype)
        return res

    def update_agent_definition(self, agent_definition=None):
        """Updates an existing Agent Definition resource.
        @param agent_definition    AgentDefinition
        @throws BadRequest    if object does not have _id or _rev attribute
        @throws NotFound    object with specified id does not exist
        @throws Conflict    object not based on latest persisted object version
        """
        atype, ainst, svc_client = self._get_agent_type(agent_definition)
        if atype == AT_INSTUMENT_AGENT:
            res = self.ims_client.update_instrument_agent(agent_definition)
        elif atype == AT_PLATFORM_AGENT:
            res = self.ims_client.update_platform_agent(agent_definition)
        elif atype == AT_EXTDATASET_AGENT:
            res = self.dams_client.update_external_dataset_agent(agent_definition)
        else:
            raise BadRequest("Unknown agent type: %s" % atype)
        return res

    def read_agent_definition(self, agent_definition_id=''):
        """Returns an existing Agent Definition resource.
        @param agent_definition_id    str
        @retval agent_definition    AgentDefinition
        @throws NotFound    object with specified id does not exist
        """
        atype, ainst, svc_client = self._get_agent_type(agent_definition_id)
        if atype == AT_INSTUMENT_AGENT:
            res = self.ims_client.read_instrument_agent(agent_definition_id)
        elif atype == AT_PLATFORM_AGENT:
            res = self.ims_client.read_platform_agent(agent_definition_id)
        elif atype == AT_EXTDATASET_AGENT:
            res = self.dams_client.read_external_dataset_agent(agent_definition_id)
        else:
            raise BadRequest("Unknown agent type: %s" % atype)
        return res

    def delete_agent_definition(self, agent_definition_id=''):
        """Deletes an existing Agent Definition resource.
        @param agent_definition_id    str
        @throws NotFound    object with specified id does not exist
        """
        atype, ainst, svc_client = self._get_agent_type(agent_definition_id)
        if atype == AT_INSTUMENT_AGENT:
            res = self.ims_client.delete_instrument_agent(agent_definition_id)
        elif atype == AT_PLATFORM_AGENT:
            res = self.ims_client.delete_platform_agent(agent_definition_id)
        elif atype == AT_EXTDATASET_AGENT:
            res = self.dams_client.delete_external_dataset_agent(agent_definition_id)
        else:
            raise BadRequest("Unknown agent type: %s" % atype)
        return res

    def create_agent_instance(self, agent_instance=None):
        """Creates an Agent Instance resource from the parameter AgentInstance object.
        @param agent_instance    AgentInstance
        @retval agent_instance_id    str
        @throws BadRequest    if object passed has _id or _rev attribute
        """
        atype, ainst, svc_client = self._get_agent_type(agent_instance)
        if atype == AT_INSTUMENT_AGENT:
            # BUGFIX: was create_instrument_agent_instance_ (trailing underscore),
            # which is not an IMS operation and would raise AttributeError.
            res = self.ims_client.create_instrument_agent_instance(agent_instance)
        elif atype == AT_PLATFORM_AGENT:
            res = self.ims_client.create_platform_agent_instance(agent_instance)
        elif atype == AT_EXTDATASET_AGENT:
            res = self.dams_client.create_external_dataset_agent_instance(agent_instance)
        else:
            raise BadRequest("Unknown agent type: %s" % atype)
        return res

    def update_agent_instance(self, agent_instance=None):
        """Updates an existing Agent Instance resource.
        @param agent_instance    AgentInstance
        @throws BadRequest    if object does not have _id or _rev attribute
        @throws NotFound    object with specified id does not exist
        @throws Conflict    object not based on latest persisted object version
        """
        atype, ainst, svc_client = self._get_agent_type(agent_instance)
        if atype == AT_INSTUMENT_AGENT:
            res = self.ims_client.update_instrument_agent_instance(agent_instance)
        elif atype == AT_PLATFORM_AGENT:
            res = self.ims_client.update_platform_agent_instance(agent_instance)
        elif atype == AT_EXTDATASET_AGENT:
            # NOTE(review): asymmetric with the other EXTDATASET branches, which
            # call *_external_dataset_agent_instance — confirm against the DAMS
            # interface whether update_data_source_agent_instance is intended here.
            res = self.dams_client.update_data_source_agent_instance(agent_instance)
        else:
            raise BadRequest("Unknown agent type: %s" % atype)
        return res

    def read_agent_instance(self, agent_instance_id=''):
        """Returns an existing Agent Instance resource.
        @param agent_instance_id    str
        @retval agent_instance    AgentInstance
        @throws NotFound    object with specified id does not exist
        """
        atype, ainst, svc_client = self._get_agent_type(agent_instance_id)
        if atype == AT_INSTUMENT_AGENT:
            res = self.ims_client.read_instrument_agent_instance(agent_instance_id)
        elif atype == AT_PLATFORM_AGENT:
            res = self.ims_client.read_platform_agent_instance(agent_instance_id)
        elif atype == AT_EXTDATASET_AGENT:
            res = self.dams_client.read_external_dataset_agent_instance(agent_instance_id)
        else:
            raise BadRequest("Unknown agent type: %s" % atype)
        return res

    def delete_agent_instance(self, agent_instance_id=''):
        """Deletes an existing Agent Instance resource.
        @param agent_instance_id    str
        @throws NotFound    object with specified id does not exist
        """
        atype, ainst, svc_client = self._get_agent_type(agent_instance_id)
        if atype == AT_INSTUMENT_AGENT:
            res = self.ims_client.delete_instrument_agent_instance(agent_instance_id)
        elif atype == AT_PLATFORM_AGENT:
            res = self.ims_client.delete_platform_agent_instance(agent_instance_id)
        elif atype == AT_EXTDATASET_AGENT:
            res = self.dams_client.delete_external_dataset_agent_instance(agent_instance_id)
        else:
            raise BadRequest("Unknown agent type: %s" % atype)
        return res

    def start_agent_instance(self, agent_instance_id=''):
        """Start the given Agent Instance. Delegates to the type specific service.
        @param agent_instance_id    str
        @retval process_id    str
        @throws NotFound    object with specified id does not exist
        """
        atype, ainst, svc_client = self._get_agent_type(agent_instance_id)
        if atype == AT_INSTUMENT_AGENT:
            res = self.ims_client.start_instrument_agent_instance(agent_instance_id)
        elif atype == AT_PLATFORM_AGENT:
            res = self.ims_client.start_platform_agent_instance(agent_instance_id)
        elif atype == AT_EXTDATASET_AGENT:
            res = self.dams_client.start_external_dataset_agent_instance(agent_instance_id)
        else:
            raise BadRequest("Unknown agent type: %s" % atype)
        return res

    def stop_agent_instance(self, agent_instance_id=''):
        """Stop the given Agent Instance. Delegates to the type specific service.
        @param agent_instance_id    str
        @throws NotFound    object with specified id does not exist
        """
        atype, ainst, svc_client = self._get_agent_type(agent_instance_id)
        if atype == AT_INSTUMENT_AGENT:
            res = self.ims_client.stop_instrument_agent_instance(agent_instance_id)
        elif atype == AT_PLATFORM_AGENT:
            res = self.ims_client.stop_platform_agent_instance(agent_instance_id)
        elif atype == AT_EXTDATASET_AGENT:
            res = self.dams_client.stop_external_dataset_agent_instance(agent_instance_id)
        else:
            raise BadRequest("Unknown agent type: %s" % atype)
        return res

    def _get_agent_type(self, agent=''):
        """Resolve the agent type for a resource id or resource object.

        @param agent    str resource id, or an AgentInstance/AgentDefinition object
        @retval (atype, res_obj, svc_client) where atype is one of the AT_* type
                constants, res_obj is the resolved resource object, and svc_client
                is the delegate service client for that type
        @throws BadRequest    if the argument or resource type is not recognized
        """
        if isinstance(agent, str):
            res_obj = self.clients.resource_registry.read(agent)
        elif isinstance(agent, AgentInstance) or isinstance(agent, AgentDefinition):
            res_obj = agent
        else:
            raise BadRequest("Resource must be AgentInstance or AgentDefinition, is: %s" % agent.type_)

        atype = None
        svc_client = None
        # Both the *Agent (definition) and *AgentInstance resource types map to
        # the same delegate service.
        if isinstance(res_obj, (InstrumentAgentInstance, InstrumentAgent)):
            atype = AT_INSTUMENT_AGENT
            svc_client = self.ims_client
        elif isinstance(res_obj, (PlatformAgentInstance, PlatformAgent)):
            atype = AT_PLATFORM_AGENT
            svc_client = self.ims_client
        elif isinstance(res_obj, (ExternalDatasetAgentInstance, ExternalDatasetAgent)):
            atype = AT_EXTDATASET_AGENT
            svc_client = self.dams_client
        else:
            # BUGFIX: message was garbled ("Resource must is unknown AgentInstance")
            raise BadRequest("Resource is not a known agent resource type: %s" % res_obj.type_)
        return atype, res_obj, svc_client
class TestTransformWorker(IonIntegrationTestCase):
    """Integration tests for the transform worker: data processes defined from
    transform functions, granule flow through them, status events, provenance,
    and argument-map validation.
    """

    def setUp(self):
        # Start the capability container and deploy the base service set.
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Instantiate a process to represent the test
        process=TransformWorkerTestProcess()

        # Service clients used throughout the tests.
        self.dataset_management_client = DatasetManagementServiceClient(node=self.container.node)
        self.pubsub_client = PubsubManagementServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceProcessClient(node=self.container.node, process = process)

        # Default temporal/spatial domains used when building data products.
        self.time_dom, self.spatial_dom = time_series_domain()

        self.ph = ParameterHelper(self.dataset_management_client, self.addCleanup)
        # Timeout (seconds) used when waiting on events/granules below.
        self.wait_time = CFG.get_safe('endpoint.receive.timeout', 10)

    def push_granule(self, data_product_id):
        '''
        Publishes a parsed granule to the data product's stream and blocks
        until the dataset monitor confirms the granule arrived.
        '''
        datasets, _ = self.rrclient.find_objects(data_product_id, PRED.hasDataset, id_only=True)
        dataset_monitor = DatasetMonitor(datasets[0])
        rdt = self.ph.rdt_for_data_product(data_product_id)
        self.ph.fill_parsed_rdt(rdt)
        self.ph.publish_rdt_to_data_product(data_product_id, rdt)
        assert dataset_monitor.wait()
        dataset_monitor.stop()

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_transform_worker(self):
        # test that a data process (type: data-product-in / data-product-out) can be defined and launched.
        # verify that the output granule fields are correctly populated
        # test that the input and output data products are linked to facilitate provenance

        self.dp_list = []
        self.data_process_objs = []
        self._output_stream_ids = []
        # Events set by the callbacks below; the test asserts on them at the end.
        self.granule_verified = Event()
        self.worker_assigned_event_verified = Event()
        self.dp_created_event_verified = Event()
        self.heartbeat_event_verified = Event()

        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name(name='ctd_parsed_param_dict', id_only=True)

        # create the StreamDefinition
        self.stream_def_id = self.pubsub_client.create_stream_definition(name='stream_def', parameter_dictionary_id=self.parameter_dict_id)
        self.addCleanup(self.pubsub_client.delete_stream_definition, self.stream_def_id)

        # create the DataProduct that is the input to the data processes
        input_dp_obj = IonObject(RT.DataProduct, name='input_data_product', description='input test stream',
            temporal_domain = self.time_dom.dump(), spatial_domain = self.spatial_dom.dump())
        self.input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj, stream_definition_id=self.stream_def_id)

        # retrieve the Stream for this data product
        stream_ids, assoc_ids = self.rrclient.find_objects(self.input_dp_id, PRED.hasStream, RT.Stream, True)
        self.stream_id = stream_ids[0]

        self.start_event_listener()

        # create the DPD, DataProcess and output DataProduct
        dataprocessdef_id, dataprocess_id, dataproduct_id = self.create_data_process()
        self.dp_list.append(dataprocess_id)

        # validate the repository for data product algorithms persists the new resources  NEW SA-1
        # create_data_process call created one of each
        dpd_ids, _ = self.rrclient.find_resources(restype=OT.DataProcessDefinition, id_only=False)
        # there will be more than one because of the DPDs that represent the PFs in the data product above
        self.assertTrue(dpd_ids is not None)
        dp_ids, _ = self.rrclient.find_resources(restype=OT.DataProcess, id_only=False)
        # only one DP because the PFs that are in the code dataproduct above are not activated yet.
        self.assertEquals(len(dp_ids), 1)

        # validate the name and version label  NEW SA-2
        dataprocessdef_obj = self.dataprocessclient.read_data_process_definition(dataprocessdef_id)
        self.assertEqual(dataprocessdef_obj.version_label, '1.0a')
        self.assertEqual(dataprocessdef_obj.name, 'add_arrays')

        # validate that the DPD has an attachment  NEW SA-21
        attachment_ids, assoc_ids = self.rrclient.find_objects(dataprocessdef_id, PRED.hasAttachment, RT.Attachment, True)
        self.assertEqual(len(attachment_ids), 1)
        attachment_obj = self.rrclient.read_attachment(attachment_ids[0])
        log.debug('attachment: %s', attachment_obj)

        # validate that the data process resource has input and output data products associated
        # L4-CI-SA-RQ-364 and NEW SA-3
        outproduct_ids, assoc_ids = self.rrclient.find_objects(dataprocess_id, PRED.hasOutputProduct, RT.DataProduct, True)
        self.assertEqual(len(outproduct_ids), 1)
        inproduct_ids, assoc_ids = self.rrclient.find_objects(dataprocess_id, PRED.hasInputProduct, RT.DataProduct, True)
        self.assertEqual(len(inproduct_ids), 1)

        # Test for provenance. Get Data product produced by the data processes
        output_data_product_id,_ = self.rrclient.find_objects(subject=dataprocess_id, object_type=RT.DataProduct, predicate=PRED.hasOutputProduct, id_only=True)

        output_data_product_provenance = self.dataproductclient.get_data_product_provenance(output_data_product_id[0])

        # Do a basic check on the provenance graph: parent and child, and the
        # DataProcessDefinition creating the child from the parent.
        self.assertTrue(len(output_data_product_provenance) == 2)
        self.assertTrue(self.input_dp_id in output_data_product_provenance[output_data_product_id[0]]['parents'])
        self.assertTrue(output_data_product_provenance[output_data_product_id[0]]['parents'][self.input_dp_id]['data_process_definition_id'] == dataprocessdef_id)

        # NEW SA-4 | Data processing shall include the appropriate data product algorithm name and version number in
        # the metadata of each output data product created by the data product algorithm.
        output_data_product_obj,_ = self.rrclient.find_objects(subject=dataprocess_id, object_type=RT.DataProduct, predicate=PRED.hasOutputProduct, id_only=False)
        self.assertTrue(output_data_product_obj[0].name != None)
        self.assertTrue(output_data_product_obj[0]._rev != None)

        # retrieve subscription from data process
        subscription_objs, _ = self.rrclient.find_objects(subject=dataprocess_id, predicate=PRED.hasSubscription, object_type=RT.Subscription, id_only=False)
        log.debug('test_transform_worker subscription_obj: %s', subscription_objs[0])

        # create a queue to catch the published granules
        self.subscription_id = self.pubsub_client.create_subscription(name='parsed_subscription', stream_ids=[self.stream_id], exchange_name=subscription_objs[0].exchange_name)
        self.addCleanup(self.pubsub_client.delete_subscription, self.subscription_id)

        self.pubsub_client.activate_subscription(self.subscription_id)
        self.addCleanup(self.pubsub_client.deactivate_subscription, self.subscription_id)

        stream_route = self.pubsub_client.read_stream_route(self.stream_id)
        self.publisher = StandaloneStreamPublisher(stream_id=self.stream_id, stream_route=stream_route )

        # Publish 100 granules so the worker also emits its per-100 heartbeat event.
        for n in range(1, 101):
            rdt = RecordDictionaryTool(stream_definition_id=self.stream_def_id)
            rdt['time'] = [0] # time should always come first
            rdt['conductivity'] = [1]
            rdt['pressure'] = [2]
            rdt['salinity'] = [8]

            self.publisher.publish(rdt.to_granule())

        # validate that the output granule is received and the updated value is correct
        self.assertTrue(self.granule_verified.wait(self.wait_time))

        # validate that the data process loaded into worker event is received (L4-CI-SA-RQ-182)
        self.assertTrue(self.worker_assigned_event_verified.wait(self.wait_time))

        # validate that the data process create (with data product ids) event is received (NEW SA-42)
        self.assertTrue(self.dp_created_event_verified.wait(self.wait_time))

        # validate that the data process heartbeat event is received (for every hundred granules processed) (L4-CI-SA-RQ-182)
        # this takes a while so set wait limit to large value
        self.assertTrue(self.heartbeat_event_verified.wait(200))

        # validate that the code from the transform function can be retrieve via inspect_data_process_definition
        src = self.dataprocessclient.inspect_data_process_definition(dataprocessdef_id)
        self.assertIn( 'def add_arrays(a, b)', src)

        # now delete the DPD and DP then verify that the resources are retired so that information required for provenance are still available
        self.dataprocessclient.delete_data_process(dataprocess_id)
        self.dataprocessclient.delete_data_process_definition(dataprocessdef_id)

        in_dp_objs, _ = self.rrclient.find_objects(subject=dataprocess_id, predicate=PRED.hasInputProduct, object_type=RT.DataProduct, id_only=True)
        self.assertTrue(in_dp_objs is not None)

        dpd_objs, _ = self.rrclient.find_subjects(subject_type=RT.DataProcessDefinition, predicate=PRED.hasDataProcess, object=dataprocess_id, id_only=True)
        self.assertTrue(dpd_objs is not None)

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_transform_worker_with_instrumentdevice(self):
        # test that a data process (type: data-product-in / data-product-out) can be defined and launched.
        # verify that the output granule fields are correctly populated
        # test that the input and output data products are linked to facilitate provenance

        self.data_process_objs = []
        self._output_stream_ids = []
        self.event_verified = Event()

        # Create CTD Parsed as the initial data product
        # create a stream definition for the data from the ctd simulator
        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        self.stream_def_id = self.pubsub_client.create_stream_definition(name='stream_def', parameter_dictionary_id=self.parameter_dict_id)

        # create the DataProduct that is the input to the data processes
        input_dp_obj = IonObject(RT.DataProduct, name='input_data_product', description='input test stream',
            temporal_domain = self.time_dom.dump(), spatial_domain = self.spatial_dom.dump())
        self.input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj, stream_definition_id=self.stream_def_id)

        # retrieve the Stream for this data product
        stream_ids, assoc_ids = self.rrclient.find_objects(self.input_dp_id, PRED.hasStream, RT.Stream, True)
        self.stream_id = stream_ids[0]

        log.debug('new ctd_parsed_data_product_id = %s' % self.input_dp_id)

        # only ever need one device for testing purposes; reuse it if it exists.
        instDevice_obj,_ = self.rrclient.find_resources(restype=RT.InstrumentDevice, name='test_ctd_device')
        if instDevice_obj:
            instDevice_id = instDevice_obj[0]._id
        else:
            instDevice_obj = IonObject(RT.InstrumentDevice, name='test_ctd_device', description="test_ctd_device", serial_number="12345" )
            instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=self.input_dp_id)

        # create the DPD, DataProcess and output DataProduct
        dataprocessdef_id, dataprocess_id, dataproduct_id = self.create_data_process()
        self.addCleanup(self.dataprocessclient.delete_data_process, dataprocess_id)
        self.addCleanup(self.dataprocessclient.delete_data_process_definition, dataprocessdef_id)

        # Test for provenance. Get Data product produced by the data processes
        output_data_product_id,_ = self.rrclient.find_objects(subject=dataprocess_id, object_type=RT.DataProduct, predicate=PRED.hasOutputProduct, id_only=True)

        output_data_product_provenance = self.dataproductclient.get_data_product_provenance(output_data_product_id[0])

        # Do a basic check to see if there were 3 entries in the provenance graph. Parent and Child and the
        # DataProcessDefinition creating the child from the parent.
        self.assertTrue(len(output_data_product_provenance) == 3)
        self.assertTrue(self.input_dp_id in output_data_product_provenance[output_data_product_id[0]]['parents'])
        self.assertTrue(instDevice_id in output_data_product_provenance[self.input_dp_id]['parents'])
        self.assertTrue(output_data_product_provenance[instDevice_id]['type'] == 'InstrumentDevice')

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_transform_worker_with_platformdevice(self):
        # test that a data process (type: data-product-in / data-product-out) can be defined and launched.
        # verify that the output granule fields are correctly populated
        # test that the input and output data products are linked to facilitate provenance

        self.data_process_objs = []
        self._output_stream_ids = []
        self.event_verified = Event()

        # Create CTD Parsed as the initial data product
        # create a stream definition for the data from the ctd simulator
        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        self.stream_def_id = self.pubsub_client.create_stream_definition(name='stream_def', parameter_dictionary_id=self.parameter_dict_id)

        # create the DataProduct that is the input to the data processes
        input_dp_obj = IonObject(RT.DataProduct, name='input_data_product', description='input test stream',
            temporal_domain = self.time_dom.dump(), spatial_domain = self.spatial_dom.dump())
        self.input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj, stream_definition_id=self.stream_def_id)

        # retrieve the Stream for this data product
        stream_ids, assoc_ids = self.rrclient.find_objects(self.input_dp_id, PRED.hasStream, RT.Stream, True)
        self.stream_id = stream_ids[0]

        log.debug('new ctd_parsed_data_product_id = %s' % self.input_dp_id)

        # only ever need one device for testing purposes; reuse it if it exists.
        platform_device_obj,_ = self.rrclient.find_resources(restype=RT.PlatformDevice, name='TestPlatform')
        if platform_device_obj:
            platform_device_id = platform_device_obj[0]._id
        else:
            platform_device_obj = IonObject(RT.PlatformDevice, name='TestPlatform', description="TestPlatform", serial_number="12345" )
            platform_device_id = self.imsclient.create_platform_device(platform_device=platform_device_obj)

        self.damsclient.assign_data_product(input_resource_id=platform_device_id, data_product_id=self.input_dp_id)

        # create the DPD, DataProcess and output DataProduct
        dataprocessdef_id, dataprocess_id, dataproduct_id = self.create_data_process()
        self.addCleanup(self.dataprocessclient.delete_data_process, dataprocess_id)
        self.addCleanup(self.dataprocessclient.delete_data_process_definition, dataprocessdef_id)

        # Test for provenance. Get Data product produced by the data processes
        output_data_product_id,_ = self.rrclient.find_objects(subject=dataprocess_id, object_type=RT.DataProduct, predicate=PRED.hasOutputProduct, id_only=True)

        output_data_product_provenance = self.dataproductclient.get_data_product_provenance(output_data_product_id[0])

        # Do a basic check to see if there were 3 entries in the provenance graph. Parent and Child and the
        # DataProcessDefinition creating the child from the parent.
        self.assertTrue(len(output_data_product_provenance) == 3)
        self.assertTrue(self.input_dp_id in output_data_product_provenance[output_data_product_id[0]]['parents'])
        self.assertTrue(platform_device_id in output_data_product_provenance[self.input_dp_id]['parents'])
        self.assertTrue(output_data_product_provenance[platform_device_id]['type'] == 'PlatformDevice')

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_event_transform_worker(self):
        self.data_process_objs = []
        self._output_stream_ids = []
        self.event_verified = Event()

        # test that a data process (type: data-product-in / event-out) can be defined and launched.
        # verify that event fields are correctly populated

        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name(name='ctd_parsed_param_dict', id_only=True)

        # create the StreamDefinition
        self.stream_def_id = self.pubsub_client.create_stream_definition(name='stream_def', parameter_dictionary_id=self.parameter_dict_id)
        self.addCleanup(self.pubsub_client.delete_stream_definition, self.stream_def_id)

        # create the DataProduct
        input_dp_obj = IonObject(RT.DataProduct, name='input_data_product', description='input test stream',
            temporal_domain = self.time_dom.dump(), spatial_domain = self.spatial_dom.dump())
        self.input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj, stream_definition_id=self.stream_def_id)

        # retrieve the Stream for this data product
        stream_ids, assoc_ids = self.rrclient.find_objects(self.input_dp_id, PRED.hasStream, RT.Stream, True)
        self.stream_id = stream_ids[0]

        # create the DPD and two DPs
        self.event_data_process_id = self.create_event_data_processes()

        # retrieve subscription from data process
        subscription_objs, _ = self.rrclient.find_objects(subject=self.event_data_process_id, predicate=PRED.hasSubscription, object_type=RT.Subscription, id_only=False)
        log.debug('test_event_transform_worker subscription_obj: %s', subscription_objs[0])

        # create a queue to catch the published granules
        self.subscription_id = self.pubsub_client.create_subscription(name='parsed_subscription', stream_ids=[self.stream_id], exchange_name=subscription_objs[0].exchange_name)
        self.addCleanup(self.pubsub_client.delete_subscription, self.subscription_id)

        self.pubsub_client.activate_subscription(self.subscription_id)
        self.addCleanup(self.pubsub_client.deactivate_subscription, self.subscription_id)

        stream_route = self.pubsub_client.read_stream_route(self.stream_id)
        self.publisher = StandaloneStreamPublisher(stream_id=self.stream_id, stream_route=stream_route )

        self.start_event_transform_listener()

        self.data_modified = Event()

        rdt = RecordDictionaryTool(stream_definition_id=self.stream_def_id)
        rdt['time'] = [0] # time should always come first
        rdt['conductivity'] = [1]
        rdt['pressure'] = [2]
        rdt['salinity'] = [8]

        self.publisher.publish(rdt.to_granule())

        self.assertTrue(self.event_verified.wait(self.wait_time))

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_bad_argument_map(self):
        self._output_stream_ids = []

        # test that a data process (type: data-product-in / data-product-out) parameter mapping it validated during
        # data process creation and that the correct exception is raised for both input and output.

        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name(name='ctd_parsed_param_dict', id_only=True)

        # create the StreamDefinition
        self.stream_def_id = self.pubsub_client.create_stream_definition(name='stream_def', parameter_dictionary_id=self.parameter_dict_id)
        self.addCleanup(self.pubsub_client.delete_stream_definition, self.stream_def_id)

        # create the DataProduct that is the input to the data processes
        input_dp_obj = IonObject(RT.DataProduct, name='input_data_product', description='input test stream',
            temporal_domain = self.time_dom.dump(), spatial_domain = self.spatial_dom.dump())
        self.input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj, stream_definition_id=self.stream_def_id)

        # two data processes using one transform and one DPD
        dp1_func_output_dp_id = self.create_output_data_product()

        # Set up DPD and DP #2 - array add function
        tf_obj = IonObject(RT.TransformFunction,
            name='add_array_func',
            description='adds values in an array',
            function='add_arrays',
            module="ion_example.add_arrays",
            arguments=['arr1', 'arr2'],
            function_type=TransformFunctionType.TRANSFORM,
            uri='http://sddevrepo.oceanobservatories.org/releases/ion_example-0.1-py2.7.egg'
            )
        add_array_func_id, rev = self.rrclient.create(tf_obj)

        dpd_obj = IonObject(RT.DataProcessDefinition,
            name='add_arrays',
            description='adds the values of two arrays',
            data_process_type=DataProcessTypeEnum.TRANSFORM_PROCESS
            )
        add_array_dpd_id = self.dataprocessclient.create_data_process_definition(data_process_definition=dpd_obj, function_id=add_array_func_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(self.stream_def_id, add_array_dpd_id, binding='add_array_func' )

        # create the data process with invalid argument map
        argument_map = {"arr1": "foo", "arr2": "bar"}
        output_param = "salinity"
        with self.assertRaises(BadRequest) as cm:
            dp1_data_process_id = self.dataprocessclient.create_data_process(data_process_definition_id=add_array_dpd_id, inputs=[self.input_dp_id],
                outputs=[dp1_func_output_dp_id], argument_map=argument_map, out_param_name=output_param)

        ex = cm.exception
        log.debug(' exception raised: %s', cm)
        self.assertEqual(ex.message, "Input data product does not contain the parameters defined in argument map")

        # create the data process with invalid output parameter name
        argument_map = {"arr1": "conductivity", "arr2": "pressure"}
        output_param = "foo"
        with self.assertRaises(BadRequest) as cm:
            dp1_data_process_id = self.dataprocessclient.create_data_process(data_process_definition_id=add_array_dpd_id, inputs=[self.input_dp_id],
                outputs=[dp1_func_output_dp_id], argument_map=argument_map, out_param_name=output_param)

        ex = cm.exception
        log.debug(' exception raised: %s', cm)
        self.assertEqual(ex.message, "Output data product does not contain the output parameter name provided")

    def create_event_data_processes(self):
        """Create a transform-function DPD and an event-out data process wired to
        the test's input data product; returns the data process id.
        """
        # two data processes using one transform and one DPD
        argument_map= {"a": "salinity"}

        # set up DPD and DP #2 - array add function
        tf_obj = IonObject(RT.TransformFunction,
            name='validate_salinity_array',
            description='validate_salinity_array',
            function='validate_salinity_array',
            module="ion.processes.data.transforms.test.test_transform_worker",
            arguments=['a'],
            function_type=TransformFunctionType.TRANSFORM
            )
        add_array_func_id, rev = self.rrclient.create(tf_obj)

        dpd_obj = IonObject(RT.DataProcessDefinition,
            name='validate_salinity_array',
            description='validate_salinity_array',
            data_process_type=DataProcessTypeEnum.TRANSFORM_PROCESS,
            )
        add_array_dpd_id = self.dataprocessclient.create_data_process_definition(data_process_definition=dpd_obj, function_id=add_array_func_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(self.stream_def_id, add_array_dpd_id, binding='validate_salinity_array' )

        # create the data process
        dp1_data_process_id = self.dataprocessclient.create_data_process(data_process_definition_id=add_array_dpd_id, inputs=[self.input_dp_id],
            outputs=None, argument_map=argument_map)
        self.damsclient.register_process(dp1_data_process_id)
        self.addCleanup(self.dataprocessclient.delete_data_process, dp1_data_process_id)

        return dp1_data_process_id

    def create_data_process(self):
        """Create the add_arrays DPD, its data process and output data product.
        @retval (dpd_id, data_process_id, output_data_product_id)
        """
        # two data processes using one transform and one DPD

        dp1_func_output_dp_id = self.create_output_data_product()
        argument_map = {"arr1": "conductivity", "arr2": "pressure"}
        output_param = "salinity"

        # set up DPD and DP #2 - array add function
        tf_obj = IonObject(RT.TransformFunction,
            name='add_array_func',
            description='adds values in an array',
            function='add_arrays',
            module="ion_example.add_arrays",
            arguments=['arr1', 'arr2'],
            function_type=TransformFunctionType.TRANSFORM,
            uri='http://sddevrepo.oceanobservatories.org/releases/ion_example-0.1-py2.7.egg'
            )
        add_array_func_id, rev = self.rrclient.create(tf_obj)

        dpd_obj = IonObject(RT.DataProcessDefinition,
            name='add_arrays',
            description='adds the values of two arrays',
            data_process_type=DataProcessTypeEnum.TRANSFORM_PROCESS,
            version_label='1.0a'
            )
        add_array_dpd_id = self.dataprocessclient.create_data_process_definition(data_process_definition=dpd_obj, function_id=add_array_func_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(self.stream_def_id, add_array_dpd_id, binding='add_array_func' )

        # create the data process
        dp1_data_process_id = self.dataprocessclient.create_data_process(data_process_definition_id=add_array_dpd_id, inputs=[self.input_dp_id],
            outputs=[dp1_func_output_dp_id], argument_map=argument_map, out_param_name=output_param)
        self.damsclient.register_process(dp1_data_process_id)
        #self.addCleanup(self.dataprocessclient.delete_data_process, dp1_data_process_id)

        # add an attachment object to this DPD to test new SA-21
        import msgpack
        attachment_content = 'foo bar'
        attachment_obj = IonObject( RT.Attachment,
            name='test_attachment',
            attachment_type=AttachmentType.ASCII,
            content_type='text/plain',
            content=msgpack.packb(attachment_content))
        att_id = self.rrclient.create_attachment(add_array_dpd_id, attachment_obj)
        self.addCleanup(self.rrclient.delete_attachment, att_id)

        return add_array_dpd_id, dp1_data_process_id, dp1_func_output_dp_id

    def create_output_data_product(self):
        """Create the output data product plus a validator subscriber that checks
        each granule published to it; returns the output data product id.
        """
        dp1_outgoing_stream_id = self.pubsub_client.create_stream_definition(name='dp1_stream', parameter_dictionary_id=self.parameter_dict_id)

        dp1_output_dp_obj = IonObject(RT.DataProduct,
            name='data_process1_data_product',
            description='output of add array func',
            temporal_domain = self.time_dom.dump(),
            spatial_domain = self.spatial_dom.dump())

        dp1_func_output_dp_id = self.dataproductclient.create_data_product(dp1_output_dp_obj, dp1_outgoing_stream_id)
        self.addCleanup(self.dataproductclient.delete_data_product, dp1_func_output_dp_id)

        # retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger
        stream_ids, _ = self.rrclient.find_objects(dp1_func_output_dp_id, PRED.hasStream, None, True)
        self._output_stream_ids.append(stream_ids[0])

        subscription_id = self.pubsub_client.create_subscription('validator', data_product_ids=[dp1_func_output_dp_id])
        self.addCleanup(self.pubsub_client.delete_subscription, subscription_id)

        def on_granule(msg, route, stream_id):
            # Validate each received granule, then signal the waiting test.
            log.debug('recv_packet stream_id: %s route: %s msg: %s', stream_id, route, msg)
            self.validate_output_granule(msg, route, stream_id)
            self.granule_verified.set()

        validator = StandaloneStreamSubscriber('validator', callback=on_granule)
        validator.start()
        self.addCleanup(validator.stop)

        self.pubsub_client.activate_subscription(subscription_id)
        self.addCleanup(self.pubsub_client.deactivate_subscription, subscription_id)

        return dp1_func_output_dp_id

    def validate_event(self, *args, **kwargs):
        """
        This method is a callback function for receiving DataProcessStatusEvent.
        Sets the Event flag matching the status message carried by the event.
        """
        data_process_event = args[0]
        log.debug("DataProcessStatusEvent: %s" , str(data_process_event.__dict__))

        # if data process already created, check origin
        if self.dp_list:
            self.assertIn( data_process_event.origin, self.dp_list)

        # if this is a heartbeat event then 100 granules have been processed
        if 'data process status update.' in data_process_event.description:
            self.heartbeat_event_verified.set()
        else:
            # else check that this is the assign event
            if 'Data process assigned to transform worker' in data_process_event.description:
                self.worker_assigned_event_verified.set()
            elif 'Data process created for data product' in data_process_event.description:
                self.dp_created_event_verified.set()

    def validate_output_granule(self, msg, route, stream_id):
        # The output granule must come from a registered output stream and carry
        # salinity = conductivity + pressure = 1 + 2 = 3 from add_arrays.
        self.assertIn( stream_id, self._output_stream_ids)

        rdt = RecordDictionaryTool.load_from_granule(msg)
        log.debug('validate_output_granule rdt: %s', rdt)
        sal_val = rdt['salinity']
        np.testing.assert_array_equal(sal_val, np.array([3]))

    def start_event_listener(self):
        # Subscribe to DataProcessStatusEvent; unsubscribed automatically at teardown.
        es = EventSubscriber(event_type=OT.DataProcessStatusEvent, callback=self.validate_event)
        es.start()
        self.addCleanup(es.stop)

    def validate_transform_event(self, *args, **kwargs):
        """
        This method is a callback function for receiving DeviceStatusAlertEvent
        published by the event-out data process; verifies origin and values.
        """
        status_alert_event = args[0]
        np.testing.assert_array_equal(status_alert_event.origin, self.stream_id )
        np.testing.assert_array_equal(status_alert_event.values, np.array([self.event_data_process_id]))
        log.debug("DeviceStatusAlertEvent: %s" , str(status_alert_event.__dict__))
        self.event_verified.set()

    def start_event_transform_listener(self):
        # Subscribe to DeviceStatusAlertEvent; unsubscribed automatically at teardown.
        es = EventSubscriber(event_type=OT.DeviceStatusAlertEvent, callback=self.validate_transform_event)
        es.start()
        self.addCleanup(es.stop)

    def test_download(self):
        # Verify that the transform worker can download an egg and that the
        # packaged add_arrays function works once on the path.
        egg_url = 'http://sddevrepo.oceanobservatories.org/releases/ion_example-0.1-py2.7.egg'
        egg_path = TransformWorker.download_egg(egg_url)
        import pkg_resources
        pkg_resources.working_set.add_entry(egg_path)
        from ion_example.add_arrays import add_arrays
        a = add_arrays(1,2)
        self.assertEquals(a,3)
def on_init(self): self.ims_client = InstrumentManagementServiceProcessClient(process=self) self.dams_client = DataAcquisitionManagementServiceProcessClient(process=self)
class TestTransformWorker(IonIntegrationTestCase):
    """
    Integration tests for the transform worker: data processes of type
    data-product-in / data-product-out (and event-out), their provenance
    links, status events and the egg-based transform function loading.
    """

    def setUp(self):
        """Start the container, deploy the r2 services, and build the service clients."""
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Instantiate a process to represent the test
        process = TransformWorkerTestProcess()

        self.dataset_management_client = DatasetManagementServiceClient(node=self.container.node)
        self.pubsub_client = PubsubManagementServiceClient(node=self.container.node)
        self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        # process client so calls are issued in the context of the test process
        self.imsclient = InstrumentManagementServiceProcessClient(node=self.container.node, process=process)

        self.time_dom, self.spatial_dom = time_series_domain()
        self.ph = ParameterHelper(self.dataset_management_client, self.addCleanup)
        self.wait_time = CFG.get_safe('endpoint.receive.timeout', 10)

    def push_granule(self, data_product_id):
        ''' Publishes and monitors that the granule arrived '''
        datasets, _ = self.rrclient.find_objects(data_product_id, PRED.hasDataset, id_only=True)
        dataset_monitor = DatasetMonitor(datasets[0])

        rdt = self.ph.rdt_for_data_product(data_product_id)
        self.ph.fill_parsed_rdt(rdt)
        self.ph.publish_rdt_to_data_product(data_product_id, rdt)

        assert dataset_monitor.wait()
        dataset_monitor.stop()

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_transform_worker(self):
        # test that a data process (type: data-product-in / data-product-out) can be defined and launched.
        # verify that the output granule fields are correctly populated
        # test that the input and output data products are linked to facilitate provenance

        self.dp_list = []
        self.data_process_objs = []
        self._output_stream_ids = []
        self.granule_verified = Event()
        self.worker_assigned_event_verified = Event()
        self.dp_created_event_verified = Event()
        self.heartbeat_event_verified = Event()

        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name(name='ctd_parsed_param_dict', id_only=True)

        # create the StreamDefinition
        self.stream_def_id = self.pubsub_client.create_stream_definition(name='stream_def', parameter_dictionary_id=self.parameter_dict_id)
        self.addCleanup(self.pubsub_client.delete_stream_definition, self.stream_def_id)

        # create the DataProduct that is the input to the data processes
        input_dp_obj = IonObject(RT.DataProduct, name='input_data_product', description='input test stream')
        self.input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj, stream_definition_id=self.stream_def_id)

        # retrieve the Stream for this data product
        stream_ids, assoc_ids = self.rrclient.find_objects(self.input_dp_id, PRED.hasStream, RT.Stream, True)
        self.stream_id = stream_ids[0]

        self.start_event_listener()

        # create the DPD, DataProcess and output DataProduct
        dataprocessdef_id, dataprocess_id, dataproduct_id = self.create_data_process()
        self.dp_list.append(dataprocess_id)

        # validate the repository for data product algorithms persists the new resources NEW SA-1
        # create_data_process call created one of each
        dpd_ids, _ = self.rrclient.find_resources(restype=OT.DataProcessDefinition, id_only=False)
        # there will be more than one because of the DPDs that represent the PFs in the data product above
        self.assertTrue(dpd_ids is not None)
        dp_ids, _ = self.rrclient.find_resources(restype=OT.DataProcess, id_only=False)
        # only one DP because the PFs that are in the code dataproduct above are not activated yet.
        self.assertEquals(len(dp_ids), 1)

        # validate the name and version label NEW SA - 2
        dataprocessdef_obj = self.dataprocessclient.read_data_process_definition(dataprocessdef_id)
        self.assertEqual(dataprocessdef_obj.version_label, '1.0a')
        self.assertEqual(dataprocessdef_obj.name, 'add_arrays')

        # validate that the DPD has an attachment NEW SA - 21
        attachment_ids, assoc_ids = self.rrclient.find_objects(dataprocessdef_id, PRED.hasAttachment, RT.Attachment, True)
        self.assertEqual(len(attachment_ids), 1)
        attachment_obj = self.rrclient.read_attachment(attachment_ids[0])
        log.debug('attachment: %s', attachment_obj)

        # validate that the data process resource has input and output data products associated
        # L4-CI-SA-RQ-364 and NEW SA-3
        outproduct_ids, assoc_ids = self.rrclient.find_objects(dataprocess_id, PRED.hasOutputProduct, RT.DataProduct, True)
        self.assertEqual(len(outproduct_ids), 1)
        inproduct_ids, assoc_ids = self.rrclient.find_objects(dataprocess_id, PRED.hasInputProduct, RT.DataProduct, True)
        self.assertEqual(len(inproduct_ids), 1)

        # Test for provenance. Get Data product produced by the data processes
        output_data_product_id, _ = self.rrclient.find_objects(subject=dataprocess_id, object_type=RT.DataProduct, predicate=PRED.hasOutputProduct, id_only=True)

        output_data_product_provenance = self.dataproductclient.get_data_product_provenance(output_data_product_id[0])

        # Do a basic check that there are 2 entries in the provenance graph:
        # the child output product and its parent input product (linked via the DPD).
        self.assertTrue(len(output_data_product_provenance) == 2)
        self.assertTrue(self.input_dp_id in output_data_product_provenance[output_data_product_id[0]]['parents'])
        self.assertTrue(output_data_product_provenance[output_data_product_id[0]]['parents'][self.input_dp_id]['data_process_definition_id'] == dataprocessdef_id)

        # NEW SA - 4 | Data processing shall include the appropriate data product algorithm name and version number in
        # the metadata of each output data product created by the data product algorithm.
        output_data_product_obj, _ = self.rrclient.find_objects(subject=dataprocess_id, object_type=RT.DataProduct, predicate=PRED.hasOutputProduct, id_only=False)
        self.assertTrue(output_data_product_obj[0].name != None)
        self.assertTrue(output_data_product_obj[0]._rev != None)

        # retrieve subscription from data process
        subscription_objs, _ = self.rrclient.find_objects(subject=dataprocess_id, predicate=PRED.hasSubscription, object_type=RT.Subscription, id_only=False)
        log.debug('test_transform_worker subscription_obj: %s', subscription_objs[0])

        # create a queue to catch the published granules
        self.subscription_id = self.pubsub_client.create_subscription(name='parsed_subscription', stream_ids=[self.stream_id], exchange_name=subscription_objs[0].exchange_name)
        self.addCleanup(self.pubsub_client.delete_subscription, self.subscription_id)

        self.pubsub_client.activate_subscription(self.subscription_id)
        self.addCleanup(self.pubsub_client.deactivate_subscription, self.subscription_id)

        stream_route = self.pubsub_client.read_stream_route(self.stream_id)
        self.publisher = StandaloneStreamPublisher(stream_id=self.stream_id, stream_route=stream_route)

        # publish 100 granules so the worker emits its per-100 heartbeat event
        for n in range(1, 101):
            rdt = RecordDictionaryTool(stream_definition_id=self.stream_def_id)
            rdt['time'] = [0]  # time should always come first
            rdt['conductivity'] = [1]
            rdt['pressure'] = [2]
            rdt['salinity'] = [8]
            self.publisher.publish(rdt.to_granule())

        # validate that the output granule is received and the updated value is correct
        self.assertTrue(self.granule_verified.wait(self.wait_time))

        # validate that the data process loaded into worker event is received (L4-CI-SA-RQ-182)
        self.assertTrue(self.worker_assigned_event_verified.wait(self.wait_time))

        # validate that the data process create (with data product ids) event is received (NEW SA -42)
        self.assertTrue(self.dp_created_event_verified.wait(self.wait_time))

        # validate that the data process heartbeat event is received (for every hundred granules processed) (L4-CI-SA-RQ-182)
        # this takes a while so set wait limit to large value
        self.assertTrue(self.heartbeat_event_verified.wait(200))

        # validate that the code from the transform function can be retrieve via inspect_data_process_definition
        src = self.dataprocessclient.inspect_data_process_definition(dataprocessdef_id)
        self.assertIn('def add_arrays(a, b)', src)

        # now delete the DPD and DP then verify that the resources are retired so that information required for provenance are still available
        self.dataprocessclient.delete_data_process(dataprocess_id)
        self.dataprocessclient.delete_data_process_definition(dataprocessdef_id)

        in_dp_objs, _ = self.rrclient.find_objects(subject=dataprocess_id, predicate=PRED.hasInputProduct, object_type=RT.DataProduct, id_only=True)
        self.assertTrue(in_dp_objs is not None)

        dpd_objs, _ = self.rrclient.find_subjects(subject_type=RT.DataProcessDefinition, predicate=PRED.hasDataProcess, object=dataprocess_id, id_only=True)
        self.assertTrue(dpd_objs is not None)

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_transform_worker_with_instrumentdevice(self):
        # test that a data process (type: data-product-in / data-product-out) can be defined and launched.
        # verify that the output granule fields are correctly populated
        # test that the input and output data products are linked to facilitate provenance

        self.data_process_objs = []
        self._output_stream_ids = []
        self.event_verified = Event()

        # Create CTD Parsed as the initial data product
        # create a stream definition for the data from the ctd simulator
        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        self.stream_def_id = self.pubsub_client.create_stream_definition(name='stream_def', parameter_dictionary_id=self.parameter_dict_id)

        # create the DataProduct that is the input to the data processes
        input_dp_obj = IonObject(RT.DataProduct, name='input_data_product', description='input test stream')
        self.input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj, stream_definition_id=self.stream_def_id)

        # retrieve the Stream for this data product
        stream_ids, assoc_ids = self.rrclient.find_objects(self.input_dp_id, PRED.hasStream, RT.Stream, True)
        self.stream_id = stream_ids[0]

        log.debug('new ctd_parsed_data_product_id = %s' % self.input_dp_id)

        # only ever need one device for testing purposes.
        instDevice_obj, _ = self.rrclient.find_resources(restype=RT.InstrumentDevice, name='test_ctd_device')
        if instDevice_obj:
            instDevice_id = instDevice_obj[0]._id
        else:
            instDevice_obj = IonObject(RT.InstrumentDevice, name='test_ctd_device', description="test_ctd_device", serial_number="12345")
            instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=self.input_dp_id)

        # create the DPD, DataProcess and output DataProduct
        dataprocessdef_id, dataprocess_id, dataproduct_id = self.create_data_process()
        self.addCleanup(self.dataprocessclient.delete_data_process, dataprocess_id)
        self.addCleanup(self.dataprocessclient.delete_data_process_definition, dataprocessdef_id)

        # Test for provenance. Get Data product produced by the data processes
        output_data_product_id, _ = self.rrclient.find_objects(subject=dataprocess_id, object_type=RT.DataProduct, predicate=PRED.hasOutputProduct, id_only=True)

        output_data_product_provenance = self.dataproductclient.get_data_product_provenance(output_data_product_id[0])

        # Do a basic check to see if there were 3 entries in the provenance graph. Parent and Child and the
        # DataProcessDefinition creating the child from the parent.
        self.assertTrue(len(output_data_product_provenance) == 3)
        self.assertTrue(self.input_dp_id in output_data_product_provenance[output_data_product_id[0]]['parents'])
        self.assertTrue(instDevice_id in output_data_product_provenance[self.input_dp_id]['parents'])
        self.assertTrue(output_data_product_provenance[instDevice_id]['type'] == 'InstrumentDevice')

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_transform_worker_with_platformdevice(self):
        # test that a data process (type: data-product-in / data-product-out) can be defined and launched.
        # verify that the output granule fields are correctly populated
        # test that the input and output data products are linked to facilitate provenance

        self.data_process_objs = []
        self._output_stream_ids = []
        self.event_verified = Event()

        # Create CTD Parsed as the initial data product
        # create a stream definition for the data from the ctd simulator
        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        self.stream_def_id = self.pubsub_client.create_stream_definition(name='stream_def', parameter_dictionary_id=self.parameter_dict_id)

        # create the DataProduct that is the input to the data processes
        input_dp_obj = IonObject(RT.DataProduct, name='input_data_product', description='input test stream')
        self.input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj, stream_definition_id=self.stream_def_id)

        # retrieve the Stream for this data product
        stream_ids, assoc_ids = self.rrclient.find_objects(self.input_dp_id, PRED.hasStream, RT.Stream, True)
        self.stream_id = stream_ids[0]

        log.debug('new ctd_parsed_data_product_id = %s' % self.input_dp_id)

        # only ever need one device for testing purposes.
        platform_device_obj, _ = self.rrclient.find_resources(restype=RT.PlatformDevice, name='TestPlatform')
        if platform_device_obj:
            platform_device_id = platform_device_obj[0]._id
        else:
            platform_device_obj = IonObject(RT.PlatformDevice, name='TestPlatform', description="TestPlatform", serial_number="12345")
            platform_device_id = self.imsclient.create_platform_device(platform_device=platform_device_obj)

        self.damsclient.assign_data_product(input_resource_id=platform_device_id, data_product_id=self.input_dp_id)

        # create the DPD, DataProcess and output DataProduct
        dataprocessdef_id, dataprocess_id, dataproduct_id = self.create_data_process()
        self.addCleanup(self.dataprocessclient.delete_data_process, dataprocess_id)
        self.addCleanup(self.dataprocessclient.delete_data_process_definition, dataprocessdef_id)

        # Test for provenance. Get Data product produced by the data processes
        output_data_product_id, _ = self.rrclient.find_objects(subject=dataprocess_id, object_type=RT.DataProduct, predicate=PRED.hasOutputProduct, id_only=True)

        output_data_product_provenance = self.dataproductclient.get_data_product_provenance(output_data_product_id[0])

        # Do a basic check to see if there were 3 entries in the provenance graph. Parent and Child and the
        # DataProcessDefinition creating the child from the parent.
        self.assertTrue(len(output_data_product_provenance) == 3)
        self.assertTrue(self.input_dp_id in output_data_product_provenance[output_data_product_id[0]]['parents'])
        self.assertTrue(platform_device_id in output_data_product_provenance[self.input_dp_id]['parents'])
        self.assertTrue(output_data_product_provenance[platform_device_id]['type'] == 'PlatformDevice')

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_event_transform_worker(self):
        self.data_process_objs = []
        self._output_stream_ids = []
        self.event_verified = Event()

        # test that a data process (type: data-product-in / event-out) can be defined and launched.
        # verify that event fields are correctly populated

        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name(name='ctd_parsed_param_dict', id_only=True)

        # create the StreamDefinition
        self.stream_def_id = self.pubsub_client.create_stream_definition(name='stream_def', parameter_dictionary_id=self.parameter_dict_id)
        self.addCleanup(self.pubsub_client.delete_stream_definition, self.stream_def_id)

        # create the DataProduct
        input_dp_obj = IonObject(RT.DataProduct, name='input_data_product', description='input test stream')
        self.input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj, stream_definition_id=self.stream_def_id)

        # retrieve the Stream for this data product
        stream_ids, assoc_ids = self.rrclient.find_objects(self.input_dp_id, PRED.hasStream, RT.Stream, True)
        self.stream_id = stream_ids[0]

        # create the DPD and two DPs
        self.event_data_process_id = self.create_event_data_processes()

        # retrieve subscription from data process
        subscription_objs, _ = self.rrclient.find_objects(subject=self.event_data_process_id, predicate=PRED.hasSubscription, object_type=RT.Subscription, id_only=False)
        log.debug('test_event_transform_worker subscription_obj: %s', subscription_objs[0])

        # create a queue to catch the published granules
        self.subscription_id = self.pubsub_client.create_subscription(name='parsed_subscription', stream_ids=[self.stream_id], exchange_name=subscription_objs[0].exchange_name)
        self.addCleanup(self.pubsub_client.delete_subscription, self.subscription_id)

        self.pubsub_client.activate_subscription(self.subscription_id)
        self.addCleanup(self.pubsub_client.deactivate_subscription, self.subscription_id)

        stream_route = self.pubsub_client.read_stream_route(self.stream_id)
        self.publisher = StandaloneStreamPublisher(stream_id=self.stream_id, stream_route=stream_route)

        self.start_event_transform_listener()

        self.data_modified = Event()

        rdt = RecordDictionaryTool(stream_definition_id=self.stream_def_id)
        rdt['time'] = [0]  # time should always come first
        rdt['conductivity'] = [1]
        rdt['pressure'] = [2]
        rdt['salinity'] = [8]

        self.publisher.publish(rdt.to_granule())

        self.assertTrue(self.event_verified.wait(self.wait_time))

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_bad_argument_map(self):
        self._output_stream_ids = []

        # test that a data process (type: data-product-in / data-product-out) parameter mapping it validated during
        # data process creation and that the correct exception is raised for both input and output.

        self.parameter_dict_id = self.dataset_management_client.read_parameter_dictionary_by_name(name='ctd_parsed_param_dict', id_only=True)

        # create the StreamDefinition
        self.stream_def_id = self.pubsub_client.create_stream_definition(name='stream_def', parameter_dictionary_id=self.parameter_dict_id)
        self.addCleanup(self.pubsub_client.delete_stream_definition, self.stream_def_id)

        # create the DataProduct that is the input to the data processes
        input_dp_obj = IonObject(RT.DataProduct, name='input_data_product', description='input test stream')
        self.input_dp_id = self.dataproductclient.create_data_product(data_product=input_dp_obj, stream_definition_id=self.stream_def_id)

        # two data processes using one transform and one DPD
        dp1_func_output_dp_id = self.create_output_data_product()

        # Set up DPD and DP #2 - array add function
        tf_obj = IonObject(RT.TransformFunction,
                           name='add_array_func',
                           description='adds values in an array',
                           function='add_arrays',
                           module="ion_example.add_arrays",
                           arguments=['arr1', 'arr2'],
                           function_type=TransformFunctionType.TRANSFORM,
                           uri='http://sddevrepo.oceanobservatories.org/releases/ion_example-0.1-py2.7.egg')
        add_array_func_id, rev = self.rrclient.create(tf_obj)

        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='add_arrays',
                            description='adds the values of two arrays',
                            data_process_type=DataProcessTypeEnum.TRANSFORM_PROCESS)
        add_array_dpd_id = self.dataprocessclient.create_data_process_definition(data_process_definition=dpd_obj, function_id=add_array_func_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(self.stream_def_id, add_array_dpd_id, binding='add_array_func')

        # create the data process with invalid argument map
        argument_map = {"arr1": "foo", "arr2": "bar"}
        output_param = "salinity"
        with self.assertRaises(BadRequest) as cm:
            dp1_data_process_id = self.dataprocessclient.create_data_process(data_process_definition_id=add_array_dpd_id, inputs=[self.input_dp_id], outputs=[dp1_func_output_dp_id], argument_map=argument_map, out_param_name=output_param)

        ex = cm.exception
        log.debug(' exception raised: %s', cm)
        self.assertEqual(ex.message, "Input data product does not contain the parameters defined in argument map")

        # create the data process with invalid output parameter name
        argument_map = {"arr1": "conductivity", "arr2": "pressure"}
        output_param = "foo"
        with self.assertRaises(BadRequest) as cm:
            dp1_data_process_id = self.dataprocessclient.create_data_process(data_process_definition_id=add_array_dpd_id, inputs=[self.input_dp_id], outputs=[dp1_func_output_dp_id], argument_map=argument_map, out_param_name=output_param)

        ex = cm.exception
        log.debug(' exception raised: %s', cm)
        self.assertEqual(ex.message, "Output data product does not contain the output parameter name provided")

    def create_event_data_processes(self):
        """Create the validate_salinity_array DPD and a DP with event output; returns the DP id."""
        # two data processes using one transform and one DPD
        argument_map = {"a": "salinity"}

        # set up DPD and DP #2 - array add function
        tf_obj = IonObject(RT.TransformFunction,
                           name='validate_salinity_array',
                           description='validate_salinity_array',
                           function='validate_salinity_array',
                           module="ion.processes.data.transforms.test.test_transform_worker",
                           arguments=['a'],
                           function_type=TransformFunctionType.TRANSFORM)
        add_array_func_id, rev = self.rrclient.create(tf_obj)

        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='validate_salinity_array',
                            description='validate_salinity_array',
                            data_process_type=DataProcessTypeEnum.TRANSFORM_PROCESS, )
        add_array_dpd_id = self.dataprocessclient.create_data_process_definition(data_process_definition=dpd_obj, function_id=add_array_func_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(self.stream_def_id, add_array_dpd_id, binding='validate_salinity_array')

        # create the data process
        dp1_data_process_id = self.dataprocessclient.create_data_process(data_process_definition_id=add_array_dpd_id, inputs=[self.input_dp_id], outputs=None, argument_map=argument_map)
        self.damsclient.register_process(dp1_data_process_id)
        self.addCleanup(self.dataprocessclient.delete_data_process, dp1_data_process_id)

        return dp1_data_process_id

    def create_data_process(self):
        """Create the add_arrays DPD + DP + output product; returns (dpd_id, dp_id, output_dp_id)."""
        # two data processes using one transform and one DPD
        dp1_func_output_dp_id = self.create_output_data_product()
        argument_map = {"arr1": "conductivity", "arr2": "pressure"}
        output_param = "salinity"

        # set up DPD and DP #2 - array add function
        tf_obj = IonObject(RT.TransformFunction,
                           name='add_array_func',
                           description='adds values in an array',
                           function='add_arrays',
                           module="ion_example.add_arrays",
                           arguments=['arr1', 'arr2'],
                           function_type=TransformFunctionType.TRANSFORM,
                           uri='http://sddevrepo.oceanobservatories.org/releases/ion_example-0.1-py2.7.egg')
        add_array_func_id, rev = self.rrclient.create(tf_obj)

        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='add_arrays',
                            description='adds the values of two arrays',
                            data_process_type=DataProcessTypeEnum.TRANSFORM_PROCESS,
                            version_label='1.0a')
        add_array_dpd_id = self.dataprocessclient.create_data_process_definition(data_process_definition=dpd_obj, function_id=add_array_func_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(self.stream_def_id, add_array_dpd_id, binding='add_array_func')

        # create the data process
        dp1_data_process_id = self.dataprocessclient.create_data_process(data_process_definition_id=add_array_dpd_id, inputs=[self.input_dp_id], outputs=[dp1_func_output_dp_id], argument_map=argument_map, out_param_name=output_param)
        self.damsclient.register_process(dp1_data_process_id)
        #self.addCleanup(self.dataprocessclient.delete_data_process, dp1_data_process_id)

        # add an attachment object to this DPD to test new SA-21
        import msgpack
        attachment_content = 'foo bar'
        attachment_obj = IonObject(RT.Attachment,
                                   name='test_attachment',
                                   attachment_type=AttachmentType.ASCII,
                                   content_type='text/plain',
                                   content=msgpack.packb(attachment_content))
        att_id = self.rrclient.create_attachment(add_array_dpd_id, attachment_obj)
        self.addCleanup(self.rrclient.delete_attachment, att_id)

        return add_array_dpd_id, dp1_data_process_id, dp1_func_output_dp_id

    def create_output_data_product(self):
        """Create the output data product + validating subscriber; returns the product id."""
        dp1_outgoing_stream_id = self.pubsub_client.create_stream_definition(name='dp1_stream', parameter_dictionary_id=self.parameter_dict_id)

        dp1_output_dp_obj = IonObject(RT.DataProduct, name='data_process1_data_product', description='output of add array func')

        dp1_func_output_dp_id = self.dataproductclient.create_data_product(dp1_output_dp_obj, dp1_outgoing_stream_id)
        self.addCleanup(self.dataproductclient.delete_data_product, dp1_func_output_dp_id)

        # retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger
        stream_ids, _ = self.rrclient.find_objects(dp1_func_output_dp_id, PRED.hasStream, None, True)
        self._output_stream_ids.append(stream_ids[0])

        subscription_id = self.pubsub_client.create_subscription('validator', data_product_ids=[dp1_func_output_dp_id])
        self.addCleanup(self.pubsub_client.delete_subscription, subscription_id)

        def on_granule(msg, route, stream_id):
            # validate each granule published on the output stream, then signal the test
            log.debug('recv_packet stream_id: %s route: %s msg: %s', stream_id, route, msg)
            self.validate_output_granule(msg, route, stream_id)
            self.granule_verified.set()

        validator = StandaloneStreamSubscriber('validator', callback=on_granule)
        validator.start()
        self.addCleanup(validator.stop)

        self.pubsub_client.activate_subscription(subscription_id)
        self.addCleanup(self.pubsub_client.deactivate_subscription, subscription_id)

        return dp1_func_output_dp_id

    def validate_event(self, *args, **kwargs):
        """
        Callback for receiving DataProcessStatusEvent.

        Inspects the event description and sets the matching threading Event
        (heartbeat / worker-assigned / dp-created) so the test can wait on it.
        """
        data_process_event = args[0]
        log.debug("DataProcessStatusEvent: %s", str(data_process_event.__dict__))

        # if data process already created, check origin
        if self.dp_list:
            self.assertIn(data_process_event.origin, self.dp_list)

        # if this is a heartbeat event then 100 granules have been processed
        if 'data process status update.' in data_process_event.description:
            self.heartbeat_event_verified.set()
        else:
            # else check that this is the assign event
            if 'Data process assigned to transform worker' in data_process_event.description:
                self.worker_assigned_event_verified.set()
            elif 'Data process created for data product' in data_process_event.description:
                self.dp_created_event_verified.set()

    def validate_output_granule(self, msg, route, stream_id):
        """Assert the granule came from a registered output stream and salinity == [3]."""
        self.assertIn(stream_id, self._output_stream_ids)

        rdt = RecordDictionaryTool.load_from_granule(msg)
        log.debug('validate_output_granule rdt: %s', rdt)
        sal_val = rdt['salinity']
        np.testing.assert_array_equal(sal_val, np.array([3]))

    def start_event_listener(self):
        """Subscribe to DataProcessStatusEvent for the duration of the test."""
        es = EventSubscriber(event_type=OT.DataProcessStatusEvent, callback=self.validate_event)
        es.start()
        self.addCleanup(es.stop)

    def validate_transform_event(self, *args, **kwargs):
        """
        Callback for receiving DeviceStatusAlertEvent: verifies the event
        origin and values, then signals event_verified.
        """
        status_alert_event = args[0]

        np.testing.assert_array_equal(status_alert_event.origin, self.stream_id)
        np.testing.assert_array_equal(status_alert_event.values, np.array([self.event_data_process_id]))
        log.debug("DeviceStatusAlertEvent: %s", str(status_alert_event.__dict__))

        self.event_verified.set()

    def start_event_transform_listener(self):
        """Subscribe to DeviceStatusAlertEvent for the duration of the test."""
        es = EventSubscriber(event_type=OT.DeviceStatusAlertEvent, callback=self.validate_transform_event)
        es.start()
        self.addCleanup(es.stop)

    def test_download(self):
        """Download the example egg, add it to the working set and call its function."""
        egg_url = 'http://sddevrepo.oceanobservatories.org/releases/ion_example-0.1-py2.7.egg'
        egg_path = TransformWorker.download_egg(egg_url)

        import pkg_resources
        pkg_resources.working_set.add_entry(egg_path)

        from ion_example.add_arrays import add_arrays
        a = add_arrays(1, 2)
        self.assertEquals(a, 3)
def stop_agent(self, agent_instance_id, resource_id): if not agent_instance_id or not resource_id: return res_obj = self.rr.read(resource_id) ai_obj = self.rr.read(agent_instance_id) try: client = ResourceAgentClient(resource_id, process=self) except NotFound: if self.force: log.warn("Agent for resource %s seems not running - continuing", resource_id) else: log.info("Agent for resource %s seems not running", resource_id) return log.info('Stopping agent...') if ai_obj.type_ == RT.ExternalDatasetAgentInstance: dams = DataAcquisitionManagementServiceProcessClient(process=self) try: dams.stop_external_dataset_agent_instance(agent_instance_id, headers=self._get_system_actor_headers(), timeout=self.timeout) log.info('Agent stopped!') except NotFound: log.warn("Agent for resource %s not found", resource_id) if self.autoclean: self.cleanup_agent(agent_instance_id, resource_id) except Exception: if self.autoclean: self.cleanup_agent(agent_instance_id, resource_id) raise elif ai_obj.type_ == RT.InstrumentAgentInstance: ims = InstrumentManagementServiceProcessClient(process=self) try: ims.stop_instrument_agent_instance(agent_instance_id, headers=self._get_system_actor_headers(), timeout=self.timeout) log.info('Agent stopped!') except NotFound: log.warn("Agent for resource %s not found", resource_id) if self.autoclean: self.cleanup_agent(agent_instance_id, resource_id) except Exception: if self.autoclean: self.cleanup_agent(agent_instance_id, resource_id) raise elif ai_obj.type_ == RT.PlatformAgentInstance: ims = InstrumentManagementServiceProcessClient(process=self) try: ims.stop_platform_agent_instance(agent_instance_id, headers=self._get_system_actor_headers(), timeout=self.timeout) log.info('Agent stopped!') except NotFound: log.warn("Agent for resource %s not found", resource_id) if self.autoclean: self.cleanup_agent(agent_instance_id, resource_id) except Exception: if self.autoclean: self.cleanup_agent(agent_instance_id, resource_id) raise else: BadRequest("Attempt to 
stop unsupported agent type: %s", ai_obj.type_)
dp_client.create_data_product(dp_obj, headers={'ion-actor-id': system_actor._id}) log.info('Data Products') dp_list = dp_client.find_data_products() for dp_obj in dp_list: log.debug(str(dp_obj)) results = find_data_products() log.info(results) header_roles = get_role_message_headers( org_client.find_all_roles_by_user(user._id)) ims_client = InstrumentManagementServiceProcessClient(node=container.node, process=process) try: ia_obj = IonObject(RT.InstrumentAgent, name='Instrument Agent1', description='The first Instrument Agent') ims_client.create_instrument_agent(ia_obj, headers={ 'ion-actor-id': user._id, 'ion-actor-roles': header_roles }) except Exception, e: log.info('This operation should be denied: ' + e.message) header_roles = get_role_message_headers(
class TestGovernanceInt(IonIntegrationTestCase):
    """Integration tests for governance/policy enforcement against live services.

    NOTE(review): formatting reconstructed from a collapsed source; executable
    tokens unchanged. A second class with this same name appears later in the
    file (using r2gov.yml) — the later definition shadows this one at import
    time; verify which copy is intended to run.
    """

    def setUp(self):
        # Start container
        self._start_container()

        #Load a deploy file
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        #Instantiate a process to represent the test
        process=GovernanceTestProcess()

        #Load system policies after container has started all of the services
        LoadSystemPolicy.op_load_system_policies(process)

        self.rr_client = ResourceRegistryServiceProcessClient(node=self.container.node, process=process)
        self.id_client = IdentityManagementServiceProcessClient(node=self.container.node, process=process)
        self.pol_client = PolicyManagementServiceProcessClient(node=self.container.node, process=process)
        self.org_client = OrgManagementServiceProcessClient(node=self.container.node, process=process)
        self.ims_client = InstrumentManagementServiceProcessClient(node=self.container.node, process=process)
        self.ems_client = ExchangeManagementServiceProcessClient(node=self.container.node, process=process)

        self.ion_org = self.org_client.find_org()

        self.system_actor = self.id_client.find_actor_identity_by_name(name=CFG.system.system_actor)
        log.debug('system actor:' + self.system_actor._id)

        # Message headers carrying the system actor's identity and roles,
        # used to perform privileged (manager) operations in the tests.
        sa_header_roles = get_role_message_headers(self.org_client.find_all_roles_by_user(self.system_actor._id))
        self.sa_user_header = {'ion-actor-id': self.system_actor._id, 'ion-actor-roles': sa_header_roles }

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),'Not integrated for CEI')
    def test_basic_policy(self):
        """Exercise the service-policy lifecycle: create, disable, enable, remove."""

        #Make sure that the system policies have been loaded
        policy_list,_ = self.rr_client.find_resources(restype=RT.Policy)
        self.assertNotEqual(len(policy_list),0,"The system policies have not been loaded into the Resource Registry")

        #Attempt to access an operation in service which does not have specific policies set
        es_obj = IonObject(RT.ExchangeSpace, description= 'ION test XS', name='ioncore2' )
        with self.assertRaises(Unauthorized) as cm:
            self.ems_client.create_exchange_space(es_obj)
        self.assertIn( 'exchange_management(create_exchange_space) has been denied',cm.exception.message)

        #Add a new policy to allow the the above service call.
        policy_obj = IonObject(RT.Policy, name='Exchange_Management_Test_Policy', definition_type="Service", rule=TEST_POLICY_TEXT,
            description='Allow specific operations in the Exchange Management Service for anonymous user')

        test_policy_id = self.pol_client.create_policy(policy_obj, headers=self.sa_user_header)
        self.pol_client.add_service_policy('exchange_management', test_policy_id, headers=self.sa_user_header)
        log.info('Policy created: ' + policy_obj.name)

        gevent.sleep(2)  # Wait for events to be fired and policy updated

        #The previous attempt at this operations should now be allowed.
        # (BadRequest, not Unauthorized: the call gets past policy and fails
        # on unset arguments instead.)
        es_obj = IonObject(RT.ExchangeSpace, description= 'ION test XS', name='ioncore2' )
        with self.assertRaises(BadRequest) as cm:
            self.ems_client.create_exchange_space(es_obj)
        self.assertIn( 'Arguments not set',cm.exception.message)

        #disable the test policy to try again
        self.pol_client.disable_policy(test_policy_id, headers=self.sa_user_header)

        gevent.sleep(2)  # Wait for events to be fired and policy updated

        #The same request that previously was allowed should not be denied
        es_obj = IonObject(RT.ExchangeSpace, description= 'ION test XS', name='ioncore2' )
        with self.assertRaises(Unauthorized) as cm:
            self.ems_client.create_exchange_space(es_obj)
        self.assertIn( 'exchange_management(create_exchange_space) has been denied',cm.exception.message)

        #now enable the test policy to try again
        self.pol_client.enable_policy(test_policy_id, headers=self.sa_user_header)

        gevent.sleep(2)  # Wait for events to be fired and policy updated

        #The previous attempt at this operations should now be allowed.
        es_obj = IonObject(RT.ExchangeSpace, description= 'ION test XS', name='ioncore2' )
        with self.assertRaises(BadRequest) as cm:
            self.ems_client.create_exchange_space(es_obj)
        self.assertIn( 'Arguments not set',cm.exception.message)

        self.pol_client.remove_service_policy('exchange_management', test_policy_id, headers=self.sa_user_header)
        self.pol_client.delete_policy(test_policy_id, headers=self.sa_user_header)

        gevent.sleep(2)  # Wait for events to be fired and policy updated

        #The same request that previously was allowed should not be denied
        es_obj = IonObject(RT.ExchangeSpace, description= 'ION test XS', name='ioncore2' )
        with self.assertRaises(Unauthorized) as cm:
            self.ems_client.create_exchange_space(es_obj)
        self.assertIn( 'exchange_management(create_exchange_space) has been denied',cm.exception.message)

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),'Not integrated for CEI')
    def test_org_policy(self):
        """Walk org enrollment, role-request, and resource-acquisition flows.

        Assertions depend on the exact ordering of requests created earlier in
        the method (request counts accumulate).
        """

        #Make sure that the system policies have been loaded
        policy_list,_ = self.rr_client.find_resources(restype=RT.Policy)
        self.assertNotEqual(len(policy_list),0,"The system policies have not been loaded into the Resource Registry")

        with self.assertRaises(BadRequest) as cm:
            myorg = self.org_client.read_org()
        self.assertTrue(cm.exception.message == 'The org_id parameter is missing')

        user_id, valid_until, registered = self.id_client.signon(USER1_CERTIFICATE, True)
        log.debug( "user id=" + user_id)
        user_roles = get_role_message_headers(self.org_client.find_all_roles_by_user(user_id))
        user_header = {'ion-actor-id': user_id, 'ion-actor-roles': user_roles }

        #Attempt to enroll a user anonymously - should not be allowed
        with self.assertRaises(Unauthorized) as cm:
            self.org_client.enroll_member(self.ion_org._id,user_id)
        self.assertIn( 'org_management(enroll_member) has been denied',cm.exception.message)

        #Attempt to let a user enroll themselves - should not be allowed
        with self.assertRaises(Unauthorized) as cm:
            self.org_client.enroll_member(self.ion_org._id,user_id, headers=user_header)
        self.assertIn( 'org_management(enroll_member) has been denied',cm.exception.message)

        #Attept to enroll the user in the ION Root org as a manager - should not be allowed since
        #registration with the system implies membership in the ROOT Org.
        with self.assertRaises(BadRequest) as cm:
            self.org_client.enroll_member(self.ion_org._id,user_id, headers=self.sa_user_header)
        self.assertTrue(cm.exception.message == 'A request to enroll in the root ION Org is not allowed')

        with self.assertRaises(Unauthorized) as cm:
            users = self.org_client.find_enrolled_users(self.ion_org._id)
        self.assertIn('org_management(find_enrolled_users) has been denied',cm.exception.message)

        with self.assertRaises(Unauthorized) as cm:
            users = self.org_client.find_enrolled_users(self.ion_org._id, headers=user_header)
        self.assertIn( 'org_management(find_enrolled_users) has been denied',cm.exception.message)

        users = self.org_client.find_enrolled_users(self.ion_org._id, headers=self.sa_user_header)
        self.assertEqual(len(users),2)

        ## test_org_roles and policies

        roles = self.org_client.find_org_roles(self.ion_org._id)
        self.assertEqual(len(roles),3)
        self.assertItemsEqual([r.name for r in roles], [MANAGER_ROLE, MEMBER_ROLE, ION_MANAGER])

        roles = self.org_client.find_roles_by_user(self.ion_org._id, self.system_actor._id, headers=self.sa_user_header)
        self.assertEqual(len(roles),3)
        self.assertItemsEqual([r.name for r in roles], [MEMBER_ROLE, MANAGER_ROLE, ION_MANAGER])

        roles = self.org_client.find_roles_by_user(self.ion_org._id, user_id, headers=self.sa_user_header)
        self.assertEqual(len(roles),1)
        self.assertItemsEqual([r.name for r in roles], [MEMBER_ROLE])

        with self.assertRaises(NotFound) as nf:
            org2 = self.org_client.find_org(ORG2)
        self.assertIn('The Org with name Org2 does not exist',nf.exception.message)

        org2 = IonObject(RT.Org, name=ORG2, description='A second Org')
        org2_id = self.org_client.create_org(org2, headers=self.sa_user_header)

        org2 = self.org_client.find_org(ORG2)
        self.assertEqual(org2_id, org2._id)

        roles = self.org_client.find_org_roles(org2_id)
        self.assertEqual(len(roles),2)
        self.assertItemsEqual([r.name for r in roles], [MANAGER_ROLE, MEMBER_ROLE])

        operator_role = IonObject(RT.UserRole, name=INSTRUMENT_OPERATOR,label='Instrument Operator', description='Instrument Operator')

        #First try to add the user role anonymously
        with self.assertRaises(Unauthorized) as cm:
            self.org_client.add_user_role(org2_id, operator_role)
        self.assertIn('org_management(add_user_role) has been denied',cm.exception.message)

        self.org_client.add_user_role(org2_id, operator_role, headers=self.sa_user_header)

        roles = self.org_client.find_org_roles(org2_id)
        self.assertEqual(len(roles),3)
        self.assertItemsEqual([r.name for r in roles], [MANAGER_ROLE, MEMBER_ROLE, INSTRUMENT_OPERATOR])

        # test requests for enrollments and roles.

        #First try to find user requests anonymously
        with self.assertRaises(Unauthorized) as cm:
            requests = self.org_client.find_requests(org2_id)
        self.assertIn('org_management(find_requests) has been denied',cm.exception.message)

        #Next try to find user requests as as a basic member
        with self.assertRaises(Unauthorized) as cm:
            requests = self.org_client.find_requests(org2_id, headers=user_header)
        self.assertIn('org_management(find_requests) has been denied',cm.exception.message)

        requests = self.org_client.find_requests(org2_id, headers=self.sa_user_header)
        self.assertEqual(len(requests),0)

        # First try to request a role without being a member
        with self.assertRaises(BadRequest) as cm:
            req_id = self.org_client.request_role(org2_id, user_id, INSTRUMENT_OPERATOR, headers=user_header )
        self.assertIn('A precondition for this request has not been satisfied: is_enrolled(org_id,user_id)',cm.exception.message)

        requests = self.org_client.find_requests(org2_id, headers=self.sa_user_header)
        self.assertEqual(len(requests),0)

        req_id = self.org_client.request_enroll(org2_id, user_id, headers=user_header )

        requests = self.org_client.find_requests(org2_id, headers=self.sa_user_header)
        self.assertEqual(len(requests),1)

        requests = self.org_client.find_user_requests(user_id, org2_id, headers=self.sa_user_header)
        self.assertEqual(len(requests),1)

        #User tried requesting enrollment again - this should fail
        with self.assertRaises(BadRequest) as cm:
            req_id = self.org_client.request_enroll(org2_id, user_id, headers=user_header )
        self.assertIn('A precondition for this request has not been satisfied: enroll_req_not_exist(org_id,user_id)',cm.exception.message)

        #Manager denies the request
        self.org_client.deny_request(org2_id,req_id,'To test the deny process', headers=self.sa_user_header)

        requests = self.org_client.find_requests(org2_id, headers=self.sa_user_header)
        self.assertEqual(len(requests),1)
        self.assertEqual(requests[0].status, REQUEST_DENIED)

        #Manager approves request
        self.org_client.approve_request(org2_id,req_id, headers=self.sa_user_header)

        # Approval alone does not enroll the user - they must also accept.
        users = self.org_client.find_enrolled_users(org2_id, headers=self.sa_user_header)
        self.assertEqual(len(users),0)

        #User Accepts request
        self.org_client.accept_request(org2_id,req_id, headers=user_header)

        users = self.org_client.find_enrolled_users(org2_id, headers=self.sa_user_header)
        self.assertEqual(len(users),1)

        #User tried requesting enrollment again - this should fail
        with self.assertRaises(BadRequest) as cm:
            req_id = self.org_client.request_enroll(org2_id, user_id, headers=user_header )
        self.assertIn('A precondition for this request has not been satisfied: is_not_enrolled(org_id,user_id)',cm.exception.message)

        req_id = self.org_client.request_role(org2_id, user_id, INSTRUMENT_OPERATOR, headers=user_header )

        requests = self.org_client.find_requests(org2_id, headers=self.sa_user_header)
        self.assertEqual(len(requests),2)

        requests = self.org_client.find_requests(org2_id,request_status='Open', headers=self.sa_user_header)
        self.assertEqual(len(requests),1)

        requests = self.org_client.find_user_requests(user_id, org2_id, headers=user_header)
        self.assertEqual(len(requests),2)

        requests = self.org_client.find_user_requests(user_id, org2_id, request_type=RT.RoleRequest, headers=user_header)
        self.assertEqual(len(requests),1)

        requests = self.org_client.find_user_requests(user_id, org2_id, request_status="Open", headers=user_header)
        self.assertEqual(len(requests),1)

        ia_list,_ = self.rr_client.find_resources(restype=RT.InstrumentAgent)
        self.assertEqual(len(ia_list),0)

        ia_obj = IonObject(RT.InstrumentAgent, name='Instrument Agent1', description='The first Instrument Agent')

        with self.assertRaises(Unauthorized) as cm:
            self.ims_client.create_instrument_agent(ia_obj)
        self.assertIn('instrument_management(create_instrument_agent) has been denied',cm.exception.message)

        with self.assertRaises(Unauthorized) as cm:
            self.ims_client.create_instrument_agent(ia_obj, headers=user_header)
        self.assertIn('instrument_management(create_instrument_agent) has been denied',cm.exception.message)

        #Manager approves request
        self.org_client.approve_request(org2_id,req_id, headers=self.sa_user_header)

        requests = self.org_client.find_user_requests(user_id, org2_id, request_status="Open", headers=user_header)
        self.assertEqual(len(requests),0)

        #User accepts request
        self.org_client.accept_request(org2_id, req_id, headers=user_header)

        #Refresh headers with new role
        user_roles = get_role_message_headers(self.org_client.find_all_roles_by_user(user_id))
        user_header = {'ion-actor-id': user_id, 'ion-actor-roles': user_roles }

        self.ims_client.create_instrument_agent(ia_obj, headers=user_header)

        ia_obj = IonObject(RT.InstrumentAgent, name='Instrument Agent2', description='The second Instrument Agent')
        self.ims_client.create_instrument_agent(ia_obj, headers=user_header)

        ia_list,_ = self.rr_client.find_resources(restype=RT.InstrumentAgent)
        self.assertEqual(len(ia_list),2)

        #First make a acquire resource request with an non-enrolled user.
        with self.assertRaises(BadRequest) as cm:
            req_id = self.org_client.request_acquire_resource(org2_id,self.system_actor._id,ia_list[0]._id , headers=self.sa_user_header)
        self.assertIn('A precondition for this request has not been satisfied: is_enrolled(org_id,user_id)',cm.exception.message)

        req_id = self.org_client.request_acquire_resource(org2_id,user_id,ia_list[0]._id , headers=user_header)

        requests = self.org_client.find_requests(org2_id, headers=self.sa_user_header)
        self.assertEqual(len(requests),3)

        requests = self.org_client.find_user_requests(user_id, org2_id, headers=user_header)
        self.assertEqual(len(requests),3)

        requests = self.org_client.find_user_requests(user_id, org2_id, request_type=RT.ResourceRequest, headers=user_header)
        self.assertEqual(len(requests),1)

        requests = self.org_client.find_user_requests(user_id, org2_id, request_status="Open", headers=user_header)
        self.assertEqual(len(requests),1)
        self.assertEqual(requests[0]._id, req_id)

        #Manager approves Instrument request
        self.org_client.approve_request(org2_id,req_id, headers=self.sa_user_header)

        requests = self.org_client.find_user_requests(user_id, org2_id, request_status="Open", headers=user_header)
        self.assertEqual(len(requests),0)

        #User accepts request
        self.org_client.accept_request(org2_id,req_id, headers=user_header)

        #Check commitments
        commitments, _ = self.rr_client.find_objects(ia_list[0]._id,PRED.hasCommitment, RT.ResourceCommitment)
        self.assertEqual(len(commitments),1)

        commitments, _ = self.rr_client.find_objects(user_id,PRED.hasCommitment, RT.ResourceCommitment)
        self.assertEqual(len(commitments),1)

        #Release the resource
        self.org_client.release_resource(org2_id,user_id ,ia_list[0]._id, headers=self.sa_user_header,timeout=15)  #TODO - Refactor release_resource

        #Check commitments
        commitments, _ = self.rr_client.find_objects(ia_list[0]._id,PRED.hasCommitment, RT.ResourceCommitment)
        self.assertEqual(len(commitments),0)

        commitments, _ = self.rr_client.find_objects(user_id,PRED.hasCommitment, RT.ResourceCommitment)
        self.assertEqual(len(commitments),0)
class TestGovernanceInt(IonIntegrationTestCase):
    """Older governance integration test variant (deploys r2gov.yml, FakeProcess).

    NOTE(review): formatting reconstructed from a collapsed source; executable
    tokens unchanged. This class has the same name as an earlier class in this
    file — at import time this later definition shadows the earlier one;
    presumably one of the two is meant to be removed. Verify.
    """

    def setUp(self):
        # Start container
        self._start_container()

        #Load a deploy file that also loads basic policy.
        self.container.start_rel_from_url('res/deploy/r2gov.yml')

        process=FakeProcess()

        self.rr_client = ResourceRegistryServiceProcessClient(node=self.container.node, process=process)
        self.id_client = IdentityManagementServiceProcessClient(node=self.container.node, process=process)
        self.org_client = OrgManagementServiceProcessClient(node=self.container.node, process=process)
        self.ims_client = InstrumentManagementServiceProcessClient(node=self.container.node, process=process)

        self.ion_org = self.org_client.find_org()

        self.system_actor = self.id_client.find_actor_identity_by_name(name=CFG.system.system_actor)
        log.debug('system actor:' + self.system_actor._id)

        # Privileged (manager) message headers built from the system actor.
        sa_header_roles = get_role_message_headers(self.org_client.find_all_roles_by_user(self.system_actor._id))
        self.sa_user_header = {'ion-actor-id': self.system_actor._id, 'ion-actor-roles': sa_header_roles }

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),'Not integrated for CEI')
    @unittest.skip("Not working on buildbot for some reason but works on Mac")
    def test_org_policy(self):
        """Walk org enrollment, role-request, and resource-acquisition flows.

        Note: precondition messages here use the older '== True' / '== False'
        wording, and instrument agents are listed via ims_client rather than
        the resource registry (differs from the earlier class).
        """

        with self.assertRaises(BadRequest) as cm:
            myorg = self.org_client.read_org()
        self.assertTrue(cm.exception.message == 'The org_id parameter is missing')

        user_id, valid_until, registered = self.id_client.signon(USER1_CERTIFICATE, True)
        log.debug( "user id=" + user_id)
        user_roles = get_role_message_headers(self.org_client.find_all_roles_by_user(user_id))
        user_header = {'ion-actor-id': user_id, 'ion-actor-roles': user_roles }

        #Attempt to enroll a user anonymously - should not be allowed
        with self.assertRaises(Unauthorized) as cm:
            self.org_client.enroll_member(self.ion_org._id,user_id)
        self.assertIn( 'org_management(enroll_member) has been denied',cm.exception.message)

        #Attempt to let a user enroll themselves - should not be allowed
        with self.assertRaises(Unauthorized) as cm:
            self.org_client.enroll_member(self.ion_org._id,user_id, headers=user_header)
        self.assertIn( 'org_management(enroll_member) has been denied',cm.exception.message)

        #Attept to enroll the user in the ION Root org as a manager - should not be allowed since
        #registration with the system implies membership in the ROOT Org.
        with self.assertRaises(BadRequest) as cm:
            self.org_client.enroll_member(self.ion_org._id,user_id, headers=self.sa_user_header)
        self.assertTrue(cm.exception.message == 'A request to enroll in the root ION Org is not allowed')

        with self.assertRaises(Unauthorized) as cm:
            users = self.org_client.find_enrolled_users(self.ion_org._id)
        self.assertIn('org_management(find_enrolled_users) has been denied',cm.exception.message)

        with self.assertRaises(Unauthorized) as cm:
            users = self.org_client.find_enrolled_users(self.ion_org._id, headers=user_header)
        self.assertIn( 'org_management(find_enrolled_users) has been denied',cm.exception.message)

        users = self.org_client.find_enrolled_users(self.ion_org._id, headers=self.sa_user_header)
        self.assertEqual(len(users),2)

        ## test_org_roles and policies

        roles = self.org_client.find_org_roles(self.ion_org._id)
        self.assertEqual(len(roles),3)
        self.assertItemsEqual([r.name for r in roles], [MANAGER_ROLE, MEMBER_ROLE, ION_MANAGER])

        roles = self.org_client.find_roles_by_user(self.ion_org._id, self.system_actor._id, headers=self.sa_user_header)
        self.assertEqual(len(roles),3)
        self.assertItemsEqual([r.name for r in roles], [MEMBER_ROLE, MANAGER_ROLE, ION_MANAGER])

        roles = self.org_client.find_roles_by_user(self.ion_org._id, user_id, headers=self.sa_user_header)
        self.assertEqual(len(roles),1)
        self.assertItemsEqual([r.name for r in roles], [MEMBER_ROLE])

        with self.assertRaises(NotFound) as nf:
            org2 = self.org_client.find_org(ORG2)
        self.assertIn('The Org with name Org2 does not exist',nf.exception.message)

        org2 = IonObject(RT.Org, name=ORG2, description='A second Org')
        org2_id = self.org_client.create_org(org2, headers=self.sa_user_header)

        org2 = self.org_client.find_org(ORG2)
        self.assertEqual(org2_id, org2._id)

        roles = self.org_client.find_org_roles(org2_id)
        self.assertEqual(len(roles),2)
        self.assertItemsEqual([r.name for r in roles], [MANAGER_ROLE, MEMBER_ROLE])

        operator_role = IonObject(RT.UserRole, name=INSTRUMENT_OPERATOR,label='Instrument Operator', description='Instrument Operator')

        #First try to add the user role anonymously
        with self.assertRaises(Unauthorized) as cm:
            self.org_client.add_user_role(org2_id, operator_role)
        self.assertIn('org_management(add_user_role) has been denied',cm.exception.message)

        self.org_client.add_user_role(org2_id, operator_role, headers=self.sa_user_header)

        roles = self.org_client.find_org_roles(org2_id)
        self.assertEqual(len(roles),3)
        self.assertItemsEqual([r.name for r in roles], [MANAGER_ROLE, MEMBER_ROLE, INSTRUMENT_OPERATOR])

        # test requests for enrollments and roles.

        #First try to find user requests anonymously
        with self.assertRaises(Unauthorized) as cm:
            requests = self.org_client.find_requests(org2_id)
        self.assertIn('org_management(find_requests) has been denied',cm.exception.message)

        #Next try to find user requests as as a basic member
        with self.assertRaises(Unauthorized) as cm:
            requests = self.org_client.find_requests(org2_id, headers=user_header)
        self.assertIn('org_management(find_requests) has been denied',cm.exception.message)

        requests = self.org_client.find_requests(org2_id, headers=self.sa_user_header)
        self.assertEqual(len(requests),0)

        # First try to request a role without being a member
        with self.assertRaises(BadRequest) as cm:
            req_id = self.org_client.request_role(org2_id, user_id, INSTRUMENT_OPERATOR, headers=user_header )
        self.assertIn('A precondition for this request has not been satisfied: is_enrolled(org_id,user_id) == True',cm.exception.message)

        requests = self.org_client.find_requests(org2_id, headers=self.sa_user_header)
        self.assertEqual(len(requests),0)

        req_id = self.org_client.request_enroll(org2_id, user_id, headers=user_header )

        requests = self.org_client.find_requests(org2_id, headers=self.sa_user_header)
        self.assertEqual(len(requests),1)

        requests = self.org_client.find_user_requests(user_id, org2_id, headers=self.sa_user_header)
        self.assertEqual(len(requests),1)

        #User tried requesting enrollment again - this should fail
        with self.assertRaises(BadRequest) as cm:
            req_id = self.org_client.request_enroll(org2_id, user_id, headers=user_header )
        self.assertIn('A precondition for this request has not been satisfied: enroll_req_exists(org_id,user_id) == False',cm.exception.message)

        #Manager denies the request
        self.org_client.deny_request(org2_id,req_id,'To test the deny process', headers=self.sa_user_header)

        requests = self.org_client.find_requests(org2_id, headers=self.sa_user_header)
        self.assertEqual(len(requests),1)
        self.assertEqual(requests[0].status, REQUEST_DENIED)

        #Manager approves request
        self.org_client.approve_request(org2_id,req_id, headers=self.sa_user_header)

        # Approval alone does not enroll the user - they must also accept.
        users = self.org_client.find_enrolled_users(org2_id, headers=self.sa_user_header)
        self.assertEqual(len(users),0)

        #User Accepts request
        self.org_client.accept_request(org2_id,req_id, headers=user_header)

        users = self.org_client.find_enrolled_users(org2_id, headers=self.sa_user_header)
        self.assertEqual(len(users),1)

        req_id = self.org_client.request_role(org2_id, user_id, INSTRUMENT_OPERATOR, headers=user_header )

        requests = self.org_client.find_requests(org2_id, headers=self.sa_user_header)
        self.assertEqual(len(requests),2)

        requests = self.org_client.find_requests(org2_id,request_status='Open', headers=self.sa_user_header)
        self.assertEqual(len(requests),1)

        requests = self.org_client.find_user_requests(user_id, org2_id, headers=user_header)
        self.assertEqual(len(requests),2)

        requests = self.org_client.find_user_requests(user_id, org2_id, request_type=RT.RoleRequest, headers=user_header)
        self.assertEqual(len(requests),1)

        requests = self.org_client.find_user_requests(user_id, org2_id, request_status="Open", headers=user_header)
        self.assertEqual(len(requests),1)

        ia_list = self.ims_client.find_instrument_agents()
        self.assertEqual(len(ia_list),0)

        ia_obj = IonObject(RT.InstrumentAgent, name='Instrument Agent1', description='The first Instrument Agent')

        with self.assertRaises(Unauthorized) as cm:
            self.ims_client.create_instrument_agent(ia_obj)
        self.assertIn('instrument_management(create_instrument_agent) has been denied',cm.exception.message)

        with self.assertRaises(Unauthorized) as cm:
            self.ims_client.create_instrument_agent(ia_obj, headers=user_header)
        self.assertIn('instrument_management(create_instrument_agent) has been denied',cm.exception.message)

        #Manager approves request
        self.org_client.approve_request(org2_id,req_id, headers=self.sa_user_header)

        requests = self.org_client.find_user_requests(user_id, org2_id, request_status="Open", headers=user_header)
        self.assertEqual(len(requests),0)

        #User accepts request
        self.org_client.accept_request(org2_id, req_id, headers=user_header)

        #Refresh headers with new role
        user_roles = get_role_message_headers(self.org_client.find_all_roles_by_user(user_id))
        user_header = {'ion-actor-id': user_id, 'ion-actor-roles': user_roles }

        self.ims_client.create_instrument_agent(ia_obj, headers=user_header)

        ia_obj = IonObject(RT.InstrumentAgent, name='Instrument Agent2', description='The second Instrument Agent')
        self.ims_client.create_instrument_agent(ia_obj, headers=user_header)

        ia_list = self.ims_client.find_instrument_agents()
        self.assertEqual(len(ia_list),2)

        #First make a acquire resource request with an non-enrolled user.
        with self.assertRaises(BadRequest) as cm:
            req_id = self.org_client.request_acquire_resource(org2_id,self.system_actor._id,ia_list[0]._id , headers=self.sa_user_header)
        self.assertIn('A precondition for this request has not been satisfied: is_enrolled(org_id,user_id) == True',cm.exception.message)

        req_id = self.org_client.request_acquire_resource(org2_id,user_id,ia_list[0]._id , headers=user_header)

        requests = self.org_client.find_requests(org2_id, headers=self.sa_user_header)
        self.assertEqual(len(requests),3)

        requests = self.org_client.find_user_requests(user_id, org2_id, headers=user_header)
        self.assertEqual(len(requests),3)

        requests = self.org_client.find_user_requests(user_id, org2_id, request_type=RT.ResourceRequest, headers=user_header)
        self.assertEqual(len(requests),1)

        requests = self.org_client.find_user_requests(user_id, org2_id, request_status="Open", headers=user_header)
        self.assertEqual(len(requests),1)
        self.assertEqual(requests[0]._id, req_id)

        #Manager approves Instrument request
        self.org_client.approve_request(org2_id,req_id, headers=self.sa_user_header)

        requests = self.org_client.find_user_requests(user_id, org2_id, request_status="Open", headers=user_header)
        self.assertEqual(len(requests),0)

        #User accepts request
        self.org_client.accept_request(org2_id,req_id, headers=user_header)

        #Check commitments
        commitments, _ = self.rr_client.find_objects(ia_list[0]._id,PRED.hasCommitment, RT.ResourceCommitment)
        self.assertEqual(len(commitments),1)

        commitments, _ = self.rr_client.find_objects(user_id,PRED.hasCommitment, RT.ResourceCommitment)
        self.assertEqual(len(commitments),1)

        #Release the resource
        self.org_client.release_resource(org2_id,user_id ,ia_list[0]._id, headers=self.sa_user_header)

        #Check commitments
        commitments, _ = self.rr_client.find_objects(ia_list[0]._id,PRED.hasCommitment, RT.ResourceCommitment)
        self.assertEqual(len(commitments),0)

        commitments, _ = self.rr_client.find_objects(user_id,PRED.hasCommitment, RT.ResourceCommitment)
        self.assertEqual(len(commitments),0)